@webiny/api-page-builder-import-export 0.0.0-unstable.de38392959 → 0.0.0-unstable.e0bfc55d5a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (225)
  1. package/client.d.ts +1 -1
  2. package/client.js.map +1 -1
  3. package/export/combine/blocksHandler.d.ts +2 -2
  4. package/export/combine/blocksHandler.js.map +1 -1
  5. package/export/combine/formsHandler.d.ts +2 -2
  6. package/export/combine/formsHandler.js.map +1 -1
  7. package/export/combine/index.d.ts +2 -2
  8. package/export/combine/index.js +2 -2
  9. package/export/combine/index.js.map +1 -1
  10. package/export/combine/templatesHandler.d.ts +2 -2
  11. package/export/combine/templatesHandler.js.map +1 -1
  12. package/export/pages/ExportPagesCleanup.d.ts +5 -0
  13. package/export/pages/ExportPagesCleanup.js +82 -0
  14. package/export/pages/ExportPagesCleanup.js.map +1 -0
  15. package/export/pages/ExportPagesController.d.ts +5 -0
  16. package/export/pages/ExportPagesController.js +31 -0
  17. package/export/pages/ExportPagesController.js.map +1 -0
  18. package/export/pages/ExportPagesZipPages.d.ts +5 -0
  19. package/export/pages/ExportPagesZipPages.js +23 -0
  20. package/export/pages/ExportPagesZipPages.js.map +1 -0
  21. package/export/pages/controller/CombineZippedPages.d.ts +5 -0
  22. package/export/pages/controller/CombineZippedPages.js +75 -0
  23. package/export/pages/controller/CombineZippedPages.js.map +1 -0
  24. package/export/pages/controller/CreateZipPagesTasks.d.ts +8 -0
  25. package/export/pages/controller/CreateZipPagesTasks.js +103 -0
  26. package/export/pages/controller/CreateZipPagesTasks.js.map +1 -0
  27. package/export/pages/controller/ProcessZipPagesTasks.d.ts +6 -0
  28. package/export/pages/controller/ProcessZipPagesTasks.js +61 -0
  29. package/export/pages/controller/ProcessZipPagesTasks.js.map +1 -0
  30. package/export/pages/types.d.ts +53 -0
  31. package/export/pages/types.js +26 -0
  32. package/export/pages/types.js.map +1 -0
  33. package/export/pages/utils.d.ts +2 -0
  34. package/export/pages/utils.js +13 -0
  35. package/export/pages/utils.js.map +1 -0
  36. package/export/pages/zipPages/ZipPages.d.ts +5 -0
  37. package/export/pages/zipPages/ZipPages.js +100 -0
  38. package/export/pages/zipPages/ZipPages.js.map +1 -0
  39. package/export/pages/zipPages/ZipPagesDataManager.d.ts +14 -0
  40. package/export/pages/zipPages/ZipPagesDataManager.js +46 -0
  41. package/export/pages/zipPages/ZipPagesDataManager.js.map +1 -0
  42. package/export/pages/zipPages/getPageFactory.d.ts +4 -0
  43. package/export/pages/zipPages/getPageFactory.js +38 -0
  44. package/export/pages/zipPages/getPageFactory.js.map +1 -0
  45. package/export/process/blocksHandler.d.ts +2 -2
  46. package/export/process/blocksHandler.js.map +1 -1
  47. package/export/process/exporters/BlockExporter.d.ts +2 -2
  48. package/export/process/exporters/BlockExporter.js.map +1 -1
  49. package/export/process/exporters/FormExporter.d.ts +2 -2
  50. package/export/process/exporters/FormExporter.js.map +1 -1
  51. package/export/process/exporters/PageExporter.d.ts +10 -3
  52. package/export/process/exporters/PageExporter.js +2 -1
  53. package/export/process/exporters/PageExporter.js.map +1 -1
  54. package/export/process/exporters/PageTemplateExporter.d.ts +3 -3
  55. package/export/process/exporters/PageTemplateExporter.js +3 -1
  56. package/export/process/exporters/PageTemplateExporter.js.map +1 -1
  57. package/export/process/formsHandler.d.ts +2 -2
  58. package/export/process/formsHandler.js.map +1 -1
  59. package/export/process/index.d.ts +2 -2
  60. package/export/process/index.js +2 -2
  61. package/export/process/index.js.map +1 -1
  62. package/export/process/templatesHandler.d.ts +2 -2
  63. package/export/process/templatesHandler.js.map +1 -1
  64. package/export/s3Stream.d.ts +6 -4
  65. package/export/s3Stream.js +1 -2
  66. package/export/s3Stream.js.map +1 -1
  67. package/export/utils.d.ts +4 -4
  68. package/export/utils.js +7 -9
  69. package/export/utils.js.map +1 -1
  70. package/export/zipper.d.ts +3 -2
  71. package/export/zipper.js.map +1 -1
  72. package/graphql/crud/blocks.crud.d.ts +1 -1
  73. package/graphql/crud/blocks.crud.js +1 -2
  74. package/graphql/crud/blocks.crud.js.map +1 -1
  75. package/graphql/crud/forms.crud.d.ts +1 -1
  76. package/graphql/crud/forms.crud.js +1 -2
  77. package/graphql/crud/forms.crud.js.map +1 -1
  78. package/graphql/crud/importExportTasks.crud.d.ts +2 -2
  79. package/graphql/crud/importExportTasks.crud.js +73 -58
  80. package/graphql/crud/importExportTasks.crud.js.map +1 -1
  81. package/graphql/crud/pages.crud.d.ts +1 -2
  82. package/graphql/crud/pages.crud.js +208 -125
  83. package/graphql/crud/pages.crud.js.map +1 -1
  84. package/graphql/crud/templates.crud.d.ts +1 -1
  85. package/graphql/crud/templates.crud.js +1 -2
  86. package/graphql/crud/templates.crud.js.map +1 -1
  87. package/graphql/crud.d.ts +2 -2
  88. package/graphql/crud.js.map +1 -1
  89. package/graphql/graphql/blocks.gql.d.ts +2 -2
  90. package/graphql/graphql/blocks.gql.js +4 -6
  91. package/graphql/graphql/blocks.gql.js.map +1 -1
  92. package/graphql/graphql/forms.gql.d.ts +2 -2
  93. package/graphql/graphql/forms.gql.js +4 -6
  94. package/graphql/graphql/forms.gql.js.map +1 -1
  95. package/graphql/graphql/importExportTasks.gql.d.ts +2 -2
  96. package/graphql/graphql/importExportTasks.gql.js +5 -7
  97. package/graphql/graphql/importExportTasks.gql.js.map +1 -1
  98. package/graphql/graphql/pages.gql.d.ts +2 -2
  99. package/graphql/graphql/pages.gql.js +101 -14
  100. package/graphql/graphql/pages.gql.js.map +1 -1
  101. package/graphql/graphql/templates.gql.d.ts +2 -2
  102. package/graphql/graphql/templates.gql.js +4 -6
  103. package/graphql/graphql/templates.gql.js.map +1 -1
  104. package/graphql/graphql/utils/resolve.d.ts +3 -3
  105. package/graphql/graphql/utils/resolve.js +14 -5
  106. package/graphql/graphql/utils/resolve.js.map +1 -1
  107. package/graphql/graphql.d.ts +1 -1
  108. package/graphql/graphql.js +1 -2
  109. package/graphql/graphql.js.map +1 -1
  110. package/graphql/index.d.ts +2 -2
  111. package/graphql/index.js +2 -1
  112. package/graphql/index.js.map +1 -1
  113. package/graphql/types.d.ts +61 -33
  114. package/graphql/types.js.map +1 -1
  115. package/import/constants.js +3 -6
  116. package/import/constants.js.map +1 -1
  117. package/import/create/blocksHandler.d.ts +2 -2
  118. package/import/create/blocksHandler.js.map +1 -1
  119. package/import/create/formsHandler.d.ts +2 -2
  120. package/import/create/formsHandler.js.map +1 -1
  121. package/import/create/index.d.ts +2 -2
  122. package/import/create/index.js +2 -2
  123. package/import/create/index.js.map +1 -1
  124. package/import/create/pagesHandler.d.ts +2 -2
  125. package/import/create/pagesHandler.js.map +1 -1
  126. package/import/create/templatesHandler.d.ts +2 -2
  127. package/import/create/templatesHandler.js.map +1 -1
  128. package/import/pages/ImportPagesController.d.ts +5 -0
  129. package/import/pages/ImportPagesController.js +29 -0
  130. package/import/pages/ImportPagesController.js.map +1 -0
  131. package/import/pages/ImportPagesProcessPages.d.ts +6 -0
  132. package/import/pages/ImportPagesProcessPages.js +112 -0
  133. package/import/pages/ImportPagesProcessPages.js.map +1 -0
  134. package/import/pages/controller/ImportPagesProcessPagesChecker.d.ts +6 -0
  135. package/import/pages/controller/ImportPagesProcessPagesChecker.js +40 -0
  136. package/import/pages/controller/ImportPagesProcessPagesChecker.js.map +1 -0
  137. package/import/pages/controller/ImportPagesProcessZipFile.d.ts +5 -0
  138. package/import/pages/controller/ImportPagesProcessZipFile.js +71 -0
  139. package/import/pages/controller/ImportPagesProcessZipFile.js.map +1 -0
  140. package/import/{process/pages → pages/process}/importPage.d.ts +3 -4
  141. package/import/pages/process/importPage.js.map +1 -0
  142. package/import/pages/types.d.ts +48 -0
  143. package/import/pages/types.js +20 -0
  144. package/import/pages/types.js.map +1 -0
  145. package/import/process/blocks/ElementIdsProcessor.d.ts +5 -0
  146. package/import/process/blocks/ElementIdsProcessor.js +26 -0
  147. package/import/process/blocks/ElementIdsProcessor.js.map +1 -0
  148. package/import/process/blocks/blocksHandler.d.ts +2 -2
  149. package/import/process/blocks/blocksHandler.js.map +1 -1
  150. package/import/process/blocks/importBlock.d.ts +3 -3
  151. package/import/process/blocks/importBlock.js +5 -2
  152. package/import/process/blocks/importBlock.js.map +1 -1
  153. package/import/process/forms/formsHandler.d.ts +2 -2
  154. package/import/process/forms/formsHandler.js.map +1 -1
  155. package/import/process/forms/importForm.d.ts +2 -2
  156. package/import/process/forms/importForm.js.map +1 -1
  157. package/import/process/index.d.ts +2 -2
  158. package/import/process/index.js +2 -2
  159. package/import/process/index.js.map +1 -1
  160. package/import/process/templates/importTemplate.d.ts +3 -3
  161. package/import/process/templates/importTemplate.js.map +1 -1
  162. package/import/process/templates/templatesHandler.d.ts +2 -2
  163. package/import/process/templates/templatesHandler.js +3 -1
  164. package/import/process/templates/templatesHandler.js.map +1 -1
  165. package/import/utils/deleteS3Folder.js.map +1 -1
  166. package/import/utils/extractAndUploadZipFileContents.d.ts +1 -1
  167. package/import/utils/extractAndUploadZipFileContents.js +6 -3
  168. package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
  169. package/import/utils/extractZipAndUploadToS3.d.ts +1 -1
  170. package/import/utils/extractZipAndUploadToS3.js.map +1 -1
  171. package/import/utils/getFileNameWithoutExt.js.map +1 -1
  172. package/import/utils/index.js.map +1 -1
  173. package/import/utils/initialStats.js.map +1 -1
  174. package/import/utils/prepareDataDirMap.d.ts +1 -1
  175. package/import/utils/prepareDataDirMap.js.map +1 -1
  176. package/import/utils/updateFilesInData.d.ts +1 -1
  177. package/import/utils/updateFilesInData.js.map +1 -1
  178. package/import/utils/uploadAssets.d.ts +3 -3
  179. package/import/utils/uploadAssets.js.map +1 -1
  180. package/import/utils/uploadFilesFromS3.d.ts +3 -3
  181. package/import/utils/uploadFilesFromS3.js.map +1 -1
  182. package/mockSecurity.d.ts +1 -1
  183. package/mockSecurity.js.map +1 -1
  184. package/package.json +33 -41
  185. package/tasks/common/ChildTasksCleanup.d.ts +12 -0
  186. package/tasks/common/ChildTasksCleanup.js +64 -0
  187. package/tasks/common/ChildTasksCleanup.js.map +1 -0
  188. package/tasks/index.d.ts +1 -0
  189. package/tasks/index.js +13 -0
  190. package/tasks/index.js.map +1 -0
  191. package/tasks/pages/exportPagesCleanupTask.d.ts +3 -0
  192. package/tasks/pages/exportPagesCleanupTask.js +36 -0
  193. package/tasks/pages/exportPagesCleanupTask.js.map +1 -0
  194. package/tasks/pages/exportPagesControllerTask.d.ts +3 -0
  195. package/tasks/pages/exportPagesControllerTask.js +83 -0
  196. package/tasks/pages/exportPagesControllerTask.js.map +1 -0
  197. package/tasks/pages/exportPagesZipPagesTask.d.ts +3 -0
  198. package/tasks/pages/exportPagesZipPagesTask.js +39 -0
  199. package/tasks/pages/exportPagesZipPagesTask.js.map +1 -0
  200. package/tasks/pages/importPagesControllerTask.d.ts +3 -0
  201. package/tasks/pages/importPagesControllerTask.js +39 -0
  202. package/tasks/pages/importPagesControllerTask.js.map +1 -0
  203. package/tasks/pages/importPagesProcessPageTask.d.ts +3 -0
  204. package/tasks/pages/importPagesProcessPageTask.js +39 -0
  205. package/tasks/pages/importPagesProcessPageTask.js.map +1 -0
  206. package/tasks/pages/index.d.ts +1 -0
  207. package/tasks/pages/index.js +17 -0
  208. package/tasks/pages/index.js.map +1 -0
  209. package/types.d.ts +2 -2
  210. package/types.js +2 -4
  211. package/types.js.map +1 -1
  212. package/utils/ZipFiles.d.ts +11 -0
  213. package/utils/ZipFiles.js +124 -0
  214. package/utils/ZipFiles.js.map +1 -0
  215. package/export/combine/pagesHandler.d.ts +0 -6
  216. package/export/combine/pagesHandler.js +0 -101
  217. package/export/combine/pagesHandler.js.map +0 -1
  218. package/export/process/pagesHandler.d.ts +0 -6
  219. package/export/process/pagesHandler.js +0 -191
  220. package/export/process/pagesHandler.js.map +0 -1
  221. package/import/process/pages/importPage.js.map +0 -1
  222. package/import/process/pages/pagesHandler.d.ts +0 -3
  223. package/import/process/pages/pagesHandler.js +0 -185
  224. package/import/process/pages/pagesHandler.js.map +0 -1
  225. /package/import/{process/pages → pages/process}/importPage.js +0 -0
package/client.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { PbImportExportContext } from "./graphql/types";
+ import type { PbImportExportContext } from "./graphql/types";
  export interface InvokeHandlerClientParams<TParams> {
  context: PbImportExportContext;
  name: string;
package/client.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","handlerClient","invoke","httpMethod","method","headers","await"],"sources":["client.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n const { request } = context;\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n httpMethod: request.method,\n headers: {\n [\"x-i18n-locale\"]: request.headers[\"x-i18n-locale\"],\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n }\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;AASO,eAAeA,mBAAmBA,CAAU;EAC/CC,OAAO;EACPC,IAAI;EACJC,OAAO;EACPC;AACgC,CAAC,EAAE;EACnC,MAAM;IAAEC;EAAQ,CAAC,GAAGJ,OAAO;EAC3B,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAO,CAACC,gBAAgB,CAAC,CAAC,CAACC,EAAE;;EAEtD;EACA,MAAMR,OAAO,CAACS,aAAa,CAACC,MAAM,CAAgB;IAC9CT,IAAI,EAAEA,IAAI;IACVC,OAAO,EAAE;MACL,GAAGA,OAAO;MACVS,UAAU,EAAEP,OAAO,CAACQ,MAAM;MAC1BC,OAAO,EAAE;QACL,CAAC,eAAe,GAAGT,OAAO,CAACS,OAAO,CAAC,eAAe,CAAC;QACnD,CAAC,UAAU,GAAGT,OAAO,CAACS,OAAO,CAAC,UAAU,CAAC,IAAIR;MACjD;IACJ,CAAC;IACDS,KAAK,EAAE,KAAK;IACZX;EACJ,CAAC,CAAC;AACN"}
+ {"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","handlerClient","invoke","httpMethod","method","headers","await"],"sources":["client.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n const { request } = context;\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n httpMethod: request.method,\n headers: {\n [\"x-i18n-locale\"]: request.headers[\"x-i18n-locale\"],\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n }\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;AASO,eAAeA,mBAAmBA,CAAU;EAC/CC,OAAO;EACPC,IAAI;EACJC,OAAO;EACPC;AACgC,CAAC,EAAE;EACnC,MAAM;IAAEC;EAAQ,CAAC,GAAGJ,OAAO;EAC3B,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAO,CAACC,gBAAgB,CAAC,CAAC,CAACC,EAAE;;EAEtD;EACA,MAAMR,OAAO,CAACS,aAAa,CAACC,MAAM,CAAgB;IAC9CT,IAAI,EAAEA,IAAI;IACVC,OAAO,EAAE;MACL,GAAGA,OAAO;MACVS,UAAU,EAAEP,OAAO,CAACQ,MAAM;MAC1BC,OAAO,EAAE;QACL,CAAC,eAAe,GAAGT,OAAO,CAACS,OAAO,CAAC,eAAe,CAAC;QACnD,CAAC,UAAU,GAAGT,OAAO,CAACS,OAAO,CAAC,UAAU,CAAC,IAAIR;MACjD;IACJ,CAAC;IACDS,KAAK,EAAE,KAAK;IACZX;EACJ,CAAC,CAAC;AACN","ignoreList":[]}
package/export/combine/blocksHandler.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { PbImportExportContext } from "../../types";
- import { Payload, Response } from ".";
+ import type { PbImportExportContext } from "../../types";
+ import type { Payload, Response } from "./";
  /**
  * Handles the export blocks combine workflow.
  */
package/export/combine/blocksHandler.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: await s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,aAAa,GAAG,MAAAA,CACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IAClD3B,GAAG,CAAE,wCAAuC0B,iBAAiB,CAACE,QAAS,GAAE,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,gCAA+B;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,iBAAiB,CAACL,GAAG;MAC7D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,mCAAmC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEnD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,aAAA,GAAAA,aAAA"}
+ {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport { ImportExportTaskStatus } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport type { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: await s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,aAAa,GAAG,MAAAA,CACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE,6BAA6BR,MAAM;QAChD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IAClD3B,GAAG,CAAC,wCAAwC0B,iBAAiB,CAACE,QAAQ,GAAG,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAE,gCAAgC;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,iBAAiB,CAACL,GAAG;MAC7D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAC,wBAAwBoC,kBAAkB,CAACI,MAAM,aAAa,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,mCAAmC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEnD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,aAAA,GAAAA,aAAA","ignoreList":[]}
package/export/combine/formsHandler.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { PbImportExportContext } from "../../types";
- import { Payload, Response } from ".";
+ import type { PbImportExportContext } from "../../types";
+ import type { Payload, Response } from "./";
  /**
  * Handles the export forms combine workflow.
  */
package/export/combine/formsHandler.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","formsHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportFormsDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","formExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["formsHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export forms combine workflow.\n */\nexport const formsHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Forms Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportFormsDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportFormsDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining forms.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportFormsDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_FORM_EXPORT.zip\");\n\n // Upload\n const formExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${formExportUpload.Location} `);\n\n // Update task status and save export form data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading form export.`,\n key: formExportUpload.Key,\n url: await s3Stream.getPresignedUrl(formExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_FORMS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MAC5D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,YAAA,GAAAA,YAAA"}
+ {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","formsHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportFormsDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","formExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["formsHandler.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport { ImportExportTaskStatus } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport type { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export forms combine workflow.\n */\nexport const formsHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Forms Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportFormsDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportFormsDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining forms.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportFormsDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_FORM_EXPORT.zip\");\n\n // Upload\n const formExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${formExportUpload.Location} `);\n\n // Update task status and save export form data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading form export.`,\n key: formExportUpload.Key,\n url: await s3Stream.getPresignedUrl(formExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_FORMS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE,6BAA6BR,MAAM;QAChD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAC,wCAAwC0B,gBAAgB,CAACE,QAAQ,GAAG,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAE,+BAA+B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MAC5D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAC,wBAAwBoC,kBAAkB,CAACI,MAAM,aAAa,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,YAAA,GAAAA,YAAA","ignoreList":[]}
package/export/combine/index.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { PbImportExportContext } from "../../types";
- import { SecurityIdentity } from "@webiny/api-security/types";
+ import type { PbImportExportContext } from "../../types";
+ import type { SecurityIdentity } from "@webiny/api-security/types";
  export interface Payload {
  taskId: string;
  type: string;
package/export/combine/index.js CHANGED
@@ -7,7 +7,6 @@ exports.default = void 0;
  var _handlerAws = require("@webiny/handler-aws");
  var _blocksHandler = require("./blocksHandler");
  var _formsHandler = require("./formsHandler");
- var _pagesHandler = require("./pagesHandler");
  var _templatesHandler = require("./templatesHandler");
  /**
  * Handles the export pages combine workflow.
@@ -33,7 +32,8 @@ var _default = () => {
  }
  default:
  {
- return (0, _pagesHandler.pagesHandler)(payload, context);
+ console.log("Export PB combine", JSON.stringify(payload));
+ throw new Error("Invalid type provided: pb combine.");
  }
  }
  });
package/export/combine/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["_handlerAws","require","_blocksHandler","_formsHandler","_pagesHandler","_templatesHandler","_default","createRawEventHandler","payload","context","security","withoutAuthorization","type","blocksHandler","formsHandler","templatesHandler","pagesHandler","exports","default"],"sources":["index.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/types\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { blocksHandler } from \"~/export/combine/blocksHandler\";\nimport { formsHandler } from \"~/export/combine/formsHandler\";\nimport { pagesHandler } from \"~/export/combine/pagesHandler\";\nimport { templatesHandler } from \"~/export/combine/templatesHandler\";\n\nexport interface Payload {\n taskId: string;\n type: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbImportExportContext, Response>(\n async ({ payload, context }) => {\n return context.security.withoutAuthorization(() => {\n switch (payload.type) {\n case \"block\": {\n return blocksHandler(payload, context);\n }\n case \"form\": {\n return formsHandler(payload, context);\n }\n case \"template\": {\n return templatesHandler(payload, context);\n }\n default: {\n return pagesHandler(payload, context);\n }\n }\n });\n }\n );\n};\n"],"mappings":";;;;;;AAEA,IAAAA,WAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,aAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AACA,IAAAI,iBAAA,GAAAJ,OAAA;AAaA;AACA;AACA;AAFA,IAAAK,QAAA,GAGeA,CAAA,KAAM;EACjB,OAAO,IAAAC,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAQ,CAAC,KAAK;IAC5B,OAAOA,OAAO,CAACC,QAAQ,CAACC,oBAAoB,CAAC,MAAM;MAC/C,QAAQH,OAAO,CAACI,IAAI;QAChB,KAAK,OAAO;UAAE;YACV,OAAO,IAAAC,4BAAa,EAACL,OAAO,EAAEC,OAAO,CAAC;UAC1C;QACA,KAAK,MAAM;UAAE;YACT,OAAO,IAAAK,0BAAY,EAACN,OAAO,EAAEC,OAAO,CAAC;UACzC;QACA,KAAK,UAAU;UAAE;YACb,OAAO,IAAAM,kCAAgB,EAACP,OAAO,EAAEC,OAAO,CAAC;UAC7C;QACA;UAAS;YACL,OAAO,IAAAO,0BAAY,EAACR,OAAO,EAAEC,OAAO,CAAC;UACzC;MACJ;IACJ,CAAC,CAAC;EACN,CACJ,CAAC;AACL,CAAC;AAAAQ,OAAA,CAAAC,OAAA,GAAAZ,QAAA"}
+ {"version":3,"names":["_handlerAws","require","_blocksHandler","_formsHandler","_templatesHandler","_default","createRawEventHandler","payload","context","security","withoutAuthorization","type","blocksHandler","formsHandler","templatesHandler","console","log","JSON","stringify","Error","exports","default"],"sources":["index.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport type { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { blocksHandler } from \"~/export/combine/blocksHandler\";\nimport { formsHandler } from \"~/export/combine/formsHandler\";\nimport { templatesHandler } from \"~/export/combine/templatesHandler\";\n\nexport interface Payload {\n taskId: string;\n type: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbImportExportContext, Response>(\n async ({ payload, context }) => {\n return context.security.withoutAuthorization(() => {\n switch (payload.type) {\n case \"block\": {\n return blocksHandler(payload, context);\n }\n case \"form\": {\n return formsHandler(payload, context);\n }\n case \"template\": {\n return templatesHandler(payload, context);\n }\n default: {\n console.log(\"Export PB combine\", JSON.stringify(payload));\n throw new Error(\"Invalid type provided: pb combine.\");\n }\n }\n });\n }\n );\n};\n"],"mappings":";;;;;;AAEA,IAAAA,WAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,aAAA,GAAAF,OAAA;AACA,IAAAG,iBAAA,GAAAH,OAAA;AAaA;AACA;AACA;AAFA,IAAAI,QAAA,GAGeA,CAAA,KAAM;EACjB,OAAO,IAAAC,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAQ,CAAC,KAAK;IAC5B,OAAOA,OAAO,CAACC,QAAQ,CAACC,oBAAoB,CAAC,MAAM;MAC/C,QAAQH,OAAO,CAACI,IAAI;QAChB,KAAK,OAAO;UAAE;YACV,OAAO,IAAAC,4BAAa,EAACL,OAAO,EAAEC,OAAO,CAAC;UAC1C;QACA,KAAK,MAAM;UAAE;YACT,OAAO,IAAAK,0BAAY,EAACN,OAAO,EAAEC,OAAO,CAAC;UACzC;QACA,KAAK,UAAU;UAAE;YACb,OAAO,IAAAM,kCAAgB,EAACP,OAAO,EAAEC,OAAO,CAAC;UAC7C;QACA;UAAS;YACLO,OAAO,CAACC,GAAG,CAAC,mBAAmB,EAAEC,IAAI,CAACC,SAAS,CAACX,OAAO,CAAC,CAAC;YACzD,MAAM,IAAIY,KAAK,CAAC,oCAAoC,CAAC;UACzD;MACJ;IACJ,CAAC,CAAC;EACN,CACJ,CAAC;AACL,CAAC;AAAAC,OAAA,CAAAC,OAAA,GAAAhB,QAAA","ignoreList":[]}
package/export/combine/templatesHandler.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { PbImportExportContext } from "../../types";
- import { Payload, Response } from ".";
+ import type { PbImportExportContext } from "../../types";
+ import type { Payload, Response } from "./";
  /**
  * Handles the export templates combine workflow.
  */
package/export/combine/templatesHandler.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: await s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,gBAAgB,GAAG,MAAAA,CAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACrD3B,GAAG,CAAE,wCAAuC0B,oBAAoB,CAACE,QAAS,GAAE,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,mCAAkC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,oBAAoB,CAACL,GAAG;MAChE;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,sCAAsC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEtD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,gBAAA,GAAAA,gBAAA"}
+ {"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["templatesHandler.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport { ImportExportTaskStatus } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport type { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: await s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,gBAAgB,GAAG,MAAAA,CAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE,6BAA6BR,MAAM;QAChD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACrD3B,GAAG,CAAC,wCAAwC0B,oBAAoB,CAACE,QAAQ,GAAG,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAE,mCAAmC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,oBAAoB,CAACL,GAAG;MAChE;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAC,wBAAwBoC,kBAAkB,CAACI,MAAM,aAAa,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,sCAAsC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEtD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,gBAAA,GAAAA,gBAAA","ignoreList":[]}
package/export/pages/ExportPagesCleanup.d.ts ADDED
@@ -0,0 +1,5 @@
+ import type { IExportPagesCleanupTaskParams } from "./types";
+ import type { ITaskResponseResult } from "@webiny/tasks";
+ export declare class ExportPagesCleanup {
+ execute(params: IExportPagesCleanupTaskParams): Promise<ITaskResponseResult>;
+ }
@@ -0,0 +1,82 @@
+ "use strict";
+ 
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ exports.ExportPagesCleanup = void 0;
+ var _types = require("./types");
+ var _clientS = require("@webiny/aws-sdk/client-s3");
+ var _chunk = _interopRequireDefault(require("lodash/chunk"));
+ class ExportPagesCleanup {
+     async execute(params) {
+         const {
+             context,
+             response,
+             store
+         } = params;
+         /**
+          * This task must have a parent one. If it does not, just end with error.
+          * This should not happen as we trigger this task from a parent one, not directly.
+          *
+          * But let's just make sure.
+          */
+         const task = store.getTask();
+         if (!task.parentId) {
+             return response.error(`Missing task "${task.id}" parent id.`);
+         }
+         const parent = await context.tasks.getTask(task.parentId);
+         if (!parent) {
+             return response.error(`Missing parent task "${task.parentId}" in the database.`);
+         }
+         /**
+          * We need to find all the tasks that created zip files, so we can have a list of files to delete.
+          */
+         const {
+             items: subTasks
+         } = await context.tasks.listTasks({
+             where: {
+                 parentId: parent.id,
+                 definitionId: _types.PageExportTask.ZipPages
+             },
+             limit: 10000
+         });
+         if (subTasks.length === 0) {
+             return response.done("No subtasks found - nothing to cleanup.");
+         }
+         const files = subTasks.reduce((collection, subTask) => {
+             const done = subTask.output?.done;
+             if (!done) {
+                 return collection;
+             }
+             const results = Object.values(done).filter(Boolean);
+             collection.push(...results);
+             return collection;
+         }, []);
+         const s3 = (0, _clientS.createS3)({
+             region: process.env.AWS_REGION
+         });
+         const parentKey = parent.output?.key;
+         if (typeof parentKey === "string") {
+             await s3.deleteObject({
+                 Bucket: process.env.S3_BUCKET,
+                 Key: parentKey
+             });
+         }
+         const chunks = (0, _chunk.default)(files, 500);
+         for (const chunk of chunks) {
+             await s3.deleteObjects({
+                 Bucket: process.env.S3_BUCKET,
+                 Delete: {
+                     Objects: chunk.map(Key => ({
+                         Key
+                     }))
+                 }
+             });
+         }
+         return response.done("Done with cleanup!");
+     }
+ }
+ exports.ExportPagesCleanup = ExportPagesCleanup;
+ 
+ //# sourceMappingURL=ExportPagesCleanup.js.map
@@ -0,0 +1 @@
+ {"version":3,"names":["_types","require","_clientS","_chunk","_interopRequireDefault","ExportPagesCleanup","execute","params","context","response","store","task","getTask","parentId","error","id","parent","tasks","items","subTasks","listTasks","where","definitionId","PageExportTask","ZipPages","limit","length","done","files","reduce","collection","subTask","output","results","Object","values","filter","Boolean","push","s3","createS3","region","process","env","AWS_REGION","parentKey","key","deleteObject","Bucket","S3_BUCKET","Key","chunks","lodashChunk","chunk","deleteObjects","Delete","Objects","map","exports"],"sources":["ExportPagesCleanup.ts"],"sourcesContent":["import type {\n IExportPagesCleanupTaskParams,\n IExportPagesControllerOutput,\n IExportPagesZipPagesOutput\n} from \"~/export/pages/types\";\nimport { PageExportTask } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks\";\nimport { createS3 } from \"@webiny/aws-sdk/client-s3\";\nimport lodashChunk from \"lodash/chunk\";\n\nexport class ExportPagesCleanup {\n public async execute(params: IExportPagesCleanupTaskParams): Promise<ITaskResponseResult> {\n const { context, response, store } = params;\n /**\n * This task must have a parent one. If it does not, just end with error.\n * This should not happen as we trigger this task from a parent one, not directly.\n *\n * But let's just make sure.\n */\n const task = store.getTask();\n if (!task.parentId) {\n return response.error(`Missing task \"${task.id}\" parent id.`);\n }\n\n const parent = await context.tasks.getTask<IExportPagesControllerOutput>(task.parentId);\n if (!parent) {\n return response.error(`Missing parent task \"${task.parentId}\" in the database.`);\n }\n /**\n * We need to find all the tasks that created zip files, so we can have a list of files to delete.\n */\n const { items: subTasks } = await context.tasks.listTasks<any, IExportPagesZipPagesOutput>({\n where: {\n parentId: parent.id,\n definitionId: PageExportTask.ZipPages\n },\n limit: 10000\n });\n if (subTasks.length === 0) {\n return response.done(\"No subtasks found - nothing to cleanup.\");\n }\n\n const files = subTasks.reduce<string[]>((collection, subTask) => {\n const done = subTask.output?.done;\n if (!done) {\n return collection;\n }\n const results = Object.values(done).filter(Boolean);\n collection.push(...results);\n return collection;\n }, []);\n\n const s3 = createS3({\n region: process.env.AWS_REGION\n });\n\n const parentKey = parent.output?.key;\n if (typeof parentKey === \"string\") {\n await s3.deleteObject({\n Bucket: process.env.S3_BUCKET,\n Key: parentKey\n });\n }\n\n const chunks = lodashChunk(files, 500);\n\n for (const chunk of chunks) {\n await s3.deleteObjects({\n Bucket: process.env.S3_BUCKET,\n Delete: {\n Objects: chunk.map(Key => ({ Key }))\n }\n });\n }\n\n return response.done(\"Done with cleanup!\");\n 
}\n}\n"],"mappings":";;;;;;;AAKA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,MAAA,GAAAC,sBAAA,CAAAH,OAAA;AAEO,MAAMI,kBAAkB,CAAC;EAC5B,MAAaC,OAAOA,CAACC,MAAqC,EAAgC;IACtF,MAAM;MAAEC,OAAO;MAAEC,QAAQ;MAAEC;IAAM,CAAC,GAAGH,MAAM;IAC3C;AACR;AACA;AACA;AACA;AACA;IACQ,MAAMI,IAAI,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC;IAC5B,IAAI,CAACD,IAAI,CAACE,QAAQ,EAAE;MAChB,OAAOJ,QAAQ,CAACK,KAAK,CAAC,iBAAiBH,IAAI,CAACI,EAAE,cAAc,CAAC;IACjE;IAEA,MAAMC,MAAM,GAAG,MAAMR,OAAO,CAACS,KAAK,CAACL,OAAO,CAA+BD,IAAI,CAACE,QAAQ,CAAC;IACvF,IAAI,CAACG,MAAM,EAAE;MACT,OAAOP,QAAQ,CAACK,KAAK,CAAC,wBAAwBH,IAAI,CAACE,QAAQ,oBAAoB,CAAC;IACpF;IACA;AACR;AACA;IACQ,MAAM;MAAEK,KAAK,EAAEC;IAAS,CAAC,GAAG,MAAMX,OAAO,CAACS,KAAK,CAACG,SAAS,CAAkC;MACvFC,KAAK,EAAE;QACHR,QAAQ,EAAEG,MAAM,CAACD,EAAE;QACnBO,YAAY,EAAEC,qBAAc,CAACC;MACjC,CAAC;MACDC,KAAK,EAAE;IACX,CAAC,CAAC;IACF,IAAIN,QAAQ,CAACO,MAAM,KAAK,CAAC,EAAE;MACvB,OAAOjB,QAAQ,CAACkB,IAAI,CAAC,yCAAyC,CAAC;IACnE;IAEA,MAAMC,KAAK,GAAGT,QAAQ,CAACU,MAAM,CAAW,CAACC,UAAU,EAAEC,OAAO,KAAK;MAC7D,MAAMJ,IAAI,GAAGI,OAAO,CAACC,MAAM,EAAEL,IAAI;MACjC,IAAI,CAACA,IAAI,EAAE;QACP,OAAOG,UAAU;MACrB;MACA,MAAMG,OAAO,GAAGC,MAAM,CAACC,MAAM,CAACR,IAAI,CAAC,CAACS,MAAM,CAACC,OAAO,CAAC;MACnDP,UAAU,CAACQ,IAAI,CAAC,GAAGL,OAAO,CAAC;MAC3B,OAAOH,UAAU;IACrB,CAAC,EAAE,EAAE,CAAC;IAEN,MAAMS,EAAE,GAAG,IAAAC,iBAAQ,EAAC;MAChBC,MAAM,EAAEC,OAAO,CAACC,GAAG,CAACC;IACxB,CAAC,CAAC;IAEF,MAAMC,SAAS,GAAG7B,MAAM,CAACgB,MAAM,EAAEc,GAAG;IACpC,IAAI,OAAOD,SAAS,KAAK,QAAQ,EAAE;MAC/B,MAAMN,EAAE,CAACQ,YAAY,CAAC;QAClBC,MAAM,EAAEN,OAAO,CAACC,GAAG,CAACM,SAAS;QAC7BC,GAAG,EAAEL;MACT,CAAC,CAAC;IACN;IAEA,MAAMM,MAAM,GAAG,IAAAC,cAAW,EAACxB,KAAK,EAAE,GAAG,CAAC;IAEtC,KAAK,MAAMyB,KAAK,IAAIF,MAAM,EAAE;MACxB,MAAMZ,EAAE,CAACe,aAAa,CAAC;QACnBN,MAAM,EAAEN,OAAO,CAACC,GAAG,CAACM,SAAS;QAC7BM,MAAM,EAAE;UACJC,OAAO,EAAEH,KAAK,CAACI,GAAG,CAACP,GAAG,KAAK;YAAEA;UAAI,CAAC,CAAC;QACvC;MACJ,CAAC,CAAC;IACN;IAEA,OAAOzC,QAAQ,CAACkB,IAAI,CAAC,oBAAoB,CAAC;EAC9C;AACJ;AAAC+B,OAAA,CAAArD,kBAAA,GAAAA,kBAAA","ignoreList":[]}
@@ -0,0 +1,5 @@
+ import type { IExportPagesControllerTaskParams } from "./types";
+ import type { ITaskResponseResult } from "@webiny/tasks";
+ export declare class ExportPagesController {
+     execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult>;
+ }
@@ -0,0 +1,31 @@
+ "use strict";
+ 
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ exports.ExportPagesController = void 0;
+ var _ProcessZipPagesTasks = require("./controller/ProcessZipPagesTasks");
+ var _CreateZipPagesTasks = require("./controller/CreateZipPagesTasks");
+ class ExportPagesController {
+     async execute(params) {
+         const {
+             input
+         } = params;
+         /**
+          * In case subtasks for zipping pages are already created, we need to wait for them to finish.
+          * After they are done, we can combine all zip files into a single one.
+          */
+         if (input.zippingPages) {
+             const processZipPagesTasks = new _ProcessZipPagesTasks.ProcessZipPagesTasks();
+             return await processZipPagesTasks.execute(params);
+         }
+         /**
+          * On the first run of the task, we need to create subtasks for zipping pages in batches.
+          */
+         const createZipPagesTasks = new _CreateZipPagesTasks.CreateZipPagesTasks();
+         return await createZipPagesTasks.execute(params);
+     }
+ }
+ exports.ExportPagesController = ExportPagesController;
+ 
+ //# sourceMappingURL=ExportPagesController.js.map
@@ -0,0 +1 @@
+ {"version":3,"names":["_ProcessZipPagesTasks","require","_CreateZipPagesTasks","ExportPagesController","execute","params","input","zippingPages","processZipPagesTasks","ProcessZipPagesTasks","createZipPagesTasks","CreateZipPagesTasks","exports"],"sources":["ExportPagesController.ts"],"sourcesContent":["import type { IExportPagesControllerTaskParams } from \"./types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks\";\nimport { ProcessZipPagesTasks } from \"./controller/ProcessZipPagesTasks\";\nimport { CreateZipPagesTasks } from \"./controller/CreateZipPagesTasks\";\n\nexport class ExportPagesController {\n public async execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult> {\n const { input } = params;\n /**\n * In case subtasks for zipping pages are already created, we need to wait for them to finish.\n * After they are done, we can combine all zip files into a single one.\n */\n if (input.zippingPages) {\n const processZipPagesTasks = new ProcessZipPagesTasks();\n return await processZipPagesTasks.execute(params);\n }\n /**\n * On the first run of the task, we need to create subtasks for zipping pages in batches.\n */\n const createZipPagesTasks = new CreateZipPagesTasks();\n return await createZipPagesTasks.execute(params);\n }\n}\n"],"mappings":";;;;;;AAEA,IAAAA,qBAAA,GAAAC,OAAA;AACA,IAAAC,oBAAA,GAAAD,OAAA;AAEO,MAAME,qBAAqB,CAAC;EAC/B,MAAaC,OAAOA,CAACC,MAAwC,EAAgC;IACzF,MAAM;MAAEC;IAAM,CAAC,GAAGD,MAAM;IACxB;AACR;AACA;AACA;IACQ,IAAIC,KAAK,CAACC,YAAY,EAAE;MACpB,MAAMC,oBAAoB,GAAG,IAAIC,0CAAoB,CAAC,CAAC;MACvD,OAAO,MAAMD,oBAAoB,CAACJ,OAAO,CAACC,MAAM,CAAC;IACrD;IACA;AACR;AACA;IACQ,MAAMK,mBAAmB,GAAG,IAAIC,wCAAmB,CAAC,CAAC;IACrD,OAAO,MAAMD,mBAAmB,CAACN,OAAO,CAACC,MAAM,CAAC;EACpD;AACJ;AAACO,OAAA,CAAAT,qBAAA,GAAAA,qBAAA","ignoreList":[]}
@@ -0,0 +1,5 @@
+ import type { IExportPagesZipPagesTaskParams } from "./types";
+ import type { ITaskResponseResult } from "@webiny/tasks";
+ export declare class ExportPagesZipPages {
+     execute(params: IExportPagesZipPagesTaskParams): Promise<ITaskResponseResult>;
+ }
@@ -0,0 +1,23 @@
+ "use strict";
+ 
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ exports.ExportPagesZipPages = void 0;
+ var _ZipPages = require("./zipPages/ZipPages");
+ class ExportPagesZipPages {
+     async execute(params) {
+         const {
+             isAborted,
+             response
+         } = params;
+         if (isAborted()) {
+             return response.aborted();
+         }
+         const zipPages = new _ZipPages.ZipPages();
+         return await zipPages.execute(params);
+     }
+ }
+ exports.ExportPagesZipPages = ExportPagesZipPages;
+ 
+ //# sourceMappingURL=ExportPagesZipPages.js.map
@@ -0,0 +1 @@
+ {"version":3,"names":["_ZipPages","require","ExportPagesZipPages","execute","params","isAborted","response","aborted","zipPages","ZipPages","exports"],"sources":["ExportPagesZipPages.ts"],"sourcesContent":["import type { IExportPagesZipPagesTaskParams } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks\";\nimport { ZipPages } from \"./zipPages/ZipPages\";\n\nexport class ExportPagesZipPages {\n public async execute(params: IExportPagesZipPagesTaskParams): Promise<ITaskResponseResult> {\n const { isAborted, response } = params;\n if (isAborted()) {\n return response.aborted();\n }\n\n const zipPages = new ZipPages();\n return await zipPages.execute(params);\n }\n}\n"],"mappings":";;;;;;AAEA,IAAAA,SAAA,GAAAC,OAAA;AAEO,MAAMC,mBAAmB,CAAC;EAC7B,MAAaC,OAAOA,CAACC,MAAsC,EAAgC;IACvF,MAAM;MAAEC,SAAS;MAAEC;IAAS,CAAC,GAAGF,MAAM;IACtC,IAAIC,SAAS,CAAC,CAAC,EAAE;MACb,OAAOC,QAAQ,CAACC,OAAO,CAAC,CAAC;IAC7B;IAEA,MAAMC,QAAQ,GAAG,IAAIC,kBAAQ,CAAC,CAAC;IAC/B,OAAO,MAAMD,QAAQ,CAACL,OAAO,CAACC,MAAM,CAAC;EACzC;AACJ;AAACM,OAAA,CAAAR,mBAAA,GAAAA,mBAAA","ignoreList":[]}
@@ -0,0 +1,5 @@
+ import type { IExportPagesCombineZippedPagesParams } from "../types";
+ import type { ITaskResponseResult } from "@webiny/tasks";
+ export declare class CombineZippedPages {
+     execute(params: IExportPagesCombineZippedPagesParams): Promise<ITaskResponseResult>;
+ }
@@ -0,0 +1,75 @@
+ "use strict";
+ 
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ exports.CombineZippedPages = void 0;
+ var _s3Stream = require("../../s3Stream");
+ var _utils = require("../utils");
+ var _ZipFiles = require("../../../utils/ZipFiles");
+ var _uniqid = _interopRequireDefault(require("uniqid"));
+ class CombineZippedPages {
+     async execute(params) {
+         const {
+             response,
+             store
+         } = params;
+         /**
+          * We need to get all the subtasks of the PageExportTask.ZipPages type, so we can get all the zip files and combine them into one.
+          * Current task must have a parent for this to work.
+          */
+         const taskId = store.getTask().id;
+ 
+         /**
+          * When we have all the pages IDs and their zip files, we can continue to combine the zip files into one.
+          */
+         const exportPagesDataKey = (0, _utils.createExportPagesDataKey)(taskId);
+         let listObjectResponse;
+         try {
+             listObjectResponse = await _s3Stream.s3Stream.listObject(exportPagesDataKey);
+             if (!Array.isArray(listObjectResponse.Contents)) {
+                 return response.error({
+                     message: "There is no Contents defined on S3 Stream while combining pages."
+                 });
+             } else if (listObjectResponse.Contents.length === 0) {
+                 return response.done("No zip files to combine.");
+             }
+         } catch (ex) {
+             return response.error(ex);
+         }
+         const zipFileKeys = listObjectResponse.Contents.reduce((files, file) => {
+             if (!file.Key) {
+                 return files;
+             } else if (file.Key === exportPagesDataKey) {
+                 return files;
+             }
+             files.push(file.Key);
+             return files;
+         }, []);
+         let key;
+         try {
+             const zipOfZip = new _ZipFiles.ZipFiles();
+             const target = (0, _uniqid.default)("EXPORTS/", "-WEBINY_PAGE_EXPORT.zip");
+             const pageExportUpload = await zipOfZip.process(target, zipFileKeys);
+             if (!pageExportUpload?.Key) {
+                 return response.error({
+                     message: "There is no Key defined on pageExportUpload."
+                 });
+             }
+             key = pageExportUpload.Key;
+         } catch (ex) {
+             console.error(`Error while combining zip files into a single zip: ${ex.message}`);
+             console.log(ex);
+             return response.error(ex);
+         }
+         const url = await _s3Stream.s3Stream.getPresignedUrl(key);
+         return response.done("Done combining pages.", {
+             key,
+             url
+         });
+     }
+ }
+ exports.CombineZippedPages = CombineZippedPages;
+ 
+ //# sourceMappingURL=CombineZippedPages.js.map
@@ -0,0 +1 @@
+ {"version":3,"names":["_s3Stream","require","_utils","_ZipFiles","_uniqid","_interopRequireDefault","CombineZippedPages","execute","params","response","store","taskId","getTask","id","exportPagesDataKey","createExportPagesDataKey","listObjectResponse","s3Stream","listObject","Array","isArray","Contents","error","message","length","done","ex","zipFileKeys","reduce","files","file","Key","push","key","zipOfZip","ZipFiles","target","uniqueId","pageExportUpload","process","console","log","url","getPresignedUrl","exports"],"sources":["CombineZippedPages.ts"],"sourcesContent":["import type { IExportPagesCombineZippedPagesParams } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks\";\nimport type { ListObjectsOutput } from \"~/export/s3Stream\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { createExportPagesDataKey } from \"~/export/pages/utils\";\nimport { ZipFiles } from \"~/utils/ZipFiles\";\nimport uniqueId from \"uniqid\";\n\nexport class CombineZippedPages {\n public async execute(\n params: IExportPagesCombineZippedPagesParams\n ): Promise<ITaskResponseResult> {\n const { response, store } = params;\n /**\n * We need to get all the subtasks of the PageExportTask.ZipPages type, so we can get all the zip files and combine them into one.\n * Current task must have a parent for this to work.\n */\n const taskId = store.getTask().id;\n\n /**\n * When we have all the pages IDs and their zip files, we can continue to combine the zip files into one.\n */\n const exportPagesDataKey = createExportPagesDataKey(taskId);\n\n let listObjectResponse: ListObjectsOutput;\n try {\n listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!Array.isArray(listObjectResponse.Contents)) {\n return response.error({\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n });\n } else if (listObjectResponse.Contents.length === 0) {\n return response.done(\"No zip files to combine.\");\n }\n } catch (ex) {\n return response.error(ex);\n }\n\n const zipFileKeys = listObjectResponse.Contents.reduce<string[]>((files, file) => {\n if (!file.Key) {\n return files;\n } else if (file.Key === exportPagesDataKey) {\n return files;\n }\n files.push(file.Key);\n\n return files;\n }, []);\n\n let key: string;\n\n try {\n const zipOfZip = new ZipFiles();\n const target = uniqueId(\"EXPORTS/\", \"-WEBINY_PAGE_EXPORT.zip\");\n const pageExportUpload = await zipOfZip.process(target, zipFileKeys);\n\n if (!pageExportUpload?.Key) {\n return response.error({\n message: \"There is no Key defined on pageExportUpload.\"\n });\n }\n key = pageExportUpload.Key;\n } catch (ex) {\n console.error(`Error while combining zip files into a single zip: ${ex.message}`);\n console.log(ex);\n return response.error(ex);\n }\n\n const url = await s3Stream.getPresignedUrl(key);\n\n return response.done(\"Done combining pages.\", {\n key,\n url\n });\n 
}\n}\n"],"mappings":";;;;;;;AAGA,IAAAA,SAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,SAAA,GAAAF,OAAA;AACA,IAAAG,OAAA,GAAAC,sBAAA,CAAAJ,OAAA;AAEO,MAAMK,kBAAkB,CAAC;EAC5B,MAAaC,OAAOA,CAChBC,MAA4C,EAChB;IAC5B,MAAM;MAAEC,QAAQ;MAAEC;IAAM,CAAC,GAAGF,MAAM;IAClC;AACR;AACA;AACA;IACQ,MAAMG,MAAM,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC,CAACC,EAAE;;IAEjC;AACR;AACA;IACQ,MAAMC,kBAAkB,GAAG,IAAAC,+BAAwB,EAACJ,MAAM,CAAC;IAE3D,IAAIK,kBAAqC;IACzC,IAAI;MACAA,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;MAClE,IAAI,CAACK,KAAK,CAACC,OAAO,CAACJ,kBAAkB,CAACK,QAAQ,CAAC,EAAE;QAC7C,OAAOZ,QAAQ,CAACa,KAAK,CAAC;UAClBC,OAAO,EAAE;QACb,CAAC,CAAC;MACN,CAAC,MAAM,IAAIP,kBAAkB,CAACK,QAAQ,CAACG,MAAM,KAAK,CAAC,EAAE;QACjD,OAAOf,QAAQ,CAACgB,IAAI,CAAC,0BAA0B,CAAC;MACpD;IACJ,CAAC,CAAC,OAAOC,EAAE,EAAE;MACT,OAAOjB,QAAQ,CAACa,KAAK,CAACI,EAAE,CAAC;IAC7B;IAEA,MAAMC,WAAW,GAAGX,kBAAkB,CAACK,QAAQ,CAACO,MAAM,CAAW,CAACC,KAAK,EAAEC,IAAI,KAAK;MAC9E,IAAI,CAACA,IAAI,CAACC,GAAG,EAAE;QACX,OAAOF,KAAK;MAChB,CAAC,MAAM,IAAIC,IAAI,CAACC,GAAG,KAAKjB,kBAAkB,EAAE;QACxC,OAAOe,KAAK;MAChB;MACAA,KAAK,CAACG,IAAI,CAACF,IAAI,CAACC,GAAG,CAAC;MAEpB,OAAOF,KAAK;IAChB,CAAC,EAAE,EAAE,CAAC;IAEN,IAAII,GAAW;IAEf,IAAI;MACA,MAAMC,QAAQ,GAAG,IAAIC,kBAAQ,CAAC,CAAC;MAC/B,MAAMC,MAAM,GAAG,IAAAC,eAAQ,EAAC,UAAU,EAAE,yBAAyB,CAAC;MAC9D,MAAMC,gBAAgB,GAAG,MAAMJ,QAAQ,CAACK,OAAO,CAACH,MAAM,EAAET,WAAW,CAAC;MAEpE,IAAI,CAACW,gBAAgB,EAAEP,GAAG,EAAE;QACxB,OAAOtB,QAAQ,CAACa,KAAK,CAAC;UAClBC,OAAO,EAAE;QACb,CAAC,CAAC;MACN;MACAU,GAAG,GAAGK,gBAAgB,CAACP,GAAG;IAC9B,CAAC,CAAC,OAAOL,EAAE,EAAE;MACTc,OAAO,CAAClB,KAAK,CAAC,sDAAsDI,EAAE,CAACH,OAAO,EAAE,CAAC;MACjFiB,OAAO,CAACC,GAAG,CAACf,EAAE,CAAC;MACf,OAAOjB,QAAQ,CAACa,KAAK,CAACI,EAAE,CAAC;IAC7B;IAEA,MAAMgB,GAAG,GAAG,MAAMzB,kBAAQ,CAAC0B,eAAe,CAACV,GAAG,CAAC;IAE/C,OAAOxB,QAAQ,CAACgB,IAAI,CAAC,uBAAuB,EAAE;MAC1CQ,GAAG;MACHS;IACJ,CAAC,CAAC;EACN;AACJ;AAACE,OAAA,CAAAtC,kBAAA,GAAAA,kBAAA","ignoreList":[]}
@@ -0,0 +1,8 @@
+ import type { IExportPagesControllerTaskParams } from "../types";
+ import type { ITaskResponseResult } from "@webiny/tasks/types";
+ /**
+  * Go through all the pages and create subtasks for zipping pages in batches.
+  */
+ export declare class CreateZipPagesTasks {
+     execute({ response, input, isAborted, isCloseToTimeout, context, store }: IExportPagesControllerTaskParams): Promise<ITaskResponseResult>;
+ }
@@ -0,0 +1,103 @@
+ "use strict";
+ 
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ exports.CreateZipPagesTasks = void 0;
+ var _ProcessZipPagesTasks = require("./ProcessZipPagesTasks");
+ var _types = require("../types");
+ const PAGES_IN_BATCH = 25;
+ 
+ /**
+  * Go through all the pages and create subtasks for zipping pages in batches.
+  */
+ class CreateZipPagesTasks {
+     async execute({
+         response,
+         input,
+         isAborted,
+         isCloseToTimeout,
+         context,
+         store
+     }) {
+         const listPagesParams = {
+             where: input.where,
+             after: input.after,
+             limit: PAGES_IN_BATCH
+         };
+         let currentBatch = input.currentBatch || 1;
+         let result;
+         while (result = await context.pageBuilder.listLatestPages(listPagesParams)) {
+             if (isAborted()) {
+                 return response.aborted();
+             } else if (isCloseToTimeout()) {
+                 return response.continue({
+                     ...input,
+                     ...listPagesParams,
+                     currentBatch
+                 });
+             }
+             const [pages, meta] = result;
+             listPagesParams.after = meta.cursor;
+             /**
+              * If no pages are returned there are two options:
+              * * mark task as done because there are no pages at all
+              * * continue with the control task, but in zippingPages mode
+              */
+             if (meta.totalCount === 0) {
+                 return response.done("No pages to export.");
+             } else if (pages.length === 0) {
+                 return response.continue({
+                     ...input,
+                     ...listPagesParams,
+                     currentBatch,
+                     totalPages: meta.totalCount,
+                     zippingPages: true
+                 }, {
+                     seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
+                 });
+             }
+             const queue = pages.map(page => page.id);
+             /**
+              * Trigger a task for each of the loaded pages batch.
+              */
+             await context.tasks.trigger({
+                 name: `Page Builder - Export Pages - Zip Pages #${currentBatch}`,
+                 parent: store.getTask(),
+                 definition: _types.PageExportTask.ZipPages,
+                 input: {
+                     queue,
+                     type: input.type
+                 }
+             });
+             /**
+              * If there are no more pages to load, we can continue the controller task in a zippingPages mode, with some delay.
+              */
+             if (!meta.hasMoreItems || !meta.cursor) {
+                 return response.continue({
+                     ...input,
+                     ...listPagesParams,
+                     currentBatch,
+                     totalPages: meta.totalCount,
+                     zippingPages: true
+                 }, {
+                     seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
+                 });
+             }
+             currentBatch++;
+         }
+         /**
+          * Should not be possible to exit the loop without returning a response, but let's have a continue response here just in case.
+          */
+         return response.continue({
+             ...input,
+             ...listPagesParams,
+             currentBatch
+         }, {
+             seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
+         });
+     }
+ }
+ exports.CreateZipPagesTasks = CreateZipPagesTasks;
+ 
+ //# sourceMappingURL=CreateZipPagesTasks.js.map
@@ -0,0 +1 @@
+ {"version":3,"names":["_ProcessZipPagesTasks","require","_types","PAGES_IN_BATCH","CreateZipPagesTasks","execute","response","input","isAborted","isCloseToTimeout","context","store","listPagesParams","where","after","limit","currentBatch","result","pageBuilder","listLatestPages","aborted","continue","pages","meta","cursor","totalCount","done","length","totalPages","zippingPages","seconds","ZIP_PAGES_WAIT_TIME","queue","map","page","id","tasks","trigger","name","parent","getTask","definition","PageExportTask","ZipPages","type","hasMoreItems","exports"],"sources":["CreateZipPagesTasks.ts"],"sourcesContent":["import type { ListMeta, ListPagesParams, Page } from \"@webiny/api-page-builder/types\";\nimport { ZIP_PAGES_WAIT_TIME } from \"./ProcessZipPagesTasks\";\nimport type {\n IExportPagesControllerTaskParams,\n IExportPagesZipPagesInput\n} from \"~/export/pages/types\";\nimport { PageExportTask } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks/types\";\n\nconst PAGES_IN_BATCH = 25;\n\n/**\n * Go through all the pages and create subtasks for zipping pages in batches.\n */\nexport class CreateZipPagesTasks {\n public async execute({\n response,\n input,\n isAborted,\n isCloseToTimeout,\n context,\n store\n }: IExportPagesControllerTaskParams): Promise<ITaskResponseResult> {\n const listPagesParams: ListPagesParams = {\n where: input.where,\n after: input.after,\n limit: PAGES_IN_BATCH\n };\n\n let currentBatch = input.currentBatch || 1;\n let result: [Page[], ListMeta];\n while ((result = await context.pageBuilder.listLatestPages(listPagesParams))) {\n if (isAborted()) {\n return response.aborted();\n } else if (isCloseToTimeout()) {\n return response.continue({\n ...input,\n ...listPagesParams,\n currentBatch\n });\n }\n const [pages, meta] = result;\n\n listPagesParams.after = meta.cursor;\n /**\n * If no pages are returned there are two options:\n * * mark task as done because there are no pages at all\n * * continue with the control task, but in zippingPages mode\n */\n if (meta.totalCount === 0) {\n return response.done(\"No pages to export.\");\n } else if (pages.length === 0) {\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch,\n totalPages: meta.totalCount,\n zippingPages: true\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n\n const queue = pages.map(page => page.id);\n /**\n * Trigger a task for each of the loaded pages batch.\n */\n await context.tasks.trigger<IExportPagesZipPagesInput>({\n name: `Page Builder - Export Pages - Zip Pages #${currentBatch}`,\n parent: store.getTask(),\n definition: PageExportTask.ZipPages,\n input: {\n queue,\n type: input.type\n }\n });\n /**\n * If there are no more pages to load, we can continue the controller task in a zippingPages mode, with some delay.\n */\n if (!meta.hasMoreItems || !meta.cursor) {\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch,\n totalPages: meta.totalCount,\n zippingPages: true\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n currentBatch++;\n }\n /**\n * Should not be possible to exit the loop without returning a response, but let's have a continue response here just in case.\n */\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n 
}\n}\n"],"mappings":";;;;;;AACA,IAAAA,qBAAA,GAAAC,OAAA;AAKA,IAAAC,MAAA,GAAAD,OAAA;AAGA,MAAME,cAAc,GAAG,EAAE;;AAEzB;AACA;AACA;AACO,MAAMC,mBAAmB,CAAC;EAC7B,MAAaC,OAAOA,CAAC;IACjBC,QAAQ;IACRC,KAAK;IACLC,SAAS;IACTC,gBAAgB;IAChBC,OAAO;IACPC;EAC8B,CAAC,EAAgC;IAC/D,MAAMC,eAAgC,GAAG;MACrCC,KAAK,EAAEN,KAAK,CAACM,KAAK;MAClBC,KAAK,EAAEP,KAAK,CAACO,KAAK;MAClBC,KAAK,EAAEZ;IACX,CAAC;IAED,IAAIa,YAAY,GAAGT,KAAK,CAACS,YAAY,IAAI,CAAC;IAC1C,IAAIC,MAA0B;IAC9B,OAAQA,MAAM,GAAG,MAAMP,OAAO,CAACQ,WAAW,CAACC,eAAe,CAACP,eAAe,CAAC,EAAG;MAC1E,IAAIJ,SAAS,CAAC,CAAC,EAAE;QACb,OAAOF,QAAQ,CAACc,OAAO,CAAC,CAAC;MAC7B,CAAC,MAAM,IAAIX,gBAAgB,CAAC,CAAC,EAAE;QAC3B,OAAOH,QAAQ,CAACe,QAAQ,CAAC;UACrB,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI;QACJ,CAAC,CAAC;MACN;MACA,MAAM,CAACM,KAAK,EAAEC,IAAI,CAAC,GAAGN,MAAM;MAE5BL,eAAe,CAACE,KAAK,GAAGS,IAAI,CAACC,MAAM;MACnC;AACZ;AACA;AACA;AACA;MACY,IAAID,IAAI,CAACE,UAAU,KAAK,CAAC,EAAE;QACvB,OAAOnB,QAAQ,CAACoB,IAAI,CAAC,qBAAqB,CAAC;MAC/C,CAAC,MAAM,IAAIJ,KAAK,CAACK,MAAM,KAAK,CAAC,EAAE;QAC3B,OAAOrB,QAAQ,CAACe,QAAQ,CACpB;UACI,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI,YAAY;UACZY,UAAU,EAAEL,IAAI,CAACE,UAAU;UAC3BI,YAAY,EAAE;QAClB,CAAC,EACD;UACIC,OAAO,EAAEC;QACb,CACJ,CAAC;MACL;MAEA,MAAMC,KAAK,GAAGV,KAAK,CAACW,GAAG,CAACC,IAAI,IAAIA,IAAI,CAACC,EAAE,CAAC;MACxC;AACZ;AACA;MACY,MAAMzB,OAAO,CAAC0B,KAAK,CAACC,OAAO,CAA4B;QACnDC,IAAI,EAAE,4CAA4CtB,YAAY,EAAE;QAChEuB,MAAM,EAAE5B,KAAK,CAAC6B,OAAO,CAAC,CAAC;QACvBC,UAAU,EAAEC,qBAAc,CAACC,QAAQ;QACnCpC,KAAK,EAAE;UACHyB,KAAK;UACLY,IAAI,EAAErC,KAAK,CAACqC;QAChB;MACJ,CAAC,CAAC;MACF;AACZ;AACA;MACY,IAAI,CAACrB,IAAI,CAACsB,YAAY,IAAI,CAACtB,IAAI,CAACC,MAAM,EAAE;QACpC,OAAOlB,QAAQ,CAACe,QAAQ,CACpB;UACI,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI,YAAY;UACZY,UAAU,EAAEL,IAAI,CAACE,UAAU;UAC3BI,YAAY,EAAE;QAClB,CAAC,EACD;UACIC,OAAO,EAAEC;QACb,CACJ,CAAC;MACL;MACAf,YAAY,EAAE;IAClB;IACA;AACR;AACA;IACQ,OAAOV,QAAQ,CAACe,QAAQ,CACpB;MACI,GAAGd,KAAK;MACR,GAAGK,eAAe;MAClBI;IACJ,CAAC,EACD;MACIc,OAAO,EAAEC;IACb,CACJ,CAAC;EACL;AACJ;AAACe,OAAA,CAAA1C,mBAAA,GAAAA,mBAAA","ignoreList":[]}
@@ -0,0 +1,6 @@
+ import type { ITaskResponseResult } from "@webiny/tasks";
+ import type { IExportPagesControllerTaskParams } from "../types";
+ export declare const ZIP_PAGES_WAIT_TIME = 5;
+ export declare class ProcessZipPagesTasks {
+     execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult>;
+ }