@webiny/api-page-builder-import-export 0.0.0-unstable.13771d80a8 → 0.0.0-unstable.14d8337988
This diff shows the changes between two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- package/client.d.ts +1 -1
- package/client.js +12 -21
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +2 -2
- package/export/combine/blocksHandler.js +4 -2
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/formsHandler.d.ts +2 -2
- package/export/combine/formsHandler.js +4 -2
- package/export/combine/formsHandler.js.map +1 -1
- package/export/combine/index.d.ts +2 -2
- package/export/combine/index.js +24 -20
- package/export/combine/index.js.map +1 -1
- package/export/combine/templatesHandler.d.ts +2 -2
- package/export/combine/templatesHandler.js +4 -2
- package/export/combine/templatesHandler.js.map +1 -1
- package/export/pages/ExportPagesCleanup.d.ts +5 -0
- package/export/pages/ExportPagesCleanup.js +82 -0
- package/export/pages/ExportPagesCleanup.js.map +1 -0
- package/export/pages/ExportPagesController.d.ts +5 -0
- package/export/pages/ExportPagesController.js +31 -0
- package/export/pages/ExportPagesController.js.map +1 -0
- package/export/pages/ExportPagesZipPages.d.ts +5 -0
- package/export/pages/ExportPagesZipPages.js +23 -0
- package/export/pages/ExportPagesZipPages.js.map +1 -0
- package/export/pages/controller/CombineZippedPages.d.ts +5 -0
- package/export/pages/controller/CombineZippedPages.js +75 -0
- package/export/pages/controller/CombineZippedPages.js.map +1 -0
- package/export/pages/controller/CreateZipPagesTasks.d.ts +8 -0
- package/export/pages/controller/CreateZipPagesTasks.js +103 -0
- package/export/pages/controller/CreateZipPagesTasks.js.map +1 -0
- package/export/pages/controller/ProcessZipPagesTasks.d.ts +6 -0
- package/export/pages/controller/ProcessZipPagesTasks.js +61 -0
- package/export/pages/controller/ProcessZipPagesTasks.js.map +1 -0
- package/export/pages/types.d.ts +53 -0
- package/export/pages/types.js +26 -0
- package/export/pages/types.js.map +1 -0
- package/export/pages/utils.d.ts +2 -0
- package/export/pages/utils.js +13 -0
- package/export/pages/utils.js.map +1 -0
- package/export/pages/zipPages/ZipPages.d.ts +5 -0
- package/export/pages/zipPages/ZipPages.js +100 -0
- package/export/pages/zipPages/ZipPages.js.map +1 -0
- package/export/pages/zipPages/ZipPagesDataManager.d.ts +14 -0
- package/export/pages/zipPages/ZipPagesDataManager.js +46 -0
- package/export/pages/zipPages/ZipPagesDataManager.js.map +1 -0
- package/export/pages/zipPages/getPageFactory.d.ts +4 -0
- package/export/pages/zipPages/getPageFactory.js +38 -0
- package/export/pages/zipPages/getPageFactory.js.map +1 -0
- package/export/process/blocksHandler.d.ts +2 -2
- package/export/process/blocksHandler.js +13 -6
- package/export/process/blocksHandler.js.map +1 -1
- package/export/process/exporters/BlockExporter.d.ts +12 -0
- package/export/process/exporters/BlockExporter.js +57 -0
- package/export/process/exporters/BlockExporter.js.map +1 -0
- package/export/process/exporters/FormExporter.d.ts +9 -0
- package/export/process/exporters/FormExporter.js +36 -0
- package/export/process/exporters/FormExporter.js.map +1 -0
- package/export/process/exporters/PageExporter.d.ts +18 -0
- package/export/process/exporters/PageExporter.js +59 -0
- package/export/process/exporters/PageExporter.js.map +1 -0
- package/export/process/exporters/PageTemplateExporter.d.ts +11 -0
- package/export/process/exporters/PageTemplateExporter.js +58 -0
- package/export/process/exporters/PageTemplateExporter.js.map +1 -0
- package/export/process/formsHandler.d.ts +2 -2
- package/export/process/formsHandler.js +8 -6
- package/export/process/formsHandler.js.map +1 -1
- package/export/process/index.d.ts +2 -2
- package/export/process/index.js +24 -20
- package/export/process/index.js.map +1 -1
- package/export/process/templatesHandler.d.ts +2 -2
- package/export/process/templatesHandler.js +8 -6
- package/export/process/templatesHandler.js.map +1 -1
- package/export/s3Stream.d.ts +12 -10
- package/export/s3Stream.js +27 -23
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +10 -15
- package/export/utils.js +27 -46
- package/export/utils.js.map +1 -1
- package/export/zipper.d.ts +7 -6
- package/export/zipper.js +18 -22
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.d.ts +1 -1
- package/graphql/crud/blocks.crud.js +49 -27
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/forms.crud.d.ts +1 -1
- package/graphql/crud/forms.crud.js +51 -18
- package/graphql/crud/forms.crud.js.map +1 -1
- package/graphql/crud/importExportTasks.crud.d.ts +2 -2
- package/graphql/crud/importExportTasks.crud.js +144 -111
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.d.ts +1 -1
- package/graphql/crud/pages.crud.js +238 -121
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +1 -1
- package/graphql/crud/templates.crud.js +48 -16
- package/graphql/crud/templates.crud.js.map +1 -1
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +3 -1
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +2 -2
- package/graphql/graphql/blocks.gql.js +7 -7
- package/graphql/graphql/blocks.gql.js.map +1 -1
- package/graphql/graphql/forms.gql.d.ts +2 -2
- package/graphql/graphql/forms.gql.js +6 -6
- package/graphql/graphql/forms.gql.js.map +1 -1
- package/graphql/graphql/importExportTasks.gql.d.ts +2 -2
- package/graphql/graphql/importExportTasks.gql.js +7 -7
- package/graphql/graphql/importExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +103 -14
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +2 -2
- package/graphql/graphql/templates.gql.js +6 -6
- package/graphql/graphql/templates.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.d.ts +3 -3
- package/graphql/graphql/utils/resolve.js +16 -5
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +3 -2
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +5 -2
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +173 -33
- package/graphql/types.js +3 -1
- package/graphql/types.js.map +1 -1
- package/import/constants.js +5 -6
- package/import/constants.js.map +1 -1
- package/import/create/blocksHandler.d.ts +2 -2
- package/import/create/blocksHandler.js +3 -3
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/formsHandler.d.ts +2 -2
- package/import/create/formsHandler.js +3 -1
- package/import/create/formsHandler.js.map +1 -1
- package/import/create/index.d.ts +2 -2
- package/import/create/index.js +24 -20
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.d.ts +2 -2
- package/import/create/pagesHandler.js +3 -1
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.d.ts +2 -2
- package/import/create/templatesHandler.js +3 -1
- package/import/create/templatesHandler.js.map +1 -1
- package/import/pages/ImportPagesController.d.ts +5 -0
- package/import/pages/ImportPagesController.js +29 -0
- package/import/pages/ImportPagesController.js.map +1 -0
- package/import/pages/ImportPagesProcessPages.d.ts +6 -0
- package/import/pages/ImportPagesProcessPages.js +112 -0
- package/import/pages/ImportPagesProcessPages.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.d.ts +6 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js +40 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessZipFile.d.ts +5 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js +71 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js.map +1 -0
- package/import/{process/pages → pages/process}/importPage.d.ts +3 -4
- package/import/{process/pages → pages/process}/importPage.js +8 -6
- package/import/pages/process/importPage.js.map +1 -0
- package/import/pages/types.d.ts +48 -0
- package/import/pages/types.js +20 -0
- package/import/pages/types.js.map +1 -0
- package/import/process/blocks/ElementIdsProcessor.d.ts +5 -0
- package/import/process/blocks/ElementIdsProcessor.js +26 -0
- package/import/process/blocks/ElementIdsProcessor.js.map +1 -0
- package/import/process/blocks/blocksHandler.d.ts +2 -2
- package/import/process/blocks/blocksHandler.js +6 -6
- package/import/process/blocks/blocksHandler.js.map +1 -1
- package/import/process/blocks/importBlock.d.ts +4 -4
- package/import/process/blocks/importBlock.js +38 -26
- package/import/process/blocks/importBlock.js.map +1 -1
- package/import/process/forms/formsHandler.d.ts +2 -2
- package/import/process/forms/formsHandler.js +4 -2
- package/import/process/forms/formsHandler.js.map +1 -1
- package/import/process/forms/importForm.d.ts +2 -2
- package/import/process/forms/importForm.js +6 -2
- package/import/process/forms/importForm.js.map +1 -1
- package/import/process/index.d.ts +2 -2
- package/import/process/index.js +24 -20
- package/import/process/index.js.map +1 -1
- package/import/process/templates/importTemplate.d.ts +3 -3
- package/import/process/templates/importTemplate.js +6 -2
- package/import/process/templates/importTemplate.js.map +1 -1
- package/import/process/templates/templatesHandler.d.ts +2 -2
- package/import/process/templates/templatesHandler.js +6 -2
- package/import/process/templates/templatesHandler.js.map +1 -1
- package/import/utils/deleteS3Folder.js +3 -1
- package/import/utils/deleteS3Folder.js.map +1 -1
- package/import/utils/extractAndUploadZipFileContents.d.ts +1 -1
- package/import/utils/extractAndUploadZipFileContents.js +9 -5
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
- package/import/utils/extractZipAndUploadToS3.d.ts +1 -1
- package/import/utils/extractZipAndUploadToS3.js +3 -1
- package/import/utils/extractZipAndUploadToS3.js.map +1 -1
- package/import/utils/getFileNameWithoutExt.js +3 -1
- package/import/utils/getFileNameWithoutExt.js.map +1 -1
- package/import/utils/index.js +3 -1
- package/import/utils/index.js.map +1 -1
- package/import/utils/initialStats.js +3 -1
- package/import/utils/initialStats.js.map +1 -1
- package/import/utils/prepareDataDirMap.d.ts +1 -1
- package/import/utils/prepareDataDirMap.js +3 -1
- package/import/utils/prepareDataDirMap.js.map +1 -1
- package/import/utils/updateFilesInData.d.ts +1 -1
- package/import/utils/updateFilesInData.js +5 -2
- package/import/utils/updateFilesInData.js.map +1 -1
- package/import/utils/uploadAssets.d.ts +3 -3
- package/import/utils/uploadAssets.js +57 -23
- package/import/utils/uploadAssets.js.map +1 -1
- package/import/utils/uploadFilesFromS3.d.ts +3 -3
- package/import/utils/uploadFilesFromS3.js +4 -2
- package/import/utils/uploadFilesFromS3.js.map +1 -1
- package/mockSecurity.d.ts +1 -1
- package/mockSecurity.js +3 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +37 -54
- package/tasks/common/ChildTasksCleanup.d.ts +12 -0
- package/tasks/common/ChildTasksCleanup.js +64 -0
- package/tasks/common/ChildTasksCleanup.js.map +1 -0
- package/tasks/index.d.ts +1 -0
- package/tasks/index.js +13 -0
- package/tasks/index.js.map +1 -0
- package/tasks/pages/exportPagesCleanupTask.d.ts +3 -0
- package/tasks/pages/exportPagesCleanupTask.js +36 -0
- package/tasks/pages/exportPagesCleanupTask.js.map +1 -0
- package/tasks/pages/exportPagesControllerTask.d.ts +3 -0
- package/tasks/pages/exportPagesControllerTask.js +83 -0
- package/tasks/pages/exportPagesControllerTask.js.map +1 -0
- package/tasks/pages/exportPagesZipPagesTask.d.ts +3 -0
- package/tasks/pages/exportPagesZipPagesTask.js +39 -0
- package/tasks/pages/exportPagesZipPagesTask.js.map +1 -0
- package/tasks/pages/importPagesControllerTask.d.ts +3 -0
- package/tasks/pages/importPagesControllerTask.js +39 -0
- package/tasks/pages/importPagesControllerTask.js.map +1 -0
- package/tasks/pages/importPagesProcessPageTask.d.ts +3 -0
- package/tasks/pages/importPagesProcessPageTask.js +39 -0
- package/tasks/pages/importPagesProcessPageTask.js.map +1 -0
- package/tasks/pages/index.d.ts +1 -0
- package/tasks/pages/index.js +17 -0
- package/tasks/pages/index.js.map +1 -0
- package/types.d.ts +2 -2
- package/types.js +60 -8
- package/types.js.map +1 -1
- package/utils/ZipFiles.d.ts +11 -0
- package/utils/ZipFiles.js +124 -0
- package/utils/ZipFiles.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +0 -6
- package/export/combine/pagesHandler.js +0 -99
- package/export/combine/pagesHandler.js.map +0 -1
- package/export/process/pagesHandler.d.ts +0 -6
- package/export/process/pagesHandler.js +0 -189
- package/export/process/pagesHandler.js.map +0 -1
- package/import/process/blocksHandler.d.ts +0 -3
- package/import/process/blocksHandler.js +0 -175
- package/import/process/blocksHandler.js.map +0 -1
- package/import/process/pages/importPage.js.map +0 -1
- package/import/process/pages/pagesHandler.d.ts +0 -3
- package/import/process/pages/pagesHandler.js +0 -183
- package/import/process/pages/pagesHandler.js.map +0 -1
- package/import/process/pagesHandler.d.ts +0 -3
- package/import/process/pagesHandler.js +0 -180
- package/import/process/pagesHandler.js.map +0 -1
- package/import/utils.d.ts +0 -49
- package/import/utils.js +0 -641
- package/import/utils.js.map +0 -1

package/export/pages/controller/CombineZippedPages.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_s3Stream","require","_utils","_ZipFiles","_uniqid","_interopRequireDefault","CombineZippedPages","execute","params","response","store","taskId","getTask","id","exportPagesDataKey","createExportPagesDataKey","listObjectResponse","s3Stream","listObject","Array","isArray","Contents","error","message","length","done","ex","zipFileKeys","reduce","files","file","Key","push","key","zipOfZip","ZipFiles","target","uniqueId","pageExportUpload","process","console","log","url","getPresignedUrl","exports"],"sources":["CombineZippedPages.ts"],"sourcesContent":["import type { IExportPagesCombineZippedPagesParams } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks\";\nimport type { ListObjectsOutput } from \"~/export/s3Stream\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { createExportPagesDataKey } from \"~/export/pages/utils\";\nimport { ZipFiles } from \"~/utils/ZipFiles\";\nimport uniqueId from \"uniqid\";\n\nexport class CombineZippedPages {\n public async execute(\n params: IExportPagesCombineZippedPagesParams\n ): Promise<ITaskResponseResult> {\n const { response, store } = params;\n /**\n * We need to get all the subtasks of the PageExportTask.ZipPages type, so we can get all the zip files and combine them into one.\n * Current task must have a parent for this to work.\n */\n const taskId = store.getTask().id;\n\n /**\n * When we have all the pages IDs and their zip files, we can continue to combine the zip files into one.\n */\n const exportPagesDataKey = createExportPagesDataKey(taskId);\n\n let listObjectResponse: ListObjectsOutput;\n try {\n listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!Array.isArray(listObjectResponse.Contents)) {\n return response.error({\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n });\n } else if (listObjectResponse.Contents.length === 0) {\n return response.done(\"No zip files to combine.\");\n }\n } catch (ex) {\n return response.error(ex);\n }\n\n const zipFileKeys = listObjectResponse.Contents.reduce<string[]>((files, file) => {\n if (!file.Key) {\n return files;\n } else if (file.Key === exportPagesDataKey) {\n return files;\n }\n files.push(file.Key);\n\n return files;\n }, []);\n\n let key: string;\n\n try {\n const zipOfZip = new ZipFiles();\n const target = uniqueId(\"EXPORTS/\", \"-WEBINY_PAGE_EXPORT.zip\");\n const pageExportUpload = await zipOfZip.process(target, zipFileKeys);\n\n if (!pageExportUpload?.Key) {\n return response.error({\n message: \"There is no Key defined on pageExportUpload.\"\n });\n }\n key = pageExportUpload.Key;\n } catch (ex) {\n console.error(`Error while combining zip files into a single zip: ${ex.message}`);\n console.log(ex);\n return response.error(ex);\n }\n\n const url = await s3Stream.getPresignedUrl(key);\n\n return response.done(\"Done combining pages.\", {\n key,\n url\n });\n 
}\n}\n"],"mappings":";;;;;;;AAGA,IAAAA,SAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,SAAA,GAAAF,OAAA;AACA,IAAAG,OAAA,GAAAC,sBAAA,CAAAJ,OAAA;AAEO,MAAMK,kBAAkB,CAAC;EAC5B,MAAaC,OAAOA,CAChBC,MAA4C,EAChB;IAC5B,MAAM;MAAEC,QAAQ;MAAEC;IAAM,CAAC,GAAGF,MAAM;IAClC;AACR;AACA;AACA;IACQ,MAAMG,MAAM,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC,CAACC,EAAE;;IAEjC;AACR;AACA;IACQ,MAAMC,kBAAkB,GAAG,IAAAC,+BAAwB,EAACJ,MAAM,CAAC;IAE3D,IAAIK,kBAAqC;IACzC,IAAI;MACAA,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;MAClE,IAAI,CAACK,KAAK,CAACC,OAAO,CAACJ,kBAAkB,CAACK,QAAQ,CAAC,EAAE;QAC7C,OAAOZ,QAAQ,CAACa,KAAK,CAAC;UAClBC,OAAO,EAAE;QACb,CAAC,CAAC;MACN,CAAC,MAAM,IAAIP,kBAAkB,CAACK,QAAQ,CAACG,MAAM,KAAK,CAAC,EAAE;QACjD,OAAOf,QAAQ,CAACgB,IAAI,CAAC,0BAA0B,CAAC;MACpD;IACJ,CAAC,CAAC,OAAOC,EAAE,EAAE;MACT,OAAOjB,QAAQ,CAACa,KAAK,CAACI,EAAE,CAAC;IAC7B;IAEA,MAAMC,WAAW,GAAGX,kBAAkB,CAACK,QAAQ,CAACO,MAAM,CAAW,CAACC,KAAK,EAAEC,IAAI,KAAK;MAC9E,IAAI,CAACA,IAAI,CAACC,GAAG,EAAE;QACX,OAAOF,KAAK;MAChB,CAAC,MAAM,IAAIC,IAAI,CAACC,GAAG,KAAKjB,kBAAkB,EAAE;QACxC,OAAOe,KAAK;MAChB;MACAA,KAAK,CAACG,IAAI,CAACF,IAAI,CAACC,GAAG,CAAC;MAEpB,OAAOF,KAAK;IAChB,CAAC,EAAE,EAAE,CAAC;IAEN,IAAII,GAAW;IAEf,IAAI;MACA,MAAMC,QAAQ,GAAG,IAAIC,kBAAQ,CAAC,CAAC;MAC/B,MAAMC,MAAM,GAAG,IAAAC,eAAQ,EAAC,UAAU,EAAE,yBAAyB,CAAC;MAC9D,MAAMC,gBAAgB,GAAG,MAAMJ,QAAQ,CAACK,OAAO,CAACH,MAAM,EAAET,WAAW,CAAC;MAEpE,IAAI,CAACW,gBAAgB,EAAEP,GAAG,EAAE;QACxB,OAAOtB,QAAQ,CAACa,KAAK,CAAC;UAClBC,OAAO,EAAE;QACb,CAAC,CAAC;MACN;MACAU,GAAG,GAAGK,gBAAgB,CAACP,GAAG;IAC9B,CAAC,CAAC,OAAOL,EAAE,EAAE;MACTc,OAAO,CAAClB,KAAK,CAAC,sDAAsDI,EAAE,CAACH,OAAO,EAAE,CAAC;MACjFiB,OAAO,CAACC,GAAG,CAACf,EAAE,CAAC;MACf,OAAOjB,QAAQ,CAACa,KAAK,CAACI,EAAE,CAAC;IAC7B;IAEA,MAAMgB,GAAG,GAAG,MAAMzB,kBAAQ,CAAC0B,eAAe,CAACV,GAAG,CAAC;IAE/C,OAAOxB,QAAQ,CAACgB,IAAI,CAAC,uBAAuB,EAAE;MAC1CQ,GAAG;MACHS;IACJ,CAAC,CAAC;EACN;AACJ;AAACE,OAAA,CAAAtC,kBAAA,GAAAA,kBAAA","ignoreList":[]}

package/export/pages/controller/CreateZipPagesTasks.d.ts
@@ -0,0 +1,8 @@
import type { IExportPagesControllerTaskParams } from "../types";
import type { ITaskResponseResult } from "@webiny/tasks/types";
/**
 * Go through all the pages and create subtasks for zipping pages in batches.
 */
export declare class CreateZipPagesTasks {
    execute({ response, input, isAborted, isCloseToTimeout, context, store }: IExportPagesControllerTaskParams): Promise<ITaskResponseResult>;
}

package/export/pages/controller/CreateZipPagesTasks.js
@@ -0,0 +1,103 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.CreateZipPagesTasks = void 0;
var _ProcessZipPagesTasks = require("./ProcessZipPagesTasks");
var _types = require("../types");
const PAGES_IN_BATCH = 25;

/**
 * Go through all the pages and create subtasks for zipping pages in batches.
 */
class CreateZipPagesTasks {
  async execute({
    response,
    input,
    isAborted,
    isCloseToTimeout,
    context,
    store
  }) {
    const listPagesParams = {
      where: input.where,
      after: input.after,
      limit: PAGES_IN_BATCH
    };
    let currentBatch = input.currentBatch || 1;
    let result;
    while (result = await context.pageBuilder.listLatestPages(listPagesParams)) {
      if (isAborted()) {
        return response.aborted();
      } else if (isCloseToTimeout()) {
        return response.continue({
          ...input,
          ...listPagesParams,
          currentBatch
        });
      }
      const [pages, meta] = result;
      listPagesParams.after = meta.cursor;
      /**
       * If no pages are returned there are two options:
       * * mark task as done because there are no pages at all
       * * continue with the control task, but in zippingPages mode
       */
      if (meta.totalCount === 0) {
        return response.done("No pages to export.");
      } else if (pages.length === 0) {
        return response.continue({
          ...input,
          ...listPagesParams,
          currentBatch,
          totalPages: meta.totalCount,
          zippingPages: true
        }, {
          seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
        });
      }
      const queue = pages.map(page => page.id);
      /**
       * Trigger a task for each of the loaded pages batch.
       */
      await context.tasks.trigger({
        name: `Page Builder - Export Pages - Zip Pages #${currentBatch}`,
        parent: store.getTask(),
        definition: _types.PageExportTask.ZipPages,
        input: {
          queue,
          type: input.type
        }
      });
      /**
       * If there are no more pages to load, we can continue the controller task in a zippingPages mode, with some delay.
       */
      if (!meta.hasMoreItems || !meta.cursor) {
        return response.continue({
          ...input,
          ...listPagesParams,
          currentBatch,
          totalPages: meta.totalCount,
          zippingPages: true
        }, {
          seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
        });
      }
      currentBatch++;
    }
    /**
     * Should not be possible to exit the loop without returning a response, but let's have a continue response here just in case.
     */
    return response.continue({
      ...input,
      ...listPagesParams,
      currentBatch
    }, {
      seconds: _ProcessZipPagesTasks.ZIP_PAGES_WAIT_TIME
    });
  }
}
exports.CreateZipPagesTasks = CreateZipPagesTasks;

//# sourceMappingURL=CreateZipPagesTasks.js.map

package/export/pages/controller/CreateZipPagesTasks.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_ProcessZipPagesTasks","require","_types","PAGES_IN_BATCH","CreateZipPagesTasks","execute","response","input","isAborted","isCloseToTimeout","context","store","listPagesParams","where","after","limit","currentBatch","result","pageBuilder","listLatestPages","aborted","continue","pages","meta","cursor","totalCount","done","length","totalPages","zippingPages","seconds","ZIP_PAGES_WAIT_TIME","queue","map","page","id","tasks","trigger","name","parent","getTask","definition","PageExportTask","ZipPages","type","hasMoreItems","exports"],"sources":["CreateZipPagesTasks.ts"],"sourcesContent":["import type { ListMeta, ListPagesParams, Page } from \"@webiny/api-page-builder/types\";\nimport { ZIP_PAGES_WAIT_TIME } from \"./ProcessZipPagesTasks\";\nimport type {\n IExportPagesControllerTaskParams,\n IExportPagesZipPagesInput\n} from \"~/export/pages/types\";\nimport { PageExportTask } from \"~/export/pages/types\";\nimport type { ITaskResponseResult } from \"@webiny/tasks/types\";\n\nconst PAGES_IN_BATCH = 25;\n\n/**\n * Go through all the pages and create subtasks for zipping pages in batches.\n */\nexport class CreateZipPagesTasks {\n public async execute({\n response,\n input,\n isAborted,\n isCloseToTimeout,\n context,\n store\n }: IExportPagesControllerTaskParams): Promise<ITaskResponseResult> {\n const listPagesParams: ListPagesParams = {\n where: input.where,\n after: input.after,\n limit: PAGES_IN_BATCH\n };\n\n let currentBatch = input.currentBatch || 1;\n let result: [Page[], ListMeta];\n while ((result = await context.pageBuilder.listLatestPages(listPagesParams))) {\n if (isAborted()) {\n return response.aborted();\n } else if (isCloseToTimeout()) {\n return response.continue({\n ...input,\n ...listPagesParams,\n currentBatch\n });\n }\n const [pages, meta] = result;\n\n listPagesParams.after = meta.cursor;\n /**\n * If no pages are returned there are two options:\n * * mark task as done because there are no pages at all\n * * continue with the control task, but in zippingPages mode\n */\n if (meta.totalCount === 0) {\n return response.done(\"No pages to export.\");\n } else if (pages.length === 0) {\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch,\n totalPages: meta.totalCount,\n zippingPages: true\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n\n const queue = pages.map(page => page.id);\n /**\n * Trigger a task for each of the loaded pages batch.\n */\n await context.tasks.trigger<IExportPagesZipPagesInput>({\n name: `Page Builder - Export Pages - Zip Pages #${currentBatch}`,\n parent: store.getTask(),\n definition: PageExportTask.ZipPages,\n input: {\n queue,\n type: input.type\n }\n });\n /**\n * If there are no more pages to load, we can continue the controller task in a zippingPages mode, with some delay.\n */\n if (!meta.hasMoreItems || !meta.cursor) {\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch,\n totalPages: meta.totalCount,\n zippingPages: true\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n currentBatch++;\n }\n /**\n * Should not be possible to exit the loop without returning a response, but let's have a continue response here just in case.\n */\n return response.continue(\n {\n ...input,\n ...listPagesParams,\n currentBatch\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n 
}\n}\n"],"mappings":";;;;;;AACA,IAAAA,qBAAA,GAAAC,OAAA;AAKA,IAAAC,MAAA,GAAAD,OAAA;AAGA,MAAME,cAAc,GAAG,EAAE;;AAEzB;AACA;AACA;AACO,MAAMC,mBAAmB,CAAC;EAC7B,MAAaC,OAAOA,CAAC;IACjBC,QAAQ;IACRC,KAAK;IACLC,SAAS;IACTC,gBAAgB;IAChBC,OAAO;IACPC;EAC8B,CAAC,EAAgC;IAC/D,MAAMC,eAAgC,GAAG;MACrCC,KAAK,EAAEN,KAAK,CAACM,KAAK;MAClBC,KAAK,EAAEP,KAAK,CAACO,KAAK;MAClBC,KAAK,EAAEZ;IACX,CAAC;IAED,IAAIa,YAAY,GAAGT,KAAK,CAACS,YAAY,IAAI,CAAC;IAC1C,IAAIC,MAA0B;IAC9B,OAAQA,MAAM,GAAG,MAAMP,OAAO,CAACQ,WAAW,CAACC,eAAe,CAACP,eAAe,CAAC,EAAG;MAC1E,IAAIJ,SAAS,CAAC,CAAC,EAAE;QACb,OAAOF,QAAQ,CAACc,OAAO,CAAC,CAAC;MAC7B,CAAC,MAAM,IAAIX,gBAAgB,CAAC,CAAC,EAAE;QAC3B,OAAOH,QAAQ,CAACe,QAAQ,CAAC;UACrB,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI;QACJ,CAAC,CAAC;MACN;MACA,MAAM,CAACM,KAAK,EAAEC,IAAI,CAAC,GAAGN,MAAM;MAE5BL,eAAe,CAACE,KAAK,GAAGS,IAAI,CAACC,MAAM;MACnC;AACZ;AACA;AACA;AACA;MACY,IAAID,IAAI,CAACE,UAAU,KAAK,CAAC,EAAE;QACvB,OAAOnB,QAAQ,CAACoB,IAAI,CAAC,qBAAqB,CAAC;MAC/C,CAAC,MAAM,IAAIJ,KAAK,CAACK,MAAM,KAAK,CAAC,EAAE;QAC3B,OAAOrB,QAAQ,CAACe,QAAQ,CACpB;UACI,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI,YAAY;UACZY,UAAU,EAAEL,IAAI,CAACE,UAAU;UAC3BI,YAAY,EAAE;QAClB,CAAC,EACD;UACIC,OAAO,EAAEC;QACb,CACJ,CAAC;MACL;MAEA,MAAMC,KAAK,GAAGV,KAAK,CAACW,GAAG,CAACC,IAAI,IAAIA,IAAI,CAACC,EAAE,CAAC;MACxC;AACZ;AACA;MACY,MAAMzB,OAAO,CAAC0B,KAAK,CAACC,OAAO,CAA4B;QACnDC,IAAI,EAAE,4CAA4CtB,YAAY,EAAE;QAChEuB,MAAM,EAAE5B,KAAK,CAAC6B,OAAO,CAAC,CAAC;QACvBC,UAAU,EAAEC,qBAAc,CAACC,QAAQ;QACnCpC,KAAK,EAAE;UACHyB,KAAK;UACLY,IAAI,EAAErC,KAAK,CAACqC;QAChB;MACJ,CAAC,CAAC;MACF;AACZ;AACA;MACY,IAAI,CAACrB,IAAI,CAACsB,YAAY,IAAI,CAACtB,IAAI,CAACC,MAAM,EAAE;QACpC,OAAOlB,QAAQ,CAACe,QAAQ,CACpB;UACI,GAAGd,KAAK;UACR,GAAGK,eAAe;UAClBI,YAAY;UACZY,UAAU,EAAEL,IAAI,CAACE,UAAU;UAC3BI,YAAY,EAAE;QAClB,CAAC,EACD;UACIC,OAAO,EAAEC;QACb,CACJ,CAAC;MACL;MACAf,YAAY,EAAE;IAClB;IACA;AACR;AACA;IACQ,OAAOV,QAAQ,CAACe,QAAQ,CACpB;MACI,GAAGd,KAAK;MACR,GAAGK,eAAe;MAClBI;IACJ,CAAC,EACD;MACIc,OAAO,EAAEC;IACb,CACJ,CAAC;EACL;AACJ;AAACe,OAAA,CAAA1C,mBAAA,GAAAA,mBAAA","ignoreList":[]}

package/export/pages/controller/ProcessZipPagesTasks.d.ts
@@ -0,0 +1,6 @@
import type { ITaskResponseResult } from "@webiny/tasks";
import type { IExportPagesControllerTaskParams } from "../types";
export declare const ZIP_PAGES_WAIT_TIME = 5;
export declare class ProcessZipPagesTasks {
    execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult>;
}

package/export/pages/controller/ProcessZipPagesTasks.js
@@ -0,0 +1,61 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ZIP_PAGES_WAIT_TIME = exports.ProcessZipPagesTasks = void 0;
var _tasks = require("@webiny/tasks");
var _types = require("../types");
var _CombineZippedPages = require("./CombineZippedPages");
const ZIP_PAGES_WAIT_TIME = exports.ZIP_PAGES_WAIT_TIME = 5;
class ProcessZipPagesTasks {
  async execute(params) {
    const {
      response,
      input,
      isAborted,
      isCloseToTimeout,
      context,
      store
    } = params;
    if (isAborted()) {
      return response.aborted();
    } else if (isCloseToTimeout()) {
      return response.continue({
        ...input
      });
    }
    /**
     * TODO: implement subtasks and subtask management into the base tasks package.
     */
    const result = await context.tasks.listTasks({
      where: {
        parentId: store.getTask().id,
        definitionId: _types.PageExportTask.ZipPages,
        taskStatus_in: [_tasks.TaskDataStatus.RUNNING, _tasks.TaskDataStatus.PENDING]
      },
      limit: 1
    });
    /**
     * Do we still need to wait until all subtasks (Zip Pages) are done?
     */
    if (result.items.length > 0) {
      return response.continue({
        ...input
      }, {
        seconds: ZIP_PAGES_WAIT_TIME
      });
    }
    /**
     * If all subtasks (Zip Pages) are done, we can continue with zipping all zip files into a single one.
     */
    const combineZippedPages = new _CombineZippedPages.CombineZippedPages();
    return combineZippedPages.execute({
      store,
      response
    });
  }
}
exports.ProcessZipPagesTasks = ProcessZipPagesTasks;

//# sourceMappingURL=ProcessZipPagesTasks.js.map

package/export/pages/controller/ProcessZipPagesTasks.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_tasks","require","_types","_CombineZippedPages","ZIP_PAGES_WAIT_TIME","exports","ProcessZipPagesTasks","execute","params","response","input","isAborted","isCloseToTimeout","context","store","aborted","continue","result","tasks","listTasks","where","parentId","getTask","id","definitionId","PageExportTask","ZipPages","taskStatus_in","TaskDataStatus","RUNNING","PENDING","limit","items","length","seconds","combineZippedPages","CombineZippedPages"],"sources":["ProcessZipPagesTasks.ts"],"sourcesContent":["import type { ITaskResponseResult } from \"@webiny/tasks\";\nimport { TaskDataStatus } from \"@webiny/tasks\";\nimport type { IExportPagesControllerTaskParams } from \"../types\";\nimport { PageExportTask } from \"../types\";\nimport { CombineZippedPages } from \"~/export/pages/controller/CombineZippedPages\";\n\nexport const ZIP_PAGES_WAIT_TIME = 5;\n\nexport class ProcessZipPagesTasks {\n public async execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult> {\n const { response, input, isAborted, isCloseToTimeout, context, store } = params;\n\n if (isAborted()) {\n return response.aborted();\n } else if (isCloseToTimeout()) {\n return response.continue({\n ...input\n });\n }\n /**\n * TODO: implement subtasks and subtask management into the base tasks package.\n */\n const result = await context.tasks.listTasks({\n where: {\n parentId: store.getTask().id,\n definitionId: PageExportTask.ZipPages,\n taskStatus_in: [TaskDataStatus.RUNNING, TaskDataStatus.PENDING]\n },\n limit: 1\n });\n /**\n * Do we still need to wait until all subtasks (Zip Pages) are done?\n */\n if (result.items.length > 0) {\n return response.continue(\n {\n ...input\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n /**\n * If all subtasks (Zip Pages) are done, we can continue with zipping all zip files into a single one.\n */\n const combineZippedPages = new CombineZippedPages();\n\n return combineZippedPages.execute({\n store,\n response\n });\n }\n}\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,mBAAA,GAAAF,OAAA;AAEO,MAAMG,mBAAmB,GAAAC,OAAA,CAAAD,mBAAA,GAAG,CAAC;AAE7B,MAAME,oBAAoB,CAAC;EAC9B,MAAaC,OAAOA,CAACC,MAAwC,EAAgC;IACzF,MAAM;MAAEC,QAAQ;MAAEC,KAAK;MAAEC,SAAS;MAAEC,gBAAgB;MAAEC,OAAO;MAAEC;IAAM,CAAC,GAAGN,MAAM;IAE/E,IAAIG,SAAS,CAAC,CAAC,EAAE;MACb,OAAOF,QAAQ,CAACM,OAAO,CAAC,CAAC;IAC7B,CAAC,MAAM,IAAIH,gBAAgB,CAAC,CAAC,EAAE;MAC3B,OAAOH,QAAQ,CAACO,QAAQ,CAAC;QACrB,GAAGN;MACP,CAAC,CAAC;IACN;IACA;AACR;AACA;IACQ,MAAMO,MAAM,GAAG,MAAMJ,OAAO,CAACK,KAAK,CAACC,SAAS,CAAC;MACzCC,KAAK,EAAE;QACHC,QAAQ,EAAEP,KAAK,CAACQ,OAAO,CAAC,CAAC,CAACC,EAAE;QAC5BC,YAAY,EAAEC,qBAAc,CAACC,QAAQ;QACrCC,aAAa,EAAE,CAACC,qBAAc,CAACC,OAAO,EAAED,qBAAc,CAACE,OAAO;MAClE,CAAC;MACDC,KAAK,EAAE;IACX,CAAC,CAAC;IACF;AACR;AACA;IACQ,IAAId,MAAM,CAACe,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;MACzB,OAAOxB,QAAQ,CAACO,QAAQ,CACpB;QACI,GAAGN;MACP,CAAC,EACD;QACIwB,OAAO,EAAE9B;MACb,CACJ,CAAC;IACL;IACA;AACR;AACA;IACQ,MAAM+B,kBAAkB,GAAG,IAAIC,sCAAkB,CAAC,CAAC;IAEnD,OAAOD,kBAAkB,CAAC5B,OAAO,CAAC;MAC9BO,KAAK;MACLL;IACJ,CAAC,CAAC;EACN;AACJ;AAACJ,OAAA,CAAAC,oBAAA,GAAAA,oBAAA","ignoreList":[]}

package/export/pages/types.d.ts
@@ -0,0 +1,53 @@
import type { ITaskResponseDoneResultOutput, ITaskRunParams } from "@webiny/tasks";
import type { PbImportExportContext } from "../../types";
export declare enum PageExportTask {
    Controller = "pageBuilderExportPagesController",
    ZipPages = "pageBuilderExportPagesZipPages",
    Cleanup = "pageBuilderExportPagesCleanup"
}
/**
 * Controller
 */
export interface IExportPagesControllerInput {
    type: "published" | "latest";
    where?: Record<string, any>;
    totalPages: number;
    after?: string | null;
    currentBatch?: number;
    zippingPages?: boolean;
}
export interface IExportPagesControllerOutput extends ITaskResponseDoneResultOutput {
    key: string;
    url: string;
}
export type IExportPagesControllerTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesControllerInput, IExportPagesControllerOutput>;
/**
 * Zip Pages
 */
export interface IExportPagesZipPagesDone {
    [pageId: string]: string;
}
export interface IExportPagesZipPagesInput {
    type: "published" | "latest";
    queue: string[];
    done?: IExportPagesZipPagesDone;
    failed?: string[];
}
export interface IExportPagesZipPagesOutput extends ITaskResponseDoneResultOutput {
    done: IExportPagesZipPagesDone;
    failed: string[];
}
export type IExportPagesZipPagesTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesZipPagesInput, IExportPagesZipPagesOutput>;
/**
 * Combine Zipped Pages
 */
export interface IExportPagesCombineZippedPagesOutput extends ITaskResponseDoneResultOutput {
    key: string;
    url: string;
}
export type IExportPagesCombineZippedPagesParams = Pick<ITaskRunParams<PbImportExportContext, IExportPagesControllerInput, IExportPagesCombineZippedPagesOutput>, "store" | "response">;
/**
 * Cleanup Zip files
 */
export type IExportPagesCleanupInput = Record<string, boolean>;
export type IExportPagesCleanupTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesCleanupInput>;

package/export/pages/types.js
@@ -0,0 +1,26 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.PageExportTask = void 0;
let PageExportTask = exports.PageExportTask = /*#__PURE__*/function (PageExportTask) {
  PageExportTask["Controller"] = "pageBuilderExportPagesController";
  PageExportTask["ZipPages"] = "pageBuilderExportPagesZipPages";
  PageExportTask["Cleanup"] = "pageBuilderExportPagesCleanup";
  return PageExportTask;
}({});
/**
 * Controller
 */
/**
 * Zip Pages
 */
/**
 * Combine Zipped Pages
 */
/**
 * Cleanup Zip files
 */

//# sourceMappingURL=types.js.map

package/export/pages/types.js.map
@@ -0,0 +1 @@
{"version":3,"names":["PageExportTask","exports"],"sources":["types.ts"],"sourcesContent":["import type { ITaskResponseDoneResultOutput, ITaskRunParams } from \"@webiny/tasks\";\nimport type { PbImportExportContext } from \"~/types\";\n\nexport enum PageExportTask {\n Controller = \"pageBuilderExportPagesController\",\n ZipPages = \"pageBuilderExportPagesZipPages\",\n Cleanup = \"pageBuilderExportPagesCleanup\"\n}\n\n/**\n * Controller\n */\nexport interface IExportPagesControllerInput {\n type: \"published\" | \"latest\";\n where?: Record<string, any>;\n totalPages: number;\n after?: string | null;\n currentBatch?: number;\n zippingPages?: boolean;\n}\n\nexport interface IExportPagesControllerOutput extends ITaskResponseDoneResultOutput {\n key: string;\n url: string;\n}\n\nexport type IExportPagesControllerTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesControllerInput,\n IExportPagesControllerOutput\n>;\n\n/**\n * Zip Pages\n */\nexport interface IExportPagesZipPagesDone {\n [pageId: string]: string;\n}\nexport interface IExportPagesZipPagesInput {\n type: \"published\" | \"latest\";\n queue: string[];\n done?: IExportPagesZipPagesDone;\n failed?: string[];\n}\n\nexport interface IExportPagesZipPagesOutput extends ITaskResponseDoneResultOutput {\n done: IExportPagesZipPagesDone;\n failed: string[];\n}\n\nexport type IExportPagesZipPagesTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesZipPagesInput,\n IExportPagesZipPagesOutput\n>;\n\n/**\n * Combine Zipped Pages\n */\n\nexport interface IExportPagesCombineZippedPagesOutput extends ITaskResponseDoneResultOutput {\n key: string;\n url: string;\n}\n\nexport type IExportPagesCombineZippedPagesParams = Pick<\n ITaskRunParams<\n PbImportExportContext,\n IExportPagesControllerInput,\n IExportPagesCombineZippedPagesOutput\n >,\n \"store\" | \"response\"\n>;\n\n/**\n * Cleanup Zip files\n */\nexport type IExportPagesCleanupInput = Record<string, boolean>;\n\nexport type IExportPagesCleanupTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesCleanupInput\n>;\n"],"mappings":";;;;;;IAGYA,cAAc,GAAAC,OAAA,CAAAD,cAAA,0BAAdA,cAAc;EAAdA,cAAc;EAAdA,cAAc;EAAdA,cAAc;EAAA,OAAdA,cAAc;AAAA;AAM1B;AACA;AACA;AAqBA;AACA;AACA;AAsBA;AACA;AACA;AAgBA;AACA;AACA","ignoreList":[]}

package/export/pages/utils.js
@@ -0,0 +1,13 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.createExportPagesDataKey = exports.EXPORT_PAGES_FOLDER_KEY = void 0;
const EXPORT_PAGES_FOLDER_KEY = exports.EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
const createExportPagesDataKey = taskId => {
  return `${EXPORT_PAGES_FOLDER_KEY}/${taskId}`;
};
exports.createExportPagesDataKey = createExportPagesDataKey;

//# sourceMappingURL=utils.js.map

package/export/pages/utils.js.map
@@ -0,0 +1 @@
{"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","exports","createExportPagesDataKey","taskId"],"sources":["utils.ts"],"sourcesContent":["export const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nexport const createExportPagesDataKey = (taskId: string) => {\n return `${EXPORT_PAGES_FOLDER_KEY}/${taskId}`;\n};\n"],"mappings":";;;;;;AAAO,MAAMA,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA,GAAG,wBAAwB;AAExD,MAAME,wBAAwB,GAAIC,MAAc,IAAK;EACxD,OAAO,GAAGH,uBAAuB,IAAIG,MAAM,EAAE;AACjD,CAAC;AAACF,OAAA,CAAAC,wBAAA,GAAAA,wBAAA","ignoreList":[]}

package/export/pages/zipPages/ZipPages.js
@@ -0,0 +1,100 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ZipPages = void 0;
var _PageExporter = require("../../process/exporters/PageExporter");
var _ZipPagesDataManager = require("./ZipPagesDataManager");
var _getPageFactory = require("./getPageFactory");
var _utils = require("../utils");
/**
 * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.
 */
const CLOSE_TO_TIMEOUT_SECONDS = 300;
class ZipPages {
  async execute(params) {
    const {
      response,
      input,
      isAborted,
      isCloseToTimeout,
      context,
      store
    } = params;
    const parentId = store.getTask().parentId;
    if (!parentId) {
      return response.error({
        message: `Could not find parent task ID.`
      });
    }
    const exportPageDataKey = (0, _utils.createExportPagesDataKey)(parentId);
    const dataManager = new _ZipPagesDataManager.ZipPagesDataManager(input);
    if (dataManager.hasMore() === false) {
      return response.done("Task done.", {
        done: dataManager.getDone(),
        failed: dataManager.getFailed()
      });
    }
    const getPage = (0, _getPageFactory.getPageFactory)(context, store, input.type === "published");
    /**
     * We will go page by page and zip them.
     * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.
     *
     */
    for (const pageId of input.queue) {
      /**
       * Check for a possibility that the task was aborted.
       */
      if (isAborted()) {
        return response.aborted();
      }
      /**
       * We need to check if there is enough time left to finish the task.
       */
      if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {
        /**
         * If there is not enough time left, we will pause the task and return the current state.
         */
        return response.continue(dataManager.getInput());
      }
      const page = await getPage(pageId);
      if (!page) {
        await store.addErrorLog({
          message: `Could not load page "${pageId}".`,
          error: {
            message: `Could not load page "${pageId}".`
          }
        });
        dataManager.addFailed(pageId);
        continue;
      }
      try {
        const pageExporter = new _PageExporter.PageExporter(context.fileManager);
        const pageDataZip = await pageExporter.execute(page, exportPageDataKey);
        if (!pageDataZip.Key) {
          throw new Error(`Failed to export page "${pageId}" into a zip file.`);
        }
        dataManager.addDone(pageId, pageDataZip.Key);
      } catch (ex) {
        const message = ex.message || `Failed to export page "${pageId}" into a zip file.`;
        try {
          await store.addErrorLog({
            message,
            error: ex
          });
        } catch {
          console.error(`Failed to add error log: "${message}"`);
        }
        dataManager.addFailed(pageId);
      }
    }
    return response.done("Task done.", {
      done: dataManager.getDone(),
      failed: dataManager.getFailed()
    });
  }
}
exports.ZipPages = ZipPages;

//# sourceMappingURL=ZipPages.js.map

package/export/pages/zipPages/ZipPages.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_PageExporter","require","_ZipPagesDataManager","_getPageFactory","_utils","CLOSE_TO_TIMEOUT_SECONDS","ZipPages","execute","params","response","input","isAborted","isCloseToTimeout","context","store","parentId","getTask","error","message","exportPageDataKey","createExportPagesDataKey","dataManager","ZipPagesDataManager","hasMore","done","getDone","failed","getFailed","getPage","getPageFactory","type","pageId","queue","aborted","continue","getInput","page","addErrorLog","addFailed","pageExporter","PageExporter","fileManager","pageDataZip","Key","Error","addDone","ex","console","exports"],"sources":["ZipPages.ts"],"sourcesContent":["import type { ITaskResponseResult } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesTaskParams } from \"~/export/pages/types\";\nimport { PageExporter } from \"~/export/process/exporters/PageExporter\";\nimport { ZipPagesDataManager } from \"./ZipPagesDataManager\";\nimport { getPageFactory } from \"./getPageFactory\";\nimport { createExportPagesDataKey } from \"~/export/pages/utils\";\n\n/**\n * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.\n */\nconst CLOSE_TO_TIMEOUT_SECONDS = 300;\n\nexport class ZipPages {\n public async execute(params: IExportPagesZipPagesTaskParams): Promise<ITaskResponseResult> {\n const { response, input, isAborted, isCloseToTimeout, context, store } = params;\n\n const parentId = store.getTask().parentId;\n if (!parentId) {\n return response.error({\n message: `Could not find parent task ID.`\n });\n }\n const exportPageDataKey = createExportPagesDataKey(parentId);\n\n const dataManager = new ZipPagesDataManager(input);\n if (dataManager.hasMore() === false) {\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n }\n\n const getPage = getPageFactory(context, store, input.type === \"published\");\n /**\n * We will go page by page and zip them.\n * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.\n *\n */\n for (const pageId of input.queue) {\n /**\n * Check for a possibility that the task was aborted.\n */\n if (isAborted()) {\n return response.aborted();\n }\n /**\n * We need to check if there is enough time left to finish the task.\n */\n if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {\n /**\n * If there is not enough time left, we will pause the task and return the current state.\n */\n return response.continue(dataManager.getInput());\n }\n\n const page = await getPage(pageId);\n if (!page) {\n await store.addErrorLog({\n message: `Could not load page \"${pageId}\".`,\n error: {\n message: `Could not load page \"${pageId}\".`\n }\n });\n dataManager.addFailed(pageId);\n continue;\n }\n try {\n const pageExporter = new PageExporter(context.fileManager);\n const pageDataZip = await pageExporter.execute(page, exportPageDataKey);\n if (!pageDataZip.Key) {\n throw new Error(`Failed to export page \"${pageId}\" into a zip file.`);\n }\n dataManager.addDone(pageId, pageDataZip.Key);\n } catch (ex) {\n const message = ex.message || `Failed to export page \"${pageId}\" into a zip file.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n dataManager.addFailed(pageId);\n }\n }\n\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n 
}\n}\n"],"mappings":";;;;;;AAEA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,oBAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AACA,IAAAG,MAAA,GAAAH,OAAA;AAEA;AACA;AACA;AACA,MAAMI,wBAAwB,GAAG,GAAG;AAE7B,MAAMC,QAAQ,CAAC;EAClB,MAAaC,OAAOA,CAACC,MAAsC,EAAgC;IACvF,MAAM;MAAEC,QAAQ;MAAEC,KAAK;MAAEC,SAAS;MAAEC,gBAAgB;MAAEC,OAAO;MAAEC;IAAM,CAAC,GAAGN,MAAM;IAE/E,MAAMO,QAAQ,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC,CAACD,QAAQ;IACzC,IAAI,CAACA,QAAQ,EAAE;MACX,OAAON,QAAQ,CAACQ,KAAK,CAAC;QAClBC,OAAO,EAAE;MACb,CAAC,CAAC;IACN;IACA,MAAMC,iBAAiB,GAAG,IAAAC,+BAAwB,EAACL,QAAQ,CAAC;IAE5D,MAAMM,WAAW,GAAG,IAAIC,wCAAmB,CAACZ,KAAK,CAAC;IAClD,IAAIW,WAAW,CAACE,OAAO,CAAC,CAAC,KAAK,KAAK,EAAE;MACjC,OAAOd,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;QAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;QAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;MAClC,CAAC,CAAC;IACN;IAEA,MAAMC,OAAO,GAAG,IAAAC,8BAAc,EAAChB,OAAO,EAAEC,KAAK,EAAEJ,KAAK,CAACoB,IAAI,KAAK,WAAW,CAAC;IAC1E;AACR;AACA;AACA;AACA;IACQ,KAAK,MAAMC,MAAM,IAAIrB,KAAK,CAACsB,KAAK,EAAE;MAC9B;AACZ;AACA;MACY,IAAIrB,SAAS,CAAC,CAAC,EAAE;QACb,OAAOF,QAAQ,CAACwB,OAAO,CAAC,CAAC;MAC7B;MACA;AACZ;AACA;MACY,IAAIrB,gBAAgB,CAACP,wBAAwB,CAAC,EAAE;QAC5C;AAChB;AACA;QACgB,OAAOI,QAAQ,CAACyB,QAAQ,CAACb,WAAW,CAACc,QAAQ,CAAC,CAAC,CAAC;MACpD;MAEA,MAAMC,IAAI,GAAG,MAAMR,OAAO,CAACG,MAAM,CAAC;MAClC,IAAI,CAACK,IAAI,EAAE;QACP,MAAMtB,KAAK,CAACuB,WAAW,CAAC;UACpBnB,OAAO,EAAE,wBAAwBa,MAAM,IAAI;UAC3Cd,KAAK,EAAE;YACHC,OAAO,EAAE,wBAAwBa,MAAM;UAC3C;QACJ,CAAC,CAAC;QACFV,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;QAC7B;MACJ;MACA,IAAI;QACA,MAAMQ,YAAY,GAAG,IAAIC,0BAAY,CAAC3B,OAAO,CAAC4B,WAAW,CAAC;QAC1D,MAAMC,WAAW,GAAG,MAAMH,YAAY,CAAChC,OAAO,CAAC6B,IAAI,EAAEjB,iBAAiB,CAAC;QACvE,IAAI,CAACuB,WAAW,CAACC,GAAG,EAAE;UAClB,MAAM,IAAIC,KAAK,CAAC,0BAA0Bb,MAAM,oBAAoB,CAAC;QACzE;QACAV,WAAW,CAACwB,OAAO,CAACd,MAAM,EAAEW,WAAW,CAACC,GAAG,CAAC;MAChD,CAAC,CAAC,OAAOG,EAAE,EAAE;QACT,MAAM5B,OAAO,GAAG4B,EAAE,CAAC5B,OAAO,IAAI,0BAA0Ba,MAAM,oBAAoB;QAClF,IAAI;UACA,MAAMjB,KAAK,CAACuB,WAAW,CAAC;YACpBnB,OAAO;YACPD,KAAK,EAAE6B;UACX,CAAC,CAAC;QACN,CAAC,CAAC,MAAM;UACJC,OAAO,CAAC9B,KAAK,CAAC,6BAA6BC,OAAO,GAAG,CAAC;QAC1D;QACAG,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;MACjC;IACJ;IAEA,OAAOtB,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;MAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;MAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;IAClC,CAAC,CAAC;EACN;AACJ;AAACqB,OAAA,CAAA1C,QAAA,GAAAA,QAAA","ignoreList":[]}

package/export/pages/zipPages/ZipPagesDataManager.d.ts
@@ -0,0 +1,14 @@
import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from "../types";
export declare class ZipPagesDataManager {
    private readonly input;
    private readonly queue;
    private readonly done;
    private readonly failed;
    constructor(input: IExportPagesZipPagesInput);
    hasMore(): boolean;
    addDone(pageId: string, key: string): void;
    addFailed(pageId: string): void;
    getFailed(): string[];
    getDone(): IExportPagesZipPagesDone;
    getInput(): IExportPagesZipPagesInput;
}

package/export/pages/zipPages/ZipPagesDataManager.js
@@ -0,0 +1,46 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ZipPagesDataManager = void 0;
class ZipPagesDataManager {
  constructor(input) {
    this.input = input;
    this.queue = new Set(input.queue);
    this.done = {
      ...input.done
    };
    this.failed = new Set(input.failed || []);
  }
  hasMore() {
    return this.queue.size > 0;
  }
  addDone(pageId, key) {
    this.queue.delete(pageId);
    this.failed.delete(pageId);
    this.done[pageId] = key;
  }
  addFailed(pageId) {
    this.queue.delete(pageId);
    this.failed.add(pageId);
    delete this.done[pageId];
  }
  getFailed() {
    return Array.from(this.failed);
  }
  getDone() {
    return this.done;
  }
  getInput() {
    return {
      ...this.input,
      queue: Array.from(this.queue),
      failed: Array.from(this.failed),
      done: this.done
    };
  }
}
exports.ZipPagesDataManager = ZipPagesDataManager;

//# sourceMappingURL=ZipPagesDataManager.js.map

package/export/pages/zipPages/ZipPagesDataManager.js.map
@@ -0,0 +1 @@
{"version":3,"names":["ZipPagesDataManager","constructor","input","queue","Set","done","failed","hasMore","size","addDone","pageId","key","delete","addFailed","add","getFailed","Array","from","getDone","getInput","exports"],"sources":["ZipPagesDataManager.ts"],"sourcesContent":["import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport class ZipPagesDataManager {\n private readonly input: IExportPagesZipPagesInput;\n private readonly queue: Set<string>;\n private readonly done: IExportPagesZipPagesDone;\n private readonly failed: Set<string>;\n\n public constructor(input: IExportPagesZipPagesInput) {\n this.input = input;\n this.queue = new Set(input.queue);\n this.done = {\n ...input.done\n };\n this.failed = new Set(input.failed || []);\n }\n\n public hasMore(): boolean {\n return this.queue.size > 0;\n }\n\n public addDone(pageId: string, key: string): void {\n this.queue.delete(pageId);\n this.failed.delete(pageId);\n this.done[pageId] = key;\n }\n\n public addFailed(pageId: string): void {\n this.queue.delete(pageId);\n this.failed.add(pageId);\n delete this.done[pageId];\n }\n\n public getFailed() {\n return Array.from(this.failed);\n }\n\n public getDone() {\n return this.done;\n }\n\n public getInput(): IExportPagesZipPagesInput {\n return {\n ...this.input,\n queue: Array.from(this.queue),\n failed: Array.from(this.failed),\n done: this.done\n };\n }\n}\n"],"mappings":";;;;;;AAEO,MAAMA,mBAAmB,CAAC;EAMtBC,WAAWA,CAACC,KAAgC,EAAE;IACjD,IAAI,CAACA,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,KAAK,GAAG,IAAIC,GAAG,CAACF,KAAK,CAACC,KAAK,CAAC;IACjC,IAAI,CAACE,IAAI,GAAG;MACR,GAAGH,KAAK,CAACG;IACb,CAAC;IACD,IAAI,CAACC,MAAM,GAAG,IAAIF,GAAG,CAACF,KAAK,CAACI,MAAM,IAAI,EAAE,CAAC;EAC7C;EAEOC,OAAOA,CAAA,EAAY;IACtB,OAAO,IAAI,CAACJ,KAAK,CAACK,IAAI,GAAG,CAAC;EAC9B;EAEOC,OAAOA,CAACC,MAAc,EAAEC,GAAW,EAAQ;IAC9C,IAAI,CAACR,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACM,MAAM,CAACF,MAAM,CAAC;IAC1B,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC,GAAGC,GAAG;EAC3B;EAEOE,SAASA,CAACH,MAAc,EAAQ;IACnC,IAAI,CAACP,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACQ,GAAG,CAACJ,MAAM,CAAC;IACvB,OAAO,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC;EAC5B;EAEOK,SAASA,CAAA,EAAG;IACf,OAAOC,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;EAClC;EAEOY,OAAOA,CAAA,EAAG;IACb,OAAO,IAAI,CAACb,IAAI;EACpB;EAEOc,QAAQA,CAAA,EAA8B;IACzC,OAAO;MACH,GAAG,IAAI,CAACjB,KAAK;MACbC,KAAK,EAAEa,KAAK,CAACC,IAAI,CAAC,IAAI,CAACd,KAAK,CAAC;MAC7BG,MAAM,EAAEU,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;MAC/BD,IAAI,EAAE,IAAI,CAACA;IACf,CAAC;EACL;AACJ;AAACe,OAAA,CAAApB,mBAAA,GAAAA,mBAAA","ignoreList":[]}

package/export/pages/zipPages/getPageFactory.d.ts
@@ -0,0 +1,4 @@
import type { PbImportExportContext } from "../../../graphql/types";
import type { ITaskManagerStore } from "@webiny/tasks";
import type { IExportPagesZipPagesInput } from "../types";
export declare const getPageFactory: (context: PbImportExportContext, store: ITaskManagerStore<IExportPagesZipPagesInput>, published: boolean) => (pageId: string) => Promise<import("@webiny/api-page-builder/types").Page<Record<string, any> | null> | null>;

package/export/pages/zipPages/getPageFactory.js
@@ -0,0 +1,38 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getPageFactory = void 0;
const getPageFactory = (context, store, published) => {
  return async pageId => {
    if (published) {
      try {
        return await context.pageBuilder.getPublishedPageById({
          id: pageId
        });
      } catch (ex) {
        /**
         * We do not need to do anything on exception because we will fetch the latest version.
         */
      }
    }
    try {
      return await context.pageBuilder.getPage(pageId);
    } catch (ex) {
      const message = `There is no page with ID ${pageId}.`;
      try {
        await store.addErrorLog({
          message,
          error: ex
        });
      } catch {
        console.error(`Failed to add error log: "${message}"`);
      }
      return null;
    }
  };
};
exports.getPageFactory = getPageFactory;

//# sourceMappingURL=getPageFactory.js.map

package/export/pages/zipPages/getPageFactory.js.map
@@ -0,0 +1 @@
{"version":3,"names":["getPageFactory","context","store","published","pageId","pageBuilder","getPublishedPageById","id","ex","getPage","message","addErrorLog","error","console","exports"],"sources":["getPageFactory.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/graphql/types\";\nimport type { ITaskManagerStore } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport const getPageFactory = (\n context: PbImportExportContext,\n store: ITaskManagerStore<IExportPagesZipPagesInput>,\n published: boolean\n) => {\n return async (pageId: string) => {\n if (published) {\n try {\n return await context.pageBuilder.getPublishedPageById({\n id: pageId\n });\n } catch (ex) {\n /**\n * We do not need to do anything on exception because we will fetch the latest version.\n */\n }\n }\n try {\n return await context.pageBuilder.getPage(pageId);\n } catch (ex) {\n const message = `There is no page with ID ${pageId}.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n return null;\n }\n };\n};\n"],"mappings":";;;;;;AAIO,MAAMA,cAAc,GAAGA,CAC1BC,OAA8B,EAC9BC,KAAmD,EACnDC,SAAkB,KACjB;EACD,OAAO,MAAOC,MAAc,IAAK;IAC7B,IAAID,SAAS,EAAE;MACX,IAAI;QACA,OAAO,MAAMF,OAAO,CAACI,WAAW,CAACC,oBAAoB,CAAC;UAClDC,EAAE,EAAEH;QACR,CAAC,CAAC;MACN,CAAC,CAAC,OAAOI,EAAE,EAAE;QACT;AAChB;AACA;MAFgB;IAIR;IACA,IAAI;MACA,OAAO,MAAMP,OAAO,CAACI,WAAW,CAACI,OAAO,CAACL,MAAM,CAAC;IACpD,CAAC,CAAC,OAAOI,EAAE,EAAE;MACT,MAAME,OAAO,GAAG,4BAA4BN,MAAM,GAAG;MACrD,IAAI;QACA,MAAMF,KAAK,CAACS,WAAW,CAAC;UACpBD,OAAO;UACPE,KAAK,EAAEJ;QACX,CAAC,CAAC;MACN,CAAC,CAAC,MAAM;QACJK,OAAO,CAACD,KAAK,CAAC,6BAA6BF,OAAO,GAAG,CAAC;MAC1D;MACA,OAAO,IAAI;IACf;EACJ,CAAC;AACL,CAAC;AAACI,OAAA,CAAAd,cAAA,GAAAA,cAAA","ignoreList":[]}

package/export/process/blocksHandler.d.ts
@@ -1,5 +1,5 @@
-import { PbImportExportContext } from "../../types";
-import { Configuration, Payload, Response } from "
+import type { PbImportExportContext } from "../../types";
+import type { Configuration, Payload, Response } from "./";
 /**
  * Handles the export blocks process workflow.
  */

package/export/process/blocksHandler.js
@@ -7,9 +7,9 @@ exports.blocksHandler = void 0;
 var _types = require("../../types");
 var _client = require("../../client");
 var _handlerGraphql = require("@webiny/handler-graphql");
-var _utils = require("../utils");
 var _mockSecurity = require("../../mockSecurity");
-var
+var _utils = require("@webiny/utils");
+var _BlockExporter = require("./exporters/BlockExporter");
 /**
  * Handles the export blocks process workflow.
  */
@@ -37,7 +37,7 @@ const blocksHandler = async (configuration, payload, context) => {
      * Note: We're not going to DB for finding the next sub-task to process,
      * because the data might be out of sync due to GSI eventual consistency.
      */
-    subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0,
+    subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils.zeroPad)(subTaskIndex, 5));
     /**
      * Base condition!!
      * Bail out early, if task not found or task's status is not "pending".
@@ -65,6 +65,11 @@ const blocksHandler = async (configuration, payload, context) => {
       throw new _handlerGraphql.NotFoundError(`Unable to load block "${blockId}"`);
     }
     log(`Processing block key "${blockId}"`);
+    const blockCategory = await pageBuilder.getBlockCategory(block.blockCategory);
+    if (!blockCategory) {
+      log(`Unable to load block category "${block.blockCategory}"`);
+      throw new _handlerGraphql.NotFoundError(`Unable to load block category "${block.blockCategory}"`);
+    }
 
     // Mark task status as PROCESSING
     subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
@@ -77,8 +82,8 @@ const blocksHandler = async (configuration, payload, context) => {
     });
     prevStatusOfSubTask = subTask.status;
     log(`Extracting block data and uploading to storage...`);
-
-    const blockDataZip = await
+    const blockExporter = new _BlockExporter.BlockExporter(fileManager);
+    const blockDataZip = await blockExporter.execute(block, blockCategory, exportBlocksDataKey);
     log(`Finish uploading zip...`);
     // Update task record in DB
     subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
@@ -159,4 +164,6 @@ const blocksHandler = async (configuration, payload, context) => {
     error: null
   };
 };
-exports.blocksHandler = blocksHandler;
+exports.blocksHandler = blocksHandler;
+
+//# sourceMappingURL=blocksHandler.js.map