@webiny/api-page-builder-import-export 0.0.0-unstable.2696f9d9e8 → 0.0.0-unstable.2aaa1916d9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +1 -1
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +2 -2
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/formsHandler.d.ts +2 -2
- package/export/combine/formsHandler.js.map +1 -1
- package/export/combine/index.d.ts +2 -2
- package/export/combine/index.js +2 -2
- package/export/combine/index.js.map +1 -1
- package/export/combine/templatesHandler.d.ts +2 -2
- package/export/combine/templatesHandler.js.map +1 -1
- package/export/pages/ExportPagesCleanup.d.ts +5 -0
- package/export/pages/ExportPagesCleanup.js +82 -0
- package/export/pages/ExportPagesCleanup.js.map +1 -0
- package/export/pages/ExportPagesController.d.ts +5 -0
- package/export/pages/ExportPagesController.js +31 -0
- package/export/pages/ExportPagesController.js.map +1 -0
- package/export/pages/ExportPagesZipPages.d.ts +5 -0
- package/export/pages/ExportPagesZipPages.js +23 -0
- package/export/pages/ExportPagesZipPages.js.map +1 -0
- package/export/pages/controller/CombineZippedPages.d.ts +5 -0
- package/export/pages/controller/CombineZippedPages.js +75 -0
- package/export/pages/controller/CombineZippedPages.js.map +1 -0
- package/export/pages/controller/CreateZipPagesTasks.d.ts +8 -0
- package/export/pages/controller/CreateZipPagesTasks.js +103 -0
- package/export/pages/controller/CreateZipPagesTasks.js.map +1 -0
- package/export/pages/controller/ProcessZipPagesTasks.d.ts +6 -0
- package/export/pages/controller/ProcessZipPagesTasks.js +61 -0
- package/export/pages/controller/ProcessZipPagesTasks.js.map +1 -0
- package/export/pages/types.d.ts +53 -0
- package/export/pages/types.js +26 -0
- package/export/pages/types.js.map +1 -0
- package/export/pages/utils.d.ts +2 -0
- package/export/pages/utils.js +13 -0
- package/export/pages/utils.js.map +1 -0
- package/export/pages/zipPages/ZipPages.d.ts +5 -0
- package/export/pages/zipPages/ZipPages.js +100 -0
- package/export/pages/zipPages/ZipPages.js.map +1 -0
- package/export/pages/zipPages/ZipPagesDataManager.d.ts +14 -0
- package/export/pages/zipPages/ZipPagesDataManager.js +46 -0
- package/export/pages/zipPages/ZipPagesDataManager.js.map +1 -0
- package/export/pages/zipPages/getPageFactory.d.ts +4 -0
- package/export/pages/zipPages/getPageFactory.js +38 -0
- package/export/pages/zipPages/getPageFactory.js.map +1 -0
- package/export/process/blocksHandler.d.ts +2 -2
- package/export/process/blocksHandler.js.map +1 -1
- package/export/process/exporters/BlockExporter.d.ts +2 -2
- package/export/process/exporters/BlockExporter.js.map +1 -1
- package/export/process/exporters/FormExporter.d.ts +2 -2
- package/export/process/exporters/FormExporter.js.map +1 -1
- package/export/process/exporters/PageExporter.d.ts +10 -3
- package/export/process/exporters/PageExporter.js +2 -1
- package/export/process/exporters/PageExporter.js.map +1 -1
- package/export/process/exporters/PageTemplateExporter.d.ts +3 -3
- package/export/process/exporters/PageTemplateExporter.js +3 -1
- package/export/process/exporters/PageTemplateExporter.js.map +1 -1
- package/export/process/formsHandler.d.ts +2 -2
- package/export/process/formsHandler.js.map +1 -1
- package/export/process/index.d.ts +2 -2
- package/export/process/index.js +2 -2
- package/export/process/index.js.map +1 -1
- package/export/process/templatesHandler.d.ts +2 -2
- package/export/process/templatesHandler.js.map +1 -1
- package/export/s3Stream.d.ts +6 -4
- package/export/s3Stream.js +1 -2
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +4 -4
- package/export/utils.js +7 -9
- package/export/utils.js.map +1 -1
- package/export/zipper.d.ts +3 -2
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.d.ts +1 -1
- package/graphql/crud/blocks.crud.js +1 -2
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/forms.crud.d.ts +1 -1
- package/graphql/crud/forms.crud.js +1 -2
- package/graphql/crud/forms.crud.js.map +1 -1
- package/graphql/crud/importExportTasks.crud.d.ts +2 -2
- package/graphql/crud/importExportTasks.crud.js +73 -58
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.d.ts +1 -2
- package/graphql/crud/pages.crud.js +208 -125
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +1 -1
- package/graphql/crud/templates.crud.js +1 -2
- package/graphql/crud/templates.crud.js.map +1 -1
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +2 -2
- package/graphql/graphql/blocks.gql.js +4 -6
- package/graphql/graphql/blocks.gql.js.map +1 -1
- package/graphql/graphql/forms.gql.d.ts +2 -2
- package/graphql/graphql/forms.gql.js +4 -6
- package/graphql/graphql/forms.gql.js.map +1 -1
- package/graphql/graphql/importExportTasks.gql.d.ts +2 -2
- package/graphql/graphql/importExportTasks.gql.js +5 -7
- package/graphql/graphql/importExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +101 -14
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +2 -2
- package/graphql/graphql/templates.gql.js +4 -6
- package/graphql/graphql/templates.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.d.ts +3 -3
- package/graphql/graphql/utils/resolve.js +14 -5
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +1 -2
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +2 -1
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +61 -33
- package/graphql/types.js.map +1 -1
- package/import/constants.js +3 -6
- package/import/constants.js.map +1 -1
- package/import/create/blocksHandler.d.ts +2 -2
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/formsHandler.d.ts +2 -2
- package/import/create/formsHandler.js.map +1 -1
- package/import/create/index.d.ts +2 -2
- package/import/create/index.js +2 -2
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.d.ts +2 -2
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.d.ts +2 -2
- package/import/create/templatesHandler.js.map +1 -1
- package/import/pages/ImportPagesController.d.ts +5 -0
- package/import/pages/ImportPagesController.js +29 -0
- package/import/pages/ImportPagesController.js.map +1 -0
- package/import/pages/ImportPagesProcessPages.d.ts +6 -0
- package/import/pages/ImportPagesProcessPages.js +112 -0
- package/import/pages/ImportPagesProcessPages.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.d.ts +6 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js +40 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessZipFile.d.ts +5 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js +71 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js.map +1 -0
- package/import/{process/pages → pages/process}/importPage.d.ts +3 -4
- package/import/pages/process/importPage.js.map +1 -0
- package/import/pages/types.d.ts +48 -0
- package/import/pages/types.js +20 -0
- package/import/pages/types.js.map +1 -0
- package/import/process/blocks/ElementIdsProcessor.d.ts +5 -0
- package/import/process/blocks/ElementIdsProcessor.js +26 -0
- package/import/process/blocks/ElementIdsProcessor.js.map +1 -0
- package/import/process/blocks/blocksHandler.d.ts +2 -2
- package/import/process/blocks/blocksHandler.js.map +1 -1
- package/import/process/blocks/importBlock.d.ts +3 -3
- package/import/process/blocks/importBlock.js +5 -2
- package/import/process/blocks/importBlock.js.map +1 -1
- package/import/process/forms/formsHandler.d.ts +2 -2
- package/import/process/forms/formsHandler.js.map +1 -1
- package/import/process/forms/importForm.d.ts +2 -2
- package/import/process/forms/importForm.js.map +1 -1
- package/import/process/index.d.ts +2 -2
- package/import/process/index.js +2 -2
- package/import/process/index.js.map +1 -1
- package/import/process/templates/importTemplate.d.ts +3 -3
- package/import/process/templates/importTemplate.js.map +1 -1
- package/import/process/templates/templatesHandler.d.ts +2 -2
- package/import/process/templates/templatesHandler.js +3 -1
- package/import/process/templates/templatesHandler.js.map +1 -1
- package/import/utils/deleteS3Folder.js.map +1 -1
- package/import/utils/extractAndUploadZipFileContents.d.ts +1 -1
- package/import/utils/extractAndUploadZipFileContents.js +6 -3
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
- package/import/utils/extractZipAndUploadToS3.d.ts +1 -1
- package/import/utils/extractZipAndUploadToS3.js.map +1 -1
- package/import/utils/getFileNameWithoutExt.js.map +1 -1
- package/import/utils/index.js.map +1 -1
- package/import/utils/initialStats.js.map +1 -1
- package/import/utils/prepareDataDirMap.d.ts +1 -1
- package/import/utils/prepareDataDirMap.js.map +1 -1
- package/import/utils/updateFilesInData.d.ts +1 -1
- package/import/utils/updateFilesInData.js.map +1 -1
- package/import/utils/uploadAssets.d.ts +3 -3
- package/import/utils/uploadAssets.js.map +1 -1
- package/import/utils/uploadFilesFromS3.d.ts +3 -3
- package/import/utils/uploadFilesFromS3.js.map +1 -1
- package/mockSecurity.d.ts +1 -1
- package/mockSecurity.js.map +1 -1
- package/package.json +33 -41
- package/tasks/common/ChildTasksCleanup.d.ts +12 -0
- package/tasks/common/ChildTasksCleanup.js +64 -0
- package/tasks/common/ChildTasksCleanup.js.map +1 -0
- package/tasks/index.d.ts +1 -0
- package/tasks/index.js +13 -0
- package/tasks/index.js.map +1 -0
- package/tasks/pages/exportPagesCleanupTask.d.ts +3 -0
- package/tasks/pages/exportPagesCleanupTask.js +36 -0
- package/tasks/pages/exportPagesCleanupTask.js.map +1 -0
- package/tasks/pages/exportPagesControllerTask.d.ts +3 -0
- package/tasks/pages/exportPagesControllerTask.js +83 -0
- package/tasks/pages/exportPagesControllerTask.js.map +1 -0
- package/tasks/pages/exportPagesZipPagesTask.d.ts +3 -0
- package/tasks/pages/exportPagesZipPagesTask.js +39 -0
- package/tasks/pages/exportPagesZipPagesTask.js.map +1 -0
- package/tasks/pages/importPagesControllerTask.d.ts +3 -0
- package/tasks/pages/importPagesControllerTask.js +39 -0
- package/tasks/pages/importPagesControllerTask.js.map +1 -0
- package/tasks/pages/importPagesProcessPageTask.d.ts +3 -0
- package/tasks/pages/importPagesProcessPageTask.js +39 -0
- package/tasks/pages/importPagesProcessPageTask.js.map +1 -0
- package/tasks/pages/index.d.ts +1 -0
- package/tasks/pages/index.js +17 -0
- package/tasks/pages/index.js.map +1 -0
- package/types.d.ts +2 -2
- package/types.js +2 -4
- package/types.js.map +1 -1
- package/utils/ZipFiles.d.ts +11 -0
- package/utils/ZipFiles.js +124 -0
- package/utils/ZipFiles.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +0 -6
- package/export/combine/pagesHandler.js +0 -101
- package/export/combine/pagesHandler.js.map +0 -1
- package/export/process/pagesHandler.d.ts +0 -6
- package/export/process/pagesHandler.js +0 -191
- package/export/process/pagesHandler.js.map +0 -1
- package/import/process/pages/importPage.js.map +0 -1
- package/import/process/pages/pagesHandler.d.ts +0 -3
- package/import/process/pages/pagesHandler.js +0 -185
- package/import/process/pages/pagesHandler.js.map +0 -1
- /package/import/{process/pages → pages/process}/importPage.js +0 -0

package/export/pages/controller/ProcessZipPagesTasks.js
@@ -0,0 +1,61 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ZIP_PAGES_WAIT_TIME = exports.ProcessZipPagesTasks = void 0;
+var _tasks = require("@webiny/tasks");
+var _types = require("../types");
+var _CombineZippedPages = require("./CombineZippedPages");
+const ZIP_PAGES_WAIT_TIME = exports.ZIP_PAGES_WAIT_TIME = 5;
+class ProcessZipPagesTasks {
+  async execute(params) {
+    const {
+      response,
+      input,
+      isAborted,
+      isCloseToTimeout,
+      context,
+      store
+    } = params;
+    if (isAborted()) {
+      return response.aborted();
+    } else if (isCloseToTimeout()) {
+      return response.continue({
+        ...input
+      });
+    }
+    /**
+     * TODO: implement subtasks and subtask management into the base tasks package.
+     */
+    const result = await context.tasks.listTasks({
+      where: {
+        parentId: store.getTask().id,
+        definitionId: _types.PageExportTask.ZipPages,
+        taskStatus_in: [_tasks.TaskDataStatus.RUNNING, _tasks.TaskDataStatus.PENDING]
+      },
+      limit: 1
+    });
+    /**
+     * Do we still need to wait until all subtasks (Zip Pages) are done?
+     */
+    if (result.items.length > 0) {
+      return response.continue({
+        ...input
+      }, {
+        seconds: ZIP_PAGES_WAIT_TIME
+      });
+    }
+    /**
+     * If all subtasks (Zip Pages) are done, we can continue with zipping all zip files into a single one.
+     */
+    const combineZippedPages = new _CombineZippedPages.CombineZippedPages();
+    return combineZippedPages.execute({
+      store,
+      response
+    });
+  }
+}
+exports.ProcessZipPagesTasks = ProcessZipPagesTasks;
+
+//# sourceMappingURL=ProcessZipPagesTasks.js.map

package/export/pages/controller/ProcessZipPagesTasks.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_tasks","require","_types","_CombineZippedPages","ZIP_PAGES_WAIT_TIME","exports","ProcessZipPagesTasks","execute","params","response","input","isAborted","isCloseToTimeout","context","store","aborted","continue","result","tasks","listTasks","where","parentId","getTask","id","definitionId","PageExportTask","ZipPages","taskStatus_in","TaskDataStatus","RUNNING","PENDING","limit","items","length","seconds","combineZippedPages","CombineZippedPages"],"sources":["ProcessZipPagesTasks.ts"],"sourcesContent":["import type { ITaskResponseResult } from \"@webiny/tasks\";\nimport { TaskDataStatus } from \"@webiny/tasks\";\nimport type { IExportPagesControllerTaskParams } from \"../types\";\nimport { PageExportTask } from \"../types\";\nimport { CombineZippedPages } from \"~/export/pages/controller/CombineZippedPages\";\n\nexport const ZIP_PAGES_WAIT_TIME = 5;\n\nexport class ProcessZipPagesTasks {\n public async execute(params: IExportPagesControllerTaskParams): Promise<ITaskResponseResult> {\n const { response, input, isAborted, isCloseToTimeout, context, store } = params;\n\n if (isAborted()) {\n return response.aborted();\n } else if (isCloseToTimeout()) {\n return response.continue({\n ...input\n });\n }\n /**\n * TODO: implement subtasks and subtask management into the base tasks package.\n */\n const result = await context.tasks.listTasks({\n where: {\n parentId: store.getTask().id,\n definitionId: PageExportTask.ZipPages,\n taskStatus_in: [TaskDataStatus.RUNNING, TaskDataStatus.PENDING]\n },\n limit: 1\n });\n /**\n * Do we still need to wait until all subtasks (Zip Pages) are done?\n */\n if (result.items.length > 0) {\n return response.continue(\n {\n ...input\n },\n {\n seconds: ZIP_PAGES_WAIT_TIME\n }\n );\n }\n /**\n * If all subtasks (Zip Pages) are done, we can continue with zipping all zip files into a single one.\n */\n const combineZippedPages = new CombineZippedPages();\n\n return combineZippedPages.execute({\n store,\n response\n });\n }\n}\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,mBAAA,GAAAF,OAAA;AAEO,MAAMG,mBAAmB,GAAAC,OAAA,CAAAD,mBAAA,GAAG,CAAC;AAE7B,MAAME,oBAAoB,CAAC;EAC9B,MAAaC,OAAOA,CAACC,MAAwC,EAAgC;IACzF,MAAM;MAAEC,QAAQ;MAAEC,KAAK;MAAEC,SAAS;MAAEC,gBAAgB;MAAEC,OAAO;MAAEC;IAAM,CAAC,GAAGN,MAAM;IAE/E,IAAIG,SAAS,CAAC,CAAC,EAAE;MACb,OAAOF,QAAQ,CAACM,OAAO,CAAC,CAAC;IAC7B,CAAC,MAAM,IAAIH,gBAAgB,CAAC,CAAC,EAAE;MAC3B,OAAOH,QAAQ,CAACO,QAAQ,CAAC;QACrB,GAAGN;MACP,CAAC,CAAC;IACN;IACA;AACR;AACA;IACQ,MAAMO,MAAM,GAAG,MAAMJ,OAAO,CAACK,KAAK,CAACC,SAAS,CAAC;MACzCC,KAAK,EAAE;QACHC,QAAQ,EAAEP,KAAK,CAACQ,OAAO,CAAC,CAAC,CAACC,EAAE;QAC5BC,YAAY,EAAEC,qBAAc,CAACC,QAAQ;QACrCC,aAAa,EAAE,CAACC,qBAAc,CAACC,OAAO,EAAED,qBAAc,CAACE,OAAO;MAClE,CAAC;MACDC,KAAK,EAAE;IACX,CAAC,CAAC;IACF;AACR;AACA;IACQ,IAAId,MAAM,CAACe,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;MACzB,OAAOxB,QAAQ,CAACO,QAAQ,CACpB;QACI,GAAGN;MACP,CAAC,EACD;QACIwB,OAAO,EAAE9B;MACb,CACJ,CAAC;IACL;IACA;AACR;AACA;IACQ,MAAM+B,kBAAkB,GAAG,IAAIC,sCAAkB,CAAC,CAAC;IAEnD,OAAOD,kBAAkB,CAAC5B,OAAO,CAAC;MAC9BO,KAAK;MACLL;IACJ,CAAC,CAAC;EACN;AACJ;AAACJ,OAAA,CAAAC,oBAAA,GAAAA,oBAAA","ignoreList":[]}
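
The new controller step above polls its child "Zip Pages" tasks and either re-schedules itself or moves on to combining the per-page zips. The sketch below is a standalone illustration of that decision, not the @webiny/tasks API: `ChildTaskCounter`, `decideNextStep`, and `StepResult` are invented names, and the real code gets the unfinished-task count from `context.tasks.listTasks()` as shown in the hunk.

```ts
// Standalone sketch of the ProcessZipPagesTasks control flow. All names here are
// invented for illustration; only the wait-or-combine logic mirrors the hunk above.
type StepResult =
    | { action: "wait"; seconds: number } // analogous to response.continue(input, { seconds })
    | { action: "combine" };              // analogous to handing off to CombineZippedPages

interface ChildTaskCounter {
    // In the real task this is backed by context.tasks.listTasks({ where: { parentId, ... }, limit: 1 }).
    countUnfinished(parentId: string): Promise<number>;
}

const ZIP_PAGES_WAIT_TIME = 5;

export async function decideNextStep(
    children: ChildTaskCounter,
    parentId: string
): Promise<StepResult> {
    const unfinished = await children.countUnfinished(parentId);
    if (unfinished > 0) {
        // Some "Zip Pages" subtasks are still RUNNING or PENDING: check again in a few seconds.
        return { action: "wait", seconds: ZIP_PAGES_WAIT_TIME };
    }
    // Every subtask has finished, so the combined zip can be produced.
    return { action: "combine" };
}
```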

package/export/pages/types.d.ts
@@ -0,0 +1,53 @@
+import type { ITaskResponseDoneResultOutput, ITaskRunParams } from "@webiny/tasks";
+import type { PbImportExportContext } from "../../types";
+export declare enum PageExportTask {
+    Controller = "pageBuilderExportPagesController",
+    ZipPages = "pageBuilderExportPagesZipPages",
+    Cleanup = "pageBuilderExportPagesCleanup"
+}
+/**
+ * Controller
+ */
+export interface IExportPagesControllerInput {
+    type: "published" | "latest";
+    where?: Record<string, any>;
+    totalPages: number;
+    after?: string | null;
+    currentBatch?: number;
+    zippingPages?: boolean;
+}
+export interface IExportPagesControllerOutput extends ITaskResponseDoneResultOutput {
+    key: string;
+    url: string;
+}
+export type IExportPagesControllerTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesControllerInput, IExportPagesControllerOutput>;
+/**
+ * Zip Pages
+ */
+export interface IExportPagesZipPagesDone {
+    [pageId: string]: string;
+}
+export interface IExportPagesZipPagesInput {
+    type: "published" | "latest";
+    queue: string[];
+    done?: IExportPagesZipPagesDone;
+    failed?: string[];
+}
+export interface IExportPagesZipPagesOutput extends ITaskResponseDoneResultOutput {
+    done: IExportPagesZipPagesDone;
+    failed: string[];
+}
+export type IExportPagesZipPagesTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesZipPagesInput, IExportPagesZipPagesOutput>;
+/**
+ * Combine Zipped Pages
+ */
+export interface IExportPagesCombineZippedPagesOutput extends ITaskResponseDoneResultOutput {
+    key: string;
+    url: string;
+}
+export type IExportPagesCombineZippedPagesParams = Pick<ITaskRunParams<PbImportExportContext, IExportPagesControllerInput, IExportPagesCombineZippedPagesOutput>, "store" | "response">;
+/**
+ * Cleanup Zip files
+ */
+export type IExportPagesCleanupInput = Record<string, boolean>;
+export type IExportPagesCleanupTaskParams = ITaskRunParams<PbImportExportContext, IExportPagesCleanupInput>;
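
To make the new task contracts concrete, here is a hedged sketch of inputs that satisfy them. The interfaces are copied from the hunk above so the snippet stands alone; every field value is invented for illustration.

```ts
// Local copies of two of the interfaces added in export/pages/types.d.ts.
interface IExportPagesControllerInput {
    type: "published" | "latest";
    where?: Record<string, any>;
    totalPages: number;
    after?: string | null;
    currentBatch?: number;
    zippingPages?: boolean;
}

interface IExportPagesZipPagesInput {
    type: "published" | "latest";
    queue: string[];
    done?: { [pageId: string]: string };
    failed?: string[];
}

// Hypothetical controller input: export the published revision of 120 pages, batch by batch.
const controllerInput: IExportPagesControllerInput = {
    type: "published",
    where: { category: "blog" },
    totalPages: 120,
    after: null,
    currentBatch: 0,
    zippingPages: false
};

// Hypothetical "Zip Pages" child-task input: one batch of page IDs, with progress
// (done/failed) carried over between invocations.
const zipPagesInput: IExportPagesZipPagesInput = {
    type: "published",
    queue: ["65bf9f1c0001", "65bf9f1c0002"],
    done: {},
    failed: []
};

console.log(controllerInput.totalPages, zipPagesInput.queue.length);
```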

package/export/pages/types.js
@@ -0,0 +1,26 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.PageExportTask = void 0;
+let PageExportTask = exports.PageExportTask = /*#__PURE__*/function (PageExportTask) {
+  PageExportTask["Controller"] = "pageBuilderExportPagesController";
+  PageExportTask["ZipPages"] = "pageBuilderExportPagesZipPages";
+  PageExportTask["Cleanup"] = "pageBuilderExportPagesCleanup";
+  return PageExportTask;
+}({});
+/**
+ * Controller
+ */
+/**
+ * Zip Pages
+ */
+/**
+ * Combine Zipped Pages
+ */
+/**
+ * Cleanup Zip files
+ */
+
+//# sourceMappingURL=types.js.map
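
Because the enum compiles to a plain object whose members are the task definition IDs, the values can be compared directly against strings. A small hedged example; the import specifier is illustrative and may not match how the package actually exposes this file.

```ts
// Illustrative import path; adjust to your build's module resolution.
import { PageExportTask } from "@webiny/api-page-builder-import-export/export/pages/types";

console.log(PageExportTask.ZipPages);                                          // "pageBuilderExportPagesZipPages"
console.log(PageExportTask.Controller === "pageBuilderExportPagesController"); // true
```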

package/export/pages/types.js.map
@@ -0,0 +1 @@
{"version":3,"names":["PageExportTask","exports"],"sources":["types.ts"],"sourcesContent":["import type { ITaskResponseDoneResultOutput, ITaskRunParams } from \"@webiny/tasks\";\nimport type { PbImportExportContext } from \"~/types\";\n\nexport enum PageExportTask {\n Controller = \"pageBuilderExportPagesController\",\n ZipPages = \"pageBuilderExportPagesZipPages\",\n Cleanup = \"pageBuilderExportPagesCleanup\"\n}\n\n/**\n * Controller\n */\nexport interface IExportPagesControllerInput {\n type: \"published\" | \"latest\";\n where?: Record<string, any>;\n totalPages: number;\n after?: string | null;\n currentBatch?: number;\n zippingPages?: boolean;\n}\n\nexport interface IExportPagesControllerOutput extends ITaskResponseDoneResultOutput {\n key: string;\n url: string;\n}\n\nexport type IExportPagesControllerTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesControllerInput,\n IExportPagesControllerOutput\n>;\n\n/**\n * Zip Pages\n */\nexport interface IExportPagesZipPagesDone {\n [pageId: string]: string;\n}\nexport interface IExportPagesZipPagesInput {\n type: \"published\" | \"latest\";\n queue: string[];\n done?: IExportPagesZipPagesDone;\n failed?: string[];\n}\n\nexport interface IExportPagesZipPagesOutput extends ITaskResponseDoneResultOutput {\n done: IExportPagesZipPagesDone;\n failed: string[];\n}\n\nexport type IExportPagesZipPagesTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesZipPagesInput,\n IExportPagesZipPagesOutput\n>;\n\n/**\n * Combine Zipped Pages\n */\n\nexport interface IExportPagesCombineZippedPagesOutput extends ITaskResponseDoneResultOutput {\n key: string;\n url: string;\n}\n\nexport type IExportPagesCombineZippedPagesParams = Pick<\n ITaskRunParams<\n PbImportExportContext,\n IExportPagesControllerInput,\n IExportPagesCombineZippedPagesOutput\n >,\n \"store\" | \"response\"\n>;\n\n/**\n * Cleanup Zip files\n */\nexport type IExportPagesCleanupInput = Record<string, boolean>;\n\nexport type IExportPagesCleanupTaskParams = ITaskRunParams<\n PbImportExportContext,\n IExportPagesCleanupInput\n>;\n"],"mappings":";;;;;;IAGYA,cAAc,GAAAC,OAAA,CAAAD,cAAA,0BAAdA,cAAc;EAAdA,cAAc;EAAdA,cAAc;EAAdA,cAAc;EAAA,OAAdA,cAAc;AAAA;AAM1B;AACA;AACA;AAqBA;AACA;AACA;AAsBA;AACA;AACA;AAgBA;AACA;AACA","ignoreList":[]}

package/export/pages/utils.js
@@ -0,0 +1,13 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.createExportPagesDataKey = exports.EXPORT_PAGES_FOLDER_KEY = void 0;
+const EXPORT_PAGES_FOLDER_KEY = exports.EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
+const createExportPagesDataKey = taskId => {
+  return `${EXPORT_PAGES_FOLDER_KEY}/${taskId}`;
+};
+exports.createExportPagesDataKey = createExportPagesDataKey;
+
+//# sourceMappingURL=utils.js.map
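
For reference, the helper just namespaces per-task export data under a fixed folder; with a made-up task ID the resulting key looks like this:

```ts
// Mirrors the helper added in export/pages/utils.js; the task ID below is invented.
const EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";

const createExportPagesDataKey = (taskId: string): string => {
    return `${EXPORT_PAGES_FOLDER_KEY}/${taskId}`;
};

console.log(createExportPagesDataKey("6745a1b2c3d4e5f601234567"));
// "WEBINY_PB_EXPORT_PAGES/6745a1b2c3d4e5f601234567"
```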

package/export/pages/utils.js.map
@@ -0,0 +1 @@
{"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","exports","createExportPagesDataKey","taskId"],"sources":["utils.ts"],"sourcesContent":["export const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nexport const createExportPagesDataKey = (taskId: string) => {\n return `${EXPORT_PAGES_FOLDER_KEY}/${taskId}`;\n};\n"],"mappings":";;;;;;AAAO,MAAMA,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA,GAAG,wBAAwB;AAExD,MAAME,wBAAwB,GAAIC,MAAc,IAAK;EACxD,OAAO,GAAGH,uBAAuB,IAAIG,MAAM,EAAE;AACjD,CAAC;AAACF,OAAA,CAAAC,wBAAA,GAAAA,wBAAA","ignoreList":[]}

package/export/pages/zipPages/ZipPages.js
@@ -0,0 +1,100 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ZipPages = void 0;
+var _PageExporter = require("../../process/exporters/PageExporter");
+var _ZipPagesDataManager = require("./ZipPagesDataManager");
+var _getPageFactory = require("./getPageFactory");
+var _utils = require("../utils");
+/**
+ * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.
+ */
+const CLOSE_TO_TIMEOUT_SECONDS = 300;
+class ZipPages {
+  async execute(params) {
+    const {
+      response,
+      input,
+      isAborted,
+      isCloseToTimeout,
+      context,
+      store
+    } = params;
+    const parentId = store.getTask().parentId;
+    if (!parentId) {
+      return response.error({
+        message: `Could not find parent task ID.`
+      });
+    }
+    const exportPageDataKey = (0, _utils.createExportPagesDataKey)(parentId);
+    const dataManager = new _ZipPagesDataManager.ZipPagesDataManager(input);
+    if (dataManager.hasMore() === false) {
+      return response.done("Task done.", {
+        done: dataManager.getDone(),
+        failed: dataManager.getFailed()
+      });
+    }
+    const getPage = (0, _getPageFactory.getPageFactory)(context, store, input.type === "published");
+    /**
+     * We will go page by page and zip them.
+     * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.
+     *
+     */
+    for (const pageId of input.queue) {
+      /**
+       * Check for a possibility that the task was aborted.
+       */
+      if (isAborted()) {
+        return response.aborted();
+      }
+      /**
+       * We need to check if there is enough time left to finish the task.
+       */
+      if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {
+        /**
+         * If there is not enough time left, we will pause the task and return the current state.
+         */
+        return response.continue(dataManager.getInput());
+      }
+      const page = await getPage(pageId);
+      if (!page) {
+        await store.addErrorLog({
+          message: `Could not load page "${pageId}".`,
+          error: {
+            message: `Could not load page "${pageId}".`
+          }
+        });
+        dataManager.addFailed(pageId);
+        continue;
+      }
+      try {
+        const pageExporter = new _PageExporter.PageExporter(context.fileManager);
+        const pageDataZip = await pageExporter.execute(page, exportPageDataKey);
+        if (!pageDataZip.Key) {
+          throw new Error(`Failed to export page "${pageId}" into a zip file.`);
+        }
+        dataManager.addDone(pageId, pageDataZip.Key);
+      } catch (ex) {
+        const message = ex.message || `Failed to export page "${pageId}" into a zip file.`;
+        try {
+          await store.addErrorLog({
+            message,
+            error: ex
+          });
+        } catch {
+          console.error(`Failed to add error log: "${message}"`);
+        }
+        dataManager.addFailed(pageId);
+      }
+    }
+    return response.done("Task done.", {
+      done: dataManager.getDone(),
+      failed: dataManager.getFailed()
+    });
+  }
+}
+exports.ZipPages = ZipPages;
+
+//# sourceMappingURL=ZipPages.js.map
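
The per-page loop above is a checkpoint-and-resume pattern: before each page it checks the remaining execution time and, when it runs low, returns the shrunken queue so the next invocation can resume. Below is a standalone sketch of that pattern under invented names (`secondsLeft`, `zipOnePage`, `Checkpoint`); it is not the @webiny/tasks API, only the same control flow.

```ts
// Generic checkpoint-and-resume loop, mirroring the structure of ZipPages.execute() above.
interface Checkpoint {
    queue: string[];
    done: Record<string, string>;
    failed: string[];
}

const SAFETY_MARGIN_SECONDS = 300; // same idea as CLOSE_TO_TIMEOUT_SECONDS in the hunk

export async function processWithCheckpoints(
    state: Checkpoint,
    secondsLeft: () => number,
    zipOnePage: (pageId: string) => Promise<string>
): Promise<{ status: "continue" | "done"; state: Checkpoint }> {
    for (const pageId of [...state.queue]) {
        if (secondsLeft() < SAFETY_MARGIN_SECONDS) {
            // Not enough time for another page: hand the remaining state back to the runner,
            // which re-invokes the task with it (the role of response.continue(...) above).
            return { status: "continue", state };
        }
        try {
            state.done[pageId] = await zipOnePage(pageId);
        } catch {
            state.failed.push(pageId);
        }
        // Whether it succeeded or failed, the page leaves the queue.
        state.queue = state.queue.filter(id => id !== pageId);
    }
    return { status: "done", state };
}
```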

package/export/pages/zipPages/ZipPages.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_PageExporter","require","_ZipPagesDataManager","_getPageFactory","_utils","CLOSE_TO_TIMEOUT_SECONDS","ZipPages","execute","params","response","input","isAborted","isCloseToTimeout","context","store","parentId","getTask","error","message","exportPageDataKey","createExportPagesDataKey","dataManager","ZipPagesDataManager","hasMore","done","getDone","failed","getFailed","getPage","getPageFactory","type","pageId","queue","aborted","continue","getInput","page","addErrorLog","addFailed","pageExporter","PageExporter","fileManager","pageDataZip","Key","Error","addDone","ex","console","exports"],"sources":["ZipPages.ts"],"sourcesContent":["import type { ITaskResponseResult } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesTaskParams } from \"~/export/pages/types\";\nimport { PageExporter } from \"~/export/process/exporters/PageExporter\";\nimport { ZipPagesDataManager } from \"./ZipPagesDataManager\";\nimport { getPageFactory } from \"./getPageFactory\";\nimport { createExportPagesDataKey } from \"~/export/pages/utils\";\n\n/**\n * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.\n */\nconst CLOSE_TO_TIMEOUT_SECONDS = 300;\n\nexport class ZipPages {\n public async execute(params: IExportPagesZipPagesTaskParams): Promise<ITaskResponseResult> {\n const { response, input, isAborted, isCloseToTimeout, context, store } = params;\n\n const parentId = store.getTask().parentId;\n if (!parentId) {\n return response.error({\n message: `Could not find parent task ID.`\n });\n }\n const exportPageDataKey = createExportPagesDataKey(parentId);\n\n const dataManager = new ZipPagesDataManager(input);\n if (dataManager.hasMore() === false) {\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n }\n\n const getPage = getPageFactory(context, store, input.type === \"published\");\n /**\n * We will go page by page and zip them.\n * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.\n *\n */\n for (const pageId of input.queue) {\n /**\n * Check for a possibility that the task was aborted.\n */\n if (isAborted()) {\n return response.aborted();\n }\n /**\n * We need to check if there is enough time left to finish the task.\n */\n if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {\n /**\n * If there is not enough time left, we will pause the task and return the current state.\n */\n return response.continue(dataManager.getInput());\n }\n\n const page = await getPage(pageId);\n if (!page) {\n await store.addErrorLog({\n message: `Could not load page \"${pageId}\".`,\n error: {\n message: `Could not load page \"${pageId}\".`\n }\n });\n dataManager.addFailed(pageId);\n continue;\n }\n try {\n const pageExporter = new PageExporter(context.fileManager);\n const pageDataZip = await pageExporter.execute(page, exportPageDataKey);\n if (!pageDataZip.Key) {\n throw new Error(`Failed to export page \"${pageId}\" into a zip file.`);\n }\n dataManager.addDone(pageId, pageDataZip.Key);\n } catch (ex) {\n const message = ex.message || `Failed to export page \"${pageId}\" into a zip file.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n dataManager.addFailed(pageId);\n }\n }\n\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n 
}\n}\n"],"mappings":";;;;;;AAEA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,oBAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AACA,IAAAG,MAAA,GAAAH,OAAA;AAEA;AACA;AACA;AACA,MAAMI,wBAAwB,GAAG,GAAG;AAE7B,MAAMC,QAAQ,CAAC;EAClB,MAAaC,OAAOA,CAACC,MAAsC,EAAgC;IACvF,MAAM;MAAEC,QAAQ;MAAEC,KAAK;MAAEC,SAAS;MAAEC,gBAAgB;MAAEC,OAAO;MAAEC;IAAM,CAAC,GAAGN,MAAM;IAE/E,MAAMO,QAAQ,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC,CAACD,QAAQ;IACzC,IAAI,CAACA,QAAQ,EAAE;MACX,OAAON,QAAQ,CAACQ,KAAK,CAAC;QAClBC,OAAO,EAAE;MACb,CAAC,CAAC;IACN;IACA,MAAMC,iBAAiB,GAAG,IAAAC,+BAAwB,EAACL,QAAQ,CAAC;IAE5D,MAAMM,WAAW,GAAG,IAAIC,wCAAmB,CAACZ,KAAK,CAAC;IAClD,IAAIW,WAAW,CAACE,OAAO,CAAC,CAAC,KAAK,KAAK,EAAE;MACjC,OAAOd,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;QAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;QAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;MAClC,CAAC,CAAC;IACN;IAEA,MAAMC,OAAO,GAAG,IAAAC,8BAAc,EAAChB,OAAO,EAAEC,KAAK,EAAEJ,KAAK,CAACoB,IAAI,KAAK,WAAW,CAAC;IAC1E;AACR;AACA;AACA;AACA;IACQ,KAAK,MAAMC,MAAM,IAAIrB,KAAK,CAACsB,KAAK,EAAE;MAC9B;AACZ;AACA;MACY,IAAIrB,SAAS,CAAC,CAAC,EAAE;QACb,OAAOF,QAAQ,CAACwB,OAAO,CAAC,CAAC;MAC7B;MACA;AACZ;AACA;MACY,IAAIrB,gBAAgB,CAACP,wBAAwB,CAAC,EAAE;QAC5C;AAChB;AACA;QACgB,OAAOI,QAAQ,CAACyB,QAAQ,CAACb,WAAW,CAACc,QAAQ,CAAC,CAAC,CAAC;MACpD;MAEA,MAAMC,IAAI,GAAG,MAAMR,OAAO,CAACG,MAAM,CAAC;MAClC,IAAI,CAACK,IAAI,EAAE;QACP,MAAMtB,KAAK,CAACuB,WAAW,CAAC;UACpBnB,OAAO,EAAE,wBAAwBa,MAAM,IAAI;UAC3Cd,KAAK,EAAE;YACHC,OAAO,EAAE,wBAAwBa,MAAM;UAC3C;QACJ,CAAC,CAAC;QACFV,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;QAC7B;MACJ;MACA,IAAI;QACA,MAAMQ,YAAY,GAAG,IAAIC,0BAAY,CAAC3B,OAAO,CAAC4B,WAAW,CAAC;QAC1D,MAAMC,WAAW,GAAG,MAAMH,YAAY,CAAChC,OAAO,CAAC6B,IAAI,EAAEjB,iBAAiB,CAAC;QACvE,IAAI,CAACuB,WAAW,CAACC,GAAG,EAAE;UAClB,MAAM,IAAIC,KAAK,CAAC,0BAA0Bb,MAAM,oBAAoB,CAAC;QACzE;QACAV,WAAW,CAACwB,OAAO,CAACd,MAAM,EAAEW,WAAW,CAACC,GAAG,CAAC;MAChD,CAAC,CAAC,OAAOG,EAAE,EAAE;QACT,MAAM5B,OAAO,GAAG4B,EAAE,CAAC5B,OAAO,IAAI,0BAA0Ba,MAAM,oBAAoB;QAClF,IAAI;UACA,MAAMjB,KAAK,CAACuB,WAAW,CAAC;YACpBnB,OAAO;YACPD,KAAK,EAAE6B;UACX,CAAC,CAAC;QACN,CAAC,CAAC,MAAM;UACJC,OAAO,CAAC9B,KAAK,CAAC,6BAA6BC,OAAO,GAAG,CAAC;QAC1D;QACAG,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;MACjC;IACJ;IAEA,OAAOtB,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;MAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;MAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;IAClC,CAAC,CAAC;EACN;AACJ;AAACqB,OAAA,CAAA1C,QAAA,GAAAA,QAAA","ignoreList":[]}

package/export/pages/zipPages/ZipPagesDataManager.d.ts
@@ -0,0 +1,14 @@
+import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from "../types";
+export declare class ZipPagesDataManager {
+    private readonly input;
+    private readonly queue;
+    private readonly done;
+    private readonly failed;
+    constructor(input: IExportPagesZipPagesInput);
+    hasMore(): boolean;
+    addDone(pageId: string, key: string): void;
+    addFailed(pageId: string): void;
+    getFailed(): string[];
+    getDone(): IExportPagesZipPagesDone;
+    getInput(): IExportPagesZipPagesInput;
+}

package/export/pages/zipPages/ZipPagesDataManager.js
@@ -0,0 +1,46 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ZipPagesDataManager = void 0;
+class ZipPagesDataManager {
+  constructor(input) {
+    this.input = input;
+    this.queue = new Set(input.queue);
+    this.done = {
+      ...input.done
+    };
+    this.failed = new Set(input.failed || []);
+  }
+  hasMore() {
+    return this.queue.size > 0;
+  }
+  addDone(pageId, key) {
+    this.queue.delete(pageId);
+    this.failed.delete(pageId);
+    this.done[pageId] = key;
+  }
+  addFailed(pageId) {
+    this.queue.delete(pageId);
+    this.failed.add(pageId);
+    delete this.done[pageId];
+  }
+  getFailed() {
+    return Array.from(this.failed);
+  }
+  getDone() {
+    return this.done;
+  }
+  getInput() {
+    return {
+      ...this.input,
+      queue: Array.from(this.queue),
+      failed: Array.from(this.failed),
+      done: this.done
+    };
+  }
+}
+exports.ZipPagesDataManager = ZipPagesDataManager;
+
+//# sourceMappingURL=ZipPagesDataManager.js.map
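
A hedged usage sketch of the data manager: the import specifier is illustrative (it assumes the compiled file is reachable at that path), and the page IDs and key are invented. It shows how `getInput()` produces the state that a paused task passes to its next invocation.

```ts
// Illustrative import path; adjust to however your project resolves the compiled file.
import { ZipPagesDataManager } from "@webiny/api-page-builder-import-export/export/pages/zipPages/ZipPagesDataManager";

const manager = new ZipPagesDataManager({
    type: "published",
    queue: ["page-1", "page-2", "page-3"]
});

manager.addDone("page-1", "WEBINY_PB_EXPORT_PAGES/task-123/page-1.zip");
manager.addFailed("page-2");

// getInput() is what the task would pass to response.continue() when pausing:
// the queue shrinks as pages are processed, while done/failed accumulate.
console.log(manager.getInput());
// {
//   type: "published",
//   queue: ["page-3"],
//   failed: ["page-2"],
//   done: { "page-1": "WEBINY_PB_EXPORT_PAGES/task-123/page-1.zip" }
// }
```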

package/export/pages/zipPages/ZipPagesDataManager.js.map
@@ -0,0 +1 @@
{"version":3,"names":["ZipPagesDataManager","constructor","input","queue","Set","done","failed","hasMore","size","addDone","pageId","key","delete","addFailed","add","getFailed","Array","from","getDone","getInput","exports"],"sources":["ZipPagesDataManager.ts"],"sourcesContent":["import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport class ZipPagesDataManager {\n private readonly input: IExportPagesZipPagesInput;\n private readonly queue: Set<string>;\n private readonly done: IExportPagesZipPagesDone;\n private readonly failed: Set<string>;\n\n public constructor(input: IExportPagesZipPagesInput) {\n this.input = input;\n this.queue = new Set(input.queue);\n this.done = {\n ...input.done\n };\n this.failed = new Set(input.failed || []);\n }\n\n public hasMore(): boolean {\n return this.queue.size > 0;\n }\n\n public addDone(pageId: string, key: string): void {\n this.queue.delete(pageId);\n this.failed.delete(pageId);\n this.done[pageId] = key;\n }\n\n public addFailed(pageId: string): void {\n this.queue.delete(pageId);\n this.failed.add(pageId);\n delete this.done[pageId];\n }\n\n public getFailed() {\n return Array.from(this.failed);\n }\n\n public getDone() {\n return this.done;\n }\n\n public getInput(): IExportPagesZipPagesInput {\n return {\n ...this.input,\n queue: Array.from(this.queue),\n failed: Array.from(this.failed),\n done: this.done\n };\n }\n}\n"],"mappings":";;;;;;AAEO,MAAMA,mBAAmB,CAAC;EAMtBC,WAAWA,CAACC,KAAgC,EAAE;IACjD,IAAI,CAACA,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,KAAK,GAAG,IAAIC,GAAG,CAACF,KAAK,CAACC,KAAK,CAAC;IACjC,IAAI,CAACE,IAAI,GAAG;MACR,GAAGH,KAAK,CAACG;IACb,CAAC;IACD,IAAI,CAACC,MAAM,GAAG,IAAIF,GAAG,CAACF,KAAK,CAACI,MAAM,IAAI,EAAE,CAAC;EAC7C;EAEOC,OAAOA,CAAA,EAAY;IACtB,OAAO,IAAI,CAACJ,KAAK,CAACK,IAAI,GAAG,CAAC;EAC9B;EAEOC,OAAOA,CAACC,MAAc,EAAEC,GAAW,EAAQ;IAC9C,IAAI,CAACR,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACM,MAAM,CAACF,MAAM,CAAC;IAC1B,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC,GAAGC,GAAG;EAC3B;EAEOE,SAASA,CAACH,MAAc,EAAQ;IACnC,IAAI,CAACP,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACQ,GAAG,CAACJ,MAAM,CAAC;IACvB,OAAO,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC;EAC5B;EAEOK,SAASA,CAAA,EAAG;IACf,OAAOC,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;EAClC;EAEOY,OAAOA,CAAA,EAAG;IACb,OAAO,IAAI,CAACb,IAAI;EACpB;EAEOc,QAAQA,CAAA,EAA8B;IACzC,OAAO;MACH,GAAG,IAAI,CAACjB,KAAK;MACbC,KAAK,EAAEa,KAAK,CAACC,IAAI,CAAC,IAAI,CAACd,KAAK,CAAC;MAC7BG,MAAM,EAAEU,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;MAC/BD,IAAI,EAAE,IAAI,CAACA;IACf,CAAC;EACL;AACJ;AAACe,OAAA,CAAApB,mBAAA,GAAAA,mBAAA","ignoreList":[]}

package/export/pages/zipPages/getPageFactory.d.ts
@@ -0,0 +1,4 @@
+import type { PbImportExportContext } from "../../../graphql/types";
+import type { ITaskManagerStore } from "@webiny/tasks";
+import type { IExportPagesZipPagesInput } from "../types";
+export declare const getPageFactory: (context: PbImportExportContext, store: ITaskManagerStore<IExportPagesZipPagesInput>, published: boolean) => (pageId: string) => Promise<import("@webiny/api-page-builder/types").Page<Record<string, any> | null> | null>;

package/export/pages/zipPages/getPageFactory.js
@@ -0,0 +1,38 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.getPageFactory = void 0;
+const getPageFactory = (context, store, published) => {
+  return async pageId => {
+    if (published) {
+      try {
+        return await context.pageBuilder.getPublishedPageById({
+          id: pageId
+        });
+      } catch (ex) {
+        /**
+         * We do not need to do anything on exception because we will fetch the latest version.
+         */
+      }
+    }
+    try {
+      return await context.pageBuilder.getPage(pageId);
+    } catch (ex) {
+      const message = `There is no page with ID ${pageId}.`;
+      try {
+        await store.addErrorLog({
+          message,
+          error: ex
+        });
+      } catch {
+        console.error(`Failed to add error log: "${message}"`);
+      }
+      return null;
+    }
+  };
+};
+exports.getPageFactory = getPageFactory;
+
+//# sourceMappingURL=getPageFactory.js.map

package/export/pages/zipPages/getPageFactory.js.map
@@ -0,0 +1 @@
{"version":3,"names":["getPageFactory","context","store","published","pageId","pageBuilder","getPublishedPageById","id","ex","getPage","message","addErrorLog","error","console","exports"],"sources":["getPageFactory.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/graphql/types\";\nimport type { ITaskManagerStore } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport const getPageFactory = (\n context: PbImportExportContext,\n store: ITaskManagerStore<IExportPagesZipPagesInput>,\n published: boolean\n) => {\n return async (pageId: string) => {\n if (published) {\n try {\n return await context.pageBuilder.getPublishedPageById({\n id: pageId\n });\n } catch (ex) {\n /**\n * We do not need to do anything on exception because we will fetch the latest version.\n */\n }\n }\n try {\n return await context.pageBuilder.getPage(pageId);\n } catch (ex) {\n const message = `There is no page with ID ${pageId}.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n return null;\n }\n };\n};\n"],"mappings":";;;;;;AAIO,MAAMA,cAAc,GAAGA,CAC1BC,OAA8B,EAC9BC,KAAmD,EACnDC,SAAkB,KACjB;EACD,OAAO,MAAOC,MAAc,IAAK;IAC7B,IAAID,SAAS,EAAE;MACX,IAAI;QACA,OAAO,MAAMF,OAAO,CAACI,WAAW,CAACC,oBAAoB,CAAC;UAClDC,EAAE,EAAEH;QACR,CAAC,CAAC;MACN,CAAC,CAAC,OAAOI,EAAE,EAAE;QACT;AAChB;AACA;MAFgB;IAIR;IACA,IAAI;MACA,OAAO,MAAMP,OAAO,CAACI,WAAW,CAACI,OAAO,CAACL,MAAM,CAAC;IACpD,CAAC,CAAC,OAAOI,EAAE,EAAE;MACT,MAAME,OAAO,GAAG,4BAA4BN,MAAM,GAAG;MACrD,IAAI;QACA,MAAMF,KAAK,CAACS,WAAW,CAAC;UACpBD,OAAO;UACPE,KAAK,EAAEJ;QACX,CAAC,CAAC;MACN,CAAC,CAAC,MAAM;QACJK,OAAO,CAACD,KAAK,CAAC,6BAA6BF,OAAO,GAAG,CAAC;MAC1D;MACA,OAAO,IAAI;IACf;EACJ,CAAC;AACL,CAAC;AAACI,OAAA,CAAAd,cAAA,GAAAA,cAAA","ignoreList":[]}

package/export/process/blocksHandler.d.ts
@@ -1,5 +1,5 @@
-import { PbImportExportContext } from "../../types";
-import { Configuration, Payload, Response } from "
+import type { PbImportExportContext } from "../../types";
+import type { Configuration, Payload, Response } from "./";
 /**
  * Handles the export blocks process workflow.
  */
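
Most of the one-line .d.ts and .js changes in this release are the same kind of edit as the hunk above: value imports become type-only imports. `import type` is guaranteed to be erased when TypeScript emits JavaScript, which matters for single-file transpilers that cannot tell whether an import is used only for its types. A small hypothetical example; the module specifier and declaration are illustrative, not the package's actual code.

```ts
// Regular import: a single-file transpiler (which cannot see that Configuration is only
// used as a type) may keep a runtime require() for the module in the emitted .js.
import { Configuration } from "~/export/process";

// Type-only import: always erased from the emitted JavaScript.
import type { Payload, Response } from "~/export/process";

export declare const blocksHandler: (
    configuration: Configuration,
    payload: Payload,
    context: unknown
) => Promise<Response>;
```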

package/export/process/blocksHandler.js.map
@@ -1 +1 @@
-
{"version":3,"names":["_types","require","_client","_handlerGraphql","_mockSecurity","_utils","_BlockExporter","blocksHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","fileManager","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","input","blockId","exportBlocksDataKey","block","getPageBlock","NotFoundError","blockCategory","getBlockCategory","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","blockExporter","BlockExporter","blockDataZip","execute","COMPLETED","message","key","Key","e","FAILED","name","code","invokeHandlerClient","handlers","combine","security","getIdentity","description","process","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { NotFoundError } from \"@webiny/handler-graphql\";\nimport { Payload as ExtractPayload } from \"../combine\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/export/process\";\nimport { BlockExporter } from \"./exporters/BlockExporter\";\n\n/**\n * Handles the export blocks process workflow.\n */\nexport const blocksHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Export Blocks Process Handler\");\n const { pageBuilder, fileManager } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity as SecurityIdentity, context);\n try {\n /*\n * Note: We're not going to DB for finding the next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { input } = subTask;\n const { blockId, exportBlocksDataKey } = input;\n\n const block = await pageBuilder.getPageBlock(blockId);\n\n if (!block) {\n log(`Unable to load block \"${blockId}\"`);\n throw new NotFoundError(`Unable to load block \"${blockId}\"`);\n }\n\n log(`Processing block key \"${blockId}\"`);\n\n const blockCategory = await pageBuilder.getBlockCategory(block.blockCategory);\n\n if (!blockCategory) {\n log(`Unable to load block category \"${block.blockCategory}\"`);\n throw new NotFoundError(`Unable to load block category \"${block.blockCategory}\"`);\n }\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await 
pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n log(`Extracting block data and uploading to storage...`);\n const blockExporter = new BlockExporter(fileManager);\n const blockDataZip = await blockExporter.execute(block, blockCategory, exportBlocksDataKey);\n\n log(`Finish uploading zip...`);\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading data for block \"${block.id}\"`,\n key: blockDataZip.Key\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[EXPORT_BLOCKS_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n // Combine individual block zip files.\n await invokeHandlerClient<ExtractPayload>({\n context,\n name: configuration.handlers.combine,\n payload: {\n taskId,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - combine\"\n });\n } else {\n console.log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current block error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AAEA,IAAAG,aAAA,GAAAH,OAAA;AAEA,IAAAI,MAAA,GAAAJ,OAAA;AAEA,IAAAK,cAAA,GAAAL,OAAA;AAEA;AACA;AACA;AACO,MAAMM,aAAa,GAAG,MAAAA,CACzBC,aAA4B,EAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,IAAIE,OAAO;EACX,IAAIC,aAAa,GAAG,IAAI;EACxB,IAAIC,mBAAmB,GAAGC,6BAAsB,CAACC,OAAO;EAExDN,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEO,WAAW;IAAEC;EAAY,CAAC,GAAGT,OAAO;EAC5C,MAAM;IAAEU,MAAM;IAAEC,YAAY;IAAEC,IAAI;IAAEC;EAAS,CAAC,GAAGd,OAAO;EACxD;EACA;EACA,IAAAe,0BAAY,EAACD,QAAQ,EAAsBb,OAAO,CAAC;EACnD,IAAI;IACA;AACR;AACA;AACA;IACQG,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACC,UAAU,CAACN,MAAM,EAAE,IAAAO,cAAO,EAACN,YAAY,EAAE,CAAC,CAAC,CAAC;IACzF;AACR;AACA;AACA;IACQ,IAAI,CAACR,OAAO,IAAIA,OAAO,CAACe,MAAM,KAAKZ,6BAAsB,CAACC,OAAO,EAAE;MAC/DH,aAAa,GAAG,IAAI;MACpB,OAAO;QACHe,IAAI,EAAE,EAAE;QACRC,KAAK,EAAE;MACX,CAAC;IACL,CAAC,MAAM;MACHhB,aAAa,GAAG,KAAK;IACzB;IAEAH,GAAG,CAAE,uBAAsBE,OAAO,CAACkB,EAAG,EAAC,CAAC;IAExC,MAAM;MAAEC;IAAM,CAAC,GAAGnB,OAAO;IACzB,MAAM;MAAEoB,OAAO;MAAEC;IAAoB,CAAC,GAAGF,KAAK;IAE9C,MAAMG,KAAK,GAAG,MAAMjB,WAAW,CAACkB,YAAY,CAACH,OAAO,CAAC;IAErD,IAAI,CAACE,KAAK,EAAE;MACRxB,GAAG,CAAE,yBAAwBsB,OAAQ,GAAE,CAAC;MACxC,MAAM,IAAII,6BAAa,CAAE,yBAAwBJ,OAAQ,GAAE,CAAC;IAChE;IAEAtB,GAAG,CAAE,yBAAwBsB,OAAQ,GAAE,CAAC;IAExC,MAAMK,aAAa,GAAG,MAAMpB,WAAW,CAACqB,gBAAgB,CAACJ,KAAK,CAACG,aAAa,CAAC;IAE7E,IAAI,CAACA,aAAa,EAAE;MAChB3B,GAAG,CAAE,kCAAiCwB,KAAK,CAACG,aAAc,GAAE,CAAC;MAC7D,MAAM,IAAID,6BAAa,CAAE,kCAAiCF,KAAK,CAACG,aAAc,GAAE,CAAC;IACrF;;IAEA;IACAzB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACyB;IACnC,CAAC,CAAC;IACF;IACA,MAAMvB,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACyB;IACvC,CAAC,CAAC;IACF1B,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IAEpCjB,GAAG,CAAE,mDAAkD,CAAC;IACxD,MAAMkC,aAAa,GAAG,IAAIC,4BAAa,CAAC3B,WAAW,CAAC;IACpD,MAAM4B,YAAY,GAAG,MAAMF,aAAa,CAACG,OAAO,CAACb,KAAK,EAAEG,aAAa,EAAEJ,mBAAmB,CAAC;IAE3FvB,GAAG,CAAE,yBAAwB,CAAC;IAC9B;IACAE,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACiC,SAAS;MACxCpB,IAAI,EAAE;QACFqB,OAAO,EAAG,oCAAmCf,KAAK,CAACJ,EAAG,GAAE;QACxDoB,GAAG,EAAEJ,YAAY,CAACK;MACtB;IACJ,CAAC,CAAC;IACF;IACA,MAAMlC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACiC;IACvC,CAAC,CAAC;IACFlC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;EACxC,CAAC,CAAC,OAAOyB,CAAC,EAAE;IACR1C,GAAG,CAAC,mCAAmC,EAAE0C,CAAC,CAACH,OAAO,CAAC;IAEnD,IAAIrC,OAAO,IAAIA,OAAO,CAACkB,EAAE,EAAE;MACvB;AACZ;AACA;AACA;MACYlB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;QAC3EH,MAAM,EAAEZ,6BAAsB,CAACsC,MAAM;QACrCxB,KAAK,EAAE;UACHyB,IAAI,EAAEF,CAAC,CAACE,IAAI;UACZL,OAAO,EAAEG,CAAC,CAACH,OAAO;UAClBM,IAAI,EAAE;QACV;MACJ,CAAC,CAAC;;MAEF;MACA,MAAMtC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;QACnDuB,UAAU,EAAE5B,mBAAmB;QAC/B6B,UAAU,EAAE5B,6BAAsB,CAACsC;MACvC,CAAC,CAAC;MACFvC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IACxC;IAEA,OAAO;MACHC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHoB,OAAO,EAAEG,CAAC,CAACH;MACf;IACJ,CAAC;EACL,CAAC,SAAS;IACN;IACA,IAAIpC,aAAa,EAAE;MACfH,GAAG,CAAE,gCAA+BS,MAAO,EAAC,CAAC;MAC7C;MACA,MAAM,IAAAqC,2BAAmB,EAAiB;QACtC/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACC,OAAO;QACpClD,OAAO,EAAE;UACLW,MAAM;UACNE,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN,CAAC,MAAM;MACHlD,OAAO,CAACD,GAAG,CAAE,8BAA6BU,YAAY,GAAG,CAAE,
GAAE,CAAC;MAC9D;MACA,MAAM,IAAAoC,2BAAmB,EAAU;QAC/B/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACK,OAAO;QACpCtD,OAAO,EAAE;UACLW,MAAM;UACNC,YAAY,EAAEA,YAAY,GAAG,CAAC;UAC9BC,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN;EACJ;EACA,OAAO;IACHjC,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACkC,OAAA,CAAAzD,aAAA,GAAAA,aAAA"}
+
{"version":3,"names":["_types","require","_client","_handlerGraphql","_mockSecurity","_utils","_BlockExporter","blocksHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","fileManager","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","input","blockId","exportBlocksDataKey","block","getPageBlock","NotFoundError","blockCategory","getBlockCategory","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","blockExporter","BlockExporter","blockDataZip","execute","COMPLETED","message","key","Key","e","FAILED","name","code","invokeHandlerClient","handlers","combine","security","getIdentity","description","process","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport { ImportExportTaskStatus } from \"~/types\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { NotFoundError } from \"@webiny/handler-graphql\";\nimport type { Payload as ExtractPayload } from \"../combine\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport type { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { zeroPad } from \"@webiny/utils\";\nimport type { Configuration, Payload, Response } from \"~/export/process\";\nimport { BlockExporter } from \"./exporters/BlockExporter\";\n\n/**\n * Handles the export blocks process workflow.\n */\nexport const blocksHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Export Blocks Process Handler\");\n const { pageBuilder, fileManager } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity as SecurityIdentity, context);\n try {\n /*\n * Note: We're not going to DB for finding the next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { input } = subTask;\n const { blockId, exportBlocksDataKey } = input;\n\n const block = await pageBuilder.getPageBlock(blockId);\n\n if (!block) {\n log(`Unable to load block \"${blockId}\"`);\n throw new NotFoundError(`Unable to load block \"${blockId}\"`);\n }\n\n log(`Processing block key \"${blockId}\"`);\n\n const blockCategory = await pageBuilder.getBlockCategory(block.blockCategory);\n\n if (!blockCategory) {\n log(`Unable to load block category \"${block.blockCategory}\"`);\n throw new NotFoundError(`Unable to load block category \"${block.blockCategory}\"`);\n }\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n 
});\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n log(`Extracting block data and uploading to storage...`);\n const blockExporter = new BlockExporter(fileManager);\n const blockDataZip = await blockExporter.execute(block, blockCategory, exportBlocksDataKey);\n\n log(`Finish uploading zip...`);\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading data for block \"${block.id}\"`,\n key: blockDataZip.Key\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[EXPORT_BLOCKS_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n // Combine individual block zip files.\n await invokeHandlerClient<ExtractPayload>({\n context,\n name: configuration.handlers.combine,\n payload: {\n taskId,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - combine\"\n });\n } else {\n console.log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current block error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AAEA,IAAAG,aAAA,GAAAH,OAAA;AAEA,IAAAI,MAAA,GAAAJ,OAAA;AAEA,IAAAK,cAAA,GAAAL,OAAA;AAEA;AACA;AACA;AACO,MAAMM,aAAa,GAAG,MAAAA,CACzBC,aAA4B,EAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,IAAIE,OAAO;EACX,IAAIC,aAAa,GAAG,IAAI;EACxB,IAAIC,mBAAmB,GAAGC,6BAAsB,CAACC,OAAO;EAExDN,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEO,WAAW;IAAEC;EAAY,CAAC,GAAGT,OAAO;EAC5C,MAAM;IAAEU,MAAM;IAAEC,YAAY;IAAEC,IAAI;IAAEC;EAAS,CAAC,GAAGd,OAAO;EACxD;EACA;EACA,IAAAe,0BAAY,EAACD,QAAQ,EAAsBb,OAAO,CAAC;EACnD,IAAI;IACA;AACR;AACA;AACA;IACQG,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACC,UAAU,CAACN,MAAM,EAAE,IAAAO,cAAO,EAACN,YAAY,EAAE,CAAC,CAAC,CAAC;IACzF;AACR;AACA;AACA;IACQ,IAAI,CAACR,OAAO,IAAIA,OAAO,CAACe,MAAM,KAAKZ,6BAAsB,CAACC,OAAO,EAAE;MAC/DH,aAAa,GAAG,IAAI;MACpB,OAAO;QACHe,IAAI,EAAE,EAAE;QACRC,KAAK,EAAE;MACX,CAAC;IACL,CAAC,MAAM;MACHhB,aAAa,GAAG,KAAK;IACzB;IAEAH,GAAG,CAAC,uBAAuBE,OAAO,CAACkB,EAAE,EAAE,CAAC;IAExC,MAAM;MAAEC;IAAM,CAAC,GAAGnB,OAAO;IACzB,MAAM;MAAEoB,OAAO;MAAEC;IAAoB,CAAC,GAAGF,KAAK;IAE9C,MAAMG,KAAK,GAAG,MAAMjB,WAAW,CAACkB,YAAY,CAACH,OAAO,CAAC;IAErD,IAAI,CAACE,KAAK,EAAE;MACRxB,GAAG,CAAC,yBAAyBsB,OAAO,GAAG,CAAC;MACxC,MAAM,IAAII,6BAAa,CAAC,yBAAyBJ,OAAO,GAAG,CAAC;IAChE;IAEAtB,GAAG,CAAC,yBAAyBsB,OAAO,GAAG,CAAC;IAExC,MAAMK,aAAa,GAAG,MAAMpB,WAAW,CAACqB,gBAAgB,CAACJ,KAAK,CAACG,aAAa,CAAC;IAE7E,IAAI,CAACA,aAAa,EAAE;MAChB3B,GAAG,CAAC,kCAAkCwB,KAAK,CAACG,aAAa,GAAG,CAAC;MAC7D,MAAM,IAAID,6BAAa,CAAC,kCAAkCF,KAAK,CAACG,aAAa,GAAG,CAAC;IACrF;;IAEA;IACAzB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACyB;IACnC,CAAC,CAAC;IACF;IACA,MAAMvB,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACyB;IACvC,CAAC,CAAC;IACF1B,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IAEpCjB,GAAG,CAAC,mDAAmD,CAAC;IACxD,MAAMkC,aAAa,GAAG,IAAIC,4BAAa,CAAC3B,WAAW,CAAC;IACpD,MAAM4B,YAAY,GAAG,MAAMF,aAAa,CAACG,OAAO,CAACb,KAAK,EAAEG,aAAa,EAAEJ,mBAAmB,CAAC;IAE3FvB,GAAG,CAAC,yBAAyB,CAAC;IAC9B;IACAE,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACiC,SAAS;MACxCpB,IAAI,EAAE;QACFqB,OAAO,EAAE,oCAAoCf,KAAK,CAACJ,EAAE,GAAG;QACxDoB,GAAG,EAAEJ,YAAY,CAACK;MACtB;IACJ,CAAC,CAAC;IACF;IACA,MAAMlC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACiC;IACvC,CAAC,CAAC;IACFlC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;EACxC,CAAC,CAAC,OAAOyB,CAAC,EAAE;IACR1C,GAAG,CAAC,mCAAmC,EAAE0C,CAAC,CAACH,OAAO,CAAC;IAEnD,IAAIrC,OAAO,IAAIA,OAAO,CAACkB,EAAE,EAAE;MACvB;AACZ;AACA;AACA;MACYlB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;QAC3EH,MAAM,EAAEZ,6BAAsB,CAACsC,MAAM;QACrCxB,KAAK,EAAE;UACHyB,IAAI,EAAEF,CAAC,CAACE,IAAI;UACZL,OAAO,EAAEG,CAAC,CAACH,OAAO;UAClBM,IAAI,EAAE;QACV;MACJ,CAAC,CAAC;;MAEF;MACA,MAAMtC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;QACnDuB,UAAU,EAAE5B,mBAAmB;QAC/B6B,UAAU,EAAE5B,6BAAsB,CAACsC;MACvC,CAAC,CAAC;MACFvC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IACxC;IAEA,OAAO;MACHC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHoB,OAAO,EAAEG,CAAC,CAACH;MACf;IACJ,CAAC;EACL,CAAC,SAAS;IACN;IACA,IAAIpC,aAAa,EAAE;MACfH,GAAG,CAAC,gCAAgCS,MAAM,EAAE,CAAC;MAC7C;MACA,MAAM,IAAAqC,2BAAmB,EAAiB;QACtC/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACC,OAAO;QACpClD,OAAO,EAAE;UACLW,MAAM;UACNE,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN,CAAC,MAAM;MACHlD,OAAO,CAACD,GAAG,CAAC,8BAA8BU,YAAY,GAAG,CAAC,
GAAG,CAAC;MAC9D;MACA,MAAM,IAAAoC,2BAAmB,EAAU;QAC/B/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACK,OAAO;QACpCtD,OAAO,EAAE;UACLW,MAAM;UACNC,YAAY,EAAEA,YAAY,GAAG,CAAC;UAC9BC,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN;EACJ;EACA,OAAO;IACHjC,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACkC,OAAA,CAAAzD,aAAA,GAAAA,aAAA","ignoreList":[]}
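The blocksHandler source embedded in the map above drives the export through numbered sub-tasks: it loads the sub-task at zeroPad(subTaskIndex, 5), marks it PROCESSING, zips the block via BlockExporter, marks it COMPLETED, and in the finally block either re-invokes the process handler with the next index or hands off to the combine handler once no pending sub-task remains. A minimal sketch of that control flow, using hypothetical stand-ins (TaskStore, invokeNextProcess, invokeCombine) rather than the real Webiny APIs:

type Status = "pending" | "processing" | "completed" | "failed";

interface SubTask {
    id: string;
    status: Status;
    input: { blockId: string };
}

// Hypothetical task store; the real handler goes through pageBuilder.importExportTask.
interface TaskStore {
    getSubTask(taskId: string, index: string): Promise<SubTask | undefined>;
    updateSubTask(taskId: string, id: string, patch: Partial<SubTask>): Promise<void>;
}

const zeroPad = (n: number, width: number): string => String(n).padStart(width, "0");

// Simplified control flow: handle one sub-task, then either continue with the
// next index or fall through to the combine step.
async function processSubTask(
    store: TaskStore,
    taskId: string,
    subTaskIndex: number,
    invokeNextProcess: (nextIndex: number) => Promise<void>,
    invokeCombine: () => Promise<void>
): Promise<void> {
    const subTask = await store.getSubTask(taskId, zeroPad(subTaskIndex, 5));

    // Base condition: no pending sub-task left, so hand off to "combine".
    if (!subTask || subTask.status !== "pending") {
        await invokeCombine();
        return;
    }

    try {
        await store.updateSubTask(taskId, subTask.id, { status: "processing" });
        // ...zip the block data here (BlockExporter in the real handler)...
        await store.updateSubTask(taskId, subTask.id, { status: "completed" });
    } catch {
        await store.updateSubTask(taskId, subTask.id, { status: "failed" });
    } finally {
        // Self-invocation continues even if the current sub-task errored out.
        await invokeNextProcess(subTaskIndex + 1);
    }
}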
@@ -1,5 +1,5 @@
-import { BlockCategory, PageBlock } from "@webiny/api-page-builder/types";
-import { File, FileManagerContext } from "@webiny/api-file-manager/types";
+import type { BlockCategory, PageBlock } from "@webiny/api-page-builder/types";
+import type { File, FileManagerContext } from "@webiny/api-file-manager/types";
 export interface ExportedBlockData {
     block: Pick<PageBlock, "name" | "content">;
     category: BlockCategory;
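Both changed lines in this hunk only switch to type-only imports, so the emitted JavaScript stays the same; the ExportedBlockData shape is unchanged. For orientation, a small sketch of the archive payload that BlockExporter.execute() (visible in the source map below) serializes, using local stand-in types instead of the real Webiny ones:

// Local stand-ins for PageBlock, BlockCategory and File; the real code now
// imports those with `import type`, which is erased from the emitted JS.
interface PageBlockLike {
    name: string;
    content: Record<string, unknown>;
}
interface BlockCategoryLike {
    name: string;
    slug: string;
    icon: string;
    description: string;
}
interface FileLike {
    id: string;
    src: string;
}

interface ExportedBlockDataLike {
    block: Pick<PageBlockLike, "name" | "content">;
    category: BlockCategoryLike;
    files: FileLike[];
}

// The exporter serializes an object of this shape and hands the buffer to the zipper.
const blockData: ExportedBlockDataLike = {
    block: { name: "Example block", content: {} },
    category: { name: "General", slug: "general", icon: "", description: "" },
    files: []
};
const blockDataBuffer = Buffer.from(JSON.stringify(blockData));
console.log(blockDataBuffer.byteLength);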
@@ -1 +1 @@
-
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","BlockExporter","constructor","fileManager","execute","block","blockCategory","exportBlocksDataKey","files","extractFilesFromData","content","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","blockData","name","category","slug","icon","description","blockDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","dataBuffer","archiveFileKey","process","exports"],"sources":["BlockExporter.ts"],"sourcesContent":["import { BlockCategory, PageBlock } from \"@webiny/api-page-builder/types\";\nimport { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\">;\n category: BlockCategory;\n files: File[];\n}\n\nexport class BlockExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content\n },\n category: {\n name: blockCategory.name,\n slug: blockCategory.slug,\n icon: blockCategory.icon,\n description: blockCategory.description\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAEA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAQO,MAAME,aAAa,CAAC;EAGvBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,KAAgB,EAAEC,aAA4B,EAAEC,mBAA2B,EAAE;IACvF;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACJ,KAAK,CAACK,OAAO,IAAI,CAAC,CAAC,CAAC;IACvD,MAAMC,OAAO,GAAGH,KAAK,CAACI,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IACpD;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAACd,WAAW,CAACe,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,SAAS,GAAG;MACdjB,KAAK,EAAE;QACHkB,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBb,OAAO,EAAEL,KAAK,CAACK;MACnB,CAAC;MACDc,QAAQ,EAAE;QACND,IAAI,EAAEjB,aAAa,CAACiB,IAAI;QACxBE,IAAI,EAAEnB,aAAa,CAACmB,IAAI;QACxBC,IAAI,EAAEpB,aAAa,CAACoB,IAAI;QACxBC,WAAW,EAAErB,aAAa,CAACqB;MAC/B,CAAC;MACDnB,KAAK,EAAEO;IACX,CAAC;IACD,MAAMa,eAAe,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,SAAS,CAAC,CAAC;IAE9D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACR3B,KAAK,EAAEO,cAAc;QACrBQ,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBa,UAAU,EAAER;MAChB,CAAC;MACDS,cAAc,EAAE9B;IACpB,CAAC,CAAC;IAEF,OAAO0B,MAAM,CAACK,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAtC,aAAA,GAAAA,aAAA"}
+
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","BlockExporter","constructor","fileManager","execute","block","blockCategory","exportBlocksDataKey","files","extractFilesFromData","content","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","blockData","name","category","slug","icon","description","blockDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","dataBuffer","archiveFileKey","process","exports"],"sources":["BlockExporter.ts"],"sourcesContent":["import type { BlockCategory, PageBlock } from \"@webiny/api-page-builder/types\";\nimport type { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\">;\n category: BlockCategory;\n files: File[];\n}\n\nexport class BlockExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content\n },\n category: {\n name: blockCategory.name,\n slug: blockCategory.slug,\n icon: blockCategory.icon,\n description: blockCategory.description\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAEA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAQO,MAAME,aAAa,CAAC;EAGvBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,KAAgB,EAAEC,aAA4B,EAAEC,mBAA2B,EAAE;IACvF;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACJ,KAAK,CAACK,OAAO,IAAI,CAAC,CAAC,CAAC;IACvD,MAAMC,OAAO,GAAGH,KAAK,CAACI,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IACpD;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAACd,WAAW,CAACe,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,SAAS,GAAG;MACdjB,KAAK,EAAE;QACHkB,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBb,OAAO,EAAEL,KAAK,CAACK;MACnB,CAAC;MACDc,QAAQ,EAAE;QACND,IAAI,EAAEjB,aAAa,CAACiB,IAAI;QACxBE,IAAI,EAAEnB,aAAa,CAACmB,IAAI;QACxBC,IAAI,EAAEpB,aAAa,CAACoB,IAAI;QACxBC,WAAW,EAAErB,aAAa,CAACqB;MAC/B,CAAC;MACDnB,KAAK,EAAEO;IACX,CAAC;IACD,MAAMa,eAAe,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,SAAS,CAAC,CAAC;IAE9D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACR3B,KAAK,EAAEO,cAAc;QACrBQ,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBa,UAAU,EAAER;MAChB,CAAC;MACDS,cAAc,EAAE9B;IACpB,CAAC,CAAC;IAEF,OAAO0B,MAAM,CAACK,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAtC,aAAA,GAAAA,aAAA","ignoreList":[]}
@@ -1,5 +1,5 @@
-import { FbForm } from "@webiny/api-form-builder/types";
-import { File } from "@webiny/api-file-manager/types";
+import type { FbForm } from "@webiny/api-form-builder/types";
+import type { File } from "@webiny/api-file-manager/types";
 export interface ExportedFormData {
     form: Pick<FbForm, "name" | "status" | "version" | "fields" | "steps" | "settings" | "triggers">;
     files: File[];
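The same type-only import change applies here. The FormExporter source in the map below serializes only the picked form properties and passes an empty files list to the zipper; a short sketch with a stand-in form type (FbFormLike is a hypothetical substitute for the real FbForm):

// Stand-in for FbForm; only the picked properties are serialized into the archive.
interface FbFormLike {
    id: string;
    name: string;
    status: string;
    version: number;
    fields: unknown[];
    steps: unknown[];
    settings: Record<string, unknown>;
    triggers: Record<string, unknown> | null;
}

type ExportedFormLike = Pick<
    FbFormLike,
    "name" | "status" | "version" | "fields" | "steps" | "settings" | "triggers"
>;

const toExportedForm = (form: FbFormLike): ExportedFormLike => ({
    name: form.name,
    status: form.status,
    version: form.version,
    fields: form.fields,
    steps: form.steps,
    settings: form.settings,
    triggers: form.triggers
});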
@@ -1 +1 @@
-
{"version":3,"names":["_zipper","_interopRequireDefault","require","FormExporter","execute","form","exportFormsDataKey","formData","name","status","version","fields","steps","settings","triggers","formDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","files","dataBuffer","archiveFileKey","process","exports"],"sources":["FormExporter.ts"],"sourcesContent":["import Zipper from \"~/export/zipper\";\nimport { FbForm } from \"@webiny/api-form-builder/types\";\nimport { File } from \"@webiny/api-file-manager/types\";\n\nexport interface ExportedFormData {\n form: Pick<\n FbForm,\n \"name\" | \"status\" | \"version\" | \"fields\" | \"steps\" | \"settings\" | \"triggers\"\n >;\n files: File[];\n}\n\nexport class FormExporter {\n async execute(form: FbForm, exportFormsDataKey: string) {\n const formData = {\n form: {\n name: form.name,\n status: form.status,\n version: form.version,\n fields: form.fields,\n steps: form.steps,\n settings: form.settings,\n triggers: form.triggers\n }\n };\n const formDataBuffer = Buffer.from(JSON.stringify(formData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [],\n name: form.name,\n dataBuffer: formDataBuffer\n },\n archiveFileKey: exportFormsDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AAYO,MAAMC,YAAY,CAAC;EACtB,MAAMC,OAAOA,CAACC,IAAY,EAAEC,kBAA0B,EAAE;IACpD,MAAMC,QAAQ,GAAG;MACbF,IAAI,EAAE;QACFG,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfC,MAAM,EAAEJ,IAAI,CAACI,MAAM;QACnBC,OAAO,EAAEL,IAAI,CAACK,OAAO;QACrBC,MAAM,EAAEN,IAAI,CAACM,MAAM;QACnBC,KAAK,EAAEP,IAAI,CAACO,KAAK;QACjBC,QAAQ,EAAER,IAAI,CAACQ,QAAQ;QACvBC,QAAQ,EAAET,IAAI,CAACS;MACnB;IACJ,CAAC;IACD,MAAMC,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACZ,QAAQ,CAAC,CAAC;IAE5D,MAAMa,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACRC,KAAK,EAAE,EAAE;QACTf,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfgB,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAEnB;IACpB,CAAC,CAAC;IAEF,OAAOc,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAxB,YAAA,GAAAA,YAAA"}
+
{"version":3,"names":["_zipper","_interopRequireDefault","require","FormExporter","execute","form","exportFormsDataKey","formData","name","status","version","fields","steps","settings","triggers","formDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","files","dataBuffer","archiveFileKey","process","exports"],"sources":["FormExporter.ts"],"sourcesContent":["import Zipper from \"~/export/zipper\";\nimport type { FbForm } from \"@webiny/api-form-builder/types\";\nimport type { File } from \"@webiny/api-file-manager/types\";\n\nexport interface ExportedFormData {\n form: Pick<\n FbForm,\n \"name\" | \"status\" | \"version\" | \"fields\" | \"steps\" | \"settings\" | \"triggers\"\n >;\n files: File[];\n}\n\nexport class FormExporter {\n async execute(form: FbForm, exportFormsDataKey: string) {\n const formData = {\n form: {\n name: form.name,\n status: form.status,\n version: form.version,\n fields: form.fields,\n steps: form.steps,\n settings: form.settings,\n triggers: form.triggers\n }\n };\n const formDataBuffer = Buffer.from(JSON.stringify(formData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [],\n name: form.name,\n dataBuffer: formDataBuffer\n },\n archiveFileKey: exportFormsDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AAYO,MAAMC,YAAY,CAAC;EACtB,MAAMC,OAAOA,CAACC,IAAY,EAAEC,kBAA0B,EAAE;IACpD,MAAMC,QAAQ,GAAG;MACbF,IAAI,EAAE;QACFG,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfC,MAAM,EAAEJ,IAAI,CAACI,MAAM;QACnBC,OAAO,EAAEL,IAAI,CAACK,OAAO;QACrBC,MAAM,EAAEN,IAAI,CAACM,MAAM;QACnBC,KAAK,EAAEP,IAAI,CAACO,KAAK;QACjBC,QAAQ,EAAER,IAAI,CAACQ,QAAQ;QACvBC,QAAQ,EAAET,IAAI,CAACS;MACnB;IACJ,CAAC;IACD,MAAMC,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACZ,QAAQ,CAAC,CAAC;IAE5D,MAAMa,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACRC,KAAK,EAAE,EAAE;QACTf,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfgB,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAEnB;IACpB,CAAC,CAAC;IAEF,OAAOc,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAxB,YAAA,GAAAA,YAAA","ignoreList":[]}
@@ -1,7 +1,14 @@
-import { Page } from "@webiny/api-page-builder/types";
-import { File, FileManagerContext } from "@webiny/api-file-manager/types";
+import type { Page } from "@webiny/api-page-builder/types";
+import type { File, FileManagerContext } from "@webiny/api-file-manager/types";
 export interface ExportedPageData {
-
+    /**
+     * TODO: is it really ok not to export whole page object?
+     * What if we add some new properties in the future?
+     *
+     * Type of the exported page data is now created by removing unnecessary properties from the Page type.
+     * This way TS will break if we add new property and forget to handle it in the export/import process.
+     */
+    page: Omit<Page, "id" | "pid" | "tenant" | "locale" | "editor" | "category" | "createdFrom" | "createdBy" | "createdOn" | "savedOn" | "publishedOn" | "locked" | "ownedBy" | "webinyVersion">;
     files: File[];
 }
 export declare class PageExporter {
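The page property is now derived with Omit instead of an ad-hoc shape, so adding a property to Page surfaces as a compile error in the export code until it is handled, exactly as the new doc comment states. A compact, self-contained illustration of the pattern with a hypothetical stand-in PageLike type (the real Page lives in @webiny/api-page-builder/types):

// Stand-in page type for illustration only.
interface PageLike {
    id: string;
    pid: string;
    title: string;
    path: string;
    version: number;
    status: string;
    content: Record<string, unknown> | null;
    settings: Record<string, unknown>;
}

// Everything not explicitly excluded must be present in the exported data,
// so a new PageLike property breaks this mapping until it is handled.
type ExportedPageLike = Omit<PageLike, "id" | "pid">;

const toExportedPage = (page: PageLike): ExportedPageLike => ({
    title: page.title,
    path: page.path,
    version: page.version,
    status: page.status,
    content: page.content,
    settings: page.settings
});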
@@ -1 +1 @@
-
{"version":3,"names":["_get","_interopRequireDefault","require","_zipper","_utils","PageExporter","constructor","fileManager","execute","page","exportPagesDataKey","files","extractFilesFromData","content","pageSettingsImages","get","filter","image","src","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exports"],"sources":["PageExporter.ts"],"sourcesContent":["import get from \"lodash/get\";\nimport { Page } from \"@webiny/api-page-builder/types\";\nimport { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedPageData {\n page:
+
{"version":3,"names":["_get","_interopRequireDefault","require","_zipper","_utils","PageExporter","constructor","fileManager","execute","page","exportPagesDataKey","files","extractFilesFromData","content","pageSettingsImages","get","filter","image","src","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","limit","push","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exports"],"sources":["PageExporter.ts"],"sourcesContent":["import get from \"lodash/get\";\nimport type { Page } from \"@webiny/api-page-builder/types\";\nimport type { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedPageData {\n /**\n * TODO: is it really ok not to export whole page object?\n * What if we add some new properties in the future?\n *\n * Type of the exported page data is now created by removing unnecessary properties from the Page type.\n * This way TS will break if we add new property and forget to handle it in the export/import process.\n */\n page: Omit<\n Page,\n | \"id\"\n | \"pid\"\n | \"tenant\"\n | \"locale\"\n | \"editor\"\n | \"category\"\n | \"createdFrom\"\n | \"createdBy\"\n | \"createdOn\"\n | \"savedOn\"\n | \"publishedOn\"\n | \"locked\"\n | \"ownedBy\"\n | \"webinyVersion\"\n >;\n files: File[];\n}\n\nexport class PageExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(page: Page, exportPagesDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(page.content || {});\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n\n const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData: File[] = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({\n where: {\n id_in: fileIds\n },\n limit: 10000\n });\n imageFilesData.push(...filesData);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData: ExportedPageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: imageFilesData\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: page.title,\n dataBuffer: pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n 
}\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,IAAA,GAAAC,sBAAA,CAAAC,OAAA;AAGA,IAAAC,OAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,MAAA,GAAAF,OAAA;AA8BO,MAAMG,YAAY,CAAC;EAGtBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,IAAU,EAAEC,kBAA0B,EAAE;IAClD;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACH,IAAI,CAACI,OAAO,IAAI,CAAC,CAAC,CAAC;IACtD;IACA,MAAMC,kBAAkB,GAAG,CACvB,IAAAC,YAAG,EAACN,IAAI,EAAE,wBAAwB,CAAC,EACnC,IAAAM,YAAG,EAACN,IAAI,EAAE,uBAAuB,CAAC,CACrC,CAACO,MAAM,CAACC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAAG,CAAC;IAErC,MAAMC,OAAO,GAAG,CAAC,GAAGR,KAAK,EAAE,GAAGG,kBAAkB,CAAC,CAACM,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IAChF;IACA,MAAMC,cAAsB,GAAG,EAAE;IACjC,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAAClB,WAAW,CAACmB,SAAS,CAAC;QACjDC,KAAK,EAAE;UACHC,KAAK,EAAET;QACX,CAAC;QACDU,KAAK,EAAE;MACX,CAAC,CAAC;MACFN,cAAc,CAACO,IAAI,CAAC,GAAGL,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMM,QAA0B,GAAG;MAC/BtB,IAAI,EAAE;QACFI,OAAO,EAAEJ,IAAI,CAACI,OAAO;QACrBmB,KAAK,EAAEvB,IAAI,CAACuB,KAAK;QACjBC,IAAI,EAAExB,IAAI,CAACwB,IAAI;QACfC,OAAO,EAAEzB,IAAI,CAACyB,OAAO;QACrBC,MAAM,EAAE1B,IAAI,CAAC0B,MAAM;QACnBC,QAAQ,EAAE3B,IAAI,CAAC2B;MACnB,CAAC;MACDzB,KAAK,EAAEY;IACX,CAAC;IACD,MAAMc,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,QAAQ,CAAC,CAAC;IAE5D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACRjC,KAAK,EAAEY,cAAc;QACrBsB,IAAI,EAAEpC,IAAI,CAACuB,KAAK;QAChBc,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAErC;IACpB,CAAC,CAAC;IAEF,OAAOgC,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAA5C,YAAA,GAAAA,YAAA","ignoreList":[]}