@webiny/api-page-builder-import-export 0.0.0-unstable.3386f66516 → 0.0.0-unstable.3bc8100a7f
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
- package/client.d.ts +1 -1
- package/client.js +12 -25
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +2 -2
- package/export/combine/blocksHandler.js +14 -19
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/formsHandler.d.ts +6 -0
- package/export/combine/{pagesHandler.js → formsHandler.js} +25 -30
- package/export/combine/formsHandler.js.map +1 -0
- package/export/combine/index.d.ts +2 -2
- package/export/combine/index.js +24 -23
- package/export/combine/index.js.map +1 -1
- package/export/combine/templatesHandler.d.ts +2 -2
- package/export/combine/templatesHandler.js +14 -19
- package/export/combine/templatesHandler.js.map +1 -1
- package/export/pages/ExportPagesCleanup.d.ts +5 -0
- package/export/pages/ExportPagesCleanup.js +82 -0
- package/export/pages/ExportPagesCleanup.js.map +1 -0
- package/export/pages/ExportPagesController.d.ts +5 -0
- package/export/pages/ExportPagesController.js +31 -0
- package/export/pages/ExportPagesController.js.map +1 -0
- package/export/pages/ExportPagesZipPages.d.ts +5 -0
- package/export/pages/ExportPagesZipPages.js +23 -0
- package/export/pages/ExportPagesZipPages.js.map +1 -0
- package/export/pages/controller/CombineZippedPages.d.ts +5 -0
- package/export/pages/controller/CombineZippedPages.js +75 -0
- package/export/pages/controller/CombineZippedPages.js.map +1 -0
- package/export/pages/controller/CreateZipPagesTasks.d.ts +8 -0
- package/export/pages/controller/CreateZipPagesTasks.js +103 -0
- package/export/pages/controller/CreateZipPagesTasks.js.map +1 -0
- package/export/pages/controller/ProcessZipPagesTasks.d.ts +6 -0
- package/export/pages/controller/ProcessZipPagesTasks.js +61 -0
- package/export/pages/controller/ProcessZipPagesTasks.js.map +1 -0
- package/export/pages/types.d.ts +53 -0
- package/export/pages/types.js +26 -0
- package/export/pages/types.js.map +1 -0
- package/export/pages/utils.d.ts +2 -0
- package/export/pages/utils.js +13 -0
- package/export/pages/utils.js.map +1 -0
- package/export/pages/zipPages/ZipPages.d.ts +5 -0
- package/export/pages/zipPages/ZipPages.js +100 -0
- package/export/pages/zipPages/ZipPages.js.map +1 -0
- package/export/pages/zipPages/ZipPagesDataManager.d.ts +14 -0
- package/export/pages/zipPages/ZipPagesDataManager.js +46 -0
- package/export/pages/zipPages/ZipPagesDataManager.js.map +1 -0
- package/export/pages/zipPages/getPageFactory.d.ts +4 -0
- package/export/pages/zipPages/getPageFactory.js +38 -0
- package/export/pages/zipPages/getPageFactory.js.map +1 -0
- package/export/process/blocksHandler.d.ts +2 -2
- package/export/process/blocksHandler.js +29 -36
- package/export/process/blocksHandler.js.map +1 -1
- package/export/process/exporters/BlockExporter.d.ts +12 -0
- package/export/process/exporters/BlockExporter.js +57 -0
- package/export/process/exporters/BlockExporter.js.map +1 -0
- package/export/process/exporters/FormExporter.d.ts +9 -0
- package/export/process/exporters/FormExporter.js +36 -0
- package/export/process/exporters/FormExporter.js.map +1 -0
- package/export/process/exporters/PageExporter.d.ts +18 -0
- package/export/process/exporters/PageExporter.js +59 -0
- package/export/process/exporters/PageExporter.js.map +1 -0
- package/export/process/exporters/PageTemplateExporter.d.ts +11 -0
- package/export/process/exporters/PageTemplateExporter.js +58 -0
- package/export/process/exporters/PageTemplateExporter.js.map +1 -0
- package/export/process/formsHandler.d.ts +6 -0
- package/export/process/{pagesHandler.js → formsHandler.js} +48 -63
- package/export/process/formsHandler.js.map +1 -0
- package/export/process/index.d.ts +2 -2
- package/export/process/index.js +24 -23
- package/export/process/index.js.map +1 -1
- package/export/process/templatesHandler.d.ts +2 -2
- package/export/process/templatesHandler.js +29 -37
- package/export/process/templatesHandler.js.map +1 -1
- package/export/s3Stream.d.ts +16 -11
- package/export/s3Stream.js +27 -42
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +11 -9
- package/export/utils.js +54 -58
- package/export/utils.js.map +1 -1
- package/export/zipper.d.ts +7 -6
- package/export/zipper.js +46 -62
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.d.ts +1 -1
- package/graphql/crud/blocks.crud.js +65 -62
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/forms.crud.d.ts +4 -0
- package/graphql/crud/forms.crud.js +162 -0
- package/graphql/crud/forms.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +2 -2
- package/graphql/crud/importExportTasks.crud.js +150 -165
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.d.ts +1 -1
- package/graphql/crud/pages.crud.js +241 -142
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +1 -1
- package/graphql/crud/templates.crud.js +63 -47
- package/graphql/crud/templates.crud.js.map +1 -1
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +4 -8
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +2 -2
- package/graphql/graphql/blocks.gql.js +8 -13
- package/graphql/graphql/blocks.gql.js.map +1 -1
- package/graphql/graphql/forms.gql.d.ts +4 -0
- package/graphql/graphql/forms.gql.js +60 -0
- package/graphql/graphql/forms.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +2 -2
- package/graphql/graphql/importExportTasks.gql.js +8 -13
- package/graphql/graphql/importExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +104 -20
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +2 -2
- package/graphql/graphql/templates.gql.js +7 -12
- package/graphql/graphql/templates.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.d.ts +3 -3
- package/graphql/graphql/utils/resolve.js +15 -7
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +3 -7
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +4 -6
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +194 -31
- package/graphql/types.js +3 -1
- package/graphql/types.js.map +1 -1
- package/import/constants.d.ts +3 -0
- package/import/constants.js +13 -0
- package/import/constants.js.map +1 -0
- package/import/create/blocksHandler.d.ts +2 -2
- package/import/create/blocksHandler.js +13 -22
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/formsHandler.d.ts +3 -0
- package/import/create/formsHandler.js +105 -0
- package/import/create/formsHandler.js.map +1 -0
- package/import/create/index.d.ts +2 -2
- package/import/create/index.js +24 -23
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.d.ts +2 -2
- package/import/create/pagesHandler.js +12 -19
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.d.ts +2 -2
- package/import/create/templatesHandler.js +12 -19
- package/import/create/templatesHandler.js.map +1 -1
- package/import/pages/ImportPagesController.d.ts +5 -0
- package/import/pages/ImportPagesController.js +29 -0
- package/import/pages/ImportPagesController.js.map +1 -0
- package/import/pages/ImportPagesProcessPages.d.ts +6 -0
- package/import/pages/ImportPagesProcessPages.js +112 -0
- package/import/pages/ImportPagesProcessPages.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.d.ts +6 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js +40 -0
- package/import/pages/controller/ImportPagesProcessPagesChecker.js.map +1 -0
- package/import/pages/controller/ImportPagesProcessZipFile.d.ts +5 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js +71 -0
- package/import/pages/controller/ImportPagesProcessZipFile.js.map +1 -0
- package/import/pages/process/importPage.d.ts +10 -0
- package/import/pages/process/importPage.js +94 -0
- package/import/pages/process/importPage.js.map +1 -0
- package/import/pages/types.d.ts +48 -0
- package/import/pages/types.js +20 -0
- package/import/pages/types.js.map +1 -0
- package/import/process/blocks/ElementIdsProcessor.d.ts +5 -0
- package/import/process/blocks/ElementIdsProcessor.js +26 -0
- package/import/process/blocks/ElementIdsProcessor.js.map +1 -0
- package/import/process/{blocksHandler.d.ts → blocks/blocksHandler.d.ts} +2 -2
- package/import/process/{blocksHandler.js → blocks/blocksHandler.js} +32 -38
- package/import/process/blocks/blocksHandler.js.map +1 -0
- package/import/process/blocks/importBlock.d.ts +11 -0
- package/import/process/blocks/importBlock.js +101 -0
- package/import/process/blocks/importBlock.js.map +1 -0
- package/import/process/forms/formsHandler.d.ts +3 -0
- package/import/process/{pagesHandler.js → forms/formsHandler.js} +54 -57
- package/import/process/forms/formsHandler.js.map +1 -0
- package/import/process/forms/importForm.d.ts +9 -0
- package/import/process/forms/importForm.js +47 -0
- package/import/process/forms/importForm.js.map +1 -0
- package/import/process/index.d.ts +2 -2
- package/import/process/index.js +26 -25
- package/import/process/index.js.map +1 -1
- package/import/process/templates/importTemplate.d.ts +11 -0
- package/import/process/templates/importTemplate.js +70 -0
- package/import/process/templates/importTemplate.js.map +1 -0
- package/import/process/{templatesHandler.d.ts → templates/templatesHandler.d.ts} +2 -2
- package/import/process/{templatesHandler.js → templates/templatesHandler.js} +36 -34
- package/import/process/templates/templatesHandler.js.map +1 -0
- package/import/utils/deleteS3Folder.d.ts +1 -0
- package/import/utils/deleteS3Folder.js +21 -0
- package/import/utils/deleteS3Folder.js.map +1 -0
- package/import/utils/extractAndUploadZipFileContents.d.ts +7 -0
- package/import/utils/extractAndUploadZipFileContents.js +126 -0
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -0
- package/import/utils/extractZipAndUploadToS3.d.ts +2 -0
- package/import/utils/extractZipAndUploadToS3.js +100 -0
- package/import/utils/extractZipAndUploadToS3.js.map +1 -0
- package/import/utils/getFileNameWithoutExt.d.ts +1 -0
- package/import/utils/getFileNameWithoutExt.js +13 -0
- package/import/utils/getFileNameWithoutExt.js.map +1 -0
- package/import/utils/index.d.ts +9 -0
- package/import/utils/index.js +106 -0
- package/import/utils/index.js.map +1 -0
- package/import/utils/initialStats.d.ts +7 -0
- package/import/utils/initialStats.js +18 -0
- package/import/utils/initialStats.js.map +1 -0
- package/import/utils/prepareDataDirMap.d.ts +6 -0
- package/import/utils/prepareDataDirMap.js +31 -0
- package/import/utils/prepareDataDirMap.js.map +1 -0
- package/import/utils/updateFilesInData.d.ts +8 -0
- package/import/utils/updateFilesInData.js +51 -0
- package/import/utils/updateFilesInData.js.map +1 -0
- package/import/utils/uploadAssets.d.ts +10 -0
- package/import/utils/uploadAssets.js +92 -0
- package/import/utils/uploadAssets.js.map +1 -0
- package/import/utils/uploadFilesFromS3.d.ts +3 -0
- package/import/utils/uploadFilesFromS3.js +21 -0
- package/import/utils/uploadFilesFromS3.js.map +1 -0
- package/mockSecurity.d.ts +1 -1
- package/mockSecurity.js +2 -3
- package/mockSecurity.js.map +1 -1
- package/package.json +36 -52
- package/tasks/common/ChildTasksCleanup.d.ts +12 -0
- package/tasks/common/ChildTasksCleanup.js +64 -0
- package/tasks/common/ChildTasksCleanup.js.map +1 -0
- package/tasks/index.d.ts +1 -0
- package/tasks/index.js +13 -0
- package/tasks/index.js.map +1 -0
- package/tasks/pages/exportPagesCleanupTask.d.ts +3 -0
- package/tasks/pages/exportPagesCleanupTask.js +36 -0
- package/tasks/pages/exportPagesCleanupTask.js.map +1 -0
- package/tasks/pages/exportPagesControllerTask.d.ts +3 -0
- package/tasks/pages/exportPagesControllerTask.js +83 -0
- package/tasks/pages/exportPagesControllerTask.js.map +1 -0
- package/tasks/pages/exportPagesZipPagesTask.d.ts +3 -0
- package/tasks/pages/exportPagesZipPagesTask.js +39 -0
- package/tasks/pages/exportPagesZipPagesTask.js.map +1 -0
- package/tasks/pages/importPagesControllerTask.d.ts +3 -0
- package/tasks/pages/importPagesControllerTask.js +39 -0
- package/tasks/pages/importPagesControllerTask.js.map +1 -0
- package/tasks/pages/importPagesProcessPageTask.d.ts +3 -0
- package/tasks/pages/importPagesProcessPageTask.js +39 -0
- package/tasks/pages/importPagesProcessPageTask.js.map +1 -0
- package/tasks/pages/index.d.ts +1 -0
- package/tasks/pages/index.js +17 -0
- package/tasks/pages/index.js.map +1 -0
- package/types.d.ts +23 -2
- package/types.js +60 -13
- package/types.js.map +1 -1
- package/utils/ZipFiles.d.ts +11 -0
- package/utils/ZipFiles.js +124 -0
- package/utils/ZipFiles.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +0 -6
- package/export/combine/pagesHandler.js.map +0 -1
- package/export/process/pagesHandler.d.ts +0 -6
- package/export/process/pagesHandler.js.map +0 -1
- package/import/process/blocksHandler.js.map +0 -1
- package/import/process/pagesHandler.d.ts +0 -3
- package/import/process/pagesHandler.js.map +0 -1
- package/import/process/templatesHandler.js.map +0 -1
- package/import/utils.d.ts +0 -56
- package/import/utils.js +0 -696
- package/import/utils.js.map +0 -1
package/export/pages/zipPages/ZipPages.js
@@ -0,0 +1,100 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ZipPages = void 0;
+var _PageExporter = require("../../process/exporters/PageExporter");
+var _ZipPagesDataManager = require("./ZipPagesDataManager");
+var _getPageFactory = require("./getPageFactory");
+var _utils = require("../utils");
+/**
+ * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.
+ */
+const CLOSE_TO_TIMEOUT_SECONDS = 300;
+class ZipPages {
+  async execute(params) {
+    const {
+      response,
+      input,
+      isAborted,
+      isCloseToTimeout,
+      context,
+      store
+    } = params;
+    const parentId = store.getTask().parentId;
+    if (!parentId) {
+      return response.error({
+        message: `Could not find parent task ID.`
+      });
+    }
+    const exportPageDataKey = (0, _utils.createExportPagesDataKey)(parentId);
+    const dataManager = new _ZipPagesDataManager.ZipPagesDataManager(input);
+    if (dataManager.hasMore() === false) {
+      return response.done("Task done.", {
+        done: dataManager.getDone(),
+        failed: dataManager.getFailed()
+      });
+    }
+    const getPage = (0, _getPageFactory.getPageFactory)(context, store, input.type === "published");
+    /**
+     * We will go page by page and zip them.
+     * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.
+     *
+     */
+    for (const pageId of input.queue) {
+      /**
+       * Check for a possibility that the task was aborted.
+       */
+      if (isAborted()) {
+        return response.aborted();
+      }
+      /**
+       * We need to check if there is enough time left to finish the task.
+       */
+      if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {
+        /**
+         * If there is not enough time left, we will pause the task and return the current state.
+         */
+        return response.continue(dataManager.getInput());
+      }
+      const page = await getPage(pageId);
+      if (!page) {
+        await store.addErrorLog({
+          message: `Could not load page "${pageId}".`,
+          error: {
+            message: `Could not load page "${pageId}".`
+          }
+        });
+        dataManager.addFailed(pageId);
+        continue;
+      }
+      try {
+        const pageExporter = new _PageExporter.PageExporter(context.fileManager);
+        const pageDataZip = await pageExporter.execute(page, exportPageDataKey);
+        if (!pageDataZip.Key) {
+          throw new Error(`Failed to export page "${pageId}" into a zip file.`);
+        }
+        dataManager.addDone(pageId, pageDataZip.Key);
+      } catch (ex) {
+        const message = ex.message || `Failed to export page "${pageId}" into a zip file.`;
+        try {
+          await store.addErrorLog({
+            message,
+            error: ex
+          });
+        } catch {
+          console.error(`Failed to add error log: "${message}"`);
+        }
+        dataManager.addFailed(pageId);
+      }
+    }
+    return response.done("Task done.", {
+      done: dataManager.getDone(),
+      failed: dataManager.getFailed()
+    });
+  }
+}
+exports.ZipPages = ZipPages;
+
+//# sourceMappingURL=ZipPages.js.map
package/export/pages/zipPages/ZipPages.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_PageExporter","require","_ZipPagesDataManager","_getPageFactory","_utils","CLOSE_TO_TIMEOUT_SECONDS","ZipPages","execute","params","response","input","isAborted","isCloseToTimeout","context","store","parentId","getTask","error","message","exportPageDataKey","createExportPagesDataKey","dataManager","ZipPagesDataManager","hasMore","done","getDone","failed","getFailed","getPage","getPageFactory","type","pageId","queue","aborted","continue","getInput","page","addErrorLog","addFailed","pageExporter","PageExporter","fileManager","pageDataZip","Key","Error","addDone","ex","console","exports"],"sources":["ZipPages.ts"],"sourcesContent":["import type { ITaskResponseResult } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesTaskParams } from \"~/export/pages/types\";\nimport { PageExporter } from \"~/export/process/exporters/PageExporter\";\nimport { ZipPagesDataManager } from \"./ZipPagesDataManager\";\nimport { getPageFactory } from \"./getPageFactory\";\nimport { createExportPagesDataKey } from \"~/export/pages/utils\";\n\n/**\n * We will pause the execution of the task if there is less than CLOSE_TO_TIMEOUT_SECONDS left on the Lambda execution time.\n */\nconst CLOSE_TO_TIMEOUT_SECONDS = 300;\n\nexport class ZipPages {\n public async execute(params: IExportPagesZipPagesTaskParams): Promise<ITaskResponseResult> {\n const { response, input, isAborted, isCloseToTimeout, context, store } = params;\n\n const parentId = store.getTask().parentId;\n if (!parentId) {\n return response.error({\n message: `Could not find parent task ID.`\n });\n }\n const exportPageDataKey = createExportPagesDataKey(parentId);\n\n const dataManager = new ZipPagesDataManager(input);\n if (dataManager.hasMore() === false) {\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n }\n\n const getPage = getPageFactory(context, store, input.type === \"published\");\n /**\n * We will go page by page and zip them.\n * We are using the input.queue here because we are removing page from the ZipPagesDataManager queue as it is processed.\n *\n */\n for (const pageId of input.queue) {\n /**\n * Check for a possibility that the task was aborted.\n */\n if (isAborted()) {\n return response.aborted();\n }\n /**\n * We need to check if there is enough time left to finish the task.\n */\n if (isCloseToTimeout(CLOSE_TO_TIMEOUT_SECONDS)) {\n /**\n * If there is not enough time left, we will pause the task and return the current state.\n */\n return response.continue(dataManager.getInput());\n }\n\n const page = await getPage(pageId);\n if (!page) {\n await store.addErrorLog({\n message: `Could not load page \"${pageId}\".`,\n error: {\n message: `Could not load page \"${pageId}\".`\n }\n });\n dataManager.addFailed(pageId);\n continue;\n }\n try {\n const pageExporter = new PageExporter(context.fileManager);\n const pageDataZip = await pageExporter.execute(page, exportPageDataKey);\n if (!pageDataZip.Key) {\n throw new Error(`Failed to export page \"${pageId}\" into a zip file.`);\n }\n dataManager.addDone(pageId, pageDataZip.Key);\n } catch (ex) {\n const message = ex.message || `Failed to export page \"${pageId}\" into a zip file.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n dataManager.addFailed(pageId);\n }\n }\n\n return response.done(\"Task done.\", {\n done: dataManager.getDone(),\n failed: dataManager.getFailed()\n });\n 
}\n}\n"],"mappings":";;;;;;AAEA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,oBAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AACA,IAAAG,MAAA,GAAAH,OAAA;AAEA;AACA;AACA;AACA,MAAMI,wBAAwB,GAAG,GAAG;AAE7B,MAAMC,QAAQ,CAAC;EAClB,MAAaC,OAAOA,CAACC,MAAsC,EAAgC;IACvF,MAAM;MAAEC,QAAQ;MAAEC,KAAK;MAAEC,SAAS;MAAEC,gBAAgB;MAAEC,OAAO;MAAEC;IAAM,CAAC,GAAGN,MAAM;IAE/E,MAAMO,QAAQ,GAAGD,KAAK,CAACE,OAAO,CAAC,CAAC,CAACD,QAAQ;IACzC,IAAI,CAACA,QAAQ,EAAE;MACX,OAAON,QAAQ,CAACQ,KAAK,CAAC;QAClBC,OAAO,EAAE;MACb,CAAC,CAAC;IACN;IACA,MAAMC,iBAAiB,GAAG,IAAAC,+BAAwB,EAACL,QAAQ,CAAC;IAE5D,MAAMM,WAAW,GAAG,IAAIC,wCAAmB,CAACZ,KAAK,CAAC;IAClD,IAAIW,WAAW,CAACE,OAAO,CAAC,CAAC,KAAK,KAAK,EAAE;MACjC,OAAOd,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;QAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;QAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;MAClC,CAAC,CAAC;IACN;IAEA,MAAMC,OAAO,GAAG,IAAAC,8BAAc,EAAChB,OAAO,EAAEC,KAAK,EAAEJ,KAAK,CAACoB,IAAI,KAAK,WAAW,CAAC;IAC1E;AACR;AACA;AACA;AACA;IACQ,KAAK,MAAMC,MAAM,IAAIrB,KAAK,CAACsB,KAAK,EAAE;MAC9B;AACZ;AACA;MACY,IAAIrB,SAAS,CAAC,CAAC,EAAE;QACb,OAAOF,QAAQ,CAACwB,OAAO,CAAC,CAAC;MAC7B;MACA;AACZ;AACA;MACY,IAAIrB,gBAAgB,CAACP,wBAAwB,CAAC,EAAE;QAC5C;AAChB;AACA;QACgB,OAAOI,QAAQ,CAACyB,QAAQ,CAACb,WAAW,CAACc,QAAQ,CAAC,CAAC,CAAC;MACpD;MAEA,MAAMC,IAAI,GAAG,MAAMR,OAAO,CAACG,MAAM,CAAC;MAClC,IAAI,CAACK,IAAI,EAAE;QACP,MAAMtB,KAAK,CAACuB,WAAW,CAAC;UACpBnB,OAAO,EAAE,wBAAwBa,MAAM,IAAI;UAC3Cd,KAAK,EAAE;YACHC,OAAO,EAAE,wBAAwBa,MAAM;UAC3C;QACJ,CAAC,CAAC;QACFV,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;QAC7B;MACJ;MACA,IAAI;QACA,MAAMQ,YAAY,GAAG,IAAIC,0BAAY,CAAC3B,OAAO,CAAC4B,WAAW,CAAC;QAC1D,MAAMC,WAAW,GAAG,MAAMH,YAAY,CAAChC,OAAO,CAAC6B,IAAI,EAAEjB,iBAAiB,CAAC;QACvE,IAAI,CAACuB,WAAW,CAACC,GAAG,EAAE;UAClB,MAAM,IAAIC,KAAK,CAAC,0BAA0Bb,MAAM,oBAAoB,CAAC;QACzE;QACAV,WAAW,CAACwB,OAAO,CAACd,MAAM,EAAEW,WAAW,CAACC,GAAG,CAAC;MAChD,CAAC,CAAC,OAAOG,EAAE,EAAE;QACT,MAAM5B,OAAO,GAAG4B,EAAE,CAAC5B,OAAO,IAAI,0BAA0Ba,MAAM,oBAAoB;QAClF,IAAI;UACA,MAAMjB,KAAK,CAACuB,WAAW,CAAC;YACpBnB,OAAO;YACPD,KAAK,EAAE6B;UACX,CAAC,CAAC;QACN,CAAC,CAAC,MAAM;UACJC,OAAO,CAAC9B,KAAK,CAAC,6BAA6BC,OAAO,GAAG,CAAC;QAC1D;QACAG,WAAW,CAACiB,SAAS,CAACP,MAAM,CAAC;MACjC;IACJ;IAEA,OAAOtB,QAAQ,CAACe,IAAI,CAAC,YAAY,EAAE;MAC/BA,IAAI,EAAEH,WAAW,CAACI,OAAO,CAAC,CAAC;MAC3BC,MAAM,EAAEL,WAAW,CAACM,SAAS,CAAC;IAClC,CAAC,CAAC;EACN;AACJ;AAACqB,OAAA,CAAA1C,QAAA,GAAAA,QAAA","ignoreList":[]}
package/export/pages/zipPages/ZipPagesDataManager.d.ts
@@ -0,0 +1,14 @@
+import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from "../types";
+export declare class ZipPagesDataManager {
+    private readonly input;
+    private readonly queue;
+    private readonly done;
+    private readonly failed;
+    constructor(input: IExportPagesZipPagesInput);
+    hasMore(): boolean;
+    addDone(pageId: string, key: string): void;
+    addFailed(pageId: string): void;
+    getFailed(): string[];
+    getDone(): IExportPagesZipPagesDone;
+    getInput(): IExportPagesZipPagesInput;
+}
package/export/pages/zipPages/ZipPagesDataManager.js
@@ -0,0 +1,46 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ZipPagesDataManager = void 0;
+class ZipPagesDataManager {
+  constructor(input) {
+    this.input = input;
+    this.queue = new Set(input.queue);
+    this.done = {
+      ...input.done
+    };
+    this.failed = new Set(input.failed || []);
+  }
+  hasMore() {
+    return this.queue.size > 0;
+  }
+  addDone(pageId, key) {
+    this.queue.delete(pageId);
+    this.failed.delete(pageId);
+    this.done[pageId] = key;
+  }
+  addFailed(pageId) {
+    this.queue.delete(pageId);
+    this.failed.add(pageId);
+    delete this.done[pageId];
+  }
+  getFailed() {
+    return Array.from(this.failed);
+  }
+  getDone() {
+    return this.done;
+  }
+  getInput() {
+    return {
+      ...this.input,
+      queue: Array.from(this.queue),
+      failed: Array.from(this.failed),
+      done: this.done
+    };
+  }
+}
+exports.ZipPagesDataManager = ZipPagesDataManager;
+
+//# sourceMappingURL=ZipPagesDataManager.js.map
package/export/pages/zipPages/ZipPagesDataManager.js.map
@@ -0,0 +1 @@
{"version":3,"names":["ZipPagesDataManager","constructor","input","queue","Set","done","failed","hasMore","size","addDone","pageId","key","delete","addFailed","add","getFailed","Array","from","getDone","getInput","exports"],"sources":["ZipPagesDataManager.ts"],"sourcesContent":["import type { IExportPagesZipPagesDone, IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport class ZipPagesDataManager {\n private readonly input: IExportPagesZipPagesInput;\n private readonly queue: Set<string>;\n private readonly done: IExportPagesZipPagesDone;\n private readonly failed: Set<string>;\n\n public constructor(input: IExportPagesZipPagesInput) {\n this.input = input;\n this.queue = new Set(input.queue);\n this.done = {\n ...input.done\n };\n this.failed = new Set(input.failed || []);\n }\n\n public hasMore(): boolean {\n return this.queue.size > 0;\n }\n\n public addDone(pageId: string, key: string): void {\n this.queue.delete(pageId);\n this.failed.delete(pageId);\n this.done[pageId] = key;\n }\n\n public addFailed(pageId: string): void {\n this.queue.delete(pageId);\n this.failed.add(pageId);\n delete this.done[pageId];\n }\n\n public getFailed() {\n return Array.from(this.failed);\n }\n\n public getDone() {\n return this.done;\n }\n\n public getInput(): IExportPagesZipPagesInput {\n return {\n ...this.input,\n queue: Array.from(this.queue),\n failed: Array.from(this.failed),\n done: this.done\n };\n }\n}\n"],"mappings":";;;;;;AAEO,MAAMA,mBAAmB,CAAC;EAMtBC,WAAWA,CAACC,KAAgC,EAAE;IACjD,IAAI,CAACA,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,KAAK,GAAG,IAAIC,GAAG,CAACF,KAAK,CAACC,KAAK,CAAC;IACjC,IAAI,CAACE,IAAI,GAAG;MACR,GAAGH,KAAK,CAACG;IACb,CAAC;IACD,IAAI,CAACC,MAAM,GAAG,IAAIF,GAAG,CAACF,KAAK,CAACI,MAAM,IAAI,EAAE,CAAC;EAC7C;EAEOC,OAAOA,CAAA,EAAY;IACtB,OAAO,IAAI,CAACJ,KAAK,CAACK,IAAI,GAAG,CAAC;EAC9B;EAEOC,OAAOA,CAACC,MAAc,EAAEC,GAAW,EAAQ;IAC9C,IAAI,CAACR,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACM,MAAM,CAACF,MAAM,CAAC;IAC1B,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC,GAAGC,GAAG;EAC3B;EAEOE,SAASA,CAACH,MAAc,EAAQ;IACnC,IAAI,CAACP,KAAK,CAACS,MAAM,CAACF,MAAM,CAAC;IACzB,IAAI,CAACJ,MAAM,CAACQ,GAAG,CAACJ,MAAM,CAAC;IACvB,OAAO,IAAI,CAACL,IAAI,CAACK,MAAM,CAAC;EAC5B;EAEOK,SAASA,CAAA,EAAG;IACf,OAAOC,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;EAClC;EAEOY,OAAOA,CAAA,EAAG;IACb,OAAO,IAAI,CAACb,IAAI;EACpB;EAEOc,QAAQA,CAAA,EAA8B;IACzC,OAAO;MACH,GAAG,IAAI,CAACjB,KAAK;MACbC,KAAK,EAAEa,KAAK,CAACC,IAAI,CAAC,IAAI,CAACd,KAAK,CAAC;MAC7BG,MAAM,EAAEU,KAAK,CAACC,IAAI,CAAC,IAAI,CAACX,MAAM,CAAC;MAC/BD,IAAI,EAAE,IAAI,CAACA;IACf,CAAC;EACL;AACJ;AAACe,OAAA,CAAApB,mBAAA,GAAAA,mBAAA","ignoreList":[]}
package/export/pages/zipPages/getPageFactory.d.ts
@@ -0,0 +1,4 @@
+import type { PbImportExportContext } from "../../../graphql/types";
+import type { ITaskManagerStore } from "@webiny/tasks";
+import type { IExportPagesZipPagesInput } from "../types";
+export declare const getPageFactory: (context: PbImportExportContext, store: ITaskManagerStore<IExportPagesZipPagesInput>, published: boolean) => (pageId: string) => Promise<import("@webiny/api-page-builder/types").Page<Record<string, any> | null> | null>;
package/export/pages/zipPages/getPageFactory.js
@@ -0,0 +1,38 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.getPageFactory = void 0;
+const getPageFactory = (context, store, published) => {
+  return async pageId => {
+    if (published) {
+      try {
+        return await context.pageBuilder.getPublishedPageById({
+          id: pageId
+        });
+      } catch (ex) {
+        /**
+         * We do not need to do anything on exception because we will fetch the latest version.
+         */
+      }
+    }
+    try {
+      return await context.pageBuilder.getPage(pageId);
+    } catch (ex) {
+      const message = `There is no page with ID ${pageId}.`;
+      try {
+        await store.addErrorLog({
+          message,
+          error: ex
+        });
+      } catch {
+        console.error(`Failed to add error log: "${message}"`);
+      }
+      return null;
+    }
+  };
+};
+exports.getPageFactory = getPageFactory;
+
+//# sourceMappingURL=getPageFactory.js.map
package/export/pages/zipPages/getPageFactory.js.map
@@ -0,0 +1 @@
{"version":3,"names":["getPageFactory","context","store","published","pageId","pageBuilder","getPublishedPageById","id","ex","getPage","message","addErrorLog","error","console","exports"],"sources":["getPageFactory.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/graphql/types\";\nimport type { ITaskManagerStore } from \"@webiny/tasks\";\nimport type { IExportPagesZipPagesInput } from \"~/export/pages/types\";\n\nexport const getPageFactory = (\n context: PbImportExportContext,\n store: ITaskManagerStore<IExportPagesZipPagesInput>,\n published: boolean\n) => {\n return async (pageId: string) => {\n if (published) {\n try {\n return await context.pageBuilder.getPublishedPageById({\n id: pageId\n });\n } catch (ex) {\n /**\n * We do not need to do anything on exception because we will fetch the latest version.\n */\n }\n }\n try {\n return await context.pageBuilder.getPage(pageId);\n } catch (ex) {\n const message = `There is no page with ID ${pageId}.`;\n try {\n await store.addErrorLog({\n message,\n error: ex\n });\n } catch {\n console.error(`Failed to add error log: \"${message}\"`);\n }\n return null;\n }\n };\n};\n"],"mappings":";;;;;;AAIO,MAAMA,cAAc,GAAGA,CAC1BC,OAA8B,EAC9BC,KAAmD,EACnDC,SAAkB,KACjB;EACD,OAAO,MAAOC,MAAc,IAAK;IAC7B,IAAID,SAAS,EAAE;MACX,IAAI;QACA,OAAO,MAAMF,OAAO,CAACI,WAAW,CAACC,oBAAoB,CAAC;UAClDC,EAAE,EAAEH;QACR,CAAC,CAAC;MACN,CAAC,CAAC,OAAOI,EAAE,EAAE;QACT;AAChB;AACA;MAFgB;IAIR;IACA,IAAI;MACA,OAAO,MAAMP,OAAO,CAACI,WAAW,CAACI,OAAO,CAACL,MAAM,CAAC;IACpD,CAAC,CAAC,OAAOI,EAAE,EAAE;MACT,MAAME,OAAO,GAAG,4BAA4BN,MAAM,GAAG;MACrD,IAAI;QACA,MAAMF,KAAK,CAACS,WAAW,CAAC;UACpBD,OAAO;UACPE,KAAK,EAAEJ;QACX,CAAC,CAAC;MACN,CAAC,CAAC,MAAM;QACJK,OAAO,CAACD,KAAK,CAAC,6BAA6BF,OAAO,GAAG,CAAC;MAC1D;MACA,OAAO,IAAI;IACf;EACJ,CAAC;AACL,CAAC;AAACI,OAAA,CAAAd,cAAA,GAAAA,cAAA","ignoreList":[]}
package/export/process/blocksHandler.d.ts
@@ -1,5 +1,5 @@
-import { PbImportExportContext } from "../../types";
-import { Configuration, Payload, Response } from "
+import type { PbImportExportContext } from "../../types";
+import type { Configuration, Payload, Response } from "./";
 /**
  * Handles the export blocks process workflow.
  */
package/export/process/blocksHandler.js
@@ -4,19 +4,12 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.blocksHandler = void 0;
-
 var _types = require("../../types");
-
 var _client = require("../../client");
-
 var _handlerGraphql = require("@webiny/handler-graphql");
-
-var _utils = require("../utils");
-
 var _mockSecurity = require("../../mockSecurity");
-
-var
-
+var _utils = require("@webiny/utils");
+var _BlockExporter = require("./exporters/BlockExporter");
 /**
  * Handles the export blocks process workflow.
  */
@@ -35,22 +28,20 @@ const blocksHandler = async (configuration, payload, context) => {
     subTaskIndex,
     type,
     identity
-  } = payload;
+  } = payload;
+  // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
   // and this Lambda is invoked internally, without credentials.
-
   (0, _mockSecurity.mockSecurity)(identity, context);
-
   try {
     /*
      * Note: We're not going to DB for finding the next sub-task to process,
      * because the data might be out of sync due to GSI eventual consistency.
      */
-    subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0,
+    subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils.zeroPad)(subTaskIndex, 5));
     /**
      * Base condition!!
      * Bail out early, if task not found or task's status is not "pending".
      */
-
     if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
       noPendingTask = true;
       return {
@@ -60,7 +51,6 @@ const blocksHandler = async (configuration, payload, context) => {
     } else {
       noPendingTask = false;
     }
-
     log(`Fetched sub task => ${subTask.id}`);
     const {
       input
@@ -70,36 +60,40 @@ const blocksHandler = async (configuration, payload, context) => {
       exportBlocksDataKey
     } = input;
     const block = await pageBuilder.getPageBlock(blockId);
-
     if (!block) {
       log(`Unable to load block "${blockId}"`);
       throw new _handlerGraphql.NotFoundError(`Unable to load block "${blockId}"`);
     }
+    log(`Processing block key "${blockId}"`);
+    const blockCategory = await pageBuilder.getBlockCategory(block.blockCategory);
+    if (!blockCategory) {
+      log(`Unable to load block category "${block.blockCategory}"`);
+      throw new _handlerGraphql.NotFoundError(`Unable to load block category "${block.blockCategory}"`);
+    }
 
-
-
+    // Mark task status as PROCESSING
     subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
       status: _types.ImportExportTaskStatus.PROCESSING
-    });
-
+    });
+    // Update stats in main task
     await pageBuilder.importExportTask.updateStats(taskId, {
       prevStatus: prevStatusOfSubTask,
       nextStatus: _types.ImportExportTaskStatus.PROCESSING
     });
     prevStatusOfSubTask = subTask.status;
-    log(`Extracting block data and uploading to storage...`);
-
-    const blockDataZip = await
-    log(`Finish uploading zip...`);
-
+    log(`Extracting block data and uploading to storage...`);
+    const blockExporter = new _BlockExporter.BlockExporter(fileManager);
+    const blockDataZip = await blockExporter.execute(block, blockCategory, exportBlocksDataKey);
+    log(`Finish uploading zip...`);
+    // Update task record in DB
    subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
       status: _types.ImportExportTaskStatus.COMPLETED,
       data: {
         message: `Finish uploading data for block "${block.id}"`,
         key: blockDataZip.Key
       }
-    });
-
+    });
+    // Update stats in main task
     await pageBuilder.importExportTask.updateStats(taskId, {
       prevStatus: prevStatusOfSubTask,
       nextStatus: _types.ImportExportTaskStatus.COMPLETED
@@ -107,7 +101,6 @@ const blocksHandler = async (configuration, payload, context) => {
     prevStatusOfSubTask = subTask.status;
   } catch (e) {
     log("[EXPORT_BLOCKS_PROCESS] Error => ", e.message);
-
     if (subTask && subTask.id) {
       /**
        * In case of error, we'll update the task status to "failed",
@@ -120,15 +113,15 @@ const blocksHandler = async (configuration, payload, context) => {
           message: e.message,
           code: "EXPORT_FAILED"
         }
-      });
+      });
 
+      // Update stats in main task
       await pageBuilder.importExportTask.updateStats(taskId, {
         prevStatus: prevStatusOfSubTask,
         nextStatus: _types.ImportExportTaskStatus.FAILED
       });
       prevStatusOfSubTask = subTask.status;
     }
-
     return {
       data: null,
       error: {
@@ -138,8 +131,8 @@ const blocksHandler = async (configuration, payload, context) => {
   } finally {
     // Base condition!
     if (noPendingTask) {
-      log(`No pending sub-task for task ${taskId}`);
-
+      log(`No pending sub-task for task ${taskId}`);
+      // Combine individual block zip files.
       await (0, _client.invokeHandlerClient)({
         context,
         name: configuration.handlers.combine,
@@ -151,8 +144,8 @@ const blocksHandler = async (configuration, payload, context) => {
         description: "Export blocks - combine"
       });
     } else {
-      console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
-
+      console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
+      // We want to continue with Self invocation no matter if current block error out.
       await (0, _client.invokeHandlerClient)({
         context,
         name: configuration.handlers.process,
@@ -166,11 +159,11 @@ const blocksHandler = async (configuration, payload, context) => {
       });
     }
   }
-
   return {
     data: "",
     error: null
   };
 };
+exports.blocksHandler = blocksHandler;
 
-
+//# sourceMappingURL=blocksHandler.js.map
package/export/process/blocksHandler.js.map
@@ -1 +1 @@
-
{"version":3,"names":["blocksHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","fileManager","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","input","blockId","exportBlocksDataKey","block","getPageBlock","NotFoundError","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","blockDataZip","exportBlock","COMPLETED","message","key","Key","e","FAILED","name","code","invokeHandlerClient","handlers","combine","security","getIdentity","description","process"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { NotFoundError } from \"@webiny/handler-graphql\";\nimport { exportBlock } from \"~/export/utils\";\nimport { Payload as ExtractPayload } from \"../combine\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/export/process\";\n\n/**\n * Handles the export blocks process workflow.\n */\nexport const blocksHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Export Blocks Process Handler\");\n const { pageBuilder, fileManager } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity as SecurityIdentity, context);\n try {\n /*\n * Note: We're not going to DB for finding the next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { input } = subTask;\n const { blockId, exportBlocksDataKey } = input;\n\n const block = await pageBuilder.getPageBlock(blockId);\n\n if (!block) {\n log(`Unable to load block \"${blockId}\"`);\n throw new NotFoundError(`Unable to load block \"${blockId}\"`);\n }\n\n log(`Processing block key \"${blockId}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n log(`Extracting block data and uploading to storage...`);\n // Extract Block\n const blockDataZip = await exportBlock(block, exportBlocksDataKey, fileManager);\n log(`Finish uploading zip...`);\n // Update task record in DB\n subTask = await 
pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading data for block \"${block.id}\"`,\n key: blockDataZip.Key\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[EXPORT_BLOCKS_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n // Combine individual block zip files.\n await invokeHandlerClient<ExtractPayload>({\n context,\n name: configuration.handlers.combine,\n payload: {\n taskId,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - combine\"\n });\n } else {\n console.log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current block error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AAEA;;AAEA;;AAGA;AACA;AACA;AACO,MAAMA,aAAa,GAAG,OACzBC,aADyB,EAEzBC,OAFyB,EAGzBC,OAHyB,KAIL;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,IAAIE,OAAJ;EACA,IAAIC,aAAa,GAAG,IAApB;EACA,IAAIC,mBAAmB,GAAGC,6BAAA,CAAuBC,OAAjD;EAEAN,GAAG,CAAC,uCAAD,CAAH;EACA,MAAM;IAAEO,WAAF;IAAeC;EAAf,IAA+BT,OAArC;EACA,MAAM;IAAEU,MAAF;IAAUC,YAAV;IAAwBC,IAAxB;IAA8BC;EAA9B,IAA2Cd,OAAjD,CARoB,CASpB;EACA;;EACA,IAAAe,0BAAA,EAAaD,QAAb,EAA2Cb,OAA3C;;EACA,IAAI;IACA;AACR;AACA;AACA;IACQG,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAZ,CAA6BC,UAA7B,CAAwCN,MAAxC,EAAgD,IAAAO,eAAA,EAAQN,YAAR,EAAsB,CAAtB,CAAhD,CAAhB;IACA;AACR;AACA;AACA;;IACQ,IAAI,CAACR,OAAD,IAAYA,OAAO,CAACe,MAAR,KAAmBZ,6BAAA,CAAuBC,OAA1D,EAAmE;MAC/DH,aAAa,GAAG,IAAhB;MACA,OAAO;QACHe,IAAI,EAAE,EADH;QAEHC,KAAK,EAAE;MAFJ,CAAP;IAIH,CAND,MAMO;MACHhB,aAAa,GAAG,KAAhB;IACH;;IAEDH,GAAG,CAAE,uBAAsBE,OAAO,CAACkB,EAAG,EAAnC,CAAH;IAEA,MAAM;MAAEC;IAAF,IAAYnB,OAAlB;IACA,MAAM;MAAEoB,OAAF;MAAWC;IAAX,IAAmCF,KAAzC;IAEA,MAAMG,KAAK,GAAG,MAAMjB,WAAW,CAACkB,YAAZ,CAAyBH,OAAzB,CAApB;;IAEA,IAAI,CAACE,KAAL,EAAY;MACRxB,GAAG,CAAE,yBAAwBsB,OAAQ,GAAlC,CAAH;MACA,MAAM,IAAII,6BAAJ,CAAmB,yBAAwBJ,OAAQ,GAAnD,CAAN;IACH;;IAEDtB,GAAG,CAAE,yBAAwBsB,OAAQ,GAAlC,CAAH,CAhCA,CAkCA;;IACApB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDP,OAAO,CAACkB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEZ,6BAAA,CAAuBuB;IAD4C,CAA/D,CAAhB,CAnCA,CAsCA;;IACA,MAAMrB,WAAW,CAACO,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;MACnDqB,UAAU,EAAE1B,mBADuC;MAEnD2B,UAAU,EAAE1B,6BAAA,CAAuBuB;IAFgB,CAAjD,CAAN;IAIAxB,mBAAmB,GAAGF,OAAO,CAACe,MAA9B;IAEAjB,GAAG,CAAE,mDAAF,CAAH,CA7CA,CA8CA;;IACA,MAAMgC,YAAY,GAAG,MAAM,IAAAC,kBAAA,EAAYT,KAAZ,EAAmBD,mBAAnB,EAAwCf,WAAxC,CAA3B;IACAR,GAAG,CAAE,yBAAF,CAAH,CAhDA,CAiDA;;IACAE,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDP,OAAO,CAACkB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEZ,6BAAA,CAAuB6B,SAD4C;MAE3EhB,IAAI,EAAE;QACFiB,OAAO,EAAG,oCAAmCX,KAAK,CAACJ,EAAG,GADpD;QAEFgB,GAAG,EAAEJ,YAAY,CAACK;MAFhB;IAFqE,CAA/D,CAAhB,CAlDA,CAyDA;;IACA,MAAM9B,WAAW,CAACO,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;MACnDqB,UAAU,EAAE1B,mBADuC;MAEnD2B,UAAU,EAAE1B,6BAAA,CAAuB6B;IAFgB,CAAjD,CAAN;IAIA9B,mBAAmB,GAAGF,OAAO,CAACe,MAA9B;EACH,CA/DD,CA+DE,OAAOqB,CAAP,EAAU;IACRtC,GAAG,CAAC,mCAAD,EAAsCsC,CAAC,CAACH,OAAxC,CAAH;;IAEA,IAAIjC,OAAO,IAAIA,OAAO,CAACkB,EAAvB,EAA2B;MACvB;AACZ;AACA;AACA;MACYlB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDP,OAAO,CAACkB,EAA3D,EAA+D;QAC3EH,MAAM,EAAEZ,6BAAA,CAAuBkC,MAD4C;QAE3EpB,KAAK,EAAE;UACHqB,IAAI,EAAEF,CAAC,CAACE,IADL;UAEHL,OAAO,EAAEG,CAAC,CAACH,OAFR;UAGHM,IAAI,EAAE;QAHH;MAFoE,CAA/D,CAAhB,CALuB,CAcvB;;MACA,MAAMlC,WAAW,CAACO,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;QACnDqB,UAAU,EAAE1B,mBADuC;QAEnD2B,UAAU,EAAE1B,6BAAA,CAAuBkC;MAFgB,CAAjD,CAAN;MAIAnC,mBAAmB,GAAGF,OAAO,CAACe,MAA9B;IACH;;IAED,OAAO;MACHC,IAAI,EAAE,IADH;MAEHC,KAAK,EAAE;QACHgB,OAAO,EAAEG,CAAC,CAACH;MADR;IAFJ,CAAP;EAMH,CA9FD,SA8FU;IACN;IACA,IAAIhC,aAAJ,EAAmB;MACfH,GAAG,CAAE,gCAA+BS,MAAO,EAAxC,CAAH,CADe,CAEf;;MACA,MAAM,IAAAiC,2BAAA,EAAoC;QACtC3C,OADsC;QAEtCyC,IAAI,EAAE3C,aAAa,CAAC8C,QAAd,CAAuBC,OAFS;QAGtC9C,OAAO,EAAE;UACLW,MADK;UAELE,IAFK;UAGLC,QAAQ,EAAEb,OAAO,CAAC8C,QAAR,CAAiBC,WAAjB;QAHL,CAH6B;QAQtCC,WAAW,EAAE;MARyB,CAApC,CAAN;IAUH,CAbD,MAaO;MACH9C,OAAO,CAACD,GAAR,CAAa,8BAA6BU,YAAY,GAAG,CAAE,GAA3D,EADG,CAEH;;MACA,MAAM,IAAAgC,2BAAA,EAA6B;QAC/B3C,OAD+B;QAE/ByC,IAAI,EAAE3C,aAAa,CAAC8C,QAAd,CAAuBK,OAFE;QAG/BlD,OAAO,EAAE;UACLW,MADK;UAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;UAGLC,IAHK;UAILC,QAAQ,EAAEb,OAAO,CAAC8C,QAAR,CAAiBC,WAAjB;QAJL,CAHsB;QAS/BC,WAAW,EAAE;MATkB,CAA7B,CAAN;IAWH;EACJ;;EACD,OAAO;IACH7B
,IAAI,EAAE,EADH;IAEHC,KAAK,EAAE;EAFJ,CAAP;AAIH,CAjJM"}
+
{"version":3,"names":["_types","require","_client","_handlerGraphql","_mockSecurity","_utils","_BlockExporter","blocksHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","fileManager","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","input","blockId","exportBlocksDataKey","block","getPageBlock","NotFoundError","blockCategory","getBlockCategory","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","blockExporter","BlockExporter","blockDataZip","execute","COMPLETED","message","key","Key","e","FAILED","name","code","invokeHandlerClient","handlers","combine","security","getIdentity","description","process","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import type { PbImportExportContext } from \"~/types\";\nimport { ImportExportTaskStatus } from \"~/types\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { NotFoundError } from \"@webiny/handler-graphql\";\nimport type { Payload as ExtractPayload } from \"../combine\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport type { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { zeroPad } from \"@webiny/utils\";\nimport type { Configuration, Payload, Response } from \"~/export/process\";\nimport { BlockExporter } from \"./exporters/BlockExporter\";\n\n/**\n * Handles the export blocks process workflow.\n */\nexport const blocksHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Export Blocks Process Handler\");\n const { pageBuilder, fileManager } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity as SecurityIdentity, context);\n try {\n /*\n * Note: We're not going to DB for finding the next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { input } = subTask;\n const { blockId, exportBlocksDataKey } = input;\n\n const block = await pageBuilder.getPageBlock(blockId);\n\n if (!block) {\n log(`Unable to load block \"${blockId}\"`);\n throw new NotFoundError(`Unable to load block \"${blockId}\"`);\n }\n\n log(`Processing block key \"${blockId}\"`);\n\n const blockCategory = await pageBuilder.getBlockCategory(block.blockCategory);\n\n if (!blockCategory) {\n log(`Unable to load block category \"${block.blockCategory}\"`);\n throw new NotFoundError(`Unable to load block category \"${block.blockCategory}\"`);\n }\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n 
});\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n log(`Extracting block data and uploading to storage...`);\n const blockExporter = new BlockExporter(fileManager);\n const blockDataZip = await blockExporter.execute(block, blockCategory, exportBlocksDataKey);\n\n log(`Finish uploading zip...`);\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading data for block \"${block.id}\"`,\n key: blockDataZip.Key\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[EXPORT_BLOCKS_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n // Combine individual block zip files.\n await invokeHandlerClient<ExtractPayload>({\n context,\n name: configuration.handlers.combine,\n payload: {\n taskId,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - combine\"\n });\n } else {\n console.log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current block error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Export blocks - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AAEA,IAAAG,aAAA,GAAAH,OAAA;AAEA,IAAAI,MAAA,GAAAJ,OAAA;AAEA,IAAAK,cAAA,GAAAL,OAAA;AAEA;AACA;AACA;AACO,MAAMM,aAAa,GAAG,MAAAA,CACzBC,aAA4B,EAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,IAAIE,OAAO;EACX,IAAIC,aAAa,GAAG,IAAI;EACxB,IAAIC,mBAAmB,GAAGC,6BAAsB,CAACC,OAAO;EAExDN,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEO,WAAW;IAAEC;EAAY,CAAC,GAAGT,OAAO;EAC5C,MAAM;IAAEU,MAAM;IAAEC,YAAY;IAAEC,IAAI;IAAEC;EAAS,CAAC,GAAGd,OAAO;EACxD;EACA;EACA,IAAAe,0BAAY,EAACD,QAAQ,EAAsBb,OAAO,CAAC;EACnD,IAAI;IACA;AACR;AACA;AACA;IACQG,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACC,UAAU,CAACN,MAAM,EAAE,IAAAO,cAAO,EAACN,YAAY,EAAE,CAAC,CAAC,CAAC;IACzF;AACR;AACA;AACA;IACQ,IAAI,CAACR,OAAO,IAAIA,OAAO,CAACe,MAAM,KAAKZ,6BAAsB,CAACC,OAAO,EAAE;MAC/DH,aAAa,GAAG,IAAI;MACpB,OAAO;QACHe,IAAI,EAAE,EAAE;QACRC,KAAK,EAAE;MACX,CAAC;IACL,CAAC,MAAM;MACHhB,aAAa,GAAG,KAAK;IACzB;IAEAH,GAAG,CAAC,uBAAuBE,OAAO,CAACkB,EAAE,EAAE,CAAC;IAExC,MAAM;MAAEC;IAAM,CAAC,GAAGnB,OAAO;IACzB,MAAM;MAAEoB,OAAO;MAAEC;IAAoB,CAAC,GAAGF,KAAK;IAE9C,MAAMG,KAAK,GAAG,MAAMjB,WAAW,CAACkB,YAAY,CAACH,OAAO,CAAC;IAErD,IAAI,CAACE,KAAK,EAAE;MACRxB,GAAG,CAAC,yBAAyBsB,OAAO,GAAG,CAAC;MACxC,MAAM,IAAII,6BAAa,CAAC,yBAAyBJ,OAAO,GAAG,CAAC;IAChE;IAEAtB,GAAG,CAAC,yBAAyBsB,OAAO,GAAG,CAAC;IAExC,MAAMK,aAAa,GAAG,MAAMpB,WAAW,CAACqB,gBAAgB,CAACJ,KAAK,CAACG,aAAa,CAAC;IAE7E,IAAI,CAACA,aAAa,EAAE;MAChB3B,GAAG,CAAC,kCAAkCwB,KAAK,CAACG,aAAa,GAAG,CAAC;MAC7D,MAAM,IAAID,6BAAa,CAAC,kCAAkCF,KAAK,CAACG,aAAa,GAAG,CAAC;IACrF;;IAEA;IACAzB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACyB;IACnC,CAAC,CAAC;IACF;IACA,MAAMvB,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACyB;IACvC,CAAC,CAAC;IACF1B,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IAEpCjB,GAAG,CAAC,mDAAmD,CAAC;IACxD,MAAMkC,aAAa,GAAG,IAAIC,4BAAa,CAAC3B,WAAW,CAAC;IACpD,MAAM4B,YAAY,GAAG,MAAMF,aAAa,CAACG,OAAO,CAACb,KAAK,EAAEG,aAAa,EAAEJ,mBAAmB,CAAC;IAE3FvB,GAAG,CAAC,yBAAyB,CAAC;IAC9B;IACAE,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;MAC3EH,MAAM,EAAEZ,6BAAsB,CAACiC,SAAS;MACxCpB,IAAI,EAAE;QACFqB,OAAO,EAAE,oCAAoCf,KAAK,CAACJ,EAAE,GAAG;QACxDoB,GAAG,EAAEJ,YAAY,CAACK;MACtB;IACJ,CAAC,CAAC;IACF;IACA,MAAMlC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;MACnDuB,UAAU,EAAE5B,mBAAmB;MAC/B6B,UAAU,EAAE5B,6BAAsB,CAACiC;IACvC,CAAC,CAAC;IACFlC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;EACxC,CAAC,CAAC,OAAOyB,CAAC,EAAE;IACR1C,GAAG,CAAC,mCAAmC,EAAE0C,CAAC,CAACH,OAAO,CAAC;IAEnD,IAAIrC,OAAO,IAAIA,OAAO,CAACkB,EAAE,EAAE;MACvB;AACZ;AACA;AACA;MACYlB,OAAO,GAAG,MAAMK,WAAW,CAACO,gBAAgB,CAACe,aAAa,CAACpB,MAAM,EAAEP,OAAO,CAACkB,EAAE,EAAE;QAC3EH,MAAM,EAAEZ,6BAAsB,CAACsC,MAAM;QACrCxB,KAAK,EAAE;UACHyB,IAAI,EAAEF,CAAC,CAACE,IAAI;UACZL,OAAO,EAAEG,CAAC,CAACH,OAAO;UAClBM,IAAI,EAAE;QACV;MACJ,CAAC,CAAC;;MAEF;MACA,MAAMtC,WAAW,CAACO,gBAAgB,CAACiB,WAAW,CAACtB,MAAM,EAAE;QACnDuB,UAAU,EAAE5B,mBAAmB;QAC/B6B,UAAU,EAAE5B,6BAAsB,CAACsC;MACvC,CAAC,CAAC;MACFvC,mBAAmB,GAAGF,OAAO,CAACe,MAAM;IACxC;IAEA,OAAO;MACHC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHoB,OAAO,EAAEG,CAAC,CAACH;MACf;IACJ,CAAC;EACL,CAAC,SAAS;IACN;IACA,IAAIpC,aAAa,EAAE;MACfH,GAAG,CAAC,gCAAgCS,MAAM,EAAE,CAAC;MAC7C;MACA,MAAM,IAAAqC,2BAAmB,EAAiB;QACtC/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACC,OAAO;QACpClD,OAAO,EAAE;UACLW,MAAM;UACNE,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN,CAAC,MAAM;MACHlD,OAAO,CAACD,GAAG,CAAC,8BAA8BU,YAAY,GAAG,CAAC,
GAAG,CAAC;MAC9D;MACA,MAAM,IAAAoC,2BAAmB,EAAU;QAC/B/C,OAAO;QACP6C,IAAI,EAAE/C,aAAa,CAACkD,QAAQ,CAACK,OAAO;QACpCtD,OAAO,EAAE;UACLW,MAAM;UACNC,YAAY,EAAEA,YAAY,GAAG,CAAC;UAC9BC,IAAI;UACJC,QAAQ,EAAEb,OAAO,CAACkD,QAAQ,CAACC,WAAW,CAAC;QAC3C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN;EACJ;EACA,OAAO;IACHjC,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACkC,OAAA,CAAAzD,aAAA,GAAAA,aAAA","ignoreList":[]}
@@ -0,0 +1,12 @@
import type { BlockCategory, PageBlock } from "@webiny/api-page-builder/types";
import type { File, FileManagerContext } from "@webiny/api-file-manager/types";
export interface ExportedBlockData {
    block: Pick<PageBlock, "name" | "content">;
    category: BlockCategory;
    files: File[];
}
export declare class BlockExporter {
    private fileManager;
    constructor(fileManager: FileManagerContext["fileManager"]);
    execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
}
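For orientation, a minimal usage sketch of the new BlockExporter API follows. It is not part of the released package: the deep import path, the `exportSingleBlock` helper name, the way the file manager is obtained from a handler context, and the archive key are assumptions for illustration only.

```ts
// Illustrative sketch only; import path and archive key are assumptions.
import { BlockExporter } from "@webiny/api-page-builder-import-export/export/process/exporters/BlockExporter";
import type { FileManagerContext } from "@webiny/api-file-manager/types";
import type { BlockCategory, PageBlock } from "@webiny/api-page-builder/types";

async function exportSingleBlock(
    context: FileManagerContext,
    block: PageBlock,
    category: BlockCategory
) {
    // The exporter only needs the file manager facade from the context.
    const exporter = new BlockExporter(context.fileManager);

    // The third argument is the S3 key under which the zip archive is stored (hypothetical key).
    return exporter.execute(block, category, `export/blocks/${block.id}/data.zip`);
}
```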
@@ -0,0 +1,57 @@
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.BlockExporter = void 0;
var _zipper = _interopRequireDefault(require("../../zipper"));
var _utils = require("../../utils");
class BlockExporter {
  constructor(fileManager) {
    this.fileManager = fileManager;
  }
  async execute(block, blockCategory, exportBlocksDataKey) {
    // Extract all files
    const files = (0, _utils.extractFilesFromData)(block.content || {});
    const fileIds = files.map(imageFile => imageFile.id);
    // Get file data for all images
    const imageFilesData = [];
    if (fileIds.length > 0) {
      const [filesData] = await this.fileManager.listFiles({
        where: {
          id_in: fileIds
        }
      });
      imageFilesData.push(...filesData);
    }

    // Extract the block data in a json file and upload it to S3
    const blockData = {
      block: {
        name: block.name,
        content: block.content
      },
      category: {
        name: blockCategory.name,
        slug: blockCategory.slug,
        icon: blockCategory.icon,
        description: blockCategory.description
      },
      files: imageFilesData
    };
    const blockDataBuffer = Buffer.from(JSON.stringify(blockData));
    const zipper = new _zipper.default({
      exportInfo: {
        files: imageFilesData,
        name: block.name,
        dataBuffer: blockDataBuffer
      },
      archiveFileKey: exportBlocksDataKey
    });
    return zipper.process();
  }
}
exports.BlockExporter = BlockExporter;

//# sourceMappingURL=BlockExporter.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","BlockExporter","constructor","fileManager","execute","block","blockCategory","exportBlocksDataKey","files","extractFilesFromData","content","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","blockData","name","category","slug","icon","description","blockDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","dataBuffer","archiveFileKey","process","exports"],"sources":["BlockExporter.ts"],"sourcesContent":["import type { BlockCategory, PageBlock } from \"@webiny/api-page-builder/types\";\nimport type { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\">;\n category: BlockCategory;\n files: File[];\n}\n\nexport class BlockExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content\n },\n category: {\n name: blockCategory.name,\n slug: blockCategory.slug,\n icon: blockCategory.icon,\n description: blockCategory.description\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAEA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAQO,MAAME,aAAa,CAAC;EAGvBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,KAAgB,EAAEC,aAA4B,EAAEC,mBAA2B,EAAE;IACvF;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACJ,KAAK,CAACK,OAAO,IAAI,CAAC,CAAC,CAAC;IACvD,MAAMC,OAAO,GAAGH,KAAK,CAACI,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IACpD;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAACd,WAAW,CAACe,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,SAAS,GAAG;MACdjB,KAAK,EAAE;QACHkB,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBb,OAAO,EAAEL,KAAK,CAACK;MACnB,CAAC;MACDc,QAAQ,EAAE;QACND,IAAI,EAAEjB,aAAa,CAACiB,IAAI;QACxBE,IAAI,EAAEnB,aAAa,CAACmB,IAAI;QACxBC,IAAI,EAAEpB,aAAa,CAACoB,IAAI;QACxBC,WAAW,EAAErB,aAAa,CAACqB;MAC/B,CAAC;MACDnB,KAAK,EAAEO;IACX,CAAC;IACD,MAAMa,eAAe,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,SAAS,CAAC,CAAC;IAE9D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACR3B,KAAK,EAAEO,cAAc;QACrBQ,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBa,UAAU,EAAER;MAChB,CAAC;MACDS,cAAc,EAAE9B;IACpB,CAAC,CAAC;IAEF,OAAO0B,MAAM,CAACK,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAtC,aAAA,GAAAA,aAAA","ignoreList":[]}
@@ -0,0 +1,9 @@
import type { FbForm } from "@webiny/api-form-builder/types";
import type { File } from "@webiny/api-file-manager/types";
export interface ExportedFormData {
    form: Pick<FbForm, "name" | "status" | "version" | "fields" | "steps" | "settings" | "triggers">;
    files: File[];
}
export declare class FormExporter {
    execute(form: FbForm, exportFormsDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
}
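As declared above, FormExporter takes no constructor arguments. A hedged usage sketch follows; the deep import path, the `exportSingleForm` helper, and the archive key are illustrative assumptions, not part of the package.

```ts
// Illustrative sketch only; import path and archive key are assumptions.
import { FormExporter } from "@webiny/api-page-builder-import-export/export/process/exporters/FormExporter";
import type { FbForm } from "@webiny/api-form-builder/types";

async function exportSingleForm(form: FbForm) {
    const exporter = new FormExporter();

    // Forms are zipped without any associated files (see the implementation below).
    return exporter.execute(form, `export/forms/${form.id}/data.zip`);
}
```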
@@ -0,0 +1,36 @@
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.FormExporter = void 0;
var _zipper = _interopRequireDefault(require("../../zipper"));
class FormExporter {
  async execute(form, exportFormsDataKey) {
    const formData = {
      form: {
        name: form.name,
        status: form.status,
        version: form.version,
        fields: form.fields,
        steps: form.steps,
        settings: form.settings,
        triggers: form.triggers
      }
    };
    const formDataBuffer = Buffer.from(JSON.stringify(formData));
    const zipper = new _zipper.default({
      exportInfo: {
        files: [],
        name: form.name,
        dataBuffer: formDataBuffer
      },
      archiveFileKey: exportFormsDataKey
    });
    return zipper.process();
  }
}
exports.FormExporter = FormExporter;

//# sourceMappingURL=FormExporter.js.map
@@ -0,0 +1 @@
{"version":3,"names":["_zipper","_interopRequireDefault","require","FormExporter","execute","form","exportFormsDataKey","formData","name","status","version","fields","steps","settings","triggers","formDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","files","dataBuffer","archiveFileKey","process","exports"],"sources":["FormExporter.ts"],"sourcesContent":["import Zipper from \"~/export/zipper\";\nimport type { FbForm } from \"@webiny/api-form-builder/types\";\nimport type { File } from \"@webiny/api-file-manager/types\";\n\nexport interface ExportedFormData {\n form: Pick<\n FbForm,\n \"name\" | \"status\" | \"version\" | \"fields\" | \"steps\" | \"settings\" | \"triggers\"\n >;\n files: File[];\n}\n\nexport class FormExporter {\n async execute(form: FbForm, exportFormsDataKey: string) {\n const formData = {\n form: {\n name: form.name,\n status: form.status,\n version: form.version,\n fields: form.fields,\n steps: form.steps,\n settings: form.settings,\n triggers: form.triggers\n }\n };\n const formDataBuffer = Buffer.from(JSON.stringify(formData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [],\n name: form.name,\n dataBuffer: formDataBuffer\n },\n archiveFileKey: exportFormsDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AAYO,MAAMC,YAAY,CAAC;EACtB,MAAMC,OAAOA,CAACC,IAAY,EAAEC,kBAA0B,EAAE;IACpD,MAAMC,QAAQ,GAAG;MACbF,IAAI,EAAE;QACFG,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfC,MAAM,EAAEJ,IAAI,CAACI,MAAM;QACnBC,OAAO,EAAEL,IAAI,CAACK,OAAO;QACrBC,MAAM,EAAEN,IAAI,CAACM,MAAM;QACnBC,KAAK,EAAEP,IAAI,CAACO,KAAK;QACjBC,QAAQ,EAAER,IAAI,CAACQ,QAAQ;QACvBC,QAAQ,EAAET,IAAI,CAACS;MACnB;IACJ,CAAC;IACD,MAAMC,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACZ,QAAQ,CAAC,CAAC;IAE5D,MAAMa,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACRC,KAAK,EAAE,EAAE;QACTf,IAAI,EAAEH,IAAI,CAACG,IAAI;QACfgB,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAEnB;IACpB,CAAC,CAAC;IAEF,OAAOc,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAxB,YAAA,GAAAA,YAAA","ignoreList":[]}
@@ -0,0 +1,18 @@
import type { Page } from "@webiny/api-page-builder/types";
import type { File, FileManagerContext } from "@webiny/api-file-manager/types";
export interface ExportedPageData {
    /**
     * TODO: is it really ok not to export whole page object?
     * What if we add some new properties in the future?
     *
     * Type of the exported page data is now created by removing unnecessary properties from the Page type.
     * This way TS will break if we add new property and forget to handle it in the export/import process.
     */
    page: Omit<Page, "id" | "pid" | "tenant" | "locale" | "editor" | "category" | "createdFrom" | "createdBy" | "createdOn" | "savedOn" | "publishedOn" | "locked" | "ownedBy" | "webinyVersion">;
    files: File[];
}
export declare class PageExporter {
    private fileManager;
    constructor(fileManager: FileManagerContext["fileManager"]);
    execute(page: Page, exportPagesDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
}
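PageExporter mirrors BlockExporter: it is constructed with the file manager and, per the ExportedPageData interface above, zips a single page together with the files referenced by it. A hedged usage sketch, where the deep import path, the `exportSinglePage` helper, and the archive key are assumptions:

```ts
// Illustrative sketch only; import path and archive key are assumptions.
import { PageExporter } from "@webiny/api-page-builder-import-export/export/process/exporters/PageExporter";
import type { FileManagerContext } from "@webiny/api-file-manager/types";
import type { Page } from "@webiny/api-page-builder/types";

async function exportSinglePage(context: FileManagerContext, page: Page) {
    const exporter = new PageExporter(context.fileManager);
    return exporter.execute(page, `export/pages/${page.pid}/data.zip`);
}
```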