@webiny/api-page-builder-import-export 0.0.0-unstable.d16f688daf → 0.0.0-unstable.d7f521b032
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.js +2 -6
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.js +11 -18
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/index.js +14 -9
- package/export/combine/index.js.map +1 -1
- package/export/combine/pagesHandler.js +11 -18
- package/export/combine/pagesHandler.js.map +1 -1
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +99 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.js +18 -32
- package/export/process/blocksHandler.js.map +1 -1
- package/export/process/index.js +14 -9
- package/export/process/index.js.map +1 -1
- package/export/process/pagesHandler.js +19 -34
- package/export/process/pagesHandler.js.map +1 -1
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +166 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/export/s3Stream.js +1 -20
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +7 -1
- package/export/utils.js +57 -32
- package/export/utils.js.map +1 -1
- package/export/zipper.js +29 -41
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.js +19 -37
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/importExportTasks.crud.js +9 -57
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.js +25 -41
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +124 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.js +2 -7
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.js +1 -6
- package/graphql/graphql/blocks.gql.js.map +1 -1
- package/graphql/graphql/importExportTasks.gql.js +1 -6
- package/graphql/graphql/importExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.js +2 -7
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +52 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +2 -6
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.js +0 -5
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +17 -0
- package/graphql/types.js.map +1 -1
- package/import/create/blocksHandler.js +9 -19
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/index.d.ts +2 -1
- package/import/create/index.js +14 -9
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.js +12 -20
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +98 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocksHandler.js +20 -26
- package/import/process/blocksHandler.js.map +1 -1
- package/import/process/index.d.ts +1 -0
- package/import/process/index.js +14 -9
- package/import/process/index.js.map +1 -1
- package/import/process/pagesHandler.js +26 -29
- package/import/process/pagesHandler.js.map +1 -1
- package/import/process/templatesHandler.d.ts +3 -0
- package/import/process/templatesHandler.js +169 -0
- package/import/process/templatesHandler.js.map +1 -0
- package/import/utils.d.ts +8 -1
- package/import/utils.js +103 -137
- package/import/utils.js.map +1 -1
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +24 -24
- package/types.js +0 -5
- package/types.js.map +1 -1
package/client.js
CHANGED
@@ -1,14 +1,11 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.invokeHandlerClient = invokeHandlerClient;
-
 var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
-
 async function invokeHandlerClient({
 context,
 name,
@@ -31,13 +28,12 @@ async function invokeHandlerClient({
 httpMethod: request.method,
 body: request.body,
 headers,
-
 /**
 * Required until type augmentation works correctly.
 */
 cookies: request.cookies
-};
-
+};
+// Invoke handler
 await context.handlerClient.invoke({
 name: name,
 payload: (0, _objectSpread2.default)((0, _objectSpread2.default)({}, payload), invocationArgs),
package/client.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","headers","invocationArgs","httpMethod","method","body","cookies","handlerClient","invoke","await"],"sources":["client.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n /*\n * Prepare \"invocationArgs\", we're hacking our wat here.\n * They are necessary to setup the \"context.pageBuilder\" object among other things in IMPORT_PAGE_FUNCTION\n */\n const { request } = context;\n\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n const headers = {\n ...request.headers,\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n };\n delete headers[\"content-length\"];\n const invocationArgs = {\n httpMethod: request.method,\n body: request.body,\n headers,\n /**\n * Required until type augmentation works correctly.\n */\n cookies: (request as any).cookies\n };\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n ...invocationArgs\n },\n await: false,\n description\n });\n}\n"],"mappings":"
+
{"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","headers","invocationArgs","httpMethod","method","body","cookies","handlerClient","invoke","await"],"sources":["client.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n /*\n * Prepare \"invocationArgs\", we're hacking our wat here.\n * They are necessary to setup the \"context.pageBuilder\" object among other things in IMPORT_PAGE_FUNCTION\n */\n const { request } = context;\n\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n const headers = {\n ...request.headers,\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n };\n delete headers[\"content-length\"];\n const invocationArgs = {\n httpMethod: request.method,\n body: request.body,\n headers,\n /**\n * Required until type augmentation works correctly.\n */\n cookies: (request as any).cookies\n };\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n ...invocationArgs\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;;;AASO,eAAeA,mBAAmB,CAAU;EAC/CC,OAAO;EACPC,IAAI;EACJC,OAAO;EACPC;AACgC,CAAC,EAAE;EACnC;AACJ;AACA;AACA;EACI,MAAM;IAAEC;EAAQ,CAAC,GAAGJ,OAAO;EAE3B,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAO,CAACC,gBAAgB,EAAE,CAACC,EAAE;EAEtD,MAAMC,OAAO,+DACNL,OAAO,CAACK,OAAO;IAClB,CAAC,UAAU,GAAGL,OAAO,CAACK,OAAO,CAAC,UAAU,CAAC,IAAIJ;EAAQ,EACxD;EACD,OAAOI,OAAO,CAAC,gBAAgB,CAAC;EAChC,MAAMC,cAAc,GAAG;IACnBC,UAAU,EAAEP,OAAO,CAACQ,MAAM;IAC1BC,IAAI,EAAET,OAAO,CAACS,IAAI;IAClBJ,OAAO;IACP;AACR;AACA;IACQK,OAAO,EAAGV,OAAO,CAASU;EAC9B,CAAC;EACD;EACA,MAAMd,OAAO,CAACe,aAAa,CAACC,MAAM,CAAgB;IAC9Cf,IAAI,EAAEA,IAAI;IACVC,OAAO,8DACAA,OAAO,GACPQ,cAAc,CACpB;IACDO,KAAK,EAAE,KAAK;IACZd;EACJ,CAAC,CAAC;AACN"}
package/export/combine/blocksHandler.js
CHANGED
@@ -4,15 +4,10 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.blocksHandler = void 0;
-
 var _types = require("../../types");
-
 var _s3Stream = require("../s3Stream");
-
 var _zipper = require("../zipper");
-
 var _mockSecurity = require("../../mockSecurity");
-
 /**
 * Handles the export blocks combine workflow.
 */
@@ -27,10 +22,8 @@ const blocksHandler = async (payload, context) => {
 identity
 } = payload;
 (0, _mockSecurity.mockSecurity)(identity, context);
-
 try {
 const task = await pageBuilder.importExportTask.getTask(taskId);
-
 if (!task) {
 return {
 data: null,
@@ -39,13 +32,12 @@ const blocksHandler = async (payload, context) => {
 }
 };
 }
-
 const {
 exportBlocksDataKey
-} = task.input;
+} = task.input;
 
+// Get all files (zip) from given key
 const listObjectResponse = await _s3Stream.s3Stream.listObject(exportBlocksDataKey);
-
 if (!listObjectResponse.Contents) {
 return {
 data: null,
@@ -54,14 +46,16 @@ const blocksHandler = async (payload, context) => {
 }
 };
 }
+const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportBlocksDataKey).map(file => file.Key).filter(Boolean);
 
-
-
-const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_BLOCK_EXPORT.zip"); // Upload
+// Prepare zip of all zips
+const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_BLOCK_EXPORT.zip");
 
+// Upload
 const blockExportUpload = await zipOfZip.process();
-log(`Done uploading... File is located at ${blockExportUpload.Location} `);
+log(`Done uploading... File is located at ${blockExportUpload.Location} `);
 
+// Update task status and save export blocks data key
 await pageBuilder.importExportTask.updateTask(taskId, {
 status: _types.ImportExportTaskStatus.COMPLETED,
 data: {
@@ -69,18 +63,19 @@ const blocksHandler = async (payload, context) => {
 key: blockExportUpload.Key,
 url: _s3Stream.s3Stream.getPresignedUrl(blockExportUpload.Key)
 }
-});
+});
 
+// Remove individual zip files from storage
 const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
 await Promise.all(deleteFilePromises);
 log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
 } catch (e) {
 log("[EXPORT_BLOCKS_COMBINE] Error => ", e.message);
+
 /**
 * In case of error, we'll update the task status to "failed",
 * so that, client can show notify the user appropriately.
 */
-
 await pageBuilder.importExportTask.updateTask(taskId, {
 status: _types.ImportExportTaskStatus.FAILED,
 error: {
@@ -96,11 +91,9 @@ const blocksHandler = async (payload, context) => {
 }
 };
 }
-
 return {
 data: "",
 error: null
 };
 };
-
 exports.blocksHandler = blocksHandler;
package/export/combine/blocksHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":"
+
{"version":3,"names":["blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,aAAa,GAAG,OACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBAAmB,CAC3C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IAClD3B,GAAG,CAAE,wCAAuC0B,iBAAiB,CAACE,QAAS,GAAE,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,gCAA+B;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,iBAAiB,CAACL,GAAG;MACvD;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,mCAAmC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEnD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
|
package/export/combine/index.js
CHANGED
@@ -4,13 +4,10 @@ Object.defineProperty(exports, "__esModule", {
|
|
4
4
|
value: true
|
5
5
|
});
|
6
6
|
exports.default = void 0;
|
7
|
-
|
8
7
|
var _handlerAws = require("@webiny/handler-aws");
|
9
|
-
|
10
8
|
var _blocksHandler = require("./blocksHandler");
|
11
|
-
|
12
9
|
var _pagesHandler = require("./pagesHandler");
|
13
|
-
|
10
|
+
var _templatesHandler = require("./templatesHandler");
|
14
11
|
/**
|
15
12
|
* Handles the export pages combine workflow.
|
16
13
|
*/
|
@@ -19,12 +16,20 @@ var _default = () => {
|
|
19
16
|
payload,
|
20
17
|
context
|
21
18
|
}) => {
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
19
|
+
switch (payload.type) {
|
20
|
+
case "block":
|
21
|
+
{
|
22
|
+
return await (0, _blocksHandler.blocksHandler)(payload, context);
|
23
|
+
}
|
24
|
+
case "template":
|
25
|
+
{
|
26
|
+
return await (0, _templatesHandler.templatesHandler)(payload, context);
|
27
|
+
}
|
28
|
+
default:
|
29
|
+
{
|
30
|
+
return await (0, _pagesHandler.pagesHandler)(payload, context);
|
31
|
+
}
|
26
32
|
}
|
27
33
|
});
|
28
34
|
};
|
29
|
-
|
30
35
|
exports.default = _default;
|
package/export/combine/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["createRawEventHandler","payload","context","type","blocksHandler","pagesHandler"],"sources":["index.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/types\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { blocksHandler } from \"~/export/combine/blocksHandler\";\nimport { pagesHandler } from \"~/export/combine/pagesHandler\";\n\nexport interface Payload {\n taskId: string;\n type: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbImportExportContext, Response>(\n async ({ payload, context }) => {\n
+
{"version":3,"names":["createRawEventHandler","payload","context","type","blocksHandler","templatesHandler","pagesHandler"],"sources":["index.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/types\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { blocksHandler } from \"~/export/combine/blocksHandler\";\nimport { pagesHandler } from \"~/export/combine/pagesHandler\";\nimport { templatesHandler } from \"~/export/combine/templatesHandler\";\n\nexport interface Payload {\n taskId: string;\n type: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbImportExportContext, Response>(\n async ({ payload, context }) => {\n switch (payload.type) {\n case \"block\": {\n return await blocksHandler(payload, context);\n }\n case \"template\": {\n return await templatesHandler(payload, context);\n }\n default: {\n return await pagesHandler(payload, context);\n }\n }\n }\n );\n};\n"],"mappings":";;;;;;AAEA;AACA;AACA;AACA;AAaA;AACA;AACA;AAFA,eAGe,MAAM;EACjB,OAAO,IAAAA,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAQ,CAAC,KAAK;IAC5B,QAAQD,OAAO,CAACE,IAAI;MAChB,KAAK,OAAO;QAAE;UACV,OAAO,MAAM,IAAAC,4BAAa,EAACH,OAAO,EAAEC,OAAO,CAAC;QAChD;MACA,KAAK,UAAU;QAAE;UACb,OAAO,MAAM,IAAAG,kCAAgB,EAACJ,OAAO,EAAEC,OAAO,CAAC;QACnD;MACA;QAAS;UACL,OAAO,MAAM,IAAAI,0BAAY,EAACL,OAAO,EAAEC,OAAO,CAAC;QAC/C;IAAC;EAET,CAAC,CACJ;AACL,CAAC;AAAA"}
package/export/combine/pagesHandler.js
CHANGED
@@ -4,15 +4,10 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.pagesHandler = void 0;
-
 var _types = require("../../types");
-
 var _s3Stream = require("../s3Stream");
-
 var _zipper = require("../zipper");
-
 var _mockSecurity = require("../../mockSecurity");
-
 /**
 * Handles the export pages combine workflow.
 */
@@ -27,10 +22,8 @@ const pagesHandler = async (payload, context) => {
 identity
 } = payload;
 (0, _mockSecurity.mockSecurity)(identity, context);
-
 try {
 const task = await pageBuilder.importExportTask.getTask(taskId);
-
 if (!task) {
 return {
 data: null,
@@ -39,13 +32,12 @@ const pagesHandler = async (payload, context) => {
 }
 };
 }
-
 const {
 exportPagesDataKey
-} = task.input;
+} = task.input;
 
+// Get all files (zip) from given key
 const listObjectResponse = await _s3Stream.s3Stream.listObject(exportPagesDataKey);
-
 if (!listObjectResponse.Contents) {
 return {
 data: null,
@@ -54,14 +46,16 @@ const pagesHandler = async (payload, context) => {
 }
 };
 }
+const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportPagesDataKey).map(file => file.Key).filter(Boolean);
 
-
-
-const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_PAGE_EXPORT.zip"); // Upload
+// Prepare zip of all zips
+const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_PAGE_EXPORT.zip");
 
+// Upload
 const pageExportUpload = await zipOfZip.process();
-log(`Done uploading... File is located at ${pageExportUpload.Location} `);
+log(`Done uploading... File is located at ${pageExportUpload.Location} `);
 
+// Update task status and save export page data key
 await pageBuilder.importExportTask.updateTask(taskId, {
 status: _types.ImportExportTaskStatus.COMPLETED,
 data: {
@@ -69,18 +63,19 @@ const pagesHandler = async (payload, context) => {
 key: pageExportUpload.Key,
 url: _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
 }
-});
+});
 
+// Remove individual zip files from storage
 const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
 await Promise.all(deleteFilePromises);
 log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
 } catch (e) {
 log("[EXPORT_PAGES_COMBINE] Error => ", e.message);
+
 /**
 * In case of error, we'll update the task status to "failed",
 * so that, client can show notify the user appropriately.
 */
-
 await pageBuilder.importExportTask.updateTask(taskId, {
 status: _types.ImportExportTaskStatus.FAILED,
 error: {
@@ -96,11 +91,9 @@ const pagesHandler = async (payload, context) => {
 }
 };
 }
-
 return {
 data: "",
 error: null
 };
 };
-
 exports.pagesHandler = pagesHandler;
package/export/combine/pagesHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["pagesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export pages combine workflow.\n */\nexport const pagesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_PAGE_EXPORT.zip\");\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":"
+
{"version":3,"names":["pagesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export pages combine workflow.\n */\nexport const pagesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_PAGE_EXPORT.zip\");\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,YAAY,GAAG,OACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBAAkB,CAC1C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MACtD;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
|
package/export/combine/templatesHandler.js
ADDED
@@ -0,0 +1,99 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+value: true
+});
+exports.templatesHandler = void 0;
+var _types = require("../../types");
+var _s3Stream = require("../s3Stream");
+var _zipper = require("../zipper");
+var _mockSecurity = require("../../mockSecurity");
+/**
+* Handles the export templates combine workflow.
+*/
+const templatesHandler = async (payload, context) => {
+const log = console.log;
+log("RUNNING Export Templates Combine Handler");
+const {
+pageBuilder
+} = context;
+const {
+taskId,
+identity
+} = payload;
+(0, _mockSecurity.mockSecurity)(identity, context);
+try {
+const task = await pageBuilder.importExportTask.getTask(taskId);
+if (!task) {
+return {
+data: null,
+error: {
+message: `There is no task with ID "${taskId}".`
+}
+};
+}
+const {
+exportTemplatesDataKey
+} = task.input;
+
+// Get all files (zip) from given key
+const listObjectResponse = await _s3Stream.s3Stream.listObject(exportTemplatesDataKey);
+if (!listObjectResponse.Contents) {
+return {
+data: null,
+error: {
+message: "There is no Contents defined on S3 Stream while combining templates."
+}
+};
+}
+const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportTemplatesDataKey).map(file => file.Key).filter(Boolean);
+
+// Prepare zip of all zips
+const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_TEMPLATE_EXPORT.zip");
+
+// Upload
+const templateExportUpload = await zipOfZip.process();
+log(`Done uploading... File is located at ${templateExportUpload.Location} `);
+
+// Update task status and save export templates data key
+await pageBuilder.importExportTask.updateTask(taskId, {
+status: _types.ImportExportTaskStatus.COMPLETED,
+data: {
+message: `Finish uploading template export.`,
+key: templateExportUpload.Key,
+url: _s3Stream.s3Stream.getPresignedUrl(templateExportUpload.Key)
+}
+});
+
+// Remove individual zip files from storage
+const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
+await Promise.all(deleteFilePromises);
+log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
+} catch (e) {
+log("[EXPORT_TEMPLATES_COMBINE] Error => ", e.message);
+
+/**
+* In case of error, we'll update the task status to "failed",
+* so that, client can show notify the user appropriately.
+*/
+await pageBuilder.importExportTask.updateTask(taskId, {
+status: _types.ImportExportTaskStatus.FAILED,
+error: {
+name: e.name,
+message: e.message,
+code: "EXPORT_FAILED"
+}
+});
+return {
+data: null,
+error: {
+message: e.message
+}
+};
+}
+return {
+data: "",
+error: null
+};
+};
+exports.templatesHandler = templatesHandler;
package/export/combine/templatesHandler.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"names":["templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,gBAAgB,GAAG,OAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBAAsB,CAC9C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IACrD3B,GAAG,CAAE,wCAAuC0B,oBAAoB,CAACE,QAAS,GAAE,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,mCAAkC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,oBAAoB,CAACL,GAAG;MAC1D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,sCAAsC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEtD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
|
@@ -4,19 +4,12 @@ Object.defineProperty(exports, "__esModule", {
|
|
4
4
|
value: true
|
5
5
|
});
|
6
6
|
exports.blocksHandler = void 0;
|
7
|
-
|
8
7
|
var _types = require("../../types");
|
9
|
-
|
10
8
|
var _client = require("../../client");
|
11
|
-
|
12
9
|
var _handlerGraphql = require("@webiny/handler-graphql");
|
13
|
-
|
14
10
|
var _utils = require("../utils");
|
15
|
-
|
16
11
|
var _mockSecurity = require("../../mockSecurity");
|
17
|
-
|
18
12
|
var _utils2 = require("@webiny/utils");
|
19
|
-
|
20
13
|
/**
|
21
14
|
* Handles the export blocks process workflow.
|
22
15
|
*/
|
@@ -35,11 +28,10 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
35
28
|
subTaskIndex,
|
36
29
|
type,
|
37
30
|
identity
|
38
|
-
} = payload;
|
31
|
+
} = payload;
|
32
|
+
// Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
|
39
33
|
// and this Lambda is invoked internally, without credentials.
|
40
|
-
|
41
34
|
(0, _mockSecurity.mockSecurity)(identity, context);
|
42
|
-
|
43
35
|
try {
|
44
36
|
/*
|
45
37
|
* Note: We're not going to DB for finding the next sub-task to process,
|
@@ -50,7 +42,6 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
50
42
|
* Base condition!!
|
51
43
|
* Bail out early, if task not found or task's status is not "pending".
|
52
44
|
*/
|
53
|
-
|
54
45
|
if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
|
55
46
|
noPendingTask = true;
|
56
47
|
return {
|
@@ -60,7 +51,6 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
60
51
|
} else {
|
61
52
|
noPendingTask = false;
|
62
53
|
}
|
63
|
-
|
64
54
|
log(`Fetched sub task => ${subTask.id}`);
|
65
55
|
const {
|
66
56
|
input
|
@@ -70,36 +60,35 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
70
60
|
exportBlocksDataKey
|
71
61
|
} = input;
|
72
62
|
const block = await pageBuilder.getPageBlock(blockId);
|
73
|
-
|
74
63
|
if (!block) {
|
75
64
|
log(`Unable to load block "${blockId}"`);
|
76
65
|
throw new _handlerGraphql.NotFoundError(`Unable to load block "${blockId}"`);
|
77
66
|
}
|
67
|
+
log(`Processing block key "${blockId}"`);
|
78
68
|
|
79
|
-
|
80
|
-
|
69
|
+
// Mark task status as PROCESSING
|
81
70
|
subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
|
82
71
|
status: _types.ImportExportTaskStatus.PROCESSING
|
83
|
-
});
|
84
|
-
|
72
|
+
});
|
73
|
+
// Update stats in main task
|
85
74
|
await pageBuilder.importExportTask.updateStats(taskId, {
|
86
75
|
prevStatus: prevStatusOfSubTask,
|
87
76
|
nextStatus: _types.ImportExportTaskStatus.PROCESSING
|
88
77
|
});
|
89
78
|
prevStatusOfSubTask = subTask.status;
|
90
|
-
log(`Extracting block data and uploading to storage...`);
|
91
|
-
|
79
|
+
log(`Extracting block data and uploading to storage...`);
|
80
|
+
// Extract Block
|
92
81
|
const blockDataZip = await (0, _utils.exportBlock)(block, exportBlocksDataKey, fileManager);
|
93
|
-
log(`Finish uploading zip...`);
|
94
|
-
|
82
|
+
log(`Finish uploading zip...`);
|
83
|
+
// Update task record in DB
|
95
84
|
subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
|
96
85
|
status: _types.ImportExportTaskStatus.COMPLETED,
|
97
86
|
data: {
|
98
87
|
message: `Finish uploading data for block "${block.id}"`,
|
99
88
|
key: blockDataZip.Key
|
100
89
|
}
|
101
|
-
});
|
102
|
-
|
90
|
+
});
|
91
|
+
// Update stats in main task
|
103
92
|
await pageBuilder.importExportTask.updateStats(taskId, {
|
104
93
|
prevStatus: prevStatusOfSubTask,
|
105
94
|
nextStatus: _types.ImportExportTaskStatus.COMPLETED
|
@@ -107,7 +96,6 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
107
96
|
prevStatusOfSubTask = subTask.status;
|
108
97
|
} catch (e) {
|
109
98
|
log("[EXPORT_BLOCKS_PROCESS] Error => ", e.message);
|
110
|
-
|
111
99
|
if (subTask && subTask.id) {
|
112
100
|
/**
|
113
101
|
* In case of error, we'll update the task status to "failed",
|
@@ -120,15 +108,15 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
120
108
|
message: e.message,
|
121
109
|
code: "EXPORT_FAILED"
|
122
110
|
}
|
123
|
-
});
|
111
|
+
});
|
124
112
|
|
113
|
+
// Update stats in main task
|
125
114
|
await pageBuilder.importExportTask.updateStats(taskId, {
|
126
115
|
prevStatus: prevStatusOfSubTask,
|
127
116
|
nextStatus: _types.ImportExportTaskStatus.FAILED
|
128
117
|
});
|
129
118
|
prevStatusOfSubTask = subTask.status;
|
130
119
|
}
|
131
|
-
|
132
120
|
return {
|
133
121
|
data: null,
|
134
122
|
error: {
|
@@ -138,8 +126,8 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
138
126
|
} finally {
|
139
127
|
// Base condition!
|
140
128
|
if (noPendingTask) {
|
141
|
-
log(`No pending sub-task for task ${taskId}`);
|
142
|
-
|
129
|
+
log(`No pending sub-task for task ${taskId}`);
|
130
|
+
// Combine individual block zip files.
|
143
131
|
await (0, _client.invokeHandlerClient)({
|
144
132
|
context,
|
145
133
|
name: configuration.handlers.combine,
|
@@ -151,8 +139,8 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
151
139
|
description: "Export blocks - combine"
|
152
140
|
});
|
153
141
|
} else {
|
154
|
-
console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
|
155
|
-
|
142
|
+
console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
|
143
|
+
// We want to continue with Self invocation no matter if current block error out.
|
156
144
|
await (0, _client.invokeHandlerClient)({
|
157
145
|
context,
|
158
146
|
name: configuration.handlers.process,
|
@@ -166,11 +154,9 @@ const blocksHandler = async (configuration, payload, context) => {
|
|
166
154
|
});
|
167
155
|
}
|
168
156
|
}
|
169
|
-
|
170
157
|
return {
|
171
158
|
data: "",
|
172
159
|
error: null
|
173
160
|
};
|
174
161
|
};
|
175
|
-
|
176
162
|
exports.blocksHandler = blocksHandler;
|