@webiny/api-page-builder-import-export 0.0.0-unstable.78f581c1d2 → 0.0.0-unstable.7f63ea0744

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. package/client.d.ts +2 -2
  2. package/client.js +2 -6
  3. package/client.js.map +1 -1
  4. package/export/combine/blocksHandler.d.ts +6 -0
  5. package/export/combine/blocksHandler.js +99 -0
  6. package/export/combine/blocksHandler.js.map +1 -0
  7. package/{exportPages → export}/combine/index.d.ts +3 -2
  8. package/export/combine/index.js +35 -0
  9. package/export/combine/index.js.map +1 -0
  10. package/export/combine/pagesHandler.d.ts +6 -0
  11. package/export/combine/pagesHandler.js +99 -0
  12. package/export/combine/pagesHandler.js.map +1 -0
  13. package/export/combine/templatesHandler.d.ts +6 -0
  14. package/export/combine/templatesHandler.js +99 -0
  15. package/export/combine/templatesHandler.js.map +1 -0
  16. package/export/process/blocksHandler.d.ts +6 -0
  17. package/export/process/blocksHandler.js +162 -0
  18. package/export/process/blocksHandler.js.map +1 -0
  19. package/{exportPages → export}/process/index.d.ts +4 -6
  20. package/export/process/index.js +32 -0
  21. package/export/process/index.js.map +1 -0
  22. package/export/process/pagesHandler.d.ts +6 -0
  23. package/export/process/pagesHandler.js +189 -0
  24. package/export/process/pagesHandler.js.map +1 -0
  25. package/export/process/templatesHandler.d.ts +6 -0
  26. package/export/process/templatesHandler.js +162 -0
  27. package/export/process/templatesHandler.js.map +1 -0
  28. package/{exportPages → export}/s3Stream.d.ts +0 -0
  29. package/{exportPages → export}/s3Stream.js +1 -20
  30. package/{exportPages → export}/s3Stream.js.map +1 -1
  31. package/export/utils.d.ts +22 -0
  32. package/export/utils.js +157 -0
  33. package/export/utils.js.map +1 -0
  34. package/{exportPages → export}/zipper.d.ts +6 -5
  35. package/{exportPages → export}/zipper.js +37 -48
  36. package/export/zipper.js.map +1 -0
  37. package/graphql/crud/blocks.crud.d.ts +4 -0
  38. package/graphql/crud/blocks.crud.js +137 -0
  39. package/graphql/crud/blocks.crud.js.map +1 -0
  40. package/graphql/crud/importExportTasks.crud.d.ts +5 -0
  41. package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +57 -105
  42. package/graphql/crud/importExportTasks.crud.js.map +1 -0
  43. package/graphql/crud/pages.crud.d.ts +2 -2
  44. package/graphql/crud/pages.crud.js +39 -53
  45. package/graphql/crud/pages.crud.js.map +1 -1
  46. package/graphql/crud/templates.crud.d.ts +4 -0
  47. package/graphql/crud/templates.crud.js +124 -0
  48. package/graphql/crud/templates.crud.js.map +1 -0
  49. package/graphql/crud.d.ts +2 -2
  50. package/graphql/crud.js +4 -7
  51. package/graphql/crud.js.map +1 -1
  52. package/graphql/graphql/blocks.gql.d.ts +4 -0
  53. package/graphql/graphql/blocks.gql.js +52 -0
  54. package/graphql/graphql/blocks.gql.js.map +1 -0
  55. package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
  56. package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +18 -23
  57. package/graphql/graphql/importExportTasks.gql.js.map +1 -0
  58. package/graphql/graphql/pages.gql.d.ts +2 -2
  59. package/graphql/graphql/pages.gql.js +4 -15
  60. package/graphql/graphql/pages.gql.js.map +1 -1
  61. package/graphql/graphql/templates.gql.d.ts +4 -0
  62. package/graphql/graphql/templates.gql.js +52 -0
  63. package/graphql/graphql/templates.gql.js.map +1 -0
  64. package/graphql/graphql/utils/resolve.d.ts +1 -1
  65. package/graphql/graphql/utils/resolve.js +0 -3
  66. package/graphql/graphql/utils/resolve.js.map +1 -1
  67. package/graphql/graphql.js +4 -6
  68. package/graphql/graphql.js.map +1 -1
  69. package/graphql/index.d.ts +2 -2
  70. package/graphql/index.js +0 -5
  71. package/graphql/index.js.map +1 -1
  72. package/graphql/types.d.ts +60 -23
  73. package/graphql/types.js.map +1 -1
  74. package/import/create/blocksHandler.d.ts +3 -0
  75. package/import/create/blocksHandler.js +100 -0
  76. package/import/create/blocksHandler.js.map +1 -0
  77. package/{importPages → import}/create/index.d.ts +7 -5
  78. package/import/create/index.js +35 -0
  79. package/import/create/index.js.map +1 -0
  80. package/import/create/pagesHandler.d.ts +3 -0
  81. package/import/create/pagesHandler.js +102 -0
  82. package/import/create/pagesHandler.js.map +1 -0
  83. package/import/create/templatesHandler.d.ts +3 -0
  84. package/import/create/templatesHandler.js +98 -0
  85. package/import/create/templatesHandler.js.map +1 -0
  86. package/import/process/blocksHandler.d.ts +3 -0
  87. package/import/process/blocksHandler.js +169 -0
  88. package/import/process/blocksHandler.js.map +1 -0
  89. package/{importPages → import}/process/index.d.ts +5 -3
  90. package/import/process/index.js +32 -0
  91. package/import/process/index.js.map +1 -0
  92. package/import/process/pagesHandler.d.ts +3 -0
  93. package/import/process/pagesHandler.js +177 -0
  94. package/import/process/pagesHandler.js.map +1 -0
  95. package/import/process/templatesHandler.d.ts +3 -0
  96. package/import/process/templatesHandler.js +166 -0
  97. package/import/process/templatesHandler.js.map +1 -0
  98. package/import/utils.d.ts +56 -0
  99. package/{importPages → import}/utils.js +193 -156
  100. package/import/utils.js.map +1 -0
  101. package/mockSecurity.js +0 -2
  102. package/mockSecurity.js.map +1 -1
  103. package/package.json +24 -24
  104. package/types.d.ts +62 -65
  105. package/types.js +17 -22
  106. package/types.js.map +1 -1
  107. package/exportPages/combine/index.js +0 -114
  108. package/exportPages/combine/index.js.map +0 -1
  109. package/exportPages/process/index.js +0 -208
  110. package/exportPages/process/index.js.map +0 -1
  111. package/exportPages/utils.d.ts +0 -13
  112. package/exportPages/utils.js +0 -113
  113. package/exportPages/utils.js.map +0 -1
  114. package/exportPages/zipper.js.map +0 -1
  115. package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
  116. package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
  117. package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
  118. package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
  119. package/importPages/create/index.js +0 -118
  120. package/importPages/create/index.js.map +0 -1
  121. package/importPages/process/index.js +0 -185
  122. package/importPages/process/index.js.map +0 -1
  123. package/importPages/utils.d.ts +0 -50
  124. package/importPages/utils.js.map +0 -1
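The renames above drop the page-specific wording: exportPages/importPages become export/import, pageImportExportTasks becomes importExportTasks, and the context type PbPageImportExportContext becomes PbImportExportContext. As a rough orientation for consumers, here is a minimal TypeScript sketch of the corresponding import and task-CRUD changes; the deep import path and the getTaskStatus helper are illustrative assumptions, while PbImportExportContext and pageBuilder.importExportTask.getTask are taken from the diffs below.

import { PbImportExportContext } from "@webiny/api-page-builder-import-export/graphql/types";
// Previously: import { PbPageImportExportContext } from "@webiny/api-page-builder-import-export/graphql/types";

// Hypothetical helper showing the renamed task CRUD namespace on the context.
export async function getTaskStatus(context: PbImportExportContext, taskId: string) {
    // "importExportTask" replaces the earlier page-specific task namespace.
    const task = await context.pageBuilder.importExportTask.getTask(taskId);
    return task ? task.status : null;
}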
package/client.d.ts CHANGED
@@ -1,6 +1,6 @@
1
- import { PbPageImportExportContext } from "./graphql/types";
1
+ import { PbImportExportContext } from "./graphql/types";
2
2
  export interface InvokeHandlerClientParams<TParams> {
3
- context: PbPageImportExportContext;
3
+ context: PbImportExportContext;
4
4
  name: string;
5
5
  payload: TParams;
6
6
  description: string;
package/client.js CHANGED
@@ -1,14 +1,11 @@
1
1
  "use strict";
2
2
 
3
3
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
4
-
5
4
  Object.defineProperty(exports, "__esModule", {
6
5
  value: true
7
6
  });
8
7
  exports.invokeHandlerClient = invokeHandlerClient;
9
-
10
8
  var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
11
-
12
9
  async function invokeHandlerClient({
13
10
  context,
14
11
  name,
@@ -31,13 +28,12 @@ async function invokeHandlerClient({
31
28
  httpMethod: request.method,
32
29
  body: request.body,
33
30
  headers,
34
-
35
31
  /**
36
32
  * Required until type augmentation works correctly.
37
33
  */
38
34
  cookies: request.cookies
39
- }; // Invoke handler
40
-
35
+ };
36
+ // Invoke handler
41
37
  await context.handlerClient.invoke({
42
38
  name: name,
43
39
  payload: (0, _objectSpread2.default)((0, _objectSpread2.default)({}, payload), invocationArgs),
package/client.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","headers","invocationArgs","httpMethod","method","body","cookies","handlerClient","invoke","await"],"sources":["client.ts"],"sourcesContent":["import { PbPageImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbPageImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n /*\n * Prepare \"invocationArgs\", we're hacking our wat here.\n * They are necessary to setup the \"context.pageBuilder\" object among other things in IMPORT_PAGE_FUNCTION\n */\n const { request } = context;\n\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n const headers = {\n ...request.headers,\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n };\n delete headers[\"content-length\"];\n const invocationArgs = {\n httpMethod: request.method,\n body: request.body,\n headers,\n /**\n * Required until type augmentation works correctly.\n */\n cookies: (request as any).cookies\n };\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n ...invocationArgs\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;;;;;;AASO,eAAeA,mBAAf,CAA4C;EAC/CC,OAD+C;EAE/CC,IAF+C;EAG/CC,OAH+C;EAI/CC;AAJ+C,CAA5C,EAKgC;EACnC;AACJ;AACA;AACA;EACI,MAAM;IAAEC;EAAF,IAAcJ,OAApB;EAEA,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAR,CAAgBC,gBAAhB,GAAmCC,EAApD;EAEA,MAAMC,OAAO,+DACNL,OAAO,CAACK,OADF;IAET,CAAC,UAAD,GAAcL,OAAO,CAACK,OAAR,CAAgB,UAAhB,KAA+BJ;EAFpC,EAAb;EAIA,OAAOI,OAAO,CAAC,gBAAD,CAAd;EACA,MAAMC,cAAc,GAAG;IACnBC,UAAU,EAAEP,OAAO,CAACQ,MADD;IAEnBC,IAAI,EAAET,OAAO,CAACS,IAFK;IAGnBJ,OAHmB;;IAInB;AACR;AACA;IACQK,OAAO,EAAGV,OAAD,CAAiBU;EAPP,CAAvB,CAdmC,CAuBnC;;EACA,MAAMd,OAAO,CAACe,aAAR,CAAsBC,MAAtB,CAA4C;IAC9Cf,IAAI,EAAEA,IADwC;IAE9CC,OAAO,8DACAA,OADA,GAEAQ,cAFA,CAFuC;IAM9CO,KAAK,EAAE,KANuC;IAO9Cd;EAP8C,CAA5C,CAAN;AASH"}
1
+ {"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","headers","invocationArgs","httpMethod","method","body","cookies","handlerClient","invoke","await"],"sources":["client.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n /*\n * Prepare \"invocationArgs\", we're hacking our wat here.\n * They are necessary to setup the \"context.pageBuilder\" object among other things in IMPORT_PAGE_FUNCTION\n */\n const { request } = context;\n\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n const headers = {\n ...request.headers,\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n };\n delete headers[\"content-length\"];\n const invocationArgs = {\n httpMethod: request.method,\n body: request.body,\n headers,\n /**\n * Required until type augmentation works correctly.\n */\n cookies: (request as any).cookies\n };\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n ...invocationArgs\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;;;AASO,eAAeA,mBAAmB,CAAU;EAC/CC,OAAO;EACPC,IAAI;EACJC,OAAO;EACPC;AACgC,CAAC,EAAE;EACnC;AACJ;AACA;AACA;EACI,MAAM;IAAEC;EAAQ,CAAC,GAAGJ,OAAO;EAE3B,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAO,CAACC,gBAAgB,EAAE,CAACC,EAAE;EAEtD,MAAMC,OAAO,+DACNL,OAAO,CAACK,OAAO;IAClB,CAAC,UAAU,GAAGL,OAAO,CAACK,OAAO,CAAC,UAAU,CAAC,IAAIJ;EAAQ,EACxD;EACD,OAAOI,OAAO,CAAC,gBAAgB,CAAC;EAChC,MAAMC,cAAc,GAAG;IACnBC,UAAU,EAAEP,OAAO,CAACQ,MAAM;IAC1BC,IAAI,EAAET,OAAO,CAACS,IAAI;IAClBJ,OAAO;IACP;AACR;AACA;IACQK,OAAO,EAAGV,OAAO,CAASU;EAC9B,CAAC;EACD;EACA,MAAMd,OAAO,CAACe,aAAa,CAACC,MAAM,CAAgB;IAC9Cf,IAAI,EAAEA,IAAI;IACVC,OAAO,8DACAA,OAAO,GACPQ,cAAc,CACpB;IACDO,KAAK,EAAE,KAAK;IACZd;EACJ,CAAC,CAAC;AACN"}
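The cleaned-up client above keeps the same call shape: invokeHandlerClient({ context, name, payload, description }) forwards the current request's method, body, headers (with x-tenant defaulted to the current tenant) and cookies to another handler function. Below is a minimal caller sketch; the "export-combine-handler" name and the ExportCombinePayload interface are illustrative assumptions, not part of the package.

import { invokeHandlerClient } from "@webiny/api-page-builder-import-export/client";
import { PbImportExportContext } from "@webiny/api-page-builder-import-export/graphql/types";
import { SecurityIdentity } from "@webiny/api-security/types";

interface ExportCombinePayload {
    taskId: string;
    type: string;
    identity: SecurityIdentity;
}

export const startCombine = async (context: PbImportExportContext, taskId: string) => {
    // "export-combine-handler" is a placeholder function name; real deployments supply
    // the name through configuration, as the process handlers further down do.
    await invokeHandlerClient<ExportCombinePayload>({
        context,
        name: "export-combine-handler",
        payload: { taskId, type: "block", identity: context.security.getIdentity() },
        description: "Export blocks - combine"
    });
};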
package/export/combine/blocksHandler.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { PbImportExportContext } from "../../types";
2
+ import { Payload, Response } from ".";
3
+ /**
4
+ * Handles the export blocks combine workflow.
5
+ */
6
+ export declare const blocksHandler: (payload: Payload, context: PbImportExportContext) => Promise<Response>;
package/export/combine/blocksHandler.js ADDED
@@ -0,0 +1,99 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.blocksHandler = void 0;
7
+ var _types = require("../../types");
8
+ var _s3Stream = require("../s3Stream");
9
+ var _zipper = require("../zipper");
10
+ var _mockSecurity = require("../../mockSecurity");
11
+ /**
12
+ * Handles the export blocks combine workflow.
13
+ */
14
+ const blocksHandler = async (payload, context) => {
15
+ const log = console.log;
16
+ log("RUNNING Export Blocks Combine Handler");
17
+ const {
18
+ pageBuilder
19
+ } = context;
20
+ const {
21
+ taskId,
22
+ identity
23
+ } = payload;
24
+ (0, _mockSecurity.mockSecurity)(identity, context);
25
+ try {
26
+ const task = await pageBuilder.importExportTask.getTask(taskId);
27
+ if (!task) {
28
+ return {
29
+ data: null,
30
+ error: {
31
+ message: `There is no task with ID "${taskId}".`
32
+ }
33
+ };
34
+ }
35
+ const {
36
+ exportBlocksDataKey
37
+ } = task.input;
38
+
39
+ // Get all files (zip) from given key
40
+ const listObjectResponse = await _s3Stream.s3Stream.listObject(exportBlocksDataKey);
41
+ if (!listObjectResponse.Contents) {
42
+ return {
43
+ data: null,
44
+ error: {
45
+ message: "There is no Contents defined on S3 Stream while combining blocks."
46
+ }
47
+ };
48
+ }
49
+ const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportBlocksDataKey).map(file => file.Key).filter(Boolean);
50
+
51
+ // Prepare zip of all zips
52
+ const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_BLOCK_EXPORT.zip");
53
+
54
+ // Upload
55
+ const blockExportUpload = await zipOfZip.process();
56
+ log(`Done uploading... File is located at ${blockExportUpload.Location} `);
57
+
58
+ // Update task status and save export blocks data key
59
+ await pageBuilder.importExportTask.updateTask(taskId, {
60
+ status: _types.ImportExportTaskStatus.COMPLETED,
61
+ data: {
62
+ message: `Finish uploading block export.`,
63
+ key: blockExportUpload.Key,
64
+ url: _s3Stream.s3Stream.getPresignedUrl(blockExportUpload.Key)
65
+ }
66
+ });
67
+
68
+ // Remove individual zip files from storage
69
+ const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
70
+ await Promise.all(deleteFilePromises);
71
+ log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
72
+ } catch (e) {
73
+ log("[EXPORT_BLOCKS_COMBINE] Error => ", e.message);
74
+
75
+ /**
76
+ * In case of error, we'll update the task status to "failed",
77
+ * so that, client can show notify the user appropriately.
78
+ */
79
+ await pageBuilder.importExportTask.updateTask(taskId, {
80
+ status: _types.ImportExportTaskStatus.FAILED,
81
+ error: {
82
+ name: e.name,
83
+ message: e.message,
84
+ code: "EXPORT_FAILED"
85
+ }
86
+ });
87
+ return {
88
+ data: null,
89
+ error: {
90
+ message: e.message
91
+ }
92
+ };
93
+ }
94
+ return {
95
+ data: "",
96
+ error: null
97
+ };
98
+ };
99
+ exports.blocksHandler = blocksHandler;
package/export/combine/blocksHandler.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":["blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,aAAa,GAAG,OACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBAAmB,CAC3C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IAClD3B,GAAG,CAAE,wCAAuC0B,iBAAiB,CAACE,QAAS,GAAE,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,gCAA+B;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,iBAAiB,CAACL,GAAG;MACvD;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,mCAAmC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEnD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
package/{exportPages → export}/combine/index.d.ts RENAMED
@@ -1,7 +1,8 @@
1
- import { PbPageImportExportContext } from "../../types";
1
+ import { PbImportExportContext } from "../../types";
2
2
  import { SecurityIdentity } from "@webiny/api-security/types";
3
3
  export interface Payload {
4
4
  taskId: string;
5
+ type: string;
5
6
  identity: SecurityIdentity;
6
7
  }
7
8
  export interface Response {
@@ -11,5 +12,5 @@ export interface Response {
11
12
  /**
12
13
  * Handles the export pages combine workflow.
13
14
  */
14
- declare const _default: () => import("@webiny/handler-aws").RawEventHandler<Payload, PbPageImportExportContext, Response>;
15
+ declare const _default: () => import("@webiny/handler-aws").RawEventHandler<Payload, PbImportExportContext, Response>;
15
16
  export default _default;
package/export/combine/index.js ADDED
@@ -0,0 +1,35 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.default = void 0;
7
+ var _handlerAws = require("@webiny/handler-aws");
8
+ var _blocksHandler = require("./blocksHandler");
9
+ var _pagesHandler = require("./pagesHandler");
10
+ var _templatesHandler = require("./templatesHandler");
11
+ /**
12
+ * Handles the export pages combine workflow.
13
+ */
14
+ var _default = () => {
15
+ return (0, _handlerAws.createRawEventHandler)(async ({
16
+ payload,
17
+ context
18
+ }) => {
19
+ switch (payload.type) {
20
+ case "block":
21
+ {
22
+ return await (0, _blocksHandler.blocksHandler)(payload, context);
23
+ }
24
+ case "template":
25
+ {
26
+ return await (0, _templatesHandler.templatesHandler)(payload, context);
27
+ }
28
+ default:
29
+ {
30
+ return await (0, _pagesHandler.pagesHandler)(payload, context);
31
+ }
32
+ }
33
+ });
34
+ };
35
+ exports.default = _default;
package/export/combine/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":["createRawEventHandler","payload","context","type","blocksHandler","templatesHandler","pagesHandler"],"sources":["index.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/types\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\nimport { blocksHandler } from \"~/export/combine/blocksHandler\";\nimport { pagesHandler } from \"~/export/combine/pagesHandler\";\nimport { templatesHandler } from \"~/export/combine/templatesHandler\";\n\nexport interface Payload {\n taskId: string;\n type: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbImportExportContext, Response>(\n async ({ payload, context }) => {\n switch (payload.type) {\n case \"block\": {\n return await blocksHandler(payload, context);\n }\n case \"template\": {\n return await templatesHandler(payload, context);\n }\n default: {\n return await pagesHandler(payload, context);\n }\n }\n }\n );\n};\n"],"mappings":";;;;;;AAEA;AACA;AACA;AACA;AAaA;AACA;AACA;AAFA,eAGe,MAAM;EACjB,OAAO,IAAAA,iCAAqB,EACxB,OAAO;IAAEC,OAAO;IAAEC;EAAQ,CAAC,KAAK;IAC5B,QAAQD,OAAO,CAACE,IAAI;MAChB,KAAK,OAAO;QAAE;UACV,OAAO,MAAM,IAAAC,4BAAa,EAACH,OAAO,EAAEC,OAAO,CAAC;QAChD;MACA,KAAK,UAAU;QAAE;UACb,OAAO,MAAM,IAAAG,kCAAgB,EAACJ,OAAO,EAAEC,OAAO,CAAC;QACnD;MACA;QAAS;UACL,OAAO,MAAM,IAAAI,0BAAY,EAACL,OAAO,EAAEC,OAAO,CAAC;QAC/C;IAAC;EAET,CAAC,CACJ;AACL,CAAC;AAAA"}
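The combine entry point above dispatches on payload.type: "block" and "template" go to their dedicated handlers, and anything else falls through to the original pages handler. The sketch below restates that mapping together with the archive names produced by the three combine handlers in this diff; it is illustrative only and not an export of the package.

import { SecurityIdentity } from "@webiny/api-security/types";

// Shape accepted by the combine handler (see export/combine/index.d.ts above).
interface CombinePayload {
    taskId: string;
    type: string; // "block" | "template" | anything else -> pages
    identity: SecurityIdentity;
}

// Dispatch table mirroring the switch in export/combine/index.js.
export const exportArchiveName = (type: CombinePayload["type"]): string => {
    switch (type) {
        case "block":
            return "WEBINY_BLOCK_EXPORT.zip";
        case "template":
            return "WEBINY_TEMPLATE_EXPORT.zip";
        default:
            return "WEBINY_PAGE_EXPORT.zip";
    }
};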
package/export/combine/pagesHandler.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { PbImportExportContext } from "../../types";
2
+ import { Payload, Response } from ".";
3
+ /**
4
+ * Handles the export pages combine workflow.
5
+ */
6
+ export declare const pagesHandler: (payload: Payload, context: PbImportExportContext) => Promise<Response>;
package/export/combine/pagesHandler.js ADDED
@@ -0,0 +1,99 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.pagesHandler = void 0;
7
+ var _types = require("../../types");
8
+ var _s3Stream = require("../s3Stream");
9
+ var _zipper = require("../zipper");
10
+ var _mockSecurity = require("../../mockSecurity");
11
+ /**
12
+ * Handles the export pages combine workflow.
13
+ */
14
+ const pagesHandler = async (payload, context) => {
15
+ const log = console.log;
16
+ log("RUNNING Export Pages Combine Handler");
17
+ const {
18
+ pageBuilder
19
+ } = context;
20
+ const {
21
+ taskId,
22
+ identity
23
+ } = payload;
24
+ (0, _mockSecurity.mockSecurity)(identity, context);
25
+ try {
26
+ const task = await pageBuilder.importExportTask.getTask(taskId);
27
+ if (!task) {
28
+ return {
29
+ data: null,
30
+ error: {
31
+ message: `There is no task with ID "${taskId}".`
32
+ }
33
+ };
34
+ }
35
+ const {
36
+ exportPagesDataKey
37
+ } = task.input;
38
+
39
+ // Get all files (zip) from given key
40
+ const listObjectResponse = await _s3Stream.s3Stream.listObject(exportPagesDataKey);
41
+ if (!listObjectResponse.Contents) {
42
+ return {
43
+ data: null,
44
+ error: {
45
+ message: "There is no Contents defined on S3 Stream while combining pages."
46
+ }
47
+ };
48
+ }
49
+ const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportPagesDataKey).map(file => file.Key).filter(Boolean);
50
+
51
+ // Prepare zip of all zips
52
+ const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_PAGE_EXPORT.zip");
53
+
54
+ // Upload
55
+ const pageExportUpload = await zipOfZip.process();
56
+ log(`Done uploading... File is located at ${pageExportUpload.Location} `);
57
+
58
+ // Update task status and save export page data key
59
+ await pageBuilder.importExportTask.updateTask(taskId, {
60
+ status: _types.ImportExportTaskStatus.COMPLETED,
61
+ data: {
62
+ message: `Finish uploading page export.`,
63
+ key: pageExportUpload.Key,
64
+ url: _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
65
+ }
66
+ });
67
+
68
+ // Remove individual zip files from storage
69
+ const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
70
+ await Promise.all(deleteFilePromises);
71
+ log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
72
+ } catch (e) {
73
+ log("[EXPORT_PAGES_COMBINE] Error => ", e.message);
74
+
75
+ /**
76
+ * In case of error, we'll update the task status to "failed",
77
+ * so that, client can show notify the user appropriately.
78
+ */
79
+ await pageBuilder.importExportTask.updateTask(taskId, {
80
+ status: _types.ImportExportTaskStatus.FAILED,
81
+ error: {
82
+ name: e.name,
83
+ message: e.message,
84
+ code: "EXPORT_FAILED"
85
+ }
86
+ });
87
+ return {
88
+ data: null,
89
+ error: {
90
+ message: e.message
91
+ }
92
+ };
93
+ }
94
+ return {
95
+ data: "",
96
+ error: null
97
+ };
98
+ };
99
+ exports.pagesHandler = pagesHandler;
package/export/combine/pagesHandler.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":["pagesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export pages combine workflow.\n */\nexport const pagesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_PAGE_EXPORT.zip\");\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,YAAY,GAAG,OACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBAAkB,CAC1C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MACtD;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
package/export/combine/templatesHandler.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { PbImportExportContext } from "../../types";
2
+ import { Payload, Response } from ".";
3
+ /**
4
+ * Handles the export templates combine workflow.
5
+ */
6
+ export declare const templatesHandler: (payload: Payload, context: PbImportExportContext) => Promise<Response>;
package/export/combine/templatesHandler.js ADDED
@@ -0,0 +1,99 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.templatesHandler = void 0;
7
+ var _types = require("../../types");
8
+ var _s3Stream = require("../s3Stream");
9
+ var _zipper = require("../zipper");
10
+ var _mockSecurity = require("../../mockSecurity");
11
+ /**
12
+ * Handles the export templates combine workflow.
13
+ */
14
+ const templatesHandler = async (payload, context) => {
15
+ const log = console.log;
16
+ log("RUNNING Export Templates Combine Handler");
17
+ const {
18
+ pageBuilder
19
+ } = context;
20
+ const {
21
+ taskId,
22
+ identity
23
+ } = payload;
24
+ (0, _mockSecurity.mockSecurity)(identity, context);
25
+ try {
26
+ const task = await pageBuilder.importExportTask.getTask(taskId);
27
+ if (!task) {
28
+ return {
29
+ data: null,
30
+ error: {
31
+ message: `There is no task with ID "${taskId}".`
32
+ }
33
+ };
34
+ }
35
+ const {
36
+ exportTemplatesDataKey
37
+ } = task.input;
38
+
39
+ // Get all files (zip) from given key
40
+ const listObjectResponse = await _s3Stream.s3Stream.listObject(exportTemplatesDataKey);
41
+ if (!listObjectResponse.Contents) {
42
+ return {
43
+ data: null,
44
+ error: {
45
+ message: "There is no Contents defined on S3 Stream while combining templates."
46
+ }
47
+ };
48
+ }
49
+ const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportTemplatesDataKey).map(file => file.Key).filter(Boolean);
50
+
51
+ // Prepare zip of all zips
52
+ const zipOfZip = new _zipper.ZipOfZip(zipFileKeys, "WEBINY_TEMPLATE_EXPORT.zip");
53
+
54
+ // Upload
55
+ const templateExportUpload = await zipOfZip.process();
56
+ log(`Done uploading... File is located at ${templateExportUpload.Location} `);
57
+
58
+ // Update task status and save export templates data key
59
+ await pageBuilder.importExportTask.updateTask(taskId, {
60
+ status: _types.ImportExportTaskStatus.COMPLETED,
61
+ data: {
62
+ message: `Finish uploading template export.`,
63
+ key: templateExportUpload.Key,
64
+ url: _s3Stream.s3Stream.getPresignedUrl(templateExportUpload.Key)
65
+ }
66
+ });
67
+
68
+ // Remove individual zip files from storage
69
+ const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
70
+ await Promise.all(deleteFilePromises);
71
+ log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
72
+ } catch (e) {
73
+ log("[EXPORT_TEMPLATES_COMBINE] Error => ", e.message);
74
+
75
+ /**
76
+ * In case of error, we'll update the task status to "failed",
77
+ * so that, client can show notify the user appropriately.
78
+ */
79
+ await pageBuilder.importExportTask.updateTask(taskId, {
80
+ status: _types.ImportExportTaskStatus.FAILED,
81
+ error: {
82
+ name: e.name,
83
+ message: e.message,
84
+ code: "EXPORT_FAILED"
85
+ }
86
+ });
87
+ return {
88
+ data: null,
89
+ error: {
90
+ message: e.message
91
+ }
92
+ };
93
+ }
94
+ return {
95
+ data: "",
96
+ error: null
97
+ };
98
+ };
99
+ exports.templatesHandler = templatesHandler;
package/export/combine/templatesHandler.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":["templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACO,MAAMA,gBAAgB,GAAG,OAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBAAsB,CAC9C,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,EAAE;IACrD3B,GAAG,CAAE,wCAAuC0B,oBAAoB,CAACE,QAAS,GAAE,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,mCAAkC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,EAAEnB,kBAAQ,CAACoB,eAAe,CAACT,oBAAoB,CAACL,GAAG;MAC1D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,sCAAsC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEtD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
package/export/process/blocksHandler.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { PbImportExportContext } from "../../types";
2
+ import { Configuration, Payload, Response } from ".";
3
+ /**
4
+ * Handles the export blocks process workflow.
5
+ */
6
+ export declare const blocksHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;
package/export/process/blocksHandler.js ADDED
@@ -0,0 +1,162 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.blocksHandler = void 0;
7
+ var _types = require("../../types");
8
+ var _client = require("../../client");
9
+ var _handlerGraphql = require("@webiny/handler-graphql");
10
+ var _utils = require("../utils");
11
+ var _mockSecurity = require("../../mockSecurity");
12
+ var _utils2 = require("@webiny/utils");
13
+ /**
14
+ * Handles the export blocks process workflow.
15
+ */
16
+ const blocksHandler = async (configuration, payload, context) => {
17
+ const log = console.log;
18
+ let subTask;
19
+ let noPendingTask = true;
20
+ let prevStatusOfSubTask = _types.ImportExportTaskStatus.PENDING;
21
+ log("RUNNING Export Blocks Process Handler");
22
+ const {
23
+ pageBuilder,
24
+ fileManager
25
+ } = context;
26
+ const {
27
+ taskId,
28
+ subTaskIndex,
29
+ type,
30
+ identity
31
+ } = payload;
32
+ // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
33
+ // and this Lambda is invoked internally, without credentials.
34
+ (0, _mockSecurity.mockSecurity)(identity, context);
35
+ try {
36
+ /*
37
+ * Note: We're not going to DB for finding the next sub-task to process,
38
+ * because the data might be out of sync due to GSI eventual consistency.
39
+ */
40
+ subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
41
+ /**
42
+ * Base condition!!
43
+ * Bail out early, if task not found or task's status is not "pending".
44
+ */
45
+ if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
46
+ noPendingTask = true;
47
+ return {
48
+ data: "",
49
+ error: null
50
+ };
51
+ } else {
52
+ noPendingTask = false;
53
+ }
54
+ log(`Fetched sub task => ${subTask.id}`);
55
+ const {
56
+ input
57
+ } = subTask;
58
+ const {
59
+ blockId,
60
+ exportBlocksDataKey
61
+ } = input;
62
+ const block = await pageBuilder.getPageBlock(blockId);
63
+ if (!block) {
64
+ log(`Unable to load block "${blockId}"`);
65
+ throw new _handlerGraphql.NotFoundError(`Unable to load block "${blockId}"`);
66
+ }
67
+ log(`Processing block key "${blockId}"`);
68
+
69
+ // Mark task status as PROCESSING
70
+ subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
71
+ status: _types.ImportExportTaskStatus.PROCESSING
72
+ });
73
+ // Update stats in main task
74
+ await pageBuilder.importExportTask.updateStats(taskId, {
75
+ prevStatus: prevStatusOfSubTask,
76
+ nextStatus: _types.ImportExportTaskStatus.PROCESSING
77
+ });
78
+ prevStatusOfSubTask = subTask.status;
79
+ log(`Extracting block data and uploading to storage...`);
80
+ // Extract Block
81
+ const blockDataZip = await (0, _utils.exportBlock)(block, exportBlocksDataKey, fileManager);
82
+ log(`Finish uploading zip...`);
83
+ // Update task record in DB
84
+ subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
85
+ status: _types.ImportExportTaskStatus.COMPLETED,
86
+ data: {
87
+ message: `Finish uploading data for block "${block.id}"`,
88
+ key: blockDataZip.Key
89
+ }
90
+ });
91
+ // Update stats in main task
92
+ await pageBuilder.importExportTask.updateStats(taskId, {
93
+ prevStatus: prevStatusOfSubTask,
94
+ nextStatus: _types.ImportExportTaskStatus.COMPLETED
95
+ });
96
+ prevStatusOfSubTask = subTask.status;
97
+ } catch (e) {
98
+ log("[EXPORT_BLOCKS_PROCESS] Error => ", e.message);
99
+ if (subTask && subTask.id) {
100
+ /**
101
+ * In case of error, we'll update the task status to "failed",
102
+ * so that, client can show notify the user appropriately.
103
+ */
104
+ subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
105
+ status: _types.ImportExportTaskStatus.FAILED,
106
+ error: {
107
+ name: e.name,
108
+ message: e.message,
109
+ code: "EXPORT_FAILED"
110
+ }
111
+ });
112
+
113
+ // Update stats in main task
114
+ await pageBuilder.importExportTask.updateStats(taskId, {
115
+ prevStatus: prevStatusOfSubTask,
116
+ nextStatus: _types.ImportExportTaskStatus.FAILED
117
+ });
118
+ prevStatusOfSubTask = subTask.status;
119
+ }
120
+ return {
121
+ data: null,
122
+ error: {
123
+ message: e.message
124
+ }
125
+ };
126
+ } finally {
127
+ // Base condition!
128
+ if (noPendingTask) {
129
+ log(`No pending sub-task for task ${taskId}`);
130
+ // Combine individual block zip files.
131
+ await (0, _client.invokeHandlerClient)({
132
+ context,
133
+ name: configuration.handlers.combine,
134
+ payload: {
135
+ taskId,
136
+ type,
137
+ identity: context.security.getIdentity()
138
+ },
139
+ description: "Export blocks - combine"
140
+ });
141
+ } else {
142
+ console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
143
+ // We want to continue with Self invocation no matter if current block error out.
144
+ await (0, _client.invokeHandlerClient)({
145
+ context,
146
+ name: configuration.handlers.process,
147
+ payload: {
148
+ taskId,
149
+ subTaskIndex: subTaskIndex + 1,
150
+ type,
151
+ identity: context.security.getIdentity()
152
+ },
153
+ description: "Export blocks - process - subtask"
154
+ });
155
+ }
156
+ }
157
+ return {
158
+ data: "",
159
+ error: null
160
+ };
161
+ };
162
+ exports.blocksHandler = blocksHandler;
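The process handler above works through sub-tasks one at a time: it loads the sub-task at subTaskIndex, exports the block via exportBlock, and in the finally block either re-invokes itself with subTaskIndex + 1 or, once no pending sub-task remains, invokes the combine handler. The interfaces below are inferred from that usage as a reading aid; the actual Configuration and Payload types live in export/process/index.d.ts, which is renamed in this diff but not shown in this section.

import { SecurityIdentity } from "@webiny/api-security/types";

// Inferred from how blocksHandler reads `configuration` above; the field names come
// from the handler body, the interface itself is illustrative.
export interface ProcessConfiguration {
    handlers: {
        process: string; // function re-invoked with subTaskIndex + 1 while sub-tasks remain
        combine: string; // function invoked once no pending sub-task is found
    };
}

// Payload for a single processing step, matching the destructuring in blocksHandler.
export interface ProcessPayload {
    taskId: string;
    subTaskIndex: number;
    type: string;
    identity: SecurityIdentity;
}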