@webiny/api-page-builder-import-export 0.0.0-unstable.990c3ab1b6 → 0.0.0-unstable.d4f203fa97

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/client.js +6 -2
  2. package/client.js.map +1 -1
  3. package/exportPages/combine/index.js +19 -11
  4. package/exportPages/combine/index.js.map +1 -1
  5. package/exportPages/process/index.js +35 -19
  6. package/exportPages/process/index.js.map +1 -1
  7. package/exportPages/s3Stream.js +20 -1
  8. package/exportPages/s3Stream.js.map +1 -1
  9. package/exportPages/utils.js +31 -18
  10. package/exportPages/utils.js.map +1 -1
  11. package/exportPages/zipper.js +41 -29
  12. package/exportPages/zipper.js.map +1 -1
  13. package/graphql/crud/pageImportExportTasks.crud.js +57 -9
  14. package/graphql/crud/pageImportExportTasks.crud.js.map +1 -1
  15. package/graphql/crud/pages.crud.js +39 -21
  16. package/graphql/crud/pages.crud.js.map +1 -1
  17. package/graphql/crud.js +5 -0
  18. package/graphql/crud.js.map +1 -1
  19. package/graphql/graphql/pageImportExportTasks.gql.js +6 -1
  20. package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -1
  21. package/graphql/graphql/pages.gql.js +6 -1
  22. package/graphql/graphql/pages.gql.js.map +1 -1
  23. package/graphql/graphql/utils/resolve.js +3 -0
  24. package/graphql/graphql/utils/resolve.js.map +1 -1
  25. package/graphql/graphql.js +4 -0
  26. package/graphql/graphql.js.map +1 -1
  27. package/graphql/index.js +5 -0
  28. package/graphql/index.js.map +1 -1
  29. package/importPages/create/index.js +20 -9
  30. package/importPages/create/index.js.map +1 -1
  31. package/importPages/process/index.js +29 -24
  32. package/importPages/process/index.js.map +1 -1
  33. package/importPages/utils.js +126 -44
  34. package/importPages/utils.js.map +1 -1
  35. package/mockSecurity.js +2 -0
  36. package/mockSecurity.js.map +1 -1
  37. package/package.json +23 -23
  38. package/types.js +5 -0
  39. package/types.js.map +1 -1
package/client.js CHANGED
@@ -1,11 +1,14 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
 Object.defineProperty(exports, "__esModule", {
     value: true
 });
 exports.invokeHandlerClient = invokeHandlerClient;
+
 var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
+
 async function invokeHandlerClient({
     context,
     name,
@@ -28,12 +31,13 @@ async function invokeHandlerClient({
         httpMethod: request.method,
         body: request.body,
         headers,
+
         /**
          * Required until type augmentation works correctly.
         */
         cookies: request.cookies
-    };
-    // Invoke handler
+    }; // Invoke handler
+
     await context.handlerClient.invoke({
         name: name,
         payload: (0, _objectSpread2.default)((0, _objectSpread2.default)({}, payload), invocationArgs),
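
The change to client.js is purely cosmetic: the new Babel output inserts blank lines between statements and folds trailing comments onto the preceding line. Behavior is unchanged: invokeHandlerClient copies the current request's method, body, headers and cookies into the payload, pins the x-tenant header, and dispatches a fire-and-forget invocation (await: false). A minimal call-site sketch in TypeScript; the payload shape, Lambda name, and deep-import paths are assumptions for illustration, not part of this diff:

import { invokeHandlerClient } from "@webiny/api-page-builder-import-export/client";
import { PbPageImportExportContext } from "@webiny/api-page-builder-import-export/graphql/types";

// Hypothetical payload shape, for illustration only.
interface ProcessPayload {
    taskId: string;
    subTaskIndex: number;
}

export async function kickOffProcessing(context: PbPageImportExportContext, taskId: string) {
    // Resolves as soon as the invocation is dispatched, because the client
    // calls handlerClient.invoke with `await: false`.
    await invokeHandlerClient<ProcessPayload>({
        context,
        name: "wby-pb-export-pages-process", // hypothetical Lambda function name
        payload: { taskId, subTaskIndex: 0 },
        description: "Export pages - process"
    });
}
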
package/client.js.map CHANGED
@@ -1 +1 @@
(Single-line sourcemap for client.ts regenerated: "version", "names", "sources" and "sourcesContent" are identical on both sides; only the "mappings" string changed with the new Babel output. Full JSON omitted.)
package/exportPages/combine/index.js CHANGED
@@ -4,11 +4,17 @@ Object.defineProperty(exports, "__esModule", {
     value: true
 });
 exports.default = void 0;
+
 var _types = require("../../types");
+
 var _s3Stream = require("../s3Stream");
+
 var _zipper = require("../zipper");
+
 var _mockSecurity = require("../../mockSecurity");
+
 var _handlerAws = require("@webiny/handler-aws");
+
 /**
  * Handles the export pages combine workflow.
  */
@@ -27,8 +33,10 @@ var _default = () => {
             identity
         } = payload;
         (0, _mockSecurity.mockSecurity)(identity, context);
+
         try {
             const task = await pageBuilder.pageImportExportTask.getTask(taskId);
+
             if (!task) {
                 return {
                     data: null,
@@ -37,12 +45,13 @@ var _default = () => {
                     }
                 };
             }
+
             const {
                 exportPagesDataKey
-            } = task.input;
+            } = task.input; // Get all files (zip) from given key
 
-            // Get all files (zip) from given key
             const listObjectResponse = await _s3Stream.s3Stream.listObject(exportPagesDataKey);
+
             if (!listObjectResponse.Contents) {
                 return {
                     data: null,
@@ -51,16 +60,14 @@ var _default = () => {
                     }
                 };
             }
-            const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportPagesDataKey).map(file => file.Key).filter(Boolean);
 
-            // Prepare zip of all zips
-            const zipOfZip = new _zipper.ZipOfZip(zipFileKeys);
+            const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportPagesDataKey).map(file => file.Key).filter(Boolean); // Prepare zip of all zips
+
+            const zipOfZip = new _zipper.ZipOfZip(zipFileKeys); // Upload
 
-            // Upload
             const pageExportUpload = await zipOfZip.process();
-            log(`Done uploading... File is located at ${pageExportUpload.Location} `);
+            log(`Done uploading... File is located at ${pageExportUpload.Location} `); // Update task status and save export page data key
 
-            // Update task status and save export page data key
             await pageBuilder.pageImportExportTask.updateTask(taskId, {
                 status: _types.PageImportExportTaskStatus.COMPLETED,
                 data: {
@@ -68,19 +75,18 @@ var _default = () => {
                     key: pageExportUpload.Key,
                     url: _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
                 }
-            });
+            }); // Remove individual zip files from storage
 
-            // Remove individual zip files from storage
             const deleteFilePromises = zipFileKeys.map(key => _s3Stream.s3Stream.deleteObject(key));
             await Promise.all(deleteFilePromises);
             log(`Successfully deleted ${deleteFilePromises.length} zip files.`);
         } catch (e) {
             log("[EXPORT_PAGES_COMBINE] Error => ", e);
-
             /**
              * In case of error, we'll update the task status to "failed",
             * so that, client can show notify the user appropriately.
             */
+
             await pageBuilder.pageImportExportTask.updateTask(taskId, {
                 status: _types.PageImportExportTaskStatus.FAILED,
                 error: {
@@ -97,10 +103,12 @@ var _default = () => {
                 }
             };
         }
+
         return {
            data: "",
            error: null
        };
    });
 };
+
 exports.default = _default;
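
Bookkeeping aside, the combine handler's happy path is: list every per-page zip under exportPagesDataKey, skip the data key itself, bundle the rest into a single archive with ZipOfZip, store a presigned URL on the task, then delete the intermediates. A condensed sketch of that sequence, reusing the package's own s3Stream and ZipOfZip helpers (a sketch, not the handler's literal code):

import { s3Stream } from "../s3Stream";
import { ZipOfZip } from "../zipper";

async function combinePageZips(exportPagesDataKey: string): Promise<string> {
    const listed = await s3Stream.listObject(exportPagesDataKey);
    const zipFileKeys = (listed.Contents || [])
        .filter(file => file.Key !== exportPagesDataKey) // skip the export-data key itself
        .map(file => file.Key)
        .filter(Boolean) as string[];

    // One "zip of zips", streamed straight to S3.
    const pageExportUpload = await new ZipOfZip(zipFileKeys).process();

    // Presigned download URL; s3Stream signs it for one week.
    const url = s3Stream.getPresignedUrl(pageExportUpload.Key);

    // The per-page zips are no longer needed once combined.
    await Promise.all(zipFileKeys.map(key => s3Stream.deleteObject(key)));
    return url;
}
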
package/exportPages/combine/index.js.map CHANGED
@@ -1 +1 @@
(Single-line sourcemap for combine/index.ts regenerated: "names", "sources" and "sourcesContent" are identical on both sides; only the "mappings" string changed with the new Babel output. Full JSON omitted.)
package/exportPages/process/index.js CHANGED
@@ -4,13 +4,21 @@ Object.defineProperty(exports, "__esModule", {
     value: true
 });
 exports.default = void 0;
+
 var _types = require("../../types");
+
 var _client = require("../../client");
+
 var _handlerGraphql = require("@webiny/handler-graphql");
+
 var _utils = require("../utils");
+
 var _mockSecurity = require("../../mockSecurity");
+
 var _utils2 = require("@webiny/utils");
+
 var _handlerAws = require("@webiny/handler-aws");
+
 /**
  * Handles the export pages process workflow.
  */
@@ -32,10 +40,11 @@ var _default = configuration => {
             taskId,
             subTaskIndex,
             identity
-        } = payload;
-        // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
+        } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
         // and this Lambda is invoked internally, without credentials.
+
         (0, _mockSecurity.mockSecurity)(identity, context);
+
         try {
             /*
              * Note: We're not going to DB for finding the next sub-task to process,
@@ -46,6 +55,7 @@ var _default = configuration => {
              * Base condition!!
              * Bail out early, if task not found or task's status is not "pending".
             */
+
             if (!subTask || subTask.status !== _types.PageImportExportTaskStatus.PENDING) {
                 noPendingTask = true;
                 return {
@@ -55,6 +65,7 @@ var _default = configuration => {
             } else {
                 noPendingTask = false;
             }
+
             log(`Fetched sub task => ${subTask.id}`);
             const {
                 input
@@ -64,14 +75,15 @@ var _default = configuration => {
                 exportPagesDataKey,
                 revisionType
             } = input;
-
             /**
              * At the moment, we only export a single revision of the page.
              * It could be "published" or "latest" depending upon user input.
              *
              * Note: In case of no "published" revision available, we use the latest revision.
             */
+
             let page;
+
             try {
                 if (revisionType === _types.PageExportRevisionType.PUBLISHED) {
                     // Get "published" page.
@@ -90,35 +102,36 @@ var _default = configuration => {
                     throw e;
                 }
             }
+
             if (!page) {
                 log(`Unable to load page "${pageId}"`);
                 throw new _handlerGraphql.NotFoundError(`Unable to load page "${pageId}"`);
             }
-            log(`Processing page key "${pageId}" | version ${page.version} | ${page.status}`);
 
-            // Mark task status as PROCESSING
+            log(`Processing page key "${pageId}" | version ${page.version} | ${page.status}`); // Mark task status as PROCESSING
+
             subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {
                 status: _types.PageImportExportTaskStatus.PROCESSING
-            });
-            // Update stats in main task
+            }); // Update stats in main task
+
             await pageBuilder.pageImportExportTask.updateStats(taskId, {
                 prevStatus: prevStatusOfSubTask,
                 nextStatus: _types.PageImportExportTaskStatus.PROCESSING
             });
             prevStatusOfSubTask = subTask.status;
-            log(`Extracting page data and uploading to storage...`);
-            // Extract Page
+            log(`Extracting page data and uploading to storage...`); // Extract Page
+
             const pageDataZip = await (0, _utils.exportPage)(page, exportPagesDataKey, fileManager);
-            log(`Finish uploading zip...`);
-            // Update task record in DB
+            log(`Finish uploading zip...`); // Update task record in DB
+
             subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {
                 status: _types.PageImportExportTaskStatus.COMPLETED,
                 data: {
                     message: `Finish uploading data for page "${page.id}" v${page.version} (${page.status}).`,
                     key: pageDataZip.Key
                 }
-            });
-            // Update stats in main task
+            }); // Update stats in main task
+
             await pageBuilder.pageImportExportTask.updateStats(taskId, {
                 prevStatus: prevStatusOfSubTask,
                 nextStatus: _types.PageImportExportTaskStatus.COMPLETED
@@ -126,6 +139,7 @@ var _default = configuration => {
             prevStatusOfSubTask = subTask.status;
         } catch (e) {
             log("[EXPORT_PAGES_PROCESS] Error => ", e);
+
             if (subTask && subTask.id) {
                 /**
                  * In case of error, we'll update the task status to "failed",
@@ -139,15 +153,15 @@ var _default = configuration => {
                         stack: e.stack,
                         code: "EXPORT_FAILED"
                     }
-                });
+                }); // Update stats in main task
 
-                // Update stats in main task
                 await pageBuilder.pageImportExportTask.updateStats(taskId, {
                     prevStatus: prevStatusOfSubTask,
                     nextStatus: _types.PageImportExportTaskStatus.FAILED
                 });
                 prevStatusOfSubTask = subTask.status;
             }
+
             return {
                 data: null,
                 error: {
@@ -157,8 +171,8 @@ var _default = configuration => {
         } finally {
             // Base condition!
             if (noPendingTask) {
-                log(`No pending sub-task for task ${taskId}`);
-                // Combine individual page zip files.
+                log(`No pending sub-task for task ${taskId}`); // Combine individual page zip files.
+
                 await (0, _client.invokeHandlerClient)({
                     context,
                     name: configuration.handlers.combine,
@@ -169,8 +183,8 @@ var _default = configuration => {
                     description: "Export pages - combine"
                 });
             } else {
-                console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
-                // We want to continue with Self invocation no matter if current page error out.
+                console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current page error out.
+
                 await (0, _client.invokeHandlerClient)({
                     context,
                     name: configuration.handlers.process,
@@ -183,10 +197,12 @@ var _default = configuration => {
                 });
             }
         }
+
         return {
            data: "",
            error: null
        };
    });
 };
+
 exports.default = _default;
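
Two details here are worth calling out: sub-tasks are fetched by zero-padded index via zeroPad(subTaskIndex, 5) instead of being queried, sidestepping GSI eventual consistency, and the finally block either hands off to the combine handler (nothing left pending) or self-invokes with subTaskIndex + 1. A condensed sketch of that dispatch decision, written with the names from the handler's own scope (not standalone code):

// Direct key lookup ("00000", "00001", ...): no DynamoDB query involved.
const subTask = await pageBuilder.pageImportExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));
const noPendingTask = !subTask || subTask.status !== PageImportExportTaskStatus.PENDING;

if (noPendingTask) {
    // All pages exported: chain to the combine handler.
    await invokeHandlerClient<ExtractPayload>({
        context,
        name: configuration.handlers.combine,
        payload: { taskId, identity: context.security.getIdentity() },
        description: "Export pages - combine"
    });
} else {
    // Otherwise, after processing this page, self-invoke for the next index,
    // even when the current page errored out.
    await invokeHandlerClient<Payload>({
        context,
        name: configuration.handlers.process,
        payload: { taskId, subTaskIndex: subTaskIndex + 1, identity: context.security.getIdentity() },
        description: "Export pages - process - subtask"
    });
}
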
package/exportPages/process/index.js.map CHANGED
@@ -1 +1 @@
(Single-line sourcemap for process/index.ts regenerated: "names", "sources" and "sourcesContent" are identical on both sides; only the "mappings" string changed with the new Babel output. Full JSON omitted.)
package/exportPages/s3Stream.js CHANGED
@@ -1,15 +1,22 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
 Object.defineProperty(exports, "__esModule", {
     value: true
 });
 exports.s3Stream = void 0;
+
 var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
+
 var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
 var _stream = require("stream");
+
 var _s = _interopRequireDefault(require("aws-sdk/clients/s3"));
+
 const ARCHIVE_CONTENT_TYPE = "application/zip";
+
 class S3Stream {
     constructor() {
         (0, _defineProperty2.default)(this, "s3", void 0);
@@ -19,19 +26,22 @@ class S3Stream {
         });
         this.bucket = process.env.S3_BUCKET;
     }
+
     getPresignedUrl(key) {
         return this.s3.getSignedUrl("getObject", {
             Bucket: this.bucket,
             Key: key,
             Expires: 604800 // 1 week
+
         });
     }
-
     /**
      * We're checking if the file is accessible on S3 by getting object meta data.
      * It help us to filter files that we need to download as part of export data.
      * @param Key {string}
     */
+
+
     async isFileAccessible(Key) {
         try {
             await this.getObjectHead(Key);
@@ -42,18 +52,21 @@ class S3Stream {
             return false;
         }
     }
+
     getObjectHead(Key) {
         return this.s3.headObject({
             Bucket: this.bucket,
             Key
         }).promise();
     }
+
     readStream(Key) {
         return this.s3.getObject({
             Bucket: this.bucket,
             Key
         }).createReadStream();
     }
+
     writeStream(Key, contentType = ARCHIVE_CONTENT_TYPE) {
         const streamPassThrough = new _stream.Stream.PassThrough();
         const params = {
@@ -65,30 +78,36 @@ class S3Stream {
         };
         return {
             streamPassThrough: streamPassThrough,
+
             /**
              * We're not using the `FileManager` storage plugin here because it currently doesn't support streams.
             */
             streamPassThroughUploadPromise: this.s3.upload(params).promise()
         };
     }
+
     upload(params) {
         return this.s3.upload((0, _objectSpread2.default)({
             ACL: "private",
             Bucket: this.bucket
         }, params)).promise();
     }
+
     listObject(prefix) {
         return this.s3.listObjects({
             Bucket: this.bucket,
             Prefix: prefix
         }).promise();
     }
+
     deleteObject(key) {
         return this.s3.deleteObject({
             Key: key,
             Bucket: this.bucket
         }).promise();
     }
+
 }
+
 const s3Stream = new S3Stream();
 exports.s3Stream = s3Stream;
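
writeStream is what lets archives stream to S3 without being buffered in memory: it returns a PassThrough to write into, plus the ManagedUpload promise to await. A small usage sketch, assuming a hypothetical object key and local file:

import { createReadStream } from "fs";
import { s3Stream } from "./s3Stream";

async function uploadArchive() {
    const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(
        "pb-page-exports/example-export.zip" // hypothetical S3 key
    );

    // Any Readable can be piped in; content type defaults to "application/zip".
    createReadStream("/tmp/example-export.zip").pipe(streamPassThrough);

    // Resolves once the S3 managed upload completes.
    const { Key, Location } = await streamPassThroughUploadPromise;
    console.log(`Uploaded ${Key} to ${Location}`);
    console.log(`Download (valid 1 week): ${s3Stream.getPresignedUrl(Key)}`);
}
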
package/exportPages/s3Stream.js.map CHANGED
@@ -1 +1 @@
(Single-line sourcemap for s3Stream.ts regenerated: "names", "sources" and "sourcesContent" are identical on both sides; only the "mappings" string changed with the new Babel output. Full JSON omitted.)