@webiny/api-page-builder-import-export 0.0.0-mt-3 → 0.0.0-unstable.8c4d9f045a
This diff shows the publicly released contents of the two package versions, as published to the public registry. It is provided for informational purposes only.
- package/{importPages/client.d.ts → client.d.ts} +3 -2
- package/client.js +53 -0
- package/client.js.map +1 -0
- package/exportPages/combine/index.d.ts +7 -11
- package/exportPages/combine/index.js +37 -11
- package/exportPages/combine/index.js.map +1 -0
- package/exportPages/process/index.d.ts +10 -14
- package/exportPages/process/index.js +32 -28
- package/exportPages/process/index.js.map +1 -0
- package/exportPages/s3Stream.d.ts +2 -0
- package/exportPages/s3Stream.js +12 -3
- package/exportPages/s3Stream.js.map +1 -0
- package/exportPages/utils.d.ts +1 -1
- package/exportPages/utils.js +1 -1
- package/exportPages/utils.js.map +1 -0
- package/exportPages/zipper.d.ts +1 -0
- package/exportPages/zipper.js.map +1 -0
- package/graphql/crud/pageImportExportTasks.crud.d.ts +1 -1
- package/graphql/crud/pageImportExportTasks.crud.js +23 -13
- package/graphql/crud/pageImportExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +1 -1
- package/graphql/crud/pages.crud.js +28 -20
- package/graphql/crud/pages.crud.js.map +1 -0
- package/graphql/crud.d.ts +1 -1
- package/graphql/crud.js.map +1 -0
- package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.js +7 -5
- package/graphql/graphql/pages.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js.map +1 -0
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js.map +1 -0
- package/graphql/index.d.ts +1 -1
- package/graphql/index.js.map +1 -0
- package/graphql/types.d.ts +5 -6
- package/graphql/types.js.map +1 -0
- package/importPages/create/index.d.ts +11 -16
- package/importPages/create/index.js +39 -30
- package/importPages/create/index.js.map +1 -0
- package/importPages/process/index.d.ts +10 -17
- package/importPages/process/index.js +25 -23
- package/importPages/process/index.js.map +1 -0
- package/importPages/utils.d.ts +17 -10
- package/importPages/utils.js +89 -58
- package/importPages/utils.js.map +1 -0
- package/mockSecurity.js.map +1 -0
- package/package.json +36 -34
- package/types.d.ts +8 -7
- package/types.js.map +1 -0
- package/importPages/client.js +0 -40
package/{importPages/client.d.ts → client.d.ts}
RENAMED
@@ -1,7 +1,8 @@
-import { PbPageImportExportContext } from "
+import { PbPageImportExportContext } from "./graphql/types";
 export interface InvokeHandlerClientParams<TParams> {
     context: PbPageImportExportContext;
     name: string;
     payload: TParams;
+    description: string;
 }
-export declare function invokeHandlerClient<TParams>({ context, name, payload }: InvokeHandlerClientParams<TParams>): Promise<void>;
+export declare function invokeHandlerClient<TParams>({ context, name, payload, description }: InvokeHandlerClientParams<TParams>): Promise<void>;
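Callers of `invokeHandlerClient` must now pass a human-readable `description` along with the handler name and payload. A minimal usage sketch under the new signature (the Lambda name, payload shape, and subpath imports are illustrative, not taken from this package):

    import { invokeHandlerClient } from "@webiny/api-page-builder-import-export/client";
    import { PbPageImportExportContext } from "@webiny/api-page-builder-import-export/graphql/types";

    // `context` comes from the surrounding handler; typed stub for illustration.
    declare const context: PbPageImportExportContext;

    // "pb-export-pages-process" is a hypothetical Lambda name.
    await invokeHandlerClient<{ taskId: string }>({
        context,
        name: "pb-export-pages-process",
        payload: { taskId: "task-0001" },
        description: "Export pages - process" // now required
    });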
package/client.js
ADDED
@@ -0,0 +1,53 @@
+"use strict";
+
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.invokeHandlerClient = invokeHandlerClient;
+
+var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
+
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
+
+async function invokeHandlerClient({
+  context,
+  name,
+  payload,
+  description
+}) {
+  /*
+   * Prepare "invocationArgs", we're hacking our wat here.
+   * They are necessary to setup the "context.pageBuilder" object among other things in IMPORT_PAGE_FUNCTION
+   */
+  const {
+    request
+  } = context;
+  const tenantId = context.tenancy.getCurrentTenant().id;
+
+  const headers = _objectSpread(_objectSpread({}, request.headers), {}, {
+    ["x-tenant"]: request.headers["x-tenant"] || tenantId
+  });
+
+  delete headers["content-length"];
+  const invocationArgs = {
+    httpMethod: request.method,
+    body: request.body,
+    headers,
+
+    /**
+     * Required until type augmentation works correctly.
+     */
+    cookies: request.cookies
+  }; // Invoke handler
+
+  await context.handlerClient.invoke({
+    name: name,
+    payload: _objectSpread(_objectSpread({}, payload), invocationArgs),
+    await: false,
+    description
+  });
+}
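The `ownKeys`/`_objectSpread` helpers above are just Babel's lowering of object spread. The interesting behavior is the header forwarding: the original TypeScript (embedded in the source map below) merges the incoming request headers, falls back to the current tenant when the caller didn't send `x-tenant`, and strips `content-length` since it would be wrong for the re-packed payload. A rough standalone sketch of that merge, with a made-up request object:

    // Stand-ins for the real request/tenancy objects, for illustration only.
    const request = {
        headers: { "content-length": "311", authorization: "Bearer abc" } as Record<string, string | undefined>
    };
    const currentTenantId = "root";

    const headers = {
        ...request.headers,
        // Keep the caller's tenant if present, otherwise use the current one.
        ["x-tenant"]: request.headers["x-tenant"] || currentTenantId
    };
    // The original content-length no longer matches the payload sent to the
    // target Lambda, so it is removed before invoking.
    delete headers["content-length"];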
package/client.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","headers","invocationArgs","httpMethod","method","body","cookies","handlerClient","invoke","await"],"sources":["client.ts"],"sourcesContent":["import { PbPageImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbPageImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n /*\n * Prepare \"invocationArgs\", we're hacking our wat here.\n * They are necessary to setup the \"context.pageBuilder\" object among other things in IMPORT_PAGE_FUNCTION\n */\n const { request } = context;\n\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n const headers = {\n ...request.headers,\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n };\n delete headers[\"content-length\"];\n const invocationArgs = {\n httpMethod: request.method,\n body: request.body,\n headers,\n /**\n * Required until type augmentation works correctly.\n */\n cookies: (request as any).cookies\n };\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n ...invocationArgs\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;;;;;;;;;;AASO,eAAeA,mBAAf,CAA4C;EAC/CC,OAD+C;EAE/CC,IAF+C;EAG/CC,OAH+C;EAI/CC;AAJ+C,CAA5C,EAKgC;EACnC;AACJ;AACA;AACA;EACI,MAAM;IAAEC;EAAF,IAAcJ,OAApB;EAEA,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAR,CAAgBC,gBAAhB,GAAmCC,EAApD;;EAEA,MAAMC,OAAO,mCACNL,OAAO,CAACK,OADF;IAET,CAAC,UAAD,GAAcL,OAAO,CAACK,OAAR,CAAgB,UAAhB,KAA+BJ;EAFpC,EAAb;;EAIA,OAAOI,OAAO,CAAC,gBAAD,CAAd;EACA,MAAMC,cAAc,GAAG;IACnBC,UAAU,EAAEP,OAAO,CAACQ,MADD;IAEnBC,IAAI,EAAET,OAAO,CAACS,IAFK;IAGnBJ,OAHmB;;IAInB;AACR;AACA;IACQK,OAAO,EAAGV,OAAD,CAAiBU;EAPP,CAAvB,CAdmC,CAuBnC;;EACA,MAAMd,OAAO,CAACe,aAAR,CAAsBC,MAAtB,CAA4C;IAC9Cf,IAAI,EAAEA,IADwC;IAE9CC,OAAO,kCACAA,OADA,GAEAQ,cAFA,CAFuC;IAM9CO,KAAK,EAAE,KANuC;IAO9Cd;EAP8C,CAA5C,CAAN;AASH"}
package/exportPages/combine/index.d.ts
CHANGED
@@ -1,19 +1,15 @@
-import { HandlerPlugin } from "@webiny/handler/types";
-import { ArgsContext } from "@webiny/handler-args/types";
 import { PbPageImportExportContext } from "../../types";
 import { SecurityIdentity } from "@webiny/api-security/types";
-export
+export interface Payload {
     taskId: string;
     identity: SecurityIdentity;
-}
-export
-    data: string;
-    error:
-
-    };
-};
-declare const _default: () => HandlerPlugin<PbPageImportExportContext, ArgsContext<HandlerArgs>>;
+}
+export interface Response {
+    data: string | null;
+    error: Partial<Error> | null;
+}
 /**
  * Handles the export pages combine workflow.
  */
+declare const _default: () => import("@webiny/handler-aws").RawEventHandler<Payload, PbPageImportExportContext, Response>;
 export default _default;
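The handler factory now returns a `RawEventHandler` typed by `Payload` and `Response` instead of the old `HandlerPlugin`/`ArgsContext` pair, so the payload/response contract is visible at the type level. A sketch of wiring the factory into a Lambda entry point, assuming `createHandler` from `@webiny/handler-aws` accepts a plugins array as in other Webiny handlers (surrounding context plugins are omitted):

    import { createHandler } from "@webiny/handler-aws";
    import exportPagesCombine from "@webiny/api-page-builder-import-export/exportPages/combine";

    export const handler = createHandler({
        plugins: [
            // ...context plugins that provide pageBuilder, tenancy, security, etc.
            exportPagesCombine()
        ]
    });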
@@ -13,33 +13,55 @@ var _zipper = require("../zipper");
|
|
13
13
|
|
14
14
|
var _mockSecurity = require("../../mockSecurity");
|
15
15
|
|
16
|
+
var _handlerAws = require("@webiny/handler-aws");
|
17
|
+
|
16
18
|
/**
|
17
19
|
* Handles the export pages combine workflow.
|
18
20
|
*/
|
19
|
-
var _default = () =>
|
20
|
-
|
21
|
-
|
22
|
-
|
21
|
+
var _default = () => {
|
22
|
+
return (0, _handlerAws.createRawEventHandler)(async ({
|
23
|
+
payload,
|
24
|
+
context
|
25
|
+
}) => {
|
23
26
|
const log = console.log;
|
24
27
|
log("RUNNING Export Pages Combine Handler");
|
25
28
|
const {
|
26
|
-
invocationArgs: args,
|
27
29
|
pageBuilder
|
28
30
|
} = context;
|
29
31
|
const {
|
30
32
|
taskId,
|
31
33
|
identity
|
32
|
-
} =
|
34
|
+
} = payload;
|
33
35
|
(0, _mockSecurity.mockSecurity)(identity, context);
|
34
36
|
|
35
37
|
try {
|
36
38
|
const task = await pageBuilder.pageImportExportTask.getTask(taskId);
|
39
|
+
|
40
|
+
if (!task) {
|
41
|
+
return {
|
42
|
+
data: null,
|
43
|
+
error: {
|
44
|
+
message: `There is no task with ID "${taskId}".`
|
45
|
+
}
|
46
|
+
};
|
47
|
+
}
|
48
|
+
|
37
49
|
const {
|
38
50
|
exportPagesDataKey
|
39
51
|
} = task.input; // Get all files (zip) from given key
|
40
52
|
|
41
53
|
const listObjectResponse = await _s3Stream.s3Stream.listObject(exportPagesDataKey);
|
42
|
-
|
54
|
+
|
55
|
+
if (!listObjectResponse.Contents) {
|
56
|
+
return {
|
57
|
+
data: null,
|
58
|
+
error: {
|
59
|
+
message: "There is no Contents defined on S3 Stream while combining pages."
|
60
|
+
}
|
61
|
+
};
|
62
|
+
}
|
63
|
+
|
64
|
+
const zipFileKeys = listObjectResponse.Contents.filter(file => file.Key !== exportPagesDataKey).map(file => file.Key).filter(Boolean); // Prepare zip of all zips
|
43
65
|
|
44
66
|
const zipOfZip = new _zipper.ZipOfZip(zipFileKeys); // Upload
|
45
67
|
|
@@ -51,7 +73,7 @@ var _default = () => ({
|
|
51
73
|
data: {
|
52
74
|
message: `Finish uploading page export.`,
|
53
75
|
key: pageExportUpload.Key,
|
54
|
-
url: pageExportUpload.
|
76
|
+
url: _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
|
55
77
|
}
|
56
78
|
}); // Remove individual zip files from storage
|
57
79
|
|
@@ -71,7 +93,7 @@ var _default = () => ({
|
|
71
93
|
name: e.name,
|
72
94
|
message: e.message,
|
73
95
|
stack: e.stack,
|
74
|
-
code: "
|
96
|
+
code: "EXPORT_FAILED"
|
75
97
|
}
|
76
98
|
});
|
77
99
|
return {
|
@@ -81,8 +103,12 @@ var _default = () => ({
|
|
81
103
|
}
|
82
104
|
};
|
83
105
|
}
|
84
|
-
}
|
85
106
|
|
86
|
-
|
107
|
+
return {
|
108
|
+
data: "",
|
109
|
+
error: null
|
110
|
+
};
|
111
|
+
});
|
112
|
+
};
|
87
113
|
|
88
114
|
exports.default = _default;
|
package/exportPages/combine/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["createRawEventHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","pageImportExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","PageImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","stack","code"],"sources":["index.ts"],"sourcesContent":["import { PageImportExportTaskStatus, PbPageImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\n\nexport interface Payload {\n taskId: string;\n identity: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages combine workflow.\n */\nexport default () => {\n return createRawEventHandler<Payload, PbPageImportExportContext, Response>(\n async ({ payload, context }) => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.pageImportExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message:\n \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys);\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.pageImportExportTask.updateTask(taskId, {\n status: PageImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.pageImportExportTask.updateTask(taskId, {\n status: PageImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n }\n );\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AAEA;;AAYA;AACA;AACA;eACe,MAAM;EACjB,OAAO,IAAAA,iCAAA,EACH,OAAO;IAAEC,OAAF;IAAWC;EAAX,CAAP,KAAgC;IAC5B,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;IAEAA,GAAG,CAAC,sCAAD,CAAH;IACA,MAAM;MAAEE;IAAF,IAAkBH,OAAxB;IACA,MAAM;MAAEI,MAAF;MAAUC;IAAV,IAAuBN,OAA7B;IAEA,IAAAO,0BAAA,EAAaD,QAAb,EAAuBL,OAAvB;;IAEA,IAAI;MACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,oBAAZ,CAAiCC,OAAjC,CAAyCL,MAAzC,CAAnB;;MACA,IAAI,CAACG,IAAL,EAAW;QACP,OAAO;UACHG,IAAI,EAAE,IADH;UAEHC,KAAK,EAAE;YACHC,OAAO,EAAG,6BAA4BR,MAAO;UAD1C;QAFJ,CAAP;MAMH;;MAED,MAAM;QAAES;MAAF,IAAyBN,IAAI,CAACO,KAApC,CAXA,CAaA;;MACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAA,CAASC,UAAT,CAAoBJ,kBAApB,CAAjC;;MACA,IAAI,CAACE,kBAAkB,CAACG,QAAxB,EAAkC;QAC9B,OAAO;UACHR,IAAI,EAAE,IADH;UAEHC,KAAK,EAAE;YACHC,OAAO,EACH;UAFD;QAFJ,CAAP;MAOH;;MAED,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAnB,CAA4BE,MAA5B,CAChBC,IAAI,IAAIA,IAAI,CAACC,GAAL,KAAaT,kBADL,EAGfU,GAHe,CAGXF,IAAI,IAAIA,IAAI,CAACC,GAHF,EAIfF,MAJe,CAIRI,OAJQ,CAApB,CAzBA,CA+BA;;MACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAJ,CAAaP,WAAb,CAAjB,CAhCA,CAkCA;;MACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAT,EAA/B;MACA3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAnE,CAAH,CApCA,CAsCA;;MACA,MAAM1B,WAAW,CAACK,oBAAZ,CAAiCsB,UAAjC,CAA4C1B,MAA5C,EAAoD;QACtD2B,MAAM,EAAEC,iCAAA,CAA2BC,SADmB;QAEtDvB,IAAI,EAAE;UACFE,OAAO,EAAG,+BADR;UAEFsB,GAAG,EAAEP,gBAAgB,CAACL,GAFpB;UAGFa,GAAG,EAAEnB,kBAAA,CAASoB,eAAT,CAAyBT,gBAAgB,CAACL,GAA1C;QAHH;MAFgD,CAApD,CAAN,CAvCA,CAgDA;;MACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAZ,CAAgBW,GAAG,IAAIlB,kBAAA,CAASsB,YAAT,CAAsBJ,GAAtB,CAAvB,CAA3B;MACA,MAAMK,OAAO,CAACC,GAAR,CAAYH,kBAAZ,CAAN;MACApC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAnD,CAAH;IACH,CApDD,CAoDE,OAAOC,CAAP,EAAU;MACRzC,GAAG,CAAC,kCAAD,EAAqCyC,CAArC,CAAH;MAEA;AAChB;AACA;AACA;;MACgB,MAAMvC,WAAW,CAACK,oBAAZ,CAAiCsB,UAAjC,CAA4C1B,MAA5C,EAAoD;QACtD2B,MAAM,EAAEC,iCAAA,CAA2BW,MADmB;QAEtDhC,KAAK,EAAE;UACHiC,IAAI,EAAEF,CAAC,CAACE,IADL;UAEHhC,OAAO,EAAE8B,CAAC,CAAC9B,OAFR;UAGHiC,KAAK,EAAEH,CAAC,CAACG,KAHN;UAIHC,IAAI,EAAE;QAJH;MAF+C,CAApD,CAAN;MAUA,OAAO;QACHpC,IAAI,EAAE,IADH;QAEHC,KAAK,EAAE;UACHC,OAAO,EAAE8B,CAAC,CAAC9B;QADR;MAFJ,CAAP;IAMH;;IACD,OAAO;MACHF,IAAI,EAAE,EADH;MAEHC,KAAK,EAAE;IAFJ,CAAP;EAIH,CA1FE,CAAP;AA4FH,C"}
package/exportPages/process/index.d.ts
CHANGED
@@ -1,26 +1,22 @@
-import { HandlerPlugin } from "@webiny/handler/types";
-import { ArgsContext } from "@webiny/handler-args/types";
 import { PbPageImportExportContext } from "../../types";
 import { SecurityIdentity } from "@webiny/api-security/types";
-export declare type HandlerArgs = {
-    taskId: string;
-    subTaskIndex: number;
-    identity?: SecurityIdentity;
-};
-export declare type HandlerResponse = {
-    data: string;
-    error: {
-        message: string;
-    };
-};
 interface Configuration {
     handlers: {
         process: string;
         combine: string;
     };
 }
-
+export interface Payload {
+    taskId: string;
+    subTaskIndex: number;
+    identity?: SecurityIdentity;
+}
+export interface Response {
+    data: string | null;
+    error: Partial<Error> | null;
+}
 /**
  * Handles the export pages process workflow.
  */
+declare const _default: (configuration: Configuration) => import("@webiny/handler-aws").RawEventHandler<Payload, PbPageImportExportContext, Response>;
 export default _default;
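As with the combine handler, the process factory now returns a typed `RawEventHandler`; its `Configuration` still names the two Lambdas it fans out to. A construction sketch (the function names are placeholders for whatever the deployment actually registers):

    import exportPagesProcess from "@webiny/api-page-builder-import-export/exportPages/process";

    // Hypothetical deployed-function names, for illustration only.
    const plugin = exportPagesProcess({
        handlers: {
            process: "pb-export-pages-process", // re-invoked for the next sub-task
            combine: "pb-export-pages-combine"  // invoked once no pending sub-tasks remain
        }
    });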
package/exportPages/process/index.js
CHANGED
@@ -7,30 +7,32 @@ exports.default = void 0;
 
 var _types = require("../../types");
 
-var
-
-var _client = require("../../importPages/client");
+var _client = require("../../client");
 
 var _handlerGraphql = require("@webiny/handler-graphql");
 
-var
+var _utils = require("../utils");
 
 var _mockSecurity = require("../../mockSecurity");
 
+var _utils2 = require("@webiny/utils");
+
+var _handlerAws = require("@webiny/handler-aws");
+
 /**
  * Handles the export pages process workflow.
  */
-var _default = configuration =>
-
-
-
+var _default = configuration => {
+  return (0, _handlerAws.createRawEventHandler)(async ({
+    payload,
+    context
+  }) => {
     const log = console.log;
     let subTask;
     let noPendingTask = true;
     let prevStatusOfSubTask = _types.PageImportExportTaskStatus.PENDING;
     log("RUNNING Export Pages Process Handler");
     const {
-      invocationArgs: args,
       pageBuilder,
       fileManager
     } = context;
@@ -38,7 +40,7 @@ var _default = configuration => ({
       taskId,
       subTaskIndex,
       identity
-    } =
+    } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
     // and this Lambda is invoked internally, without credentials.
 
     (0, _mockSecurity.mockSecurity)(identity, context);
@@ -48,7 +50,7 @@ var _default = configuration => ({
      * Note: We're not going to DB for finding the next sub-task to process,
      * because the data might be out of sync due to GSI eventual consistency.
      */
-      subTask = await pageBuilder.pageImportExportTask.getSubTask(taskId, (0,
+      subTask = await pageBuilder.pageImportExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
      /**
       * Base condition!!
       * Bail out early, if task not found or task's status is not "pending".
@@ -56,7 +58,10 @@ var _default = configuration => ({
 
      if (!subTask || subTask.status !== _types.PageImportExportTaskStatus.PENDING) {
        noPendingTask = true;
-        return
+        return {
+          data: "",
+          error: null
+        };
      } else {
        noPendingTask = false;
      }
@@ -82,17 +87,17 @@ var _default = configuration => ({
      try {
        if (revisionType === _types.PageExportRevisionType.PUBLISHED) {
          // Get "published" page.
-          page = await pageBuilder.
+          page = await pageBuilder.getPublishedPageById({
            id: pageId
          });
        } else {
          // Get "latest" page.
-          page = await pageBuilder.
+          page = await pageBuilder.getPage(pageId);
        }
      } catch (e) {
        // If we're looking for "published" page and doesn't found it, get latest page.
        if (revisionType === _types.PageExportRevisionType.PUBLISHED && e instanceof _handlerGraphql.NotFoundError) {
-          page = await pageBuilder.
+          page = await pageBuilder.getPage(pageId);
        } else {
          throw e;
        }
@@ -116,7 +121,7 @@ var _default = configuration => ({
      prevStatusOfSubTask = subTask.status;
      log(`Extracting page data and uploading to storage...`); // Extract Page
 
-      const pageDataZip = await (0,
+      const pageDataZip = await (0, _utils.exportPage)(page, exportPagesDataKey, fileManager);
      log(`Finish uploading zip...`); // Update task record in DB
 
      subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {
@@ -140,20 +145,13 @@ var _default = configuration => ({
        * In case of error, we'll update the task status to "failed",
        * so that, client can show notify the user appropriately.
        */
-      const {
-        invocationArgs: args,
-        pageBuilder
-      } = context;
-      const {
-        taskId
-      } = args;
      subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {
        status: _types.PageImportExportTaskStatus.FAILED,
        error: {
          name: e.name,
          message: e.message,
          stack: e.stack,
-          code: "
+          code: "EXPORT_FAILED"
        }
      }); // Update stats in main task
 
@@ -181,7 +179,8 @@ var _default = configuration => ({
        payload: {
          taskId,
          identity: context.security.getIdentity()
-        }
+        },
+        description: "Export pages - combine"
      });
    } else {
      console.log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current page error out.
@@ -193,12 +192,17 @@ var _default = configuration => ({
          taskId,
          subTaskIndex: subTaskIndex + 1,
          identity: context.security.getIdentity()
-        }
+        },
+        description: "Export pages - process - subtask"
      });
    }
   }
-}
 
-
+    return {
+      data: "",
+      error: null
+    };
+  });
+};
 
 exports.default = _default;
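Sub-task lookups are now zero-padded via `zeroPad` from `@webiny/utils` instead of a package-local helper. Assuming it left-pads a number with zeros to the requested width, which is how it is used here, the lookups behave like this:

    import { zeroPad } from "@webiny/utils";

    // Sub-task records are keyed by a five-digit, zero-padded index,
    // so lexicographic ordering in storage matches numeric ordering.
    zeroPad(1, 5);  // "00001"
    zeroPad(42, 5); // "00042"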
package/exportPages/process/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["configuration","createRawEventHandler","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","PageImportExportTaskStatus","PENDING","pageBuilder","fileManager","taskId","subTaskIndex","identity","mockSecurity","pageImportExportTask","getSubTask","zeroPad","status","data","error","id","input","pageId","exportPagesDataKey","revisionType","page","PageExportRevisionType","PUBLISHED","getPublishedPageById","getPage","e","NotFoundError","version","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","pageDataZip","exportPage","COMPLETED","message","key","Key","FAILED","name","stack","code","invokeHandlerClient","handlers","combine","security","getIdentity","description","process"],"sources":["index.ts"],"sourcesContent":["import {\n PageExportRevisionType,\n PageImportExportTaskStatus,\n PbPageImportExportContext\n} from \"~/types\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { NotFoundError } from \"@webiny/handler-graphql\";\nimport { exportPage } from \"~/exportPages/utils\";\nimport { Payload as ExtractPayload } from \"../combine\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { SecurityIdentity } from \"@webiny/api-security/types\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { createRawEventHandler } from \"@webiny/handler-aws\";\n\ninterface Configuration {\n handlers: {\n process: string;\n combine: string;\n };\n}\n\nexport interface Payload {\n taskId: string;\n subTaskIndex: number;\n identity?: SecurityIdentity;\n}\n\nexport interface Response {\n data: string | null;\n error: Partial<Error> | null;\n}\n\n/**\n * Handles the export pages process workflow.\n */\nexport default (configuration: Configuration) => {\n return createRawEventHandler<Payload, PbPageImportExportContext, Response>(\n async ({ payload, context }) => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = PageImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Export Pages Process Handler\");\n const { pageBuilder, fileManager } = context;\n const { taskId, subTaskIndex, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity as SecurityIdentity, context);\n\n try {\n /*\n * Note: We're not going to DB for finding the next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n subTask = await pageBuilder.pageImportExportTask.getSubTask(\n taskId,\n zeroPad(subTaskIndex, 5)\n );\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== PageImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { input } = subTask;\n const { pageId, exportPagesDataKey, revisionType } = input;\n\n /**\n * At the moment, we only export a single revision of the page.\n * It could be \"published\" or \"latest\" depending upon user input.\n *\n * Note: In case of no \"published\" revision available, we use the latest revision.\n */\n let page;\n try {\n if (revisionType === PageExportRevisionType.PUBLISHED) {\n // Get \"published\" page.\n page = await pageBuilder.getPublishedPageById({ id: pageId });\n } else {\n // Get \"latest\" page.\n page = 
await pageBuilder.getPage(pageId);\n }\n } catch (e) {\n // If we're looking for \"published\" page and doesn't found it, get latest page.\n if (\n revisionType === PageExportRevisionType.PUBLISHED &&\n e instanceof NotFoundError\n ) {\n page = await pageBuilder.getPage(pageId);\n } else {\n throw e;\n }\n }\n\n if (!page) {\n log(`Unable to load page \"${pageId}\"`);\n throw new NotFoundError(`Unable to load page \"${pageId}\"`);\n }\n\n log(`Processing page key \"${pageId}\" | version ${page.version} | ${page.status}`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {\n status: PageImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.pageImportExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: PageImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n log(`Extracting page data and uploading to storage...`);\n // Extract Page\n const pageDataZip = await exportPage(page, exportPagesDataKey, fileManager);\n log(`Finish uploading zip...`);\n // Update task record in DB\n subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {\n status: PageImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading data for page \"${page.id}\" v${page.version} (${page.status}).`,\n key: pageDataZip.Key\n }\n });\n // Update stats in main task\n await pageBuilder.pageImportExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: PageImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[EXPORT_PAGES_PROCESS] Error => \", e);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.pageImportExportTask.updateSubTask(\n taskId,\n subTask.id,\n {\n status: PageImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"EXPORT_FAILED\"\n }\n }\n );\n\n // Update stats in main task\n await pageBuilder.pageImportExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: PageImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n // Combine individual page zip files.\n await invokeHandlerClient<ExtractPayload>({\n context,\n name: configuration.handlers.combine,\n payload: {\n taskId,\n identity: context.security.getIdentity()\n },\n description: \"Export pages - combine\"\n });\n } else {\n console.log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current page error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n identity: context.security.getIdentity()\n },\n description: \"Export pages - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n };\n }\n 
);\n};\n"],"mappings":";;;;;;;AAAA;;AAKA;;AACA;;AACA;;AAEA;;AAEA;;AACA;;AAoBA;AACA;AACA;eACgBA,aAAD,IAAkC;EAC7C,OAAO,IAAAC,iCAAA,EACH,OAAO;IAAEC,OAAF;IAAWC;EAAX,CAAP,KAAgC;IAC5B,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;IACA,IAAIE,OAAJ;IACA,IAAIC,aAAa,GAAG,IAApB;IACA,IAAIC,mBAAmB,GAAGC,iCAAA,CAA2BC,OAArD;IAEAN,GAAG,CAAC,sCAAD,CAAH;IACA,MAAM;MAAEO,WAAF;MAAeC;IAAf,IAA+BT,OAArC;IACA,MAAM;MAAEU,MAAF;MAAUC,YAAV;MAAwBC;IAAxB,IAAqCb,OAA3C,CAR4B,CAS5B;IACA;;IACA,IAAAc,0BAAA,EAAaD,QAAb,EAA2CZ,OAA3C;;IAEA,IAAI;MACA;AAChB;AACA;AACA;MACgBG,OAAO,GAAG,MAAMK,WAAW,CAACM,oBAAZ,CAAiCC,UAAjC,CACZL,MADY,EAEZ,IAAAM,eAAA,EAAQL,YAAR,EAAsB,CAAtB,CAFY,CAAhB;MAIA;AAChB;AACA;AACA;;MACgB,IAAI,CAACR,OAAD,IAAYA,OAAO,CAACc,MAAR,KAAmBX,iCAAA,CAA2BC,OAA9D,EAAuE;QACnEH,aAAa,GAAG,IAAhB;QACA,OAAO;UACHc,IAAI,EAAE,EADH;UAEHC,KAAK,EAAE;QAFJ,CAAP;MAIH,CAND,MAMO;QACHf,aAAa,GAAG,KAAhB;MACH;;MAEDH,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAnC,CAAH;MAEA,MAAM;QAAEC;MAAF,IAAYlB,OAAlB;MACA,MAAM;QAAEmB,MAAF;QAAUC,kBAAV;QAA8BC;MAA9B,IAA+CH,KAArD;MAEA;AAChB;AACA;AACA;AACA;AACA;;MACgB,IAAII,IAAJ;;MACA,IAAI;QACA,IAAID,YAAY,KAAKE,6BAAA,CAAuBC,SAA5C,EAAuD;UACnD;UACAF,IAAI,GAAG,MAAMjB,WAAW,CAACoB,oBAAZ,CAAiC;YAAER,EAAE,EAAEE;UAAN,CAAjC,CAAb;QACH,CAHD,MAGO;UACH;UACAG,IAAI,GAAG,MAAMjB,WAAW,CAACqB,OAAZ,CAAoBP,MAApB,CAAb;QACH;MACJ,CARD,CAQE,OAAOQ,CAAP,EAAU;QACR;QACA,IACIN,YAAY,KAAKE,6BAAA,CAAuBC,SAAxC,IACAG,CAAC,YAAYC,6BAFjB,EAGE;UACEN,IAAI,GAAG,MAAMjB,WAAW,CAACqB,OAAZ,CAAoBP,MAApB,CAAb;QACH,CALD,MAKO;UACH,MAAMQ,CAAN;QACH;MACJ;;MAED,IAAI,CAACL,IAAL,EAAW;QACPxB,GAAG,CAAE,wBAAuBqB,MAAO,GAAhC,CAAH;QACA,MAAM,IAAIS,6BAAJ,CAAmB,wBAAuBT,MAAO,GAAjD,CAAN;MACH;;MAEDrB,GAAG,CAAE,wBAAuBqB,MAAO,eAAcG,IAAI,CAACO,OAAQ,MAAKP,IAAI,CAACR,MAAO,EAA5E,CAAH,CA5DA,CA8DA;;MACAd,OAAO,GAAG,MAAMK,WAAW,CAACM,oBAAZ,CAAiCmB,aAAjC,CAA+CvB,MAA/C,EAAuDP,OAAO,CAACiB,EAA/D,EAAmE;QAC/EH,MAAM,EAAEX,iCAAA,CAA2B4B;MAD4C,CAAnE,CAAhB,CA/DA,CAkEA;;MACA,MAAM1B,WAAW,CAACM,oBAAZ,CAAiCqB,WAAjC,CAA6CzB,MAA7C,EAAqD;QACvD0B,UAAU,EAAE/B,mBAD2C;QAEvDgC,UAAU,EAAE/B,iCAAA,CAA2B4B;MAFgB,CAArD,CAAN;MAIA7B,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;MAEAhB,GAAG,CAAE,kDAAF,CAAH,CAzEA,CA0EA;;MACA,MAAMqC,WAAW,GAAG,MAAM,IAAAC,iBAAA,EAAWd,IAAX,EAAiBF,kBAAjB,EAAqCd,WAArC,CAA1B;MACAR,GAAG,CAAE,yBAAF,CAAH,CA5EA,CA6EA;;MACAE,OAAO,GAAG,MAAMK,WAAW,CAACM,oBAAZ,CAAiCmB,aAAjC,CAA+CvB,MAA/C,EAAuDP,OAAO,CAACiB,EAA/D,EAAmE;QAC/EH,MAAM,EAAEX,iCAAA,CAA2BkC,SAD4C;QAE/EtB,IAAI,EAAE;UACFuB,OAAO,EAAG,mCAAkChB,IAAI,CAACL,EAAG,MAAKK,IAAI,CAACO,OAAQ,KAAIP,IAAI,CAACR,MAAO,IADpF;UAEFyB,GAAG,EAAEJ,WAAW,CAACK;QAFf;MAFyE,CAAnE,CAAhB,CA9EA,CAqFA;;MACA,MAAMnC,WAAW,CAACM,oBAAZ,CAAiCqB,WAAjC,CAA6CzB,MAA7C,EAAqD;QACvD0B,UAAU,EAAE/B,mBAD2C;QAEvDgC,UAAU,EAAE/B,iCAAA,CAA2BkC;MAFgB,CAArD,CAAN;MAIAnC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IACH,CA3FD,CA2FE,OAAOa,CAAP,EAAU;MACR7B,GAAG,CAAC,kCAAD,EAAqC6B,CAArC,CAAH;;MAEA,IAAI3B,OAAO,IAAIA,OAAO,CAACiB,EAAvB,EAA2B;QACvB;AACpB;AACA;AACA;QACoBjB,OAAO,GAAG,MAAMK,WAAW,CAACM,oBAAZ,CAAiCmB,aAAjC,CACZvB,MADY,EAEZP,OAAO,CAACiB,EAFI,EAGZ;UACIH,MAAM,EAAEX,iCAAA,CAA2BsC,MADvC;UAEIzB,KAAK,EAAE;YACH0B,IAAI,EAAEf,CAAC,CAACe,IADL;YAEHJ,OAAO,EAAEX,CAAC,CAACW,OAFR;YAGHK,KAAK,EAAEhB,CAAC,CAACgB,KAHN;YAIHC,IAAI,EAAE;UAJH;QAFX,CAHY,CAAhB,CALuB,CAmBvB;;QACA,MAAMvC,WAAW,CAACM,oBAAZ,CAAiCqB,WAAjC,CAA6CzB,MAA7C,EAAqD;UACvD0B,UAAU,EAAE/B,mBAD2C;UAEvDgC,UAAU,EAAE/B,iCAAA,CAA2BsC;QAFgB,CAArD,CAAN;QAIAvC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;MACH;;MAED,OAAO;QACHC,IAAI,EAAE,IADH;QAEHC,KAAK,EAAE;UACHsB,OAAO,EAAEX,CAAC,CAACW;QADR;MAFJ,CAAP;IAMH,CA/HD,SA+HU;MACN;MACA,IAAIrC,aAAJ,EAAmB;QACfH,GAAG,CAAE,gCAA+BS,MAAO,EAAxC,CAAH,CADe,CAEf;;QACA,MAAM,IAAAsC,2BAAA,EAAoC;UACtChD,O
ADsC;UAEtC6C,IAAI,EAAEhD,aAAa,CAACoD,QAAd,CAAuBC,OAFS;UAGtCnD,OAAO,EAAE;YACLW,MADK;YAELE,QAAQ,EAAEZ,OAAO,CAACmD,QAAR,CAAiBC,WAAjB;UAFL,CAH6B;UAOtCC,WAAW,EAAE;QAPyB,CAApC,CAAN;MASH,CAZD,MAYO;QACHnD,OAAO,CAACD,GAAR,CAAa,8BAA6BU,YAAY,GAAG,CAAE,GAA3D,EADG,CAEH;;QACA,MAAM,IAAAqC,2BAAA,EAA6B;UAC/BhD,OAD+B;UAE/B6C,IAAI,EAAEhD,aAAa,CAACoD,QAAd,CAAuBK,OAFE;UAG/BvD,OAAO,EAAE;YACLW,MADK;YAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;YAGLC,QAAQ,EAAEZ,OAAO,CAACmD,QAAR,CAAiBC,WAAjB;UAHL,CAHsB;UAQ/BC,WAAW,EAAE;QARkB,CAA7B,CAAN;MAUH;IACJ;;IACD,OAAO;MACHnC,IAAI,EAAE,EADH;MAEHC,KAAK,EAAE;IAFJ,CAAP;EAIH,CA9KE,CAAP;AAgLH,C"}
package/exportPages/s3Stream.d.ts
CHANGED
@@ -1,10 +1,12 @@
 /// <reference types="node" />
+/// <reference types="node" />
 import { PassThrough, Readable } from "stream";
 import S3 from "aws-sdk/clients/s3";
 declare class S3Stream {
     s3: S3;
     bucket: string;
     constructor();
+    getPresignedUrl(key: string): string;
     /**
      * We're checking if the file is accessible on S3 by getting object meta data.
      * It help us to filter files that we need to download as part of export data.
package/exportPages/s3Stream.js
CHANGED
@@ -13,9 +13,9 @@ var _stream = require("stream");
|
|
13
13
|
|
14
14
|
var _s = _interopRequireDefault(require("aws-sdk/clients/s3"));
|
15
15
|
|
16
|
-
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object);
|
16
|
+
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
17
17
|
|
18
|
-
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]
|
18
|
+
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
19
19
|
|
20
20
|
const ARCHIVE_CONTENT_TYPE = "application/zip";
|
21
21
|
|
@@ -28,6 +28,15 @@ class S3Stream {
|
|
28
28
|
});
|
29
29
|
this.bucket = process.env.S3_BUCKET;
|
30
30
|
}
|
31
|
+
|
32
|
+
getPresignedUrl(key) {
|
33
|
+
return this.s3.getSignedUrl("getObject", {
|
34
|
+
Bucket: this.bucket,
|
35
|
+
Key: key,
|
36
|
+
Expires: 604800 // 1 week
|
37
|
+
|
38
|
+
});
|
39
|
+
}
|
31
40
|
/**
|
32
41
|
* We're checking if the file is accessible on S3 by getting object meta data.
|
33
42
|
* It help us to filter files that we need to download as part of export data.
|
@@ -63,7 +72,7 @@ class S3Stream {
|
|
63
72
|
writeStream(Key, contentType = ARCHIVE_CONTENT_TYPE) {
|
64
73
|
const streamPassThrough = new _stream.Stream.PassThrough();
|
65
74
|
const params = {
|
66
|
-
ACL: "
|
75
|
+
ACL: "private",
|
67
76
|
Body: streamPassThrough,
|
68
77
|
Bucket: this.bucket,
|
69
78
|
ContentType: contentType,
|
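`getPresignedUrl` wraps the AWS SDK v2 `getSignedUrl` call with a fixed `Expires` of 604800 seconds, i.e. seven days, which is also the maximum lifetime S3 allows for a presigned URL. A standalone sketch of the same call (bucket and key values are made up):

    import S3 from "aws-sdk/clients/s3";

    const s3 = new S3({ region: process.env.AWS_REGION });

    // In the combine handler, `Key` is the uploaded zip-of-zips object key.
    const url = s3.getSignedUrl("getObject", {
        Bucket: process.env.S3_BUCKET as string,
        Key: "WEBINY_PB_EXPORT_PAGES/demo-page-export.zip", // hypothetical key
        Expires: 604800 // seconds; 1 week
    });
    // `url` is a time-limited download link, surfaced to the client via the
    // task's data.url field.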
package/exportPages/s3Stream.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["ARCHIVE_CONTENT_TYPE","S3Stream","constructor","s3","S3","region","process","env","AWS_REGION","bucket","S3_BUCKET","getPresignedUrl","key","getSignedUrl","Bucket","Key","Expires","isFileAccessible","getObjectHead","error","console","warn","log","headObject","promise","readStream","getObject","createReadStream","writeStream","contentType","streamPassThrough","Stream","PassThrough","params","ACL","Body","ContentType","streamPassThroughUploadPromise","upload","listObject","prefix","listObjects","Prefix","deleteObject","s3Stream"],"sources":["s3Stream.ts"],"sourcesContent":["import { Stream, PassThrough, Readable } from \"stream\";\nimport S3 from \"aws-sdk/clients/s3\";\n\nconst ARCHIVE_CONTENT_TYPE = \"application/zip\";\n\nclass S3Stream {\n s3: S3;\n bucket: string;\n\n constructor() {\n this.s3 = new S3({\n region: process.env.AWS_REGION as string\n });\n this.bucket = process.env.S3_BUCKET as string;\n }\n\n getPresignedUrl(key: string) {\n return this.s3.getSignedUrl(\"getObject\", {\n Bucket: this.bucket,\n Key: key,\n Expires: 604800 // 1 week\n });\n }\n\n /**\n * We're checking if the file is accessible on S3 by getting object meta data.\n * It help us to filter files that we need to download as part of export data.\n * @param Key {string}\n */\n async isFileAccessible(Key: string): Promise<boolean> {\n try {\n await this.getObjectHead(Key);\n return true;\n } catch (error) {\n console.warn(`Error while fetching meta data for file \"${Key}\"`);\n console.log(error);\n return false;\n }\n }\n\n getObjectHead(Key: string): Promise<S3.HeadObjectOutput> {\n return this.s3.headObject({ Bucket: this.bucket, Key }).promise();\n }\n\n readStream(Key: string): Readable {\n return this.s3.getObject({ Bucket: this.bucket, Key }).createReadStream();\n }\n\n writeStream(\n Key: string,\n contentType: string = ARCHIVE_CONTENT_TYPE\n ): {\n streamPassThrough: PassThrough;\n streamPassThroughUploadPromise: Promise<S3.ManagedUpload.SendData>;\n } {\n const streamPassThrough = new Stream.PassThrough();\n\n const params: S3.PutObjectRequest = {\n ACL: \"private\",\n Body: streamPassThrough,\n Bucket: this.bucket,\n ContentType: contentType,\n Key\n };\n\n return {\n streamPassThrough: streamPassThrough,\n /**\n * We're not using the `FileManager` storage plugin here because it currently doesn't support streams.\n */\n streamPassThroughUploadPromise: this.s3.upload(params).promise()\n };\n }\n\n upload(params: {\n Key: string;\n ContentType: string;\n Body: Buffer;\n }): Promise<S3.ManagedUpload.SendData> {\n return this.s3\n .upload({\n ACL: \"private\",\n Bucket: this.bucket,\n ...params\n })\n .promise();\n }\n\n listObject(prefix: string): Promise<S3.ListObjectsOutput> {\n return this.s3\n .listObjects({\n Bucket: this.bucket,\n Prefix: prefix\n })\n .promise();\n }\n\n deleteObject(key: string): Promise<S3.DeleteObjectOutput> {\n return this.s3.deleteObject({ Key: key, Bucket: this.bucket }).promise();\n }\n}\n\nexport const s3Stream = new 
S3Stream();\n"],"mappings":";;;;;;;;;;;AAAA;;AACA;;;;;;AAEA,MAAMA,oBAAoB,GAAG,iBAA7B;;AAEA,MAAMC,QAAN,CAAe;EAIXC,WAAW,GAAG;IAAA;IAAA;IACV,KAAKC,EAAL,GAAU,IAAIC,UAAJ,CAAO;MACbC,MAAM,EAAEC,OAAO,CAACC,GAAR,CAAYC;IADP,CAAP,CAAV;IAGA,KAAKC,MAAL,GAAcH,OAAO,CAACC,GAAR,CAAYG,SAA1B;EACH;;EAEDC,eAAe,CAACC,GAAD,EAAc;IACzB,OAAO,KAAKT,EAAL,CAAQU,YAAR,CAAqB,WAArB,EAAkC;MACrCC,MAAM,EAAE,KAAKL,MADwB;MAErCM,GAAG,EAAEH,GAFgC;MAGrCI,OAAO,EAAE,MAH4B,CAGrB;;IAHqB,CAAlC,CAAP;EAKH;EAED;AACJ;AACA;AACA;AACA;;;EAC0B,MAAhBC,gBAAgB,CAACF,GAAD,EAAgC;IAClD,IAAI;MACA,MAAM,KAAKG,aAAL,CAAmBH,GAAnB,CAAN;MACA,OAAO,IAAP;IACH,CAHD,CAGE,OAAOI,KAAP,EAAc;MACZC,OAAO,CAACC,IAAR,CAAc,4CAA2CN,GAAI,GAA7D;MACAK,OAAO,CAACE,GAAR,CAAYH,KAAZ;MACA,OAAO,KAAP;IACH;EACJ;;EAEDD,aAAa,CAACH,GAAD,EAA4C;IACrD,OAAO,KAAKZ,EAAL,CAAQoB,UAAR,CAAmB;MAAET,MAAM,EAAE,KAAKL,MAAf;MAAuBM;IAAvB,CAAnB,EAAiDS,OAAjD,EAAP;EACH;;EAEDC,UAAU,CAACV,GAAD,EAAwB;IAC9B,OAAO,KAAKZ,EAAL,CAAQuB,SAAR,CAAkB;MAAEZ,MAAM,EAAE,KAAKL,MAAf;MAAuBM;IAAvB,CAAlB,EAAgDY,gBAAhD,EAAP;EACH;;EAEDC,WAAW,CACPb,GADO,EAEPc,WAAmB,GAAG7B,oBAFf,EAMT;IACE,MAAM8B,iBAAiB,GAAG,IAAIC,cAAA,CAAOC,WAAX,EAA1B;IAEA,MAAMC,MAA2B,GAAG;MAChCC,GAAG,EAAE,SAD2B;MAEhCC,IAAI,EAAEL,iBAF0B;MAGhChB,MAAM,EAAE,KAAKL,MAHmB;MAIhC2B,WAAW,EAAEP,WAJmB;MAKhCd;IALgC,CAApC;IAQA,OAAO;MACHe,iBAAiB,EAAEA,iBADhB;;MAEH;AACZ;AACA;MACYO,8BAA8B,EAAE,KAAKlC,EAAL,CAAQmC,MAAR,CAAeL,MAAf,EAAuBT,OAAvB;IAL7B,CAAP;EAOH;;EAEDc,MAAM,CAACL,MAAD,EAIiC;IACnC,OAAO,KAAK9B,EAAL,CACFmC,MADE;MAECJ,GAAG,EAAE,SAFN;MAGCpB,MAAM,EAAE,KAAKL;IAHd,GAIIwB,MAJJ,GAMFT,OANE,EAAP;EAOH;;EAEDe,UAAU,CAACC,MAAD,EAAgD;IACtD,OAAO,KAAKrC,EAAL,CACFsC,WADE,CACU;MACT3B,MAAM,EAAE,KAAKL,MADJ;MAETiC,MAAM,EAAEF;IAFC,CADV,EAKFhB,OALE,EAAP;EAMH;;EAEDmB,YAAY,CAAC/B,GAAD,EAA8C;IACtD,OAAO,KAAKT,EAAL,CAAQwC,YAAR,CAAqB;MAAE5B,GAAG,EAAEH,GAAP;MAAYE,MAAM,EAAE,KAAKL;IAAzB,CAArB,EAAwDe,OAAxD,EAAP;EACH;;AA9FU;;AAiGR,MAAMoB,QAAQ,GAAG,IAAI3C,QAAJ,EAAjB"}
package/exportPages/utils.d.ts
CHANGED
@@ -7,7 +7,7 @@ export interface ExportedPageData {
     files: ImageFile[];
 }
 export declare function exportPage(page: Page, exportPagesDataKey: string, fileManager: FileManagerContext["fileManager"]): Promise<S3.ManagedUpload.SendData>;
-export interface ImageFile extends File {
+export interface ImageFile extends Omit<File, "src"> {
     key: string;
 }
 export declare function extractFilesFromPageData(data: Record<string, any>, files?: any[]): ImageFile[];
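Dropping `src` from the base `File` type reflects that exported file records are addressed by a storage `key`, not a resolved URL. A self-contained sketch of the effect, using a stand-in `File` shape rather than the real one from `@webiny/api-page-builder/types`:

    // Stand-in for the Page Builder File type, for illustration only.
    interface File {
        id: string;
        src: string;
        size: number;
        type: string;
    }

    interface ImageFile extends Omit<File, "src"> {
        key: string;
    }

    const exported: ImageFile = {
        id: "file-1",
        size: 1024,
        type: "image/png",
        key: "demo-pages/assets/header.png"
        // adding `src` here would now fail excess-property checking
    };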
package/exportPages/utils.js
CHANGED
@@ -38,7 +38,7 @@ async function getFilteredFiles(files) {
 
 async function exportPage(page, exportPagesDataKey, fileManager) {
   // Extract all files
-  const files = extractFilesFromPageData(page.content); // Filter files
+  const files = extractFilesFromPageData(page.content || {}); // Filter files
 
   const filesAvailableForDownload = await getFilteredFiles(files); // Extract images from page settings
 
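The `|| {}` guard covers the unpublished-page case, where `content` can be null; since `extractFilesFromPageData` already returns its accumulator for non-object input, the fallback keeps the call site honest rather than changing behavior. For reference, the traversal (visible in the source map below) collects `file` values and flattens `images` arrays:

    // Sample page content, made up for illustration.
    const content = {
        elements: [
            { type: "image", data: { file: { key: "a.png" } } },
            { type: "images-list", data: { images: [{ key: "b.png" }, { key: "c.png" }] } }
        ]
    };

    // extractFilesFromPageData(content)     -> [{ key: "a.png" }, { key: "b.png" }, { key: "c.png" }]
    // extractFilesFromPageData(null as any) -> [] (the base case the old code already hit at runtime)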
package/exportPages/utils.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","getFilteredFiles","files","uniqueFileKeys","Map","promises","map","file","s3Stream","isFileAccessible","key","isFileAvailableResults","Promise","all","filesAvailableForDownload","i","length","has","push","set","exportPage","page","exportPagesDataKey","fileManager","extractFilesFromPageData","content","pageSettingsImages","get","filter","image","src","pageSettingsImagesData","id","getFile","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","pageTitle","archiveFileKey","process","data","Array","isArray","element","tuple","Object","entries","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, File } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport { s3Stream } from \"./s3Stream\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nasync function getFilteredFiles(files: ImageFile[]) {\n const uniqueFileKeys = new Map<string, boolean>();\n const promises = files.map(file => s3Stream.isFileAccessible(file.key));\n const isFileAvailableResults = await Promise.all(promises);\n\n const filesAvailableForDownload = [];\n // Filter files\n for (let i = 0; i < files.length; i++) {\n const file = files[i];\n // Check file accessibility\n if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {\n filesAvailableForDownload.push(file);\n uniqueFileKeys.set(file.key, true);\n }\n }\n return filesAvailableForDownload;\n}\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: ImageFile[];\n}\n\nexport async function exportPage(\n page: Page,\n exportPagesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromPageData(page.content || {});\n // Filter files\n const filesAvailableForDownload = await getFilteredFiles(files);\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\"),\n get(page, \"settings.social.image\")\n ].filter(image => image && image.src);\n const pageSettingsImagesData = [];\n // Get file data for all images inside \"page.settings\"\n for (let i = 0; i < pageSettingsImages.length; i++) {\n const { id } = pageSettingsImages[i];\n const file = await fileManager.files.getFile(id);\n pageSettingsImagesData.push(file);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: [...filesAvailableForDownload, ...pageSettingsImagesData]\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [...filesAvailableForDownload, ...pageSettingsImagesData],\n pageTitle: page.title,\n pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ImageFile extends Omit<File, \"src\"> {\n key: string;\n}\n\nexport function extractFilesFromPageData(\n data: Record<string, any>,\n files: any[] = []\n): ImageFile[] {\n // Base case: termination\n if (!data || typeof data !== \"object\") {\n return 
files;\n }\n // Recursively call function for each element\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n extractFilesFromPageData(element, files);\n }\n return files;\n }\n\n // Main\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n if (key === \"file\" && value) {\n files.push(value);\n } else if (key === \"images\" && Array.isArray(value)) {\n // Handle case for \"images-list\" component\n files.push(...value);\n } else {\n extractFilesFromPageData(value, files);\n }\n }\n return files;\n}\n"],"mappings":";;;;;;;;;;;AAGA;;AACA;;AACA;;AAEO,MAAMA,uBAAuB,GAAG,wBAAhC;;;AAEP,eAAeC,gBAAf,CAAgCC,KAAhC,EAAoD;EAChD,MAAMC,cAAc,GAAG,IAAIC,GAAJ,EAAvB;EACA,MAAMC,QAAQ,GAAGH,KAAK,CAACI,GAAN,CAAUC,IAAI,IAAIC,kBAAA,CAASC,gBAAT,CAA0BF,IAAI,CAACG,GAA/B,CAAlB,CAAjB;EACA,MAAMC,sBAAsB,GAAG,MAAMC,OAAO,CAACC,GAAR,CAAYR,QAAZ,CAArC;EAEA,MAAMS,yBAAyB,GAAG,EAAlC,CALgD,CAMhD;;EACA,KAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGb,KAAK,CAACc,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAMR,IAAI,GAAGL,KAAK,CAACa,CAAD,CAAlB,CADmC,CAEnC;;IACA,IAAIJ,sBAAsB,CAACI,CAAD,CAAtB,IAA6B,CAACZ,cAAc,CAACc,GAAf,CAAmBV,IAAI,CAACG,GAAxB,CAAlC,EAAgE;MAC5DI,yBAAyB,CAACI,IAA1B,CAA+BX,IAA/B;MACAJ,cAAc,CAACgB,GAAf,CAAmBZ,IAAI,CAACG,GAAxB,EAA6B,IAA7B;IACH;EACJ;;EACD,OAAOI,yBAAP;AACH;;AAOM,eAAeM,UAAf,CACHC,IADG,EAEHC,kBAFG,EAGHC,WAHG,EAI+B;EAClC;EACA,MAAMrB,KAAK,GAAGsB,wBAAwB,CAACH,IAAI,CAACI,OAAL,IAAgB,EAAjB,CAAtC,CAFkC,CAGlC;;EACA,MAAMX,yBAAyB,GAAG,MAAMb,gBAAgB,CAACC,KAAD,CAAxD,CAJkC,CAKlC;;EACA,MAAMwB,kBAAkB,GAAG,CACvB,IAAAC,YAAA,EAAIN,IAAJ,EAAU,wBAAV,CADuB,EAEvB,IAAAM,YAAA,EAAIN,IAAJ,EAAU,uBAAV,CAFuB,EAGzBO,MAHyB,CAGlBC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAHN,CAA3B;EAIA,MAAMC,sBAAsB,GAAG,EAA/B,CAVkC,CAWlC;;EACA,KAAK,IAAIhB,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGW,kBAAkB,CAACV,MAAvC,EAA+CD,CAAC,EAAhD,EAAoD;IAChD,MAAM;MAAEiB;IAAF,IAASN,kBAAkB,CAACX,CAAD,CAAjC;IACA,MAAMR,IAAI,GAAG,MAAMgB,WAAW,CAACrB,KAAZ,CAAkB+B,OAAlB,CAA0BD,EAA1B,CAAnB;IACAD,sBAAsB,CAACb,IAAvB,CAA4BX,IAA5B;EACH,CAhBiC,CAkBlC;;;EACA,MAAM2B,QAAQ,GAAG;IACbb,IAAI,EAAE;MACFI,OAAO,EAAEJ,IAAI,CAACI,OADZ;MAEFU,KAAK,EAAEd,IAAI,CAACc,KAFV;MAGFC,IAAI,EAAEf,IAAI,CAACe,IAHT;MAIFC,OAAO,EAAEhB,IAAI,CAACgB,OAJZ;MAKFC,MAAM,EAAEjB,IAAI,CAACiB,MALX;MAMFC,QAAQ,EAAElB,IAAI,CAACkB;IANb,CADO;IASbrC,KAAK,EAAE,CAAC,GAAGY,yBAAJ,EAA+B,GAAGiB,sBAAlC;EATM,CAAjB;EAWA,MAAMS,cAAc,GAAGC,MAAM,CAACC,IAAP,CAAYC,IAAI,CAACC,SAAL,CAAeV,QAAf,CAAZ,CAAvB;EAEA,MAAMW,MAAM,GAAG,IAAIC,eAAJ,CAAW;IACtBC,UAAU,EAAE;MACR7C,KAAK,EAAE,CAAC,GAAGY,yBAAJ,EAA+B,GAAGiB,sBAAlC,CADC;MAERiB,SAAS,EAAE3B,IAAI,CAACc,KAFR;MAGRK;IAHQ,CADU;IAMtBS,cAAc,EAAE3B;EANM,CAAX,CAAf;EASA,OAAOuB,MAAM,CAACK,OAAP,EAAP;AACH;;AAMM,SAAS1B,wBAAT,CACH2B,IADG,EAEHjD,KAAY,GAAG,EAFZ,EAGQ;EACX;EACA,IAAI,CAACiD,IAAD,IAAS,OAAOA,IAAP,KAAgB,QAA7B,EAAuC;IACnC,OAAOjD,KAAP;EACH,CAJU,CAKX;;;EACA,IAAIkD,KAAK,CAACC,OAAN,CAAcF,IAAd,CAAJ,EAAyB;IACrB,KAAK,IAAIpC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGoC,IAAI,CAACnC,MAAzB,EAAiCD,CAAC,EAAlC,EAAsC;MAClC,MAAMuC,OAAO,GAAGH,IAAI,CAACpC,CAAD,CAApB;MACAS,wBAAwB,CAAC8B,OAAD,EAAUpD,KAAV,CAAxB;IACH;;IACD,OAAOA,KAAP;EACH,CAZU,CAcX;;;EACA,MAAMqD,KAAK,GAAGC,MAAM,CAACC,OAAP,CAAeN,IAAf,CAAd;;EACA,KAAK,IAAIpC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGwC,KAAK,CAACvC,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAM,CAACL,GAAD,EAAMgD,KAAN,IAAeH,KAAK,CAACxC,CAAD,CAA1B,CADmC,CAEnC;;IACA,IAAIL,GAAG,KAAK,MAAR,IAAkBgD,KAAtB,EAA6B;MACzBxD,KAAK,CAACgB,IAAN,CAAWwC,KAAX;IACH,CAFD,MAEO,
IAAIhD,GAAG,KAAK,QAAR,IAAoB0C,KAAK,CAACC,OAAN,CAAcK,KAAd,CAAxB,EAA8C;MACjD;MACAxD,KAAK,CAACgB,IAAN,CAAW,GAAGwC,KAAd;IACH,CAHM,MAGA;MACHlC,wBAAwB,CAACkC,KAAD,EAAQxD,KAAR,CAAxB;IACH;EACJ;;EACD,OAAOA,KAAP;AACH"}
package/exportPages/zipper.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","pageTitle","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","pageDataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","name","code","message","stack","forEach","streamDetails","append","pipe","finalize","PAGE_EXPORT_BASENAME","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { ImageFile } from \"./utils\";\n\ninterface FileStreamDetails {\n stream: Readable;\n filename: string;\n}\n\ninterface ExportInfo {\n files: ImageFile[];\n pageTitle: string;\n pageDataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n exportInfo: ExportInfo;\n archiveFileKey: string;\n}\n\nexport default class Zipper {\n private readonly archiveFormat = \"zip\";\n private readonly filesDirName = \"assets\";\n private readonly archiveFileName: string;\n config: ZipperConfig;\n\n constructor(config: ZipperConfig) {\n this.config = config;\n this.archiveFileName = uniqueId(\n `${this.config.archiveFileKey}/`,\n `-${kebabCase(this.config.exportInfo.pageTitle)}.zip`\n );\n }\n\n s3DownloadStreams(): FileStreamDetails[] {\n const exportInfo = this.config.exportInfo;\n const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.pageTitle)}`);\n const files = exportInfo.files.map(({ key }) => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n };\n });\n\n return [\n ...files,\n {\n stream: Readable.from(exportInfo.pageDataBuffer),\n filename: `${prefix}\\\\${exportInfo.pageTitle}.json`\n }\n ];\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const s3FilesStreams = this.s3DownloadStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. 
Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n\nconst PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;\n\nexport class ZipOfZip {\n private readonly archiveFormat = \"zip\";\n private readonly archiveFileName: string;\n keys: string[];\n\n constructor(keys: string[]) {\n this.keys = keys;\n this.archiveFileName = uniqueId(\"\", `-${PAGE_EXPORT_BASENAME}`);\n }\n\n getFileStreams(): FileStreamDetails[] {\n return this.keys.map(key => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${path.basename(key)}`\n };\n });\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const fileStreamDetails = this.getFileStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n"],"mappings":";;;;;;;;;;;AACA;;AAEA;;AACA;;AACA;;AACA;;AACA;;;;;;AAPA;AA0Be,MAAMA,MAAN,CAAa;EAMxBC,WAAW,CAACC,MAAD,EAAuB;IAAA,qDALD,KAKC;IAAA,oDAJF,QAIE;IAAA;IAAA;IAC9B,KAAKA,MAAL,GAAcA,MAAd;IACA,KAAKC,eAAL,GAAuB,IAAAC,eAAA,EAClB,GAAE,KAAKF,MAAL,CAAYG,cAAe,GADX,EAElB,IAAG,IAAAC,kBAAA,EAAU,KAAKJ,MAAL,CAAYK,UAAZ,CAAuBC,SAAjC,CAA4C,MAF7B,CAAvB;EAIH;;EAEDC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,KAAKL,MAAL,CAAYK,UAA/B;IACA,MAAMG,MAAM,GAAG,IAAAN,eAAA,EAAS,EAAT,EAAc,IAAG,IAAAE,kBAAA,EAAUC,UAAU,CAACC,SAArB,CAAgC,EAAjD,CAAf;IACA,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAX,CAAiBC,GAAjB,CAAqB,CAAC;MAAEC;IAAF,CAAD,KAAa;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEP,MAAO,KAAI,KAAKQ,YAAa,KAAIC,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF9D,CAAP;IAIH,CALa,CAAd;IAOA,OAAO,CACH,GAAGF,KADA,EAEH;MACIG,MAAM,EAAEO,gBAAA,CAASC,IAAT,CAAcf,UAAU,CAACgB,cAAzB,CADZ;MAEIN,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,SAAU;IAFjD,CAFG,CAAP;EAOH;;EAEDgB,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAMyB,cAAc,GAAG,KAAKnB,iBAAL,EAAvB,CAN0C,CAQ1C;;IACA,MAAMoB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAX,cAAc,CAACY,OAAf,CAAwBC,aAAD,IACnBZ,OAAO,CAACa,MAAR,CAAeD,aAAa,CAAC3B,MAA7B,EAAqC;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACc,IAAR,CAAalB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACe,QAAR,GA1B0C,CA4B1C;;IACA,OAAOlB,8BAAP;EACH;;AA/DuB;;;AAkE5B,MAAMmB,oBAAoB,GAAI,wBAA9B;;AAEO,MAAMC,QAAN,CAAe;EAKlB7C,WAAW,CAAC8C,IAAD,EAAiB;IAAA,qDAJK,KAIL;IAAA;IAAA;IACxB,KAAKA,IA
AL,GAAYA,IAAZ;IACA,KAAK5C,eAAL,GAAuB,IAAAC,eAAA,EAAS,EAAT,EAAc,IAAGyC,oBAAqB,EAAtC,CAAvB;EACH;;EAEDG,cAAc,GAAwB;IAClC,OAAO,KAAKD,IAAL,CAAUnC,GAAV,CAAcC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEE,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF7B,CAAP;IAIH,CALM,CAAP;EAMH;;EAEDW,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAM8C,iBAAiB,GAAG,KAAKD,cAAL,EAA1B,CAN0C,CAQ1C;;IACA,MAAMnB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAU,iBAAiB,CAACT,OAAlB,CAA2BC,aAAD,IACtBZ,OAAO,CAACa,MAAR,CAAeD,aAAa,CAAC3B,MAA7B,EAAqC;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACc,IAAR,CAAalB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACe,QAAR,GA1B0C,CA4B1C;;IACA,OAAOlB,8BAAP;EACH;;AAjDiB"}
package/graphql/crud/pageImportExportTasks.crud.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { ContextPlugin } from "@webiny/
+import { ContextPlugin } from "@webiny/api";
 import { PageImportExportPluginsParams } from "../../types";
 import { PbPageImportExportContext } from "../types";
 declare const _default: ({ storageOperations }: PageImportExportPluginsParams) => ContextPlugin<PbPageImportExportContext>;