@webiny/api-page-builder-import-export 5.39.0-beta.0 → 5.39.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.js +9 -20
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.js +1 -1
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/formsHandler.js +1 -1
- package/export/combine/formsHandler.js.map +1 -1
- package/export/combine/pagesHandler.js +1 -1
- package/export/combine/pagesHandler.js.map +1 -1
- package/export/combine/templatesHandler.js +1 -1
- package/export/combine/templatesHandler.js.map +1 -1
- package/export/process/exporters/BlockExporter.d.ts +1 -1
- package/export/process/exporters/BlockExporter.js +0 -2
- package/export/process/exporters/BlockExporter.js.map +1 -1
- package/export/process/exporters/FormExporter.d.ts +1 -1
- package/export/process/exporters/PageExporter.d.ts +1 -1
- package/export/process/exporters/PageExporter.js +0 -2
- package/export/process/exporters/PageExporter.js.map +1 -1
- package/export/process/exporters/PageTemplateExporter.d.ts +1 -1
- package/export/process/exporters/PageTemplateExporter.js +0 -2
- package/export/process/exporters/PageTemplateExporter.js.map +1 -1
- package/export/s3Stream.d.ts +8 -8
- package/export/s3Stream.js +24 -21
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +4 -4
- package/export/utils.js.map +1 -1
- package/export/zipper.d.ts +5 -5
- package/export/zipper.js +15 -21
- package/export/zipper.js.map +1 -1
- package/graphql/crud/importExportTasks.crud.js +40 -25
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/import/process/blocks/importBlock.js +7 -5
- package/import/process/blocks/importBlock.js.map +1 -1
- package/import/process/forms/importForm.js +3 -1
- package/import/process/forms/importForm.js.map +1 -1
- package/import/process/pages/importPage.js +5 -5
- package/import/process/pages/importPage.js.map +1 -1
- package/import/process/templates/importTemplate.js +3 -1
- package/import/process/templates/importTemplate.js.map +1 -1
- package/import/utils/extractAndUploadZipFileContents.js +0 -1
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
- package/import/utils/extractZipAndUploadToS3.js.map +1 -1
- package/import/utils/uploadAssets.js +6 -6
- package/import/utils/uploadAssets.js.map +1 -1
- package/import/utils/uploadFilesFromS3.d.ts +1 -1
- package/import/utils/uploadFilesFromS3.js +1 -1
- package/import/utils/uploadFilesFromS3.js.map +1 -1
- package/package.json +22 -28
package/client.js
CHANGED
@@ -1,42 +1,31 @@
  "use strict";

- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
  Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.invokeHandlerClient = invokeHandlerClient;
- var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
  async function invokeHandlerClient({
    context,
    name,
    payload,
    description
  }) {
-   /*
-    * Prepare "invocationArgs", we're hacking our wat here.
-    * They are necessary to setup the "context.pageBuilder" object among other things in IMPORT_PAGE_FUNCTION
-    */
    const {
      request
    } = context;
    const tenantId = context.tenancy.getCurrentTenant().id;
-
-     ["x-tenant"]: request.headers["x-tenant"] || tenantId
-   });
-   delete headers["content-length"];
-   const invocationArgs = {
-     httpMethod: request.method,
-     body: request.body,
-     headers,
-     /**
-      * Required until type augmentation works correctly.
-      */
-     cookies: request.cookies
-   };
+
    // Invoke handler
    await context.handlerClient.invoke({
      name: name,
-     payload:
+     payload: {
+       ...payload,
+       httpMethod: request.method,
+       headers: {
+         ["x-i18n-locale"]: request.headers["x-i18n-locale"],
+         ["x-tenant"]: request.headers["x-tenant"] || tenantId
+       }
+     },
      await: false,
      description
    });
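For readability, here is the updated handler as authored in TypeScript, recovered verbatim from the sourcesContent embedded in the new client.js.map that follows. The request body and headers are now folded into the invocation payload itself instead of a separate invocationArgs object.

import { PbImportExportContext } from "~/graphql/types";

export interface InvokeHandlerClientParams<TParams> {
    context: PbImportExportContext;
    name: string;
    payload: TParams;
    description: string;
}

export async function invokeHandlerClient<TParams>({
    context,
    name,
    payload,
    description
}: InvokeHandlerClientParams<TParams>) {
    const { request } = context;
    const tenantId = context.tenancy.getCurrentTenant().id;

    // Invoke handler
    await context.handlerClient.invoke<TParams & any>({
        name: name,
        payload: {
            ...payload,
            httpMethod: request.method,
            headers: {
                ["x-i18n-locale"]: request.headers["x-i18n-locale"],
                ["x-tenant"]: request.headers["x-tenant"] || tenantId
            }
        },
        await: false,
        description
    });
}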
package/client.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","
+
{"version":3,"names":["invokeHandlerClient","context","name","payload","description","request","tenantId","tenancy","getCurrentTenant","id","handlerClient","invoke","httpMethod","method","headers","await"],"sources":["client.ts"],"sourcesContent":["import { PbImportExportContext } from \"~/graphql/types\";\n\nexport interface InvokeHandlerClientParams<TParams> {\n context: PbImportExportContext;\n name: string;\n payload: TParams;\n description: string;\n}\n\nexport async function invokeHandlerClient<TParams>({\n context,\n name,\n payload,\n description\n}: InvokeHandlerClientParams<TParams>) {\n const { request } = context;\n const tenantId = context.tenancy.getCurrentTenant().id;\n\n // Invoke handler\n await context.handlerClient.invoke<TParams & any>({\n name: name,\n payload: {\n ...payload,\n httpMethod: request.method,\n headers: {\n [\"x-i18n-locale\"]: request.headers[\"x-i18n-locale\"],\n [\"x-tenant\"]: request.headers[\"x-tenant\"] || tenantId\n }\n },\n await: false,\n description\n });\n}\n"],"mappings":";;;;;;AASO,eAAeA,mBAAmBA,CAAU;EAC/CC,OAAO;EACPC,IAAI;EACJC,OAAO;EACPC;AACgC,CAAC,EAAE;EACnC,MAAM;IAAEC;EAAQ,CAAC,GAAGJ,OAAO;EAC3B,MAAMK,QAAQ,GAAGL,OAAO,CAACM,OAAO,CAACC,gBAAgB,CAAC,CAAC,CAACC,EAAE;;EAEtD;EACA,MAAMR,OAAO,CAACS,aAAa,CAACC,MAAM,CAAgB;IAC9CT,IAAI,EAAEA,IAAI;IACVC,OAAO,EAAE;MACL,GAAGA,OAAO;MACVS,UAAU,EAAEP,OAAO,CAACQ,MAAM;MAC1BC,OAAO,EAAE;QACL,CAAC,eAAe,GAAGT,OAAO,CAACS,OAAO,CAAC,eAAe,CAAC;QACnD,CAAC,UAAU,GAAGT,OAAO,CAACS,OAAO,CAAC,UAAU,CAAC,IAAIR;MACjD;IACJ,CAAC;IACDS,KAAK,EAAE,KAAK;IACZX;EACJ,CAAC,CAAC;AACN"}
package/export/combine/blocksHandler.js
CHANGED
@@ -61,7 +61,7 @@ const blocksHandler = async (payload, context) => {
      data: {
        message: `Finish uploading block export.`,
        key: blockExportUpload.Key,
-       url: _s3Stream.s3Stream.getPresignedUrl(blockExportUpload.Key)
+       url: await _s3Stream.s3Stream.getPresignedUrl(blockExportUpload.Key)
      }
    });

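The same one-line change (awaiting getPresignedUrl) appears in the forms, pages and templates combine handlers below. The s3Stream module itself (+24 -21 in this release) is not shown in this excerpt, but the call sites now treat getPresignedUrl as asynchronous, which is consistent with the AWS SDK v3 presigner API. A minimal sketch of such a wrapper, assuming @aws-sdk/client-s3 and @aws-sdk/s3-request-presigner; the class name, bucket/region sources and expiry value are illustrative, not the package's actual implementation:

import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

// Hypothetical wrapper: shows why callers must now `await` getPresignedUrl.
class S3Stream {
    private readonly client = new S3Client({ region: process.env.AWS_REGION });
    private readonly bucket = process.env.S3_BUCKET as string; // assumed configuration

    // SDK v3 signs requests asynchronously, so this returns a Promise<string>.
    getPresignedUrl(key: string): Promise<string> {
        const command = new GetObjectCommand({ Bucket: this.bucket, Key: key });
        return getSignedUrl(this.client, command, { expiresIn: 3600 }); // 1 hour, illustrative
    }
}

export const s3Stream = new S3Stream();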
package/export/combine/blocksHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,aAAa,GAAG,MAAAA,CACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IAClD3B,GAAG,CAAE,wCAAuC0B,iBAAiB,CAACE,QAAS,GAAE,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,gCAA+B;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,
+
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","blocksHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportBlocksDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","blockExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export blocks combine workflow.\n */\nexport const blocksHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Blocks Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportBlocksDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportBlocksDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining blocks.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportBlocksDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_BLOCK_EXPORT.zip\");\n\n // Upload\n const blockExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${blockExportUpload.Location} `);\n\n // Update task status and save export blocks data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading block export.`,\n key: blockExportUpload.Key,\n url: await s3Stream.getPresignedUrl(blockExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_BLOCKS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,aAAa,GAAG,MAAAA,CACzBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAoB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE1C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,mBAAmB,CAAC;IACzE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,mBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,yBAAyB,CAAC;;IAErE;IACA,MAAMQ,iBAAiB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IAClD3B,GAAG,CAAE,wCAAuC0B,iBAAiB,CAACE,QAAS,GAAE,CAAC;;IAE1E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,gCAA+B;QACzCsB,GAAG,EAAEP,iBAAiB,CAACL,GAAG;QAC1Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,iBAAiB,CAACL,GAAG;MAC7D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,mCAAmC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEnD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,aAAA,GAAAA,aAAA"}
package/export/combine/formsHandler.js
CHANGED
@@ -61,7 +61,7 @@ const formsHandler = async (payload, context) => {
      data: {
        message: `Finish uploading form export.`,
        key: formExportUpload.Key,
-       url: _s3Stream.s3Stream.getPresignedUrl(formExportUpload.Key)
+       url: await _s3Stream.s3Stream.getPresignedUrl(formExportUpload.Key)
      }
    });

package/export/combine/formsHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","formsHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportFormsDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","formExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["formsHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export forms combine workflow.\n */\nexport const formsHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Forms Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportFormsDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportFormsDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining forms.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportFormsDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_FORM_EXPORT.zip\");\n\n // Upload\n const formExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${formExportUpload.Location} `);\n\n // Update task status and save export form data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading form export.`,\n key: formExportUpload.Key,\n url: s3Stream.getPresignedUrl(formExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_FORMS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,
+
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","formsHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportFormsDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","formExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["formsHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export forms combine workflow.\n */\nexport const formsHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Forms Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportFormsDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportFormsDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining forms.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportFormsDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_FORM_EXPORT.zip\");\n\n // Upload\n const formExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${formExportUpload.Location} `);\n\n // Update task status and save export form data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading form export.`,\n key: formExportUpload.Key,\n url: await s3Stream.getPresignedUrl(formExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_FORMS_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MAC5D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,YAAA,GAAAA,YAAA"}
package/export/combine/pagesHandler.js
CHANGED
@@ -61,7 +61,7 @@ const pagesHandler = async (payload, context) => {
      data: {
        message: `Finish uploading page export.`,
        key: pageExportUpload.Key,
-       url: _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
+       url: await _s3Stream.s3Stream.getPresignedUrl(pageExportUpload.Key)
      }
    });

package/export/combine/pagesHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","pagesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export pages combine workflow.\n */\nexport const pagesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_PAGE_EXPORT.zip\");\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,
+
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","pagesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportPagesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","pageExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export pages combine workflow.\n */\nexport const pagesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Pages Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportPagesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportPagesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining pages.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportPagesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_PAGE_EXPORT.zip\");\n\n // Upload\n const pageExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${pageExportUpload.Location} `);\n\n // Update task status and save export page data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading page export.`,\n key: pageExportUpload.Key,\n url: await s3Stream.getPresignedUrl(pageExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_PAGES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,YAAY,GAAG,MAAAA,CACxBC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,sCAAsC,CAAC;EAC3C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAmB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAEzC;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,kBAAkB,CAAC;IACxE,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,kBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,wBAAwB,CAAC;;IAEpE;IACA,MAAMQ,gBAAgB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACjD3B,GAAG,CAAE,wCAAuC0B,gBAAgB,CAACE,QAAS,GAAE,CAAC;;IAEzE;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,+BAA8B;QACxCsB,GAAG,EAAEP,gBAAgB,CAACL,GAAG;QACzBa,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,gBAAgB,CAACL,GAAG;MAC5D;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,kCAAkC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAElD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,YAAA,GAAAA,YAAA"}
package/export/combine/templatesHandler.js
CHANGED
@@ -61,7 +61,7 @@ const templatesHandler = async (payload, context) => {
      data: {
        message: `Finish uploading template export.`,
        key: templateExportUpload.Key,
-       url: _s3Stream.s3Stream.getPresignedUrl(templateExportUpload.Key)
+       url: await _s3Stream.s3Stream.getPresignedUrl(templateExportUpload.Key)
      }
    });

package/export/combine/templatesHandler.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,gBAAgB,GAAG,MAAAA,CAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACrD3B,GAAG,CAAE,wCAAuC0B,oBAAoB,CAACE,QAAS,GAAE,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,mCAAkC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,
+
{"version":3,"names":["_types","require","_s3Stream","_zipper","_mockSecurity","templatesHandler","payload","context","log","console","pageBuilder","taskId","identity","mockSecurity","task","importExportTask","getTask","data","error","message","exportTemplatesDataKey","input","listObjectResponse","s3Stream","listObject","Contents","zipFileKeys","filter","file","Key","map","Boolean","zipOfZip","ZipOfZip","templateExportUpload","process","Location","updateTask","status","ImportExportTaskStatus","COMPLETED","key","url","getPresignedUrl","deleteFilePromises","deleteObject","Promise","all","length","e","FAILED","name","code","exports"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { s3Stream } from \"../s3Stream\";\nimport { ZipOfZip } from \"../zipper\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { Payload, Response } from \"~/export/combine\";\n\n/**\n * Handles the export templates combine workflow.\n */\nexport const templatesHandler = async (\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n\n log(\"RUNNING Export Templates Combine Handler\");\n const { pageBuilder } = context;\n const { taskId, identity } = payload;\n\n mockSecurity(identity, context);\n\n try {\n const task = await pageBuilder.importExportTask.getTask(taskId);\n if (!task) {\n return {\n data: null,\n error: {\n message: `There is no task with ID \"${taskId}\".`\n }\n };\n }\n\n const { exportTemplatesDataKey } = task.input;\n\n // Get all files (zip) from given key\n const listObjectResponse = await s3Stream.listObject(exportTemplatesDataKey);\n if (!listObjectResponse.Contents) {\n return {\n data: null,\n error: {\n message: \"There is no Contents defined on S3 Stream while combining templates.\"\n }\n };\n }\n\n const zipFileKeys = listObjectResponse.Contents.filter(\n file => file.Key !== exportTemplatesDataKey\n )\n .map(file => file.Key)\n .filter(Boolean) as string[];\n\n // Prepare zip of all zips\n const zipOfZip = new ZipOfZip(zipFileKeys, \"WEBINY_TEMPLATE_EXPORT.zip\");\n\n // Upload\n const templateExportUpload = await zipOfZip.process();\n log(`Done uploading... 
File is located at ${templateExportUpload.Location} `);\n\n // Update task status and save export templates data key\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish uploading template export.`,\n key: templateExportUpload.Key,\n url: await s3Stream.getPresignedUrl(templateExportUpload.Key)\n }\n });\n\n // Remove individual zip files from storage\n const deleteFilePromises = zipFileKeys.map(key => s3Stream.deleteObject(key));\n await Promise.all(deleteFilePromises);\n log(`Successfully deleted ${deleteFilePromises.length} zip files.`);\n } catch (e) {\n log(\"[EXPORT_TEMPLATES_COMBINE] Error => \", e.message);\n\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"EXPORT_FAILED\"\n }\n });\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACO,MAAMI,gBAAgB,GAAG,MAAAA,CAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EAEvBA,GAAG,CAAC,0CAA0C,CAAC;EAC/C,MAAM;IAAEE;EAAY,CAAC,GAAGH,OAAO;EAC/B,MAAM;IAAEI,MAAM;IAAEC;EAAS,CAAC,GAAGN,OAAO;EAEpC,IAAAO,0BAAY,EAACD,QAAQ,EAAEL,OAAO,CAAC;EAE/B,IAAI;IACA,MAAMO,IAAI,GAAG,MAAMJ,WAAW,CAACK,gBAAgB,CAACC,OAAO,CAACL,MAAM,CAAC;IAC/D,IAAI,CAACG,IAAI,EAAE;MACP,OAAO;QACHG,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAG,6BAA4BR,MAAO;QACjD;MACJ,CAAC;IACL;IAEA,MAAM;MAAES;IAAuB,CAAC,GAAGN,IAAI,CAACO,KAAK;;IAE7C;IACA,MAAMC,kBAAkB,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,sBAAsB,CAAC;IAC5E,IAAI,CAACE,kBAAkB,CAACG,QAAQ,EAAE;MAC9B,OAAO;QACHR,IAAI,EAAE,IAAI;QACVC,KAAK,EAAE;UACHC,OAAO,EAAE;QACb;MACJ,CAAC;IACL;IAEA,MAAMO,WAAW,GAAGJ,kBAAkB,CAACG,QAAQ,CAACE,MAAM,CAClDC,IAAI,IAAIA,IAAI,CAACC,GAAG,KAAKT,sBACzB,CAAC,CACIU,GAAG,CAACF,IAAI,IAAIA,IAAI,CAACC,GAAG,CAAC,CACrBF,MAAM,CAACI,OAAO,CAAa;;IAEhC;IACA,MAAMC,QAAQ,GAAG,IAAIC,gBAAQ,CAACP,WAAW,EAAE,4BAA4B,CAAC;;IAExE;IACA,MAAMQ,oBAAoB,GAAG,MAAMF,QAAQ,CAACG,OAAO,CAAC,CAAC;IACrD3B,GAAG,CAAE,wCAAuC0B,oBAAoB,CAACE,QAAS,GAAE,CAAC;;IAE7E;IACA,MAAM1B,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACC,SAAS;MACxCvB,IAAI,EAAE;QACFE,OAAO,EAAG,mCAAkC;QAC5CsB,GAAG,EAAEP,oBAAoB,CAACL,GAAG;QAC7Ba,GAAG,EAAE,MAAMnB,kBAAQ,CAACoB,eAAe,CAACT,oBAAoB,CAACL,GAAG;MAChE;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMe,kBAAkB,GAAGlB,WAAW,CAACI,GAAG,CAACW,GAAG,IAAIlB,kBAAQ,CAACsB,YAAY,CAACJ,GAAG,CAAC,CAAC;IAC7E,MAAMK,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;IACrCpC,GAAG,CAAE,wBAAuBoC,kBAAkB,CAACI,MAAO,aAAY,CAAC;EACvE,CAAC,CAAC,OAAOC,CAAC,EAAE;IACRzC,GAAG,CAAC,sCAAsC,EAAEyC,CAAC,CAAC9B,OAAO,CAAC;;IAEtD;AACR;AACA;AACA;IACQ,MAAMT,WAAW,CAACK,gBAAgB,CAACsB,UAAU,CAAC1B,MAAM,EAAE;MAClD2B,MAAM,EAAEC,6BAAsB,CAACW,MAAM;MACrChC,KAAK,EAAE;QACHiC,IAAI,EAAEF,CAAC,CAACE,IAAI;QACZhC,OAAO,EAAE8B,CAAC,CAAC9B,OAAO;QAClBiC,IAAI,EAAE;MACV;IACJ,CAAC,CAAC;IAEF,OAAO;MACHnC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHC,OAAO,EAAE8B,CAAC,CAAC9B;MACf;IACJ,CAAC;EACL;EACA,OAAO;IACHF,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAACmC,OAAA,CAAAhD,gBAAA,GAAAA,gBAAA"}
package/export/process/exporters/BlockExporter.d.ts
CHANGED
@@ -8,5 +8,5 @@ export interface ExportedBlockData {
  export declare class BlockExporter {
      private fileManager;
      constructor(fileManager: FileManagerContext["fileManager"]);
-     execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string): Promise<import("aws-sdk/
+     execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
  }
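The declaration files for all four exporters (BlockExporter, FormExporter, PageExporter, PageTemplateExporter) swap the return type of execute() from the aws-sdk v2 import to CompleteMultipartUploadOutput from @aws-sdk/client-s3. A hedged consumer-side sketch of what that change means for callers; the function and its logging are illustrative, not part of the package:

import type { CompleteMultipartUploadOutput } from "@aws-sdk/client-s3";

// Hypothetical consumer of an exporter's execute() result under the new typings.
// Location, Bucket and Key are all optional on the v3 output type, so guard before use.
function logExportUpload(upload: CompleteMultipartUploadOutput): void {
    if (upload.Key && upload.Location) {
        console.log(`Export archive ${upload.Key} uploaded to ${upload.Location}`);
    }
}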
package/export/process/exporters/BlockExporter.js
CHANGED
@@ -5,12 +5,10 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.BlockExporter = void 0;
- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
  var _zipper = _interopRequireDefault(require("../../zipper"));
  var _utils = require("../../utils");
  class BlockExporter {
    constructor(fileManager) {
-     (0, _defineProperty2.default)(this, "fileManager", void 0);
      this.fileManager = fileManager;
    }
    async execute(block, blockCategory, exportBlocksDataKey) {
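The dropped @babel/runtime defineProperty helper corresponds to the fileManager class field in the TypeScript source, which the compiled constructor now assigns directly; the same simplification appears in PageExporter.js and PageTemplateExporter.js below. For reference, the authored class as recovered from the sourcesContent of BlockExporter.js.map (execute() body omitted here; it is shown in full in the sourcemap that follows):

import { FileManagerContext } from "@webiny/api-file-manager/types";

export class BlockExporter {
    private fileManager: FileManagerContext["fileManager"];

    constructor(fileManager: FileManagerContext["fileManager"]) {
        this.fileManager = fileManager;
    }
}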
package/export/process/exporters/BlockExporter.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","BlockExporter","constructor","fileManager","
+
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","BlockExporter","constructor","fileManager","execute","block","blockCategory","exportBlocksDataKey","files","extractFilesFromData","content","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","blockData","name","category","slug","icon","description","blockDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","dataBuffer","archiveFileKey","process","exports"],"sources":["BlockExporter.ts"],"sourcesContent":["import { BlockCategory, PageBlock } from \"@webiny/api-page-builder/types\";\nimport { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\">;\n category: BlockCategory;\n files: File[];\n}\n\nexport class BlockExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(block: PageBlock, blockCategory: BlockCategory, exportBlocksDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content\n },\n category: {\n name: blockCategory.name,\n slug: blockCategory.slug,\n icon: blockCategory.icon,\n description: blockCategory.description\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n }\n}\n"],"mappings":";;;;;;;AAEA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAQO,MAAME,aAAa,CAAC;EAGvBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,KAAgB,EAAEC,aAA4B,EAAEC,mBAA2B,EAAE;IACvF;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACJ,KAAK,CAACK,OAAO,IAAI,CAAC,CAAC,CAAC;IACvD,MAAMC,OAAO,GAAGH,KAAK,CAACI,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IACpD;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAACd,WAAW,CAACe,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,SAAS,GAAG;MACdjB,KAAK,EAAE;QACHkB,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBb,OAAO,EAAEL,KAAK,CAACK;MACnB,CAAC;MACDc,QAAQ,EAAE;QACND,IAAI,EAAEjB,aAAa,CAACiB,IAAI;QACxBE,IAAI,EAAEnB,aAAa,CAACmB,IAAI;QACxBC,IAAI,EAAEpB,aAAa,CAACoB,IAAI;QACxBC,WAAW,EAAErB,aAAa,CAACqB;MAC/B,CAAC;MACDnB,KAAK,EAAEO;IACX,CAAC;IACD,MAAMa,eAAe,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,SAAS,CAAC,CAAC;IAE9D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACR3B,KAAK,EAAEO,cAAc;QACrBQ,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBa,UAAU,EAAER;MAChB,CAAC;MACDS,cAAc,EAAE9B;IACpB,CAAC,CAAC;IAEF,OAAO0B,MAAM,CAACK,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAtC,aAAA,GAAAA,aAAA"}
package/export/process/exporters/FormExporter.d.ts
CHANGED
@@ -5,5 +5,5 @@ export interface ExportedFormData {
      files: File[];
  }
  export declare class FormExporter {
-     execute(form: FbForm, exportFormsDataKey: string): Promise<import("aws-sdk/
+     execute(form: FbForm, exportFormsDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
  }
package/export/process/exporters/PageExporter.d.ts
CHANGED
@@ -7,5 +7,5 @@ export interface ExportedPageData {
  export declare class PageExporter {
      private fileManager;
      constructor(fileManager: FileManagerContext["fileManager"]);
-     execute(page: Page, exportPagesDataKey: string): Promise<import("aws-sdk/
+     execute(page: Page, exportPagesDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
  }
package/export/process/exporters/PageExporter.js
CHANGED
@@ -5,13 +5,11 @@ Object.defineProperty(exports, "__esModule", {
    value: true
  });
  exports.PageExporter = void 0;
- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
  var _get = _interopRequireDefault(require("lodash/get"));
  var _zipper = _interopRequireDefault(require("../../zipper"));
  var _utils = require("../../utils");
  class PageExporter {
    constructor(fileManager) {
-     (0, _defineProperty2.default)(this, "fileManager", void 0);
      this.fileManager = fileManager;
    }
    async execute(page, exportPagesDataKey) {
package/export/process/exporters/PageExporter.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_get","_interopRequireDefault","require","_zipper","_utils","PageExporter","constructor","fileManager","
+
{"version":3,"names":["_get","_interopRequireDefault","require","_zipper","_utils","PageExporter","constructor","fileManager","execute","page","exportPagesDataKey","files","extractFilesFromData","content","pageSettingsImages","get","filter","image","src","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exports"],"sources":["PageExporter.ts"],"sourcesContent":["import get from \"lodash/get\";\nimport { Page } from \"@webiny/api-page-builder/types\";\nimport { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: File[];\n}\n\nexport class PageExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(page: Page, exportPagesDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(page.content || {});\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n\n const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: imageFilesData\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: page.title,\n dataBuffer: pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n 
}\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,IAAA,GAAAC,sBAAA,CAAAC,OAAA;AAGA,IAAAC,OAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,MAAA,GAAAF,OAAA;AAOO,MAAMG,YAAY,CAAC;EAGtBC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,IAAU,EAAEC,kBAA0B,EAAE;IAClD;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACH,IAAI,CAACI,OAAO,IAAI,CAAC,CAAC,CAAC;IACtD;IACA,MAAMC,kBAAkB,GAAG,CACvB,IAAAC,YAAG,EAACN,IAAI,EAAE,wBAAwB,CAAC,EACnC,IAAAM,YAAG,EAACN,IAAI,EAAE,uBAAuB,CAAC,CACrC,CAACO,MAAM,CAACC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAAG,CAAC;IAErC,MAAMC,OAAO,GAAG,CAAC,GAAGR,KAAK,EAAE,GAAGG,kBAAkB,CAAC,CAACM,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IAChF;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAAClB,WAAW,CAACmB,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,QAAQ,GAAG;MACbrB,IAAI,EAAE;QACFI,OAAO,EAAEJ,IAAI,CAACI,OAAO;QACrBkB,KAAK,EAAEtB,IAAI,CAACsB,KAAK;QACjBC,IAAI,EAAEvB,IAAI,CAACuB,IAAI;QACfC,OAAO,EAAExB,IAAI,CAACwB,OAAO;QACrBC,MAAM,EAAEzB,IAAI,CAACyB,MAAM;QACnBC,QAAQ,EAAE1B,IAAI,CAAC0B;MACnB,CAAC;MACDxB,KAAK,EAAEY;IACX,CAAC;IACD,MAAMa,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,QAAQ,CAAC,CAAC;IAE5D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACRhC,KAAK,EAAEY,cAAc;QACrBqB,IAAI,EAAEnC,IAAI,CAACsB,KAAK;QAChBc,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAEpC;IACpB,CAAC,CAAC;IAEF,OAAO+B,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAA3C,YAAA,GAAAA,YAAA"}
package/export/process/exporters/PageTemplateExporter.d.ts
CHANGED
@@ -7,5 +7,5 @@ export interface ExportedTemplateData {
export declare class PageTemplateExporter {
private fileManager;
constructor(fileManager: FileManagerContext["fileManager"]);
- execute(template: PageTemplate, exportTemplatesDataKey: string): Promise<import("aws-sdk/
+ execute(template: PageTemplate, exportTemplatesDataKey: string): Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadOutput>;
}
package/export/process/exporters/PageTemplateExporter.js
CHANGED
@@ -5,12 +5,10 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.PageTemplateExporter = void 0;
- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
var _zipper = _interopRequireDefault(require("../../zipper"));
var _utils = require("../../utils");
class PageTemplateExporter {
constructor(fileManager) {
- (0, _defineProperty2.default)(this, "fileManager", void 0);
this.fileManager = fileManager;
}
async execute(template, exportTemplatesDataKey) {
package/export/process/exporters/PageTemplateExporter.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","PageTemplateExporter","constructor","fileManager","
+
{"version":3,"names":["_zipper","_interopRequireDefault","require","_utils","PageTemplateExporter","constructor","fileManager","execute","template","exportTemplatesDataKey","files","extractFilesFromData","content","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","where","id_in","push","templateData","title","slug","tags","description","layout","pageCategory","templateDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exports"],"sources":["PageTemplateExporter.ts"],"sourcesContent":["import { PageTemplate } from \"@webiny/api-page-builder/types\";\nimport { File, FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport Zipper from \"~/export/zipper\";\nimport { extractFilesFromData } from \"~/export/utils\";\n\nexport interface ExportedTemplateData {\n template: Pick<\n PageTemplate,\n \"title\" | \"slug\" | \"tags\" | \"description\" | \"content\" | \"layout\" | \"pageCategory\"\n >;\n files: File[];\n}\n\nexport class PageTemplateExporter {\n private fileManager: FileManagerContext[\"fileManager\"];\n\n constructor(fileManager: FileManagerContext[\"fileManager\"]) {\n this.fileManager = fileManager;\n }\n\n async execute(template: PageTemplate, exportTemplatesDataKey: string) {\n // Extract all files\n const files = extractFilesFromData(template.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await this.fileManager.listFiles({ where: { id_in: fileIds } });\n imageFilesData.push(...filesData);\n }\n\n // Extract the template data in a json file and upload it to S3\n const templateData = {\n template: {\n title: template.title,\n slug: template.slug,\n tags: template.tags,\n description: template.description,\n content: template.content,\n layout: template.layout,\n pageCategory: template.pageCategory\n },\n files: imageFilesData\n };\n const templateDataBuffer = Buffer.from(JSON.stringify(templateData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: template.title,\n dataBuffer: templateDataBuffer\n },\n archiveFileKey: exportTemplatesDataKey\n });\n\n return zipper.process();\n 
}\n}\n"],"mappings":";;;;;;;AAEA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAUO,MAAME,oBAAoB,CAAC;EAG9BC,WAAWA,CAACC,WAA8C,EAAE;IACxD,IAAI,CAACA,WAAW,GAAGA,WAAW;EAClC;EAEA,MAAMC,OAAOA,CAACC,QAAsB,EAAEC,sBAA8B,EAAE;IAClE;IACA,MAAMC,KAAK,GAAG,IAAAC,2BAAoB,EAACH,QAAQ,CAACI,OAAO,IAAI,CAAC,CAAC,CAAC;IAC1D,MAAMC,OAAO,GAAGH,KAAK,CAACI,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;IACpD;IACA,MAAMC,cAAc,GAAG,EAAE;IACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;MACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAM,IAAI,CAACb,WAAW,CAACc,SAAS,CAAC;QAAEC,KAAK,EAAE;UAAEC,KAAK,EAAET;QAAQ;MAAE,CAAC,CAAC;MACnFI,cAAc,CAACM,IAAI,CAAC,GAAGJ,SAAS,CAAC;IACrC;;IAEA;IACA,MAAMK,YAAY,GAAG;MACjBhB,QAAQ,EAAE;QACNiB,KAAK,EAAEjB,QAAQ,CAACiB,KAAK;QACrBC,IAAI,EAAElB,QAAQ,CAACkB,IAAI;QACnBC,IAAI,EAAEnB,QAAQ,CAACmB,IAAI;QACnBC,WAAW,EAAEpB,QAAQ,CAACoB,WAAW;QACjChB,OAAO,EAAEJ,QAAQ,CAACI,OAAO;QACzBiB,MAAM,EAAErB,QAAQ,CAACqB,MAAM;QACvBC,YAAY,EAAEtB,QAAQ,CAACsB;MAC3B,CAAC;MACDpB,KAAK,EAAEO;IACX,CAAC;IACD,MAAMc,kBAAkB,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACX,YAAY,CAAC,CAAC;IAEpE,MAAMY,MAAM,GAAG,IAAIC,eAAM,CAAC;MACtBC,UAAU,EAAE;QACR5B,KAAK,EAAEO,cAAc;QACrBsB,IAAI,EAAE/B,QAAQ,CAACiB,KAAK;QACpBe,UAAU,EAAET;MAChB,CAAC;MACDU,cAAc,EAAEhC;IACpB,CAAC,CAAC;IAEF,OAAO2B,MAAM,CAACM,OAAO,CAAC,CAAC;EAC3B;AACJ;AAACC,OAAA,CAAAvC,oBAAA,GAAAA,oBAAA"}
package/export/s3Stream.d.ts
CHANGED
@@ -1,34 +1,34 @@
/// <reference types="node" />
/// <reference types="node" />
import { Readable } from "stream";
- import S3 from "aws-sdk/
+ import { S3, ListObjectsOutput, DeleteObjectOutput, HeadObjectOutput } from "@webiny/aws-sdk/client-s3";
declare class S3Stream {
s3: S3;
bucket: string;
constructor();
- getPresignedUrl(key
+ getPresignedUrl(key?: string): Promise<string>;
/**
* We're checking if the file is accessible on S3 by getting object meta data.
* It help us to filter files that we need to download as part of export data.
* @param Key {string}
*/
isFileAccessible(Key: string): Promise<boolean>;
- getObjectHead(Key: string): Promise<
- readStream(Key: string): Readable
+ getObjectHead(Key: string): Promise<HeadObjectOutput>;
+ readStream(Key: string): Promise<Readable>;
writeStream(Key: string, contentType?: string): {
streamPassThrough: import("stream").PassThrough;
/**
* We're not using the `FileManager` storage plugin here because it currently doesn't support streams.
*/
- streamPassThroughUploadPromise: Promise<
+ streamPassThroughUploadPromise: Promise<import("@aws-sdk/client-s3").AbortMultipartUploadCommandOutput | import("@webiny/aws-sdk/client-s3").CompleteMultipartUploadCommandOutput>;
};
upload(params: {
Key: string;
ContentType: string;
Body: Buffer;
- }): Promise<
- listObject(prefix: string): Promise<
- deleteObject(key: string): Promise<
+ }): Promise<void>;
+ listObject(prefix: string): Promise<ListObjectsOutput>;
+ deleteObject(key: string): Promise<DeleteObjectOutput>;
}
export declare const s3Stream: S3Stream;
export {};
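These typing changes are visible to consumers of the s3Stream singleton. A hedged sketch of how calling code adapts follows; the deep import path is an assumption based on the compiled file layout shown in this diff.

// Sketch: readStream() must now be awaited, and listObject() resolves with the
// v3 ListObjectsOutput shape.
import { s3Stream } from "@webiny/api-page-builder-import-export/export/s3Stream";
import { pipeline } from "stream/promises";
import { createWriteStream } from "fs";

async function downloadExportArchive(key: string) {
    // beta.0: readStream() returned a Readable synchronously.
    // beta.2: it resolves the GetObjectCommand response body, so it must be awaited.
    const body = await s3Stream.readStream(key);
    await pipeline(body, createWriteStream("/tmp/export.zip"));

    // Contents is optional on ListObjectsOutput, hence the default.
    const { Contents = [] } = await s3Stream.listObject("export/");
    return Contents.map(object => object.Key);
}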
package/export/s3Stream.js
CHANGED
@@ -1,29 +1,26 @@
"use strict";

- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.s3Stream = void 0;
- var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
var _stream = require("stream");
- var
+ var _clientS = require("@webiny/aws-sdk/client-s3");
+ var _libStorage = require("@webiny/aws-sdk/lib-storage");
const ARCHIVE_CONTENT_TYPE = "application/zip";
class S3Stream {
constructor() {
-
- (0, _defineProperty2.default)(this, "bucket", void 0);
- this.s3 = new _s.default({
+ this.s3 = new _clientS.S3({
region: process.env.AWS_REGION
});
this.bucket = process.env.S3_BUCKET;
}
getPresignedUrl(key) {
- return this.s3.
+ return (0, _clientS.getSignedUrl)(this.s3, new _clientS.GetObjectCommand({
Bucket: this.bucket,
- Key: key
-
+ Key: key
+ }), {
+ expiresIn: 604800 // 1 week
});
}

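For reference, the new presigned-URL logic expressed with the upstream AWS SDK v3 packages is sketched below. The @webiny/aws-sdk wrapper appears to re-export these APIs; that mapping, and the standalone function shape, are assumptions rather than part of the diff.

// Sketch of the getPresignedUrl() change: a GetObjectCommand signed via getSignedUrl().
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

const s3 = new S3Client({ region: process.env.AWS_REGION });

function getPresignedUrl(bucket: string, key?: string): Promise<string> {
    return getSignedUrl(
        s3,
        new GetObjectCommand({ Bucket: bucket, Key: key }),
        { expiresIn: 604800 } // one week, the maximum lifetime of a SigV4 presigned URL
    );
}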
@@ -46,13 +43,14 @@ class S3Stream {
return this.s3.headObject({
Bucket: this.bucket,
Key
- })
+ });
}
- readStream(Key) {
-
+ async readStream(Key) {
+ const response = await this.s3.send(new _clientS.GetObjectCommand({
Bucket: this.bucket,
Key
- })
+ }));
+ return response.Body;
}
writeStream(Key, contentType = ARCHIVE_CONTENT_TYPE) {
const streamPassThrough = new _stream.Stream.PassThrough();
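The readStream() rewrite above reflects how object bodies are retrieved in SDK v3. A standalone sketch of that logic, with illustrative names that are not part of the package, follows.

// Sketch: in the Node.js runtime the v3 GetObject response Body is a Readable stream,
// and the package returns it with the same cast (`response.Body as Readable`).
import { Readable } from "stream";
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

async function readObjectStream(s3: S3Client, bucket: string, key: string): Promise<Readable> {
    const response = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
    return response.Body as Readable;
}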
@@ -63,31 +61,36 @@ class S3Stream {
ContentType: contentType,
Key
};
+ const upload = new _libStorage.Upload({
+ client: this.s3,
+ params
+ });
return {
streamPassThrough: streamPassThrough,
/**
* We're not using the `FileManager` storage plugin here because it currently doesn't support streams.
*/
- streamPassThroughUploadPromise:
+ streamPassThroughUploadPromise: upload.done()
};
}
- upload(params) {
-
+ async upload(params) {
+ await this.s3.send(new _clientS.PutObjectCommand({
ACL: "private",
- Bucket: this.bucket
-
+ Bucket: this.bucket,
+ ...params
+ }));
}
listObject(prefix) {
return this.s3.listObjects({
Bucket: this.bucket,
Prefix: prefix
- })
+ });
}
deleteObject(key) {
return this.s3.deleteObject({
Key: key,
Bucket: this.bucket
- })
+ });
}
}
const s3Stream = new S3Stream();
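The writeStream() hunk above replaces aws-sdk v2's s3.upload() with the Upload helper from lib-storage. The sketch below restates that pattern with the upstream @aws-sdk packages; the Webiny wrapper re-exports Upload, but the standalone function here is illustrative only.

// Sketch: a PassThrough is handed to callers while Upload streams it to S3 as a
// multipart upload; done() resolves with the multipart (or abort) output.
import { PassThrough } from "stream";
import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

function writeStream(s3: S3Client, bucket: string, key: string, contentType = "application/zip") {
    const streamPassThrough = new PassThrough();

    const upload = new Upload({
        client: s3,
        params: {
            ACL: "private",
            Body: streamPassThrough,
            Bucket: bucket,
            ContentType: contentType,
            Key: key
        }
    });

    // Callers pipe the archive into `streamPassThrough` and await the promise once
    // the stream has ended.
    return { streamPassThrough, streamPassThroughUploadPromise: upload.done() };
}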
package/export/s3Stream.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"names":["_stream","require","
+
{"version":3,"names":["_stream","require","_clientS","_libStorage","ARCHIVE_CONTENT_TYPE","S3Stream","constructor","s3","S3","region","process","env","AWS_REGION","bucket","S3_BUCKET","getPresignedUrl","key","getSignedUrl","GetObjectCommand","Bucket","Key","expiresIn","isFileAccessible","getObjectHead","error","console","warn","log","headObject","readStream","response","send","Body","writeStream","contentType","streamPassThrough","Stream","PassThrough","params","ACL","ContentType","upload","Upload","client","streamPassThroughUploadPromise","done","PutObjectCommand","listObject","prefix","listObjects","Prefix","deleteObject","s3Stream","exports"],"sources":["s3Stream.ts"],"sourcesContent":["import { Stream, Readable } from \"stream\";\nimport {\n S3,\n ListObjectsOutput,\n DeleteObjectOutput,\n HeadObjectOutput,\n getSignedUrl,\n GetObjectCommand,\n PutObjectCommand,\n PutObjectCommandInput\n} from \"@webiny/aws-sdk/client-s3\";\nimport { Upload } from \"@webiny/aws-sdk/lib-storage\";\n\nconst ARCHIVE_CONTENT_TYPE = \"application/zip\";\n\nclass S3Stream {\n s3: S3;\n bucket: string;\n\n constructor() {\n this.s3 = new S3({\n region: process.env.AWS_REGION as string\n });\n this.bucket = process.env.S3_BUCKET as string;\n }\n\n getPresignedUrl(key?: string) {\n return getSignedUrl(\n this.s3,\n new GetObjectCommand({\n Bucket: this.bucket,\n Key: key\n }),\n {\n expiresIn: 604800 // 1 week\n }\n );\n }\n\n /**\n * We're checking if the file is accessible on S3 by getting object meta data.\n * It help us to filter files that we need to download as part of export data.\n * @param Key {string}\n */\n async isFileAccessible(Key: string): Promise<boolean> {\n try {\n await this.getObjectHead(Key);\n return true;\n } catch (error) {\n console.warn(`Error while fetching meta data for file \"${Key}\"`);\n console.log(error);\n return false;\n }\n }\n\n getObjectHead(Key: string): Promise<HeadObjectOutput> {\n return this.s3.headObject({ Bucket: this.bucket, Key });\n }\n\n async readStream(Key: string): Promise<Readable> {\n const response = await this.s3.send(new GetObjectCommand({ Bucket: this.bucket, Key }));\n return response.Body as Readable;\n }\n\n writeStream(Key: string, contentType: string = ARCHIVE_CONTENT_TYPE) {\n const streamPassThrough = new Stream.PassThrough();\n\n const params: PutObjectCommandInput = {\n ACL: \"private\",\n Body: streamPassThrough,\n Bucket: this.bucket,\n ContentType: contentType,\n Key\n };\n\n const upload = new Upload({\n client: this.s3,\n params\n });\n\n return {\n streamPassThrough: streamPassThrough,\n /**\n * We're not using the `FileManager` storage plugin here because it currently doesn't support streams.\n */\n streamPassThroughUploadPromise: upload.done()\n };\n }\n\n async upload(params: { Key: string; ContentType: string; Body: Buffer }): Promise<void> {\n await this.s3.send(\n new PutObjectCommand({\n ACL: \"private\",\n Bucket: this.bucket,\n ...params\n })\n );\n }\n\n listObject(prefix: string): Promise<ListObjectsOutput> {\n return this.s3.listObjects({\n Bucket: this.bucket,\n Prefix: prefix\n });\n }\n\n deleteObject(key: string): Promise<DeleteObjectOutput> {\n return this.s3.deleteObject({ Key: key, Bucket: this.bucket });\n }\n}\n\nexport const s3Stream = new 
S3Stream();\n"],"mappings":";;;;;;AAAA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AAUA,IAAAE,WAAA,GAAAF,OAAA;AAEA,MAAMG,oBAAoB,GAAG,iBAAiB;AAE9C,MAAMC,QAAQ,CAAC;EAIXC,WAAWA,CAAA,EAAG;IACV,IAAI,CAACC,EAAE,GAAG,IAAIC,WAAE,CAAC;MACbC,MAAM,EAAEC,OAAO,CAACC,GAAG,CAACC;IACxB,CAAC,CAAC;IACF,IAAI,CAACC,MAAM,GAAGH,OAAO,CAACC,GAAG,CAACG,SAAmB;EACjD;EAEAC,eAAeA,CAACC,GAAY,EAAE;IAC1B,OAAO,IAAAC,qBAAY,EACf,IAAI,CAACV,EAAE,EACP,IAAIW,yBAAgB,CAAC;MACjBC,MAAM,EAAE,IAAI,CAACN,MAAM;MACnBO,GAAG,EAAEJ;IACT,CAAC,CAAC,EACF;MACIK,SAAS,EAAE,MAAM,CAAC;IACtB,CACJ,CAAC;EACL;;EAEA;AACJ;AACA;AACA;AACA;EACI,MAAMC,gBAAgBA,CAACF,GAAW,EAAoB;IAClD,IAAI;MACA,MAAM,IAAI,CAACG,aAAa,CAACH,GAAG,CAAC;MAC7B,OAAO,IAAI;IACf,CAAC,CAAC,OAAOI,KAAK,EAAE;MACZC,OAAO,CAACC,IAAI,CAAE,4CAA2CN,GAAI,GAAE,CAAC;MAChEK,OAAO,CAACE,GAAG,CAACH,KAAK,CAAC;MAClB,OAAO,KAAK;IAChB;EACJ;EAEAD,aAAaA,CAACH,GAAW,EAA6B;IAClD,OAAO,IAAI,CAACb,EAAE,CAACqB,UAAU,CAAC;MAAET,MAAM,EAAE,IAAI,CAACN,MAAM;MAAEO;IAAI,CAAC,CAAC;EAC3D;EAEA,MAAMS,UAAUA,CAACT,GAAW,EAAqB;IAC7C,MAAMU,QAAQ,GAAG,MAAM,IAAI,CAACvB,EAAE,CAACwB,IAAI,CAAC,IAAIb,yBAAgB,CAAC;MAAEC,MAAM,EAAE,IAAI,CAACN,MAAM;MAAEO;IAAI,CAAC,CAAC,CAAC;IACvF,OAAOU,QAAQ,CAACE,IAAI;EACxB;EAEAC,WAAWA,CAACb,GAAW,EAAEc,WAAmB,GAAG9B,oBAAoB,EAAE;IACjE,MAAM+B,iBAAiB,GAAG,IAAIC,cAAM,CAACC,WAAW,CAAC,CAAC;IAElD,MAAMC,MAA6B,GAAG;MAClCC,GAAG,EAAE,SAAS;MACdP,IAAI,EAAEG,iBAAiB;MACvBhB,MAAM,EAAE,IAAI,CAACN,MAAM;MACnB2B,WAAW,EAAEN,WAAW;MACxBd;IACJ,CAAC;IAED,MAAMqB,MAAM,GAAG,IAAIC,kBAAM,CAAC;MACtBC,MAAM,EAAE,IAAI,CAACpC,EAAE;MACf+B;IACJ,CAAC,CAAC;IAEF,OAAO;MACHH,iBAAiB,EAAEA,iBAAiB;MACpC;AACZ;AACA;MACYS,8BAA8B,EAAEH,MAAM,CAACI,IAAI,CAAC;IAChD,CAAC;EACL;EAEA,MAAMJ,MAAMA,CAACH,MAA0D,EAAiB;IACpF,MAAM,IAAI,CAAC/B,EAAE,CAACwB,IAAI,CACd,IAAIe,yBAAgB,CAAC;MACjBP,GAAG,EAAE,SAAS;MACdpB,MAAM,EAAE,IAAI,CAACN,MAAM;MACnB,GAAGyB;IACP,CAAC,CACL,CAAC;EACL;EAEAS,UAAUA,CAACC,MAAc,EAA8B;IACnD,OAAO,IAAI,CAACzC,EAAE,CAAC0C,WAAW,CAAC;MACvB9B,MAAM,EAAE,IAAI,CAACN,MAAM;MACnBqC,MAAM,EAAEF;IACZ,CAAC,CAAC;EACN;EAEAG,YAAYA,CAACnC,GAAW,EAA+B;IACnD,OAAO,IAAI,CAACT,EAAE,CAAC4C,YAAY,CAAC;MAAE/B,GAAG,EAAEJ,GAAG;MAAEG,MAAM,EAAE,IAAI,CAACN;IAAO,CAAC,CAAC;EAClE;AACJ;AAEO,MAAMuC,QAAQ,GAAG,IAAI/C,QAAQ,CAAC,CAAC;AAACgD,OAAA,CAAAD,QAAA,GAAAA,QAAA"}