@webiny/api-page-builder-import-export 0.0.0-unstable.1e66d121db → 0.0.0-unstable.611c5af35e
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.js +2 -6
- package/client.js.map +1 -1
- package/exportPages/combine/index.js +11 -19
- package/exportPages/combine/index.js.map +1 -1
- package/exportPages/process/index.js +19 -35
- package/exportPages/process/index.js.map +1 -1
- package/exportPages/s3Stream.js +1 -20
- package/exportPages/s3Stream.js.map +1 -1
- package/exportPages/utils.js +18 -31
- package/exportPages/utils.js.map +1 -1
- package/exportPages/zipper.js +29 -41
- package/exportPages/zipper.js.map +1 -1
- package/graphql/crud/pageImportExportTasks.crud.js +9 -57
- package/graphql/crud/pageImportExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.js +21 -39
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud.js +0 -5
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/pageImportExportTasks.gql.js +1 -6
- package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.js +1 -6
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +0 -4
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.js +0 -5
- package/graphql/index.js.map +1 -1
- package/importPages/create/index.js +9 -20
- package/importPages/create/index.js.map +1 -1
- package/importPages/process/index.js +24 -29
- package/importPages/process/index.js.map +1 -1
- package/importPages/utils.js +44 -126
- package/importPages/utils.js.map +1 -1
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +23 -23
- package/types.js +0 -5
- package/types.js.map +1 -1
package/exportPages/utils.js
CHANGED
@@ -1,59 +1,51 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.EXPORT_PAGES_FOLDER_KEY = void 0;
 exports.exportPage = exportPage;
 exports.extractFilesFromPageData = extractFilesFromPageData;
-
 var _get = _interopRequireDefault(require("lodash/get"));
-
 var _s3Stream = require("./s3Stream");
-
 var _zipper = _interopRequireDefault(require("./zipper"));
-
 const EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
 exports.EXPORT_PAGES_FOLDER_KEY = EXPORT_PAGES_FOLDER_KEY;
-
 async function getFilteredFiles(files) {
   const uniqueFileKeys = new Map();
   const promises = files.map(file => _s3Stream.s3Stream.isFileAccessible(file.key));
   const isFileAvailableResults = await Promise.all(promises);
-  const filesAvailableForDownload = [];
-
+  const filesAvailableForDownload = [];
+  // Filter files
   for (let i = 0; i < files.length; i++) {
-    const file = files[i];
-
+    const file = files[i];
+    // Check file accessibility
     if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {
       filesAvailableForDownload.push(file);
       uniqueFileKeys.set(file.key, true);
     }
   }
-
   return filesAvailableForDownload;
 }
-
 async function exportPage(page, exportPagesDataKey, fileManager) {
   // Extract all files
-  const files = extractFilesFromPageData(page.content || {});
-
-  const filesAvailableForDownload = await getFilteredFiles(files);
-
+  const files = extractFilesFromPageData(page.content || {});
+  // Filter files
+  const filesAvailableForDownload = await getFilteredFiles(files);
+  // Extract images from page settings
   const pageSettingsImages = [(0, _get.default)(page, "settings.general.image"), (0, _get.default)(page, "settings.social.image")].filter(image => image && image.src);
-  const pageSettingsImagesData = [];
-
+  const pageSettingsImagesData = [];
+  // Get file data for all images inside "page.settings"
   for (let i = 0; i < pageSettingsImages.length; i++) {
     const {
       id
     } = pageSettingsImages[i];
     const file = await fileManager.files.getFile(id);
     pageSettingsImagesData.push(file);
-  }
-
+  }
 
+  // Extract the page data in a json file and upload it to S3
   const pageData = {
     page: {
       content: page.content,
@@ -76,29 +68,25 @@ async function exportPage(page, exportPagesDataKey, fileManager) {
   });
   return zipper.process();
 }
-
 function extractFilesFromPageData(data, files = []) {
   // Base case: termination
   if (!data || typeof data !== "object") {
     return files;
-  }
-
-
+  }
+  // Recursively call function for each element
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
       extractFilesFromPageData(element, files);
     }
-
     return files;
-  }
-
+  }
 
+  // Main
   const tuple = Object.entries(data);
-
   for (let i = 0; i < tuple.length; i++) {
-    const [key, value] = tuple[i];
-
+    const [key, value] = tuple[i];
+    // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
     if (key === "file" && value) {
       files.push(value);
     } else if (key === "images" && Array.isArray(value)) {
@@ -108,6 +96,5 @@ function extractFilesFromPageData(data, files = []) {
       extractFilesFromPageData(value, files);
     }
   }
-
   return files;
 }
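The changes above are whitespace and comment placement only, but for readers skimming the diff: `extractFilesFromPageData` recursively walks a page's content tree and collects every `file` entry and `images` array it finds. A minimal sketch of calling it, assuming the compiled module is importable at this subpath; the content shape and file objects are illustrative, not taken from the package:

```ts
import { extractFilesFromPageData } from "@webiny/api-page-builder-import-export/exportPages/utils";

// Hypothetical page content: nested elements holding "file" and "images" entries.
const content = {
    elements: [
        { data: { file: { id: "f1", key: "demo/header.png" } } },
        { data: { images: [{ id: "f2", key: "demo/a.jpg" }] } }
    ]
};

// Walks objects and arrays recursively; collects both single files and image lists.
const files = extractFilesFromPageData(content);
console.log(files.map(f => f.key)); // ["demo/header.png", "demo/a.jpg"]
```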
package/exportPages/utils.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","getFilteredFiles","files","uniqueFileKeys","Map","promises","map","file","s3Stream","isFileAccessible","key","isFileAvailableResults","Promise","all","filesAvailableForDownload","i","length","has","push","set","exportPage","page","exportPagesDataKey","fileManager","extractFilesFromPageData","content","pageSettingsImages","get","filter","image","src","pageSettingsImagesData","id","getFile","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","pageTitle","archiveFileKey","process","data","Array","isArray","element","tuple","Object","entries","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, File } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport { s3Stream } from \"./s3Stream\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nasync function getFilteredFiles(files: ImageFile[]) {\n    const uniqueFileKeys = new Map<string, boolean>();\n    const promises = files.map(file => s3Stream.isFileAccessible(file.key));\n    const isFileAvailableResults = await Promise.all(promises);\n\n    const filesAvailableForDownload = [];\n    // Filter files\n    for (let i = 0; i < files.length; i++) {\n        const file = files[i];\n        // Check file accessibility\n        if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {\n            filesAvailableForDownload.push(file);\n            uniqueFileKeys.set(file.key, true);\n        }\n    }\n    return filesAvailableForDownload;\n}\n\nexport interface ExportedPageData {\n    page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n    files: ImageFile[];\n}\n\nexport async function exportPage(\n    page: Page,\n    exportPagesDataKey: string,\n    fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n    // Extract all files\n    const files = extractFilesFromPageData(page.content || {});\n    // Filter files\n    const filesAvailableForDownload = await getFilteredFiles(files);\n    // Extract images from page settings\n    const pageSettingsImages = [\n        get(page, \"settings.general.image\"),\n        get(page, \"settings.social.image\")\n    ].filter(image => image && image.src);\n    const pageSettingsImagesData = [];\n    // Get file data for all images inside \"page.settings\"\n    for (let i = 0; i < pageSettingsImages.length; i++) {\n        const { id } = pageSettingsImages[i];\n        const file = await fileManager.files.getFile(id);\n        pageSettingsImagesData.push(file);\n    }\n\n    // Extract the page data in a json file and upload it to S3\n    const pageData = {\n        page: {\n            content: page.content,\n            title: page.title,\n            path: page.path,\n            version: page.version,\n            status: page.status,\n            settings: page.settings\n        },\n        files: [...filesAvailableForDownload, ...pageSettingsImagesData]\n    };\n    const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n    const zipper = new Zipper({\n        exportInfo: {\n            files: [...filesAvailableForDownload, ...pageSettingsImagesData],\n            pageTitle: page.title,\n            pageDataBuffer\n        },\n        archiveFileKey: exportPagesDataKey\n    });\n\n    return zipper.process();\n}\n\nexport interface ImageFile extends Omit<File, \"src\"> {\n    key: string;\n}\n\nexport function extractFilesFromPageData(\n    data: Record<string, any>,\n    files: any[] = []\n): ImageFile[] {\n    // Base case: termination\n    if (!data || typeof data !== \"object\") {\n        return files;\n    }\n    // Recursively call function for each element\n    if (Array.isArray(data)) {\n        for (let i = 0; i < data.length; i++) {\n            const element = data[i];\n            extractFilesFromPageData(element, files);\n        }\n        return files;\n    }\n\n    // Main\n    const tuple = Object.entries(data);\n    for (let i = 0; i < tuple.length; i++) {\n        const [key, value] = tuple[i];\n        // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n        if (key === \"file\" && value) {\n            files.push(value);\n        } else if (key === \"images\" && Array.isArray(value)) {\n            // Handle case for \"images-list\" component\n            files.push(...value);\n        } else {\n            extractFilesFromPageData(value, files);\n        }\n    }\n    return files;\n}\n"],"mappings":"
+{"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","getFilteredFiles","files","uniqueFileKeys","Map","promises","map","file","s3Stream","isFileAccessible","key","isFileAvailableResults","Promise","all","filesAvailableForDownload","i","length","has","push","set","exportPage","page","exportPagesDataKey","fileManager","extractFilesFromPageData","content","pageSettingsImages","get","filter","image","src","pageSettingsImagesData","id","getFile","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","pageTitle","archiveFileKey","process","data","Array","isArray","element","tuple","Object","entries","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, File } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport { s3Stream } from \"./s3Stream\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nasync function getFilteredFiles(files: ImageFile[]) {\n    const uniqueFileKeys = new Map<string, boolean>();\n    const promises = files.map(file => s3Stream.isFileAccessible(file.key));\n    const isFileAvailableResults = await Promise.all(promises);\n\n    const filesAvailableForDownload = [];\n    // Filter files\n    for (let i = 0; i < files.length; i++) {\n        const file = files[i];\n        // Check file accessibility\n        if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {\n            filesAvailableForDownload.push(file);\n            uniqueFileKeys.set(file.key, true);\n        }\n    }\n    return filesAvailableForDownload;\n}\n\nexport interface ExportedPageData {\n    page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n    files: ImageFile[];\n}\n\nexport async function exportPage(\n    page: Page,\n    exportPagesDataKey: string,\n    fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n    // Extract all files\n    const files = extractFilesFromPageData(page.content || {});\n    // Filter files\n    const filesAvailableForDownload = await getFilteredFiles(files);\n    // Extract images from page settings\n    const pageSettingsImages = [\n        get(page, \"settings.general.image\") as unknown as File,\n        get(page, \"settings.social.image\") as unknown as File\n    ].filter(image => image && image.src);\n    const pageSettingsImagesData = [];\n    // Get file data for all images inside \"page.settings\"\n    for (let i = 0; i < pageSettingsImages.length; i++) {\n        const { id } = pageSettingsImages[i];\n        const file = await fileManager.files.getFile(id);\n        pageSettingsImagesData.push(file);\n    }\n\n    // Extract the page data in a json file and upload it to S3\n    const pageData = {\n        page: {\n            content: page.content,\n            title: page.title,\n            path: page.path,\n            version: page.version,\n            status: page.status,\n            settings: page.settings\n        },\n        files: [...filesAvailableForDownload, ...pageSettingsImagesData]\n    };\n    const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n    const zipper = new Zipper({\n        exportInfo: {\n            files: [...filesAvailableForDownload, ...pageSettingsImagesData],\n            pageTitle: page.title,\n            pageDataBuffer\n        },\n        archiveFileKey: exportPagesDataKey\n    });\n\n    return zipper.process();\n}\n\nexport interface ImageFile extends Omit<File, \"src\"> {\n    key: string;\n}\n\nexport function extractFilesFromPageData(\n    data: Record<string, any>,\n    files: any[] = []\n): ImageFile[] {\n    // Base case: termination\n    if (!data || typeof data !== \"object\") {\n        return files;\n    }\n    // Recursively call function for each element\n    if (Array.isArray(data)) {\n        for (let i = 0; i < data.length; i++) {\n            const element = data[i];\n            extractFilesFromPageData(element, files);\n        }\n        return files;\n    }\n\n    // Main\n    const tuple = Object.entries(data);\n    for (let i = 0; i < tuple.length; i++) {\n        const [key, value] = tuple[i];\n        // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n        if (key === \"file\" && value) {\n            files.push(value);\n        } else if (key === \"images\" && Array.isArray(value)) {\n            // Handle case for \"images-list\" component\n            files.push(...value);\n        } else {\n            extractFilesFromPageData(value, files);\n        }\n    }\n    return files;\n}\n"],"mappings":";;;;;;;;;AAGA;AACA;AACA;AAEO,MAAMA,uBAAuB,GAAG,wBAAwB;AAAC;AAEhE,eAAeC,gBAAgB,CAACC,KAAkB,EAAE;EAChD,MAAMC,cAAc,GAAG,IAAIC,GAAG,EAAmB;EACjD,MAAMC,QAAQ,GAAGH,KAAK,CAACI,GAAG,CAACC,IAAI,IAAIC,kBAAQ,CAACC,gBAAgB,CAACF,IAAI,CAACG,GAAG,CAAC,CAAC;EACvE,MAAMC,sBAAsB,GAAG,MAAMC,OAAO,CAACC,GAAG,CAACR,QAAQ,CAAC;EAE1D,MAAMS,yBAAyB,GAAG,EAAE;EACpC;EACA,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGb,KAAK,CAACc,MAAM,EAAED,CAAC,EAAE,EAAE;IACnC,MAAMR,IAAI,GAAGL,KAAK,CAACa,CAAC,CAAC;IACrB;IACA,IAAIJ,sBAAsB,CAACI,CAAC,CAAC,IAAI,CAACZ,cAAc,CAACc,GAAG,CAACV,IAAI,CAACG,GAAG,CAAC,EAAE;MAC5DI,yBAAyB,CAACI,IAAI,CAACX,IAAI,CAAC;MACpCJ,cAAc,CAACgB,GAAG,CAACZ,IAAI,CAACG,GAAG,EAAE,IAAI,CAAC;IACtC;EACJ;EACA,OAAOI,yBAAyB;AACpC;AAOO,eAAeM,UAAU,CAC5BC,IAAU,EACVC,kBAA0B,EAC1BC,WAA8C,EACZ;EAClC;EACA,MAAMrB,KAAK,GAAGsB,wBAAwB,CAACH,IAAI,CAACI,OAAO,IAAI,CAAC,CAAC,CAAC;EAC1D;EACA,MAAMX,yBAAyB,GAAG,MAAMb,gBAAgB,CAACC,KAAK,CAAC;EAC/D;EACA,MAAMwB,kBAAkB,GAAG,CACvB,IAAAC,YAAG,EAACN,IAAI,EAAE,wBAAwB,CAAC,EACnC,IAAAM,YAAG,EAACN,IAAI,EAAE,uBAAuB,CAAC,CACrC,CAACO,MAAM,CAACC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAAG,CAAC;EACrC,MAAMC,sBAAsB,GAAG,EAAE;EACjC;EACA,KAAK,IAAIhB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGW,kBAAkB,CAACV,MAAM,EAAED,CAAC,EAAE,EAAE;IAChD,MAAM;MAAEiB;IAAG,CAAC,GAAGN,kBAAkB,CAACX,CAAC,CAAC;IACpC,MAAMR,IAAI,GAAG,MAAMgB,WAAW,CAACrB,KAAK,CAAC+B,OAAO,CAACD,EAAE,CAAC;IAChDD,sBAAsB,CAACb,IAAI,CAACX,IAAI,CAAC;EACrC;;EAEA;EACA,MAAM2B,QAAQ,GAAG;IACbb,IAAI,EAAE;MACFI,OAAO,EAAEJ,IAAI,CAACI,OAAO;MACrBU,KAAK,EAAEd,IAAI,CAACc,KAAK;MACjBC,IAAI,EAAEf,IAAI,CAACe,IAAI;MACfC,OAAO,EAAEhB,IAAI,CAACgB,OAAO;MACrBC,MAAM,EAAEjB,IAAI,CAACiB,MAAM;MACnBC,QAAQ,EAAElB,IAAI,CAACkB;IACnB,CAAC;IACDrC,KAAK,EAAE,CAAC,GAAGY,yBAAyB,EAAE,GAAGiB,sBAAsB;EACnE,CAAC;EACD,MAAMS,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,QAAQ,CAAC,CAAC;EAE5D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR7C,KAAK,EAAE,CAAC,GAAGY,yBAAyB,EAAE,GAAGiB,sBAAsB,CAAC;MAChEiB,SAAS,EAAE3B,IAAI,CAACc,KAAK;MACrBK;IACJ,CAAC;IACDS,cAAc,EAAE3B;EACpB,CAAC,CAAC;EAEF,OAAOuB,MAAM,CAACK,OAAO,EAAE;AAC3B;AAMO,SAAS1B,wBAAwB,CACpC2B,IAAyB,EACzBjD,KAAY,GAAG,EAAE,EACN;EACX;EACA,IAAI,CAACiD,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IACnC,OAAOjD,KAAK;EAChB;EACA;EACA,IAAIkD,KAAK,CAACC,OAAO,CAACF,IAAI,CAAC,EAAE;IACrB,KAAK,IAAIpC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGoC,IAAI,CAACnC,MAAM,EAAED,CAAC,EAAE,EAAE;MAClC,MAAMuC,OAAO,GAAGH,IAAI,CAACpC,CAAC,CAAC;MACvBS,wBAAwB,CAAC8B,OAAO,EAAEpD,KAAK,CAAC;IAC5C;IACA,OAAOA,KAAK;EAChB;;EAEA;EACA,MAAMqD,KAAK,GAAGC,MAAM,CAACC,OAAO,CAACN,IAAI,CAAC;EAClC,KAAK,IAAIpC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGwC,KAAK,CAACvC,MAAM,EAAED,CAAC,EAAE,EAAE;IACnC,MAAM,CAACL,GAAG,EAAEgD,KAAK,CAAC,GAAGH,KAAK,CAACxC,CAAC,CAAC;IAC7B;IACA,IAAIL,GAAG,KAAK,MAAM,IAAIgD,KAAK,EAAE;MACzBxD,KAAK,CAACgB,IAAI,CAACwC,KAAK,CAAC;IACrB,CAAC,MAAM,IAAIhD,GAAG,KAAK,QAAQ,IAAI0C,KAAK,CAACC,OAAO,CAACK,KAAK,CAAC,EAAE;MACjD;MACAxD,KAAK,CAACgB,IAAI,CAAC,GAAGwC,KAAK,CAAC;IACxB,CAAC,MAAM;MACHlC,wBAAwB,CAACkC,KAAK,EAAExD,KAAK,CAAC;IAC1C;EACJ;EACA,OAAOA,KAAK;AAChB"}
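Decoding the blob above: apart from the regenerated `mappings`, the only difference between the two embedded `sourcesContent` entries is a pair of added type casts in `exportPage`, which compile away entirely (hence the unchanged emitted JS for that line):

```ts
// Before (0.0.0-unstable.1e66d121db)
const pageSettingsImages = [
    get(page, "settings.general.image"),
    get(page, "settings.social.image")
].filter(image => image && image.src);

// After (0.0.0-unstable.611c5af35e)
const pageSettingsImages = [
    get(page, "settings.general.image") as unknown as File,
    get(page, "settings.social.image") as unknown as File
].filter(image => image && image.src);
```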
package/exportPages/zipper.js
CHANGED
@@ -1,29 +1,20 @@
 "use strict";
 
 var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard").default;
-
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = exports.ZipOfZip = void 0;
-
 var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
 var _archiver = _interopRequireDefault(require("archiver"));
-
 var _stream = require("stream");
-
 var path = _interopRequireWildcard(require("path"));
-
 var _kebabCase = _interopRequireDefault(require("lodash/kebabCase"));
-
 var _uniqid = _interopRequireDefault(require("uniqid"));
-
 var _s3Stream = require("./s3Stream");
-
 // TODO: Move "archive" in layer
+
 class Zipper {
   constructor(config) {
     (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -33,7 +24,6 @@ class Zipper {
     this.config = config;
     this.archiveFileName = (0, _uniqid.default)(`${this.config.archiveFileKey}/`, `-${(0, _kebabCase.default)(this.config.exportInfo.pageTitle)}.zip`);
   }
-
   s3DownloadStreams() {
     const exportInfo = this.config.exportInfo;
     const prefix = (0, _uniqid.default)("", `-${(0, _kebabCase.default)(exportInfo.pageTitle)}`);
@@ -50,40 +40,39 @@ class Zipper {
       filename: `${prefix}\\${exportInfo.pageTitle}.json`
     }];
   }
-
   process() {
     const {
       streamPassThrough,
       streamPassThroughUploadPromise
-    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
-
-
-    const s3FilesStreams = this.s3DownloadStreams(); // 2. Prepare zip from the file stream.
-
-    const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
 
+    // 1. Read all files from S3 using stream.
+    const s3FilesStreams = this.s3DownloadStreams();
 
+    // 2. Prepare zip from the file stream.
+    const archive = _archiver.default.create(this.archiveFormat);
+    // Handle archive events.
     archive.on("error", error => {
       throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
-    });
+    });
 
+    // Append all file streams to archive.
     s3FilesStreams.forEach(streamDetails => archive.append(streamDetails.stream, {
       name: streamDetails.filename
-    }));
+    }));
 
-    archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+    // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    archive.pipe(streamPassThrough);
+    // Finalize the archive (ie we are done appending files but streams have to finish yet)
     // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+    archive.finalize();
 
-    archive.finalize(); // 3. Return upload stream promise.
-
+    // 3. Return upload stream promise.
     return streamPassThroughUploadPromise;
   }
-
 }
-
 exports.default = Zipper;
 const PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;
-
 class ZipOfZip {
   constructor(keys) {
     (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -92,7 +81,6 @@ class ZipOfZip {
     this.keys = keys;
     this.archiveFileName = (0, _uniqid.default)("", `-${PAGE_EXPORT_BASENAME}`);
   }
-
   getFileStreams() {
     return this.keys.map(key => {
       return {
@@ -101,35 +89,35 @@ class ZipOfZip {
       };
     });
   }
-
   process() {
     const {
       streamPassThrough,
       streamPassThroughUploadPromise
-    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
-
-
-    const fileStreamDetails = this.getFileStreams(); // 2. Prepare zip from the file stream.
-
-    const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
 
+    // 1. Read all files from S3 using stream.
+    const fileStreamDetails = this.getFileStreams();
 
+    // 2. Prepare zip from the file stream.
+    const archive = _archiver.default.create(this.archiveFormat);
+    // Handle archive events.
     archive.on("error", error => {
       throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
-    });
+    });
 
+    // Append all file streams to archive.
     fileStreamDetails.forEach(streamDetails => archive.append(streamDetails.stream, {
       name: streamDetails.filename
-    }));
+    }));
 
-    archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+    // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    archive.pipe(streamPassThrough);
+    // Finalize the archive (ie we are done appending files but streams have to finish yet)
     // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+    archive.finalize();
 
-    archive.finalize(); // 3. Return upload stream promise.
-
+    // 3. Return upload stream promise.
    return streamPassThroughUploadPromise;
   }
-
 }
-
 exports.ZipOfZip = ZipOfZip;
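Again a formatting-only recompilation. For orientation: `Zipper` streams each exported file out of S3, zips the streams with `archiver`, and pipes the archive straight back into an S3 upload. A minimal usage sketch under the `ZipperConfig` shape from the embedded TypeScript source; the concrete values (keys, titles) are made up, and the import subpath is assumed:

```ts
import Zipper from "@webiny/api-page-builder-import-export/exportPages/zipper";

// Hypothetical inputs; in the package, exportPage() in exportPages/utils.ts builds these.
const zipper = new Zipper({
    exportInfo: {
        // ImageFile[]: S3 keys to stream into the archive's "assets" folder.
        files: [{ id: "f1", key: "demo/header.png" }] as any,
        pageTitle: "Welcome Page",
        pageDataBuffer: Buffer.from(JSON.stringify({ page: { title: "Welcome Page" }, files: [] }))
    },
    archiveFileKey: "WEBINY_PB_EXPORT_PAGES/task-123"
});

// Streams, zips, and uploads; resolves with S3.ManagedUpload.SendData for the zip.
zipper.process().then(result => {
    console.log(result.Key); // e.g. "WEBINY_PB_EXPORT_PAGES/task-123/<uniqid>-welcome-page.zip"
});
```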
package/exportPages/zipper.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","pageTitle","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","pageDataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","name","code","message","stack","forEach","streamDetails","append","pipe","finalize","PAGE_EXPORT_BASENAME","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { ImageFile } from \"./utils\";\n\ninterface FileStreamDetails {\n    stream: Readable;\n    filename: string;\n}\n\ninterface ExportInfo {\n    files: ImageFile[];\n    pageTitle: string;\n    pageDataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n    exportInfo: ExportInfo;\n    archiveFileKey: string;\n}\n\nexport default class Zipper {\n    private readonly archiveFormat = \"zip\";\n    private readonly filesDirName = \"assets\";\n    private readonly archiveFileName: string;\n    config: ZipperConfig;\n\n    constructor(config: ZipperConfig) {\n        this.config = config;\n        this.archiveFileName = uniqueId(\n            `${this.config.archiveFileKey}/`,\n            `-${kebabCase(this.config.exportInfo.pageTitle)}.zip`\n        );\n    }\n\n    s3DownloadStreams(): FileStreamDetails[] {\n        const exportInfo = this.config.exportInfo;\n        const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.pageTitle)}`);\n        const files = exportInfo.files.map(({ key }) => {\n            return {\n                stream: s3Stream.readStream(key),\n                filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n            };\n        });\n\n        return [\n            ...files,\n            {\n                stream: Readable.from(exportInfo.pageDataBuffer),\n                filename: `${prefix}\\\\${exportInfo.pageTitle}.json`\n            }\n        ];\n    }\n\n    process(): Promise<S3.ManagedUpload.SendData> {\n        const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n            this.archiveFileName\n        );\n\n        // 1. Read all files from S3 using stream.\n        const s3FilesStreams = this.s3DownloadStreams();\n\n        // 2. Prepare zip from the file stream.\n        const archive = vending.create(this.archiveFormat);\n        // Handle archive events.\n        archive.on(\"error\", (error: ArchiverError) => {\n            throw new Error(\n                `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n            );\n        });\n\n        // Append all file streams to archive.\n        s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n            archive.append(streamDetails.stream, { name: streamDetails.filename })\n        );\n\n        // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n        archive.pipe(streamPassThrough);\n        // Finalize the archive (ie we are done appending files but streams have to finish yet)\n        // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n        archive.finalize();\n\n        // 3. Return upload stream promise.\n        return streamPassThroughUploadPromise;\n    }\n}\n\nconst PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;\n\nexport class ZipOfZip {\n    private readonly archiveFormat = \"zip\";\n    private readonly archiveFileName: string;\n    keys: string[];\n\n    constructor(keys: string[]) {\n        this.keys = keys;\n        this.archiveFileName = uniqueId(\"\", `-${PAGE_EXPORT_BASENAME}`);\n    }\n\n    getFileStreams(): FileStreamDetails[] {\n        return this.keys.map(key => {\n            return {\n                stream: s3Stream.readStream(key),\n                filename: `${path.basename(key)}`\n            };\n        });\n    }\n\n    process(): Promise<S3.ManagedUpload.SendData> {\n        const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n            this.archiveFileName\n        );\n\n        // 1. Read all files from S3 using stream.\n        const fileStreamDetails = this.getFileStreams();\n\n        // 2. Prepare zip from the file stream.\n        const archive = vending.create(this.archiveFormat);\n        // Handle archive events.\n        archive.on(\"error\", (error: ArchiverError) => {\n            throw new Error(\n                `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n            );\n        });\n\n        // Append all file streams to archive.\n        fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n            archive.append(streamDetails.stream, { name: streamDetails.filename })\n        );\n\n        // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n        archive.pipe(streamPassThrough);\n        // Finalize the archive (ie we are done appending files but streams have to finish yet)\n        // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n        archive.finalize();\n\n        // 3. Return upload stream promise.\n        return streamPassThroughUploadPromise;\n    }\n}\n"],"mappings":"
+{"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","pageTitle","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","pageDataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","name","code","message","stack","forEach","streamDetails","append","pipe","finalize","PAGE_EXPORT_BASENAME","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { ImageFile } from \"./utils\";\n\ninterface FileStreamDetails {\n    stream: Readable;\n    filename: string;\n}\n\ninterface ExportInfo {\n    files: ImageFile[];\n    pageTitle: string;\n    pageDataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n    exportInfo: ExportInfo;\n    archiveFileKey: string;\n}\n\nexport default class Zipper {\n    private readonly archiveFormat = \"zip\";\n    private readonly filesDirName = \"assets\";\n    private readonly archiveFileName: string;\n    config: ZipperConfig;\n\n    constructor(config: ZipperConfig) {\n        this.config = config;\n        this.archiveFileName = uniqueId(\n            `${this.config.archiveFileKey}/`,\n            `-${kebabCase(this.config.exportInfo.pageTitle)}.zip`\n        );\n    }\n\n    s3DownloadStreams(): FileStreamDetails[] {\n        const exportInfo = this.config.exportInfo;\n        const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.pageTitle)}`);\n        const files = exportInfo.files.map(({ key }) => {\n            return {\n                stream: s3Stream.readStream(key),\n                filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n            };\n        });\n\n        return [\n            ...files,\n            {\n                stream: Readable.from(exportInfo.pageDataBuffer),\n                filename: `${prefix}\\\\${exportInfo.pageTitle}.json`\n            }\n        ];\n    }\n\n    process(): Promise<S3.ManagedUpload.SendData> {\n        const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n            this.archiveFileName\n        );\n\n        // 1. Read all files from S3 using stream.\n        const s3FilesStreams = this.s3DownloadStreams();\n\n        // 2. Prepare zip from the file stream.\n        const archive = vending.create(this.archiveFormat);\n        // Handle archive events.\n        archive.on(\"error\", (error: ArchiverError) => {\n            throw new Error(\n                `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n            );\n        });\n\n        // Append all file streams to archive.\n        s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n            archive.append(streamDetails.stream, { name: streamDetails.filename })\n        );\n\n        // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n        archive.pipe(streamPassThrough);\n        // Finalize the archive (ie we are done appending files but streams have to finish yet)\n        // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n        archive.finalize();\n\n        // 3. Return upload stream promise.\n        return streamPassThroughUploadPromise;\n    }\n}\n\nconst PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;\n\nexport class ZipOfZip {\n    private readonly archiveFormat = \"zip\";\n    private readonly archiveFileName: string;\n    keys: string[];\n\n    constructor(keys: string[]) {\n        this.keys = keys;\n        this.archiveFileName = uniqueId(\"\", `-${PAGE_EXPORT_BASENAME}`);\n    }\n\n    getFileStreams(): FileStreamDetails[] {\n        return this.keys.map(key => {\n            return {\n                stream: s3Stream.readStream(key),\n                filename: `${path.basename(key)}`\n            };\n        });\n    }\n\n    process(): Promise<S3.ManagedUpload.SendData> {\n        const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n            this.archiveFileName\n        );\n\n        // 1. Read all files from S3 using stream.\n        const fileStreamDetails = this.getFileStreams();\n\n        // 2. Prepare zip from the file stream.\n        const archive = vending.create(this.archiveFormat);\n        // Handle archive events.\n        archive.on(\"error\", (error: ArchiverError) => {\n            throw new Error(\n                `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n            );\n        });\n\n        // Append all file streams to archive.\n        fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n            archive.append(streamDetails.stream, { name: streamDetails.filename })\n        );\n\n        // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n        archive.pipe(streamPassThrough);\n        // Finalize the archive (ie we are done appending files but streams have to finish yet)\n        // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n        archive.finalize();\n\n        // 3. Return upload stream promise.\n        return streamPassThroughUploadPromise;\n    }\n}\n"],"mappings":";;;;;;;;;AACA;AAEA;AACA;AACA;AACA;AACA;AAPA;;AA0Be,MAAMA,MAAM,CAAC;EAMxBC,WAAW,CAACC,MAAoB,EAAE;IAAA,qDALD,KAAK;IAAA,oDACN,QAAQ;IAAA;IAAA;IAKpC,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,eAAe,GAAG,IAAAC,eAAQ,EAC1B,GAAE,IAAI,CAACF,MAAM,CAACG,cAAe,GAAE,EAC/B,IAAG,IAAAC,kBAAS,EAAC,IAAI,CAACJ,MAAM,CAACK,UAAU,CAACC,SAAS,CAAE,MAAK,CACxD;EACL;EAEAC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,IAAI,CAACL,MAAM,CAACK,UAAU;IACzC,MAAMG,MAAM,GAAG,IAAAN,eAAQ,EAAC,EAAE,EAAG,IAAG,IAAAE,kBAAS,EAACC,UAAU,CAACC,SAAS,CAAE,EAAC,CAAC;IAClE,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAK,CAACC,GAAG,CAAC,CAAC;MAAEC;IAAI,CAAC,KAAK;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEP,MAAO,KAAI,IAAI,CAACQ,YAAa,KAAIC,IAAI,CAACC,QAAQ,CAACP,GAAG,CAAE;MACrE,CAAC;IACL,CAAC,CAAC;IAEF,OAAO,CACH,GAAGF,KAAK,EACR;MACIG,MAAM,EAAEO,gBAAQ,CAACC,IAAI,CAACf,UAAU,CAACgB,cAAc,CAAC;MAChDN,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,SAAU;IACjD,CAAC,CACJ;EACL;EAEAgB,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGX,kBAAQ,CAACY,WAAW,CAC9E,IAAI,CAACxB,eAAe,CACvB;;IAED;IACA,MAAMyB,cAAc,GAAG,IAAI,CAACnB,iBAAiB,EAAE;;IAE/C;IACA,MAAMoB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAX,cAAc,CAACY,OAAO,CAAEC,aAAgC,IACpDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAAC3B,MAAM,EAAE;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAY,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC;AAED,MAAMmB,oBAAoB,GAAI,wBAAuB;AAE9C,MAAMC,QAAQ,CAAC;EAKlB7C,WAAW,CAAC8C,IAAc,EAAE;IAAA,qDAJK,KAAK;IAAA;IAAA;IAKlC,IAAI,CAACA,IAAI,GAAGA,IAAI;IAChB,IAAI,CAAC5C,eAAe,GAAG,IAAAC,eAAQ,EAAC,EAAE,EAAG,IAAGyC,oBAAqB,EAAC,CAAC;EACnE;EAEAG,cAAc,GAAwB;IAClC,OAAO,IAAI,CAACD,IAAI,CAACnC,GAAG,CAACC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEE,IAAI,CAACC,QAAQ,CAACP,GAAG,CAAE;MACpC,CAAC;IACL,CAAC,CAAC;EACN;EAEAW,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGX,kBAAQ,CAACY,WAAW,CAC9E,IAAI,CAACxB,eAAe,CACvB;;IAED;IACA,MAAM8C,iBAAiB,GAAG,IAAI,CAACD,cAAc,EAAE;;IAE/C;IACA,MAAMnB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAU,iBAAiB,CAACT,OAAO,CAAEC,aAAgC,IACvDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAAC3B,MAAM,EAAE;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAY,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC"}
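Here the embedded `sourcesContent` appears byte-identical on both sides; only the `mappings` were regenerated, which matches the whitespace-only JS change above. The companion `ZipOfZip` class bundles already-uploaded per-page archives into one download. A sketch with made-up keys and an assumed import subpath:

```ts
import { ZipOfZip } from "@webiny/api-page-builder-import-export/exportPages/zipper";

// Hypothetical keys of per-page zips previously produced by Zipper.process().
const combined = new ZipOfZip([
    "WEBINY_PB_EXPORT_PAGES/task-123/aaa-home.zip",
    "WEBINY_PB_EXPORT_PAGES/task-123/bbb-about.zip"
]);

// Uploads a single "<uniqid>-WEBINY_PAGE_EXPORT.zip" containing both archives.
combined.process().then(result => console.log(result.Key));
```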
package/graphql/crud/pageImportExportTasks.crud.js
CHANGED
@@ -1,34 +1,21 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = void 0;
-
 var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
-
 var _mdbid = _interopRequireDefault(require("mdbid"));
-
 var _fields = require("@commodo/fields");
-
 var _commodoFieldsObject = require("commodo-fields-object");
-
 var _validation = require("@webiny/validation");
-
 var _api = require("@webiny/api");
-
 var _checkBasePermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkBasePermissions"));
-
 var _checkOwnPermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkOwnPermissions"));
-
 var _handlerGraphql = require("@webiny/handler-graphql");
-
 var _types = require("../../types");
-
 var _error = _interopRequireDefault(require("@webiny/error"));
-
 /**
  * Package mdbid does not have types.
  */
@@ -43,6 +30,7 @@ var _error = _interopRequireDefault(require("@webiny/error"));
  * Package commodo-fields-object does not have types.
  */
 // @ts-ignore
+
 const validStatus = `${_types.PageImportExportTaskStatus.PENDING}:${_types.PageImportExportTaskStatus.PROCESSING}:${_types.PageImportExportTaskStatus.COMPLETED}:${_types.PageImportExportTaskStatus.FAILED}`;
 const CreateDataModel = (0, _fields.withFields)({
   status: (0, _fields.string)({
@@ -63,7 +51,6 @@ const UpdateDataModel = (0, _fields.withFields)({
   error: (0, _commodoFieldsObject.object)()
 })();
 const PERMISSION_NAME = "pb.page";
-
 var _default = ({
   storageOperations
 }) => new _api.ContextPlugin(async context => {
@@ -74,21 +61,17 @@ var _default = ({
     console.log("Missing pageBuilder on context. Skipping Page ImportExportTasks crud.");
     return;
   }
-
   const getLocale = () => {
     const locale = context.i18n.getContentLocale();
-
     if (!locale) {
       throw new _error.default("Missing content locale in pageImportExportTasks.crud.ts", "LOCALE_ERROR");
     }
-
     return locale;
-  };
-
+  };
 
+  // Modify context
   context.pageBuilder.pageImportExportTask = {
     storageOperations,
-
     async getTask(id) {
       const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
         rwd: "r"
@@ -103,10 +86,8 @@ var _default = ({
         }
       };
       let pageImportExportTask = null;
-
      try {
        pageImportExportTask = await storageOperations.getTask(params);
-
        if (!pageImportExportTask) {
          return null;
        }
@@ -115,12 +96,10 @@ var _default = ({
          params
        }));
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);
      return pageImportExportTask;
    },
-
    async listTasks(params) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
@@ -138,13 +117,13 @@ var _default = ({
        },
        sort: Array.isArray(sort) && sort.length > 0 ? sort : ["createdOn_ASC"],
        limit: limit
-      };
+      };
 
+      // If user can only manage own records, let's add that to the listing.
      if (permission.own) {
        const identity = context.security.getIdentity();
        listParams.where.createdBy = identity.id;
      }
-
      try {
        const [items] = await storageOperations.listTasks(listParams);
        return items;
@@ -154,7 +133,6 @@ var _default = ({
        });
      }
    },
-
    async createTask(input) {
      await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
@@ -175,7 +153,6 @@ var _default = ({
          displayName: identity.displayName
        }
      });
-
      try {
        return await storageOperations.createTask({
          input: data,
@@ -187,17 +164,14 @@ var _default = ({
        }));
      }
    },
-
    async updateTask(id, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getTask(id);
-
      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);
      const updateDataModel = new UpdateDataModel().populate(input);
@@ -206,7 +180,6 @@ var _default = ({
        onlyDirty: true
      });
      const pageImportExportTask = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, original), data);
-
      try {
        return await storageOperations.updateTask({
          input: data,
@@ -220,20 +193,16 @@ var _default = ({
        }));
      }
    },
-
    async deleteTask(id) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "d"
      });
      const pageImportExportTask = await context.pageBuilder.pageImportExportTask.getTask(id);
-
      if (!pageImportExportTask) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);
-
      try {
        return await storageOperations.deleteTask({
          task: pageImportExportTask
@@ -244,20 +213,16 @@ var _default = ({
        }));
      }
    },
-
    async updateStats(id, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getTask(id);
-
      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);
-
      try {
        return await storageOperations.updateTaskStats({
          input,
@@ -269,7 +234,6 @@ var _default = ({
        }));
      }
    },
-
    async createSubTask(parent, id, input) {
      await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
@@ -290,7 +254,6 @@ var _default = ({
          displayName: identity.displayName
        }
      });
-
      try {
        return await storageOperations.createSubTask({
          input: data,
@@ -302,27 +265,23 @@ var _default = ({
        }));
      }
    },
-
    async updateSubTask(parent, subTaskId, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getSubTask(parent, subTaskId);
-
      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask parent: "${parent}" and id: "${subTaskId}" not found.`);
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);
      const updateDataModel = new UpdateDataModel().populate(input);
      await updateDataModel.validate();
      const data = await updateDataModel.toJSON({
        onlyDirty: true
-      });
-
+      });
+      // TODO: Merge recursively
      const pageImportExportSubTask = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, original), data);
-
      try {
        return await storageOperations.updateSubTask({
          input: data,
@@ -336,7 +295,6 @@ var _default = ({
        }));
      }
    },
-
    async getSubTask(parent, subTaskId) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
@@ -352,10 +310,8 @@ var _default = ({
        }
      };
      let pageImportExportSubTask = null;
-
      try {
        pageImportExportSubTask = await storageOperations.getSubTask(params);
-
        if (!pageImportExportSubTask) {
          return null;
        }
@@ -364,12 +320,10 @@ var _default = ({
          params
        }));
      }
-
      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportSubTask);
      return pageImportExportSubTask;
    },
-
    async listSubTasks(parent, status, limit) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
@@ -384,13 +338,13 @@ var _default = ({
          status
        },
        limit
-      };
+      };
 
+      // If user can only manage own records, let's add that to the listing.
      if (permission.own) {
        const identity = context.security.getIdentity();
        listParams.where.createdBy = identity.id;
      }
-
      try {
        const [items] = await storageOperations.listSubTasks(listParams);
        return items;
@@ -404,8 +358,6 @@ var _default = ({
        });
      }
    }
-
  };
});
-
 exports.default = _default;
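For context on the API this file wires up: the `ContextPlugin` installs `context.pageBuilder.pageImportExportTask` with the task and sub-task CRUD used by the import/export handlers. A consumption sketch; the method signatures (`updateTask(id, input)`, `listSubTasks(parent, status, limit)`) and the `status` validation against `PENDING:PROCESSING:COMPLETED:FAILED` are visible in the diff, while the types import subpath and any payload fields beyond `status` are assumptions:

```ts
import { ContextPlugin } from "@webiny/api";
import { PageImportExportTaskStatus } from "@webiny/api-page-builder-import-export/types";

export default new ContextPlugin<any>(async context => {
    // createTask() validates `status` against the validStatus list above.
    const task = await context.pageBuilder.pageImportExportTask.createTask({
        status: PageImportExportTaskStatus.PENDING
    });

    // updateTask(id, input) shallow-merges `input` over the stored record.
    await context.pageBuilder.pageImportExportTask.updateTask(task.id, {
        status: PageImportExportTaskStatus.PROCESSING
    });

    // listSubTasks(parent, status, limit); with an own-records-only "pb.page"
    // permission, results are silently filtered by createdBy.
    const subTasks = await context.pageBuilder.pageImportExportTask.listSubTasks(
        task.id,
        PageImportExportTaskStatus.PENDING,
        10
    );
    console.log(subTasks.length);
});
```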