@webiny/api-page-builder-import-export 0.0.0-unstable.5e7233243f → 0.0.0-unstable.6e5425ee89
This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- package/client.js +6 -16
- package/client.js.map +1 -1
- package/exportPages/combine/index.js +11 -19
- package/exportPages/combine/index.js.map +1 -1
- package/exportPages/process/index.js +19 -35
- package/exportPages/process/index.js.map +1 -1
- package/exportPages/s3Stream.js +4 -25
- package/exportPages/s3Stream.js.map +1 -1
- package/exportPages/utils.js +19 -32
- package/exportPages/utils.js.map +1 -1
- package/exportPages/zipper.js +31 -45
- package/exportPages/zipper.js.map +1 -1
- package/graphql/crud/pageImportExportTasks.crud.js +34 -75
- package/graphql/crud/pageImportExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.js +22 -40
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud.js +1 -6
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/pageImportExportTasks.gql.js +2 -7
- package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.js +2 -7
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +1 -5
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.js +1 -6
- package/graphql/index.js.map +1 -1
- package/importPages/create/index.js +9 -20
- package/importPages/create/index.js.map +1 -1
- package/importPages/process/index.js +24 -29
- package/importPages/process/index.js.map +1 -1
- package/importPages/utils.js +45 -127
- package/importPages/utils.js.map +1 -1
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +27 -27
- package/types.js +0 -5
- package/types.js.map +1 -1
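
Nearly all of the churn below follows one mechanical pattern: the package appears to have been rebuilt with a newer Babel toolchain, so runtime helpers are now required through their ".default" export, inline "_interopRequireWildcard" definitions give way to the shared "@babel/runtime" helper, and comments that used to trail statements are emitted on their own lines. The recurring before/after, quoted from the diffs that follow:

    // before: the helper module itself was the function (CommonJS build)
    var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");

    // after: the helpers are ESM-compiled, so the function sits on .default,
    // and the wildcard helper comes from @babel/runtime instead of being inlined
    var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard").default;
    var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;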
package/exportPages/utils.js
CHANGED
@@ -1,59 +1,51 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.EXPORT_PAGES_FOLDER_KEY = void 0;
 exports.exportPage = exportPage;
 exports.extractFilesFromPageData = extractFilesFromPageData;
-
 var _get = _interopRequireDefault(require("lodash/get"));
-
 var _s3Stream = require("./s3Stream");
-
 var _zipper = _interopRequireDefault(require("./zipper"));
-
 const EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
 exports.EXPORT_PAGES_FOLDER_KEY = EXPORT_PAGES_FOLDER_KEY;
-
 async function getFilteredFiles(files) {
   const uniqueFileKeys = new Map();
   const promises = files.map(file => _s3Stream.s3Stream.isFileAccessible(file.key));
   const isFileAvailableResults = await Promise.all(promises);
-  const filesAvailableForDownload = [];
-
+  const filesAvailableForDownload = [];
+  // Filter files
   for (let i = 0; i < files.length; i++) {
-    const file = files[i];
-
+    const file = files[i];
+    // Check file accessibility
     if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {
       filesAvailableForDownload.push(file);
       uniqueFileKeys.set(file.key, true);
    }
  }
-
   return filesAvailableForDownload;
 }
-
 async function exportPage(page, exportPagesDataKey, fileManager) {
   // Extract all files
-  const files = extractFilesFromPageData(page.content || {});
-
-  const filesAvailableForDownload = await getFilteredFiles(files);
-
+  const files = extractFilesFromPageData(page.content || {});
+  // Filter files
+  const filesAvailableForDownload = await getFilteredFiles(files);
+  // Extract images from page settings
   const pageSettingsImages = [(0, _get.default)(page, "settings.general.image"), (0, _get.default)(page, "settings.social.image")].filter(image => image && image.src);
-  const pageSettingsImagesData = [];
-
+  const pageSettingsImagesData = [];
+  // Get file data for all images inside "page.settings"
   for (let i = 0; i < pageSettingsImages.length; i++) {
     const {
       id
     } = pageSettingsImages[i];
     const file = await fileManager.files.getFile(id);
     pageSettingsImagesData.push(file);
-  }
-
+  }
 
+  // Extract the page data in a json file and upload it to S3
   const pageData = {
     page: {
       content: page.content,
@@ -76,29 +68,25 @@ async function exportPage(page, exportPagesDataKey, fileManager) {
   });
   return zipper.process();
 }
-
 function extractFilesFromPageData(data, files = []) {
   // Base case: termination
   if (!data || typeof data !== "object") {
     return files;
-  }
-
-
+  }
+  // Recursively call function for each element
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
       extractFilesFromPageData(element, files);
     }
-
     return files;
-  }
-
+  }
 
+  // Main
   const tuple = Object.entries(data);
-
   for (let i = 0; i < tuple.length; i++) {
-    const [key, value] = tuple[i];
-
+    const [key, value] = tuple[i];
+    // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
     if (key === "file" && value) {
       files.push(value);
     } else if (key === "images" && Array.isArray(value)) {
@@ -108,6 +96,5 @@ function extractFilesFromPageData(data, files = []) {
       extractFilesFromPageData(value, files);
     }
   }
-
   return files;
 }
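
The compiled extractFilesFromPageData above is easier to read in its TypeScript form, which the package's source map embeds verbatim; restated here with a simplified ImageFile type (the package defines it as Omit<File, "src"> plus a key):

    interface ImageFile {
        key: string;
        [prop: string]: unknown;
    }

    // Depth-first walk over a page's content tree, collecting every value stored
    // under a "file" key and every entry of an "images" array.
    function extractFilesFromPageData(data: Record<string, any>, files: ImageFile[] = []): ImageFile[] {
        // Base case: primitives, null and undefined hold no files.
        if (!data || typeof data !== "object") {
            return files;
        }
        // Arrays: recurse into each element.
        if (Array.isArray(data)) {
            for (const element of data) {
                extractFilesFromPageData(element, files);
            }
            return files;
        }
        // Plain objects: collect "file" values and "images" arrays, recurse otherwise.
        for (const [key, value] of Object.entries(data)) {
            if (key === "file" && value) {
                files.push(value as ImageFile);
            } else if (key === "images" && Array.isArray(value)) {
                // Handle case for the "images-list" component, which stores an array of files.
                files.push(...value);
            } else {
                extractFilesFromPageData(value, files);
            }
        }
        return files;
    }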
package/exportPages/utils.js.map
CHANGED
@@ -1 +1 @@
[regenerated single-line source map, not reproduced here; the old "mappings" value is cut off in the extracted text. The embedded utils.ts sourcesContent changes only in casting the two page-settings image lookups with "as unknown as File"; the "names", "sources" and remaining sourcesContent are identical in both versions.]
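
The source-level change that map records is tiny and type-only: the two page-settings lookups in utils.ts gained casts, while the emitted JavaScript stayed the same. Quoted from the old and new sourcesContent:

    // 0.0.0-unstable.5e7233243f
    const pageSettingsImages = [
        get(page, "settings.general.image"),
        get(page, "settings.social.image")
    ].filter(image => image && image.src);

    // 0.0.0-unstable.6e5425ee89
    const pageSettingsImages = [
        get(page, "settings.general.image") as unknown as File,
        get(page, "settings.social.image") as unknown as File
    ].filter(image => image && image.src);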
package/exportPages/zipper.js
CHANGED
@@ -1,31 +1,20 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
+var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard").default;
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = exports.ZipOfZip = void 0;
-
 var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
 var _archiver = _interopRequireDefault(require("archiver"));
-
 var _stream = require("stream");
-
 var path = _interopRequireWildcard(require("path"));
-
 var _kebabCase = _interopRequireDefault(require("lodash/kebabCase"));
-
 var _uniqid = _interopRequireDefault(require("uniqid"));
-
 var _s3Stream = require("./s3Stream");
-
-function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-
-function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
 // TODO: Move "archive" in layer
+
 class Zipper {
   constructor(config) {
     (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -35,7 +24,6 @@ class Zipper {
     this.config = config;
     this.archiveFileName = (0, _uniqid.default)(`${this.config.archiveFileKey}/`, `-${(0, _kebabCase.default)(this.config.exportInfo.pageTitle)}.zip`);
   }
-
   s3DownloadStreams() {
     const exportInfo = this.config.exportInfo;
     const prefix = (0, _uniqid.default)("", `-${(0, _kebabCase.default)(exportInfo.pageTitle)}`);
@@ -52,40 +40,39 @@ class Zipper {
       filename: `${prefix}\\${exportInfo.pageTitle}.json`
     }];
   }
-
   process() {
     const {
       streamPassThrough,
       streamPassThroughUploadPromise
-    } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
-
-
-    const s3FilesStreams = this.s3DownloadStreams(); // 2. Prepare zip from the file stream.
-
-    const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
 
+    // 1. Read all files from S3 using stream.
+    const s3FilesStreams = this.s3DownloadStreams();
 
+    // 2. Prepare zip from the file stream.
+    const archive = _archiver.default.create(this.archiveFormat);
+    // Handle archive events.
     archive.on("error", error => {
       throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
-    }); // Append all file streams to archive.
+    });
 
+    // Append all file streams to archive.
     s3FilesStreams.forEach(streamDetails => archive.append(streamDetails.stream, {
       name: streamDetails.filename
-    })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    }));
 
-    archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+    // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    archive.pipe(streamPassThrough);
+    // Finalize the archive (ie we are done appending files but streams have to finish yet)
     // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+    archive.finalize();
 
-    archive.finalize(); // 3. Return upload stream promise.
-
+    // 3. Return upload stream promise.
     return streamPassThroughUploadPromise;
   }
-
 }
-
 exports.default = Zipper;
 const PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;
-
 class ZipOfZip {
   constructor(keys) {
     (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -94,7 +81,6 @@ class ZipOfZip {
     this.keys = keys;
     this.archiveFileName = (0, _uniqid.default)("", `-${PAGE_EXPORT_BASENAME}`);
   }
-
   getFileStreams() {
     return this.keys.map(key => {
       return {
@@ -103,35 +89,35 @@ class ZipOfZip {
       };
     });
   }
-
   process() {
     const {
       streamPassThrough,
       streamPassThroughUploadPromise
-    } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
-
-
-    const fileStreamDetails = this.getFileStreams(); // 2. Prepare zip from the file stream.
-
-    const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
 
+    // 1. Read all files from S3 using stream.
+    const fileStreamDetails = this.getFileStreams();
 
+    // 2. Prepare zip from the file stream.
+    const archive = _archiver.default.create(this.archiveFormat);
+    // Handle archive events.
    archive.on("error", error => {
      throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
-    }); // Append all file streams to archive.
+    });
 
+    // Append all file streams to archive.
     fileStreamDetails.forEach(streamDetails => archive.append(streamDetails.stream, {
       name: streamDetails.filename
-    })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    }));
 
-    archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+    // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+    archive.pipe(streamPassThrough);
+    // Finalize the archive (ie we are done appending files but streams have to finish yet)
     // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+    archive.finalize();
 
-    archive.finalize(); // 3. Return upload stream promise.
-
+    // 3. Return upload stream promise.
    return streamPassThroughUploadPromise;
  }
-
 }
-
 exports.ZipOfZip = ZipOfZip;
package/exportPages/zipper.js.map
CHANGED
@@ -1 +1 @@
[regenerated single-line source map, not reproduced here; the old "mappings" value is cut off in the extracted text. The embedded zipper.ts sourcesContent and "names" are identical in both versions; only the generated "mappings" changed, reflecting the new Babel helper layout.]