@webiny/api-page-builder-import-export 5.35.0-beta.2 → 5.35.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,3 @@
  import { Response, ErrorResponse } from "@webiny/handler-graphql";
- declare const _default: (fn: () => Promise<any>) => Promise<Response<any> | ErrorResponse>;
+ declare const _default: (fn: () => Promise<any>) => Promise<ErrorResponse | Response<any>>;
  export default _default;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@webiny/api-page-builder-import-export",
- "version": "5.35.0-beta.2",
+ "version": "5.35.1-beta.0",
  "main": "index.js",
  "keywords": [
  "pbie:base"
@@ -16,17 +16,17 @@
  "dependencies": {
  "@babel/runtime": "7.20.13",
  "@commodo/fields": "1.1.2-beta.20",
- "@webiny/api": "5.35.0-beta.2",
- "@webiny/api-file-manager": "5.35.0-beta.2",
- "@webiny/api-form-builder": "5.35.0-beta.2",
- "@webiny/api-page-builder": "5.35.0-beta.2",
- "@webiny/api-security": "5.35.0-beta.2",
- "@webiny/error": "5.35.0-beta.2",
- "@webiny/handler": "5.35.0-beta.2",
- "@webiny/handler-aws": "5.35.0-beta.2",
- "@webiny/handler-graphql": "5.35.0-beta.2",
- "@webiny/utils": "5.35.0-beta.2",
- "@webiny/validation": "5.35.0-beta.2",
+ "@webiny/api": "^5.35.1-beta.0",
+ "@webiny/api-file-manager": "^5.35.1-beta.0",
+ "@webiny/api-form-builder": "^5.35.1-beta.0",
+ "@webiny/api-page-builder": "^5.35.1-beta.0",
+ "@webiny/api-security": "^5.35.1-beta.0",
+ "@webiny/error": "^5.35.1-beta.0",
+ "@webiny/handler": "^5.35.1-beta.0",
+ "@webiny/handler-aws": "^5.35.1-beta.0",
+ "@webiny/handler-graphql": "^5.35.1-beta.0",
+ "@webiny/utils": "^5.35.1-beta.0",
+ "@webiny/validation": "^5.35.1-beta.0",
  "archiver": "5.3.1",
  "commodo-fields-object": "1.0.6",
  "dot-prop-immutable": "2.1.1",
@@ -40,28 +40,28 @@
  "yauzl": "2.10.0"
  },
  "devDependencies": {
- "@babel/cli": "^7.19.3",
- "@babel/core": "^7.19.3",
- "@babel/plugin-proposal-export-default-from": "^7.16.0",
- "@babel/preset-env": "^7.19.4",
- "@babel/preset-typescript": "^7.18.6",
- "@types/archiver": "^5.3.1",
- "@types/node-fetch": "^2.6.1",
- "@types/yauzl": "^2.9.2",
- "@webiny/api-dynamodb-to-elasticsearch": "^5.35.0-beta.2",
- "@webiny/api-file-manager-ddb-es": "^5.35.0-beta.2",
- "@webiny/api-i18n-ddb": "^5.35.0-beta.2",
- "@webiny/api-security-so-ddb": "^5.35.0-beta.2",
- "@webiny/api-tenancy": "^5.35.0-beta.2",
- "@webiny/api-tenancy-so-ddb": "^5.35.0-beta.2",
- "@webiny/api-wcp": "^5.35.0-beta.2",
- "@webiny/cli": "^5.35.0-beta.2",
- "@webiny/db": "^5.35.0-beta.2",
- "@webiny/project-utils": "^5.35.0-beta.2",
- "jest": "^28.1.0",
- "jest-dynalite": "^3.2.0",
- "rimraf": "^3.0.2",
- "ttypescript": "^1.5.12",
+ "@babel/cli": "7.20.7",
+ "@babel/core": "7.20.12",
+ "@babel/plugin-proposal-export-default-from": "7.18.10",
+ "@babel/preset-env": "7.20.2",
+ "@babel/preset-typescript": "7.18.6",
+ "@types/archiver": "5.3.1",
+ "@types/node-fetch": "2.6.2",
+ "@types/yauzl": "2.10.0",
+ "@webiny/api-dynamodb-to-elasticsearch": "^5.35.1-beta.0",
+ "@webiny/api-file-manager-ddb-es": "^5.35.1-beta.0",
+ "@webiny/api-i18n-ddb": "^5.35.1-beta.0",
+ "@webiny/api-security-so-ddb": "^5.35.1-beta.0",
+ "@webiny/api-tenancy": "^5.35.1-beta.0",
+ "@webiny/api-tenancy-so-ddb": "^5.35.1-beta.0",
+ "@webiny/api-wcp": "^5.35.1-beta.0",
+ "@webiny/cli": "^5.35.1-beta.0",
+ "@webiny/db": "^5.35.1-beta.0",
+ "@webiny/project-utils": "^5.35.1-beta.0",
+ "jest": "28.1.3",
+ "jest-dynalite": "3.6.1",
+ "rimraf": "3.0.2",
+ "ttypescript": "1.5.15",
  "typescript": "4.7.4"
  },
  "publishConfig": {
@@ -79,5 +79,5 @@
  ]
  }
  },
- "gitHead": "948cd1e05978e0ed25137ace7dd6c15ed0bf2cca"
+ "gitHead": "d6a257ebbb9e7aeccfaa33a7f83301cf1919b082"
  }
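
The package.json change above flips the pinning strategy: the runtime @webiny/* dependencies move from exact pins ("5.35.0-beta.2") to caret ranges ("^5.35.1-beta.0"), while the third-party build tools in devDependencies move from caret ranges to exact pins. As a minimal sketch of what the new caret range accepts (assuming the standard semver npm package, which is not part of this diff):

    import semver from "semver";

    // A prerelease version only matches if its major.minor.patch tuple equals
    // one that appears with a prerelease in the range itself.
    semver.satisfies("5.35.1-beta.1", "^5.35.1-beta.0"); // true  - a later beta of the same patch
    semver.satisfies("5.36.0", "^5.35.1-beta.0");        // true  - any later 5.x release
    semver.satisfies("6.0.0-beta.0", "^5.35.1-beta.0");  // false - the next major is excluded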
@@ -1,3 +0,0 @@
- import { PbImportExportContext } from "../../types";
- import { Configuration, Payload, Response } from ".";
- export declare const blocksHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;
@@ -1,175 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.blocksHandler = void 0;
-
- var _types = require("../../types");
-
- var _utils = require("../utils");
-
- var _client = require("../../client");
-
- var _mockSecurity = require("../../mockSecurity");
-
- var _utils2 = require("@webiny/utils");
-
- const blocksHandler = async (configuration, payload, context) => {
- const log = console.log;
- let subTask;
- let noPendingTask = true;
- let prevStatusOfSubTask = _types.ImportExportTaskStatus.PENDING;
- log("RUNNING Import Block Queue Process");
- const {
- pageBuilder
- } = context;
- const {
- taskId,
- subTaskIndex,
- type,
- identity
- } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
- // and this Lambda is invoked internally, without credentials.
-
- (0, _mockSecurity.mockSecurity)(identity, context);
-
- try {
- /*
- * Note: We're not going to DB for getting next sub-task to process,
- * because the data might be out of sync due to GSI eventual consistency.
- */
- subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
- /**
- * Base condition!!
- * Bail out early, if task not found or task's status is not "pending".
- */
-
- if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
- noPendingTask = true;
- return {
- data: "",
- error: null
- };
- } else {
- noPendingTask = false;
- }
-
- prevStatusOfSubTask = subTask.status;
- log(`Fetched sub task => ${subTask.id}`);
- console.log("subTask", subTask);
- const {
- blockKey,
- category,
- zipFileKey,
- input
- } = subTask.data;
- const {
- fileUploadsData
- } = input;
- log(`Processing block key "${blockKey}"`); // Mark task status as PROCESSING
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.PROCESSING
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.PROCESSING
- });
- prevStatusOfSubTask = subTask.status; // Real job
-
- const block = await (0, _utils.importBlock)({
- context,
- blockKey,
- key: zipFileKey,
- fileUploadsData
- }); // Create a block
-
- const pbBlock = await context.pageBuilder.createPageBlock({
- name: block.name,
- blockCategory: category,
- content: block.content,
- preview: block.preview
- }); // Update task record in DB
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: "Done",
- block: {
- id: pbBlock.id,
- name: pbBlock.name
- }
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.COMPLETED
- });
- prevStatusOfSubTask = subTask.status;
- } catch (e) {
- log("[IMPORT_BLOCKS_PROCESS] Error => ", e.message);
-
- if (subTask && subTask.id) {
- /**
- * In case of error, we'll update the task status to "failed",
- * so that, client can show notify the user appropriately.
- */
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.FAILED,
- error: {
- name: e.name,
- message: e.message,
- code: "IMPORT_FAILED"
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.FAILED
- });
- prevStatusOfSubTask = subTask.status;
- }
-
- return {
- data: null,
- error: {
- message: e.message
- }
- };
- } finally {
- // Base condition!
- if (noPendingTask) {
- log(`No pending sub-task for task ${taskId}`);
- await pageBuilder.importExportTask.updateTask(taskId, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: `Finish importing blocks.`
- }
- });
- } else {
- log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current block error out.
-
- await (0, _client.invokeHandlerClient)({
- context,
- name: configuration.handlers.process,
- payload: {
- taskId,
- subTaskIndex: subTaskIndex + 1,
- type,
- identity: context.security.getIdentity()
- },
- description: "Import blocks - process - subtask"
- });
- }
- }
-
- return {
- data: "",
- error: null
- };
- };
-
- exports.blocksHandler = blocksHandler;
@@ -1 +0,0 @@
- {"version":3,"sources":["blocksHandler.ts"],…} (minified source map for the deleted blocksHandler.js; mappings omitted)
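
The deleted blocksHandler above implements a self-invoking queue: each Lambda invocation loads one sub-task by zero-padded index, processes it, and then re-invokes the same handler with the next index until no pending sub-task remains. A hypothetical condensed sketch of that control flow (names and signatures are illustrative, not the package's API):

    type SubTaskStatus = "pending" | "processing" | "completed" | "failed";

    interface SubTask {
        id: string;
        status: SubTaskStatus;
    }

    async function processQueue(
        getSubTask: (index: number) => Promise<SubTask | null>,
        processOne: (subTask: SubTask) => Promise<void>,
        invokeSelf: (nextIndex: number) => Promise<void>,
        index: number
    ): Promise<void> {
        const subTask = await getSubTask(index);
        // Base condition: stop once there is no pending sub-task left.
        if (!subTask || subTask.status !== "pending") {
            return;
        }
        try {
            await processOne(subTask);
        } finally {
            // Continue with self-invocation even if the current sub-task errored
            // out, mirroring the finally block in the deleted handler.
            await invokeSelf(index + 1);
        }
    }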
@@ -1,3 +0,0 @@
- import { PbImportExportContext } from "../../types";
- import { Configuration, Payload, Response } from ".";
- export declare const pagesHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;
@@ -1,180 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.pagesHandler = void 0;
-
- var _types = require("../../types");
-
- var _utils = require("../utils");
-
- var _client = require("../../client");
-
- var _mockSecurity = require("../../mockSecurity");
-
- var _utils2 = require("@webiny/utils");
-
- const pagesHandler = async (configuration, payload, context) => {
- const log = console.log;
- let subTask;
- let noPendingTask = true;
- let prevStatusOfSubTask = _types.ImportExportTaskStatus.PENDING;
- log("RUNNING Import Page Queue Process");
- const {
- pageBuilder
- } = context;
- const {
- taskId,
- subTaskIndex,
- type,
- identity
- } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
- // and this Lambda is invoked internally, without credentials.
-
- (0, _mockSecurity.mockSecurity)(identity, context);
-
- try {
- /*
- * Note: We're not going to DB for getting next sub-task to process,
- * because the data might be out of sync due to GSI eventual consistency.
- */
- subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
- /**
- * Base condition!!
- * Bail out early, if task not found or task's status is not "pending".
- */
-
- if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
- noPendingTask = true;
- return {
- data: "",
- error: null
- };
- } else {
- noPendingTask = false;
- }
-
- prevStatusOfSubTask = subTask.status;
- log(`Fetched sub task => ${subTask.id}`);
- const {
- pageKey,
- category,
- zipFileKey,
- input
- } = subTask.data;
- const {
- fileUploadsData
- } = input;
- log(`Processing page key "${pageKey}"`); // Mark task status as PROCESSING
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.PROCESSING
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.PROCESSING
- });
- prevStatusOfSubTask = subTask.status; // Real job
-
- const page = await (0, _utils.importPage)({
- context,
- pageKey,
- key: zipFileKey,
- fileUploadsData
- }); // Create a page
-
- let pbPage = await context.pageBuilder.createPage(category); // Update page with data
-
- pbPage = await context.pageBuilder.updatePage(pbPage.id, {
- content: page.content,
- title: page.title,
- path: page.path,
- settings: page.settings
- }); // TODO: Publish page
- // Update task record in DB
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: "Done",
- page: {
- id: pbPage.id,
- title: pbPage.title,
- version: pbPage.version,
- status: pbPage.status
- }
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.COMPLETED
- });
- prevStatusOfSubTask = subTask.status;
- } catch (e) {
- log("[IMPORT_PAGES_PROCESS] Error => ", e);
-
- if (subTask && subTask.id) {
- /**
- * In case of error, we'll update the task status to "failed",
- * so that, client can show notify the user appropriately.
- */
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.FAILED,
- error: {
- name: e.name,
- message: e.message,
- stack: e.stack,
- code: "IMPORT_FAILED"
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.FAILED
- });
- prevStatusOfSubTask = subTask.status;
- }
-
- return {
- data: null,
- error: {
- message: e.message
- }
- };
- } finally {
- // Base condition!
- if (noPendingTask) {
- log(`No pending sub-task for task ${taskId}`);
- await pageBuilder.importExportTask.updateTask(taskId, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: `Finish importing pages.`
- }
- });
- } else {
- log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current page error out.
-
- await (0, _client.invokeHandlerClient)({
- context,
- name: configuration.handlers.process,
- payload: {
- taskId,
- subTaskIndex: subTaskIndex + 1,
- type,
- identity: context.security.getIdentity()
- },
- description: "Import pages - process - subtask"
- });
- }
- }
-
- return {
- data: "",
- error: null
- };
- };
-
- exports.pagesHandler = pagesHandler;
@@ -1 +0,0 @@
- {"version":3,"sources":["pagesHandler.ts"],…} (minified source map for the deleted pagesHandler.js; mappings omitted)
package/import/utils.d.ts DELETED
@@ -1,49 +0,0 @@
- import { File } from "@webiny/api-file-manager/types";
- import { PbImportExportContext } from "../graphql/types";
- import { ExportedPageData, ExportedBlockData } from "../export/utils";
- interface UploadAssetsParams {
- context: PbImportExportContext;
- filesData: File[];
- fileUploadsData: FileUploadsData;
- }
- interface UploadAssetsReturnType {
- fileIdToKeyMap: Map<string, string>;
- }
- export declare const uploadAssets: (params: UploadAssetsParams) => Promise<UploadAssetsReturnType>;
- interface FileUploadsData {
- data: string;
- assets: Record<string, string>;
- }
- interface ImportPageParams {
- key: string;
- pageKey: string;
- context: PbImportExportContext;
- fileUploadsData: FileUploadsData;
- }
- export declare function importPage({ pageKey, context, fileUploadsData }: ImportPageParams): Promise<ExportedPageData["page"]>;
- interface ImportBlockParams {
- key: string;
- blockKey: string;
- context: PbImportExportContext;
- fileUploadsData: FileUploadsData;
- }
- export declare function importBlock({ blockKey, context, fileUploadsData }: ImportBlockParams): Promise<ExportedBlockData["block"]>;
- interface ImportData {
- assets: Record<string, string>;
- data: string;
- key: string;
- }
- /**
- * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param zipFileUrl
- * @return ImportData S3 file keys for all uploaded assets group by page/block.
- */
- export declare function readExtractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]>;
- export declare function initialStats(total: number): {
- pending: number;
- processing: number;
- completed: number;
- failed: number;
- total: number;
- };
- export {};
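
Per the deleted implementation further below, initialStats seeds a counter object keyed by the ImportExportTaskStatus values, with every sub-task starting out as pending. A minimal sketch of the declared shape (illustrative only, not the package's code):

    // Status keys assumed to match the declaration above.
    function initialStats(total: number) {
        return { pending: total, processing: 0, completed: 0, failed: 0, total };
    }

    initialStats(3); // { pending: 3, processing: 0, completed: 0, failed: 0, total: 3 }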
package/import/utils.js DELETED
@@ -1,641 +0,0 @@
- "use strict";
-
- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.importBlock = importBlock;
- exports.importPage = importPage;
- exports.initialStats = initialStats;
- exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
- exports.uploadAssets = void 0;
-
- var _uniqid = _interopRequireDefault(require("uniqid"));
-
- var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
-
- var _fs = require("fs");
-
- var _fsExtra = require("fs-extra");
-
- var _util = require("util");
-
- var _stream = require("stream");
-
- var _nodeFetch = _interopRequireDefault(require("node-fetch"));
-
- var _path = _interopRequireDefault(require("path"));
-
- var _yauzl = _interopRequireDefault(require("yauzl"));
-
- var _chunk = _interopRequireDefault(require("lodash/chunk"));
-
- var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
-
- var _error = _interopRequireDefault(require("@webiny/error"));
-
- var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
-
- var _types = require("../types");
-
- var _s3Stream = require("../export/s3Stream");
-
- const streamPipeline = (0, _util.promisify)(_stream.pipeline);
- const INSTALL_DIR = "/tmp";
-
- const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
-
- const FILES_COUNT_IN_EACH_BATCH = 15;
-
- function updateImageInPageSettings(params) {
- const {
- settings,
- fileIdToKeyMap,
- srcPrefix
- } = params;
- let newSettings = settings;
- const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
-
- if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
- var _settings$general, _settings$general$ima;
-
- newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
- }
-
- if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
- var _settings$social, _settings$social$imag;
-
- newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
- }
-
- return newSettings;
- }
-
- function updateBlockPreviewImage(params) {
- const {
- file,
- fileIdToKeyMap,
- srcPrefix
- } = params;
- const newFile = file;
- const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
- newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
- return newFile;
- }
-
- function updateFilesInData({
- data,
- fileIdToKeyMap,
- srcPrefix
- }) {
- // BASE CASE: Termination point
- if (!data || typeof data !== "object") {
- return;
- } // Recursively call function if data is array
-
-
- if (Array.isArray(data)) {
- for (let i = 0; i < data.length; i++) {
- const element = data[i];
- updateFilesInData({
- data: element,
- fileIdToKeyMap,
- srcPrefix
- });
- }
-
- return;
- } // Main logic
-
-
- const tuple = Object.entries(data);
-
- for (let i = 0; i < tuple.length; i++) {
- const [key, value] = tuple[i];
-
- if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
- value.key = fileIdToKeyMap.get(value.id);
- value.name = fileIdToKeyMap.get(value.id);
- value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
- } else {
- updateFilesInData({
- data: value,
- srcPrefix,
- fileIdToKeyMap
- });
- }
- }
- }
-
- const uploadAssets = async params => {
- const {
- context,
- filesData,
- fileUploadsData
- } = params; // Save uploaded file key against static id for later use.
-
- const fileIdToKeyMap = new Map();
- /**
- * This function contains logic of file download from S3.
- * Current we're not mocking zip file download from S3 in tests at the moment.
- * So, we're manually mocking it in case of test just by returning an empty object.
- */
-
- if (process.env.NODE_ENV === "test") {
- return {
- fileIdToKeyMap
- };
- } // Save files meta data against old key for later use.
-
-
- const fileKeyToFileMap = new Map(); // Initialize maps.
-
- for (let i = 0; i < filesData.length; i++) {
- const file = filesData[i];
- fileKeyToFileMap.set(file.key, file); // Initialize the value
-
- fileIdToKeyMap.set(file.id, file.type);
- }
-
- const fileUploadResults = await uploadFilesFromS3({
- fileKeyToFileMap,
- oldKeyToNewKeyMap: fileUploadsData.assets
- }); // Create files in File Manager
-
- const createFilesInput = fileUploadResults.map(uploadResult => {
- const newKey = uploadResult.Key;
- const file = fileKeyToFileMap.get(getOldFileKey(newKey));
-
- if (!file) {
- return null;
- } // Update the file map with newly uploaded file.
-
-
- fileIdToKeyMap.set(file.id, newKey);
- return {
- key: newKey,
- name: file.name,
- size: file.size,
- type: file.type,
- meta: file.meta,
- tags: file.tags
- };
- }).filter(Boolean);
- const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
-
- const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
-
- for (let i = 0; i < createFilesInputChunks.length; i++) {
- const createFilesInputChunk = createFilesInputChunks[i];
- createFilesPromises.push(
- /*
- * We need to break down files into chunks because
- * `createFilesInBatch` operation has a limit on number of files it can handle at once.
- */
- context.fileManager.files.createFilesInBatch(createFilesInputChunk));
- }
-
- await Promise.all(createFilesPromises);
- return {
- fileIdToKeyMap
- };
- };
-
- exports.uploadAssets = uploadAssets;
-
- async function importPage({
- pageKey,
- context,
- fileUploadsData
- }) {
- const log = console.log; // Making Directory for page in which we're going to extract the page data file.
-
- const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
-
- (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
-
- const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
- const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
- log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`); // Download and save page data file in disk.
-
- await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
- }); // Load the page data file from disk.
-
- log(`Load file ${pageDataFileKey}`);
- const {
- page,
- files
- } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
-
- if (files && Array.isArray(files) && files.length > 0) {
- // Upload page assets.
- const {
- fileIdToKeyMap
- } = await uploadAssets({
- context,
-
- /**
- * TODO @ts-refactor @ashutosh figure out correct types.
- */
- // @ts-ignore
- filesData: files,
- fileUploadsData
- });
- const settings = await context.fileManager.settings.getSettings();
- const {
- srcPrefix = ""
- } = settings || {};
- updateFilesInData({
- data: page.content || {},
- fileIdToKeyMap,
- srcPrefix
- });
- page.settings = updateImageInPageSettings({
- settings: page.settings || {},
- fileIdToKeyMap,
- srcPrefix
- });
- }
-
- log("Removing Directory for page...");
- await (0, _downloadInstallFiles.deleteFile)(pageKey);
- log(`Remove page contents from S3...`);
- await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
- return page;
- }
-
- async function importBlock({
- blockKey,
- context,
- fileUploadsData
- }) {
- const log = console.log; // Making Directory for block in which we're going to extract the block data file.
-
- const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
-
- (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
-
- const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
- const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
-
- log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
-
- await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
- }); // Load the block data file from disk.
-
- log(`Load file ${blockDataFileKey}`);
- const {
- block,
- files
- } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
-
- if (files && Array.isArray(files) && files.length > 0) {
- // Upload block assets.
- const {
- fileIdToKeyMap
- } = await uploadAssets({
- context,
- filesData: files,
- fileUploadsData
- });
- const settings = await context.fileManager.settings.getSettings();
- const {
- srcPrefix = ""
- } = settings || {};
- updateFilesInData({
- data: block.content || {},
- fileIdToKeyMap,
- srcPrefix
- });
- block.preview = updateBlockPreviewImage({
- file: block.preview || {},
- fileIdToKeyMap,
- srcPrefix
- });
- }
-
- log("Removing Directory for block...");
- await (0, _downloadInstallFiles.deleteFile)(blockKey);
- log(`Remove block contents from S3...`);
- await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
- return block;
- }
-
- async function uploadFilesFromS3({
- fileKeyToFileMap,
- oldKeyToNewKeyMap
- }) {
- const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
- const promises = []; // Upload all assets.
-
- for (let i = 0; i < oldKeysForAssets.length; i++) {
- const oldKey = oldKeysForAssets[i];
- const tempNewKey = oldKeyToNewKeyMap[oldKey]; // Read file.
-
- const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
-
-
- const fileMetaData = fileKeyToFileMap.get(oldKey);
-
- if (fileMetaData) {
- const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
-
- const {
- streamPassThrough,
- streamPassThroughUploadPromise: promise
- } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
-
- readStream.pipe(streamPassThrough);
- promises.push(promise);
- console.log(`Successfully queued file "${newKey}"`);
- }
- }
-
- return Promise.all(promises);
- }
-
- function getOldFileKey(key) {
- /*
- * Because we know the naming convention, we can extract the old key from new key.
- */
- try {
- const [, ...rest] = key.split("-");
- return rest.join("-");
- } catch (e) {
- return key;
- }
- }
-
- const FILE_CONTENT_TYPE = "application/octet-stream";
-
- function getFileNameWithoutExt(fileName) {
- return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
- }
-
- /**
- * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param zipFileUrl
- * @return ImportData S3 file keys for all uploaded assets group by page/block.
- */
- async function readExtractAndUploadZipFileContents(zipFileUrl) {
- const log = console.log;
- const importDataList = [];
-
- const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
-
- const response = await (0, _nodeFetch.default)(zipFileUrl);
-
- if (!response.ok) {
- throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
- }
-
- const readStream = response.body;
- const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk
-
- const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
-
- const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
- await streamPipeline(readStream, writeStream);
- log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
-
- const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
- log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
- await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3
-
- for (let i = 0; i < zipFilePaths.length; i++) {
- const currentPath = zipFilePaths[i];
- const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
- importDataList.push(dataMap);
- }
-
- log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
- await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
- return importDataList;
- }
-
- const ASSETS_DIR_NAME = "/assets";
-
- function prepareDataDirMap({
- map,
- filePath,
- newKey
- }) {
- const dirname = _path.default.dirname(filePath);
-
- const fileName = _path.default.basename(filePath);
- /*
- * We want to use dot (.) as part of object key rather than creating nested object(s).
- * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
- */
-
-
- const oldKey = fileName.replace(/\./g, "\\.");
- const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
-
- if (isAsset) {
- map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
- } else {
- // We only need to know the newKey for data file.
- map = _dotPropImmutable.default.set(map, `data`, newKey);
- }
-
- return map;
- }
-
- async function deleteS3Folder(key) {
- // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
- if (!key.endsWith("/")) {
- key = `${key}/`;
- }
-
- const response = await _s3Stream.s3Stream.listObject(key);
- const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
- console.log(`Found ${keys.length} files.`);
- const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
- await Promise.all(deleteFilePromises);
- console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
- } // export const zeroPad = version => `${version}`.padStart(5, "0");
-
-
- function initialStats(total) {
- return {
- [_types.ImportExportTaskStatus.PENDING]: total,
- [_types.ImportExportTaskStatus.PROCESSING]: 0,
- [_types.ImportExportTaskStatus.COMPLETED]: 0,
- [_types.ImportExportTaskStatus.FAILED]: 0,
- total
- };
- }
-
- function extractZipToDisk(exportFileZipPath) {
- return new Promise((resolve, reject) => {
- const zipFilePaths = [];
- const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
- const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
-
-
- (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
-
- _yauzl.default.open(exportFileZipPath, {
- lazyEntries: true
- }, function (err, zipFile) {
- if (err) {
- console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
- reject(err);
- return;
- }
-
- if (!zipFile) {
- console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
- reject("Missing Zip File Resource.");
- return;
- }
-
- console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
- zipFile.on("end", function (err) {
- if (err) {
- console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
- reject(err);
- }
-
- resolve(zipFilePaths);
- });
- zipFile.readEntry();
- zipFile.on("entry", function (entry) {
- console.info(`Processing entry: "${entry.fileName}"`);
-
- if (/\/$/.test(entry.fileName)) {
- // Directory file names end with '/'.
- // Note that entries for directories themselves are optional.
- // An entry's fileName implicitly requires its parent directories to exist.
- zipFile.readEntry();
- } else {
- // file entry
- zipFile.openReadStream(entry, function (err, readStream) {
- if (err) {
- console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
- reject(err);
- return;
- }
-
- if (!readStream) {
- console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
- reject("Missing Read Stream Resource.");
- return;
- }
-
- const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
-
- readStream.on("end", function () {
- zipFilePaths.push(filePath);
- zipFile.readEntry();
- });
- streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
- reject(error);
- });
- });
- }
- });
- });
- });
- }
-
- function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
- return new Promise((resolve, reject) => {
- const filePaths = [];
- const fileUploadPromises = [];
- const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
- let dataMap = {
- key: uniqueKey,
- assets: {},
- data: ""
- };
-
- _yauzl.default.open(dataZipFilePath, {
- lazyEntries: true
- }, function (err, zipFile) {
- if (err) {
- console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
- reject(err);
- return;
- }
-
- if (!zipFile) {
- console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
- reject("Missing Zip File Resource.");
- return;
- }
-
- console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
- zipFile.on("end", function (err) {
- if (err) {
- console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
- reject(err);
- }
-
- Promise.all(fileUploadPromises).then(res => {
- res.forEach(r => {
- console.info("Done uploading... ", r);
- });
- resolve(dataMap);
- });
- });
- zipFile.readEntry();
- zipFile.on("entry", function (entry) {
- console.info(`Processing entry: "${entry.fileName}"`);
-
- if (/\/$/.test(entry.fileName)) {
- // Directory file names end with '/'.
- // Note that entries for directories themselves are optional.
- // An entry's fileName implicitly requires its parent directories to exist.
- zipFile.readEntry();
- } else {
- // file entry
- zipFile.openReadStream(entry, function (err, readStream) {
- if (err) {
- console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
- reject(err);
- return;
- }
-
- if (!readStream) {
- console.log("ERROR: Missing Read Stream while importing.");
- reject("Missing Read Strea Resource.");
- return;
- }
-
- readStream.on("end", function () {
- filePaths.push(entry.fileName);
- zipFile.readEntry();
- });
- const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place
-
- dataMap = prepareDataDirMap({
- map: dataMap,
- filePath: entry.fileName,
- newKey
- });
-
- const {
- streamPassThrough,
- streamPassThroughUploadPromise: promise
- } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
-
- streamPipeline(readStream, streamPassThrough).then(() => {
- fileUploadPromises.push(promise);
- }).catch(error => {
- reject(error);
- });
- });
- }
- });
- });
- });
- }
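
The deleted uploadFilesFromS3/getOldFileKey pair above relies on a key naming convention: a re-uploaded asset gets the key "<uniqueId>-<oldKey>" (the prefix comes from uniqid), so the original key can be recovered by dropping everything up to the first "-". A hypothetical round-trip sketch (makeNewKey and the sample values are illustrative, not the package's code):

    const makeNewKey = (uniqueId: string, oldKey: string) => `${uniqueId}-${oldKey}`;

    // Mirrors the deleted getOldFileKey: drop the generated prefix before the
    // first "-"; joining the rest back preserves dashes inside the old key.
    const getOldFileKey = (key: string): string => {
        const [, ...rest] = key.split("-");
        return rest.join("-");
    };

    const newKey = makeNewKey("8d1kq3x0", "welcome-to-webiny.png"); // "8d1kq3x0-welcome-to-webiny.png"
    getOldFileKey(newKey); // "welcome-to-webiny.png"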
@@ -1 +0,0 @@
1
- {"version":3,"names":["streamPipeline","promisify","pipeline","INSTALL_DIR","INSTALL_EXTRACT_DIR","path","join","FILES_COUNT_IN_EACH_BATCH","updateImageInPageSettings","params","settings","fileIdToKeyMap","srcPrefix","newSettings","srcPrefixWithoutTrailingSlash","endsWith","slice","dotProp","get","set","general","image","id","social","updateBlockPreviewImage","file","newFile","src","updateFilesInData","data","Array","isArray","i","length","element","tuple","Object","entries","key","value","has","name","uploadAssets","context","filesData","fileUploadsData","Map","process","env","NODE_ENV","fileKeyToFileMap","type","fileUploadResults","uploadFilesFromS3","oldKeyToNewKeyMap","assets","createFilesInput","map","uploadResult","newKey","Key","getOldFileKey","size","meta","tags","filter","Boolean","createFilesPromises","createFilesInputChunks","chunk","createFilesInputChunk","push","fileManager","files","createFilesInBatch","Promise","all","importPage","pageKey","log","console","PAGE_EXTRACT_DIR","ensureDirSync","pageDataFileKey","PAGE_DATA_FILE_PATH","basename","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","page","loadJson","getSettings","content","deleteFile","deleteS3Folder","dirname","importBlock","blockKey","BLOCK_EXTRACT_DIR","blockDataFileKey","BLOCK_DATA_FILE_PATH","block","preview","oldKeysForAssets","keys","promises","oldKey","tempNewKey","fileMetaData","uniqueId","streamPassThrough","streamPassThroughUploadPromise","promise","writeStream","rest","split","e","FILE_CONTENT_TYPE","getFileNameWithoutExt","fileName","replace","extname","readExtractAndUploadZipFileContents","zipFileUrl","importDataList","zipFileName","response","fetch","ok","WebinyError","statusText","body","uniquePath","ZIP_FILE_PATH","zipFilePaths","extractZipToDisk","currentPath","dataMap","extractZipAndUploadToS3","ASSETS_DIR_NAME","prepareDataDirMap","filePath","isAsset","listObject","Contents","c","deleteFilePromises","deleteObject","initialStats","total","ImportExportTaskStatus","PENDING","PROCESSING","COMPLETED","FAILED","exportFileZipPath","uniqueFolderNameForExport","EXPORT_FILE_EXTRACTION_PATH","yauzl","open","lazyEntries","err","zipFile","warn","info","entryCount","readEntry","entry","test","openReadStream","catch","error","dataZipFilePath","filePaths","fileUploadPromises","uniqueKey","then","res","forEach","r"],"sources":["utils.ts"],"sourcesContent":["import uniqueId from \"uniqid\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport dotProp from \"dot-prop-immutable\";\nimport { createWriteStream } from \"fs\";\nimport { ensureDirSync } from \"fs-extra\";\nimport { promisify } from \"util\";\nimport { pipeline } from \"stream\";\nimport fetch from \"node-fetch\";\nimport path from \"path\";\nimport yauzl from \"yauzl\";\nimport chunk from \"lodash/chunk\";\nimport loadJson from \"load-json-file\";\nimport { FileInput, File } from \"@webiny/api-file-manager/types\";\nimport WebinyError from \"@webiny/error\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { File as ImageFile, ImportExportTaskStatus } from \"~/types\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { ExportedPageData, ExportedBlockData } from \"~/export/utils\";\nimport { PageSettings } from \"@webiny/api-page-builder/types\";\n\ninterface FileItem extends File {\n key: string;\n type: string;\n name: string;\n size: number;\n meta: Record<string, any>;\n tags: string[];\n}\n\nconst 
streamPipeline = promisify(pipeline);\n\nconst INSTALL_DIR = \"/tmp\";\nconst INSTALL_EXTRACT_DIR = path.join(INSTALL_DIR, \"apiPageBuilderImport\");\nconst FILES_COUNT_IN_EACH_BATCH = 15;\n\ninterface UpdateFilesInDataParams {\n data: Record<string, any>;\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n}\n\ninterface UpdateImageInPageSettingsParams {\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n settings: PageSettings;\n}\n\nfunction updateImageInPageSettings(\n params: UpdateImageInPageSettingsParams\n): UpdateImageInPageSettingsParams[\"settings\"] {\n const { settings, fileIdToKeyMap, srcPrefix } = params;\n let newSettings = settings;\n\n const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith(\"/\")\n ? srcPrefix.slice(0, -1)\n : srcPrefix;\n\n if (dotProp.get(newSettings, \"general.image.src\")) {\n newSettings = dotProp.set(\n newSettings,\n \"general.image.src\",\n `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(\n settings.general?.image?.id || \"\"\n )}`\n );\n }\n if (dotProp.get(newSettings, \"social.image.src\")) {\n newSettings = dotProp.set(\n newSettings,\n \"social.image.src\",\n `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(\n settings.social?.image?.id || \"\"\n )}`\n );\n }\n\n return newSettings;\n}\n\ninterface UpdateBlockPreviewImage {\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n file: ImageFile;\n}\n\nfunction updateBlockPreviewImage(params: UpdateBlockPreviewImage): ImageFile {\n const { file, fileIdToKeyMap, srcPrefix } = params;\n const newFile = file;\n\n const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith(\"/\")\n ? srcPrefix.slice(0, -1)\n : srcPrefix;\n\n newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || \"\")}`;\n\n return newFile;\n}\n\nfunction updateFilesInData({ data, fileIdToKeyMap, srcPrefix }: UpdateFilesInDataParams) {\n // BASE CASE: Termination point\n if (!data || typeof data !== \"object\") {\n return;\n }\n // Recursively call function if data is array\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n updateFilesInData({ data: element, fileIdToKeyMap, srcPrefix });\n }\n return;\n }\n // Main logic\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n\n if (key === \"file\" && value && fileIdToKeyMap.has(value.id)) {\n value.key = fileIdToKeyMap.get(value.id);\n value.name = fileIdToKeyMap.get(value.id);\n value.src = `${srcPrefix}${srcPrefix.endsWith(\"/\") ? 
\"\" : \"/\"}${fileIdToKeyMap.get(\n value.id\n )}`;\n } else {\n updateFilesInData({ data: value, srcPrefix, fileIdToKeyMap });\n }\n }\n}\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n filesData: File[];\n fileUploadsData: FileUploadsData;\n}\n\ninterface UploadAssetsReturnType {\n fileIdToKeyMap: Map<string, string>;\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams): Promise<UploadAssetsReturnType> => {\n const { context, filesData, fileUploadsData } = params;\n // Save uploaded file key against static id for later use.\n const fileIdToKeyMap = new Map<string, string>();\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return {\n fileIdToKeyMap\n };\n }\n\n // Save files meta data against old key for later use.\n const fileKeyToFileMap = new Map<string, FileItem>();\n // Initialize maps.\n for (let i = 0; i < filesData.length; i++) {\n const file = filesData[i];\n fileKeyToFileMap.set(file.key, file);\n\n // Initialize the value\n fileIdToKeyMap.set(file.id, file.type);\n }\n\n const fileUploadResults = await uploadFilesFromS3({\n fileKeyToFileMap,\n oldKeyToNewKeyMap: fileUploadsData.assets\n });\n\n // Create files in File Manager\n const createFilesInput = fileUploadResults\n .map((uploadResult): FileInput | null => {\n const newKey = uploadResult.Key;\n const file = fileKeyToFileMap.get(getOldFileKey(newKey));\n if (!file) {\n return null;\n }\n\n // Update the file map with newly uploaded file.\n fileIdToKeyMap.set(file.id, newKey);\n\n return {\n key: newKey,\n name: file.name,\n size: file.size,\n type: file.type,\n meta: file.meta,\n tags: file.tags\n };\n })\n .filter(Boolean) as FileInput[];\n\n const createFilesPromises = [];\n // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).\n const createFilesInputChunks = chunk(createFilesInput, FILES_COUNT_IN_EACH_BATCH);\n for (let i = 0; i < createFilesInputChunks.length; i++) {\n const createFilesInputChunk = createFilesInputChunks[i];\n createFilesPromises.push(\n /*\n * We need to break down files into chunks because\n * `createFilesInBatch` operation has a limit on number of files it can handle at once.\n */\n context.fileManager.files.createFilesInBatch(createFilesInputChunk)\n );\n }\n\n await Promise.all(createFilesPromises);\n\n return {\n fileIdToKeyMap\n };\n};\n\ninterface FileUploadsData {\n data: string;\n assets: Record<string, string>;\n}\n\ninterface ImportPageParams {\n key: string;\n pageKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importPage({\n pageKey,\n context,\n fileUploadsData\n}: ImportPageParams): Promise<ExportedPageData[\"page\"]> {\n const log = console.log;\n\n // Making Directory for page in which we're going to extract the page data file.\n const PAGE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, pageKey);\n ensureDirSync(PAGE_EXTRACT_DIR);\n\n const pageDataFileKey = dotProp.get(fileUploadsData, `data`);\n const PAGE_DATA_FILE_PATH = path.join(PAGE_EXTRACT_DIR, path.basename(pageDataFileKey));\n\n log(`Downloading Page data file: ${pageDataFileKey} at \"${PAGE_DATA_FILE_PATH}\"`);\n // Download and save page data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(pageDataFileKey)\n .on(\"error\", reject)\n 
.pipe(createWriteStream(PAGE_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the page data file from disk.\n log(`Load file ${pageDataFileKey}`);\n const { page, files } = await loadJson<ExportedPageData>(PAGE_DATA_FILE_PATH);\n\n // Only update page data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload page assets.\n const { fileIdToKeyMap } = await uploadAssets({\n context,\n /**\n * TODO @ts-refactor @ashutosh figure out correct types.\n */\n // @ts-ignore\n filesData: files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.settings.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: page.content || {},\n fileIdToKeyMap,\n srcPrefix\n });\n\n page.settings = updateImageInPageSettings({\n settings: page.settings || {},\n fileIdToKeyMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for page...\");\n await deleteFile(pageKey);\n\n log(`Remove page contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return page;\n}\n\ninterface ImportBlockParams {\n key: string;\n blockKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importBlock({\n blockKey,\n context,\n fileUploadsData\n}: ImportBlockParams): Promise<ExportedBlockData[\"block\"]> {\n const log = console.log;\n\n // Making Directory for block in which we're going to extract the block data file.\n const BLOCK_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, blockKey);\n ensureDirSync(BLOCK_EXTRACT_DIR);\n\n const blockDataFileKey = dotProp.get(fileUploadsData, `data`);\n const BLOCK_DATA_FILE_PATH = path.join(BLOCK_EXTRACT_DIR, path.basename(blockDataFileKey));\n\n log(`Downloading Block data file: ${blockDataFileKey} at \"${BLOCK_DATA_FILE_PATH}\"`);\n // Download and save block data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(blockDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(BLOCK_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the block data file from disk.\n log(`Load file ${blockDataFileKey}`);\n const { block, files } = await loadJson<ExportedBlockData>(BLOCK_DATA_FILE_PATH);\n\n // Only update block data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload block assets.\n const { fileIdToKeyMap } = await uploadAssets({\n context,\n filesData: files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.settings.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: block.content || {},\n fileIdToKeyMap,\n srcPrefix\n });\n\n block.preview = updateBlockPreviewImage({\n file: block.preview || {},\n fileIdToKeyMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for block...\");\n await deleteFile(blockKey);\n\n log(`Remove block contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return block;\n}\n\ninterface UploadFilesFromZipParams {\n fileKeyToFileMap: Map<string, any>;\n oldKeyToNewKeyMap: Record<string, string>;\n}\n\nasync function uploadFilesFromS3({\n fileKeyToFileMap,\n oldKeyToNewKeyMap\n}: UploadFilesFromZipParams): Promise<S3.ManagedUpload.SendData[]> {\n const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);\n\n const promises = [];\n // Upload all assets.\n for (let i = 0; i < oldKeysForAssets.length; i++) {\n const oldKey = oldKeysForAssets[i];\n const 
tempNewKey = oldKeyToNewKeyMap[oldKey];\n\n // Read file.\n const readStream = s3Stream.readStream(tempNewKey);\n // Get file meta data.\n const fileMetaData = fileKeyToFileMap.get(oldKey);\n\n if (fileMetaData) {\n const newKey = uniqueId(\"\", `-${fileMetaData.key}`);\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, fileMetaData.type);\n readStream.pipe(streamPassThrough);\n promises.push(promise);\n\n console.log(`Successfully queued file \"${newKey}\"`);\n }\n }\n\n return Promise.all(promises);\n}\n\nfunction getOldFileKey(key: string) {\n /*\n * Because we know the naming convention, we can extract the old key from new key.\n */\n try {\n const [, ...rest] = key.split(\"-\");\n return rest.join(\"-\");\n } catch (e) {\n return key;\n }\n}\n\nconst FILE_CONTENT_TYPE = \"application/octet-stream\";\n\nfunction getFileNameWithoutExt(fileName: string): string {\n return path.basename(fileName).replace(path.extname(fileName), \"\");\n}\n\ninterface ImportData {\n assets: Record<string, string>;\n data: string;\n key: string;\n}\n\n/**\n * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.\n * @param zipFileUrl\n * @return ImportData S3 file keys for all uploaded assets group by page/block.\n */\nexport async function readExtractAndUploadZipFileContents(\n zipFileUrl: string\n): Promise<ImportData[]> {\n const log = console.log;\n const importDataList = [];\n\n const zipFileName = path.basename(zipFileUrl).split(\"?\")[0];\n\n const response = await fetch(zipFileUrl);\n if (!response.ok) {\n throw new WebinyError(`Unable to downloading file: \"${zipFileUrl}\"`, response.statusText);\n }\n\n const readStream = response.body;\n\n const uniquePath = uniqueId(\"IMPORTS/\");\n // Read export file and download it in the disk\n const ZIP_FILE_PATH = path.join(INSTALL_DIR, zipFileName);\n\n const writeStream = createWriteStream(ZIP_FILE_PATH);\n await streamPipeline(readStream, writeStream);\n log(`Downloaded file \"${zipFileName}\" at ${ZIP_FILE_PATH}`);\n\n // Extract the downloaded zip file\n const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);\n\n log(`Removing ZIP file \"${zipFileUrl}\" from ${ZIP_FILE_PATH}`);\n await deleteFile(ZIP_FILE_PATH);\n\n // Extract each page/block zip and upload their content's to S3\n for (let i = 0; i < zipFilePaths.length; i++) {\n const currentPath = zipFilePaths[i];\n const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);\n importDataList.push(dataMap);\n }\n log(\"Removing all ZIP files located at \", path.dirname(zipFilePaths[0]));\n await deleteFile(path.dirname(zipFilePaths[0]));\n\n return importDataList;\n}\n\nconst ASSETS_DIR_NAME = \"/assets\";\n\nfunction prepareDataDirMap({\n map,\n filePath,\n newKey\n}: {\n map: ImportData;\n filePath: string;\n newKey: string;\n}): ImportData {\n const dirname = path.dirname(filePath);\n const fileName = path.basename(filePath);\n /*\n * We want to use dot (.) 
as part of object key rather than creating nested object(s).\n * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.\n */\n const oldKey = fileName.replace(/\\./g, \"\\\\.\");\n\n const isAsset = dirname.endsWith(ASSETS_DIR_NAME);\n\n if (isAsset) {\n map = dotProp.set(map, `assets.${oldKey}`, newKey);\n } else {\n // We only need to know the newKey for data file.\n map = dotProp.set(map, `data`, newKey);\n }\n\n return map;\n}\n\nasync function deleteS3Folder(key: string): Promise<void> {\n // Append trailing slash i.e \"/\" to key to make sure we only delete a specific folder.\n if (!key.endsWith(\"/\")) {\n key = `${key}/`;\n }\n\n const response = await s3Stream.listObject(key);\n const keys = (response.Contents || []).map(c => c.Key).filter(Boolean) as string[];\n console.log(`Found ${keys.length} files.`);\n\n const deleteFilePromises = keys.map(key => s3Stream.deleteObject(key));\n\n await Promise.all(deleteFilePromises);\n console.log(`Successfully deleted ${deleteFilePromises.length} files.`);\n}\n\n// export const zeroPad = version => `${version}`.padStart(5, \"0\");\n\nexport function initialStats(total: number) {\n return {\n [ImportExportTaskStatus.PENDING]: total,\n [ImportExportTaskStatus.PROCESSING]: 0,\n [ImportExportTaskStatus.COMPLETED]: 0,\n [ImportExportTaskStatus.FAILED]: 0,\n total\n };\n}\n\nfunction extractZipToDisk(exportFileZipPath: string): Promise<string[]> {\n return new Promise((resolve, reject) => {\n const zipFilePaths: string[] = [];\n const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);\n const EXPORT_FILE_EXTRACTION_PATH = path.join(INSTALL_DIR, uniqueFolderNameForExport);\n // Make sure DIR exists\n ensureDirSync(EXPORT_FILE_EXTRACTION_PATH);\n\n yauzl.open(exportFileZipPath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", exportFileZipPath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Missing zip file resource for path: \" + exportFileZipPath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn(\"ERROR: Failed on END event for file: \", exportFileZipPath, err);\n reject(err);\n }\n resolve(zipFilePaths);\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed to openReadStream for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\n \"ERROR: Missing Read Stream Resource when extracting to disk.\"\n );\n reject(\"Missing Read Stream Resource.\");\n return;\n }\n\n const filePath = path.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);\n\n readStream.on(\"end\", function () {\n zipFilePaths.push(filePath);\n zipFile.readEntry();\n });\n\n streamPipeline(readStream, createWriteStream(filePath)).catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n });\n}\n\nfunction extractZipAndUploadToS3(dataZipFilePath: string, uniquePath: string): 
Promise<ImportData> {\n return new Promise((resolve, reject) => {\n const filePaths = [];\n const fileUploadPromises: Promise<S3.ManagedUpload.SendData>[] = [];\n const uniqueKey = getFileNameWithoutExt(dataZipFilePath);\n let dataMap: ImportData = {\n key: uniqueKey,\n assets: {},\n data: \"\"\n };\n yauzl.open(dataZipFilePath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", dataZipFilePath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Probably failed to extract zip: \" + dataZipFilePath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn('ERROR: Failed on \"END\" for file: ', dataZipFilePath, err);\n reject(err);\n }\n\n Promise.all(fileUploadPromises).then(res => {\n res.forEach(r => {\n console.info(\"Done uploading... \", r);\n });\n resolve(dataMap);\n });\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed while performing [openReadStream] for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\"ERROR: Missing Read Stream while importing.\");\n reject(\"Missing Read Strea Resource.\");\n return;\n }\n readStream.on(\"end\", function () {\n filePaths.push(entry.fileName);\n zipFile.readEntry();\n });\n\n const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;\n // Modify in place\n dataMap = prepareDataDirMap({\n map: dataMap,\n filePath: entry.fileName,\n newKey\n });\n\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);\n\n streamPipeline(readStream, streamPassThrough)\n .then(() => {\n fileUploadPromises.push(promise);\n })\n .catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n 
});\n}\n"],"mappings":";;;;;;;;;;;;;AAAA;;AAEA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AAEA;;AACA;;AACA;;AAEA;;AAaA,MAAMA,cAAc,GAAG,IAAAC,eAAA,EAAUC,gBAAV,CAAvB;AAEA,MAAMC,WAAW,GAAG,MAApB;;AACA,MAAMC,mBAAmB,GAAGC,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB,sBAAvB,CAA5B;;AACA,MAAMI,yBAAyB,GAAG,EAAlC;;AAcA,SAASC,yBAAT,CACIC,MADJ,EAE+C;EAC3C,MAAM;IAAEC,QAAF;IAAYC,cAAZ;IAA4BC;EAA5B,IAA0CH,MAAhD;EACA,IAAII,WAAW,GAAGH,QAAlB;EAEA,MAAMI,6BAA6B,GAAGF,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAChCH,SAAS,CAACI,KAAV,CAAgB,CAAhB,EAAmB,CAAC,CAApB,CADgC,GAEhCJ,SAFN;;EAIA,IAAIK,yBAAA,CAAQC,GAAR,CAAYL,WAAZ,EAAyB,mBAAzB,CAAJ,EAAmD;IAAA;;IAC/CA,WAAW,GAAGI,yBAAA,CAAQE,GAAR,CACVN,WADU,EAEV,mBAFU,EAGT,GAAEC,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAChC,sBAAAR,QAAQ,CAACU,OAAT,iGAAkBC,KAAlB,gFAAyBC,EAAzB,KAA+B,EADC,CAElC,EALQ,CAAd;EAOH;;EACD,IAAIL,yBAAA,CAAQC,GAAR,CAAYL,WAAZ,EAAyB,kBAAzB,CAAJ,EAAkD;IAAA;;IAC9CA,WAAW,GAAGI,yBAAA,CAAQE,GAAR,CACVN,WADU,EAEV,kBAFU,EAGT,GAAEC,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAChC,qBAAAR,QAAQ,CAACa,MAAT,+FAAiBF,KAAjB,gFAAwBC,EAAxB,KAA8B,EADE,CAElC,EALQ,CAAd;EAOH;;EAED,OAAOT,WAAP;AACH;;AAQD,SAASW,uBAAT,CAAiCf,MAAjC,EAA6E;EACzE,MAAM;IAAEgB,IAAF;IAAQd,cAAR;IAAwBC;EAAxB,IAAsCH,MAA5C;EACA,MAAMiB,OAAO,GAAGD,IAAhB;EAEA,MAAMX,6BAA6B,GAAGF,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAChCH,SAAS,CAACI,KAAV,CAAgB,CAAhB,EAAmB,CAAC,CAApB,CADgC,GAEhCJ,SAFN;EAIAc,OAAO,CAACC,GAAR,GAAe,GAAEb,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAAmBO,IAAI,CAACH,EAAL,IAAW,EAA9B,CAAkC,EAApF;EAEA,OAAOI,OAAP;AACH;;AAED,SAASE,iBAAT,CAA2B;EAAEC,IAAF;EAAQlB,cAAR;EAAwBC;AAAxB,CAA3B,EAAyF;EACrF;EACA,IAAI,CAACiB,IAAD,IAAS,OAAOA,IAAP,KAAgB,QAA7B,EAAuC;IACnC;EACH,CAJoF,CAKrF;;;EACA,IAAIC,KAAK,CAACC,OAAN,CAAcF,IAAd,CAAJ,EAAyB;IACrB,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGH,IAAI,CAACI,MAAzB,EAAiCD,CAAC,EAAlC,EAAsC;MAClC,MAAME,OAAO,GAAGL,IAAI,CAACG,CAAD,CAApB;MACAJ,iBAAiB,CAAC;QAAEC,IAAI,EAAEK,OAAR;QAAiBvB,cAAjB;QAAiCC;MAAjC,CAAD,CAAjB;IACH;;IACD;EACH,CAZoF,CAarF;;;EACA,MAAMuB,KAAK,GAAGC,MAAM,CAACC,OAAP,CAAeR,IAAf,CAAd;;EACA,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGG,KAAK,CAACF,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAM,CAACM,GAAD,EAAMC,KAAN,IAAeJ,KAAK,CAACH,CAAD,CAA1B;;IAEA,IAAIM,GAAG,KAAK,MAAR,IAAkBC,KAAlB,IAA2B5B,cAAc,CAAC6B,GAAf,CAAmBD,KAAK,CAACjB,EAAzB,CAA/B,EAA6D;MACzDiB,KAAK,CAACD,GAAN,GAAY3B,cAAc,CAACO,GAAf,CAAmBqB,KAAK,CAACjB,EAAzB,CAAZ;MACAiB,KAAK,CAACE,IAAN,GAAa9B,cAAc,CAACO,GAAf,CAAmBqB,KAAK,CAACjB,EAAzB,CAAb;MACAiB,KAAK,CAACZ,GAAN,GAAa,GAAEf,SAAU,GAAEA,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAA0B,EAA1B,GAA+B,GAAI,GAAEJ,cAAc,CAACO,GAAf,CAC5DqB,KAAK,CAACjB,EADsD,CAE9D,EAFF;IAGH,CAND,MAMO;MACHM,iBAAiB,CAAC;QAAEC,IAAI,EAAEU,KAAR;QAAe3B,SAAf;QAA0BD;MAA1B,CAAD,CAAjB;IACH;EACJ;AACJ;;AAYM,MAAM+B,YAAY,GAAG,MAAOjC,MAAP,IAAuE;EAC/F,MAAM;IAAEkC,OAAF;IAAWC,SAAX;IAAsBC;EAAtB,IAA0CpC,MAAhD,CAD+F,CAE/F;;EACA,MAAME,cAAc,GAAG,IAAImC,GAAJ,EAAvB;EACA;AACJ;AACA;AACA;AACA;;EACI,IAAIC,OAAO,CAACC,GAAR,CAAYC,QAAZ,KAAyB,MAA7B,EAAqC;IACjC,OAAO;MACHtC;IADG,CAAP;EAGH,CAb8F,CAe/F;;;EACA,MAAMuC,gBAAgB,GAAG,IAAIJ,GAAJ,EAAzB,CAhB+F,CAiB/F;;EACA,KAAK,IAAId,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGY,SAAS,CAACX,MAA9B,EAAsCD,CAAC,EAAvC,EAA2C;IACvC,MAAMP,IAAI,GAAGmB,SAAS,CAACZ,CAAD,CAAtB;IACAkB,gBAAgB,CAAC/B,GAAjB,CAAqBM,IAAI,CAACa,GAA1B,EAA+Bb,IAA/B,EAFuC,CAIvC;;IACAd,cAAc,CAACQ,GAAf,CAAmBM,IAAI,CAACH,EAAxB,EAA4BG,IAAI,CAAC0B,IAAjC;EACH;;EAED,MAAMC,iBAAiB,GAAG,MAAMC,iBAAiB,CAAC;IAC9CH,gBAD8C;IAE9CI,iBAAiB,EAAET,eAAe,CAACU;EAFW,CAAD,CAAjD,CA1B+F,CA+B/F;;EACA,MAAMC,gBAAgB,GAAGJ,iBAAiB,CACrCK,GADoB,CACfC,YAAD,IAAoC;IACrC,MAAMC,MAAM,GAAGD,YAAY,CAACE,GAA5B;IACA,MAAMnC,IAAI,GAAGyB,gBAAgB,CAAChC,GAAjB,CAAqB2C,aAAa,CAACF,MAAD,CAAl
C,CAAb;;IACA,IAAI,CAAClC,IAAL,EAAW;MACP,OAAO,IAAP;IACH,CALoC,CAOrC;;;IACAd,cAAc,CAACQ,GAAf,CAAmBM,IAAI,CAACH,EAAxB,EAA4BqC,MAA5B;IAEA,OAAO;MACHrB,GAAG,EAAEqB,MADF;MAEHlB,IAAI,EAAEhB,IAAI,CAACgB,IAFR;MAGHqB,IAAI,EAAErC,IAAI,CAACqC,IAHR;MAIHX,IAAI,EAAE1B,IAAI,CAAC0B,IAJR;MAKHY,IAAI,EAAEtC,IAAI,CAACsC,IALR;MAMHC,IAAI,EAAEvC,IAAI,CAACuC;IANR,CAAP;EAQH,CAnBoB,EAoBpBC,MApBoB,CAoBbC,OApBa,CAAzB;EAsBA,MAAMC,mBAAmB,GAAG,EAA5B,CAtD+F,CAuD/F;;EACA,MAAMC,sBAAsB,GAAG,IAAAC,cAAA,EAAMb,gBAAN,EAAwBjD,yBAAxB,CAA/B;;EACA,KAAK,IAAIyB,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGoC,sBAAsB,CAACnC,MAA3C,EAAmDD,CAAC,EAApD,EAAwD;IACpD,MAAMsC,qBAAqB,GAAGF,sBAAsB,CAACpC,CAAD,CAApD;IACAmC,mBAAmB,CAACI,IAApB;IACI;AACZ;AACA;AACA;IACY5B,OAAO,CAAC6B,WAAR,CAAoBC,KAApB,CAA0BC,kBAA1B,CAA6CJ,qBAA7C,CALJ;EAOH;;EAED,MAAMK,OAAO,CAACC,GAAR,CAAYT,mBAAZ,CAAN;EAEA,OAAO;IACHxD;EADG,CAAP;AAGH,CAzEM;;;;AAuFA,eAAekE,UAAf,CAA0B;EAC7BC,OAD6B;EAE7BnC,OAF6B;EAG7BE;AAH6B,CAA1B,EAIiD;EACpD,MAAMkC,GAAG,GAAGC,OAAO,CAACD,GAApB,CADoD,CAGpD;;EACA,MAAME,gBAAgB,GAAG5E,aAAA,CAAKC,IAAL,CAAUF,mBAAV,EAA+B0E,OAA/B,CAAzB;;EACA,IAAAI,sBAAA,EAAcD,gBAAd;;EAEA,MAAME,eAAe,GAAGlE,yBAAA,CAAQC,GAAR,CAAY2B,eAAZ,EAA8B,MAA9B,CAAxB;;EACA,MAAMuC,mBAAmB,GAAG/E,aAAA,CAAKC,IAAL,CAAU2E,gBAAV,EAA4B5E,aAAA,CAAKgF,QAAL,CAAcF,eAAd,CAA5B,CAA5B;;EAEAJ,GAAG,CAAE,+BAA8BI,eAAgB,QAAOC,mBAAoB,GAA3E,CAAH,CAVoD,CAWpD;;EACA,MAAM,IAAIT,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACnCC,kBAAA,CACKC,UADL,CACgBN,eADhB,EAEKO,EAFL,CAEQ,OAFR,EAEiBH,MAFjB,EAGKI,IAHL,CAGU,IAAAC,qBAAA,EAAkBR,mBAAlB,CAHV,EAIKM,EAJL,CAIQ,OAJR,EAIiBH,MAJjB,EAKKG,EALL,CAKQ,QALR,EAKkBJ,OALlB;EAMH,CAPK,CAAN,CAZoD,CAqBpD;;EACAP,GAAG,CAAE,aAAYI,eAAgB,EAA9B,CAAH;EACA,MAAM;IAAEU,IAAF;IAAQpB;EAAR,IAAkB,MAAM,IAAAqB,qBAAA,EAA2BV,mBAA3B,CAA9B,CAvBoD,CAyBpD;;EACA,IAAIX,KAAK,IAAI3C,KAAK,CAACC,OAAN,CAAc0C,KAAd,CAAT,IAAiCA,KAAK,CAACxC,MAAN,GAAe,CAApD,EAAuD;IACnD;IACA,MAAM;MAAEtB;IAAF,IAAqB,MAAM+B,YAAY,CAAC;MAC1CC,OAD0C;;MAE1C;AACZ;AACA;MACY;MACAC,SAAS,EAAE6B,KAN+B;MAO1C5B;IAP0C,CAAD,CAA7C;IAUA,MAAMnC,QAAQ,GAAG,MAAMiC,OAAO,CAAC6B,WAAR,CAAoB9D,QAApB,CAA6BqF,WAA7B,EAAvB;IAEA,MAAM;MAAEnF,SAAS,GAAG;IAAd,IAAqBF,QAAQ,IAAI,EAAvC;IACAkB,iBAAiB,CAAC;MACdC,IAAI,EAAEgE,IAAI,CAACG,OAAL,IAAgB,EADR;MAEdrF,cAFc;MAGdC;IAHc,CAAD,CAAjB;IAMAiF,IAAI,CAACnF,QAAL,GAAgBF,yBAAyB,CAAC;MACtCE,QAAQ,EAAEmF,IAAI,CAACnF,QAAL,IAAiB,EADW;MAEtCC,cAFsC;MAGtCC;IAHsC,CAAD,CAAzC;EAKH;;EAEDmE,GAAG,CAAC,gCAAD,CAAH;EACA,MAAM,IAAAkB,gCAAA,EAAWnB,OAAX,CAAN;EAEAC,GAAG,CAAE,iCAAF,CAAH;EACA,MAAMmB,cAAc,CAAC7F,aAAA,CAAK8F,OAAL,CAAatD,eAAe,CAAChB,IAA7B,CAAD,CAApB;EAEA,OAAOgE,IAAP;AACH;;AASM,eAAeO,WAAf,CAA2B;EAC9BC,QAD8B;EAE9B1D,OAF8B;EAG9BE;AAH8B,CAA3B,EAIoD;EACvD,MAAMkC,GAAG,GAAGC,OAAO,CAACD,GAApB,CADuD,CAGvD;;EACA,MAAMuB,iBAAiB,GAAGjG,aAAA,CAAKC,IAAL,CAAUF,mBAAV,EAA+BiG,QAA/B,CAA1B;;EACA,IAAAnB,sBAAA,EAAcoB,iBAAd;;EAEA,MAAMC,gBAAgB,GAAGtF,yBAAA,CAAQC,GAAR,CAAY2B,eAAZ,EAA8B,MAA9B,CAAzB;;EACA,MAAM2D,oBAAoB,GAAGnG,aAAA,CAAKC,IAAL,CAAUgG,iBAAV,EAA6BjG,aAAA,CAAKgF,QAAL,CAAckB,gBAAd,CAA7B,CAA7B;;EAEAxB,GAAG,CAAE,gCAA+BwB,gBAAiB,QAAOC,oBAAqB,GAA9E,CAAH,CAVuD,CAWvD;;EACA,MAAM,IAAI7B,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACnCC,kBAAA,CACKC,UADL,CACgBc,gBADhB,EAEKb,EAFL,CAEQ,OAFR,EAEiBH,MAFjB,EAGKI,IAHL,CAGU,IAAAC,qBAAA,EAAkBY,oBAAlB,CAHV,EAIKd,EAJL,CAIQ,OAJR,EAIiBH,MAJjB,EAKKG,EALL,CAKQ,QALR,EAKkBJ,OALlB;EAMH,CAPK,CAAN,CAZuD,CAqBvD;;EACAP,GAAG,CAAE,aAAYwB,gBAAiB,EAA/B,CAAH;EACA,MAAM;IAAEE,KAAF;IAAShC;EAAT,IAAmB,MAAM,IAAAqB,qBAAA,EAA4BU,oBAA5B,CAA/B,CAvBuD,CAyBvD;;EACA,IAAI/B,KAAK,IAAI3C,KAAK,CAACC,OAAN,CAAc0C,KAAd,CAAT,IAAiCA,KAAK,CAACxC,MAAN,GAAe,CAApD,EAAuD;IACnD;IACA,MAAM;MAAEtB;IAAF,IAAqB,MAAM+B,YA
AY,CAAC;MAC1CC,OAD0C;MAE1CC,SAAS,EAAE6B,KAF+B;MAG1C5B;IAH0C,CAAD,CAA7C;IAMA,MAAMnC,QAAQ,GAAG,MAAMiC,OAAO,CAAC6B,WAAR,CAAoB9D,QAApB,CAA6BqF,WAA7B,EAAvB;IAEA,MAAM;MAAEnF,SAAS,GAAG;IAAd,IAAqBF,QAAQ,IAAI,EAAvC;IACAkB,iBAAiB,CAAC;MACdC,IAAI,EAAE4E,KAAK,CAACT,OAAN,IAAiB,EADT;MAEdrF,cAFc;MAGdC;IAHc,CAAD,CAAjB;IAMA6F,KAAK,CAACC,OAAN,GAAgBlF,uBAAuB,CAAC;MACpCC,IAAI,EAAEgF,KAAK,CAACC,OAAN,IAAiB,EADa;MAEpC/F,cAFoC;MAGpCC;IAHoC,CAAD,CAAvC;EAKH;;EAEDmE,GAAG,CAAC,iCAAD,CAAH;EACA,MAAM,IAAAkB,gCAAA,EAAWI,QAAX,CAAN;EAEAtB,GAAG,CAAE,kCAAF,CAAH;EACA,MAAMmB,cAAc,CAAC7F,aAAA,CAAK8F,OAAL,CAAatD,eAAe,CAAChB,IAA7B,CAAD,CAApB;EAEA,OAAO4E,KAAP;AACH;;AAOD,eAAepD,iBAAf,CAAiC;EAC7BH,gBAD6B;EAE7BI;AAF6B,CAAjC,EAGmE;EAC/D,MAAMqD,gBAAgB,GAAGvE,MAAM,CAACwE,IAAP,CAAYtD,iBAAZ,CAAzB;EAEA,MAAMuD,QAAQ,GAAG,EAAjB,CAH+D,CAI/D;;EACA,KAAK,IAAI7E,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG2E,gBAAgB,CAAC1E,MAArC,EAA6CD,CAAC,EAA9C,EAAkD;IAC9C,MAAM8E,MAAM,GAAGH,gBAAgB,CAAC3E,CAAD,CAA/B;IACA,MAAM+E,UAAU,GAAGzD,iBAAiB,CAACwD,MAAD,CAApC,CAF8C,CAI9C;;IACA,MAAMrB,UAAU,GAAGD,kBAAA,CAASC,UAAT,CAAoBsB,UAApB,CAAnB,CAL8C,CAM9C;;;IACA,MAAMC,YAAY,GAAG9D,gBAAgB,CAAChC,GAAjB,CAAqB4F,MAArB,CAArB;;IAEA,IAAIE,YAAJ,EAAkB;MACd,MAAMrD,MAAM,GAAG,IAAAsD,eAAA,EAAS,EAAT,EAAc,IAAGD,YAAY,CAAC1E,GAAI,EAAlC,CAAf;;MACA,MAAM;QAAE4E,iBAAF;QAAqBC,8BAA8B,EAAEC;MAArD,IACF5B,kBAAA,CAAS6B,WAAT,CAAqB1D,MAArB,EAA6BqD,YAAY,CAAC7D,IAA1C,CADJ;;MAEAsC,UAAU,CAACE,IAAX,CAAgBuB,iBAAhB;MACAL,QAAQ,CAACtC,IAAT,CAAc6C,OAAd;MAEApC,OAAO,CAACD,GAAR,CAAa,6BAA4BpB,MAAO,GAAhD;IACH;EACJ;;EAED,OAAOgB,OAAO,CAACC,GAAR,CAAYiC,QAAZ,CAAP;AACH;;AAED,SAAShD,aAAT,CAAuBvB,GAAvB,EAAoC;EAChC;AACJ;AACA;EACI,IAAI;IACA,MAAM,GAAG,GAAGgF,IAAN,IAAchF,GAAG,CAACiF,KAAJ,CAAU,GAAV,CAApB;IACA,OAAOD,IAAI,CAAChH,IAAL,CAAU,GAAV,CAAP;EACH,CAHD,CAGE,OAAOkH,CAAP,EAAU;IACR,OAAOlF,GAAP;EACH;AACJ;;AAED,MAAMmF,iBAAiB,GAAG,0BAA1B;;AAEA,SAASC,qBAAT,CAA+BC,QAA/B,EAAyD;EACrD,OAAOtH,aAAA,CAAKgF,QAAL,CAAcsC,QAAd,EAAwBC,OAAxB,CAAgCvH,aAAA,CAAKwH,OAAL,CAAaF,QAAb,CAAhC,EAAwD,EAAxD,CAAP;AACH;;AAQD;AACA;AACA;AACA;AACA;AACO,eAAeG,mCAAf,CACHC,UADG,EAEkB;EACrB,MAAMhD,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,MAAMiD,cAAc,GAAG,EAAvB;;EAEA,MAAMC,WAAW,GAAG5H,aAAA,CAAKgF,QAAL,CAAc0C,UAAd,EAA0BR,KAA1B,CAAgC,GAAhC,EAAqC,CAArC,CAApB;;EAEA,MAAMW,QAAQ,GAAG,MAAM,IAAAC,kBAAA,EAAMJ,UAAN,CAAvB;;EACA,IAAI,CAACG,QAAQ,CAACE,EAAd,EAAkB;IACd,MAAM,IAAIC,cAAJ,CAAiB,gCAA+BN,UAAW,GAA3D,EAA+DG,QAAQ,CAACI,UAAxE,CAAN;EACH;;EAED,MAAM7C,UAAU,GAAGyC,QAAQ,CAACK,IAA5B;EAEA,MAAMC,UAAU,GAAG,IAAAvB,eAAA,EAAS,UAAT,CAAnB,CAbqB,CAcrB;;EACA,MAAMwB,aAAa,GAAGpI,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB8H,WAAvB,CAAtB;;EAEA,MAAMZ,WAAW,GAAG,IAAAzB,qBAAA,EAAkB6C,aAAlB,CAApB;EACA,MAAMzI,cAAc,CAACyF,UAAD,EAAa4B,WAAb,CAApB;EACAtC,GAAG,CAAE,oBAAmBkD,WAAY,QAAOQ,aAAc,EAAtD,CAAH,CAnBqB,CAqBrB;;EACA,MAAMC,YAAY,GAAG,MAAMC,gBAAgB,CAACF,aAAD,CAA3C;EAEA1D,GAAG,CAAE,sBAAqBgD,UAAW,UAASU,aAAc,EAAzD,CAAH;EACA,MAAM,IAAAxC,gCAAA,EAAWwC,aAAX,CAAN,CAzBqB,CA2BrB;;EACA,KAAK,IAAIzG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG0G,YAAY,CAACzG,MAAjC,EAAyCD,CAAC,EAA1C,EAA8C;IAC1C,MAAM4G,WAAW,GAAGF,YAAY,CAAC1G,CAAD,CAAhC;IACA,MAAM6G,OAAO,GAAG,MAAMC,uBAAuB,CAACF,WAAD,EAAcJ,UAAd,CAA7C;IACAR,cAAc,CAACzD,IAAf,CAAoBsE,OAApB;EACH;;EACD9D,GAAG,CAAC,oCAAD,EAAuC1E,aAAA,CAAK8F,OAAL,CAAauC,YAAY,CAAC,CAAD,CAAzB,CAAvC,CAAH;EACA,MAAM,IAAAzC,gCAAA,EAAW5F,aAAA,CAAK8F,OAAL,CAAauC,YAAY,CAAC,CAAD,CAAzB,CAAX,CAAN;EAEA,OAAOV,cAAP;AACH;;AAED,MAAMe,eAAe,GAAG,SAAxB;;AAEA,SAASC,iBAAT,CAA2B;EACvBvF,GADuB;EAEvBwF,QAFuB;EAGvBtF;AAHuB,CAA3B,EAQe;EACX,MAAMwC,OAAO,GAAG9F,aAAA,CAAK8F,OAAL,CAAa8C,QAAb,CAAhB;;EACA,MAAMtB,QAAQ,GAAGtH,aAAA,CAAKgF,QAAL,CAAc4D,QAAd,CAAjB;EACA;AACJ;AACA;A
ACA;;;EACI,MAAMnC,MAAM,GAAGa,QAAQ,CAACC,OAAT,CAAiB,KAAjB,EAAwB,KAAxB,CAAf;EAEA,MAAMsB,OAAO,GAAG/C,OAAO,CAACpF,QAAR,CAAiBgI,eAAjB,CAAhB;;EAEA,IAAIG,OAAJ,EAAa;IACTzF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,UAASqD,MAAO,EAAlC,EAAqCnD,MAArC,CAAN;EACH,CAFD,MAEO;IACH;IACAF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,MAAlB,EAAyBE,MAAzB,CAAN;EACH;;EAED,OAAOF,GAAP;AACH;;AAED,eAAeyC,cAAf,CAA8B5D,GAA9B,EAA0D;EACtD;EACA,IAAI,CAACA,GAAG,CAACvB,QAAJ,CAAa,GAAb,CAAL,EAAwB;IACpBuB,GAAG,GAAI,GAAEA,GAAI,GAAb;EACH;;EAED,MAAM4F,QAAQ,GAAG,MAAM1C,kBAAA,CAAS2D,UAAT,CAAoB7G,GAApB,CAAvB;EACA,MAAMsE,IAAI,GAAG,CAACsB,QAAQ,CAACkB,QAAT,IAAqB,EAAtB,EAA0B3F,GAA1B,CAA8B4F,CAAC,IAAIA,CAAC,CAACzF,GAArC,EAA0CK,MAA1C,CAAiDC,OAAjD,CAAb;EACAc,OAAO,CAACD,GAAR,CAAa,SAAQ6B,IAAI,CAAC3E,MAAO,SAAjC;EAEA,MAAMqH,kBAAkB,GAAG1C,IAAI,CAACnD,GAAL,CAASnB,GAAG,IAAIkD,kBAAA,CAAS+D,YAAT,CAAsBjH,GAAtB,CAAhB,CAA3B;EAEA,MAAMqC,OAAO,CAACC,GAAR,CAAY0E,kBAAZ,CAAN;EACAtE,OAAO,CAACD,GAAR,CAAa,wBAAuBuE,kBAAkB,CAACrH,MAAO,SAA9D;AACH,C,CAED;;;AAEO,SAASuH,YAAT,CAAsBC,KAAtB,EAAqC;EACxC,OAAO;IACH,CAACC,6BAAA,CAAuBC,OAAxB,GAAkCF,KAD/B;IAEH,CAACC,6BAAA,CAAuBE,UAAxB,GAAqC,CAFlC;IAGH,CAACF,6BAAA,CAAuBG,SAAxB,GAAoC,CAHjC;IAIH,CAACH,6BAAA,CAAuBI,MAAxB,GAAiC,CAJ9B;IAKHL;EALG,CAAP;AAOH;;AAED,SAASd,gBAAT,CAA0BoB,iBAA1B,EAAwE;EACpE,OAAO,IAAIpF,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAMmD,YAAsB,GAAG,EAA/B;IACA,MAAMsB,yBAAyB,GAAGtC,qBAAqB,CAACqC,iBAAD,CAAvD;;IACA,MAAME,2BAA2B,GAAG5J,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB6J,yBAAvB,CAApC,CAHoC,CAIpC;;;IACA,IAAA9E,sBAAA,EAAc+E,2BAAd;;IAEAC,cAAA,CAAMC,IAAN,CAAWJ,iBAAX,EAA8B;MAAEK,WAAW,EAAE;IAAf,CAA9B,EAAqD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACzE,IAAID,GAAJ,EAAS;QACLrF,OAAO,CAACuF,IAAR,CAAa,gCAAb,EAA+CR,iBAA/C,EAAkEM,GAAlE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,gDAAgDgF,iBAA5D;QACAxE,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MAEDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MAEAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,uCAAb,EAAsDR,iBAAtD,EAAyEM,GAAzE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QACD/E,OAAO,CAACoD,YAAD,CAAP;MACH,CAND;MAQA4B,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4CADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CACI,8DADJ;cAGAQ,MAAM,CAAC,+BAAD,CAAN;cACA;YACH;;YAED,MAAM0D,QAAQ,GAAG5I,aAAA,CAAKC,IAAL,CAAU2J,2BAAV,EAAuCU,KAAK,CAAChD,QAA7C,CAAjB;;YAEAlC,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BgD,YAAY,CAACnE,IAAb,CAAkB0E,QAAlB;cACAqB,OAAO,CAACI,SAAR;YACH,CAHD;YAKA1K,cAAc,CAACyF,UAAD,EAAa,IAAAG,qBAAA,EAAkBqD,QAAlB,CAAb,CAAd,CAAwD6B,KAAxD,CAA8DC,KAAK,IAAI;cACnExF,MAAM,CAACwF,KAAD,CAAN;YACH,CAFD;UAGH,CA5BD;QA6BH;MACJ,CAvCD;IAwCH,CAhED;EAiEH,CAxEM,CAAP;AAyEH;;AAED,SAASjC,uBAAT,CAAiCkC,eAAjC,EAA0DxC,UAA1D,EAAmG;EAC/F,OAAO,IAAI7D,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAM0F,SAAS,GAAG,EAAlB;IACA,MAAMC,kBAAwD,GAAG,EAAjE;IACA,MAAMC,SAAS,GAAGzD,qBAAqB,CAACsD,eAAD,CAAvC;IACA,IAAInC,OAAmB,GAAG;MACtBvG,GAAG,EAAE6I,SADiB;MAEtB5H,MAAM,EAAE,EAFc;MAGtB1B,IAAI,EAAE;IAHgB,CAA1B;;IAKAqI,cAAA,CAAMC,IAAN,CAAWa,eAAX,EAA4B;MAAEZ,WAAW,EAAE;IAAf,CAA5B,EAAmD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACvE,IAAID,GAAJ,EAAS;QACLrF,OAAO,
CAACuF,IAAR,CAAa,gCAAb,EAA+CS,eAA/C,EAAgEX,GAAhE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,4CAA4CiG,eAAxD;QACAzF,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MACDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MACAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,mCAAb,EAAkDS,eAAlD,EAAmEX,GAAnE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QAED1F,OAAO,CAACC,GAAR,CAAYsG,kBAAZ,EAAgCE,IAAhC,CAAqCC,GAAG,IAAI;UACxCA,GAAG,CAACC,OAAJ,CAAYC,CAAC,IAAI;YACbvG,OAAO,CAACwF,IAAR,CAAa,oBAAb,EAAmCe,CAAnC;UACH,CAFD;UAGAjG,OAAO,CAACuD,OAAD,CAAP;QACH,CALD;MAMH,CAZD;MAcAyB,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4DADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CAAY,6CAAZ;cACAQ,MAAM,CAAC,8BAAD,CAAN;cACA;YACH;;YACDE,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BuF,SAAS,CAAC1G,IAAV,CAAeoG,KAAK,CAAChD,QAArB;cACA2C,OAAO,CAACI,SAAR;YACH,CAHD;YAKA,MAAM/G,MAAM,GAAI,GAAE6E,UAAW,IAAG2C,SAAU,IAAGR,KAAK,CAAChD,QAAS,EAA5D,CApBqD,CAqBrD;;YACAkB,OAAO,GAAGG,iBAAiB,CAAC;cACxBvF,GAAG,EAAEoF,OADmB;cAExBI,QAAQ,EAAE0B,KAAK,CAAChD,QAFQ;cAGxBhE;YAHwB,CAAD,CAA3B;;YAMA,MAAM;cAAEuD,iBAAF;cAAqBC,8BAA8B,EAAEC;YAArD,IACF5B,kBAAA,CAAS6B,WAAT,CAAqB1D,MAArB,EAA6B8D,iBAA7B,CADJ;;YAGAzH,cAAc,CAACyF,UAAD,EAAayB,iBAAb,CAAd,CACKkE,IADL,CACU,MAAM;cACRF,kBAAkB,CAAC3G,IAAnB,CAAwB6C,OAAxB;YACH,CAHL,EAIK0D,KAJL,CAIWC,KAAK,IAAI;cACZxF,MAAM,CAACwF,KAAD,CAAN;YACH,CANL;UAOH,CAtCD;QAuCH;MACJ,CAjDD;IAkDH,CA9ED;EA+EH,CAxFM,CAAP;AAyFH"}