@webiny/api-page-builder-import-export 5.33.5 → 5.34.0-beta.1

This diff compares the contents of the two publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (101)
  1. package/client.d.ts +2 -2
  2. package/client.js +4 -10
  3. package/client.js.map +1 -1
  4. package/export/combine/blocksHandler.d.ts +6 -0
  5. package/export/combine/blocksHandler.js +106 -0
  6. package/export/combine/blocksHandler.js.map +1 -0
  7. package/{exportPages → export}/combine/index.d.ts +3 -2
  8. package/export/combine/index.js +30 -0
  9. package/export/combine/index.js.map +1 -0
  10. package/export/combine/pagesHandler.d.ts +6 -0
  11. package/export/combine/pagesHandler.js +106 -0
  12. package/export/combine/pagesHandler.js.map +1 -0
  13. package/export/process/blocksHandler.d.ts +6 -0
  14. package/export/process/blocksHandler.js +176 -0
  15. package/export/process/blocksHandler.js.map +1 -0
  16. package/{exportPages → export}/process/index.d.ts +4 -6
  17. package/export/process/index.js +27 -0
  18. package/export/process/index.js.map +1 -0
  19. package/export/process/pagesHandler.d.ts +6 -0
  20. package/export/process/pagesHandler.js +204 -0
  21. package/export/process/pagesHandler.js.map +1 -0
  22. package/{exportPages → export}/s3Stream.d.ts +0 -0
  23. package/{exportPages → export}/s3Stream.js +4 -6
  24. package/{exportPages → export}/s3Stream.js.map +1 -1
  25. package/export/utils.d.ts +16 -0
  26. package/export/utils.js +135 -0
  27. package/export/utils.js.map +1 -0
  28. package/{exportPages → export}/zipper.d.ts +6 -5
  29. package/{exportPages → export}/zipper.js +11 -12
  30. package/export/zipper.js.map +1 -0
  31. package/graphql/crud/blocks.crud.d.ts +4 -0
  32. package/graphql/crud/blocks.crud.js +155 -0
  33. package/graphql/crud/blocks.crud.js.map +1 -0
  34. package/graphql/crud/importExportTasks.crud.d.ts +5 -0
  35. package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +64 -57
  36. package/graphql/crud/importExportTasks.crud.js.map +1 -0
  37. package/graphql/crud/pages.crud.d.ts +2 -2
  38. package/graphql/crud/pages.crud.js +15 -13
  39. package/graphql/crud/pages.crud.js.map +1 -1
  40. package/graphql/crud.d.ts +2 -2
  41. package/graphql/crud.js +5 -3
  42. package/graphql/crud.js.map +1 -1
  43. package/graphql/graphql/blocks.gql.d.ts +4 -0
  44. package/graphql/graphql/blocks.gql.js +57 -0
  45. package/graphql/graphql/blocks.gql.js.map +1 -0
  46. package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
  47. package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +18 -18
  48. package/graphql/graphql/importExportTasks.gql.js.map +1 -0
  49. package/graphql/graphql/pages.gql.d.ts +2 -2
  50. package/graphql/graphql/pages.gql.js +3 -9
  51. package/graphql/graphql/pages.gql.js.map +1 -1
  52. package/graphql/graphql/utils/resolve.d.ts +1 -1
  53. package/graphql/graphql.js +5 -3
  54. package/graphql/graphql.js.map +1 -1
  55. package/graphql/index.d.ts +2 -2
  56. package/graphql/index.js +1 -1
  57. package/graphql/index.js.map +1 -1
  58. package/graphql/types.d.ts +43 -23
  59. package/graphql/types.js.map +1 -1
  60. package/import/create/blocksHandler.d.ts +3 -0
  61. package/import/create/blocksHandler.js +110 -0
  62. package/import/create/blocksHandler.js.map +1 -0
  63. package/{importPages → import}/create/index.d.ts +5 -4
  64. package/import/create/index.js +30 -0
  65. package/import/create/index.js.map +1 -0
  66. package/import/create/pagesHandler.d.ts +3 -0
  67. package/import/create/pagesHandler.js +110 -0
  68. package/import/create/pagesHandler.js.map +1 -0
  69. package/import/process/blocksHandler.d.ts +3 -0
  70. package/import/process/blocksHandler.js +175 -0
  71. package/import/process/blocksHandler.js.map +1 -0
  72. package/{importPages → import}/process/index.d.ts +4 -3
  73. package/import/process/index.js +27 -0
  74. package/import/process/index.js.map +1 -0
  75. package/import/process/pagesHandler.d.ts +3 -0
  76. package/import/process/pagesHandler.js +180 -0
  77. package/import/process/pagesHandler.js.map +1 -0
  78. package/{importPages → import}/utils.d.ts +19 -20
  79. package/{importPages → import}/utils.js +108 -37
  80. package/import/utils.js.map +1 -0
  81. package/package.json +27 -27
  82. package/types.d.ts +62 -65
  83. package/types.js +17 -17
  84. package/types.js.map +1 -1
  85. package/exportPages/combine/index.js +0 -114
  86. package/exportPages/combine/index.js.map +0 -1
  87. package/exportPages/process/index.js +0 -208
  88. package/exportPages/process/index.js.map +0 -1
  89. package/exportPages/utils.d.ts +0 -13
  90. package/exportPages/utils.js +0 -113
  91. package/exportPages/utils.js.map +0 -1
  92. package/exportPages/zipper.js.map +0 -1
  93. package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
  94. package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
  95. package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
  96. package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
  97. package/importPages/create/index.js +0 -118
  98. package/importPages/create/index.js.map +0 -1
  99. package/importPages/process/index.js +0 -185
  100. package/importPages/process/index.js.map +0 -1
  101. package/importPages/utils.js.map +0 -1
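Taken together, the list shows the shape of this release: the exportPages/* and importPages/* trees move to export/* and import/*, each combine/create/process entry point is split into a pagesHandler plus a new blocksHandler, and the PageImportExportTask* CRUD, GraphQL, and type names become ImportExportTask*. A minimal sketch of the dispatch this layout implies is shown below; the "page"/"block" values and the switch itself are assumptions, since the diff only shows that the process payload carries a `type` field. The first hunk after the sketch is the new import/process/pagesHandler.js (entry 76 above; its source map follows it).

```ts
// Hypothetical dispatcher mirroring the new process/index entry points
// (not verbatim from the diff; handler bodies are stand-ins).
interface ProcessPayload {
    taskId: string;
    subTaskIndex: number;
    type: string; // assumed values: "page" | "block"
    identity: unknown;
}

interface HandlerResponse {
    data: string | null;
    error: { message: string } | null;
}

type Handler = (payload: ProcessPayload) => Promise<HandlerResponse>;

// Stand-ins for the real pagesHandler/blocksHandler added in this release.
const pagesHandler: Handler = async () => ({ data: "", error: null });
const blocksHandler: Handler = async () => ({ data: "", error: null });

export const processHandler: Handler = async payload => {
    switch (payload.type) {
        case "page":
            return pagesHandler(payload);
        case "block":
            return blocksHandler(payload);
        default:
            return { data: null, error: { message: `Unknown type "${payload.type}".` } };
    }
};
```

The real index modules are only about 30 lines each (+30/+27 in the list above), consistent with thin routing wrappers of this kind.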
@@ -0,0 +1,180 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.pagesHandler = void 0;
+
+ var _types = require("../../types");
+
+ var _utils = require("../utils");
+
+ var _client = require("../../client");
+
+ var _mockSecurity = require("../../mockSecurity");
+
+ var _utils2 = require("@webiny/utils");
+
+ const pagesHandler = async (configuration, payload, context) => {
+   const log = console.log;
+   let subTask;
+   let noPendingTask = true;
+   let prevStatusOfSubTask = _types.ImportExportTaskStatus.PENDING;
+   log("RUNNING Import Page Queue Process");
+   const {
+     pageBuilder
+   } = context;
+   const {
+     taskId,
+     subTaskIndex,
+     type,
+     identity
+   } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
+   // and this Lambda is invoked internally, without credentials.
+
+   (0, _mockSecurity.mockSecurity)(identity, context);
+
+   try {
+     /*
+      * Note: We're not going to DB for getting next sub-task to process,
+      * because the data might be out of sync due to GSI eventual consistency.
+      */
+     subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
+     /**
+      * Base condition!!
+      * Bail out early, if task not found or task's status is not "pending".
+      */
+
+     if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
+       noPendingTask = true;
+       return {
+         data: "",
+         error: null
+       };
+     } else {
+       noPendingTask = false;
+     }
+
+     prevStatusOfSubTask = subTask.status;
+     log(`Fetched sub task => ${subTask.id}`);
+     const {
+       pageKey,
+       category,
+       zipFileKey,
+       input
+     } = subTask.data;
+     const {
+       fileUploadsData
+     } = input;
+     log(`Processing page key "${pageKey}"`); // Mark task status as PROCESSING
+
+     subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
+       status: _types.ImportExportTaskStatus.PROCESSING
+     }); // Update stats in main task
+
+     await pageBuilder.importExportTask.updateStats(taskId, {
+       prevStatus: prevStatusOfSubTask,
+       nextStatus: _types.ImportExportTaskStatus.PROCESSING
+     });
+     prevStatusOfSubTask = subTask.status; // Real job
+
+     const page = await (0, _utils.importPage)({
+       context,
+       pageKey,
+       key: zipFileKey,
+       fileUploadsData
+     }); // Create a page
+
+     let pbPage = await context.pageBuilder.createPage(category); // Update page with data
+
+     pbPage = await context.pageBuilder.updatePage(pbPage.id, {
+       content: page.content,
+       title: page.title,
+       path: page.path,
+       settings: page.settings
+     }); // TODO: Publish page
+     // Update task record in DB
+
+     subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
+       status: _types.ImportExportTaskStatus.COMPLETED,
+       data: {
+         message: "Done",
+         page: {
+           id: pbPage.id,
+           title: pbPage.title,
+           version: pbPage.version,
+           status: pbPage.status
+         }
+       }
+     }); // Update stats in main task
+
+     await pageBuilder.importExportTask.updateStats(taskId, {
+       prevStatus: prevStatusOfSubTask,
+       nextStatus: _types.ImportExportTaskStatus.COMPLETED
+     });
+     prevStatusOfSubTask = subTask.status;
+   } catch (e) {
+     log("[IMPORT_PAGES_PROCESS] Error => ", e);
+
+     if (subTask && subTask.id) {
+       /**
+        * In case of error, we'll update the task status to "failed",
+        * so that, client can show notify the user appropriately.
+        */
+       subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
+         status: _types.ImportExportTaskStatus.FAILED,
+         error: {
+           name: e.name,
+           message: e.message,
+           stack: e.stack,
+           code: "IMPORT_FAILED"
+         }
+       }); // Update stats in main task
+
+       await pageBuilder.importExportTask.updateStats(taskId, {
+         prevStatus: prevStatusOfSubTask,
+         nextStatus: _types.ImportExportTaskStatus.FAILED
+       });
+       prevStatusOfSubTask = subTask.status;
+     }
+
+     return {
+       data: null,
+       error: {
+         message: e.message
+       }
+     };
+   } finally {
+     // Base condition!
+     if (noPendingTask) {
+       log(`No pending sub-task for task ${taskId}`);
+       await pageBuilder.importExportTask.updateTask(taskId, {
+         status: _types.ImportExportTaskStatus.COMPLETED,
+         data: {
+           message: `Finish importing pages.`
+         }
+       });
+     } else {
+       log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current page error out.
+
+       await (0, _client.invokeHandlerClient)({
+         context,
+         name: configuration.handlers.process,
+         payload: {
+           taskId,
+           subTaskIndex: subTaskIndex + 1,
+           type,
+           identity: context.security.getIdentity()
+         },
+         description: "Import pages - process - subtask"
+       });
+     }
+   }
+
+   return {
+     data: "",
+     error: null
+   };
+ };
+
+ exports.pagesHandler = pagesHandler;
@@ -0,0 +1 @@
+ {"version":3,"names":["pagesHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","pageKey","category","zipFileKey","input","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","page","importPage","key","pbPage","createPage","updatePage","content","title","path","settings","COMPLETED","message","version","e","FAILED","name","stack","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity","description"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importPage } from \"~/import/utils\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const pagesHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Page Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { pageKey, category, zipFileKey, input } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing page key \"${pageKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const page = await importPage({\n context,\n pageKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a page\n let pbPage = await context.pageBuilder.createPage(category);\n\n // Update page with data\n pbPage = await context.pageBuilder.updatePage(pbPage.id, {\n content: page.content,\n title: page.title,\n path: page.path,\n settings: page.settings\n });\n\n // TODO: Publish page\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: \"Done\",\n page: {\n id: pbPage.id,\n title: 
pbPage.title,\n version: pbPage.version,\n status: pbPage.status\n }\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_PAGES_PROCESS] Error => \", e);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing pages.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current page error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import pages - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AACA;;AAGO,MAAMA,YAAY,GAAG,OACxBC,aADwB,EAExBC,OAFwB,EAGxBC,OAHwB,KAIJ;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,IAAIE,OAAJ;EACA,IAAIC,aAAa,GAAG,IAApB;EACA,IAAIC,mBAAmB,GAAGC,6BAAA,CAAuBC,OAAjD;EAEAN,GAAG,CAAC,mCAAD,CAAH;EACA,MAAM;IAAEO;EAAF,IAAkBR,OAAxB;EACA,MAAM;IAAES,MAAF;IAAUC,YAAV;IAAwBC,IAAxB;IAA8BC;EAA9B,IAA2Cb,OAAjD,CARoB,CASpB;EACA;;EACA,IAAAc,0BAAA,EAAaD,QAAb,EAAuBZ,OAAvB;;EAEA,IAAI;IACA;AACR;AACA;AACA;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BC,UAA7B,CAAwCN,MAAxC,EAAgD,IAAAO,eAAA,EAAQN,YAAR,EAAsB,CAAtB,CAAhD,CAAhB;IAEA;AACR;AACA;AACA;;IACQ,IAAI,CAACP,OAAD,IAAYA,OAAO,CAACc,MAAR,KAAmBX,6BAAA,CAAuBC,OAA1D,EAAmE;MAC/DH,aAAa,GAAG,IAAhB;MACA,OAAO;QACHc,IAAI,EAAE,EADH;QAEHC,KAAK,EAAE;MAFJ,CAAP;IAIH,CAND,MAMO;MACHf,aAAa,GAAG,KAAhB;IACH;;IACDC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IAEAhB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAnC,CAAH;IAEA,MAAM;MAAEC,OAAF;MAAWC,QAAX;MAAqBC,UAArB;MAAiCC;IAAjC,IAA2CrB,OAAO,CAACe,IAAzD;IACA,MAAM;MAAEO;IAAF,IAAsBD,KAA5B;IAEAvB,GAAG,CAAE,wBAAuBoB,OAAQ,GAAjC,CAAH,CA5BA,CA8BA;;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBqB;IAD4C,CAA/D,CAAhB,CA/BA,CAkCA;;IACA,MAAMnB,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBqB;IAFgB,CAAjD,CAAN;IAIAtB,mBAAmB,GAAGF,OAAO,CAACc,MAA9B,CAvCA,CAyCA;;IACA,MAAMc,IAAI,GAAG,MAAM,IAAAC,iBAAA,EAAW;MAC1BhC,OAD0B;MAE1BqB,OAF0B;MAG1BY,GAAG,EAAEV,UAHqB;MAI1BE;IAJ0B,CAAX,CAAnB,CA1CA,CAiDA;;IACA,IAAIS,MAAM,GAAG,MAAMlC,OAAO,CAACQ,WAAR,CAAoB2B,UAApB,CAA+Bb,QAA/B,CAAnB,CAlDA,CAoDA;;IACAY,MAAM,GAAG,MAAMlC,OAAO,CAACQ,WAAR,CAAoB4B,UAApB,CAA+BF,MAAM,CAACd,EAAtC,EAA0C;MACrDiB,OAAO,EAAEN,IAAI,CAACM,OADuC;MAErDC,KAAK,EAAEP,IAAI,CAACO,KAFyC;MAGrDC,IAAI,EAAER,IAAI,CAACQ,IAH0C;MAIrDC,QAAQ,EAAET,IAAI,CAACS;IAJsC,CAA1C,CAAf,CArDA,CA4DA;IAEA;;IACArC,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBmC,SAD4C;MAE3EvB,IAAI,EAAE;QACFwB,OAAO,EAAE,MADP;QAEFX,IAAI,EAAE;UACFX,EAAE,EAAEc,MAAM,CAACd,EADT;UAEFkB,KAAK,EAAEJ,MAAM,CAACI,KAFZ;UAGFK,OAAO,EAAET,MAAM,CAACS,OAHd;UAIF1B,MAAM,EAAEiB,MAAM,CAACjB;QAJb;MAFJ;IAFqE,CAA/D,CAAhB,CA/DA,CA2EA;;IACA,MAAMT,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBmC;IAFgB,CAAjD,CAAN;IAIApC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;EACH,CAjFD,CAiFE,OAAO2B,CAAP,EAAU;IACR3C,GAAG,CAAC,kCAAD,EAAqC2C,CAArC,CAAH;;IAEA,IAAIzC,OAAO,IAAIA,OAAO,CAACiB,EAAvB,EAA2B;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;QAC3EH,MAAM,EAAEX,6BAAA,CAAuBuC,MAD4C;QAE3E1B,KAAK,EAAE;UACH2B,IAAI,EAAEF,CAAC,CAACE,IADL;UAEHJ,OAAO,EAAEE,CAAC,CAACF,OAFR;UAGHK,KAAK,EAAEH,CAAC,CAACG,KAHN;UAIHC,IAAI,EAAE;QAJH;MAFoE,CAA/D,CAAhB,CALuB,CAevB;;MACA,MAAMxC,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;QACnDoB,UAAU,EAAExB,mBADuC;QAEnDyB,UAAU,EAAExB,6BAAA,CAAuBuC;MAFgB,CAAjD,CAAN;MAIAxC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IACH;;IAED,OAAO;MACHC,IAAI,EAAE,IADH;MAEHC,KAAK,EAAE;QACHuB,OAAO,EAAEE,CAAC,CAACF;MADR;IAFJ,CAAP;EAMH,CAjHD,SAiHU;IACN;IACA,IAAItC,aAAJ,EAAmB;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAxC,CAAH;MAEA,MAAMD,WAAW,CAACM,gBAAZ,CAA6BmC,UAA7B,CAAwCxC,MAAxC,EAAgD;QAClDQ,MAAM,EAAEX,6BAAA,CAAuBmC,SADmB;QAElDvB,IAAI,EAAE;UACFwB,OAAO,EAAG;QADR;MAF4C,CAAhD,CAAN;IAMH,CATD,MASO;MACHzC,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAhD,CAAH,CADG,CAEH;;MACA,MAAM,IAAAwC,2BAAA,EAA6B;QAC/BlD,OAD+B;QAE/B8C,IAAI,EAAEhD,aAAa,CAACqD,
QAAd,CAAuBC,OAFE;QAG/BrD,OAAO,EAAE;UACLU,MADK;UAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;UAGLC,IAHK;UAILC,QAAQ,EAAEZ,OAAO,CAACqD,QAAR,CAAiBC,WAAjB;QAJL,CAHsB;QAS/BC,WAAW,EAAE;MATkB,CAA7B,CAAN;IAWH;EACJ;;EACD,OAAO;IACHrC,IAAI,EAAE,EADH;IAEHC,KAAK,EAAE;EAFJ,CAAP;AAIH,CAjKM"}
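The pagesHandler above processes exactly one sub-task per invocation: it looks up the sub-task by zero-padded index, bails out when nothing is pending, and otherwise re-invokes itself with subTaskIndex + 1 through invokeHandlerClient, so a failed page does not stop the chain. A condensed sketch of that control flow, with the Webiny task APIs replaced by stand-in callbacks:

```ts
// Simplified stand-ins; the real code uses pageBuilder.importExportTask.getSubTask
// (with a zero-padded index) and invokeHandlerClient to schedule the next Lambda run.
type SubTask = { id: string; status: "pending" | "processing" | "completed" | "failed" };

interface Deps {
    getSubTask: (taskId: string, index: number) => Promise<SubTask | null>;
    process: (subTask: SubTask) => Promise<void>;
    invokeNext: (taskId: string, index: number) => Promise<void>;
    finishTask: (taskId: string) => Promise<void>;
}

export async function processOne(taskId: string, subTaskIndex: number, deps: Deps): Promise<void> {
    const subTask = await deps.getSubTask(taskId, subTaskIndex);
    // Base condition: no pending sub-task left, so the whole task is complete.
    if (!subTask || subTask.status !== "pending") {
        await deps.finishTask(taskId);
        return;
    }
    try {
        await deps.process(subTask);
    } finally {
        // Continue the chain even if this sub-task failed, as the original does.
        await deps.invokeNext(taskId, subTaskIndex + 1);
    }
}
```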
@@ -1,23 +1,15 @@
- import { File } from "../types";
- import { PbPageImportExportContext } from "../graphql/types";
- import { ExportedPageData } from "../exportPages/utils";
- interface FileItem extends File {
-     key: string;
-     type: string;
-     name: string;
-     size: number;
-     meta: Record<string, any>;
-     tags: string[];
- }
- interface UploadPageAssetsParams {
-     context: PbPageImportExportContext;
-     filesData: FileItem[];
+ import { File } from "@webiny/api-file-manager/types";
+ import { PbImportExportContext } from "../graphql/types";
+ import { ExportedPageData, ExportedBlockData } from "../export/utils";
+ interface UploadAssetsParams {
+     context: PbImportExportContext;
+     filesData: File[];
      fileUploadsData: FileUploadsData;
  }
- interface UploadPageAssetsReturnType {
+ interface UploadAssetsReturnType {
      fileIdToKeyMap: Map<string, string>;
  }
- export declare const uploadPageAssets: (params: UploadPageAssetsParams) => Promise<UploadPageAssetsReturnType>;
+ export declare const uploadAssets: (params: UploadAssetsParams) => Promise<UploadAssetsReturnType>;
  interface FileUploadsData {
      data: string;
      assets: Record<string, string>;
@@ -25,11 +17,18 @@ interface FileUploadsData {
  interface ImportPageParams {
      key: string;
      pageKey: string;
-     context: PbPageImportExportContext;
+     context: PbImportExportContext;
      fileUploadsData: FileUploadsData;
  }
  export declare function importPage({ pageKey, context, fileUploadsData }: ImportPageParams): Promise<ExportedPageData["page"]>;
- interface PageImportData {
+ interface ImportBlockParams {
+     key: string;
+     blockKey: string;
+     context: PbImportExportContext;
+     fileUploadsData: FileUploadsData;
+ }
+ export declare function importBlock({ blockKey, context, fileUploadsData }: ImportBlockParams): Promise<ExportedBlockData["block"]>;
+ interface ImportData {
      assets: Record<string, string>;
      data: string;
      key: string;
@@ -37,9 +36,9 @@ interface PageImportData {
  /**
   * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
   * @param zipFileUrl
-  * @return PageImportData S3 file keys for all uploaded assets group by page.
+  * @return ImportData S3 file keys for all uploaded assets group by page/block.
   */
- export declare function readExtractAndUploadZipFileContents(zipFileUrl: string): Promise<PageImportData[]>;
+ export declare function readExtractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]>;
  export declare function initialStats(total: number): {
      pending: number;
      processing: number;
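These declarations also document the renamed upload helper: uploadPageAssets becomes uploadAssets and now takes plain File[] from @webiny/api-file-manager instead of the local FileItem shape. A usage sketch against the declarations above, with the context and inputs stubbed since only type signatures appear in this diff:

```ts
// Usage sketch for the renamed uploadAssets (was uploadPageAssets); the context
// object and inputs are stand-ins, not the real Webiny types.
type FileUploadsData = { data: string; assets: Record<string, string> };

declare function uploadAssets(params: {
    context: unknown; // PbImportExportContext in the real package
    filesData: unknown[]; // File[] from @webiny/api-file-manager/types
    fileUploadsData: FileUploadsData;
}): Promise<{ fileIdToKeyMap: Map<string, string> }>;

async function example(context: unknown, filesData: unknown[], fileUploadsData: FileUploadsData) {
    const { fileIdToKeyMap } = await uploadAssets({ context, filesData, fileUploadsData });
    // fileIdToKeyMap maps original file IDs to the keys of the re-uploaded assets,
    // which importPage/importBlock then substitute into page/block content.
    return fileIdToKeyMap;
}
```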
@@ -1,14 +1,15 @@
  "use strict";

- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;

  Object.defineProperty(exports, "__esModule", {
    value: true
  });
+ exports.importBlock = importBlock;
  exports.importPage = importPage;
  exports.initialStats = initialStats;
  exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
- exports.uploadPageAssets = void 0;
+ exports.uploadAssets = void 0;

  var _uniqid = _interopRequireDefault(require("uniqid"));

@@ -38,12 +39,12 @@ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/insta

  var _types = require("../types");

- var _s3Stream = require("../exportPages/s3Stream");
+ var _s3Stream = require("../export/s3Stream");

  const streamPipeline = (0, _util.promisify)(_stream.pipeline);
  const INSTALL_DIR = "/tmp";

- const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
+ const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");

  const FILES_COUNT_IN_EACH_BATCH = 15;

@@ -71,7 +72,19 @@ function updateImageInPageSettings(params) {
    return newSettings;
  }

- function updateFilesInPageData({
+ function updateBlockPreviewImage(params) {
+   const {
+     file,
+     fileIdToKeyMap,
+     srcPrefix
+   } = params;
+   const newFile = file;
+   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+   newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+   return newFile;
+ }
+
+ function updateFilesInData({
    data,
    fileIdToKeyMap,
    srcPrefix
@@ -85,7 +98,7 @@ function updateFilesInPageData({
    if (Array.isArray(data)) {
      for (let i = 0; i < data.length; i++) {
        const element = data[i];
-       updateFilesInPageData({
+       updateFilesInData({
          data: element,
          fileIdToKeyMap,
          srcPrefix
@@ -106,7 +119,7 @@ function updateFilesInPageData({
        value.name = fileIdToKeyMap.get(value.id);
        value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
      } else {
-       updateFilesInPageData({
+       updateFilesInData({
          data: value,
          srcPrefix,
          fileIdToKeyMap
@@ -115,7 +128,7 @@ function updateFilesInPageData({
    }
  }

- const uploadPageAssets = async params => {
+ const uploadAssets = async params => {
    const {
      context,
      filesData,
@@ -133,9 +146,8 @@ const uploadPageAssets = async params => {
      return {
        fileIdToKeyMap
      };
-   }
+   } // Save files meta data against old key for later use.

-   console.log("INSIDE uploadPageAssets"); // Save files meta data against old key for later use.

    const fileKeyToFileMap = new Map(); // Initialize maps.

@@ -190,7 +202,7 @@ const uploadPageAssets = async params => {
    };
  };

- exports.uploadPageAssets = uploadPageAssets;
+ exports.uploadAssets = uploadAssets;

  async function importPage({
    pageKey,
@@ -223,7 +235,7 @@ async function importPage({
    // Upload page assets.
    const {
      fileIdToKeyMap
-   } = await uploadPageAssets({
+   } = await uploadAssets({
      context,

      /**
@@ -237,7 +249,7 @@ async function importPage({
    const {
      srcPrefix = ""
    } = settings || {};
-   updateFilesInPageData({
+   updateFilesInData({
      data: page.content || {},
      fileIdToKeyMap,
      srcPrefix
@@ -256,6 +268,65 @@ async function importPage({
    return page;
  }

+ async function importBlock({
+   blockKey,
+   context,
+   fileUploadsData
+ }) {
+   const log = console.log; // Making Directory for block in which we're going to extract the block data file.
+
+   const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
+   (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
+   const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+   const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+
+   log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
+   await new Promise((resolve, reject) => {
+     _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+   }); // Load the block data file from disk.
+
+   log(`Load file ${blockDataFileKey}`);
+   const {
+     block,
+     files
+   } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
+
+   if (files && Array.isArray(files) && files.length > 0) {
+     // Upload block assets.
+     const {
+       fileIdToKeyMap
+     } = await uploadAssets({
+       context,
+       filesData: files,
+       fileUploadsData
+     });
+     const settings = await context.fileManager.settings.getSettings();
+     const {
+       srcPrefix = ""
+     } = settings || {};
+     updateFilesInData({
+       data: block.content || {},
+       fileIdToKeyMap,
+       srcPrefix
+     });
+     block.preview = updateBlockPreviewImage({
+       file: block.preview || {},
+       fileIdToKeyMap,
+       srcPrefix
+     });
+   }
+
+   log("Removing Directory for block...");
+   await (0, _downloadInstallFiles.deleteFile)(blockKey);
+   log(`Remove block contents from S3...`);
+   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+   return block;
+ }
+
  async function uploadFilesFromS3({
    fileKeyToFileMap,
    oldKeyToNewKeyMap
@@ -310,11 +381,11 @@ function getFileNameWithoutExt(fileName) {
  /**
   * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
   * @param zipFileUrl
-  * @return PageImportData S3 file keys for all uploaded assets group by page.
+  * @return ImportData S3 file keys for all uploaded assets group by page/block.
   */
  async function readExtractAndUploadZipFileContents(zipFileUrl) {
    const log = console.log;
-   const pageImportDataList = [];
+   const importDataList = [];

    const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];

@@ -325,7 +396,7 @@ async function readExtractAndUploadZipFileContents(zipFileUrl) {
    }

    const readStream = response.body;
-   const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/"); // Read export file and download it in the disk
+   const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk

    const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);

@@ -335,22 +406,22 @@ async function readExtractAndUploadZipFileContents(zipFileUrl) {

    const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
    log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-   await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
+   await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3

    for (let i = 0; i < zipFilePaths.length; i++) {
      const currentPath = zipFilePaths[i];
      const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-     pageImportDataList.push(dataMap);
+     importDataList.push(dataMap);
    }

    log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
    await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-   return pageImportDataList;
+   return importDataList;
  }

  const ASSETS_DIR_NAME = "/assets";

- function preparePageDataDirMap({
+ function prepareDataDirMap({
    map,
    filePath,
    newKey
@@ -394,17 +465,17 @@ async function deleteS3Folder(key) {

  function initialStats(total) {
    return {
-     [_types.PageImportExportTaskStatus.PENDING]: total,
-     [_types.PageImportExportTaskStatus.PROCESSING]: 0,
-     [_types.PageImportExportTaskStatus.COMPLETED]: 0,
-     [_types.PageImportExportTaskStatus.FAILED]: 0,
+     [_types.ImportExportTaskStatus.PENDING]: total,
+     [_types.ImportExportTaskStatus.PROCESSING]: 0,
+     [_types.ImportExportTaskStatus.COMPLETED]: 0,
+     [_types.ImportExportTaskStatus.FAILED]: 0,
      total
    };
  }

  function extractZipToDisk(exportFileZipPath) {
    return new Promise((resolve, reject) => {
-     const pageZipFilePaths = [];
+     const zipFilePaths = [];
      const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);

      const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
@@ -434,7 +505,7 @@ function extractZipToDisk(exportFileZipPath) {
          reject(err);
        }

-       resolve(pageZipFilePaths);
+       resolve(zipFilePaths);
      });
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
@@ -463,7 +534,7 @@ function extractZipToDisk(exportFileZipPath) {
        const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);

        readStream.on("end", function () {
-         pageZipFilePaths.push(filePath);
+         zipFilePaths.push(filePath);
          zipFile.readEntry();
        });
        streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
@@ -476,28 +547,28 @@ function extractZipToDisk(exportFileZipPath) {
    });
  }

- function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
    return new Promise((resolve, reject) => {
      const filePaths = [];
      const fileUploadPromises = [];
-     const uniquePageKey = getFileNameWithoutExt(pageDataZipFilePath);
+     const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
      let dataMap = {
-       key: uniquePageKey,
+       key: uniqueKey,
        assets: {},
        data: ""
      };

-     _yauzl.default.open(pageDataZipFilePath, {
+     _yauzl.default.open(dataZipFilePath, {
        lazyEntries: true
      }, function (err, zipFile) {
        if (err) {
-         console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
+         console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
          reject(err);
          return;
        }

        if (!zipFile) {
-         console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
+         console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
          reject("Missing Zip File Resource.");
          return;
        }
@@ -505,7 +576,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
        console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
        zipFile.on("end", function (err) {
          if (err) {
-           console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
+           console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
            reject(err);
          }

@@ -535,7 +606,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
          }

          if (!readStream) {
-           console.log("ERROR: Missing Read Stream while importing pages.");
+           console.log("ERROR: Missing Read Stream while importing.");
            reject("Missing Read Strea Resource.");
            return;
          }
@@ -544,9 +615,9 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
            filePaths.push(entry.fileName);
            zipFile.readEntry();
          });
-         const newKey = `${uniquePath}/${uniquePageKey}/${entry.fileName}`; // Modify in place
+         const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place

-         dataMap = preparePageDataDirMap({
+         dataMap = prepareDataDirMap({
            map: dataMap,
            filePath: entry.fileName,
            newKey