@webiny/api-page-builder-import-export 0.0.0-mt-3 → 0.0.0-unstable.5e7233243f

This diff shows the changes between two publicly released versions of this package, as published to their public registry. It is provided for informational purposes only.
Files changed (50)
  1. package/{importPages/client.d.ts → client.d.ts} +3 -2
  2. package/client.js +53 -0
  3. package/client.js.map +1 -0
  4. package/exportPages/combine/index.d.ts +7 -11
  5. package/exportPages/combine/index.js +37 -11
  6. package/exportPages/combine/index.js.map +1 -0
  7. package/exportPages/process/index.d.ts +10 -14
  8. package/exportPages/process/index.js +32 -28
  9. package/exportPages/process/index.js.map +1 -0
  10. package/exportPages/s3Stream.d.ts +2 -0
  11. package/exportPages/s3Stream.js +12 -3
  12. package/exportPages/s3Stream.js.map +1 -0
  13. package/exportPages/utils.d.ts +1 -1
  14. package/exportPages/utils.js +1 -1
  15. package/exportPages/utils.js.map +1 -0
  16. package/exportPages/zipper.d.ts +1 -0
  17. package/exportPages/zipper.js.map +1 -0
  18. package/graphql/crud/pageImportExportTasks.crud.d.ts +1 -1
  19. package/graphql/crud/pageImportExportTasks.crud.js +23 -13
  20. package/graphql/crud/pageImportExportTasks.crud.js.map +1 -0
  21. package/graphql/crud/pages.crud.d.ts +1 -1
  22. package/graphql/crud/pages.crud.js +28 -20
  23. package/graphql/crud/pages.crud.js.map +1 -0
  24. package/graphql/crud.d.ts +1 -1
  25. package/graphql/crud.js.map +1 -0
  26. package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -0
  27. package/graphql/graphql/pages.gql.js +7 -5
  28. package/graphql/graphql/pages.gql.js.map +1 -0
  29. package/graphql/graphql/utils/resolve.d.ts +1 -1
  30. package/graphql/graphql/utils/resolve.js.map +1 -0
  31. package/graphql/graphql.d.ts +1 -1
  32. package/graphql/graphql.js.map +1 -0
  33. package/graphql/index.d.ts +1 -1
  34. package/graphql/index.js.map +1 -0
  35. package/graphql/types.d.ts +5 -6
  36. package/graphql/types.js.map +1 -0
  37. package/importPages/create/index.d.ts +11 -16
  38. package/importPages/create/index.js +39 -30
  39. package/importPages/create/index.js.map +1 -0
  40. package/importPages/process/index.d.ts +10 -17
  41. package/importPages/process/index.js +25 -23
  42. package/importPages/process/index.js.map +1 -0
  43. package/importPages/utils.d.ts +17 -10
  44. package/importPages/utils.js +89 -58
  45. package/importPages/utils.js.map +1 -0
  46. package/mockSecurity.js.map +1 -0
  47. package/package.json +36 -34
  48. package/types.d.ts +8 -7
  49. package/types.js.map +1 -0
  50. package/importPages/client.js +0 -40

package/importPages/create/index.js
@@ -9,46 +9,57 @@ var _types = require("../../types");
 
 var _utils = require("../utils");
 
-var _client = require("../client");
+var _client = require("../../client");
 
 var _mockSecurity = require("../../mockSecurity");
 
+var _utils2 = require("@webiny/utils");
+
+var _handlerAws = require("@webiny/handler-aws");
+
 /**
  * Handles the import page workflow.
  */
-var _default = configuration => ({
-  type: "handler",
-
-  async handle(context) {
+var _default = configuration => {
+  return (0, _handlerAws.createRawEventHandler)(async ({
+    payload,
+    context
+  }) => {
    const log = console.log;
+    const {
+      pageBuilder
+    } = context;
+    const {
+      task,
+      category,
+      zipFileUrl,
+      identity
+    } = payload;
 
    try {
      log("RUNNING Import Pages Create");
-      const {
-        invocationArgs: args,
-        pageBuilder
-      } = context;
-      const {
-        task,
-        category,
-        zipFileKey,
-        zipFileUrl,
-        identity
-      } = args;
+
+      if (!zipFileUrl) {
+        return {
+          data: null,
+          error: {
+            message: `Missing "zipFileUrl"!`
+          }
+        };
+      }
+
      (0, _mockSecurity.mockSecurity)(identity, context); // Step 1: Read the zip file
 
-      const pageImportDataList = await (0, _utils.readExtractAndUploadZipFileContents)(zipFileKey || zipFileUrl); // Once we have map we can start processing each page
-      // For each page create a sub task and invoke the process handler
+      const pageImportDataList = await (0, _utils.readExtractAndUploadZipFileContents)(zipFileUrl); // For each page create a subtask and invoke the process handler
 
      for (let i = 0; i < pageImportDataList.length; i++) {
        const pagesDirMap = pageImportDataList[i]; // Create sub task
 
-        const subtask = await pageBuilder.pageImportExportTask.createSubTask(task.id, (0, _utils.zeroPad)(i + 1), {
+        const subtask = await pageBuilder.pageImportExportTask.createSubTask(task.id, (0, _utils2.zeroPad)(i + 1, 5), {
          status: _types.PageImportExportTaskStatus.PENDING,
          data: {
            pageKey: pagesDirMap.key,
            category,
-            zipFileKey,
            zipFileUrl,
            input: {
              fileUploadsData: pagesDirMap
@@ -71,7 +82,8 @@ var _default = configuration => ({
          // Execute "Process" for the first sub task.
          subTaskIndex: 1,
          identity: context.security.getIdentity()
-        }
+        },
+        description: "Import pages - process - first"
      });
    } catch (e) {
      log("[IMPORT_PAGES_CREATE] Error => ", e);
@@ -80,13 +92,6 @@ var _default = configuration => ({
       * so that, client can show notify the user appropriately.
       */
 
-      const {
-        invocationArgs: args,
-        pageBuilder
-      } = context;
-      const {
-        task
-      } = args;
      await pageBuilder.pageImportExportTask.updateTask(task.id, {
        status: _types.PageImportExportTaskStatus.FAILED,
        error: {
@@ -102,8 +107,12 @@ var _default = configuration => ({
        }
      };
    }
-  }
 
-});
+    return {
+      data: "",
+      error: null
+    };
+  });
+};
 
 exports.default = _default;
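
For reference, the source map added just below embeds the original TypeScript of this handler. Trimmed to the structural change (sub-task creation and the catch block elided), the migrated source has this shape:

import { PageImportExportTask, PbPageImportExportContext } from "~/types";
import { SecurityIdentity } from "@webiny/api-security/types";
import { createRawEventHandler } from "@webiny/handler-aws";

interface Configuration {
    handlers: {
        process: string;
    };
}

export interface Payload {
    category: string;
    zipFileUrl: string;
    task: PageImportExportTask;
    identity: SecurityIdentity;
}

export interface Response {
    data: string | null;
    error: Partial<Error> | null;
}

export default (configuration: Configuration) => {
    return createRawEventHandler<Payload, PbPageImportExportContext, Response>(
        async ({ payload, context }) => {
            // Arguments now arrive on `payload`; the old plugin read them from
            // `context.invocationArgs`, and `zipFileKey` is gone entirely.
            const { task, category, zipFileUrl, identity } = payload;
            if (!zipFileUrl) {
                return { data: null, error: { message: `Missing "zipFileUrl"!` } };
            }
            // ... create one sub-task per page, then invoke
            // configuration.handlers.process for sub-task #1 ...
            return { data: "", error: null };
        }
    );
};

The factory no longer returns a `{ type: "handler" }` plugin object; `createRawEventHandler` produces the registered handler and carries the payload, context, and response types in its signature.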

package/importPages/create/index.js.map (new generated source map; a single JSON line embedding the TypeScript source, elided here)
@@ -0,0 +1 @@
+{"version":3,"names":[...],"sources":["index.ts"],"sourcesContent":[...],"mappings":"..."}

package/importPages/process/index.d.ts
@@ -1,25 +1,18 @@
-import { HandlerPlugin } from "@webiny/handler/types";
-import { ArgsContext } from "@webiny/handler-args/types";
 import { PbPageImportExportContext } from "../../types";
 import { SecurityIdentity } from "@webiny/api-security/types";
-export declare type HandlerArgs = {
-    taskId: string;
-    subTaskIndex: number;
-    identity: SecurityIdentity;
-};
-export declare type HandlerResponse = {
-    data: string;
-    error: {
-        message: string;
-    };
-};
 interface Configuration {
    handlers: {
        process: string;
    };
 }
-declare const _default: (configuration: Configuration) => HandlerPlugin<PbPageImportExportContext, ArgsContext<HandlerArgs>>;
-/**
- * Handles the import page workflow.
- */
+export interface Payload {
+    taskId: string;
+    subTaskIndex: number;
+    identity: SecurityIdentity;
+}
+export interface Response {
+    data: string | null;
+    error: Partial<Error> | null;
+}
+declare const _default: (configuration: Configuration) => import("@webiny/handler-aws").RawEventHandler<Payload, PbPageImportExportContext, Response>;
 export default _default;
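
Because `Payload` and `Response` are now exported, code that chains into this Lambda can type the invocation against the handler's own declaration. A sketch of the call the create handler makes, per the compiled diff (the wrapper function and its parameters are illustrative; `~/` is the package's internal path alias):

import { invokeHandlerClient } from "~/client";
import { Payload } from "~/importPages/process";
import { PbPageImportExportContext } from "~/types";

// Kick off sub-task #1; the process handler then re-invokes itself with
// subTaskIndex + 1 until no PENDING sub-task remains.
const startProcessing = async (
    context: PbPageImportExportContext,
    taskId: string,
    processHandlerName: string
): Promise<void> => {
    await invokeHandlerClient<Payload>({
        context,
        name: processHandlerName,
        payload: {
            taskId,
            subTaskIndex: 1,
            identity: context.security.getIdentity()
        },
        description: "Import pages - process - first"
    });
};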

package/importPages/process/index.js
@@ -9,31 +9,32 @@ var _types = require("../../types");
 
 var _utils = require("../utils");
 
-var _client = require("../client");
+var _client = require("../../client");
 
 var _mockSecurity = require("../../mockSecurity");
 
-/**
- * Handles the import page workflow.
- */
-var _default = configuration => ({
-  type: "handler",
+var _utils2 = require("@webiny/utils");
 
-  async handle(context) {
+var _handlerAws = require("@webiny/handler-aws");
+
+var _default = configuration => {
+  return (0, _handlerAws.createRawEventHandler)(async ({
+    payload,
+    context
+  }) => {
    const log = console.log;
    let subTask;
    let noPendingTask = true;
    let prevStatusOfSubTask = _types.PageImportExportTaskStatus.PENDING;
    log("RUNNING Import Page Queue Process");
    const {
-      invocationArgs: args,
      pageBuilder
    } = context;
    const {
      taskId,
      subTaskIndex,
      identity
-    } = args; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
+    } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
    // and this Lambda is invoked internally, without credentials.
 
    (0, _mockSecurity.mockSecurity)(identity, context);
@@ -43,7 +44,7 @@ var _default = configuration => ({
     * Note: We're not going to DB for getting next sub-task to process,
     * because the data might be out of sync due to GSI eventual consistency.
     */
-    subTask = await pageBuilder.pageImportExportTask.getSubTask(taskId, (0, _utils.zeroPad)(subTaskIndex));
+    subTask = await pageBuilder.pageImportExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
    /**
     * Base condition!!
     * Bail out early, if task not found or task's status is not "pending".
@@ -51,7 +52,10 @@ var _default = configuration => ({
 
    if (!subTask || subTask.status !== _types.PageImportExportTaskStatus.PENDING) {
      noPendingTask = true;
-      return;
+      return {
+        data: "",
+        error: null
+      };
    } else {
      noPendingTask = false;
    }
@@ -86,9 +90,9 @@ var _default = configuration => ({
        fileUploadsData
      }); // Create a page
 
-      let pbPage = await context.pageBuilder.pages.create(category); // Update page with data
+      let pbPage = await context.pageBuilder.createPage(category); // Update page with data
 
-      pbPage = await context.pageBuilder.pages.update(pbPage.id, {
+      pbPage = await context.pageBuilder.updatePage(pbPage.id, {
        content: page.content,
        title: page.title,
        path: page.path,
@@ -122,13 +126,6 @@ var _default = configuration => ({
         * In case of error, we'll update the task status to "failed",
         * so that, client can show notify the user appropriately.
         */
-        const {
-          invocationArgs: args,
-          pageBuilder
-        } = context;
-        const {
-          taskId
-        } = args;
        subTask = await pageBuilder.pageImportExportTask.updateSubTask(taskId, subTask.id, {
          status: _types.PageImportExportTaskStatus.FAILED,
          error: {
@@ -172,12 +169,17 @@ var _default = configuration => ({
            taskId,
            subTaskIndex: subTaskIndex + 1,
            identity: context.security.getIdentity()
-          }
+          },
+          description: "Import pages - process - subtask"
        });
      }
    }
-  }
 
-});
+    return {
+      data: "",
+      error: null
+    };
+  });
+};
 
 exports.default = _default;
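
Note the paired change with the create handler: sub-task keys are written there with `zeroPad(i + 1, 5)` and read back here with `zeroPad(subTaskIndex, 5)`, both from `@webiny/utils`, which takes the pad width explicitly. A quick sketch of the equivalence (expected outputs inferred from the removed helper's `padStart(5, "0")`):

import { zeroPad } from "@webiny/utils";

// Writer (createSubTask) and reader (getSubTask) must produce identical keys:
zeroPad(1, 5);  // "00001"
zeroPad(42, 5); // "00042"

// The removed package-local helper hard-coded the width; it was equivalent to:
const legacyZeroPad = (version: number): string => `${version}`.padStart(5, "0");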

package/importPages/process/index.js.map (new generated source map; a single JSON line embedding the TypeScript source, elided here)
@@ -0,0 +1 @@
+{"version":3,"names":[...],"sources":["index.ts"],"sourcesContent":[...],"mappings":"..."}

package/importPages/utils.d.ts
@@ -1,15 +1,23 @@
-import { PageImportExportTaskStatus } from "../types";
+import { File } from "../types";
 import { PbPageImportExportContext } from "../graphql/types";
 import { ExportedPageData } from "../exportPages/utils";
+interface FileItem extends File {
+    key: string;
+    type: string;
+    name: string;
+    size: number;
+    meta: Record<string, any>;
+    tags: string[];
+}
 interface UploadPageAssetsParams {
    context: PbPageImportExportContext;
-    filesData: Record<string, any>[];
+    filesData: FileItem[];
    fileUploadsData: FileUploadsData;
 }
 interface UploadPageAssetsReturnType {
-    fileIdToKeyMap?: Map<string, string>;
+    fileIdToKeyMap: Map<string, string>;
 }
-export declare const uploadPageAssets: ({ context, filesData, fileUploadsData }: UploadPageAssetsParams) => Promise<UploadPageAssetsReturnType>;
+export declare const uploadPageAssets: (params: UploadPageAssetsParams) => Promise<UploadPageAssetsReturnType>;
 interface FileUploadsData {
    data: string;
    assets: Record<string, string>;
@@ -28,16 +36,15 @@ interface PageImportData {
 }
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param zipFileKey
+ * @param zipFileUrl
  * @return PageImportData S3 file keys for all uploaded assets group by page.
  */
-export declare function readExtractAndUploadZipFileContents(zipFileKey: string): Promise<PageImportData[]>;
-export declare const zeroPad: (version: any) => string;
-export declare function initialStats(total: any): {
-    pending: any;
+export declare function readExtractAndUploadZipFileContents(zipFileUrl: string): Promise<PageImportData[]>;
+export declare function initialStats(total: number): {
+    pending: number;
    processing: number;
    completed: number;
    failed: number;
-    total: any;
+    total: number;
 };
 export {};
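
The `initialStats` declaration above replaces `any` with concrete numbers; these are the per-status counters the create handler seeds with `stats: initialStats(pageImportDataList.length)`. A hedged sketch of the implied shape (seeding `pending` with `total` is inferred from sub-tasks starting out PENDING; the function body is not part of this diff):

interface ImportStats {
    pending: number;
    processing: number;
    completed: number;
    failed: number;
    total: number;
}

const initialStats = (total: number): ImportStats => ({
    pending: total,
    processing: 0,
    completed: 0,
    failed: 0,
    total
});

initialStats(3); // { pending: 3, processing: 0, completed: 0, failed: 0, total: 3 }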

package/importPages/utils.js
@@ -8,7 +8,7 @@ Object.defineProperty(exports, "__esModule", {
 exports.importPage = importPage;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.zeroPad = exports.uploadPageAssets = void 0;
+exports.uploadPageAssets = void 0;
 
 var _uniqid = _interopRequireDefault(require("uniqid"));
 
@@ -46,25 +46,29 @@ const INSTALL_DIR = "/tmp";
 const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
 
 const FILES_COUNT_IN_EACH_BATCH = 15;
-const ZIP_CONTENT_TYPE = "application/zip";
 
-function updateImageInPageSettings({
-  settings,
-  fileIdToKeyMap,
-  srcPrefix
-}) {
+function updateImageInPageSettings(params) {
+  const {
+    settings,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
  let newSettings = settings;
  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
 
  if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
-    newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(settings.general.image.id)}`);
+    var _settings$general, _settings$general$ima;
+
+    newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
  }
 
  if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
-    newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(settings.social.image.id)}`);
+    var _settings$social, _settings$social$imag;
+
+    newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
  }
 
-  return settings;
+  return newSettings;
 }
 
 function updateFilesInPageData({
@@ -111,23 +115,27 @@ function updateFilesInPageData({
    }
  }
 
-const uploadPageAssets = async ({
-  context,
-  filesData,
-  fileUploadsData
-}) => {
+const uploadPageAssets = async params => {
+  const {
+    context,
+    filesData,
+    fileUploadsData
+  } = params; // Save uploaded file key against static id for later use.
+
+  const fileIdToKeyMap = new Map();
  /**
   * This function contains logic of file download from S3.
   * Current we're not mocking zip file download from S3 in tests at the moment.
   * So, we're manually mocking it in case of test just by returning an empty object.
   */
+
  if (process.env.NODE_ENV === "test") {
-    return {};
+    return {
+      fileIdToKeyMap
+    };
  }
 
-  console.log("INSIDE uploadPageAssets"); // Save uploaded file key against static id for later use.
-
-  const fileIdToKeyMap = new Map(); // Save files meta data against old key for later use.
+  console.log("INSIDE uploadPageAssets"); // Save files meta data against old key for later use.
 
  const fileKeyToFileMap = new Map(); // Initialize maps.
 
@@ -145,7 +153,12 @@ const uploadPageAssets = async ({
 
  const createFilesInput = fileUploadResults.map(uploadResult => {
    const newKey = uploadResult.Key;
-    const file = fileKeyToFileMap.get(getOldFileKey(newKey)); // Update the file map with newly uploaded file.
+    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
+    if (!file) {
+      return null;
+    } // Update the file map with newly uploaded file.
+
 
    fileIdToKeyMap.set(file.id, newKey);
    return {
@@ -156,7 +169,7 @@ const uploadPageAssets = async ({
      meta: file.meta,
      tags: file.tags
    };
-  });
+  }).filter(Boolean);
  const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
 
  const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
@@ -206,25 +219,31 @@ async function importPage({
    files
  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
 
-  if (Array.isArray(files) && files.length) {
+  if (files && Array.isArray(files) && files.length > 0) {
    // Upload page assets.
    const {
      fileIdToKeyMap
    } = await uploadPageAssets({
      context,
+
+      /**
+       * TODO @ts-refactor @ashutosh figure out correct types.
+       */
+      // @ts-ignore
      filesData: files,
      fileUploadsData
    });
+    const settings = await context.fileManager.settings.getSettings();
    const {
-      srcPrefix
-    } = await context.fileManager.settings.getSettings();
+      srcPrefix = ""
+    } = settings || {};
    updateFilesInPageData({
-      data: page.content,
+      data: page.content || {},
      fileIdToKeyMap,
      srcPrefix
    });
    page.settings = updateImageInPageSettings({
-      settings: page.settings,
+      settings: page.settings || {},
      fileIdToKeyMap,
      srcPrefix
    });
@@ -270,14 +289,6 @@ async function uploadFilesFromS3({
  return Promise.all(promises);
 }
 
-async function getObjectMetaFromS3(Key) {
-  const meta = await _s3Stream.s3Stream.getObjectHead(Key);
-
-  if (meta.ContentType !== ZIP_CONTENT_TYPE) {
-    throw new _error.default(`Unsupported file type: "${meta.ContentType}"`, "UNSUPPORTED_FILE");
-  }
-}
-
 function getOldFileKey(key) {
  /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -298,32 +309,23 @@ function getFileNameWithoutExt(fileName) {
 
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param zipFileKey
+ * @param zipFileUrl
  * @return PageImportData S3 file keys for all uploaded assets group by page.
  */
-async function readExtractAndUploadZipFileContents(zipFileKey) {
+async function readExtractAndUploadZipFileContents(zipFileUrl) {
  const log = console.log;
  const pageImportDataList = [];
-  let readStream; // Check whether it is a URL
 
-  if (zipFileKey.startsWith("http")) {
-    const response = await (0, _nodeFetch.default)(zipFileKey);
+  const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
 
-    if (!response.ok) {
-      throw new _error.default(`Unable to downloading file: "${zipFileKey}"`, response.statusText);
-    }
+  const response = await (0, _nodeFetch.default)(zipFileUrl);
 
-    readStream = response.body;
-  } else {
-    // We're first retrieving object's meta data, just to check whether the file is available at the given Key
-    await getObjectMetaFromS3(zipFileKey);
-    readStream = _s3Stream.s3Stream.readStream(zipFileKey);
+  if (!response.ok) {
+    throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
  }
 
-  const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/");
-
-  const zipFileName = _path.default.basename(zipFileKey); // Read export file and download it in the disk
-
+  const readStream = response.body;
+  const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/"); // Read export file and download it in the disk
 
  const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
 
@@ -332,7 +334,7 @@ async function readExtractAndUploadZipFileContents(zipFileKey) {
  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
 
  const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
-  log(`Removing ZIP file "${zipFileKey}" from ${ZIP_FILE_PATH}`);
+  log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
 
  for (let i = 0; i < zipFilePaths.length; i++) {
@@ -382,16 +384,13 @@ async function deleteS3Folder(key) {
  }
 
  const response = await _s3Stream.s3Stream.listObject(key);
-  const keys = response.Contents.map(c => c.Key);
+  const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
  console.log(`Found ${keys.length} files.`);
  const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
  await Promise.all(deleteFilePromises);
  console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
-
-const zeroPad = version => `${version}`.padStart(5, "0");
+} // export const zeroPad = version => `${version}`.padStart(5, "0");
 
-exports.zeroPad = zeroPad;
 
 function initialStats(total) {
  return {
@@ -419,6 +418,13 @@ function extractZipToDisk(exportFileZipPath) {
      if (err) {
        console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
        reject(err);
+        return;
+      }
+
+      if (!zipFile) {
+        console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
+        reject("Missing Zip File Resource.");
+        return;
      }
 
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
@@ -445,6 +451,13 @@ function extractZipToDisk(exportFileZipPath) {
          if (err) {
            console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
            reject(err);
+            return;
+          }
+
+          if (!readStream) {
+            console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
+            reject("Missing Read Stream Resource.");
+            return;
          }
 
          const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
@@ -453,7 +466,9 @@ function extractZipToDisk(exportFileZipPath) {
            pageZipFilePaths.push(filePath);
            zipFile.readEntry();
          });
-          streamPipeline(readStream, (0, _fs.createWriteStream)(filePath));
+          streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
+            reject(error);
+          });
        });
      }
    });
@@ -478,6 +493,13 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
      if (err) {
        console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
        reject(err);
+        return;
+      }
+
+      if (!zipFile) {
+        console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
+        reject("Missing Zip File Resource.");
+        return;
      }
 
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
@@ -509,6 +531,13 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
          if (err) {
            console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
            reject(err);
+            return;
+          }
+
+          if (!readStream) {
+            console.log("ERROR: Missing Read Stream while importing pages.");
+            reject("Missing Read Strea Resource.");
+            return;
          }
 
          readStream.on("end", function () {
@@ -530,6 +559,8 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
 
          streamPipeline(readStream, streamPassThrough).then(() => {
            fileUploadPromises.push(promise);
+          }).catch(error => {
+            reject(error);
          });
        });
      }
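
The recurring pattern in the `extractZipToDisk` / `extractZipAndUploadToS3` changes above: a `yauzl` callback can deliver an error or an undefined resource, the callback body keeps executing unless you `return` after `reject(...)`, and a failed `stream.pipeline` previously had no rejection path. A condensed, self-contained sketch of the guard pattern (function name and paths are illustrative; it extracts only the first entry to stay short):

import yauzl from "yauzl";
import { createWriteStream } from "fs";
import { pipeline } from "stream";
import { promisify } from "util";

const streamPipeline = promisify(pipeline);

const extractFirstEntry = (zipPath: string, targetPath: string): Promise<void> => {
    return new Promise((resolve, reject) => {
        yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
            if (err) {
                reject(err);
                return; // without this, the code below would still run
            }
            if (!zipFile) {
                // yauzl's types allow an undefined zipfile; guard it explicitly.
                reject("Missing Zip File Resource.");
                return;
            }
            zipFile.readEntry();
            zipFile.on("entry", entry => {
                zipFile.openReadStream(entry, (err, readStream) => {
                    if (err || !readStream) {
                        reject(err || "Missing Read Stream Resource.");
                        return;
                    }
                    // A rejected pipeline used to vanish; now it rejects the
                    // surrounding Promise instead.
                    streamPipeline(readStream, createWriteStream(targetPath))
                        .then(() => resolve())
                        .catch(reject);
                });
            });
        });
    });
};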