@webiny/api-page-builder-import-export 0.0.0-unstable.3386f66516 → 0.0.0-unstable.496cf268ac

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/client.js +2 -6
  2. package/client.js.map +1 -1
  3. package/export/combine/blocksHandler.js +11 -18
  4. package/export/combine/blocksHandler.js.map +1 -1
  5. package/export/combine/formsHandler.d.ts +6 -0
  6. package/export/combine/formsHandler.js +99 -0
  7. package/export/combine/formsHandler.js.map +1 -0
  8. package/export/combine/index.js +5 -8
  9. package/export/combine/index.js.map +1 -1
  10. package/export/combine/pagesHandler.js +11 -18
  11. package/export/combine/pagesHandler.js.map +1 -1
  12. package/export/combine/templatesHandler.js +11 -18
  13. package/export/combine/templatesHandler.js.map +1 -1
  14. package/export/process/blocksHandler.js +18 -32
  15. package/export/process/blocksHandler.js.map +1 -1
  16. package/export/process/formsHandler.d.ts +6 -0
  17. package/export/process/formsHandler.js +187 -0
  18. package/export/process/formsHandler.js.map +1 -0
  19. package/export/process/index.js +5 -8
  20. package/export/process/index.js.map +1 -1
  21. package/export/process/pagesHandler.js +19 -34
  22. package/export/process/pagesHandler.js.map +1 -1
  23. package/export/process/templatesHandler.js +23 -33
  24. package/export/process/templatesHandler.js.map +1 -1
  25. package/export/s3Stream.d.ts +5 -2
  26. package/export/s3Stream.js +1 -20
  27. package/export/s3Stream.js.map +1 -1
  28. package/export/utils.d.ts +8 -1
  29. package/export/utils.js +59 -44
  30. package/export/utils.js.map +1 -1
  31. package/export/zipper.js +31 -43
  32. package/export/zipper.js.map +1 -1
  33. package/graphql/crud/blocks.crud.js +20 -39
  34. package/graphql/crud/blocks.crud.js.map +1 -1
  35. package/graphql/crud/forms.crud.d.ts +4 -0
  36. package/graphql/crud/forms.crud.js +129 -0
  37. package/graphql/crud/forms.crud.js.map +1 -0
  38. package/graphql/crud/importExportTasks.crud.js +9 -57
  39. package/graphql/crud/importExportTasks.crud.js.map +1 -1
  40. package/graphql/crud/pages.crud.js +21 -39
  41. package/graphql/crud/pages.crud.js.map +1 -1
  42. package/graphql/crud/templates.crud.js +17 -33
  43. package/graphql/crud/templates.crud.js.map +1 -1
  44. package/graphql/crud.js +2 -8
  45. package/graphql/crud.js.map +1 -1
  46. package/graphql/graphql/blocks.gql.js +1 -6
  47. package/graphql/graphql/blocks.gql.js.map +1 -1
  48. package/graphql/graphql/forms.gql.d.ts +4 -0
  49. package/graphql/graphql/forms.gql.js +60 -0
  50. package/graphql/graphql/forms.gql.js.map +1 -0
  51. package/graphql/graphql/importExportTasks.gql.js +1 -6
  52. package/graphql/graphql/importExportTasks.gql.js.map +1 -1
  53. package/graphql/graphql/pages.gql.js +1 -6
  54. package/graphql/graphql/pages.gql.js.map +1 -1
  55. package/graphql/graphql/templates.gql.js +1 -6
  56. package/graphql/graphql/templates.gql.js.map +1 -1
  57. package/graphql/graphql/utils/resolve.js +0 -3
  58. package/graphql/graphql/utils/resolve.js.map +1 -1
  59. package/graphql/graphql.js +2 -7
  60. package/graphql/graphql.js.map +1 -1
  61. package/graphql/index.js +0 -5
  62. package/graphql/index.js.map +1 -1
  63. package/graphql/types.d.ts +23 -0
  64. package/graphql/types.js.map +1 -1
  65. package/import/constants.d.ts +3 -0
  66. package/import/constants.js +14 -0
  67. package/import/constants.js.map +1 -0
  68. package/import/create/blocksHandler.js +11 -20
  69. package/import/create/blocksHandler.js.map +1 -1
  70. package/import/create/formsHandler.d.ts +3 -0
  71. package/import/create/formsHandler.js +103 -0
  72. package/import/create/formsHandler.js.map +1 -0
  73. package/import/create/index.js +5 -8
  74. package/import/create/index.js.map +1 -1
  75. package/import/create/pagesHandler.js +10 -19
  76. package/import/create/pagesHandler.js.map +1 -1
  77. package/import/create/templatesHandler.js +10 -19
  78. package/import/create/templatesHandler.js.map +1 -1
  79. package/import/process/blocks/blocksHandler.d.ts +3 -0
  80. package/import/process/blocks/blocksHandler.js +169 -0
  81. package/import/process/blocks/blocksHandler.js.map +1 -0
  82. package/import/process/blocks/importBlock.d.ts +11 -0
  83. package/import/process/blocks/importBlock.js +89 -0
  84. package/import/process/blocks/importBlock.js.map +1 -0
  85. package/import/process/forms/formsHandler.d.ts +3 -0
  86. package/import/process/forms/formsHandler.js +176 -0
  87. package/import/process/forms/formsHandler.js.map +1 -0
  88. package/import/process/forms/importForm.d.ts +9 -0
  89. package/import/process/forms/importForm.js +43 -0
  90. package/import/process/forms/importForm.js.map +1 -0
  91. package/import/process/index.js +8 -11
  92. package/import/process/index.js.map +1 -1
  93. package/import/process/pages/importPage.d.ts +11 -0
  94. package/import/process/pages/importPage.js +92 -0
  95. package/import/process/pages/importPage.js.map +1 -0
  96. package/import/process/pages/pagesHandler.d.ts +3 -0
  97. package/import/process/pages/pagesHandler.js +183 -0
  98. package/import/process/pages/pagesHandler.js.map +1 -0
  99. package/import/process/pagesHandler.js +2 -3
  100. package/import/process/pagesHandler.js.map +1 -1
  101. package/import/process/templates/importTemplate.d.ts +11 -0
  102. package/import/process/templates/importTemplate.js +66 -0
  103. package/import/process/templates/importTemplate.js.map +1 -0
  104. package/import/process/{templatesHandler.d.ts → templates/templatesHandler.d.ts} +2 -2
  105. package/import/process/{templatesHandler.js → templates/templatesHandler.js} +31 -33
  106. package/import/process/templates/templatesHandler.js.map +1 -0
  107. package/import/utils/deleteS3Folder.d.ts +1 -0
  108. package/import/utils/deleteS3Folder.js +19 -0
  109. package/import/utils/deleteS3Folder.js.map +1 -0
  110. package/import/utils/extractAndUploadZipFileContents.d.ts +7 -0
  111. package/import/utils/extractAndUploadZipFileContents.js +122 -0
  112. package/import/utils/extractAndUploadZipFileContents.js.map +1 -0
  113. package/import/utils/extractZipAndUploadToS3.d.ts +2 -0
  114. package/import/utils/extractZipAndUploadToS3.js +98 -0
  115. package/import/utils/extractZipAndUploadToS3.js.map +1 -0
  116. package/import/utils/getFileNameWithoutExt.d.ts +1 -0
  117. package/import/utils/getFileNameWithoutExt.js +11 -0
  118. package/import/utils/getFileNameWithoutExt.js.map +1 -0
  119. package/import/utils/index.d.ts +9 -0
  120. package/import/utils/index.js +104 -0
  121. package/import/utils/index.js.map +1 -0
  122. package/import/utils/initialStats.d.ts +7 -0
  123. package/import/utils/initialStats.js +16 -0
  124. package/import/utils/initialStats.js.map +1 -0
  125. package/import/utils/prepareDataDirMap.d.ts +6 -0
  126. package/import/utils/prepareDataDirMap.js +29 -0
  127. package/import/utils/prepareDataDirMap.js.map +1 -0
  128. package/import/utils/updateFilesInData.d.ts +8 -0
  129. package/import/utils/updateFilesInData.js +48 -0
  130. package/import/utils/updateFilesInData.js.map +1 -0
  131. package/import/utils/uploadAssets.d.ts +10 -0
  132. package/import/utils/uploadAssets.js +51 -0
  133. package/import/utils/uploadAssets.js.map +1 -0
  134. package/import/utils/uploadFilesFromS3.d.ts +3 -0
  135. package/import/utils/uploadFilesFromS3.js +19 -0
  136. package/import/utils/uploadFilesFromS3.js.map +1 -0
  137. package/import/utils.d.ts +1 -8
  138. package/import/utils.js +0 -55
  139. package/import/utils.js.map +1 -1
  140. package/mockSecurity.js +0 -2
  141. package/mockSecurity.js.map +1 -1
  142. package/package.json +25 -24
  143. package/types.d.ts +21 -0
  144. package/types.js +0 -5
  145. package/types.js.map +1 -1
  146. package/import/process/templatesHandler.js.map +0 -1
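The list above captures the shape of this release: `package/import/utils.js` sheds 55 lines with no additions in favour of focused modules under `package/import/utils/`, the per-entity process handlers move into `blocks/`, `forms/`, `pages/` and `templates/` subfolders, and form export/import gains dedicated handlers. For orientation only, here is a minimal TypeScript sketch of how the split utilities compose, based on the call shapes visible in the `importTemplate.js` hunk further down; the `~` aliases mirror the package's own sources and the parameter/return types are assumptions, not taken from this diff.

```ts
// Sketch only: composes the new split utilities the way importTemplate.js does below.
// The "~" aliases follow the package's own sources; parameter types are assumptions.
import { uploadAssets } from "~/import/utils/uploadAssets";
import { updateFilesInData } from "~/import/utils/updateFilesInData";
import { PbImportExportContext } from "~/graphql/types";
import { FileUploadsData } from "~/types";

export async function restoreFilesIntoContent(
    context: PbImportExportContext,
    files: unknown[], // exported file manifest read from the unzipped data file (assumed type)
    fileUploadsData: FileUploadsData,
    content: Record<string, unknown>,
    srcPrefix: string
): Promise<void> {
    // Re-upload the exported assets; the helper returns a map of old file id -> new file record.
    const fileIdToNewFileMap = await uploadAssets({ context, files, fileUploadsData });
    // Rewrite file references inside the imported content so they point at the re-uploaded assets.
    updateFilesInData({ data: content, fileIdToNewFileMap, srcPrefix });
}
```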
@@ -0,0 +1 @@
+ {"version":3,"names":["pagesHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","pageKey","category","zipFileKey","input","meta","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","page","importPage","key","pbPage","createPage","updatePage","content","title","path","settings","COMPLETED","message","version","e","FAILED","name","stack","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity","description"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importPage } from \"./importPage\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const pagesHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Page Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { pageKey, category, zipFileKey, input, meta } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing page key \"${pageKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const page = await importPage({\n context,\n pageKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a page\n let pbPage = await context.pageBuilder.createPage(category, meta);\n\n // Hooks attached to `pageBuilder.createPage` might enable security back again, here we disable security\n mockSecurity(identity, context);\n\n // Update page with data\n pbPage = await context.pageBuilder.updatePage(pbPage.id, {\n content: page.content,\n title: page.title,\n path: page.path,\n settings: page.settings\n });\n\n // Hooks attached to `pageBuilder.updatePage` might enable security back again, here we 
disable security\n mockSecurity(identity, context);\n\n // TODO: Publish page\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: \"Done\",\n page: {\n id: pbPage.id,\n title: pbPage.title,\n version: pbPage.version,\n status: pbPage.status\n }\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_PAGES_PROCESS] Error => \", e);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing pages.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current page error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import pages - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AACA;AAGO,MAAMA,YAAY,GAAG,OACxBC,aAA4B,EAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,IAAIE,OAAO;EACX,IAAIC,aAAa,GAAG,IAAI;EACxB,IAAIC,mBAAmB,GAAGC,6BAAsB,CAACC,OAAO;EAExDN,GAAG,CAAC,mCAAmC,CAAC;EACxC,MAAM;IAAEO;EAAY,CAAC,GAAGR,OAAO;EAC/B,MAAM;IAAES,MAAM;IAAEC,YAAY;IAAEC,IAAI;IAAEC;EAAS,CAAC,GAAGb,OAAO;EACxD;EACA;EACA,IAAAc,0BAAY,EAACD,QAAQ,EAAEZ,OAAO,CAAC;EAE/B,IAAI;IACA;AACR;AACA;AACA;;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACC,UAAU,CAACN,MAAM,EAAE,IAAAO,cAAO,EAACN,YAAY,EAAE,CAAC,CAAC,CAAC;;IAEzF;AACR;AACA;AACA;IACQ,IAAI,CAACP,OAAO,IAAIA,OAAO,CAACc,MAAM,KAAKX,6BAAsB,CAACC,OAAO,EAAE;MAC/DH,aAAa,GAAG,IAAI;MACpB,OAAO;QACHc,IAAI,EAAE,EAAE;QACRC,KAAK,EAAE;MACX,CAAC;IACL,CAAC,MAAM;MACHf,aAAa,GAAG,KAAK;IACzB;IACAC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;IAEpChB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAC,CAAC;IAExC,MAAM;MAAEC,OAAO;MAAEC,QAAQ;MAAEC,UAAU;MAAEC,KAAK;MAAEC;IAAK,CAAC,GAAGtB,OAAO,CAACe,IAAI;IACnE,MAAM;MAAEQ;IAAgB,CAAC,GAAGF,KAAK;IAEjCvB,GAAG,CAAE,wBAAuBoB,OAAQ,GAAE,CAAC;;IAEvC;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACa,aAAa,CAAClB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;MAC3EH,MAAM,EAAEX,6BAAsB,CAACsB;IACnC,CAAC,CAAC;IACF;IACA,MAAMpB,WAAW,CAACM,gBAAgB,CAACe,WAAW,CAACpB,MAAM,EAAE;MACnDqB,UAAU,EAAEzB,mBAAmB;MAC/B0B,UAAU,EAAEzB,6BAAsB,CAACsB;IACvC,CAAC,CAAC;IACFvB,mBAAmB,GAAGF,OAAO,CAACc,MAAM;;IAEpC;IACA,MAAMe,IAAI,GAAG,MAAM,IAAAC,sBAAU,EAAC;MAC1BjC,OAAO;MACPqB,OAAO;MACPa,GAAG,EAAEX,UAAU;MACfG;IACJ,CAAC,CAAC;;IAEF;IACA,IAAIS,MAAM,GAAG,MAAMnC,OAAO,CAACQ,WAAW,CAAC4B,UAAU,CAACd,QAAQ,EAAEG,IAAI,CAAC;;IAEjE;IACA,IAAAZ,0BAAY,EAACD,QAAQ,EAAEZ,OAAO,CAAC;;IAE/B;IACAmC,MAAM,GAAG,MAAMnC,OAAO,CAACQ,WAAW,CAAC6B,UAAU,CAACF,MAAM,CAACf,EAAE,EAAE;MACrDkB,OAAO,EAAEN,IAAI,CAACM,OAAO;MACrBC,KAAK,EAAEP,IAAI,CAACO,KAAK;MACjBC,IAAI,EAAER,IAAI,CAACQ,IAAI;MACfC,QAAQ,EAAET,IAAI,CAACS;IACnB,CAAC,CAAC;;IAEF;IACA,IAAA5B,0BAAY,EAACD,QAAQ,EAAEZ,OAAO,CAAC;;IAE/B;;IAEA;IACAG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACa,aAAa,CAAClB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;MAC3EH,MAAM,EAAEX,6BAAsB,CAACoC,SAAS;MACxCxB,IAAI,EAAE;QACFyB,OAAO,EAAE,MAAM;QACfX,IAAI,EAAE;UACFZ,EAAE,EAAEe,MAAM,CAACf,EAAE;UACbmB,KAAK,EAAEJ,MAAM,CAACI,KAAK;UACnBK,OAAO,EAAET,MAAM,CAACS,OAAO;UACvB3B,MAAM,EAAEkB,MAAM,CAAClB;QACnB;MACJ;IACJ,CAAC,CAAC;IACF;IACA,MAAMT,WAAW,CAACM,gBAAgB,CAACe,WAAW,CAACpB,MAAM,EAAE;MACnDqB,UAAU,EAAEzB,mBAAmB;MAC/B0B,UAAU,EAAEzB,6BAAsB,CAACoC;IACvC,CAAC,CAAC;IACFrC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;EACxC,CAAC,CAAC,OAAO4B,CAAC,EAAE;IACR5C,GAAG,CAAC,kCAAkC,EAAE4C,CAAC,CAAC;IAE1C,IAAI1C,OAAO,IAAIA,OAAO,CAACiB,EAAE,EAAE;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACa,aAAa,CAAClB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;QAC3EH,MAAM,EAAEX,6BAAsB,CAACwC,MAAM;QACrC3B,KAAK,EAAE;UACH4B,IAAI,EAAEF,CAAC,CAACE,IAAI;UACZJ,OAAO,EAAEE,CAAC,CAACF,OAAO;UAClBK,KAAK,EAAEH,CAAC,CAACG,KAAK;UACdC,IAAI,EAAE;QACV;MACJ,CAAC,CAAC;;MAEF;MACA,MAAMzC,WAAW,CAACM,gBAAgB,CAACe,WAAW,CAACpB,MAAM,EAAE;QACnDqB,UAAU,EAAEzB,mBAAmB;QAC/B0B,UAAU,EAAEzB,6BAAsB,CAACwC;MACvC,CAAC,CAAC;MACFzC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;IACxC;IAEA,OAAO;MACHC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHwB,OAAO,EAAEE,CAAC,CAACF;MACf;IACJ,CAAC;EACL,CAAC,SAAS;IACN;IACA,IAAIvC,aAAa,EAAE;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAC,CAAC;MAE7C,MAAMD,WAAW,CAACM,gBAAgB,CAACoC,UAAU,CAACzC,MAAM,EAAE;QAClDQ,MAAM,EAAEX,6BAAsB,CAACoC,SAAS;QACxCxB,IAAI,EAAE;UACFyB,OAAO,EAAG;QACd;MACJ,CAAC,CAAC;IACN,CAAC,MAAM;MACH1C,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAE,CAAC;MACtD;MACA,MAAM,IAAAyC,2BAAmB,EAAU;QAC/BnD,OAAO;QACP+C,IAAI,EAAEjD,aAAa,CAACsD,QAAQ,CAACC,OAAO;QACpCtD,OAA
O,EAAE;UACLU,MAAM;UACNC,YAAY,EAAEA,YAAY,GAAG,CAAC;UAC9BC,IAAI;UACJC,QAAQ,EAAEZ,OAAO,CAACsD,QAAQ,CAACC,WAAW;QAC1C,CAAC;QACDC,WAAW,EAAE;MACjB,CAAC,CAAC;IACN;EACJ;EACA,OAAO;IACHtC,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,CAAC;AAAC"}
@@ -61,8 +61,7 @@ const pagesHandler = async (configuration, payload, context) => {
  pageKey,
  category,
  zipFileKey,
- input,
- meta
+ input
  } = subTask.data;
  const {
  fileUploadsData
@@ -86,7 +85,7 @@ const pagesHandler = async (configuration, payload, context) => {
  fileUploadsData
  }); // Create a page

- let pbPage = await context.pageBuilder.createPage(category, meta); // Update page with data
+ let pbPage = await context.pageBuilder.createPage(category); // Update page with data

  pbPage = await context.pageBuilder.updatePage(pbPage.id, {
  content: page.content,
@@ -1 +1 @@
- {"version":3,"names":["pagesHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","pageKey","category","zipFileKey","input","meta","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","page","importPage","key","pbPage","createPage","updatePage","content","title","path","settings","COMPLETED","message","version","e","FAILED","name","stack","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity","description"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importPage } from \"~/import/utils\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const pagesHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Page Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { pageKey, category, zipFileKey, input, meta } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing page key \"${pageKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const page = await importPage({\n context,\n pageKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a page\n let pbPage = await context.pageBuilder.createPage(category, meta);\n\n // Update page with data\n pbPage = await context.pageBuilder.updatePage(pbPage.id, {\n content: page.content,\n title: page.title,\n path: page.path,\n settings: page.settings\n });\n\n // TODO: Publish page\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: \"Done\",\n page: {\n id: 
pbPage.id,\n title: pbPage.title,\n version: pbPage.version,\n status: pbPage.status\n }\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_PAGES_PROCESS] Error => \", e);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing pages.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current page error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import pages - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AACA;;AAGO,MAAMA,YAAY,GAAG,OACxBC,aADwB,EAExBC,OAFwB,EAGxBC,OAHwB,KAIJ;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,IAAIE,OAAJ;EACA,IAAIC,aAAa,GAAG,IAApB;EACA,IAAIC,mBAAmB,GAAGC,6BAAA,CAAuBC,OAAjD;EAEAN,GAAG,CAAC,mCAAD,CAAH;EACA,MAAM;IAAEO;EAAF,IAAkBR,OAAxB;EACA,MAAM;IAAES,MAAF;IAAUC,YAAV;IAAwBC,IAAxB;IAA8BC;EAA9B,IAA2Cb,OAAjD,CARoB,CASpB;EACA;;EACA,IAAAc,0BAAA,EAAaD,QAAb,EAAuBZ,OAAvB;;EAEA,IAAI;IACA;AACR;AACA;AACA;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BC,UAA7B,CAAwCN,MAAxC,EAAgD,IAAAO,eAAA,EAAQN,YAAR,EAAsB,CAAtB,CAAhD,CAAhB;IAEA;AACR;AACA;AACA;;IACQ,IAAI,CAACP,OAAD,IAAYA,OAAO,CAACc,MAAR,KAAmBX,6BAAA,CAAuBC,OAA1D,EAAmE;MAC/DH,aAAa,GAAG,IAAhB;MACA,OAAO;QACHc,IAAI,EAAE,EADH;QAEHC,KAAK,EAAE;MAFJ,CAAP;IAIH,CAND,MAMO;MACHf,aAAa,GAAG,KAAhB;IACH;;IACDC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IAEAhB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAnC,CAAH;IAEA,MAAM;MAAEC,OAAF;MAAWC,QAAX;MAAqBC,UAArB;MAAiCC,KAAjC;MAAwCC;IAAxC,IAAiDtB,OAAO,CAACe,IAA/D;IACA,MAAM;MAAEQ;IAAF,IAAsBF,KAA5B;IAEAvB,GAAG,CAAE,wBAAuBoB,OAAQ,GAAjC,CAAH,CA5BA,CA8BA;;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBsB;IAD4C,CAA/D,CAAhB,CA/BA,CAkCA;;IACA,MAAMpB,WAAW,CAACM,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;MACnDqB,UAAU,EAAEzB,mBADuC;MAEnD0B,UAAU,EAAEzB,6BAAA,CAAuBsB;IAFgB,CAAjD,CAAN;IAIAvB,mBAAmB,GAAGF,OAAO,CAACc,MAA9B,CAvCA,CAyCA;;IACA,MAAMe,IAAI,GAAG,MAAM,IAAAC,iBAAA,EAAW;MAC1BjC,OAD0B;MAE1BqB,OAF0B;MAG1Ba,GAAG,EAAEX,UAHqB;MAI1BG;IAJ0B,CAAX,CAAnB,CA1CA,CAiDA;;IACA,IAAIS,MAAM,GAAG,MAAMnC,OAAO,CAACQ,WAAR,CAAoB4B,UAApB,CAA+Bd,QAA/B,EAAyCG,IAAzC,CAAnB,CAlDA,CAoDA;;IACAU,MAAM,GAAG,MAAMnC,OAAO,CAACQ,WAAR,CAAoB6B,UAApB,CAA+BF,MAAM,CAACf,EAAtC,EAA0C;MACrDkB,OAAO,EAAEN,IAAI,CAACM,OADuC;MAErDC,KAAK,EAAEP,IAAI,CAACO,KAFyC;MAGrDC,IAAI,EAAER,IAAI,CAACQ,IAH0C;MAIrDC,QAAQ,EAAET,IAAI,CAACS;IAJsC,CAA1C,CAAf,CArDA,CA4DA;IAEA;;IACAtC,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBoC,SAD4C;MAE3ExB,IAAI,EAAE;QACFyB,OAAO,EAAE,MADP;QAEFX,IAAI,EAAE;UACFZ,EAAE,EAAEe,MAAM,CAACf,EADT;UAEFmB,KAAK,EAAEJ,MAAM,CAACI,KAFZ;UAGFK,OAAO,EAAET,MAAM,CAACS,OAHd;UAIF3B,MAAM,EAAEkB,MAAM,CAAClB;QAJb;MAFJ;IAFqE,CAA/D,CAAhB,CA/DA,CA2EA;;IACA,MAAMT,WAAW,CAACM,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;MACnDqB,UAAU,EAAEzB,mBADuC;MAEnD0B,UAAU,EAAEzB,6BAAA,CAAuBoC;IAFgB,CAAjD,CAAN;IAIArC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;EACH,CAjFD,CAiFE,OAAO4B,CAAP,EAAU;IACR5C,GAAG,CAAC,kCAAD,EAAqC4C,CAArC,CAAH;;IAEA,IAAI1C,OAAO,IAAIA,OAAO,CAACiB,EAAvB,EAA2B;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6Ba,aAA7B,CAA2ClB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;QAC3EH,MAAM,EAAEX,6BAAA,CAAuBwC,MAD4C;QAE3E3B,KAAK,EAAE;UACH4B,IAAI,EAAEF,CAAC,CAACE,IADL;UAEHJ,OAAO,EAAEE,CAAC,CAACF,OAFR;UAGHK,KAAK,EAAEH,CAAC,CAACG,KAHN;UAIHC,IAAI,EAAE;QAJH;MAFoE,CAA/D,CAAhB,CALuB,CAevB;;MACA,MAAMzC,WAAW,CAACM,gBAAZ,CAA6Be,WAA7B,CAAyCpB,MAAzC,EAAiD;QACnDqB,UAAU,EAAEzB,mBADuC;QAEnD0B,UAAU,EAAEzB,6BAAA,CAAuBwC;MAFgB,CAAjD,CAAN;MAIAzC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IACH;;IAED,OAAO;MACHC,IAAI,EAAE,IADH;MAEHC,KAAK,EAAE;QACHwB,OAAO,EAAEE,CAAC,CAACF;MADR;IAFJ,CAAP;EAMH,CAjHD,SAiHU;IACN;IACA,IAAIvC,aAAJ,EAAmB;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAxC,CAAH;MAEA,MAAMD,WAAW,CAACM,gBAAZ,CAA6BoC,UAA7B,CAAwCzC,MAAxC,EAAgD;QAClDQ,MAAM,EAAEX,6BAAA,CAAuBoC,SADmB;QAElDxB,IAAI,EAAE;UACFyB,OAAO,EAAG;QADR;MAF4C,CAAhD,CAAN;IAMH,CATD,MASO;MACH1C,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAhD,CAAH,CADG,CAEH;;MACA,MAAM,IAAAyC,2BAAA,EAA6B;QAC/BnD,OAD+B;QAE/B+
C,IAAI,EAAEjD,aAAa,CAACsD,QAAd,CAAuBC,OAFE;QAG/BtD,OAAO,EAAE;UACLU,MADK;UAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;UAGLC,IAHK;UAILC,QAAQ,EAAEZ,OAAO,CAACsD,QAAR,CAAiBC,WAAjB;QAJL,CAHsB;QAS/BC,WAAW,EAAE;MATkB,CAA7B,CAAN;IAWH;EACJ;;EACD,OAAO;IACHtC,IAAI,EAAE,EADH;IAEHC,KAAK,EAAE;EAFJ,CAAP;AAIH,CAjKM"}
+ {"version":3,"names":["pagesHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","pageKey","category","zipFileKey","input","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","page","importPage","key","pbPage","createPage","updatePage","content","title","path","settings","COMPLETED","message","version","e","FAILED","name","stack","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity","description"],"sources":["pagesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importPage } from \"~/import/utils\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const pagesHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Page Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { pageKey, category, zipFileKey, input } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing page key \"${pageKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const page = await importPage({\n context,\n pageKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a page\n let pbPage = await context.pageBuilder.createPage(category);\n\n // Update page with data\n pbPage = await context.pageBuilder.updatePage(pbPage.id, {\n content: page.content,\n title: page.title,\n path: page.path,\n settings: page.settings\n });\n\n // TODO: Publish page\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: \"Done\",\n page: {\n id: pbPage.id,\n title: 
pbPage.title,\n version: pbPage.version,\n status: pbPage.status\n }\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_PAGES_PROCESS] Error => \", e);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n stack: e.stack,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing pages.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current page error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import pages - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AACA;;AAGO,MAAMA,YAAY,GAAG,OACxBC,aADwB,EAExBC,OAFwB,EAGxBC,OAHwB,KAIJ;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,IAAIE,OAAJ;EACA,IAAIC,aAAa,GAAG,IAApB;EACA,IAAIC,mBAAmB,GAAGC,6BAAA,CAAuBC,OAAjD;EAEAN,GAAG,CAAC,mCAAD,CAAH;EACA,MAAM;IAAEO;EAAF,IAAkBR,OAAxB;EACA,MAAM;IAAES,MAAF;IAAUC,YAAV;IAAwBC,IAAxB;IAA8BC;EAA9B,IAA2Cb,OAAjD,CARoB,CASpB;EACA;;EACA,IAAAc,0BAAA,EAAaD,QAAb,EAAuBZ,OAAvB;;EAEA,IAAI;IACA;AACR;AACA;AACA;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BC,UAA7B,CAAwCN,MAAxC,EAAgD,IAAAO,eAAA,EAAQN,YAAR,EAAsB,CAAtB,CAAhD,CAAhB;IAEA;AACR;AACA;AACA;;IACQ,IAAI,CAACP,OAAD,IAAYA,OAAO,CAACc,MAAR,KAAmBX,6BAAA,CAAuBC,OAA1D,EAAmE;MAC/DH,aAAa,GAAG,IAAhB;MACA,OAAO;QACHc,IAAI,EAAE,EADH;QAEHC,KAAK,EAAE;MAFJ,CAAP;IAIH,CAND,MAMO;MACHf,aAAa,GAAG,KAAhB;IACH;;IACDC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IAEAhB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAnC,CAAH;IAEA,MAAM;MAAEC,OAAF;MAAWC,QAAX;MAAqBC,UAArB;MAAiCC;IAAjC,IAA2CrB,OAAO,CAACe,IAAzD;IACA,MAAM;MAAEO;IAAF,IAAsBD,KAA5B;IAEAvB,GAAG,CAAE,wBAAuBoB,OAAQ,GAAjC,CAAH,CA5BA,CA8BA;;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBqB;IAD4C,CAA/D,CAAhB,CA/BA,CAkCA;;IACA,MAAMnB,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBqB;IAFgB,CAAjD,CAAN;IAIAtB,mBAAmB,GAAGF,OAAO,CAACc,MAA9B,CAvCA,CAyCA;;IACA,MAAMc,IAAI,GAAG,MAAM,IAAAC,iBAAA,EAAW;MAC1BhC,OAD0B;MAE1BqB,OAF0B;MAG1BY,GAAG,EAAEV,UAHqB;MAI1BE;IAJ0B,CAAX,CAAnB,CA1CA,CAiDA;;IACA,IAAIS,MAAM,GAAG,MAAMlC,OAAO,CAACQ,WAAR,CAAoB2B,UAApB,CAA+Bb,QAA/B,CAAnB,CAlDA,CAoDA;;IACAY,MAAM,GAAG,MAAMlC,OAAO,CAACQ,WAAR,CAAoB4B,UAApB,CAA+BF,MAAM,CAACd,EAAtC,EAA0C;MACrDiB,OAAO,EAAEN,IAAI,CAACM,OADuC;MAErDC,KAAK,EAAEP,IAAI,CAACO,KAFyC;MAGrDC,IAAI,EAAER,IAAI,CAACQ,IAH0C;MAIrDC,QAAQ,EAAET,IAAI,CAACS;IAJsC,CAA1C,CAAf,CArDA,CA4DA;IAEA;;IACArC,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBmC,SAD4C;MAE3EvB,IAAI,EAAE;QACFwB,OAAO,EAAE,MADP;QAEFX,IAAI,EAAE;UACFX,EAAE,EAAEc,MAAM,CAACd,EADT;UAEFkB,KAAK,EAAEJ,MAAM,CAACI,KAFZ;UAGFK,OAAO,EAAET,MAAM,CAACS,OAHd;UAIF1B,MAAM,EAAEiB,MAAM,CAACjB;QAJb;MAFJ;IAFqE,CAA/D,CAAhB,CA/DA,CA2EA;;IACA,MAAMT,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBmC;IAFgB,CAAjD,CAAN;IAIApC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;EACH,CAjFD,CAiFE,OAAO2B,CAAP,EAAU;IACR3C,GAAG,CAAC,kCAAD,EAAqC2C,CAArC,CAAH;;IAEA,IAAIzC,OAAO,IAAIA,OAAO,CAACiB,EAAvB,EAA2B;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;QAC3EH,MAAM,EAAEX,6BAAA,CAAuBuC,MAD4C;QAE3E1B,KAAK,EAAE;UACH2B,IAAI,EAAEF,CAAC,CAACE,IADL;UAEHJ,OAAO,EAAEE,CAAC,CAACF,OAFR;UAGHK,KAAK,EAAEH,CAAC,CAACG,KAHN;UAIHC,IAAI,EAAE;QAJH;MAFoE,CAA/D,CAAhB,CALuB,CAevB;;MACA,MAAMxC,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;QACnDoB,UAAU,EAAExB,mBADuC;QAEnDyB,UAAU,EAAExB,6BAAA,CAAuBuC;MAFgB,CAAjD,CAAN;MAIAxC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IACH;;IAED,OAAO;MACHC,IAAI,EAAE,IADH;MAEHC,KAAK,EAAE;QACHuB,OAAO,EAAEE,CAAC,CAACF;MADR;IAFJ,CAAP;EAMH,CAjHD,SAiHU;IACN;IACA,IAAItC,aAAJ,EAAmB;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAxC,CAAH;MAEA,MAAMD,WAAW,CAACM,gBAAZ,CAA6BmC,UAA7B,CAAwCxC,MAAxC,EAAgD;QAClDQ,MAAM,EAAEX,6BAAA,CAAuBmC,SADmB;QAElDvB,IAAI,EAAE;UACFwB,OAAO,EAAG;QADR;MAF4C,CAAhD,CAAN;IAMH,CATD,MASO;MACHzC,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAhD,CAAH,CADG,CAEH;;MACA,MAAM,IAAAwC,2BAAA,EAA6B;QAC/BlD,OAD+B;QAE/B8C,IAAI,EAAEhD,aAAa,CAACqD,
QAAd,CAAuBC,OAFE;QAG/BrD,OAAO,EAAE;UACLU,MADK;UAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;UAGLC,IAHK;UAILC,QAAQ,EAAEZ,OAAO,CAACqD,QAAR,CAAiBC,WAAjB;QAJL,CAHsB;QAS/BC,WAAW,EAAE;MATkB,CAA7B,CAAN;IAWH;EACJ;;EACD,OAAO;IACHrC,IAAI,EAAE,EADH;IAEHC,KAAK,EAAE;EAFJ,CAAP;AAIH,CAjKM"}
@@ -0,0 +1,11 @@
+ import { PbImportExportContext } from "../../../graphql/types";
+ import { FileUploadsData } from "../../../types";
+ import { ExportedTemplateData } from "../../../export/utils";
+ interface ImportTemplateParams {
+ key: string;
+ templateKey: string;
+ context: PbImportExportContext;
+ fileUploadsData: FileUploadsData;
+ }
+ export declare function importTemplate({ templateKey, context, fileUploadsData }: ImportTemplateParams): Promise<ExportedTemplateData["template"]>;
+ export {};
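The declaration above pins down the new `importTemplate` contract. As a hedged illustration only, here is roughly how a process handler calls it; the real call site appears in the `templatesHandler.js` hunk below, where `templateKey`, `zipFileKey` and `fileUploadsData` are read off the sub-task's `data` record. The `declare`d placeholders are not part of the package.

```ts
import { importTemplate } from "~/import/process/templates/importTemplate";
import { PbImportExportContext } from "~/graphql/types";
import { FileUploadsData } from "~/types";

// Placeholder inputs; in the real handler these come from subTask.data and its input field.
declare const context: PbImportExportContext;
declare const fileUploadsData: FileUploadsData;
declare const templateKey: string;
declare const zipFileKey: string;

export async function runTemplateImport() {
    // Resolves with ExportedTemplateData["template"], which templatesHandler then feeds
    // into context.pageBuilder.createPageTemplate(...).
    const template = await importTemplate({
        context,
        templateKey,
        key: zipFileKey, // declared on ImportTemplateParams; the data file itself is located via fileUploadsData
        fileUploadsData
    });
    return template;
}
```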
@@ -0,0 +1,66 @@
+ "use strict";
+
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ exports.importTemplate = importTemplate;
+ var _path = _interopRequireDefault(require("path"));
+ var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
+ var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
+ var _fsExtra = require("fs-extra");
+ var _constants = require("../../constants");
+ var _s3Stream = require("../../../export/s3Stream");
+ var _uploadAssets = require("../../utils/uploadAssets");
+ var _updateFilesInData = require("../../utils/updateFilesInData");
+ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+ var _deleteS3Folder = require("../../utils/deleteS3Folder");
+ async function importTemplate({
+ templateKey,
+ context,
+ fileUploadsData
+ }) {
+ const log = console.log;
+
+ // Making Directory for template in which we're going to extract the template data file.
+ const TEMPLATE_EXTRACT_DIR = _path.default.join(_constants.INSTALL_EXTRACT_DIR, templateKey);
+ (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+ const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+ const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+ log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
+ // Download and save template data file in disk.
+ await new Promise((resolve, reject) => {
+ _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fsExtra.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ });
+
+ // Load the template data file from disk.
+ log(`Load file ${templateDataFileKey}`);
+ const {
+ template,
+ files
+ } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
+
+ // Only update template data if there are files.
+ if (files && Array.isArray(files) && files.length > 0) {
+ // Upload template assets.
+ const fileIdToNewFileMap = await (0, _uploadAssets.uploadAssets)({
+ context,
+ files,
+ fileUploadsData
+ });
+ const settings = await context.fileManager.getSettings();
+ const {
+ srcPrefix = ""
+ } = settings || {};
+ (0, _updateFilesInData.updateFilesInData)({
+ data: template.content || {},
+ fileIdToNewFileMap,
+ srcPrefix
+ });
+ }
+ log("Removing Directory for template...");
+ await (0, _downloadInstallFiles.deleteFile)(templateKey);
+ log(`Remove template contents from S3...`);
+ await (0, _deleteS3Folder.deleteS3Folder)(_path.default.dirname(fileUploadsData.data));
+ return template;
+ }
@@ -0,0 +1 @@
+ {"version":3,"names":["importTemplate","templateKey","context","fileUploadsData","log","console","TEMPLATE_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","templateDataFileKey","dotProp","get","TEMPLATE_DATA_FILE_PATH","basename","Promise","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","template","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","deleteFile","deleteS3Folder","dirname"],"sources":["importTemplate.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { ExportedTemplateData } from \"~/export/utils\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { uploadAssets } from \"~/import/utils/uploadAssets\";\nimport { updateFilesInData } from \"~/import/utils/updateFilesInData\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\n\ninterface ImportTemplateParams {\n key: string;\n templateKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importTemplate({\n templateKey,\n context,\n fileUploadsData\n}: ImportTemplateParams): Promise<ExportedTemplateData[\"template\"]> {\n const log = console.log;\n\n // Making Directory for template in which we're going to extract the template data file.\n const TEMPLATE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, templateKey);\n ensureDirSync(TEMPLATE_EXTRACT_DIR);\n\n const templateDataFileKey = dotProp.get(fileUploadsData, `data`);\n const TEMPLATE_DATA_FILE_PATH = path.join(\n TEMPLATE_EXTRACT_DIR,\n path.basename(templateDataFileKey)\n );\n\n log(`Downloading Template data file: ${templateDataFileKey} at \"${TEMPLATE_DATA_FILE_PATH}\"`);\n // Download and save template data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(templateDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(TEMPLATE_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the template data file from disk.\n log(`Load file ${templateDataFileKey}`);\n const { template, files } = await loadJson<ExportedTemplateData>(TEMPLATE_DATA_FILE_PATH);\n\n // Only update template data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload template assets.\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: template.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for template...\");\n await deleteFile(templateKey);\n\n log(`Remove template contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return 
template;\n}\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AASO,eAAeA,cAAc,CAAC;EACjCC,WAAW;EACXC,OAAO;EACPC;AACkB,CAAC,EAA6C;EAChE,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,oBAAoB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,WAAW,CAAC;EACxE,IAAAS,sBAAa,EAACJ,oBAAoB,CAAC;EAEnC,MAAMK,mBAAmB,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAChE,MAAMW,uBAAuB,GAAGP,aAAI,CAACC,IAAI,CACrCF,oBAAoB,EACpBC,aAAI,CAACQ,QAAQ,CAACJ,mBAAmB,CAAC,CACrC;EAEDP,GAAG,CAAE,mCAAkCO,mBAAoB,QAAOG,uBAAwB,GAAE,CAAC;EAC7F;EACA,MAAM,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCC,kBAAQ,CACHC,UAAU,CAACT,mBAAmB,CAAC,CAC/BU,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBI,IAAI,CAAC,IAAAC,0BAAiB,EAACT,uBAAuB,CAAC,CAAC,CAChDO,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBG,EAAE,CAAC,QAAQ,EAAEJ,OAAO,CAAC;EAC9B,CAAC,CAAC;;EAEF;EACAb,GAAG,CAAE,aAAYO,mBAAoB,EAAC,CAAC;EACvC,MAAM;IAAEa,QAAQ;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAuBZ,uBAAuB,CAAC;;EAEzF;EACA,IAAIW,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD;IACA,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,0BAAY,EAAC;MAC1C7B,OAAO;MACPuB,KAAK;MACLtB;IACJ,CAAC,CAAC;IAEF,MAAM6B,QAAQ,GAAG,MAAM9B,OAAO,CAAC+B,WAAW,CAACC,WAAW,EAAE;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,oCAAiB,EAAC;MACdC,IAAI,EAAEb,QAAQ,CAACc,OAAO,IAAI,CAAC,CAAC;MAC5BR,kBAAkB;MAClBK;IACJ,CAAC,CAAC;EACN;EAEA/B,GAAG,CAAC,oCAAoC,CAAC;EACzC,MAAM,IAAAmC,gCAAU,EAACtC,WAAW,CAAC;EAE7BG,GAAG,CAAE,qCAAoC,CAAC;EAC1C,MAAM,IAAAoC,8BAAc,EAACjC,aAAI,CAACkC,OAAO,CAACtC,eAAe,CAACkC,IAAI,CAAC,CAAC;EAExD,OAAOb,QAAQ;AACnB"}
@@ -1,3 +1,3 @@
- import { PbImportExportContext } from "../../types";
- import { Configuration, Payload, Response } from ".";
+ import { PbImportExportContext } from "../../../types";
+ import { Configuration, Payload, Response } from "..";
  export declare const templatesHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;
@@ -4,17 +4,11 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.templatesHandler = void 0;
-
- var _types = require("../../types");
-
- var _utils = require("../utils");
-
- var _client = require("../../client");
-
- var _mockSecurity = require("../../mockSecurity");
-
- var _utils2 = require("@webiny/utils");
-
+ var _types = require("../../../types");
+ var _importTemplate = require("./importTemplate");
+ var _client = require("../../../client");
+ var _mockSecurity = require("../../../mockSecurity");
+ var _utils = require("@webiny/utils");
  const templatesHandler = async (configuration, payload, context) => {
  const log = console.log;
  let subTask;
@@ -29,22 +23,22 @@ const templatesHandler = async (configuration, payload, context) => {
  subTaskIndex,
  type,
  identity
- } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
+ } = payload;
+ // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
  // and this Lambda is invoked internally, without credentials.
-
  (0, _mockSecurity.mockSecurity)(identity, context);
-
  try {
  /*
  * Note: We're not going to DB for getting next sub-task to process,
  * because the data might be out of sync due to GSI eventual consistency.
  */
- subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
+
+ subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils.zeroPad)(subTaskIndex, 5));
+
  /**
  * Base condition!!
  * Bail out early, if task not found or task's status is not "pending".
  */
-
  if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
  noPendingTask = true;
  return {
@@ -54,7 +48,6 @@ const templatesHandler = async (configuration, payload, context) => {
  } else {
  noPendingTask = false;
  }
-
  prevStatusOfSubTask = subTask.status;
  log(`Fetched sub task => ${subTask.id}`);
  const {
@@ -65,31 +58,39 @@ const templatesHandler = async (configuration, payload, context) => {
  const {
  fileUploadsData
  } = input;
- log(`Processing template key "${templateKey}"`); // Mark task status as PROCESSING
+ log(`Processing template key "${templateKey}"`);

+ // Mark task status as PROCESSING
  subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
  status: _types.ImportExportTaskStatus.PROCESSING
- }); // Update stats in main task
-
+ });
+ // Update stats in main task
  await pageBuilder.importExportTask.updateStats(taskId, {
  prevStatus: prevStatusOfSubTask,
  nextStatus: _types.ImportExportTaskStatus.PROCESSING
  });
- prevStatusOfSubTask = subTask.status; // Real job
+ prevStatusOfSubTask = subTask.status;

- const template = await (0, _utils.importTemplate)({
+ // Real job
+ const template = await (0, _importTemplate.importTemplate)({
  context,
  templateKey,
  key: zipFileKey,
  fileUploadsData
- }); // Create a template
+ });

+ // Create a template
  const pbTemplate = await context.pageBuilder.createPageTemplate({
  title: template.title,
+ slug: template.slug,
+ tags: template.tags,
+ layout: template.layout,
+ pageCategory: template.pageCategory,
  description: template.description,
  content: template.content
- }); // Update task record in DB
+ });

+ // Update task record in DB
  subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
  status: _types.ImportExportTaskStatus.COMPLETED,
  data: {
@@ -99,8 +100,8 @@ const templatesHandler = async (configuration, payload, context) => {
  title: pbTemplate.title
  }
  }
- }); // Update stats in main task
-
+ });
+ // Update stats in main task
  await pageBuilder.importExportTask.updateStats(taskId, {
  prevStatus: prevStatusOfSubTask,
  nextStatus: _types.ImportExportTaskStatus.COMPLETED
@@ -108,7 +109,6 @@ const templatesHandler = async (configuration, payload, context) => {
  prevStatusOfSubTask = subTask.status;
  } catch (e) {
  log("[IMPORT_TEMPLATES_PROCESS] Error => ", e.message);
-
  if (subTask && subTask.id) {
  /**
  * In case of error, we'll update the task status to "failed",
@@ -121,15 +121,15 @@ const templatesHandler = async (configuration, payload, context) => {
  message: e.message,
  code: "IMPORT_FAILED"
  }
- }); // Update stats in main task
+ });

+ // Update stats in main task
  await pageBuilder.importExportTask.updateStats(taskId, {
  prevStatus: prevStatusOfSubTask,
  nextStatus: _types.ImportExportTaskStatus.FAILED
  });
  prevStatusOfSubTask = subTask.status;
  }
-
  return {
  data: null,
  error: {
@@ -147,8 +147,8 @@ const templatesHandler = async (configuration, payload, context) => {
  }
  });
  } else {
- log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current template error out.
-
+ log(`Invoking PROCESS for task "${subTaskIndex + 1}"`);
+ // We want to continue with Self invocation no matter if current template error out.
  await (0, _client.invokeHandlerClient)({
  context,
  name: configuration.handlers.process,
@@ -162,11 +162,9 @@ const templatesHandler = async (configuration, payload, context) => {
  });
  }
  }
-
  return {
  data: "",
  error: null
  };
  };
-
  exports.templatesHandler = templatesHandler;
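Beyond the path changes, the one behavioural change in this handler is that `createPageTemplate` now receives four additional fields from the imported template. A sketch of the resulting input shape, inferred from the call above; the optionality and field types are assumptions, not taken from the package's published typings.

```ts
// Inferred from the createPageTemplate(...) call in the hunk above; types are assumptions.
interface ImportedTemplateInput {
    title: string;
    slug?: string;         // newly forwarded in this version
    tags?: string[];       // newly forwarded in this version
    layout?: string;       // newly forwarded in this version
    pageCategory?: string; // newly forwarded in this version
    description?: string;
    content: unknown;
}
```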
@@ -0,0 +1 @@
+ {"version":3,"names":["templatesHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","templateKey","zipFileKey","input","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","template","importTemplate","key","pbTemplate","createPageTemplate","title","slug","tags","layout","pageCategory","description","content","COMPLETED","message","e","FAILED","name","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity"],"sources":["templatesHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importTemplate } from \"./importTemplate\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const templatesHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Template Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n\n const { templateKey, zipFileKey, input } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing template key \"${templateKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const template = await importTemplate({\n context,\n templateKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a template\n const pbTemplate = await context.pageBuilder.createPageTemplate({\n title: template.title,\n slug: template.slug,\n tags: template.tags,\n layout: template.layout,\n pageCategory: template.pageCategory,\n description: template.description,\n content: template.content\n });\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: 
\"Done\",\n template: {\n id: pbTemplate.id,\n title: pbTemplate.title\n }\n }\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_TEMPLATES_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing templates.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current template error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import templates - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n 
};\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AACA;AAGO,MAAMA,gBAAgB,GAAG,OAC5BC,aAA4B,EAC5BC,OAAgB,EAChBC,OAA8B,KACV;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,IAAIE,OAAO;EACX,IAAIC,aAAa,GAAG,IAAI;EACxB,IAAIC,mBAAmB,GAAGC,6BAAsB,CAACC,OAAO;EAExDN,GAAG,CAAC,uCAAuC,CAAC;EAC5C,MAAM;IAAEO;EAAY,CAAC,GAAGR,OAAO;EAC/B,MAAM;IAAES,MAAM;IAAEC,YAAY;IAAEC,IAAI;IAAEC;EAAS,CAAC,GAAGb,OAAO;EACxD;EACA;EACA,IAAAc,0BAAY,EAACD,QAAQ,EAAEZ,OAAO,CAAC;EAE/B,IAAI;IACA;AACR;AACA;AACA;;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACC,UAAU,CAACN,MAAM,EAAE,IAAAO,cAAO,EAACN,YAAY,EAAE,CAAC,CAAC,CAAC;;IAEzF;AACR;AACA;AACA;IACQ,IAAI,CAACP,OAAO,IAAIA,OAAO,CAACc,MAAM,KAAKX,6BAAsB,CAACC,OAAO,EAAE;MAC/DH,aAAa,GAAG,IAAI;MACpB,OAAO;QACHc,IAAI,EAAE,EAAE;QACRC,KAAK,EAAE;MACX,CAAC;IACL,CAAC,MAAM;MACHf,aAAa,GAAG,KAAK;IACzB;IACAC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;IAEpChB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAC,CAAC;IAExC,MAAM;MAAEC,WAAW;MAAEC,UAAU;MAAEC;IAAM,CAAC,GAAGpB,OAAO,CAACe,IAAI;IACvD,MAAM;MAAEM;IAAgB,CAAC,GAAGD,KAAK;IAEjCtB,GAAG,CAAE,4BAA2BoB,WAAY,GAAE,CAAC;;IAE/C;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACW,aAAa,CAAChB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;MAC3EH,MAAM,EAAEX,6BAAsB,CAACoB;IACnC,CAAC,CAAC;IACF;IACA,MAAMlB,WAAW,CAACM,gBAAgB,CAACa,WAAW,CAAClB,MAAM,EAAE;MACnDmB,UAAU,EAAEvB,mBAAmB;MAC/BwB,UAAU,EAAEvB,6BAAsB,CAACoB;IACvC,CAAC,CAAC;IACFrB,mBAAmB,GAAGF,OAAO,CAACc,MAAM;;IAEpC;IACA,MAAMa,QAAQ,GAAG,MAAM,IAAAC,8BAAc,EAAC;MAClC/B,OAAO;MACPqB,WAAW;MACXW,GAAG,EAAEV,UAAU;MACfE;IACJ,CAAC,CAAC;;IAEF;IACA,MAAMS,UAAU,GAAG,MAAMjC,OAAO,CAACQ,WAAW,CAAC0B,kBAAkB,CAAC;MAC5DC,KAAK,EAAEL,QAAQ,CAACK,KAAK;MACrBC,IAAI,EAAEN,QAAQ,CAACM,IAAI;MACnBC,IAAI,EAAEP,QAAQ,CAACO,IAAI;MACnBC,MAAM,EAAER,QAAQ,CAACQ,MAAM;MACvBC,YAAY,EAAET,QAAQ,CAACS,YAAY;MACnCC,WAAW,EAAEV,QAAQ,CAACU,WAAW;MACjCC,OAAO,EAAEX,QAAQ,CAACW;IACtB,CAAC,CAAC;;IAEF;IACAtC,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACW,aAAa,CAAChB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;MAC3EH,MAAM,EAAEX,6BAAsB,CAACoC,SAAS;MACxCxB,IAAI,EAAE;QACFyB,OAAO,EAAE,MAAM;QACfb,QAAQ,EAAE;UACNV,EAAE,EAAEa,UAAU,CAACb,EAAE;UACjBe,KAAK,EAAEF,UAAU,CAACE;QACtB;MACJ;IACJ,CAAC,CAAC;IACF;IACA,MAAM3B,WAAW,CAACM,gBAAgB,CAACa,WAAW,CAAClB,MAAM,EAAE;MACnDmB,UAAU,EAAEvB,mBAAmB;MAC/BwB,UAAU,EAAEvB,6BAAsB,CAACoC;IACvC,CAAC,CAAC;IACFrC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;EACxC,CAAC,CAAC,OAAO2B,CAAC,EAAE;IACR3C,GAAG,CAAC,sCAAsC,EAAE2C,CAAC,CAACD,OAAO,CAAC;IAEtD,IAAIxC,OAAO,IAAIA,OAAO,CAACiB,EAAE,EAAE;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAgB,CAACW,aAAa,CAAChB,MAAM,EAAEN,OAAO,CAACiB,EAAE,EAAE;QAC3EH,MAAM,EAAEX,6BAAsB,CAACuC,MAAM;QACrC1B,KAAK,EAAE;UACH2B,IAAI,EAAEF,CAAC,CAACE,IAAI;UACZH,OAAO,EAAEC,CAAC,CAACD,OAAO;UAClBI,IAAI,EAAE;QACV;MACJ,CAAC,CAAC;;MAEF;MACA,MAAMvC,WAAW,CAACM,gBAAgB,CAACa,WAAW,CAAClB,MAAM,EAAE;QACnDmB,UAAU,EAAEvB,mBAAmB;QAC/BwB,UAAU,EAAEvB,6BAAsB,CAACuC;MACvC,CAAC,CAAC;MACFxC,mBAAmB,GAAGF,OAAO,CAACc,MAAM;IACxC;IAEA,OAAO;MACHC,IAAI,EAAE,IAAI;MACVC,KAAK,EAAE;QACHwB,OAAO,EAAEC,CAAC,CAACD;MACf;IACJ,CAAC;EACL,CAAC,SAAS;IACN;IACA,IAAIvC,aAAa,EAAE;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAC,CAAC;MAE7C,MAAMD,WAAW,CAACM,gBAAgB,CAACkC,UAAU,CAACvC,MAAM,EAAE;QAClDQ,MAAM,EAAEX,6BAAsB,CAACoC,SAAS;QACxCxB,IAAI,EAAE;UACFyB,OAAO,EAAG;QACd;MACJ,CAAC,CAAC;IACN,CAAC,MAAM;MACH1C,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAE,CAAC;MACtD;MACA,MAAM,IAAAuC,2BAAmB,EAAU;QAC/BjD,OAAO;QACP8C,IAAI,EAAEhD,aAAa,CAACoD,QAAQ,CAACC,OAAO;QACpCpD,OAAO,EAAE;UACLU,MAAM;UACNC,YAAY,EAAEA,YAAY,GAAG,CAAC;UAC9BC,IAAI;UACJC,QAAQ,EAAEZ,OAAO,CAACoD,QAAQ,CAACC,WAAW;QAC1C,CAAC;QACDb,WAAW,EAAE;MACjB,CAAC,CAAC;IACN;EACJ;EACA,OAAO;IACHtB,IAAI,EAAE,EAAE;IACRC,KAAK,EAAE;EACX,CAAC;AACL,C
AAC;AAAC"}
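The sourcesContent embedded in the map above shows the pattern shared by the import process handlers: fetch one pending sub-task, mark it as processing, do the actual import, mark it as completed, and then re-invoke the same Lambda with subTaskIndex + 1 until no pending sub-task is left. A minimal TypeScript sketch of that self-invocation loop, with hypothetical getSubTask, processOne, markTaskCompleted and invokeSelf helpers standing in for the real Page Builder CRUD and handler-client calls (the real handler also updates parent-task stats and re-invokes from a finally block even after an error):

    // Hedged sketch of the self-invoking sub-task loop; the helpers below are
    // illustrative stand-ins, not the package's actual API.
    declare function getSubTask(taskId: string, index: number): Promise<{ status: string } | null>;
    declare function processOne(subTask: unknown): Promise<void>;
    declare function markTaskCompleted(taskId: string): Promise<void>;
    declare function invokeSelf(payload: { taskId: string; subTaskIndex: number }): Promise<void>;

    export async function processSubTask(taskId: string, subTaskIndex: number): Promise<void> {
        const subTask = await getSubTask(taskId, subTaskIndex);
        if (!subTask || subTask.status !== "pending") {
            // Base condition: nothing left to process, close out the parent task.
            await markTaskCompleted(taskId);
            return;
        }
        await processOne(subTask);
        // Continue with the next sub-task by invoking this same handler again.
        await invokeSelf({ taskId, subTaskIndex: subTaskIndex + 1 });
    }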
@@ -0,0 +1 @@
+ export declare function deleteS3Folder(key: string): Promise<void>;
@@ -0,0 +1,19 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ exports.deleteS3Folder = deleteS3Folder;
+ var _s3Stream = require("../../export/s3Stream");
+ async function deleteS3Folder(key) {
+ // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
+ if (!key.endsWith("/")) {
+ key = `${key}/`;
+ }
+ const response = await _s3Stream.s3Stream.listObject(key);
+ const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
+ console.log(`Found ${keys.length} files.`);
+ const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
+ await Promise.all(deleteFilePromises);
+ console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
+ }
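deleteS3Folder relies on the fact that S3 has no real folders: it lists every object stored under the `key/` prefix and deletes the keys one by one. A hedged usage sketch (the relative import path is inferred from the require calls above and may differ in the published layout):

    // Hedged usage sketch; deleteS3Folder appends the trailing "/" itself,
    // so a bare prefix such as the import run's unique path is enough.
    import { deleteS3Folder } from "./deleteS3Folder";

    export async function cleanupImportRun(uniquePath: string): Promise<void> {
        // Removes every temporary object uploaded under `${uniquePath}/`.
        await deleteS3Folder(uniquePath);
    }

Note that a single S3 list call returns at most 1,000 keys and the helper above issues one listObject call, so extremely large folders would presumably need paginated listing.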
@@ -0,0 +1 @@
+ {"version":3,"names":["deleteS3Folder","key","endsWith","response","s3Stream","listObject","keys","Contents","map","c","Key","filter","Boolean","console","log","length","deleteFilePromises","deleteObject","Promise","all"],"sources":["deleteS3Folder.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\n\nexport async function deleteS3Folder(key: string): Promise<void> {\n // Append trailing slash i.e \"/\" to key to make sure we only delete a specific folder.\n if (!key.endsWith(\"/\")) {\n key = `${key}/`;\n }\n\n const response = await s3Stream.listObject(key);\n const keys = (response.Contents || []).map(c => c.Key).filter(Boolean) as string[];\n console.log(`Found ${keys.length} files.`);\n\n const deleteFilePromises = keys.map(key => s3Stream.deleteObject(key));\n\n await Promise.all(deleteFilePromises);\n console.log(`Successfully deleted ${deleteFilePromises.length} files.`);\n}\n"],"mappings":";;;;;;AAAA;AAEO,eAAeA,cAAc,CAACC,GAAW,EAAiB;EAC7D;EACA,IAAI,CAACA,GAAG,CAACC,QAAQ,CAAC,GAAG,CAAC,EAAE;IACpBD,GAAG,GAAI,GAAEA,GAAI,GAAE;EACnB;EAEA,MAAME,QAAQ,GAAG,MAAMC,kBAAQ,CAACC,UAAU,CAACJ,GAAG,CAAC;EAC/C,MAAMK,IAAI,GAAG,CAACH,QAAQ,CAACI,QAAQ,IAAI,EAAE,EAAEC,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,GAAG,CAAC,CAACC,MAAM,CAACC,OAAO,CAAa;EAClFC,OAAO,CAACC,GAAG,CAAE,SAAQR,IAAI,CAACS,MAAO,SAAQ,CAAC;EAE1C,MAAMC,kBAAkB,GAAGV,IAAI,CAACE,GAAG,CAACP,GAAG,IAAIG,kBAAQ,CAACa,YAAY,CAAChB,GAAG,CAAC,CAAC;EAEtE,MAAMiB,OAAO,CAACC,GAAG,CAACH,kBAAkB,CAAC;EACrCH,OAAO,CAACC,GAAG,CAAE,wBAAuBE,kBAAkB,CAACD,MAAO,SAAQ,CAAC;AAC3E"}
@@ -0,0 +1,7 @@
+ import { ImportData } from "../../types";
+ /**
+ * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
+ * @param zipFileUrl
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
+ */
+ export declare function extractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]>;
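A hedged sketch of how the declared function is meant to be consumed: it takes the (typically pre-signed) URL of the export ZIP and resolves with one ImportData entry per page/block archive found inside it; the exact ImportData shape is defined in ../../types and is not assumed here.

    import { ImportData } from "../../types";
    import { extractAndUploadZipFileContents } from "./extractAndUploadZipFileContents";

    // Illustrative wrapper: download, extract and re-upload, then report how many
    // importable items the export ZIP contained.
    export async function prepareImport(zipFileUrl: string): Promise<ImportData[]> {
        const importDataList = await extractAndUploadZipFileContents(zipFileUrl);
        console.log(`Export ZIP contained ${importDataList.length} importable item(s).`);
        return importDataList;
    }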
@@ -0,0 +1,122 @@
+ "use strict";
+
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ exports.extractAndUploadZipFileContents = extractAndUploadZipFileContents;
+ var _stream = require("stream");
+ var _util = require("util");
+ var _nodeFetch = _interopRequireDefault(require("node-fetch"));
+ var _path = _interopRequireDefault(require("path"));
+ var _yauzl = _interopRequireDefault(require("yauzl"));
+ var _fs = require("fs");
+ var _uniqid = _interopRequireDefault(require("uniqid"));
+ var _error = _interopRequireDefault(require("@webiny/error"));
+ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+ var _extractZipAndUploadToS = require("./extractZipAndUploadToS3");
+ var _getFileNameWithoutExt = require("./getFileNameWithoutExt");
+ var _constants = require("../constants");
+ var _fsExtra = require("fs-extra");
+ const streamPipeline = (0, _util.promisify)(_stream.pipeline);
+
+ /**
+ * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
+ * @param zipFileUrl
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
+ */
+ async function extractAndUploadZipFileContents(zipFileUrl) {
+ const log = console.log;
+ const importDataList = [];
+ const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
+ const response = await (0, _nodeFetch.default)(zipFileUrl);
+ const readStream = response.body;
+ if (!response.ok || !readStream) {
+ throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
+ }
+ const uniquePath = (0, _uniqid.default)("IMPORTS/");
+ // Read export file and download it in the disk
+ const ZIP_FILE_PATH = _path.default.join(_constants.INSTALL_DIR, zipFileName);
+ const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
+ // @ts-ignore
+ await streamPipeline(readStream, writeStream);
+ log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+
+ // Extract the downloaded zip file
+ const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
+ log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
+ await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+
+ // Extract each page/block zip and upload their content's to S3
+ for (let i = 0; i < zipFilePaths.length; i++) {
+ const currentPath = zipFilePaths[i];
+ const dataMap = await (0, _extractZipAndUploadToS.extractZipAndUploadToS3)(currentPath, uniquePath);
+ importDataList.push(dataMap);
+ }
+ log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
+ await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
+ return importDataList;
+ }
+ function extractZipToDisk(exportFileZipPath) {
+ return new Promise((resolve, reject) => {
+ const zipFilePaths = [];
+ const uniqueFolderNameForExport = (0, _getFileNameWithoutExt.getFileNameWithoutExt)(exportFileZipPath);
+ const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(_constants.INSTALL_DIR, uniqueFolderNameForExport);
+ // Make sure DIR exists
+ (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
+ _yauzl.default.open(exportFileZipPath, {
+ lazyEntries: true
+ }, function (err, zipFile) {
+ if (err) {
+ console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
+ reject(err);
+ return;
+ }
+ if (!zipFile) {
+ console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
+ reject("Missing Zip File Resource.");
+ return;
+ }
+ console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
+ zipFile.on("end", function (err) {
+ if (err) {
+ console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
+ reject(err);
+ }
+ resolve(zipFilePaths);
+ });
+ zipFile.readEntry();
+ zipFile.on("entry", function (entry) {
+ console.info(`Processing entry: "${entry.fileName}"`);
+ if (/\/$/.test(entry.fileName)) {
+ // Directory file names end with '/'.
+ // Note that entries for directories themselves are optional.
+ // An entry's fileName implicitly requires its parent directories to exist.
+ zipFile.readEntry();
+ } else {
+ // file entry
+ zipFile.openReadStream(entry, function (err, readStream) {
+ if (err) {
+ console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
+ reject(err);
+ return;
+ }
+ if (!readStream) {
+ console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
+ reject("Missing Read Stream Resource.");
+ return;
+ }
+ const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
+ readStream.on("end", function () {
+ zipFilePaths.push(filePath);
+ zipFile.readEntry();
+ });
+ streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
+ reject(error);
+ });
+ });
+ }
+ });
+ });
+ });
+ }
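The extraction above uses yauzl's lazyEntries mode, in which the next entry is only emitted after readEntry() is called; forgetting to call it again inside the "entry" handler stalls the read loop. A minimal, self-contained sketch of that pattern (listing entry names only, without the S3 upload):

    import yauzl from "yauzl";

    // Minimal sketch of the lazyEntries read loop used above.
    export function listZipEntries(zipPath: string): Promise<string[]> {
        return new Promise((resolve, reject) => {
            yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
                if (err || !zipFile) {
                    return reject(err || new Error("Missing zip file resource."));
                }
                const names: string[] = [];
                zipFile.on("entry", entry => {
                    names.push(entry.fileName);
                    zipFile.readEntry(); // advance to the next entry
                });
                zipFile.on("end", () => resolve(names));
                zipFile.readEntry(); // kick off reading
            });
        });
    }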
@@ -0,0 +1 @@
+ {"version":3,"names":["streamPipeline","promisify","pipeline","extractAndUploadZipFileContents","zipFileUrl","log","console","importDataList","zipFileName","path","basename","split","response","fetch","readStream","body","ok","WebinyError","statusText","uniquePath","uniqueId","ZIP_FILE_PATH","join","INSTALL_DIR","writeStream","createWriteStream","zipFilePaths","extractZipToDisk","deleteFile","i","length","currentPath","dataMap","extractZipAndUploadToS3","push","dirname","exportFileZipPath","Promise","resolve","reject","uniqueFolderNameForExport","getFileNameWithoutExt","EXPORT_FILE_EXTRACTION_PATH","ensureDirSync","yauzl","open","lazyEntries","err","zipFile","warn","info","entryCount","on","readEntry","entry","fileName","test","openReadStream","filePath","catch","error"],"sources":["extractAndUploadZipFileContents.ts"],"sourcesContent":["import { pipeline } from \"stream\";\nimport { promisify } from \"util\";\nimport fetch from \"node-fetch\";\nimport path from \"path\";\nimport yauzl from \"yauzl\";\nimport { createWriteStream } from \"fs\";\nimport uniqueId from \"uniqid\";\nimport WebinyError from \"@webiny/error\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { extractZipAndUploadToS3 } from \"~/import/utils/extractZipAndUploadToS3\";\nimport { getFileNameWithoutExt } from \"~/import/utils/getFileNameWithoutExt\";\nimport { ImportData } from \"~/types\";\nimport { INSTALL_DIR } from \"~/import/constants\";\nimport { ensureDirSync } from \"fs-extra\";\n\nconst streamPipeline = promisify(pipeline);\n\n/**\n * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.\n * @param zipFileUrl\n * @return ImportData S3 file keys for all uploaded assets group by page/block.\n */\nexport async function extractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]> {\n const log = console.log;\n const importDataList = [];\n\n const zipFileName = path.basename(zipFileUrl).split(\"?\")[0];\n\n const response = await fetch(zipFileUrl);\n const readStream = response.body;\n if (!response.ok || !readStream) {\n throw new WebinyError(`Unable to downloading file: \"${zipFileUrl}\"`, response.statusText);\n }\n\n const uniquePath = uniqueId(\"IMPORTS/\");\n // Read export file and download it in the disk\n const ZIP_FILE_PATH = path.join(INSTALL_DIR, zipFileName);\n\n const writeStream = createWriteStream(ZIP_FILE_PATH);\n // @ts-ignore\n await streamPipeline(readStream, writeStream);\n log(`Downloaded file \"${zipFileName}\" at ${ZIP_FILE_PATH}`);\n\n // Extract the downloaded zip file\n const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);\n\n log(`Removing ZIP file \"${zipFileUrl}\" from ${ZIP_FILE_PATH}`);\n await deleteFile(ZIP_FILE_PATH);\n\n // Extract each page/block zip and upload their content's to S3\n for (let i = 0; i < zipFilePaths.length; i++) {\n const currentPath = zipFilePaths[i];\n const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);\n importDataList.push(dataMap);\n }\n log(\"Removing all ZIP files located at \", path.dirname(zipFilePaths[0]));\n await deleteFile(path.dirname(zipFilePaths[0]));\n\n return importDataList;\n}\n\nfunction extractZipToDisk(exportFileZipPath: string): Promise<string[]> {\n return new Promise((resolve, reject) => {\n const zipFilePaths: string[] = [];\n const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);\n const EXPORT_FILE_EXTRACTION_PATH = path.join(INSTALL_DIR, 
uniqueFolderNameForExport);\n // Make sure DIR exists\n ensureDirSync(EXPORT_FILE_EXTRACTION_PATH);\n\n yauzl.open(exportFileZipPath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", exportFileZipPath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Missing zip file resource for path: \" + exportFileZipPath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn(\"ERROR: Failed on END event for file: \", exportFileZipPath, err);\n reject(err);\n }\n resolve(zipFilePaths);\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed to openReadStream for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\n \"ERROR: Missing Read Stream Resource when extracting to disk.\"\n );\n reject(\"Missing Read Stream Resource.\");\n return;\n }\n\n const filePath = path.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);\n\n readStream.on(\"end\", function () {\n zipFilePaths.push(filePath);\n zipFile.readEntry();\n });\n\n streamPipeline(readStream, createWriteStream(filePath)).catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n 
});\n}\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA,MAAMA,cAAc,GAAG,IAAAC,eAAS,EAACC,gBAAQ,CAAC;;AAE1C;AACA;AACA;AACA;AACA;AACO,eAAeC,+BAA+B,CAACC,UAAkB,EAAyB;EAC7F,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,MAAME,cAAc,GAAG,EAAE;EAEzB,MAAMC,WAAW,GAAGC,aAAI,CAACC,QAAQ,CAACN,UAAU,CAAC,CAACO,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAE3D,MAAMC,QAAQ,GAAG,MAAM,IAAAC,kBAAK,EAACT,UAAU,CAAC;EACxC,MAAMU,UAAU,GAAGF,QAAQ,CAACG,IAAI;EAChC,IAAI,CAACH,QAAQ,CAACI,EAAE,IAAI,CAACF,UAAU,EAAE;IAC7B,MAAM,IAAIG,cAAW,CAAE,gCAA+Bb,UAAW,GAAE,EAAEQ,QAAQ,CAACM,UAAU,CAAC;EAC7F;EAEA,MAAMC,UAAU,GAAG,IAAAC,eAAQ,EAAC,UAAU,CAAC;EACvC;EACA,MAAMC,aAAa,GAAGZ,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEf,WAAW,CAAC;EAEzD,MAAMgB,WAAW,GAAG,IAAAC,qBAAiB,EAACJ,aAAa,CAAC;EACpD;EACA,MAAMrB,cAAc,CAACc,UAAU,EAAEU,WAAW,CAAC;EAC7CnB,GAAG,CAAE,oBAAmBG,WAAY,QAAOa,aAAc,EAAC,CAAC;;EAE3D;EACA,MAAMK,YAAY,GAAG,MAAMC,gBAAgB,CAACN,aAAa,CAAC;EAE1DhB,GAAG,CAAE,sBAAqBD,UAAW,UAASiB,aAAc,EAAC,CAAC;EAC9D,MAAM,IAAAO,gCAAU,EAACP,aAAa,CAAC;;EAE/B;EACA,KAAK,IAAIQ,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,YAAY,CAACI,MAAM,EAAED,CAAC,EAAE,EAAE;IAC1C,MAAME,WAAW,GAAGL,YAAY,CAACG,CAAC,CAAC;IACnC,MAAMG,OAAO,GAAG,MAAM,IAAAC,+CAAuB,EAACF,WAAW,EAAEZ,UAAU,CAAC;IACtEZ,cAAc,CAAC2B,IAAI,CAACF,OAAO,CAAC;EAChC;EACA3B,GAAG,CAAC,oCAAoC,EAAEI,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EACxE,MAAM,IAAAE,gCAAU,EAACnB,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EAE/C,OAAOnB,cAAc;AACzB;AAEA,SAASoB,gBAAgB,CAACS,iBAAyB,EAAqB;EACpE,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACpC,MAAMb,YAAsB,GAAG,EAAE;IACjC,MAAMc,yBAAyB,GAAG,IAAAC,4CAAqB,EAACL,iBAAiB,CAAC;IAC1E,MAAMM,2BAA2B,GAAGjC,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEiB,yBAAyB,CAAC;IACrF;IACA,IAAAG,sBAAa,EAACD,2BAA2B,CAAC;IAE1CE,cAAK,CAACC,IAAI,CAACT,iBAAiB,EAAE;MAAEU,WAAW,EAAE;IAAK,CAAC,EAAE,UAAUC,GAAG,EAAEC,OAAO,EAAE;MACzE,IAAID,GAAG,EAAE;QACLzC,OAAO,CAAC2C,IAAI,CAAC,gCAAgC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;QACtER,MAAM,CAACQ,GAAG,CAAC;QACX;MACJ;MACA,IAAI,CAACC,OAAO,EAAE;QACV1C,OAAO,CAACD,GAAG,CAAC,6CAA6C,GAAG+B,iBAAiB,CAAC;QAC9EG,MAAM,CAAC,4BAA4B,CAAC;QACpC;MACJ;MAEAjC,OAAO,CAAC4C,IAAI,CAAE,yBAAwBF,OAAO,CAACG,UAAW,WAAU,CAAC;MAEpEH,OAAO,CAACI,EAAE,CAAC,KAAK,EAAE,UAAUL,GAAG,EAAE;QAC7B,IAAIA,GAAG,EAAE;UACLzC,OAAO,CAAC2C,IAAI,CAAC,uCAAuC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;UAC7ER,MAAM,CAACQ,GAAG,CAAC;QACf;QACAT,OAAO,CAACZ,YAAY,CAAC;MACzB,CAAC,CAAC;MAEFsB,OAAO,CAACK,SAAS,EAAE;MAEnBL,OAAO,CAACI,EAAE,CAAC,OAAO,EAAE,UAAUE,KAAK,EAAE;QACjChD,OAAO,CAAC4C,IAAI,CAAE,sBAAqBI,KAAK,CAACC,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACC,IAAI,CAACF,KAAK,CAACC,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACAP,OAAO,CAACK,SAAS,EAAE;QACvB,CAAC,MAAM;UACH;UACAL,OAAO,CAACS,cAAc,CAACH,KAAK,EAAE,UAAUP,GAAG,EAAEjC,UAAU,EAAE;YACrD,IAAIiC,GAAG,EAAE;cACLzC,OAAO,CAAC2C,IAAI,CACR,4CAA4C,EAC5CK,KAAK,CAACC,QAAQ,EACdR,GAAG,CACN;cACDR,MAAM,CAACQ,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAACjC,UAAU,EAAE;cACbR,OAAO,CAACD,GAAG,CACP,8DAA8D,CACjE;cACDkC,MAAM,CAAC,+BAA+B,CAAC;cACvC;YACJ;YAEA,MAAMmB,QAAQ,GAAGjD,aAAI,CAACa,IAAI,CAACoB,2BAA2B,EAAEY,KAAK,CAACC,QAAQ,CAAC;YAEvEzC,UAAU,CAACsC,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7B1B,YAAY,CAACQ,IAAI,CAACwB,QAAQ,CAAC;cAC3BV,OAAO,CAACK,SAAS,EAAE;YACvB,CAAC,CAAC;YAEFrD,cAAc,CAACc,UAAU,EAAE,IAAAW,qBAAiB,EAACiC,QAAQ,CAAC,CAAC,CAACC,KAAK,CAACC,KAAK,IAAI;cACnErB,MAAM,CAACqB,KAAK,CAAC;YACjB,CAAC,CAAC;UACN,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
@@ -0,0 +1,2 @@
+ import { ImportData } from "../../types";
+ export declare function extractZipAndUploadToS3(dataZipFilePath: string, uniquePath: string): Promise<ImportData>;
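For completeness, a hedged sketch of how this declaration pairs with the extractor above: each per-page/block ZIP written to disk is passed to extractZipAndUploadToS3 together with the unique "IMPORTS/" prefix generated for the current run, mirroring the loop in extractAndUploadZipFileContents:

    import { ImportData } from "../../types";
    import { extractZipAndUploadToS3 } from "./extractZipAndUploadToS3";

    // Illustrative only: upload every extracted archive under one unique prefix.
    export async function uploadExtractedArchives(
        zipFilePaths: string[],
        uniquePath: string
    ): Promise<ImportData[]> {
        const results: ImportData[] = [];
        for (const currentPath of zipFilePaths) {
            results.push(await extractZipAndUploadToS3(currentPath, uniquePath));
        }
        return results;
    }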