@webiny/api-page-builder-import-export 0.0.0-unstable.8acc9e8892 → 0.0.0-unstable.8feaff8c32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/client.js.map +1 -1
  2. package/export/combine/blocksHandler.js.map +1 -1
  3. package/export/combine/formsHandler.js.map +1 -1
  4. package/export/combine/index.js +20 -18
  5. package/export/combine/index.js.map +1 -1
  6. package/export/combine/pagesHandler.js.map +1 -1
  7. package/export/combine/templatesHandler.js.map +1 -1
  8. package/export/process/blocksHandler.js +6 -1
  9. package/export/process/blocksHandler.js.map +1 -1
  10. package/export/process/formsHandler.js.map +1 -1
  11. package/export/process/index.js +20 -18
  12. package/export/process/index.js.map +1 -1
  13. package/export/process/pagesHandler.js.map +1 -1
  14. package/export/process/templatesHandler.js.map +1 -1
  15. package/export/s3Stream.js.map +1 -1
  16. package/export/utils.d.ts +3 -2
  17. package/export/utils.js +16 -4
  18. package/export/utils.js.map +1 -1
  19. package/export/zipper.js.map +1 -1
  20. package/graphql/crud/blocks.crud.js +8 -14
  21. package/graphql/crud/blocks.crud.js.map +1 -1
  22. package/graphql/crud/forms.crud.js +14 -9
  23. package/graphql/crud/forms.crud.js.map +1 -1
  24. package/graphql/crud/importExportTasks.crud.js +39 -38
  25. package/graphql/crud/importExportTasks.crud.js.map +1 -1
  26. package/graphql/crud/pages.crud.js +8 -4
  27. package/graphql/crud/pages.crud.js.map +1 -1
  28. package/graphql/crud/templates.crud.js +8 -4
  29. package/graphql/crud/templates.crud.js.map +1 -1
  30. package/graphql/crud.js.map +1 -1
  31. package/graphql/graphql/blocks.gql.js +1 -1
  32. package/graphql/graphql/blocks.gql.js.map +1 -1
  33. package/graphql/graphql/forms.gql.js.map +1 -1
  34. package/graphql/graphql/importExportTasks.gql.js.map +1 -1
  35. package/graphql/graphql/pages.gql.js.map +1 -1
  36. package/graphql/graphql/templates.gql.js.map +1 -1
  37. package/graphql/graphql/utils/resolve.d.ts +1 -1
  38. package/graphql/graphql/utils/resolve.js.map +1 -1
  39. package/graphql/graphql.js.map +1 -1
  40. package/graphql/index.js.map +1 -1
  41. package/graphql/types.d.ts +0 -1
  42. package/graphql/types.js.map +1 -1
  43. package/import/constants.js.map +1 -1
  44. package/import/create/blocksHandler.js +0 -2
  45. package/import/create/blocksHandler.js.map +1 -1
  46. package/import/create/formsHandler.js.map +1 -1
  47. package/import/create/index.js +20 -18
  48. package/import/create/index.js.map +1 -1
  49. package/import/create/pagesHandler.js.map +1 -1
  50. package/import/create/templatesHandler.js.map +1 -1
  51. package/import/process/blocks/blocksHandler.js +1 -2
  52. package/import/process/blocks/blocksHandler.js.map +1 -1
  53. package/import/process/blocks/importBlock.d.ts +2 -2
  54. package/import/process/blocks/importBlock.js +16 -2
  55. package/import/process/blocks/importBlock.js.map +1 -1
  56. package/import/process/forms/formsHandler.js.map +1 -1
  57. package/import/process/forms/importForm.js.map +1 -1
  58. package/import/process/index.js +20 -18
  59. package/import/process/index.js.map +1 -1
  60. package/import/process/pages/importPage.js +4 -4
  61. package/import/process/pages/importPage.js.map +1 -1
  62. package/import/process/pages/pagesHandler.js.map +1 -1
  63. package/import/process/templates/importTemplate.js.map +1 -1
  64. package/import/process/templates/templatesHandler.js.map +1 -1
  65. package/import/utils/deleteS3Folder.js.map +1 -1
  66. package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
  67. package/import/utils/extractZipAndUploadToS3.js.map +1 -1
  68. package/import/utils/getFileNameWithoutExt.js.map +1 -1
  69. package/import/utils/index.js.map +1 -1
  70. package/import/utils/initialStats.js.map +1 -1
  71. package/import/utils/prepareDataDirMap.js.map +1 -1
  72. package/import/utils/updateFilesInData.js +2 -1
  73. package/import/utils/updateFilesInData.js.map +1 -1
  74. package/import/utils/uploadAssets.js +47 -11
  75. package/import/utils/uploadAssets.js.map +1 -1
  76. package/import/utils/uploadFilesFromS3.js.map +1 -1
  77. package/mockSecurity.js +0 -1
  78. package/mockSecurity.js.map +1 -1
  79. package/package.json +33 -37
  80. package/types.js +60 -8
  81. package/types.js.map +1 -1
  82. package/import/process/blocksHandler.d.ts +0 -3
  83. package/import/process/blocksHandler.js +0 -175
  84. package/import/process/blocksHandler.js.map +0 -1
  85. package/import/process/pagesHandler.d.ts +0 -3
  86. package/import/process/pagesHandler.js +0 -180
  87. package/import/process/pagesHandler.js.map +0 -1
  88. package/import/utils.d.ts +0 -49
  89. package/import/utils.js +0 -641
  90. package/import/utils.js.map +0 -1
package/import/utils/uploadAssets.js CHANGED
@@ -6,10 +6,18 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.uploadAssets = void 0;
  var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
- var _mdbid = _interopRequireDefault(require("mdbid"));
+ var _utils = require("@webiny/utils");
  var _uploadFilesFromS = require("./uploadFilesFromS3");
- // @ts-ignore
-
+ function notAPreviouslyImportedFile(importedImages) {
+ return file => {
+ return !importedImages.some(existingImportedImage => existingImportedImage.meta.originalKey === file.key);
+ };
+ }
+ function notAnExistingFile(existingFiles) {
+ return file => {
+ return !existingFiles.some(existingFile => existingFile.key === file.key);
+ };
+ }
  const uploadAssets = async params => {
  const {
  context,
@@ -27,22 +35,50 @@ const uploadAssets = async params => {
  return oldIdToNewFileMap;
  }

+ // Check if the requested files were already imported in the past.
+ const [importedImages] = await context.fileManager.listFiles({
+ where: {
+ meta: {
+ originalKey_in: files.map(file => file.key)
+ }
+ }
+ });
+
+ // Link files that were already imported.
+ for (const importedImage of importedImages) {
+ const fileBeingImported = files.find(file => file.key === importedImage.meta.originalKey);
+ if (fileBeingImported) {
+ oldIdToNewFileMap.set(fileBeingImported.id, importedImage);
+ }
+ }
+
+ // Check if files with such IDs already exist.
+ const [existingFiles] = await context.fileManager.listFiles({
+ where: {
+ id_in: files.map(file => file.id)
+ }
+ });
+ const newFilesToImport = files.filter(notAnExistingFile(existingFiles)).filter(notAPreviouslyImportedFile(importedImages));
+
  // A map of temporary file keys (created during ZIP upload) to permanent file keys.
  const uploadFileMap = new Map();

  // Array of file inputs, to insert into the DB.
  const createFilesInput = [];
- for (const oldFile of files) {
- const id = (0, _mdbid.default)();
- // We replace the old file ID with a new one.
- const newKey = `${id}/${oldFile.key.replace(`${oldFile.id}/`, "")}`;
- const newFile = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, oldFile), {}, {
+ for (const toImport of newFilesToImport) {
+ // We generate a new file id, key, and add `meta.originalKey` property to prevent duplicates on future imports.
+ const id = (0, _utils.mdbid)();
+ const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, "")}`;
+ const newFile = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, toImport), {}, {
  id,
- key: newKey
+ key: newKey,
+ meta: (0, _objectSpread2.default)((0, _objectSpread2.default)({}, toImport.meta), {}, {
+ originalKey: toImport.key
+ })
  });
  createFilesInput.push(newFile);
- oldIdToNewFileMap.set(oldFile.id, newFile);
- uploadFileMap.set(fileUploadsData.assets[oldFile.key], newFile);
+ oldIdToNewFileMap.set(toImport.id, newFile);
+ uploadFileMap.set(fileUploadsData.assets[toImport.key], newFile);
  }
  await (0, _uploadFilesFromS.uploadFilesFromS3)(uploadFileMap);
  await context.fileManager.createFilesInBatch(createFilesInput);
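The functional change in uploadAssets.js is de-duplication of assets across repeated imports. Below is a condensed TypeScript sketch of that flow, reconstructed from the compiled output above and the updated source map; prepareNewFiles is a hypothetical helper name used here for illustration, not an export of the package.

    import { mdbid } from "@webiny/utils";
    import { File, FileInput } from "@webiny/api-file-manager/types";

    // Drop files that a previous import already created; those records carry a
    // meta.originalKey back-reference pointing at the key from the export.
    const notAPreviouslyImportedFile = (importedImages: File[]) => (file: File) =>
        !importedImages.some(imported => imported.meta.originalKey === file.key);

    // Drop files that already exist in File Manager.
    const notAnExistingFile = (existingFiles: File[]) => (file: File) =>
        !existingFiles.some(existing => existing.key === file.key);

    // Hypothetical helper: keep only genuinely new files and give each a fresh
    // id/key, recording meta.originalKey so future imports can detect them.
    export const prepareNewFiles = (
        files: File[],
        existingFiles: File[],
        importedImages: File[]
    ): FileInput[] => {
        return files
            .filter(notAnExistingFile(existingFiles))
            .filter(notAPreviouslyImportedFile(importedImages))
            .map(toImport => {
                const id = mdbid();
                return {
                    ...toImport,
                    id,
                    key: `${id}/${toImport.key.replace(`${toImport.id}/`, "")}`,
                    meta: { ...toImport.meta, originalKey: toImport.key }
                };
            });
    };

Files caught by either check are linked to the existing File Manager record via oldIdToNewFileMap (see the loop in the hunk above) instead of being created again.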
package/import/utils/uploadAssets.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["uploadAssets","params","context","files","fileUploadsData","oldIdToNewFileMap","Map","process","env","NODE_ENV","uploadFileMap","createFilesInput","oldFile","id","mdbid","newKey","key","replace","newFile","push","set","assets","uploadFilesFromS3","fileManager","createFilesInBatch"],"sources":["uploadAssets.ts"],"sourcesContent":["// @ts-ignore\nimport mdbid from \"mdbid\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { File, FileInput } from \"@webiny/api-file-manager/types\";\nimport { UploadFileMap, uploadFilesFromS3 } from \"~/import/utils/uploadFilesFromS3\";\nimport { FileUploadsData } from \"~/types\";\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n files: File[];\n fileUploadsData: FileUploadsData;\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams) => {\n const { context, files, fileUploadsData } = params;\n\n const oldIdToNewFileMap = new Map<string, FileInput>();\n\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return oldIdToNewFileMap;\n }\n\n // A map of temporary file keys (created during ZIP upload) to permanent file keys.\n const uploadFileMap: UploadFileMap = new Map();\n\n // Array of file inputs, to insert into the DB.\n const createFilesInput: FileInput[] = [];\n\n for (const oldFile of files) {\n const id = mdbid();\n // We replace the old file ID with a new one.\n const newKey = `${id}/${oldFile.key.replace(`${oldFile.id}/`, \"\")}`;\n const newFile: FileInput = { ...oldFile, id, key: newKey };\n\n createFilesInput.push(newFile);\n oldIdToNewFileMap.set(oldFile.id, newFile);\n uploadFileMap.set(fileUploadsData.assets[oldFile.key], newFile);\n }\n\n await uploadFilesFromS3(uploadFileMap);\n\n await context.fileManager.createFilesInBatch(createFilesInput);\n\n return oldIdToNewFileMap;\n};\n"],"mappings":";;;;;;;;AACA;AAGA;AAJA;;AAaO,MAAMA,YAAY,GAAG,MAAOC,MAA0B,IAAK;EAC9D,MAAM;IAAEC,OAAO;IAAEC,KAAK;IAAEC;EAAgB,CAAC,GAAGH,MAAM;EAElD,MAAMI,iBAAiB,GAAG,IAAIC,GAAG,EAAqB;;EAEtD;AACJ;AACA;AACA;AACA;EACI,IAAIC,OAAO,CAACC,GAAG,CAACC,QAAQ,KAAK,MAAM,EAAE;IACjC,OAAOJ,iBAAiB;EAC5B;;EAEA;EACA,MAAMK,aAA4B,GAAG,IAAIJ,GAAG,EAAE;;EAE9C;EACA,MAAMK,gBAA6B,GAAG,EAAE;EAExC,KAAK,MAAMC,OAAO,IAAIT,KAAK,EAAE;IACzB,MAAMU,EAAE,GAAG,IAAAC,cAAK,GAAE;IAClB;IACA,MAAMC,MAAM,GAAI,GAAEF,EAAG,IAAGD,OAAO,CAACI,GAAG,CAACC,OAAO,CAAE,GAAEL,OAAO,CAACC,EAAG,GAAE,EAAE,EAAE,CAAE,EAAC;IACnE,MAAMK,OAAkB,+DAAQN,OAAO;MAAEC,EAAE;MAAEG,GAAG,EAAED;IAAM,EAAE;IAE1DJ,gBAAgB,CAACQ,IAAI,CAACD,OAAO,CAAC;IAC9Bb,iBAAiB,CAACe,GAAG,CAACR,OAAO,CAACC,EAAE,EAAEK,OAAO,CAAC;IAC1CR,aAAa,CAACU,GAAG,CAAChB,eAAe,CAACiB,MAAM,CAACT,OAAO,CAACI,GAAG,CAAC,EAAEE,OAAO,CAAC;EACnE;EAEA,MAAM,IAAAI,mCAAiB,EAACZ,aAAa,CAAC;EAEtC,MAAMR,OAAO,CAACqB,WAAW,CAACC,kBAAkB,CAACb,gBAAgB,CAAC;EAE9D,OAAON,iBAAiB;AAC5B,CAAC;AAAC"}
+ {"version":3,"names":["_utils","require","_uploadFilesFromS","notAPreviouslyImportedFile","importedImages","file","some","existingImportedImage","meta","originalKey","key","notAnExistingFile","existingFiles","existingFile","uploadAssets","params","context","files","fileUploadsData","oldIdToNewFileMap","Map","process","env","NODE_ENV","fileManager","listFiles","where","originalKey_in","map","importedImage","fileBeingImported","find","set","id","id_in","newFilesToImport","filter","uploadFileMap","createFilesInput","toImport","mdbid","newKey","replace","newFile","_objectSpread2","default","push","assets","uploadFilesFromS3","createFilesInBatch","exports"],"sources":["uploadAssets.ts"],"sourcesContent":["import { mdbid } from \"@webiny/utils\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { File, FileInput } from \"@webiny/api-file-manager/types\";\nimport { UploadFileMap, uploadFilesFromS3 } from \"~/import/utils/uploadFilesFromS3\";\nimport { FileUploadsData } from \"~/types\";\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n files: File[];\n fileUploadsData: FileUploadsData;\n}\n\nfunction notAPreviouslyImportedFile(importedImages: File[]) {\n return (file: File) => {\n return !importedImages.some(\n existingImportedImage => existingImportedImage.meta.originalKey === file.key\n );\n };\n}\n\nfunction notAnExistingFile(existingFiles: File[]) {\n return (file: File) => {\n return !existingFiles.some(existingFile => existingFile.key === file.key);\n };\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams) => {\n const { context, files, fileUploadsData } = params;\n\n const oldIdToNewFileMap = new Map<string, FileInput>();\n\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return oldIdToNewFileMap;\n }\n\n // Check if the requested files were already imported in the past.\n const [importedImages] = await context.fileManager.listFiles({\n where: { meta: { originalKey_in: files.map(file => file.key) } }\n });\n\n // Link files that were already imported.\n for (const importedImage of importedImages) {\n const fileBeingImported = files.find(file => file.key === importedImage.meta.originalKey);\n\n if (fileBeingImported) {\n oldIdToNewFileMap.set(fileBeingImported.id, importedImage);\n }\n }\n\n // Check if files with such IDs already exist.\n const [existingFiles] = await context.fileManager.listFiles({\n where: { id_in: files.map(file => file.id) }\n });\n\n const newFilesToImport = files\n .filter(notAnExistingFile(existingFiles))\n .filter(notAPreviouslyImportedFile(importedImages));\n\n // A map of temporary file keys (created during ZIP upload) to permanent file keys.\n const uploadFileMap: UploadFileMap = new Map();\n\n // Array of file inputs, to insert into the DB.\n const createFilesInput: FileInput[] = [];\n\n for (const toImport of newFilesToImport) {\n // We generate a new file id, key, and add `meta.originalKey` property to prevent duplicates on future imports.\n const id = mdbid();\n const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, \"\")}`;\n const newFile: FileInput = {\n ...toImport,\n id,\n key: newKey,\n meta: { ...toImport.meta, originalKey: toImport.key }\n };\n\n createFilesInput.push(newFile);\n oldIdToNewFileMap.set(toImport.id, newFile);\n 
uploadFileMap.set(fileUploadsData.assets[toImport.key], newFile);\n }\n\n await uploadFilesFromS3(uploadFileMap);\n\n await context.fileManager.createFilesInBatch(createFilesInput);\n\n return oldIdToNewFileMap;\n};\n"],"mappings":";;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAGA,IAAAC,iBAAA,GAAAD,OAAA;AASA,SAASE,0BAA0BA,CAACC,cAAsB,EAAE;EACxD,OAAQC,IAAU,IAAK;IACnB,OAAO,CAACD,cAAc,CAACE,IAAI,CACvBC,qBAAqB,IAAIA,qBAAqB,CAACC,IAAI,CAACC,WAAW,KAAKJ,IAAI,CAACK,GAC7E,CAAC;EACL,CAAC;AACL;AAEA,SAASC,iBAAiBA,CAACC,aAAqB,EAAE;EAC9C,OAAQP,IAAU,IAAK;IACnB,OAAO,CAACO,aAAa,CAACN,IAAI,CAACO,YAAY,IAAIA,YAAY,CAACH,GAAG,KAAKL,IAAI,CAACK,GAAG,CAAC;EAC7E,CAAC;AACL;AAEO,MAAMI,YAAY,GAAG,MAAOC,MAA0B,IAAK;EAC9D,MAAM;IAAEC,OAAO;IAAEC,KAAK;IAAEC;EAAgB,CAAC,GAAGH,MAAM;EAElD,MAAMI,iBAAiB,GAAG,IAAIC,GAAG,CAAoB,CAAC;;EAEtD;AACJ;AACA;AACA;AACA;EACI,IAAIC,OAAO,CAACC,GAAG,CAACC,QAAQ,KAAK,MAAM,EAAE;IACjC,OAAOJ,iBAAiB;EAC5B;;EAEA;EACA,MAAM,CAACf,cAAc,CAAC,GAAG,MAAMY,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACzDC,KAAK,EAAE;MAAElB,IAAI,EAAE;QAAEmB,cAAc,EAAEV,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAACK,GAAG;MAAE;IAAE;EACnE,CAAC,CAAC;;EAEF;EACA,KAAK,MAAMmB,aAAa,IAAIzB,cAAc,EAAE;IACxC,MAAM0B,iBAAiB,GAAGb,KAAK,CAACc,IAAI,CAAC1B,IAAI,IAAIA,IAAI,CAACK,GAAG,KAAKmB,aAAa,CAACrB,IAAI,CAACC,WAAW,CAAC;IAEzF,IAAIqB,iBAAiB,EAAE;MACnBX,iBAAiB,CAACa,GAAG,CAACF,iBAAiB,CAACG,EAAE,EAAEJ,aAAa,CAAC;IAC9D;EACJ;;EAEA;EACA,MAAM,CAACjB,aAAa,CAAC,GAAG,MAAMI,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACxDC,KAAK,EAAE;MAAEQ,KAAK,EAAEjB,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAAC4B,EAAE;IAAE;EAC/C,CAAC,CAAC;EAEF,MAAME,gBAAgB,GAAGlB,KAAK,CACzBmB,MAAM,CAACzB,iBAAiB,CAACC,aAAa,CAAC,CAAC,CACxCwB,MAAM,CAACjC,0BAA0B,CAACC,cAAc,CAAC,CAAC;;EAEvD;EACA,MAAMiC,aAA4B,GAAG,IAAIjB,GAAG,CAAC,CAAC;;EAE9C;EACA,MAAMkB,gBAA6B,GAAG,EAAE;EAExC,KAAK,MAAMC,QAAQ,IAAIJ,gBAAgB,EAAE;IACrC;IACA,MAAMF,EAAE,GAAG,IAAAO,YAAK,EAAC,CAAC;IAClB,MAAMC,MAAM,GAAI,GAAER,EAAG,IAAGM,QAAQ,CAAC7B,GAAG,CAACgC,OAAO,CAAE,GAAEH,QAAQ,CAACN,EAAG,GAAE,EAAE,EAAE,CAAE,EAAC;IACrE,MAAMU,OAAkB,OAAAC,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MACjBN,QAAQ;MACXN,EAAE;MACFvB,GAAG,EAAE+B,MAAM;MACXjC,IAAI,MAAAoC,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MAAON,QAAQ,CAAC/B,IAAI;QAAEC,WAAW,EAAE8B,QAAQ,CAAC7B;MAAG;IAAE,EACxD;IAED4B,gBAAgB,CAACQ,IAAI,CAACH,OAAO,CAAC;IAC9BxB,iBAAiB,CAACa,GAAG,CAACO,QAAQ,CAACN,EAAE,EAAEU,OAAO,CAAC;IAC3CN,aAAa,CAACL,GAAG,CAACd,eAAe,CAAC6B,MAAM,CAACR,QAAQ,CAAC7B,GAAG,CAAC,EAAEiC,OAAO,CAAC;EACpE;EAEA,MAAM,IAAAK,mCAAiB,EAACX,aAAa,CAAC;EAEtC,MAAMrB,OAAO,CAACQ,WAAW,CAACyB,kBAAkB,CAACX,gBAAgB,CAAC;EAE9D,OAAOnB,iBAAiB;AAC5B,CAAC;AAAC+B,OAAA,CAAApC,YAAA,GAAAA,YAAA"}
package/import/utils/uploadFilesFromS3.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["uploadFilesFromS3","fileMap","promises","source","target","Array","from","entries","readStream","s3Stream","ws","writeStream","key","type","pipe","streamPassThrough","push","streamPassThroughUploadPromise","console","log","Promise","all"],"sources":["uploadFilesFromS3.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\n\nexport type UploadFileMap = Map<string, FileInput>;\n\nexport async function uploadFilesFromS3(fileMap: UploadFileMap) {\n const promises = [];\n for (const [source, target] of Array.from(fileMap.entries())) {\n // Read file.\n const readStream = s3Stream.readStream(source);\n\n const ws = s3Stream.writeStream(target.key, target.type);\n readStream.pipe(ws.streamPassThrough);\n promises.push(ws.streamPassThroughUploadPromise);\n\n console.log(`Successfully queued file \"${target.key}\"`);\n }\n\n return Promise.all(promises);\n}\n"],"mappings":";;;;;;AAAA;AAKO,eAAeA,iBAAiB,CAACC,OAAsB,EAAE;EAC5D,MAAMC,QAAQ,GAAG,EAAE;EACnB,KAAK,MAAM,CAACC,MAAM,EAAEC,MAAM,CAAC,IAAIC,KAAK,CAACC,IAAI,CAACL,OAAO,CAACM,OAAO,EAAE,CAAC,EAAE;IAC1D;IACA,MAAMC,UAAU,GAAGC,kBAAQ,CAACD,UAAU,CAACL,MAAM,CAAC;IAE9C,MAAMO,EAAE,GAAGD,kBAAQ,CAACE,WAAW,CAACP,MAAM,CAACQ,GAAG,EAAER,MAAM,CAACS,IAAI,CAAC;IACxDL,UAAU,CAACM,IAAI,CAACJ,EAAE,CAACK,iBAAiB,CAAC;IACrCb,QAAQ,CAACc,IAAI,CAACN,EAAE,CAACO,8BAA8B,CAAC;IAEhDC,OAAO,CAACC,GAAG,CAAE,6BAA4Bf,MAAM,CAACQ,GAAI,GAAE,CAAC;EAC3D;EAEA,OAAOQ,OAAO,CAACC,GAAG,CAACnB,QAAQ,CAAC;AAChC"}
+ {"version":3,"names":["_s3Stream","require","uploadFilesFromS3","fileMap","promises","source","target","Array","from","entries","readStream","s3Stream","ws","writeStream","key","type","pipe","streamPassThrough","push","streamPassThroughUploadPromise","console","log","Promise","all"],"sources":["uploadFilesFromS3.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\n\nexport type UploadFileMap = Map<string, FileInput>;\n\nexport async function uploadFilesFromS3(fileMap: UploadFileMap) {\n const promises = [];\n for (const [source, target] of Array.from(fileMap.entries())) {\n // Read file.\n const readStream = s3Stream.readStream(source);\n\n const ws = s3Stream.writeStream(target.key, target.type);\n readStream.pipe(ws.streamPassThrough);\n promises.push(ws.streamPassThroughUploadPromise);\n\n console.log(`Successfully queued file \"${target.key}\"`);\n }\n\n return Promise.all(promises);\n}\n"],"mappings":";;;;;;AAAA,IAAAA,SAAA,GAAAC,OAAA;AAKO,eAAeC,iBAAiBA,CAACC,OAAsB,EAAE;EAC5D,MAAMC,QAAQ,GAAG,EAAE;EACnB,KAAK,MAAM,CAACC,MAAM,EAAEC,MAAM,CAAC,IAAIC,KAAK,CAACC,IAAI,CAACL,OAAO,CAACM,OAAO,CAAC,CAAC,CAAC,EAAE;IAC1D;IACA,MAAMC,UAAU,GAAGC,kBAAQ,CAACD,UAAU,CAACL,MAAM,CAAC;IAE9C,MAAMO,EAAE,GAAGD,kBAAQ,CAACE,WAAW,CAACP,MAAM,CAACQ,GAAG,EAAER,MAAM,CAACS,IAAI,CAAC;IACxDL,UAAU,CAACM,IAAI,CAACJ,EAAE,CAACK,iBAAiB,CAAC;IACrCb,QAAQ,CAACc,IAAI,CAACN,EAAE,CAACO,8BAA8B,CAAC;IAEhDC,OAAO,CAACC,GAAG,CAAE,6BAA4Bf,MAAM,CAACQ,GAAI,GAAE,CAAC;EAC3D;EAEA,OAAOQ,OAAO,CAACC,GAAG,CAACnB,QAAQ,CAAC;AAChC"}
package/mockSecurity.js CHANGED
@@ -5,7 +5,6 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.mockSecurity = void 0;
  const mockSecurity = (identity, context) => {
- context.security.disableAuthorization();
  context.security.setIdentity(identity);
  };
  exports.mockSecurity = mockSecurity;
package/mockSecurity.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["mockSecurity","identity","context","security","disableAuthorization","setIdentity"],"sources":["mockSecurity.ts"],"sourcesContent":["import { SecurityContext, SecurityIdentity } from \"@webiny/api-security/types\";\n\nexport const mockSecurity = (identity: SecurityIdentity, context: SecurityContext) => {\n context.security.disableAuthorization();\n context.security.setIdentity(identity);\n};\n"],"mappings":";;;;;;AAEO,MAAMA,YAAY,GAAG,CAACC,QAA0B,EAAEC,OAAwB,KAAK;EAClFA,OAAO,CAACC,QAAQ,CAACC,oBAAoB,EAAE;EACvCF,OAAO,CAACC,QAAQ,CAACE,WAAW,CAACJ,QAAQ,CAAC;AAC1C,CAAC;AAAC"}
+ {"version":3,"names":["mockSecurity","identity","context","security","setIdentity","exports"],"sources":["mockSecurity.ts"],"sourcesContent":["import { SecurityContext, SecurityIdentity } from \"@webiny/api-security/types\";\n\nexport const mockSecurity = (identity: SecurityIdentity, context: SecurityContext) => {\n context.security.setIdentity(identity);\n};\n"],"mappings":";;;;;;AAEO,MAAMA,YAAY,GAAGA,CAACC,QAA0B,EAAEC,OAAwB,KAAK;EAClFA,OAAO,CAACC,QAAQ,CAACC,WAAW,CAACH,QAAQ,CAAC;AAC1C,CAAC;AAACI,OAAA,CAAAL,YAAA,GAAAA,YAAA"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@webiny/api-page-builder-import-export",
- "version": "0.0.0-unstable.8acc9e8892",
+ "version": "0.0.0-unstable.8feaff8c32",
  "main": "index.js",
  "keywords": [
  "pbie:base"
@@ -14,54 +14,50 @@
  "author": "Webiny Ltd",
  "license": "MIT",
  "dependencies": {
- "@babel/runtime": "7.20.13",
+ "@babel/runtime": "7.22.6",
  "@commodo/fields": "1.1.2-beta.20",
- "@webiny/api": "0.0.0-unstable.8acc9e8892",
- "@webiny/api-file-manager": "0.0.0-unstable.8acc9e8892",
- "@webiny/api-form-builder": "0.0.0-unstable.8acc9e8892",
- "@webiny/api-page-builder": "0.0.0-unstable.8acc9e8892",
- "@webiny/api-security": "0.0.0-unstable.8acc9e8892",
- "@webiny/error": "0.0.0-unstable.8acc9e8892",
- "@webiny/handler": "0.0.0-unstable.8acc9e8892",
- "@webiny/handler-aws": "0.0.0-unstable.8acc9e8892",
- "@webiny/handler-graphql": "0.0.0-unstable.8acc9e8892",
- "@webiny/utils": "0.0.0-unstable.8acc9e8892",
- "@webiny/validation": "0.0.0-unstable.8acc9e8892",
+ "@webiny/api": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-file-manager": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-form-builder": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-page-builder": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-security": "0.0.0-unstable.8feaff8c32",
+ "@webiny/error": "0.0.0-unstable.8feaff8c32",
+ "@webiny/handler": "0.0.0-unstable.8feaff8c32",
+ "@webiny/handler-aws": "0.0.0-unstable.8feaff8c32",
+ "@webiny/handler-graphql": "0.0.0-unstable.8feaff8c32",
+ "@webiny/utils": "0.0.0-unstable.8feaff8c32",
+ "@webiny/validation": "0.0.0-unstable.8feaff8c32",
  "archiver": "5.3.1",
  "commodo-fields-object": "1.0.6",
  "dot-prop-immutable": "2.1.1",
  "fs-extra": "9.1.0",
  "load-json-file": "6.2.0",
  "lodash": "4.17.21",
- "mdbid": "1.0.0",
  "node-fetch": "2.6.9",
  "stream": "0.0.2",
  "uniqid": "5.4.0",
  "yauzl": "2.10.0"
  },
  "devDependencies": {
- "@babel/cli": "^7.19.3",
- "@babel/core": "^7.19.3",
- "@babel/plugin-proposal-export-default-from": "^7.16.0",
- "@babel/preset-env": "^7.19.4",
- "@babel/preset-typescript": "^7.18.6",
- "@types/archiver": "^5.3.1",
- "@types/node-fetch": "^2.6.1",
- "@types/yauzl": "^2.9.2",
- "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-file-manager-ddb-es": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-i18n-ddb": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-security-so-ddb": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-tenancy": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-tenancy-so-ddb": "^0.0.0-unstable.8acc9e8892",
- "@webiny/api-wcp": "^0.0.0-unstable.8acc9e8892",
- "@webiny/cli": "^0.0.0-unstable.8acc9e8892",
- "@webiny/db": "^0.0.0-unstable.8acc9e8892",
- "@webiny/project-utils": "^0.0.0-unstable.8acc9e8892",
- "jest": "^28.1.0",
- "jest-dynalite": "^3.2.0",
- "rimraf": "^3.0.2",
- "ttypescript": "^1.5.12",
+ "@babel/cli": "7.22.6",
+ "@babel/core": "7.22.8",
+ "@babel/plugin-proposal-export-default-from": "7.18.10",
+ "@babel/preset-env": "7.22.7",
+ "@babel/preset-typescript": "7.22.5",
+ "@types/archiver": "5.3.1",
+ "@types/node-fetch": "2.6.2",
+ "@types/yauzl": "2.10.0",
+ "@webiny/api-authentication": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-dynamodb-to-elasticsearch": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-tenancy": "0.0.0-unstable.8feaff8c32",
+ "@webiny/api-wcp": "0.0.0-unstable.8feaff8c32",
+ "@webiny/cli": "0.0.0-unstable.8feaff8c32",
+ "@webiny/db": "0.0.0-unstable.8feaff8c32",
+ "@webiny/project-utils": "0.0.0-unstable.8feaff8c32",
+ "jest": "29.5.0",
+ "jest-dynalite": "3.6.1",
+ "rimraf": "3.0.2",
+ "ttypescript": "1.5.15",
  "typescript": "4.7.4"
  },
  "publishConfig": {
@@ -79,5 +75,5 @@
  ]
  }
  },
- "gitHead": "8acc9e8892842cabb3980ce0b6432fde55968d5b"
+ "gitHead": "8feaff8c32ad81701938e9e3eadd836c8eb2fcb1"
  }
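Beyond the usual version bumps, note that the standalone mdbid dependency is gone; ID generation now comes through @webiny/utils (see the uploadAssets.js diff above). A rough before/after for the import, based only on those two hunks:

    // Before: direct dependency on "mdbid", which needed a @ts-ignore.
    // import mdbid from "mdbid";

    // After: mdbid is imported from @webiny/utils, no @ts-ignore required.
    import { mdbid } from "@webiny/utils";

    const id = mdbid(); // generates a new ID for the file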
package/types.js CHANGED
@@ -21,17 +21,69 @@ Object.keys(_types).forEach(function (key) {
  });
  });
  // Entities.
- let ExportRevisionType;
- exports.ExportRevisionType = ExportRevisionType;
- (function (ExportRevisionType) {
+ let ExportRevisionType = /*#__PURE__*/function (ExportRevisionType) {
  ExportRevisionType["PUBLISHED"] = "published";
  ExportRevisionType["LATEST"] = "latest";
- })(ExportRevisionType || (exports.ExportRevisionType = ExportRevisionType = {}));
- let ImportExportTaskStatus;
- exports.ImportExportTaskStatus = ImportExportTaskStatus;
- (function (ImportExportTaskStatus) {
+ return ExportRevisionType;
+ }({});
+ exports.ExportRevisionType = ExportRevisionType;
+ let ImportExportTaskStatus = /*#__PURE__*/function (ImportExportTaskStatus) {
  ImportExportTaskStatus["PENDING"] = "pending";
  ImportExportTaskStatus["PROCESSING"] = "processing";
  ImportExportTaskStatus["COMPLETED"] = "completed";
  ImportExportTaskStatus["FAILED"] = "failed";
- })(ImportExportTaskStatus || (exports.ImportExportTaskStatus = ImportExportTaskStatus = {}));
+ return ImportExportTaskStatus;
+ }({});
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ /**
+ * @category StorageOperations
+ * @category ImportExportTaskStorageOperations
+ */
+ exports.ImportExportTaskStatus = ImportExportTaskStatus;
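The enum output above changes shape only because of the Babel upgrade in this release (7.20.x to 7.22.x per package.json): newer Babel emits each enum as a /*#__PURE__*/ IIFE and leaves the detached JSDoc @category comments behind in the compiled file. The TypeScript declarations themselves are unchanged, as both the old and new source maps confirm:

    export enum ExportRevisionType {
        PUBLISHED = "published",
        LATEST = "latest"
    }

    export enum ImportExportTaskStatus {
        PENDING = "pending",
        PROCESSING = "processing",
        COMPLETED = "completed",
        FAILED = "failed"
    }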
package/types.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["ExportRevisionType","ImportExportTaskStatus"],"sources":["types.ts"],"sourcesContent":["export * from \"./graphql/types\";\n\n// Entities.\nexport enum ExportRevisionType {\n PUBLISHED = \"published\",\n LATEST = \"latest\"\n}\n\nexport enum ImportExportTaskStatus {\n PENDING = \"pending\",\n PROCESSING = \"processing\",\n COMPLETED = \"completed\",\n FAILED = \"failed\"\n}\n\nexport interface ImportExportTaskStats {\n [ImportExportTaskStatus.PENDING]: number;\n [ImportExportTaskStatus.PROCESSING]: number;\n [ImportExportTaskStatus.COMPLETED]: number;\n [ImportExportTaskStatus.FAILED]: number;\n total: number;\n}\n\ninterface CreatedBy {\n id: string;\n type: string;\n displayName: string | null;\n}\n\nexport interface ImportExportTask {\n id: string;\n parent: string;\n status: ImportExportTaskStatus;\n data: Record<string, any>;\n stats: ImportExportTaskStats;\n error: Record<string, any>;\n input: Record<string, any>;\n createdOn: string;\n createdBy: CreatedBy;\n tenant: string;\n locale: string;\n}\n\nexport interface File {\n id: string;\n src: string;\n}\n\nexport interface MetaResponse {\n cursor: string | null;\n totalCount: number;\n hasMoreItems: boolean;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsGetParams {\n where: {\n id: string;\n tenant: string;\n locale: string;\n };\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsListParams {\n where: {\n tenant: string;\n locale: string;\n };\n sort?: string[];\n limit?: number;\n after?: string | null;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport type ImportExportTaskStorageOperationsListResponse = [ImportExportTask[], MetaResponse];\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsCreateParams {\n input: Record<string, any>;\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateParams {\n input: Record<string, any>;\n original: ImportExportTask;\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsDeleteParams {\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsGetSubTaskParams {\n where: {\n id: string;\n parent: string;\n tenant: string;\n locale: string;\n };\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsListSubTaskParams {\n where: {\n tenant: string;\n locale: string;\n parent: string;\n status: ImportExportTaskStatus;\n createdBy?: string;\n };\n sort?: string[];\n limit?: number;\n after?: string | null;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport type ImportExportTaskStorageOperationsListSubTaskResponse = [\n ImportExportTask[],\n MetaResponse\n];\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsCreateSubTaskParams {\n input: Record<string, any>;\n 
subTask: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateSubTaskParams {\n input: Record<string, any>;\n original: ImportExportTask;\n subTask: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateTaskStatsParams {\n input: {\n prevStatus: ImportExportTaskStatus;\n nextStatus: ImportExportTaskStatus;\n };\n original: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperations {\n /**\n * Get a single import export task item by given params.\n */\n getTask(params: ImportExportTaskStorageOperationsGetParams): Promise<ImportExportTask | null>;\n\n /**\n * Get all import export tasks by given params.\n */\n listTasks(\n params: ImportExportTaskStorageOperationsListParams\n ): Promise<ImportExportTaskStorageOperationsListResponse>;\n\n createTask(params: ImportExportTaskStorageOperationsCreateParams): Promise<ImportExportTask>;\n\n updateTask(params: ImportExportTaskStorageOperationsUpdateParams): Promise<ImportExportTask>;\n\n deleteTask(params: ImportExportTaskStorageOperationsDeleteParams): Promise<ImportExportTask>;\n\n updateTaskStats(\n params: ImportExportTaskStorageOperationsUpdateTaskStatsParams\n ): Promise<ImportExportTask>;\n\n /**\n * Get a single import export sub-task item by given params.\n */\n getSubTask(\n params: ImportExportTaskStorageOperationsGetSubTaskParams\n ): Promise<ImportExportTask | null>;\n\n /**\n * Get all import export sub-tasks by given params.\n */\n listSubTasks(\n params: ImportExportTaskStorageOperationsListSubTaskParams\n ): Promise<ImportExportTaskStorageOperationsListSubTaskResponse>;\n\n createSubTask(\n params: ImportExportTaskStorageOperationsCreateSubTaskParams\n ): Promise<ImportExportTask>;\n\n updateSubTask(\n params: ImportExportTaskStorageOperationsUpdateSubTaskParams\n ): Promise<ImportExportTask>;\n}\n\nexport interface FileUploadsData {\n /**\n * Location of export data file. Export data contains the relevant entity data (block, page, template), and an\n * array of file objects, exported from the DB, that need to be imported.\n *\n * Example:\n * 'IMPORTS/8lf6y7xp5/8lf6x9v68-header-1/8lf6x9v69-header-1/Header #1.json'\n */\n data: string;\n /**\n * Example:\n * '8ldspraka-9l9iaffak-1.jpeg': 'IMPORTS/8lf6y7xp5/8lf6x9v68-header-1/assets/8ldspraka-9l9iaffak-1.jpeg',\n * '8ldwyq8ao-pb-editor-page-element-rzfKWtdTWN.png': 'IMPORTS/8lf6y7xp5/8lf6x9v69-header-1/assets/8ldwyq8ao-pb-editor-page-element-rzfKWtdTWN.png'\n */\n assets: Record<string, string>;\n}\n\nexport interface ImportData {\n assets: Record<string, string>;\n data: string;\n key: string;\n}\n"],"mappings":";;;;;;;;;;AAAA;AAAA;EAAA;EAAA;EAAA;EAAA;IAAA;IAAA;MAAA;IAAA;EAAA;AAAA;AAEA;AAAA,IACYA,kBAAkB;AAAA;AAAA,WAAlBA,kBAAkB;EAAlBA,kBAAkB;EAAlBA,kBAAkB;AAAA,GAAlBA,kBAAkB,kCAAlBA,kBAAkB;AAAA,IAKlBC,sBAAsB;AAAA;AAAA,WAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;AAAA,GAAtBA,sBAAsB,sCAAtBA,sBAAsB"}
+ {"version":3,"names":["_types","require","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","ExportRevisionType","ImportExportTaskStatus"],"sources":["types.ts"],"sourcesContent":["export * from \"./graphql/types\";\n\n// Entities.\nexport enum ExportRevisionType {\n PUBLISHED = \"published\",\n LATEST = \"latest\"\n}\n\nexport enum ImportExportTaskStatus {\n PENDING = \"pending\",\n PROCESSING = \"processing\",\n COMPLETED = \"completed\",\n FAILED = \"failed\"\n}\n\nexport interface ImportExportTaskStats {\n [ImportExportTaskStatus.PENDING]: number;\n [ImportExportTaskStatus.PROCESSING]: number;\n [ImportExportTaskStatus.COMPLETED]: number;\n [ImportExportTaskStatus.FAILED]: number;\n total: number;\n}\n\ninterface CreatedBy {\n id: string;\n type: string;\n displayName: string | null;\n}\n\nexport interface ImportExportTask {\n id: string;\n parent: string;\n status: ImportExportTaskStatus;\n data: Record<string, any>;\n stats: ImportExportTaskStats;\n error: Record<string, any>;\n input: Record<string, any>;\n createdOn: string;\n createdBy: CreatedBy;\n tenant: string;\n locale: string;\n}\n\nexport interface File {\n id: string;\n src: string;\n}\n\nexport interface MetaResponse {\n cursor: string | null;\n totalCount: number;\n hasMoreItems: boolean;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsGetParams {\n where: {\n id: string;\n tenant: string;\n locale: string;\n };\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsListParams {\n where: {\n tenant: string;\n locale: string;\n };\n sort?: string[];\n limit?: number;\n after?: string | null;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport type ImportExportTaskStorageOperationsListResponse = [ImportExportTask[], MetaResponse];\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsCreateParams {\n input: Record<string, any>;\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateParams {\n input: Record<string, any>;\n original: ImportExportTask;\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsDeleteParams {\n task: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsGetSubTaskParams {\n where: {\n id: string;\n parent: string;\n tenant: string;\n locale: string;\n };\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsListSubTaskParams {\n where: {\n tenant: string;\n locale: string;\n parent: string;\n status: ImportExportTaskStatus;\n createdBy?: string;\n };\n sort?: string[];\n limit?: number;\n after?: string | null;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport type ImportExportTaskStorageOperationsListSubTaskResponse = [\n ImportExportTask[],\n MetaResponse\n];\n\n/**\n * @category StorageOperations\n * 
@category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsCreateSubTaskParams {\n input: Record<string, any>;\n subTask: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateSubTaskParams {\n input: Record<string, any>;\n original: ImportExportTask;\n subTask: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperationsUpdateTaskStatsParams {\n input: {\n prevStatus: ImportExportTaskStatus;\n nextStatus: ImportExportTaskStatus;\n };\n original: ImportExportTask;\n}\n\n/**\n * @category StorageOperations\n * @category ImportExportTaskStorageOperations\n */\nexport interface ImportExportTaskStorageOperations {\n /**\n * Get a single import export task item by given params.\n */\n getTask(params: ImportExportTaskStorageOperationsGetParams): Promise<ImportExportTask | null>;\n\n /**\n * Get all import export tasks by given params.\n */\n listTasks(\n params: ImportExportTaskStorageOperationsListParams\n ): Promise<ImportExportTaskStorageOperationsListResponse>;\n\n createTask(params: ImportExportTaskStorageOperationsCreateParams): Promise<ImportExportTask>;\n\n updateTask(params: ImportExportTaskStorageOperationsUpdateParams): Promise<ImportExportTask>;\n\n deleteTask(params: ImportExportTaskStorageOperationsDeleteParams): Promise<ImportExportTask>;\n\n updateTaskStats(\n params: ImportExportTaskStorageOperationsUpdateTaskStatsParams\n ): Promise<ImportExportTask>;\n\n /**\n * Get a single import export sub-task item by given params.\n */\n getSubTask(\n params: ImportExportTaskStorageOperationsGetSubTaskParams\n ): Promise<ImportExportTask | null>;\n\n /**\n * Get all import export sub-tasks by given params.\n */\n listSubTasks(\n params: ImportExportTaskStorageOperationsListSubTaskParams\n ): Promise<ImportExportTaskStorageOperationsListSubTaskResponse>;\n\n createSubTask(\n params: ImportExportTaskStorageOperationsCreateSubTaskParams\n ): Promise<ImportExportTask>;\n\n updateSubTask(\n params: ImportExportTaskStorageOperationsUpdateSubTaskParams\n ): Promise<ImportExportTask>;\n}\n\nexport interface FileUploadsData {\n /**\n * Location of export data file. 
Export data contains the relevant entity data (block, page, template), and an\n * array of file objects, exported from the DB, that need to be imported.\n *\n * Example:\n * 'IMPORTS/8lf6y7xp5/8lf6x9v68-header-1/8lf6x9v69-header-1/Header #1.json'\n */\n data: string;\n /**\n * Example:\n * '8ldspraka-9l9iaffak-1.jpeg': 'IMPORTS/8lf6y7xp5/8lf6x9v68-header-1/assets/8ldspraka-9l9iaffak-1.jpeg',\n * '8ldwyq8ao-pb-editor-page-element-rzfKWtdTWN.png': 'IMPORTS/8lf6y7xp5/8lf6x9v69-header-1/assets/8ldwyq8ao-pb-editor-page-element-rzfKWtdTWN.png'\n */\n assets: Record<string, string>;\n}\n\nexport interface ImportData {\n assets: Record<string, string>;\n data: string;\n key: string;\n}\n"],"mappings":";;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,MAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAL,MAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAb,MAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AAEA;AAAA,IACYS,kBAAkB,0BAAlBA,kBAAkB;EAAlBA,kBAAkB;EAAlBA,kBAAkB;EAAA,OAAlBA,kBAAkB;AAAA;AAAAJ,OAAA,CAAAI,kBAAA,GAAAA,kBAAA;AAAA,IAKlBC,sBAAsB,0BAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAtBA,sBAAsB;EAAA,OAAtBA,sBAAsB;AAAA;AA8ClC;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AAWA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AAUA;AACA;AACA;AACA;AAcA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AAHAL,OAAA,CAAAK,sBAAA,GAAAA,sBAAA"}
@@ -1,3 +0,0 @@
- import { PbImportExportContext } from "../../types";
- import { Configuration, Payload, Response } from ".";
- export declare const blocksHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;
@@ -1,175 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.blocksHandler = void 0;
-
- var _types = require("../../types");
-
- var _utils = require("../utils");
-
- var _client = require("../../client");
-
- var _mockSecurity = require("../../mockSecurity");
-
- var _utils2 = require("@webiny/utils");
-
- const blocksHandler = async (configuration, payload, context) => {
- const log = console.log;
- let subTask;
- let noPendingTask = true;
- let prevStatusOfSubTask = _types.ImportExportTaskStatus.PENDING;
- log("RUNNING Import Block Queue Process");
- const {
- pageBuilder
- } = context;
- const {
- taskId,
- subTaskIndex,
- type,
- identity
- } = payload; // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks
- // and this Lambda is invoked internally, without credentials.
-
- (0, _mockSecurity.mockSecurity)(identity, context);
-
- try {
- /*
- * Note: We're not going to DB for getting next sub-task to process,
- * because the data might be out of sync due to GSI eventual consistency.
- */
- subTask = await pageBuilder.importExportTask.getSubTask(taskId, (0, _utils2.zeroPad)(subTaskIndex, 5));
- /**
- * Base condition!!
- * Bail out early, if task not found or task's status is not "pending".
- */
-
- if (!subTask || subTask.status !== _types.ImportExportTaskStatus.PENDING) {
- noPendingTask = true;
- return {
- data: "",
- error: null
- };
- } else {
- noPendingTask = false;
- }
-
- prevStatusOfSubTask = subTask.status;
- log(`Fetched sub task => ${subTask.id}`);
- console.log("subTask", subTask);
- const {
- blockKey,
- category,
- zipFileKey,
- input
- } = subTask.data;
- const {
- fileUploadsData
- } = input;
- log(`Processing block key "${blockKey}"`); // Mark task status as PROCESSING
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.PROCESSING
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.PROCESSING
- });
- prevStatusOfSubTask = subTask.status; // Real job
-
- const block = await (0, _utils.importBlock)({
- context,
- blockKey,
- key: zipFileKey,
- fileUploadsData
- }); // Create a block
-
- const pbBlock = await context.pageBuilder.createPageBlock({
- name: block.name,
- blockCategory: category,
- content: block.content,
- preview: block.preview
- }); // Update task record in DB
-
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: "Done",
- block: {
- id: pbBlock.id,
- name: pbBlock.name
- }
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.COMPLETED
- });
- prevStatusOfSubTask = subTask.status;
- } catch (e) {
- log("[IMPORT_BLOCKS_PROCESS] Error => ", e.message);
-
- if (subTask && subTask.id) {
- /**
- * In case of error, we'll update the task status to "failed",
- * so that, client can show notify the user appropriately.
- */
- subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {
- status: _types.ImportExportTaskStatus.FAILED,
- error: {
- name: e.name,
- message: e.message,
- code: "IMPORT_FAILED"
- }
- }); // Update stats in main task
-
- await pageBuilder.importExportTask.updateStats(taskId, {
- prevStatus: prevStatusOfSubTask,
- nextStatus: _types.ImportExportTaskStatus.FAILED
- });
- prevStatusOfSubTask = subTask.status;
- }
-
- return {
- data: null,
- error: {
- message: e.message
- }
- };
- } finally {
- // Base condition!
- if (noPendingTask) {
- log(`No pending sub-task for task ${taskId}`);
- await pageBuilder.importExportTask.updateTask(taskId, {
- status: _types.ImportExportTaskStatus.COMPLETED,
- data: {
- message: `Finish importing blocks.`
- }
- });
- } else {
- log(`Invoking PROCESS for task "${subTaskIndex + 1}"`); // We want to continue with Self invocation no matter if current block error out.
-
- await (0, _client.invokeHandlerClient)({
- context,
- name: configuration.handlers.process,
- payload: {
- taskId,
- subTaskIndex: subTaskIndex + 1,
- type,
- identity: context.security.getIdentity()
- },
- description: "Import blocks - process - subtask"
- });
- }
- }
-
- return {
- data: "",
- error: null
- };
- };
-
- exports.blocksHandler = blocksHandler;
@@ -1 +0,0 @@
- {"version":3,"names":["blocksHandler","configuration","payload","context","log","console","subTask","noPendingTask","prevStatusOfSubTask","ImportExportTaskStatus","PENDING","pageBuilder","taskId","subTaskIndex","type","identity","mockSecurity","importExportTask","getSubTask","zeroPad","status","data","error","id","blockKey","category","zipFileKey","input","fileUploadsData","updateSubTask","PROCESSING","updateStats","prevStatus","nextStatus","block","importBlock","key","pbBlock","createPageBlock","name","blockCategory","content","preview","COMPLETED","message","e","FAILED","code","updateTask","invokeHandlerClient","handlers","process","security","getIdentity","description"],"sources":["blocksHandler.ts"],"sourcesContent":["import { ImportExportTaskStatus, PbImportExportContext } from \"~/types\";\nimport { importBlock } from \"~/import/utils\";\nimport { invokeHandlerClient } from \"~/client\";\nimport { mockSecurity } from \"~/mockSecurity\";\nimport { zeroPad } from \"@webiny/utils\";\nimport { Configuration, Payload, Response } from \"~/import/process\";\n\nexport const blocksHandler = async (\n configuration: Configuration,\n payload: Payload,\n context: PbImportExportContext\n): Promise<Response> => {\n const log = console.log;\n let subTask;\n let noPendingTask = true;\n let prevStatusOfSubTask = ImportExportTaskStatus.PENDING;\n\n log(\"RUNNING Import Block Queue Process\");\n const { pageBuilder } = context;\n const { taskId, subTaskIndex, type, identity } = payload;\n // Disable authorization; this is necessary because we call Page Builder CRUD methods which include authorization checks\n // and this Lambda is invoked internally, without credentials.\n mockSecurity(identity, context);\n\n try {\n /*\n * Note: We're not going to DB for getting next sub-task to process,\n * because the data might be out of sync due to GSI eventual consistency.\n */\n\n subTask = await pageBuilder.importExportTask.getSubTask(taskId, zeroPad(subTaskIndex, 5));\n\n /**\n * Base condition!!\n * Bail out early, if task not found or task's status is not \"pending\".\n */\n if (!subTask || subTask.status !== ImportExportTaskStatus.PENDING) {\n noPendingTask = true;\n return {\n data: \"\",\n error: null\n };\n } else {\n noPendingTask = false;\n }\n prevStatusOfSubTask = subTask.status;\n\n log(`Fetched sub task => ${subTask.id}`);\n console.log(\"subTask\", subTask);\n\n const { blockKey, category, zipFileKey, input } = subTask.data;\n const { fileUploadsData } = input;\n\n log(`Processing block key \"${blockKey}\"`);\n\n // Mark task status as PROCESSING\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.PROCESSING\n });\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.PROCESSING\n });\n prevStatusOfSubTask = subTask.status;\n\n // Real job\n const block = await importBlock({\n context,\n blockKey,\n key: zipFileKey,\n fileUploadsData\n });\n\n // Create a block\n const pbBlock = await context.pageBuilder.createPageBlock({\n name: block.name,\n blockCategory: category,\n content: block.content,\n preview: block.preview\n });\n\n // Update task record in DB\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: \"Done\",\n block: {\n id: pbBlock.id,\n name: pbBlock.name\n }\n }\n });\n // Update stats in main task\n await 
pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.COMPLETED\n });\n prevStatusOfSubTask = subTask.status;\n } catch (e) {\n log(\"[IMPORT_BLOCKS_PROCESS] Error => \", e.message);\n\n if (subTask && subTask.id) {\n /**\n * In case of error, we'll update the task status to \"failed\",\n * so that, client can show notify the user appropriately.\n */\n subTask = await pageBuilder.importExportTask.updateSubTask(taskId, subTask.id, {\n status: ImportExportTaskStatus.FAILED,\n error: {\n name: e.name,\n message: e.message,\n code: \"IMPORT_FAILED\"\n }\n });\n\n // Update stats in main task\n await pageBuilder.importExportTask.updateStats(taskId, {\n prevStatus: prevStatusOfSubTask,\n nextStatus: ImportExportTaskStatus.FAILED\n });\n prevStatusOfSubTask = subTask.status;\n }\n\n return {\n data: null,\n error: {\n message: e.message\n }\n };\n } finally {\n // Base condition!\n if (noPendingTask) {\n log(`No pending sub-task for task ${taskId}`);\n\n await pageBuilder.importExportTask.updateTask(taskId, {\n status: ImportExportTaskStatus.COMPLETED,\n data: {\n message: `Finish importing blocks.`\n }\n });\n } else {\n log(`Invoking PROCESS for task \"${subTaskIndex + 1}\"`);\n // We want to continue with Self invocation no matter if current block error out.\n await invokeHandlerClient<Payload>({\n context,\n name: configuration.handlers.process,\n payload: {\n taskId,\n subTaskIndex: subTaskIndex + 1,\n type,\n identity: context.security.getIdentity()\n },\n description: \"Import blocks - process - subtask\"\n });\n }\n }\n return {\n data: \"\",\n error: null\n };\n};\n"],"mappings":";;;;;;;AAAA;;AACA;;AACA;;AACA;;AACA;;AAGO,MAAMA,aAAa,GAAG,OACzBC,aADyB,EAEzBC,OAFyB,EAGzBC,OAHyB,KAIL;EACpB,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,IAAIE,OAAJ;EACA,IAAIC,aAAa,GAAG,IAApB;EACA,IAAIC,mBAAmB,GAAGC,6BAAA,CAAuBC,OAAjD;EAEAN,GAAG,CAAC,oCAAD,CAAH;EACA,MAAM;IAAEO;EAAF,IAAkBR,OAAxB;EACA,MAAM;IAAES,MAAF;IAAUC,YAAV;IAAwBC,IAAxB;IAA8BC;EAA9B,IAA2Cb,OAAjD,CARoB,CASpB;EACA;;EACA,IAAAc,0BAAA,EAAaD,QAAb,EAAuBZ,OAAvB;;EAEA,IAAI;IACA;AACR;AACA;AACA;IAEQG,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BC,UAA7B,CAAwCN,MAAxC,EAAgD,IAAAO,eAAA,EAAQN,YAAR,EAAsB,CAAtB,CAAhD,CAAhB;IAEA;AACR;AACA;AACA;;IACQ,IAAI,CAACP,OAAD,IAAYA,OAAO,CAACc,MAAR,KAAmBX,6BAAA,CAAuBC,OAA1D,EAAmE;MAC/DH,aAAa,GAAG,IAAhB;MACA,OAAO;QACHc,IAAI,EAAE,EADH;QAEHC,KAAK,EAAE;MAFJ,CAAP;IAIH,CAND,MAMO;MACHf,aAAa,GAAG,KAAhB;IACH;;IACDC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IAEAhB,GAAG,CAAE,uBAAsBE,OAAO,CAACiB,EAAG,EAAnC,CAAH;IACAlB,OAAO,CAACD,GAAR,CAAY,SAAZ,EAAuBE,OAAvB;IAEA,MAAM;MAAEkB,QAAF;MAAYC,QAAZ;MAAsBC,UAAtB;MAAkCC;IAAlC,IAA4CrB,OAAO,CAACe,IAA1D;IACA,MAAM;MAAEO;IAAF,IAAsBD,KAA5B;IAEAvB,GAAG,CAAE,yBAAwBoB,QAAS,GAAnC,CAAH,CA7BA,CA+BA;;IACAlB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBqB;IAD4C,CAA/D,CAAhB,CAhCA,CAmCA;;IACA,MAAMnB,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBqB;IAFgB,CAAjD,CAAN;IAIAtB,mBAAmB,GAAGF,OAAO,CAACc,MAA9B,CAxCA,CA0CA;;IACA,MAAMc,KAAK,GAAG,MAAM,IAAAC,kBAAA,EAAY;MAC5BhC,OAD4B;MAE5BqB,QAF4B;MAG5BY,GAAG,EAAEV,UAHuB;MAI5BE;IAJ4B,CAAZ,CAApB,CA3CA,CAkDA;;IACA,MAAMS,OAAO,GAAG,MAAMlC,OAAO,CAACQ,WAAR,CAAoB2B,eAApB,CAAoC;MACtDC,IAAI,EAAEL,KAAK,CAACK,IAD0C;MAEtDC,aAAa,EAAEf,QAFuC;MAGtDgB,OAAO,EAAEP,KAAK,CAACO,OAHuC;MAItDC,OAAO,EAAER,KAAK,CAACQ;IAJuC,CAApC,CAAtB,CAnDA,CA0DA;;IACApC,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CA
ACiB,EAA3D,EAA+D;MAC3EH,MAAM,EAAEX,6BAAA,CAAuBkC,SAD4C;MAE3EtB,IAAI,EAAE;QACFuB,OAAO,EAAE,MADP;QAEFV,KAAK,EAAE;UACHX,EAAE,EAAEc,OAAO,CAACd,EADT;UAEHgB,IAAI,EAAEF,OAAO,CAACE;QAFX;MAFL;IAFqE,CAA/D,CAAhB,CA3DA,CAqEA;;IACA,MAAM5B,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;MACnDoB,UAAU,EAAExB,mBADuC;MAEnDyB,UAAU,EAAExB,6BAAA,CAAuBkC;IAFgB,CAAjD,CAAN;IAIAnC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;EACH,CA3ED,CA2EE,OAAOyB,CAAP,EAAU;IACRzC,GAAG,CAAC,mCAAD,EAAsCyC,CAAC,CAACD,OAAxC,CAAH;;IAEA,IAAItC,OAAO,IAAIA,OAAO,CAACiB,EAAvB,EAA2B;MACvB;AACZ;AACA;AACA;MACYjB,OAAO,GAAG,MAAMK,WAAW,CAACM,gBAAZ,CAA6BY,aAA7B,CAA2CjB,MAA3C,EAAmDN,OAAO,CAACiB,EAA3D,EAA+D;QAC3EH,MAAM,EAAEX,6BAAA,CAAuBqC,MAD4C;QAE3ExB,KAAK,EAAE;UACHiB,IAAI,EAAEM,CAAC,CAACN,IADL;UAEHK,OAAO,EAAEC,CAAC,CAACD,OAFR;UAGHG,IAAI,EAAE;QAHH;MAFoE,CAA/D,CAAhB,CALuB,CAcvB;;MACA,MAAMpC,WAAW,CAACM,gBAAZ,CAA6Bc,WAA7B,CAAyCnB,MAAzC,EAAiD;QACnDoB,UAAU,EAAExB,mBADuC;QAEnDyB,UAAU,EAAExB,6BAAA,CAAuBqC;MAFgB,CAAjD,CAAN;MAIAtC,mBAAmB,GAAGF,OAAO,CAACc,MAA9B;IACH;;IAED,OAAO;MACHC,IAAI,EAAE,IADH;MAEHC,KAAK,EAAE;QACHsB,OAAO,EAAEC,CAAC,CAACD;MADR;IAFJ,CAAP;EAMH,CA1GD,SA0GU;IACN;IACA,IAAIrC,aAAJ,EAAmB;MACfH,GAAG,CAAE,gCAA+BQ,MAAO,EAAxC,CAAH;MAEA,MAAMD,WAAW,CAACM,gBAAZ,CAA6B+B,UAA7B,CAAwCpC,MAAxC,EAAgD;QAClDQ,MAAM,EAAEX,6BAAA,CAAuBkC,SADmB;QAElDtB,IAAI,EAAE;UACFuB,OAAO,EAAG;QADR;MAF4C,CAAhD,CAAN;IAMH,CATD,MASO;MACHxC,GAAG,CAAE,8BAA6BS,YAAY,GAAG,CAAE,GAAhD,CAAH,CADG,CAEH;;MACA,MAAM,IAAAoC,2BAAA,EAA6B;QAC/B9C,OAD+B;QAE/BoC,IAAI,EAAEtC,aAAa,CAACiD,QAAd,CAAuBC,OAFE;QAG/BjD,OAAO,EAAE;UACLU,MADK;UAELC,YAAY,EAAEA,YAAY,GAAG,CAFxB;UAGLC,IAHK;UAILC,QAAQ,EAAEZ,OAAO,CAACiD,QAAR,CAAiBC,WAAjB;QAJL,CAHsB;QAS/BC,WAAW,EAAE;MATkB,CAA7B,CAAN;IAWH;EACJ;;EACD,OAAO;IACHjC,IAAI,EAAE,EADH;IAEHC,KAAK,EAAE;EAFJ,CAAP;AAIH,CA1JM"}
@@ -1,3 +0,0 @@
- import { PbImportExportContext } from "../../types";
- import { Configuration, Payload, Response } from ".";
- export declare const pagesHandler: (configuration: Configuration, payload: Payload, context: PbImportExportContext) => Promise<Response>;