@webiny/api-page-builder-import-export 0.0.0-mt-3 → 0.0.0-unstable.3386f66516
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/client.d.ts +8 -0
- package/client.js +47 -0
- package/client.js.map +1 -0
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +106 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/export/combine/index.d.ts +16 -0
- package/export/combine/index.js +43 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +106 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +106 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +176 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/export/process/index.d.ts +20 -0
- package/export/process/index.js +40 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +204 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +176 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.d.ts +2 -0
- package/{exportPages → export}/s3Stream.js +14 -7
- package/export/s3Stream.js.map +1 -0
- package/export/utils.d.ts +22 -0
- package/export/utils.js +173 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +7 -5
- package/{exportPages → export}/zipper.js +11 -12
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +155 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +82 -65
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +3 -3
- package/graphql/crud/pages.crud.js +46 -34
- package/graphql/crud/pages.crud.js.map +1 -0
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +140 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +7 -3
- package/graphql/crud.js.map +1 -0
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +57 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +18 -18
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +4 -8
- package/graphql/graphql/pages.gql.js.map +1 -0
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +57 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js.map +1 -0
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +7 -3
- package/graphql/graphql.js.map +1 -0
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +1 -1
- package/graphql/index.js.map +1 -0
- package/graphql/types.d.ts +63 -27
- package/graphql/types.js.map +1 -0
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +110 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/import/create/index.d.ts +24 -0
- package/import/create/index.js +43 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +112 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +108 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +175 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/import/process/index.d.ts +20 -0
- package/import/process/index.js +40 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +181 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/import/process/templatesHandler.d.ts +3 -0
- package/import/process/templatesHandler.js +172 -0
- package/import/process/templatesHandler.js.map +1 -0
- package/import/utils.d.ts +56 -0
- package/{importPages → import}/utils.js +246 -89
- package/import/utils.js.map +1 -0
- package/mockSecurity.js.map +1 -0
- package/package.json +36 -34
- package/types.d.ts +70 -72
- package/types.js +17 -17
- package/types.js.map +1 -0
- package/exportPages/combine/index.d.ts +0 -19
- package/exportPages/combine/index.js +0 -88
- package/exportPages/process/index.d.ts +0 -26
- package/exportPages/process/index.js +0 -204
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -113
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/importPages/client.d.ts +0 -7
- package/importPages/client.js +0 -40
- package/importPages/create/index.d.ts +0 -27
- package/importPages/create/index.js +0 -109
- package/importPages/process/index.d.ts +0 -25
- package/importPages/process/index.js +0 -183
- package/importPages/utils.d.ts +0 -43
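
The file list reflects the package's generalization from pages-only import/export to pages, blocks, and templates: the `exportPages/` and `importPages/` trees are replaced by entity-agnostic `export/` and `import/` trees with per-entity `pagesHandler`, `blocksHandler`, and `templatesHandler` modules, and `pageImportExportTasks.crud` becomes `importExportTasks.crud`. For code that consumed the shared utilities from the old paths, the require paths move accordingly; a minimal sketch (the consuming file is hypothetical, while the `s3Stream` export is confirmed by the diff below):

    // Before (0.0.0-mt-3): page-specific module path.
    // const { s3Stream } = require("@webiny/api-page-builder-import-export/exportPages/s3Stream");

    // After (0.0.0-unstable.3386f66516): entity-agnostic export/ tree.
    const { s3Stream } = require("@webiny/api-page-builder-import-export/export/s3Stream");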
package/{importPages → import}/utils.js

@@ -1,14 +1,16 @@
 "use strict";

-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;

 Object.defineProperty(exports, "__esModule", {
   value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
+exports.importTemplate = importTemplate;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.
+exports.uploadAssets = void 0;

 var _uniqid = _interopRequireDefault(require("uniqid"));

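The only functional change in this hunk is the `.default` appended to the interop helper require, which tracks newer `@babel/runtime` builds that compile the helper itself as an ES module. For reference, a sketch of what that helper does (this is the well-known shape of `interopRequireDefault`, not code from this package):

    // Wraps a CommonJS export so that `.default` access works uniformly
    // for both ES-module and plain CommonJS dependencies.
    function interopRequireDefault(obj) {
      return obj && obj.__esModule ? obj : { default: obj };
    }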
@@ -38,36 +40,52 @@ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/insta

 var _types = require("../types");

-var _s3Stream = require("../
+var _s3Stream = require("../export/s3Stream");

 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";

-const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");

 const FILES_COUNT_IN_EACH_BATCH = 15;
-const ZIP_CONTENT_TYPE = "application/zip";

-function updateImageInPageSettings({
-
-
-
-
+function updateImageInPageSettings(params) {
+  const {
+    settings,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;

   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
-
+    var _settings$general, _settings$general$ima;
+
+    newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }

   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
-
+    var _settings$social, _settings$social$imag;
+
+    newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }

-return
+  return newSettings;
+}
+
+function updateBlockPreviewImage(params) {
+  const {
+    file,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
+  const newFile = file;
+  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+  newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+  return newFile;
 }

-function
+function updateFilesInData({
   data,
   fileIdToKeyMap,
   srcPrefix
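Both new helpers rewrite asset URLs by looking up the re-uploaded S3 key for a file's static id and joining it to the File Manager's `srcPrefix`. A simplified sketch of that mapping (the sample ids, key, and prefix are invented):

    const fileIdToKeyMap = new Map([["file-a1b2", "IMPORTS/xyz/assets/welcome.png"]]);
    const srcPrefix = "https://cdn.example.com/files/";

    // Mirrors updateBlockPreviewImage: normalize the prefix, then point
    // the file at its newly uploaded key.
    const prefix = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
    const file = { id: "file-a1b2", src: "https://old.example.com/files/welcome.png" };
    file.src = `${prefix}/${fileIdToKeyMap.get(file.id || "")}`;
    // => "https://cdn.example.com/files/IMPORTS/xyz/assets/welcome.png"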
@@ -81,7 +99,7 @@ function updateFilesInPageData({
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
-
+      updateFilesInData({
         data: element,
         fileIdToKeyMap,
         srcPrefix
@@ -102,7 +120,7 @@ function updateFilesInPageData({
       value.name = fileIdToKeyMap.get(value.id);
       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
     } else {
-
+      updateFilesInData({
         data: value,
         srcPrefix,
         fileIdToKeyMap
@@ -111,23 +129,26 @@ function updateFilesInPageData({
   }
 }

-const
-
-
-
-
+const uploadAssets = async params => {
+  const {
+    context,
+    filesData,
+    fileUploadsData
+  } = params; // Save uploaded file key against static id for later use.
+
+  const fileIdToKeyMap = new Map();
   /**
    * This function contains logic of file download from S3.
    * Current we're not mocking zip file download from S3 in tests at the moment.
    * So, we're manually mocking it in case of test just by returning an empty object.
    */
-  if (process.env.NODE_ENV === "test") {
-    return {};
-  }

-
+  if (process.env.NODE_ENV === "test") {
+    return {
+      fileIdToKeyMap
+    };
+  } // Save files meta data against old key for later use.

-  const fileIdToKeyMap = new Map(); // Save files meta data against old key for later use.

   const fileKeyToFileMap = new Map(); // Initialize maps.

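`uploadPageAssets` becomes the generic `uploadAssets`, takes a single params object, and now allocates `fileIdToKeyMap` before the test-environment short circuit, so callers always get a usable (possibly empty) map back. The call shape, as used by the import functions later in this diff:

    // Inside an async import function (importPage/importBlock/importTemplate):
    const { fileIdToKeyMap } = await uploadAssets({
      context,           // app context providing fileManager
      filesData: files,  // file records read from the extracted data file
      fileUploadsData    // S3 upload keys for the extracted assets
    });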
@@ -145,7 +166,12 @@ const uploadPageAssets = async ({

   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
-    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
+    if (!file) {
+      return null;
+    } // Update the file map with newly uploaded file.
+

     fileIdToKeyMap.set(file.id, newKey);
     return {
@@ -156,7 +182,7 @@ const uploadPageAssets = async ({
       meta: file.meta,
       tags: file.tags
     };
-  });
+  }).filter(Boolean);
   const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).

   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
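Together with the `if (!file) return null;` guard in the previous hunk, `.filter(Boolean)` drops upload results whose original file record cannot be found instead of crashing on `file.id`. The pattern in isolation (field set abridged):

    const createFilesInput = fileUploadResults
      .map(uploadResult => {
        const file = fileKeyToFileMap.get(getOldFileKey(uploadResult.Key));
        // Missing record: emit a hole instead of throwing on file.id below.
        return file ? { meta: file.meta, tags: file.tags } : null;
      })
      .filter(Boolean); // strips the nulls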
@@ -177,7 +203,7 @@ const uploadPageAssets = async ({
   };
 };

-exports.
+exports.uploadAssets = uploadAssets;

 async function importPage({
   pageKey,
@@ -206,25 +232,31 @@ async function importPage({
     files
   } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.

-  if (Array.isArray(files) && files.length) {
+  if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
-    } = await
+    } = await uploadAssets({
       context,
+
+      /**
+       * TODO @ts-refactor @ashutosh figure out correct types.
+       */
+      // @ts-ignore
       filesData: files,
       fileUploadsData
     });
+    const settings = await context.fileManager.settings.getSettings();
     const {
-      srcPrefix
-    } =
-
-      data: page.content,
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: page.content || {},
       fileIdToKeyMap,
       srcPrefix
     });
     page.settings = updateImageInPageSettings({
-      settings: page.settings,
+      settings: page.settings || {},
       fileIdToKeyMap,
       srcPrefix
     });
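`importPage` now reads `srcPrefix` from the File Manager settings explicitly, with two layers of defense. A one-line illustration of why both guards matter:

    // `settings || {}` survives a null settings record; `srcPrefix = ""`
    // survives a record that lacks the field.
    const { srcPrefix = "" } = null || {}; // => srcPrefix === ""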
@@ -237,6 +269,119 @@ async function importPage({
   return page;
 }

+async function importBlock({
+  blockKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log; // Making Directory for block in which we're going to extract the block data file.
+
+  const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
+  (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
+  const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  }); // Load the block data file from disk.
+
+  log(`Load file ${blockDataFileKey}`);
+  const {
+    block,
+    files
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
+
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload block assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: block.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+    block.preview = updateBlockPreviewImage({
+      file: block.preview || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+
+  log("Removing Directory for block...");
+  await (0, _downloadInstallFiles.deleteFile)(blockKey);
+  log(`Remove block contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return block;
+}
+
+async function importTemplate({
+  templateKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log; // Making Directory for template in which we're going to extract the template data file.
+
+  const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
+
+  (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+
+  const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+
+  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`); // Download and save template data file in disk.
+
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  }); // Load the template data file from disk.
+
+  log(`Load file ${templateDataFileKey}`);
+  const {
+    template,
+    files
+  } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH); // Only update template data if there are files.
+
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload template assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: template.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+
+  log("Removing Directory for template...");
+  await (0, _downloadInstallFiles.deleteFile)(templateKey);
+  log(`Remove template contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return template;
+}
+
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
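`importBlock` and `importTemplate` mirror `importPage`: extract to a per-entity directory under /tmp, stream the data file from S3, upload assets, rewrite file references, and clean up. A hedged sketch of invoking the block variant (the argument values are illustrative; the `{ data, assets }` shape comes from the dataMap built later in this diff):

    const block = await importBlock({
      blockKey: "block-1",  // the block's key inside the export archive
      context,
      fileUploadsData: { data: "IMPORTS/xyz/block-1/block.json", assets: {} }
    });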
@@ -270,14 +415,6 @@ async function uploadFilesFromS3({
   return Promise.all(promises);
 }

-async function getObjectMetaFromS3(Key) {
-  const meta = await _s3Stream.s3Stream.getObjectHead(Key);
-
-  if (meta.ContentType !== ZIP_CONTENT_TYPE) {
-    throw new _error.default(`Unsupported file type: "${meta.ContentType}"`, "UNSUPPORTED_FILE");
-  }
-}
-
 function getOldFileKey(key) {
   /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -298,32 +435,23 @@ function getFileNameWithoutExt(fileName) {

 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param
- * @return
+ * @param zipFileUrl
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
  */
-async function readExtractAndUploadZipFileContents(
+async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
-  const
-  let readStream; // Check whether it is a URL
+  const importDataList = [];

-
-  const response = await (0, _nodeFetch.default)(zipFileKey);
+  const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];

-
-    throw new _error.default(`Unable to downloading file: "${zipFileKey}"`, response.statusText);
-  }
+  const response = await (0, _nodeFetch.default)(zipFileUrl);

-
-
-  // We're first retrieving object's meta data, just to check whether the file is available at the given Key
-  await getObjectMetaFromS3(zipFileKey);
-  readStream = _s3Stream.s3Stream.readStream(zipFileKey);
+  if (!response.ok) {
+    throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }

-  const
-
-  const zipFileName = _path.default.basename(zipFileKey); // Read export file and download it in the disk
-
+  const readStream = response.body;
+  const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk

   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);

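This hunk is why `getObjectMetaFromS3` and the ZIP content-type check could be deleted above: the import entry point now receives a (typically pre-signed) URL, fetches it over HTTP, and treats a non-OK response as the failure signal. The new download path, condensed into a standalone sketch (names and error class simplified):

    const fetch = require("node-fetch");
    const path = require("path");

    async function downloadExportZip(zipFileUrl, installDir) {
      // Strip the query string so "export.zip?X-Amz-Signature=..." -> "export.zip".
      const zipFileName = path.basename(zipFileUrl).split("?")[0];
      const response = await fetch(zipFileUrl);
      if (!response.ok) {
        throw new Error(`Unable to download file: "${zipFileUrl}" (${response.statusText})`);
      }
      // response.body is a readable stream; the caller pipes it to disk.
      return { readStream: response.body, zipFilePath: path.join(installDir, zipFileName) };
    }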
@@ -332,23 +460,23 @@ async function readExtractAndUploadZipFileContents(zipFileKey) {
   log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file

   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
-  log(`Removing ZIP file "${
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
+  log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3

   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+    importDataList.push(dataMap);
   }

   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-  return
+  return importDataList;
 }

 const ASSETS_DIR_NAME = "/assets";

-function
+function prepareDataDirMap({
   map,
   filePath,
   newKey
@@ -382,30 +510,27 @@ async function deleteS3Folder(key) {
   }

   const response = await _s3Stream.s3Stream.listObject(key);
-  const keys = response.Contents.map(c => c.Key);
+  const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
-
-const zeroPad = version => `${version}`.padStart(5, "0");
+} // export const zeroPad = version => `${version}`.padStart(5, "0");

-exports.zeroPad = zeroPad;

 function initialStats(total) {
   return {
-    [_types.
-    [_types.
-    [_types.
-    [_types.
+    [_types.ImportExportTaskStatus.PENDING]: total,
+    [_types.ImportExportTaskStatus.PROCESSING]: 0,
+    [_types.ImportExportTaskStatus.COMPLETED]: 0,
+    [_types.ImportExportTaskStatus.FAILED]: 0,
     total
   };
 }

 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
-    const
+    const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);

     const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
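`initialStats` now keys its counters by the renamed `ImportExportTaskStatus` enum. Assuming the enum members are the usual lowercase status strings (an assumption; the diff only shows the member names), the returned shape would look like:

    const stats = initialStats(3);
    // => { pending: 3, processing: 0, completed: 0, failed: 0, total: 3 }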
@@ -419,6 +544,13 @@ function extractZipToDisk(exportFileZipPath) {
       if (err) {
         console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
         reject(err);
+        return;
+      }
+
+      if (!zipFile) {
+        console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
+        reject("Missing Zip File Resource.");
+        return;
       }

       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
@@ -428,7 +560,7 @@ function extractZipToDisk(exportFileZipPath) {
         reject(err);
       }

-      resolve(
+      resolve(zipFilePaths);
     });
     zipFile.readEntry();
     zipFile.on("entry", function (entry) {
@@ -445,15 +577,24 @@ function extractZipToDisk(exportFileZipPath) {
         if (err) {
           console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
           reject(err);
+          return;
+        }
+
+        if (!readStream) {
+          console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
+          reject("Missing Read Stream Resource.");
+          return;
         }

         const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);

         readStream.on("end", function () {
-
+          zipFilePaths.push(filePath);
           zipFile.readEntry();
         });
-        streamPipeline(readStream, (0, _fs.createWriteStream)(filePath))
+        streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
+          reject(error);
+        });
       });
     }
   });
@@ -461,29 +602,36 @@ function extractZipToDisk(exportFileZipPath) {
   });
 }

-function extractZipAndUploadToS3(
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
     const fileUploadPromises = [];
-    const
+    const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
     let dataMap = {
-      key:
+      key: uniqueKey,
       assets: {},
       data: ""
     };

-    _yauzl.default.open(
+    _yauzl.default.open(dataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
       if (err) {
-        console.warn("ERROR: Failed to extract zip: ",
+        console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
         reject(err);
+        return;
+      }
+
+      if (!zipFile) {
+        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
+        reject("Missing Zip File Resource.");
+        return;
       }

       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
-          console.warn('ERROR: Failed on "END" for file: ',
+          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
           reject(err);
         }

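The recurring fix across these zip hunks: every yauzl callback now returns after rejecting, and a missing `zipFile` or `readStream` resource is rejected explicitly instead of being dereferenced. The pattern extracted into a standalone sketch:

    const yauzl = require("yauzl");

    function openZip(zipPath) {
      return new Promise((resolve, reject) => {
        yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
          if (err) {
            reject(err);
            return; // without this, the code below would still run after a failed open
          }
          if (!zipFile) {
            // yauzl's callback may deliver no zipFile; fail loudly instead of crashing.
            reject(new Error("Missing Zip File Resource."));
            return;
          }
          resolve(zipFile);
        });
      });
    }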
@@ -509,15 +657,22 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
         if (err) {
           console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
           reject(err);
+          return;
+        }
+
+        if (!readStream) {
+          console.log("ERROR: Missing Read Stream while importing.");
+          reject("Missing Read Strea Resource.");
+          return;
         }

         readStream.on("end", function () {
           filePaths.push(entry.fileName);
           zipFile.readEntry();
         });
-        const newKey = `${uniquePath}/${
+        const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place

-        dataMap =
+        dataMap = prepareDataDirMap({
           map: dataMap,
           filePath: entry.fileName,
           newKey
@@ -530,6 +685,8 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {

         streamPipeline(readStream, streamPassThrough).then(() => {
           fileUploadPromises.push(promise);
+        }).catch(error => {
+          reject(error);
         });
       });
     }