@webiny/api-page-builder-import-export 0.0.0-unstable.990c3ab1b6 → 0.0.0-unstable.d4f203fa97
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
- package/client.js +6 -2
- package/client.js.map +1 -1
- package/exportPages/combine/index.js +19 -11
- package/exportPages/combine/index.js.map +1 -1
- package/exportPages/process/index.js +35 -19
- package/exportPages/process/index.js.map +1 -1
- package/exportPages/s3Stream.js +20 -1
- package/exportPages/s3Stream.js.map +1 -1
- package/exportPages/utils.js +31 -18
- package/exportPages/utils.js.map +1 -1
- package/exportPages/zipper.js +41 -29
- package/exportPages/zipper.js.map +1 -1
- package/graphql/crud/pageImportExportTasks.crud.js +57 -9
- package/graphql/crud/pageImportExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.js +39 -21
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud.js +5 -0
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/pageImportExportTasks.gql.js +6 -1
- package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.js +6 -1
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.js +3 -0
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +4 -0
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.js +5 -0
- package/graphql/index.js.map +1 -1
- package/importPages/create/index.js +20 -9
- package/importPages/create/index.js.map +1 -1
- package/importPages/process/index.js +29 -24
- package/importPages/process/index.js.map +1 -1
- package/importPages/utils.js +126 -44
- package/importPages/utils.js.map +1 -1
- package/mockSecurity.js +2 -0
- package/mockSecurity.js.map +1 -1
- package/package.json +23 -23
- package/types.js +5 -0
- package/types.js.map +1 -1
package/importPages/utils.js
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
@@ -8,25 +9,44 @@ exports.importPage = importPage;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
 exports.uploadPageAssets = void 0;
+
 var _uniqid = _interopRequireDefault(require("uniqid"));
+
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
+
 var _fs = require("fs");
+
 var _fsExtra = require("fs-extra");
+
 var _util = require("util");
+
 var _stream = require("stream");
+
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
+
 var _path = _interopRequireDefault(require("path"));
+
 var _yauzl = _interopRequireDefault(require("yauzl"));
+
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
+
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
+
 var _error = _interopRequireDefault(require("@webiny/error"));
+
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+
 var _types = require("../types");
+
 var _s3Stream = require("../exportPages/s3Stream");
+
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
+
 const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
+
 const FILES_COUNT_IN_EACH_BATCH = 15;
+
 function updateImageInPageSettings(params) {
   const {
     settings,
@@ -35,16 +55,22 @@ function updateImageInPageSettings(params) {
   } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
     var _settings$general, _settings$general$ima;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
+
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
     var _settings$social, _settings$social$imag;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
+
   return newSettings;
 }
+
 function updateFilesInPageData({
   data,
   fileIdToKeyMap,
@@ -53,8 +79,9 @@ function updateFilesInPageData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
+  } // Recursively call function if data is array
+
+
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
@@ -64,12 +91,16 @@ function updateFilesInPageData({
         srcPrefix
       });
     }
+
     return;
-  }
-
+  } // Main logic
+
+
   const tuple = Object.entries(data);
+
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
+
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
@@ -83,50 +114,52 @@ function updateFilesInPageData({
     }
   }
 }
+
 const uploadPageAssets = async params => {
   const {
     context,
     filesData,
     fileUploadsData
-  } = params;
-
+  } = params; // Save uploaded file key against static id for later use.
+
   const fileIdToKeyMap = new Map();
   /**
    * This function contains logic of file download from S3.
    * Current we're not mocking zip file download from S3 in tests at the moment.
   * So, we're manually mocking it in case of test just by returning an empty object.
   */
+
   if (process.env.NODE_ENV === "test") {
     return {
       fileIdToKeyMap
     };
   }
-  console.log("INSIDE uploadPageAssets");
 
-  // Save files meta data against old key for later use.
-  const fileKeyToFileMap = new Map();
-  // Initialize maps.
+  console.log("INSIDE uploadPageAssets"); // Save files meta data against old key for later use.
+
+  const fileKeyToFileMap = new Map(); // Initialize maps.
+
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file); // Initialize the value
 
-    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
+
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  }); // Create files in File Manager
 
-  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
    const newKey = uploadResult.Key;
    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
    if (!file) {
      return null;
-    }
+    } // Update the file map with newly uploaded file.
+
 
-    // Update the file map with newly uploaded file.
    fileIdToKeyMap.set(file.id, newKey);
    return {
      key: newKey,
@@ -137,9 +170,10 @@ const uploadPageAssets = async params => {
       tags: file.tags
     };
   }).filter(Boolean);
-  const createFilesPromises = [];
-
+  const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
+
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
+
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -149,44 +183,49 @@ const uploadPageAssets = async params => {
     */
     context.fileManager.files.createFilesInBatch(createFilesInputChunk));
   }
+
   await Promise.all(createFilesPromises);
   return {
     fileIdToKeyMap
   };
 };
+
 exports.uploadPageAssets = uploadPageAssets;
+
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log; // Making Directory for page in which we're going to extract the page data file.
 
-  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
+
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
+
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-  // Download and save page data file in disk.
+
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`); // Download and save page data file in disk.
+
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  }); // Load the page data file from disk.
 
-  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
 
-  // Only update page data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
     } = await uploadPageAssets({
       context,
+
       /**
        * TODO @ts-refactor @ashutosh figure out correct types.
       */
@@ -209,40 +248,47 @@ async function importPage({
       srcPrefix
     });
   }
+
   log("Removing Directory for page...");
   await (0, _downloadInstallFiles.deleteFile)(pageKey);
   log(`Remove page contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return page;
 }
+
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = []; // Upload all assets.
+
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
+    const tempNewKey = oldKeyToNewKeyMap[oldKey]; // Read file.
+
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
+
 
-    // Read file.
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
-    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
+
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
+
       const {
         streamPassThrough,
         streamPassThroughUploadPromise: promise
       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
+
       readStream.pipe(streamPassThrough);
       promises.push(promise);
       console.log(`Successfully queued file "${newKey}"`);
     }
   }
+
   return Promise.all(promises);
 }
+
 function getOldFileKey(key) {
   /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -254,10 +300,13 @@ function getOldFileKey(key) {
     return key;
   }
 }
+
 const FILE_CONTENT_TYPE = "application/octet-stream";
+
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
+
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
 * @param zipFileUrl
@@ -266,70 +315,82 @@ function getFileNameWithoutExt(fileName) {
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
   const pageImportDataList = [];
+
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
+
   const response = await (0, _nodeFetch.default)(zipFileUrl);
+
   if (!response.ok) {
     throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
+
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/");
-
+  const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/"); // Read export file and download it in the disk
+
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
+
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
 
-  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
 
-  // Extract each page zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
     pageImportDataList.push(dataMap);
   }
+
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
   return pageImportDataList;
 }
+
 const ASSETS_DIR_NAME = "/assets";
+
 function preparePageDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
+
   const fileName = _path.default.basename(filePath);
   /*
   * We want to use dot (.) as part of object key rather than creating nested object(s).
   * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
+
+
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
+
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
+
   return map;
 }
+
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
+
   const response = await _s3Stream.s3Stream.listObject(key);
   const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
+} // export const zeroPad = version => `${version}`.padStart(5, "0");
 
-// export const zeroPad = version => `${version}`.padStart(5, "0");
 
 function initialStats(total) {
   return {
@@ -340,13 +401,17 @@ function initialStats(total) {
     total
   };
 }
+
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
     const pageZipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport);
-    // Make sure DIR exists
+
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
+
+
     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
+
     _yauzl.default.open(exportFileZipPath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -355,22 +420,26 @@ function extractZipToDisk(exportFileZipPath) {
        reject(err);
        return;
      }
+
      if (!zipFile) {
        console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
        reject("Missing Zip File Resource.");
        return;
      }
+
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
          console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
          reject(err);
        }
+
        resolve(pageZipFilePaths);
      });
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
+
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -384,12 +453,15 @@ function extractZipToDisk(exportFileZipPath) {
            reject(err);
            return;
          }
+
          if (!readStream) {
            console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
            reject("Missing Read Stream Resource.");
            return;
          }
+
          const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
+
          readStream.on("end", function () {
            pageZipFilePaths.push(filePath);
            zipFile.readEntry();
@@ -403,6 +475,7 @@ function extractZipToDisk(exportFileZipPath) {
     });
   });
 }
+
 function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
@@ -413,6 +486,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
       assets: {},
       data: ""
     };
+
     _yauzl.default.open(pageDataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -421,17 +495,20 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
      reject(err);
      return;
    }
+
    if (!zipFile) {
      console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
      reject("Missing Zip File Resource.");
      return;
    }
+
    console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
    zipFile.on("end", function (err) {
      if (err) {
        console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
        reject(err);
      }
+
      Promise.all(fileUploadPromises).then(res => {
        res.forEach(r => {
          console.info("Done uploading... ", r);
@@ -442,6 +519,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
     zipFile.readEntry();
     zipFile.on("entry", function (entry) {
       console.info(`Processing entry: "${entry.fileName}"`);
+
       if (/\/$/.test(entry.fileName)) {
         // Directory file names end with '/'.
         // Note that entries for directories themselves are optional.
@@ -455,26 +533,30 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
          reject(err);
          return;
        }
+
        if (!readStream) {
          console.log("ERROR: Missing Read Stream while importing pages.");
          reject("Missing Read Strea Resource.");
          return;
        }
+
        readStream.on("end", function () {
          filePaths.push(entry.fileName);
          zipFile.readEntry();
        });
-        const newKey = `${uniquePath}/${uniquePageKey}/${entry.fileName}`;
-
+        const newKey = `${uniquePath}/${uniquePageKey}/${entry.fileName}`; // Modify in place
+
        dataMap = preparePageDataDirMap({
          map: dataMap,
          filePath: entry.fileName,
          newKey
        });
+
        const {
          streamPassThrough,
          streamPassThroughUploadPromise: promise
        } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
+
        streamPipeline(readStream, streamPassThrough).then(() => {
          fileUploadPromises.push(promise);
        }).catch(error => {