@webiny/api-page-builder-import-export 0.0.0-unstable.990c3ab1b6 → 0.0.0-unstable.aad28a72ae
This diff reflects the published contents of two publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/client.d.ts +2 -2
- package/client.js +6 -2
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +106 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/{exportPages → export}/combine/index.d.ts +3 -2
- package/export/combine/index.js +30 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +106 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +176 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/{exportPages → export}/process/index.d.ts +4 -6
- package/export/process/index.js +27 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +204 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.d.ts +0 -0
- package/{exportPages → export}/s3Stream.js +20 -1
- package/{exportPages → export}/s3Stream.js.map +1 -1
- package/export/utils.d.ts +16 -0
- package/export/utils.js +135 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +6 -5
- package/{exportPages → export}/zipper.js +49 -36
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +155 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +105 -57
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +2 -2
- package/graphql/crud/pages.crud.js +53 -33
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +9 -2
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +57 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +23 -18
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +8 -9
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/utils/resolve.js +3 -0
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +8 -2
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +5 -0
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +43 -23
- package/graphql/types.js.map +1 -1
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +110 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/{importPages → import}/create/index.d.ts +5 -4
- package/import/create/index.js +30 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +110 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +175 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/{importPages → import}/process/index.d.ts +4 -3
- package/import/process/index.js +27 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +180 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/{importPages → import}/utils.d.ts +19 -20
- package/{importPages → import}/utils.js +229 -76
- package/import/utils.js.map +1 -0
- package/mockSecurity.js +2 -0
- package/mockSecurity.js.map +1 -1
- package/package.json +23 -23
- package/types.d.ts +62 -65
- package/types.js +22 -17
- package/types.js.map +1 -1
- package/exportPages/combine/index.js +0 -106
- package/exportPages/combine/index.js.map +0 -1
- package/exportPages/process/index.js +0 -192
- package/exportPages/process/index.js.map +0 -1
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -100
- package/exportPages/utils.js.map +0 -1
- package/exportPages/zipper.js.map +0 -1
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
- package/importPages/create/index.js +0 -107
- package/importPages/create/index.js.map +0 -1
- package/importPages/process/index.js +0 -180
- package/importPages/process/index.js.map +0 -1
- package/importPages/utils.js.map +0 -1
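As the file list shows, this release generalizes the page-only pipeline: the `exportPages`/`importPages` trees are renamed to `export`/`import`, each pipeline step (`export/combine`, `export/process`, `import/create`, `import/process`) is split into a `pagesHandler` plus a new `blocksHandler` behind a small `index`, and the `pageImportExportTasks` CRUD and GraphQL modules become `importExportTasks`. A minimal sketch of what such a dispatching `index` might look like follows; the `type` discriminator and the handler signatures are assumptions for illustration, not the package's actual API:

```js
// Hypothetical dispatcher in the spirit of the new export/process/index.js.
// Handler module names mirror the file list above; the payload "type" field
// and the (payload, context) signature are assumed for this sketch.
const { blocksHandler } = require("./blocksHandler");
const { pagesHandler } = require("./pagesHandler");

module.exports = async (payload, context) => {
  if (payload && payload.type === "block") {
    return blocksHandler(payload, context);
  }
  // Default to pages, preserving the pre-rename behavior.
  return pagesHandler(payload, context);
};
```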
package/{importPages → import}/utils.js

@@ -1,32 +1,53 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.uploadPageAssets = void 0;
+exports.uploadAssets = void 0;
+
 var _uniqid = _interopRequireDefault(require("uniqid"));
+
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
+
 var _fs = require("fs");
+
 var _fsExtra = require("fs-extra");
+
 var _util = require("util");
+
 var _stream = require("stream");
+
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
+
 var _path = _interopRequireDefault(require("path"));
+
 var _yauzl = _interopRequireDefault(require("yauzl"));
+
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
+
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
+
 var _error = _interopRequireDefault(require("@webiny/error"));
+
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+
 var _types = require("../types");
-var _s3Stream = require("../exportPages/s3Stream");
+
+var _s3Stream = require("../export/s3Stream");
+
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
-
+
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
+
 const FILES_COUNT_IN_EACH_BATCH = 15;
+
 function updateImageInPageSettings(params) {
   const {
     settings,
@@ -35,17 +56,35 @@ function updateImageInPageSettings(params) {
   } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
     var _settings$general, _settings$general$ima;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
+
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
     var _settings$social, _settings$social$imag;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
+
   return newSettings;
 }
-function updateFilesInPageData({
+
+function updateBlockPreviewImage(params) {
+  const {
+    file,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
+  const newFile = file;
+  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+  newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+  return newFile;
+}
+
+function updateFilesInData({
   data,
   fileIdToKeyMap,
   srcPrefix
@@ -53,29 +92,34 @@ function updateFilesInPageData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
+  } // Recursively call function if data is array
+
+
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
-      updateFilesInPageData({
+      updateFilesInData({
        data: element,
        fileIdToKeyMap,
        srcPrefix
      });
    }
+
     return;
-  }
-
+  } // Main logic
+
+
   const tuple = Object.entries(data);
+
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
+
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
     } else {
-      updateFilesInPageData({
+      updateFilesInData({
        data: value,
        srcPrefix,
        fileIdToKeyMap
@@ -83,50 +127,51 @@ function updateFilesInPageData({
     }
   }
 }
-const uploadPageAssets = async params => {
+
+const uploadAssets = async params => {
   const {
     context,
     filesData,
     fileUploadsData
-  } = params;
-
+  } = params; // Save uploaded file key against static id for later use.
+
   const fileIdToKeyMap = new Map();
   /**
    * This function contains logic of file download from S3.
    * Current we're not mocking zip file download from S3 in tests at the moment.
    * So, we're manually mocking it in case of test just by returning an empty object.
    */
+
   if (process.env.NODE_ENV === "test") {
     return {
       fileIdToKeyMap
     };
-  }
-
+  } // Save files meta data against old key for later use.
+
+
+  const fileKeyToFileMap = new Map(); // Initialize maps.
 
-  // Save files meta data against old key for later use.
-  const fileKeyToFileMap = new Map();
-  // Initialize maps.
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file); // Initialize the value
 
-    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
+
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  }); // Create files in File Manager
 
-  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
     const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
     if (!file) {
       return null;
-    }
+    } // Update the file map with newly uploaded file.
+
 
-    // Update the file map with newly uploaded file.
     fileIdToKeyMap.set(file.id, newKey);
     return {
       key: newKey,
@@ -137,9 +182,10 @@ const uploadPageAssets = async params => {
       tags: file.tags
     };
   }).filter(Boolean);
-  const createFilesPromises = [];
-
+  const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
+
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
+
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -149,44 +195,49 @@ const uploadPageAssets = async params => {
     */
     context.fileManager.files.createFilesInBatch(createFilesInputChunk));
   }
+
   await Promise.all(createFilesPromises);
   return {
     fileIdToKeyMap
   };
 };
-exports.uploadPageAssets = uploadPageAssets;
+
+exports.uploadAssets = uploadAssets;
+
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log; // Making Directory for page in which we're going to extract the page data file.
 
-  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
+
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
+
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-  // Download and save page data file in disk.
+
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`); // Download and save page data file in disk.
+
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  }); // Load the page data file from disk.
 
-  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
 
-  // Only update page data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
-    } = await uploadPageAssets({
+    } = await uploadAssets({
       context,
+
       /**
        * TODO @ts-refactor @ashutosh figure out correct types.
        */
@@ -198,7 +249,7 @@ async function importPage({
     const {
       srcPrefix = ""
     } = settings || {};
-    updateFilesInPageData({
+    updateFilesInData({
       data: page.content || {},
       fileIdToKeyMap,
       srcPrefix
@@ -209,40 +260,106 @@ async function importPage({
       srcPrefix
     });
   }
+
   log("Removing Directory for page...");
   await (0, _downloadInstallFiles.deleteFile)(pageKey);
   log(`Remove page contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return page;
 }
+
+async function importBlock({
+  blockKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log; // Making Directory for block in which we're going to extract the block data file.
+
+  const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
+  (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
+  const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  }); // Load the block data file from disk.
+
+  log(`Load file ${blockDataFileKey}`);
+  const {
+    block,
+    files
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
+
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload block assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: block.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+    block.preview = updateBlockPreviewImage({
+      file: block.preview || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+
+  log("Removing Directory for block...");
+  await (0, _downloadInstallFiles.deleteFile)(blockKey);
+  log(`Remove block contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return block;
+}
+
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = []; // Upload all assets.
+
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
+    const tempNewKey = oldKeyToNewKeyMap[oldKey]; // Read file.
+
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
+
 
-    // Read file.
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
-    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
+
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
+
       const {
         streamPassThrough,
         streamPassThroughUploadPromise: promise
       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
+
       readStream.pipe(streamPassThrough);
       promises.push(promise);
       console.log(`Successfully queued file "${newKey}"`);
     }
   }
+
   return Promise.all(promises);
 }
+
 function getOldFileKey(key) {
   /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -254,99 +371,118 @@ function getOldFileKey(key) {
     return key;
   }
 }
+
 const FILE_CONTENT_TYPE = "application/octet-stream";
+
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
+
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
  * @param zipFileUrl
- * @return
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
  */
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
-  const
+  const importDataList = [];
+
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
+
   const response = await (0, _nodeFetch.default)(zipFileUrl);
+
   if (!response.ok) {
     throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
+
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("
-
+  const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk
+
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
+
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
 
-  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3
 
-  // Extract each page zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+    importDataList.push(dataMap);
   }
+
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-  return
+  return importDataList;
 }
+
 const ASSETS_DIR_NAME = "/assets";
-
+
+function prepareDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
+
   const fileName = _path.default.basename(filePath);
   /*
   * We want to use dot (.) as part of object key rather than creating nested object(s).
   * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
+
+
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
+
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
+
   return map;
 }
+
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
+
   const response = await _s3Stream.s3Stream.listObject(key);
   const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
+} // export const zeroPad = version => `${version}`.padStart(5, "0");
 
-// export const zeroPad = version => `${version}`.padStart(5, "0");
 
 function initialStats(total) {
   return {
-    [_types.
-    [_types.
-    [_types.
-    [_types.
+    [_types.ImportExportTaskStatus.PENDING]: total,
+    [_types.ImportExportTaskStatus.PROCESSING]: 0,
+    [_types.ImportExportTaskStatus.COMPLETED]: 0,
+    [_types.ImportExportTaskStatus.FAILED]: 0,
     total
   };
 }
+
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
-    const
+    const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
-    // Make sure DIR exists
+
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
+
+
     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
+
     _yauzl.default.open(exportFileZipPath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -355,22 +491,26 @@ function extractZipToDisk(exportFileZipPath) {
         reject(err);
         return;
       }
+
       if (!zipFile) {
         console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
         reject("Missing Zip File Resource.");
         return;
       }
+
       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
           console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
           reject(err);
         }
-
+
+        resolve(zipFilePaths);
       });
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
+
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -384,14 +524,17 @@ function extractZipToDisk(exportFileZipPath) {
           reject(err);
           return;
         }
+
         if (!readStream) {
           console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
           reject("Missing Read Stream Resource.");
           return;
         }
+
         const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
+
         readStream.on("end", function () {
-
+          zipFilePaths.push(filePath);
           zipFile.readEntry();
         });
         streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
@@ -403,35 +546,40 @@ function extractZipToDisk(exportFileZipPath) {
     });
   });
 }
-function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
     const fileUploadPromises = [];
-    const
+    const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
     let dataMap = {
-      key:
+      key: uniqueKey,
       assets: {},
       data: ""
     };
-    _yauzl.default.open(pageDataZipFilePath, {
+
+    _yauzl.default.open(dataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
       if (err) {
-        console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
+        console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
        reject(err);
        return;
      }
+
      if (!zipFile) {
-        console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
+        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
        reject("Missing Zip File Resource.");
        return;
      }
+
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
-          console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
+          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
          reject(err);
        }
+
        Promise.all(fileUploadPromises).then(res => {
          res.forEach(r => {
            console.info("Done uploading... ", r);
@@ -442,6 +590,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
         console.info(`Processing entry: "${entry.fileName}"`);
+
         if (/\/$/.test(entry.fileName)) {
           // Directory file names end with '/'.
           // Note that entries for directories themselves are optional.
@@ -455,26 +604,30 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
           reject(err);
           return;
         }
+
         if (!readStream) {
-          console.log("ERROR: Missing Read Stream while importing
+          console.log("ERROR: Missing Read Stream while importing.");
           reject("Missing Read Strea Resource.");
           return;
         }
+
         readStream.on("end", function () {
           filePaths.push(entry.fileName);
           zipFile.readEntry();
         });
-        const newKey = `${uniquePath}/${
-
-        dataMap =
+        const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place
+
+        dataMap = prepareDataDirMap({
          map: dataMap,
          filePath: entry.fileName,
          newKey
        });
+
        const {
          streamPassThrough,
          streamPassThroughUploadPromise: promise
        } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
+
        streamPipeline(readStream, streamPassThrough).then(() => {
          fileUploadPromises.push(promise);
        }).catch(error => {