@webiny/api-page-builder-import-export 0.0.0-unstable.d16f688daf → 0.0.0-unstable.d65ec29d44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.js +2 -6
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.js +11 -18
- package/export/combine/blocksHandler.js.map +1 -1
- package/export/combine/index.js +14 -9
- package/export/combine/index.js.map +1 -1
- package/export/combine/pagesHandler.js +11 -18
- package/export/combine/pagesHandler.js.map +1 -1
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +99 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.js +18 -32
- package/export/process/blocksHandler.js.map +1 -1
- package/export/process/index.js +14 -9
- package/export/process/index.js.map +1 -1
- package/export/process/pagesHandler.js +19 -34
- package/export/process/pagesHandler.js.map +1 -1
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +166 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/export/s3Stream.js +1 -20
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +7 -1
- package/export/utils.js +57 -32
- package/export/utils.js.map +1 -1
- package/export/zipper.js +29 -41
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.js +19 -37
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/importExportTasks.crud.js +9 -57
- package/graphql/crud/importExportTasks.crud.js.map +1 -1
- package/graphql/crud/pages.crud.js +25 -41
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +124 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.js +2 -7
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.js +1 -6
- package/graphql/graphql/blocks.gql.js.map +1 -1
- package/graphql/graphql/importExportTasks.gql.js +1 -6
- package/graphql/graphql/importExportTasks.gql.js.map +1 -1
- package/graphql/graphql/pages.gql.js +2 -7
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +52 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +2 -6
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.js +0 -5
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +17 -0
- package/graphql/types.js.map +1 -1
- package/import/create/blocksHandler.js +9 -19
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/index.d.ts +2 -1
- package/import/create/index.js +14 -9
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.js +12 -20
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +98 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocksHandler.js +20 -26
- package/import/process/blocksHandler.js.map +1 -1
- package/import/process/index.d.ts +1 -0
- package/import/process/index.js +14 -9
- package/import/process/index.js.map +1 -1
- package/import/process/pagesHandler.js +26 -29
- package/import/process/pagesHandler.js.map +1 -1
- package/import/process/templatesHandler.d.ts +3 -0
- package/import/process/templatesHandler.js +169 -0
- package/import/process/templatesHandler.js.map +1 -0
- package/import/utils.d.ts +8 -1
- package/import/utils.js +103 -137
- package/import/utils.js.map +1 -1
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +24 -24
- package/types.js +0 -5
- package/types.js.map +1 -1
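
The headline change in this diff is page-template support for import/export: templatesHandler modules appear under export/combine, export/process, import/create and import/process, together with graphql/crud/templates.crud.js and graphql/graphql/templates.gql.js, mirroring the existing pages and blocks flows. In package/import/utils.js below, this surfaces as a new importTemplate export alongside importPage and importBlock. As a rough sketch of how a process handler could invoke it (the payload shape here is an assumption for illustration, not taken from this diff):

// Hypothetical caller: `context` and `fileUploadsData` are stand-ins for the
// values the import task provides to pagesHandler/blocksHandler today.
const { importTemplate } = require("@webiny/api-page-builder-import-export/import/utils");

async function processTemplateImport(context, payload) {
    // Assumed payload shape: { templateKey, fileUploadsData }.
    const { templateKey, fileUploadsData } = payload;
    // Downloads the extracted template data file from S3, uploads its assets,
    // rewrites file references, and returns the parsed template object.
    return importTemplate({ templateKey, context, fileUploadsData });
}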
package/import/utils.js
CHANGED
@@ -1,53 +1,34 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.importBlock = importBlock;
 exports.importPage = importPage;
+exports.importTemplate = importTemplate;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
 exports.uploadAssets = void 0;
-
 var _uniqid = _interopRequireDefault(require("uniqid"));
-
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
-
 var _fs = require("fs");
-
 var _fsExtra = require("fs-extra");
-
 var _util = require("util");
-
 var _stream = require("stream");
-
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
-
 var _path = _interopRequireDefault(require("path"));
-
 var _yauzl = _interopRequireDefault(require("yauzl"));
-
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
-
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
-
 var _error = _interopRequireDefault(require("@webiny/error"));
-
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
-
 var _types = require("../types");
-
 var _s3Stream = require("../export/s3Stream");
-
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
-
 const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
-
 const FILES_COUNT_IN_EACH_BATCH = 15;
-
 function updateImageInPageSettings(params) {
   const {
     settings,
@@ -56,22 +37,16 @@ function updateImageInPageSettings(params) {
   } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
-
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
     var _settings$general, _settings$general$ima;
-
     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
-
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
     var _settings$social, _settings$social$imag;
-
     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
-
   return newSettings;
 }
-
 function updateBlockPreviewImage(params) {
   const {
     file,
@@ -83,7 +58,6 @@ function updateBlockPreviewImage(params) {
   newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
   return newFile;
 }
-
 function updateFilesInData({
   data,
   fileIdToKeyMap,
@@ -92,9 +66,8 @@ function updateFilesInData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
-
+  }
+  // Recursively call function if data is array
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
@@ -104,16 +77,12 @@ function updateFilesInData({
         srcPrefix
       });
     }
-
     return;
-  }
-
-
+  }
+  // Main logic
   const tuple = Object.entries(data);
-
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
-
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
@@ -127,51 +96,49 @@ function updateFilesInData({
     }
   }
 }
-
 const uploadAssets = async params => {
   const {
     context,
     filesData,
     fileUploadsData
-  } = params;
-
+  } = params;
+  // Save uploaded file key against static id for later use.
   const fileIdToKeyMap = new Map();
   /**
    * This function contains logic of file download from S3.
    * Current we're not mocking zip file download from S3 in tests at the moment.
    * So, we're manually mocking it in case of test just by returning an empty object.
   */
-
   if (process.env.NODE_ENV === "test") {
     return {
       fileIdToKeyMap
     };
-  }
-
-
-  const fileKeyToFileMap = new Map(); // Initialize maps.
+  }
 
+  // Save files meta data against old key for later use.
+  const fileKeyToFileMap = new Map();
+  // Initialize maps.
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file);
 
+    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
-
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  });
 
+  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
     const file = fileKeyToFileMap.get(getOldFileKey(newKey));
-
     if (!file) {
       return null;
-    }
-
+    }
 
+    // Update the file map with newly uploaded file.
     fileIdToKeyMap.set(file.id, newKey);
     return {
       key: newKey,
@@ -182,10 +149,9 @@ const uploadAssets = async params => {
       tags: file.tags
     };
   }).filter(Boolean);
-  const createFilesPromises = [];
-
+  const createFilesPromises = [];
+  // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
-
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -195,49 +161,44 @@ const uploadAssets = async params => {
    */
    context.fileManager.files.createFilesInBatch(createFilesInputChunk));
  }
-
  await Promise.all(createFilesPromises);
  return {
    fileIdToKeyMap
  };
 };
-
 exports.uploadAssets = uploadAssets;
-
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log;
 
+  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
-
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
-
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-
-
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`);
+  // Download and save page data file in disk.
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  });
 
+  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
 
+  // Only update page data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
     } = await uploadAssets({
       context,
-
       /**
        * TODO @ts-refactor @ashutosh figure out correct types.
       */
@@ -260,41 +221,38 @@ async function importPage({
       srcPrefix
     });
   }
-
   log("Removing Directory for page...");
   await (0, _downloadInstallFiles.deleteFile)(pageKey);
   log(`Remove page contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return page;
 }
-
 async function importBlock({
   blockKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log;
 
+  // Making Directory for block in which we're going to extract the block data file.
   const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
-
   (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
-
   const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
   const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
-
-
-
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`);
+  // Download and save block data file in disk.
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  });
 
+  // Load the block data file from disk.
   log(`Load file ${blockDataFileKey}`);
   const {
     block,
     files
-  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
 
+  // Only update block data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload block assets.
     const {
@@ -319,47 +277,91 @@ async function importBlock({
       srcPrefix
     });
   }
-
   log("Removing Directory for block...");
   await (0, _downloadInstallFiles.deleteFile)(blockKey);
   log(`Remove block contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return block;
 }
+async function importTemplate({
+  templateKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log;
 
+  // Making Directory for template in which we're going to extract the template data file.
+  const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
+  (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+  const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
+  // Download and save template data file in disk.
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  });
+
+  // Load the template data file from disk.
+  log(`Load file ${templateDataFileKey}`);
+  const {
+    template,
+    files
+  } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
+
+  // Only update template data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload template assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: template.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+  log("Removing Directory for template...");
+  await (0, _downloadInstallFiles.deleteFile)(templateKey);
+  log(`Remove template contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return template;
+}
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = [];
+  // Upload all assets.
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
-
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
-
+    const tempNewKey = oldKeyToNewKeyMap[oldKey];
 
+    // Read file.
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
+    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
-
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
-
      const {
        streamPassThrough,
        streamPassThroughUploadPromise: promise
      } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
-
      readStream.pipe(streamPassThrough);
      promises.push(promise);
      console.log(`Successfully queued file "${newKey}"`);
    }
  }
-
  return Promise.all(promises);
 }
-
 function getOldFileKey(key) {
  /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -371,13 +373,10 @@ function getOldFileKey(key) {
    return key;
  }
 }
-
 const FILE_CONTENT_TYPE = "application/octet-stream";
-
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
-
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
  * @param zipFileUrl
@@ -386,82 +385,70 @@ function getFileNameWithoutExt(fileName) {
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
   const importDataList = [];
-
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
-
   const response = await (0, _nodeFetch.default)(zipFileUrl);
-
   if (!response.ok) {
     throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
-
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("IMPORTS/");
-
+  const uniquePath = (0, _uniqid.default)("IMPORTS/");
+  // Read export file and download it in the disk
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
-
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
 
+  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
 
+  // Extract each page/block zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
     importDataList.push(dataMap);
   }
-
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
   return importDataList;
 }
-
 const ASSETS_DIR_NAME = "/assets";
-
 function prepareDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
-
   const fileName = _path.default.basename(filePath);
   /*
    * We want to use dot (.) as part of object key rather than creating nested object(s).
    * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
-
-
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
-
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
-
   return map;
 }
-
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
-
   const response = await _s3Stream.s3Stream.listObject(key);
   const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
+}
 
+// export const zeroPad = version => `${version}`.padStart(5, "0");
 
 function initialStats(total) {
   return {
@@ -472,17 +459,13 @@ function initialStats(total) {
     total
   };
 }
-
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
     const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
-
-
-
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport);
+    // Make sure DIR exists
     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
-
     _yauzl.default.open(exportFileZipPath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -491,26 +474,22 @@ function extractZipToDisk(exportFileZipPath) {
         reject(err);
         return;
       }
-
       if (!zipFile) {
         console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
         reject("Missing Zip File Resource.");
         return;
       }
-
       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
           console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
           reject(err);
         }
-
         resolve(zipFilePaths);
       });
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
         console.info(`Processing entry: "${entry.fileName}"`);
-
         if (/\/$/.test(entry.fileName)) {
           // Directory file names end with '/'.
           // Note that entries for directories themselves are optional.
@@ -524,15 +503,12 @@ function extractZipToDisk(exportFileZipPath) {
             reject(err);
             return;
           }
-
           if (!readStream) {
             console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
             reject("Missing Read Stream Resource.");
             return;
           }
-
           const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
-
           readStream.on("end", function () {
             zipFilePaths.push(filePath);
             zipFile.readEntry();
@@ -546,7 +522,6 @@ function extractZipToDisk(exportFileZipPath) {
     });
   });
 }
-
 function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
@@ -557,7 +532,6 @@ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
       assets: {},
       data: ""
     };
-
     _yauzl.default.open(dataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -566,20 +540,17 @@
        reject(err);
        return;
      }
-
      if (!zipFile) {
        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
        reject("Missing Zip File Resource.");
        return;
      }
-
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
          reject(err);
        }
-
        Promise.all(fileUploadPromises).then(res => {
          res.forEach(r => {
            console.info("Done uploading... ", r);
@@ -590,7 +561,6 @@
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
         console.info(`Processing entry: "${entry.fileName}"`);
-
         if (/\/$/.test(entry.fileName)) {
           // Directory file names end with '/'.
           // Note that entries for directories themselves are optional.
@@ -604,30 +574,26 @@
           reject(err);
           return;
         }
-
         if (!readStream) {
           console.log("ERROR: Missing Read Stream while importing.");
           reject("Missing Read Strea Resource.");
           return;
         }
-
         readStream.on("end", function () {
           filePaths.push(entry.fileName);
           zipFile.readEntry();
         });
-        const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
-
+        const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
+        // Modify in place
        dataMap = prepareDataDirMap({
          map: dataMap,
          filePath: entry.fileName,
          newKey
        });
-
        const {
          streamPassThrough,
          streamPassThroughUploadPromise: promise
        } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
-
        streamPipeline(readStream, streamPassThrough).then(() => {
          fileUploadPromises.push(promise);
        }).catch(error => {