@webiny/api-page-builder-import-export 0.0.0-unstable.d4f203fa97 → 0.0.0-unstable.d7f521b032
This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/client.d.ts +2 -2
- package/client.js +2 -6
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +99 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/{exportPages → export}/combine/index.d.ts +3 -2
- package/export/combine/index.js +35 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +99 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +99 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +162 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/{exportPages → export}/process/index.d.ts +4 -6
- package/export/process/index.js +32 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +189 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +166 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.js +1 -20
- package/{exportPages → export}/s3Stream.js.map +1 -1
- package/export/utils.d.ts +22 -0
- package/export/utils.js +160 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +6 -5
- package/{exportPages → export}/zipper.js +37 -48
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +137 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +57 -105
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +2 -2
- package/graphql/crud/pages.crud.js +39 -53
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +124 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +4 -7
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +52 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +18 -23
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +4 -15
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +52 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -1
- package/graphql/graphql.js +4 -6
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +0 -5
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +60 -23
- package/graphql/types.js.map +1 -1
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +100 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/{importPages → import}/create/index.d.ts +7 -5
- package/import/create/index.js +35 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +102 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +98 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +169 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/{importPages → import}/process/index.d.ts +5 -3
- package/import/process/index.js +32 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +177 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/import/process/templatesHandler.d.ts +3 -0
- package/import/process/templatesHandler.js +169 -0
- package/import/process/templatesHandler.js.map +1 -0
- package/import/utils.d.ts +56 -0
- package/{importPages → import}/utils.js +193 -156
- package/import/utils.js.map +1 -0
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -1
- package/package.json +24 -24
- package/types.d.ts +62 -65
- package/types.js +17 -22
- package/types.js.map +1 -1
- package/exportPages/combine/index.js +0 -114
- package/exportPages/combine/index.js.map +0 -1
- package/exportPages/process/index.js +0 -208
- package/exportPages/process/index.js.map +0 -1
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -113
- package/exportPages/utils.js.map +0 -1
- package/exportPages/zipper.js.map +0 -1
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
- package/importPages/create/index.js +0 -118
- package/importPages/create/index.js.map +0 -1
- package/importPages/process/index.js +0 -185
- package/importPages/process/index.js.map +0 -1
- package/importPages/utils.d.ts +0 -50
- package/importPages/utils.js.map +0 -1
- /package/{exportPages → export}/s3Stream.d.ts +0 -0
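
Taken together, the file list shows the package generalizing from page-only import/export to pages, blocks, and templates: `exportPages/*` and `importPages/*` move to `export/*` and `import/*`, the `pageImportExportTasks` CRUD/GraphQL modules become `importExportTasks`, and page-specific names lose their `Page` prefix (`uploadPageAssets` → `uploadAssets`, `PageImportExportTaskStatus` → `ImportExportTaskStatus`). As a rough sketch of what this means for code that deep-imports the compiled modules (illustrative paths and usage inferred from the file list and the diff below, not taken from package documentation):

```js
// Before (0.0.0-unstable.d4f203fa97) — page-scoped paths and names:
// const { s3Stream } = require("@webiny/api-page-builder-import-export/exportPages/s3Stream");
// const { uploadPageAssets, importPage } = require("@webiny/api-page-builder-import-export/importPages/utils");

// After (0.0.0-unstable.d7f521b032) — entity-agnostic paths; blocks and templates join pages:
const { s3Stream } = require("@webiny/api-page-builder-import-export/export/s3Stream");
const {
    uploadAssets,   // renamed from uploadPageAssets
    importPage,
    importBlock,    // new in this version
    importTemplate  // new in this version
} = require("@webiny/api-page-builder-import-export/import/utils");
```

The diff shown below is for `package/{importPages → import}/utils.js`, where these renames and the new `importBlock`/`importTemplate` handlers land in the compiled output.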
package/{importPages → import}/utils.js

@@ -1,52 +1,34 @@
 "use strict";

 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
+exports.importTemplate = importTemplate;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.uploadPageAssets = void 0;
-
+exports.uploadAssets = void 0;
 var _uniqid = _interopRequireDefault(require("uniqid"));
-
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
-
 var _fs = require("fs");
-
 var _fsExtra = require("fs-extra");
-
 var _util = require("util");
-
 var _stream = require("stream");
-
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
-
 var _path = _interopRequireDefault(require("path"));
-
 var _yauzl = _interopRequireDefault(require("yauzl"));
-
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
-
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
-
 var _error = _interopRequireDefault(require("@webiny/error"));
-
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
-
 var _types = require("../types");
-
-var _s3Stream = require("../exportPages/s3Stream");
-
+var _s3Stream = require("../export/s3Stream");
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
-
-const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
-
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
 const FILES_COUNT_IN_EACH_BATCH = 15;
-
 function updateImageInPageSettings(params) {
   const {
     settings,
@@ -55,23 +37,28 @@ function updateImageInPageSettings(params) {
   } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
-
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
     var _settings$general, _settings$general$ima;
-
     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
-
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
     var _settings$social, _settings$social$imag;
-
     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
-
   return newSettings;
 }
-
-function updateFilesInPageData({
+function updateBlockPreviewImage(params) {
+  const {
+    file,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
+  const newFile = file;
+  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+  newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+  return newFile;
+}
+function updateFilesInData({
   data,
   fileIdToKeyMap,
   srcPrefix
@@ -79,34 +66,29 @@ function updateFilesInPageData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
-
+  }
+  // Recursively call function if data is array
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
-      updateFilesInPageData({
+      updateFilesInData({
         data: element,
         fileIdToKeyMap,
         srcPrefix
       });
     }
-
     return;
-  }
-
-
+  }
+  // Main logic
   const tuple = Object.entries(data);
-
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
-
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
     } else {
-      updateFilesInPageData({
+      updateFilesInData({
        data: value,
        srcPrefix,
        fileIdToKeyMap
@@ -114,52 +96,49 @@ function updateFilesInPageData({
     }
   }
 }
-
-const uploadPageAssets = async params => {
+const uploadAssets = async params => {
   const {
     context,
     filesData,
     fileUploadsData
-  } = params;
-
+  } = params;
+  // Save uploaded file key against static id for later use.
   const fileIdToKeyMap = new Map();
   /**
   * This function contains logic of file download from S3.
   * Current we're not mocking zip file download from S3 in tests at the moment.
   * So, we're manually mocking it in case of test just by returning an empty object.
   */
-
   if (process.env.NODE_ENV === "test") {
     return {
       fileIdToKeyMap
     };
   }

-
-
-
-
+  // Save files meta data against old key for later use.
+  const fileKeyToFileMap = new Map();
+  // Initialize maps.
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file);

+    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
-
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  });

+  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
     const file = fileKeyToFileMap.get(getOldFileKey(newKey));
-
     if (!file) {
       return null;
-    }
-
+    }

+    // Update the file map with newly uploaded file.
     fileIdToKeyMap.set(file.id, newKey);
     return {
       key: newKey,
@@ -170,10 +149,9 @@ const uploadPageAssets = async params => {
       tags: file.tags
     };
   }).filter(Boolean);
-  const createFilesPromises = [];
-
+  const createFilesPromises = [];
+  // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
-
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -183,49 +161,44 @@ const uploadPageAssets = async params => {
     */
     context.fileManager.files.createFilesInBatch(createFilesInputChunk));
   }
-
   await Promise.all(createFilesPromises);
   return {
     fileIdToKeyMap
   };
 };
-
-exports.uploadPageAssets = uploadPageAssets;
-
+exports.uploadAssets = uploadAssets;
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log;

+  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
-
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
-
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-
-
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`);
+  // Download and save page data file in disk.
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  });

+  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);

+  // Only update page data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
-    } = await uploadPageAssets({
+    } = await uploadAssets({
       context,
-
       /**
       * TODO @ts-refactor @ashutosh figure out correct types.
       */
@@ -237,7 +210,7 @@ async function importPage({
     const {
       srcPrefix = ""
     } = settings || {};
-    updateFilesInPageData({
+    updateFilesInData({
       data: page.content || {},
       fileIdToKeyMap,
       srcPrefix
@@ -248,47 +221,147 @@ async function importPage({
       srcPrefix
     });
   }
-
   log("Removing Directory for page...");
   await (0, _downloadInstallFiles.deleteFile)(pageKey);
   log(`Remove page contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return page;
 }
+async function importBlock({
+  blockKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log;

+  // Making Directory for block in which we're going to extract the block data file.
+  const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+  (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+  const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`);
+  // Download and save block data file in disk.
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  });
+
+  // Load the block data file from disk.
+  log(`Load file ${blockDataFileKey}`);
+  const {
+    block,
+    files
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
+
+  // Only update block data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload block assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: block.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+    block.preview = updateBlockPreviewImage({
+      file: block.preview || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+  log("Removing Directory for block...");
+  await (0, _downloadInstallFiles.deleteFile)(blockKey);
+  log(`Remove block contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return block;
+}
+async function importTemplate({
+  templateKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log;
+
+  // Making Directory for template in which we're going to extract the template data file.
+  const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
+  (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+  const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
+  // Download and save template data file in disk.
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  });
+
+  // Load the template data file from disk.
+  log(`Load file ${templateDataFileKey}`);
+  const {
+    template,
+    files
+  } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
+
+  // Only update template data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload template assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: template.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+  log("Removing Directory for template...");
+  await (0, _downloadInstallFiles.deleteFile)(templateKey);
+  log(`Remove template contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return template;
+}
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = [];
+  // Upload all assets.
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
-
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
-
+    const tempNewKey = oldKeyToNewKeyMap[oldKey];

+    // Read file.
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
+    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
-
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
-
       const {
         streamPassThrough,
         streamPassThroughUploadPromise: promise
       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
-
       readStream.pipe(streamPassThrough);
       promises.push(promise);
       console.log(`Successfully queued file "${newKey}"`);
     }
   }
-
   return Promise.all(promises);
 }
-
 function getOldFileKey(key) {
   /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -300,118 +373,99 @@ function getOldFileKey(key) {
     return key;
   }
 }
-
 const FILE_CONTENT_TYPE = "application/octet-stream";
-
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
-
 /**
 * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
 * @param zipFileUrl
-* @return
+* @return ImportData S3 file keys for all uploaded assets group by page/block.
 */
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
-  const
-
+  const importDataList = [];
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
-
   const response = await (0, _nodeFetch.default)(zipFileUrl);
-
   if (!response.ok) {
     throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
-
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("
-
+  const uniquePath = (0, _uniqid.default)("IMPORTS/");
+  // Read export file and download it in the disk
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
-
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);

+  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);

+  // Extract each page/block zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+    importDataList.push(dataMap);
   }
-
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-  return
+  return importDataList;
 }
-
 const ASSETS_DIR_NAME = "/assets";
-
-function preparePageDataDirMap({
+function prepareDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
-
   const fileName = _path.default.basename(filePath);
   /*
   * We want to use dot (.) as part of object key rather than creating nested object(s).
   * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
-
-
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
-
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
-
   return map;
 }
-
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
-
   const response = await _s3Stream.s3Stream.listObject(key);
   const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
+}

+// export const zeroPad = version => `${version}`.padStart(5, "0");

 function initialStats(total) {
   return {
-    [_types.PageImportExportTaskStatus.PENDING]: total,
-    [_types.PageImportExportTaskStatus.PROCESSING]: 0,
-    [_types.PageImportExportTaskStatus.COMPLETED]: 0,
-    [_types.PageImportExportTaskStatus.FAILED]: 0,
+    [_types.ImportExportTaskStatus.PENDING]: total,
+    [_types.ImportExportTaskStatus.PROCESSING]: 0,
+    [_types.ImportExportTaskStatus.COMPLETED]: 0,
+    [_types.ImportExportTaskStatus.FAILED]: 0,
     total
   };
 }
-
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
-    const pageZipFilePaths = [];
+    const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
-
-
-
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport);
+    // Make sure DIR exists
    (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
-
    _yauzl.default.open(exportFileZipPath, {
      lazyEntries: true
    }, function (err, zipFile) {
@@ -420,26 +474,22 @@ function extractZipToDisk(exportFileZipPath) {
        reject(err);
        return;
      }
-
      if (!zipFile) {
        console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
        reject("Missing Zip File Resource.");
        return;
      }
-
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
          console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
          reject(err);
        }
-
-        resolve(pageZipFilePaths);
+        resolve(zipFilePaths);
      });
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
-
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -453,17 +503,14 @@ function extractZipToDisk(exportFileZipPath) {
            reject(err);
            return;
          }
-
          if (!readStream) {
            console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
            reject("Missing Read Stream Resource.");
            return;
          }
-
          const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
-
          readStream.on("end", function () {
-            pageZipFilePaths.push(filePath);
+            zipFilePaths.push(filePath);
            zipFile.readEntry();
          });
          streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
@@ -475,40 +522,35 @@ function extractZipToDisk(exportFileZipPath) {
    });
  });
 }
-
-function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
  return new Promise((resolve, reject) => {
    const filePaths = [];
    const fileUploadPromises = [];
-    const
+    const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
    let dataMap = {
-      key:
+      key: uniqueKey,
      assets: {},
      data: ""
    };
-
-    _yauzl.default.open(pageDataZipFilePath, {
+    _yauzl.default.open(dataZipFilePath, {
      lazyEntries: true
    }, function (err, zipFile) {
      if (err) {
-        console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
+        console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
        reject(err);
        return;
      }
-
      if (!zipFile) {
-        console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
+        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
        reject("Missing Zip File Resource.");
        return;
      }
-
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
-          console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
+          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
          reject(err);
        }
-
        Promise.all(fileUploadPromises).then(res => {
          res.forEach(r => {
            console.info("Done uploading... ", r);
@@ -519,7 +561,6 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
-
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -533,30 +574,26 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
          reject(err);
          return;
        }
-
        if (!readStream) {
-          console.log("ERROR: Missing Read Stream while importing
+          console.log("ERROR: Missing Read Stream while importing.");
          reject("Missing Read Strea Resource.");
          return;
        }
-
        readStream.on("end", function () {
          filePaths.push(entry.fileName);
          zipFile.readEntry();
        });
-        const newKey = `${uniquePath}/${
-
-        dataMap = preparePageDataDirMap({
+        const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
+        // Modify in place
+        dataMap = prepareDataDirMap({
          map: dataMap,
          filePath: entry.fileName,
          newKey
        });
-
        const {
          streamPassThrough,
          streamPassThroughUploadPromise: promise
        } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
-
        streamPipeline(readStream, streamPassThrough).then(() => {
          fileUploadPromises.push(promise);
        }).catch(error => {