@webiny/api-page-builder-import-export 0.0.0-mt-3 → 0.0.0-unstable.2af142b57e
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +8 -0
- package/client.js +43 -0
- package/client.js.map +1 -0
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +99 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/export/combine/index.d.ts +16 -0
- package/export/combine/index.js +35 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +99 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +99 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +162 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/export/process/index.d.ts +20 -0
- package/export/process/index.js +32 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +189 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +166 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.d.ts +2 -0
- package/{exportPages → export}/s3Stream.js +12 -24
- package/export/s3Stream.js.map +1 -0
- package/export/utils.d.ts +22 -0
- package/export/utils.js +160 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +7 -5
- package/{exportPages → export}/zipper.js +39 -52
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +137 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +87 -118
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +3 -3
- package/graphql/crud/pages.crud.js +64 -70
- package/graphql/crud/pages.crud.js.map +1 -0
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +124 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +5 -8
- package/graphql/crud.js.map +1 -0
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +52 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +19 -24
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +5 -14
- package/graphql/graphql/pages.gql.js.map +1 -0
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +52 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -0
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +5 -7
- package/graphql/graphql.js.map +1 -0
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +1 -6
- package/graphql/index.js.map +1 -0
- package/graphql/types.d.ts +63 -27
- package/graphql/types.js.map +1 -0
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +100 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/import/create/index.d.ts +24 -0
- package/import/create/index.js +35 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +102 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +98 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +169 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/import/process/index.d.ts +20 -0
- package/import/process/index.js +32 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +177 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/import/process/templatesHandler.d.ts +3 -0
- package/import/process/templatesHandler.js +169 -0
- package/import/process/templatesHandler.js.map +1 -0
- package/import/utils.d.ts +56 -0
- package/{importPages → import}/utils.js +262 -194
- package/import/utils.js.map +1 -0
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -0
- package/package.json +36 -34
- package/types.d.ts +70 -72
- package/types.js +17 -22
- package/types.js.map +1 -0
- package/exportPages/combine/index.d.ts +0 -19
- package/exportPages/combine/index.js +0 -88
- package/exportPages/process/index.d.ts +0 -26
- package/exportPages/process/index.js +0 -204
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -113
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/importPages/client.d.ts +0 -7
- package/importPages/client.js +0 -40
- package/importPages/create/index.d.ts +0 -27
- package/importPages/create/index.js +0 -109
- package/importPages/process/index.d.ts +0 -25
- package/importPages/process/index.js +0 -183
- package/importPages/utils.d.ts +0 -43
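As the file list shows, the page-only "exportPages"/"importPages" modules are replaced by shared "export"/"import" trees with per-entity handlers (pagesHandler, blocksHandler, templatesHandler), and "pageImportExportTasks" becomes "importExportTasks". Code that deep-imports from this package would move accordingly; a minimal sketch, assuming the published deep-import paths mirror the file layout above:

  // Old layout (0.0.0-mt-3), page-only modules:
  // const { s3Stream } = require("@webiny/api-page-builder-import-export/exportPages/s3Stream");

  // New layout (0.0.0-unstable.2af142b57e), shared export/import modules:
  const { s3Stream } = require("@webiny/api-page-builder-import-export/export/s3Stream");

The diff body below covers package/{importPages → import}/utils.js, where uploadPageAssets becomes the entity-agnostic uploadAssets and importBlock/importTemplate are added alongside importPage. Removed lines whose content was not preserved in the source rendering are shown as bare "-" lines or truncated fragments.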
--- package/importPages/utils.js
+++ package/import/utils.js
@@ -1,73 +1,64 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
+exports.importTemplate = importTemplate;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.uploadPageAssets = void 0;
-
+exports.uploadAssets = void 0;
 var _uniqid = _interopRequireDefault(require("uniqid"));
-
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
-
 var _fs = require("fs");
-
 var _fsExtra = require("fs-extra");
-
 var _util = require("util");
-
 var _stream = require("stream");
-
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
-
 var _path = _interopRequireDefault(require("path"));
-
 var _yauzl = _interopRequireDefault(require("yauzl"));
-
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
-
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
-
 var _error = _interopRequireDefault(require("@webiny/error"));
-
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
-
 var _types = require("../types");
-
-var _s3Stream = require("../exportPages/s3Stream");
-
+var _s3Stream = require("../export/s3Stream");
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
-
-const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
-
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
 const FILES_COUNT_IN_EACH_BATCH = 15;
-
-
-function updateImageInPageSettings({
-  settings,
-  fileIdToKeyMap,
-  srcPrefix
-}) {
+function updateImageInPageSettings(params) {
+  const {
+    settings,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
-
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
-
+    var _settings$general, _settings$general$ima;
+    newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
-
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
-
+    var _settings$social, _settings$social$imag;
+    newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
-
-  return settings;
+  return newSettings;
 }
-
-function updateFilesInPageData({
+function updateBlockPreviewImage(params) {
+  const {
+    file,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
+  const newFile = file;
+  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+  newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+  return newFile;
+}
+function updateFilesInData({
   data,
   fileIdToKeyMap,
   srcPrefix
@@ -75,34 +66,29 @@ function updateFilesInPageData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
-
+  }
+  // Recursively call function if data is array
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
-      updateFilesInPageData({
+      updateFilesInData({
         data: element,
         fileIdToKeyMap,
         srcPrefix
       });
     }
-
     return;
-  }
-
-
+  }
+  // Main logic
   const tuple = Object.entries(data);
-
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
-
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
     } else {
-      updateFilesInPageData({
+      updateFilesInData({
         data: value,
         srcPrefix,
         fileIdToKeyMap
@@ -110,43 +96,49 @@ function updateFilesInPageData({
     }
   }
 }
-
-const uploadPageAssets = async ({
-  context,
-  filesData,
-  fileUploadsData
-}) => {
+const uploadAssets = async params => {
+  const {
+    context,
+    filesData,
+    fileUploadsData
+  } = params;
+  // Save uploaded file key against static id for later use.
+  const fileIdToKeyMap = new Map();
   /**
    * This function contains logic of file download from S3.
    * Current we're not mocking zip file download from S3 in tests at the moment.
    * So, we're manually mocking it in case of test just by returning an empty object.
   */
   if (process.env.NODE_ENV === "test") {
-    return {
+    return {
+      fileIdToKeyMap
+    };
   }
 
-
-
-
-
-  const fileKeyToFileMap = new Map(); // Initialize maps.
-
+  // Save files meta data against old key for later use.
+  const fileKeyToFileMap = new Map();
+  // Initialize maps.
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file);
 
+    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
-
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  });
 
+  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
-    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+    const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+    if (!file) {
+      return null;
+    }
 
+    // Update the file map with newly uploaded file.
     fileIdToKeyMap.set(file.id, newKey);
     return {
       key: newKey,
@@ -156,11 +148,10 @@ const uploadPageAssets = async ({
       meta: file.meta,
       tags: file.tags
     };
-  });
-  const createFilesPromises = [];
-
+  }).filter(Boolean);
+  const createFilesPromises = [];
+  // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
-
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -170,114 +161,207 @@ const uploadPageAssets = async ({
     */
     context.fileManager.files.createFilesInBatch(createFilesInputChunk));
   }
-
   await Promise.all(createFilesPromises);
   return {
     fileIdToKeyMap
   };
 };
-
-exports.uploadPageAssets = uploadPageAssets;
-
+exports.uploadAssets = uploadAssets;
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log;
 
+  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
-
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
-
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-
-
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`);
+  // Download and save page data file in disk.
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  });
 
+  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
 
-  if
+  // Only update page data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
-    } = await uploadPageAssets({
+    } = await uploadAssets({
       context,
+      /**
+       * TODO @ts-refactor @ashutosh figure out correct types.
+       */
+      // @ts-ignore
       filesData: files,
       fileUploadsData
     });
+    const settings = await context.fileManager.settings.getSettings();
     const {
-      srcPrefix
-    } =
-    updateFilesInPageData({
-      data: page.content,
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: page.content || {},
      fileIdToKeyMap,
      srcPrefix
    });
    page.settings = updateImageInPageSettings({
-      settings: page.settings,
+      settings: page.settings || {},
      fileIdToKeyMap,
      srcPrefix
    });
  }
-
  log("Removing Directory for page...");
  await (0, _downloadInstallFiles.deleteFile)(pageKey);
  log(`Remove page contents from S3...`);
  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
  return page;
 }
+async function importBlock({
+  blockKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log;
+
+  // Making Directory for block in which we're going to extract the block data file.
+  const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+  (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+  const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`);
+  // Download and save block data file in disk.
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  });
+
+  // Load the block data file from disk.
+  log(`Load file ${blockDataFileKey}`);
+  const {
+    block,
+    files
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
+
+  // Only update block data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload block assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: block.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+    block.preview = updateBlockPreviewImage({
+      file: block.preview || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+  log("Removing Directory for block...");
+  await (0, _downloadInstallFiles.deleteFile)(blockKey);
+  log(`Remove block contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return block;
+}
+async function importTemplate({
+  templateKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log;
 
+  // Making Directory for template in which we're going to extract the template data file.
+  const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
+  (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+  const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
+  // Download and save template data file in disk.
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  });
+
+  // Load the template data file from disk.
+  log(`Load file ${templateDataFileKey}`);
+  const {
+    template,
+    files
+  } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
+
+  // Only update template data if there are files.
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload template assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: template.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+  log("Removing Directory for template...");
+  await (0, _downloadInstallFiles.deleteFile)(templateKey);
+  log(`Remove template contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return template;
+}
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = [];
+  // Upload all assets.
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
-
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
-
+    const tempNewKey = oldKeyToNewKeyMap[oldKey];
 
+    // Read file.
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
+    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
-
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
-
       const {
         streamPassThrough,
         streamPassThroughUploadPromise: promise
       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
-
       readStream.pipe(streamPassThrough);
       promises.push(promise);
       console.log(`Successfully queued file "${newKey}"`);
     }
   }
-
   return Promise.all(promises);
 }
-
-async function getObjectMetaFromS3(Key) {
-  const meta = await _s3Stream.s3Stream.getObjectHead(Key);
-
-  if (meta.ContentType !== ZIP_CONTENT_TYPE) {
-    throw new _error.default(`Unsupported file type: "${meta.ContentType}"`, "UNSUPPORTED_FILE");
-  }
-}
-
 function getOldFileKey(key) {
   /*
    * Because we know the naming convention, we can extract the old key from new key.
@@ -289,151 +373,123 @@ function getOldFileKey(key) {
     return key;
   }
 }
-
 const FILE_CONTENT_TYPE = "application/octet-stream";
-
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
-
 /**
 * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
- * @param
- * @return
+ * @param zipFileUrl
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
 */
-async function readExtractAndUploadZipFileContents(
+async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
-  const
-
-
-  if (
-
-
-    if (!response.ok) {
-      throw new _error.default(`Unable to downloading file: "${zipFileKey}"`, response.statusText);
-    }
-
-    readStream = response.body;
-  } else {
-    // We're first retrieving object's meta data, just to check whether the file is available at the given Key
-    await getObjectMetaFromS3(zipFileKey);
-    readStream = _s3Stream.s3Stream.readStream(zipFileKey);
+  const importDataList = [];
+  const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
+  const response = await (0, _nodeFetch.default)(zipFileUrl);
+  if (!response.ok) {
+    throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
-
-  const uniquePath = (0, _uniqid.default)("
-
-  const zipFileName = _path.default.basename(zipFileKey); // Read export file and download it in the disk
-
-
+  const readStream = response.body;
+  const uniquePath = (0, _uniqid.default)("IMPORTS/");
+  // Read export file and download it in the disk
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
-
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
 
+  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
-  log(`Removing ZIP file "${
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
 
+  // Extract each page/block zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+    importDataList.push(dataMap);
   }
-
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-  return
+  return importDataList;
 }
-
 const ASSETS_DIR_NAME = "/assets";
-
-function preparePageDataDirMap({
+function prepareDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
-
   const fileName = _path.default.basename(filePath);
   /*
   * We want to use dot (.) as part of object key rather than creating nested object(s).
   * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
-
-
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
-
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
-
   return map;
 }
-
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
-
   const response = await _s3Stream.s3Stream.listObject(key);
-  const keys = response.Contents.map(c => c.Key);
+  const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
 }
 
-const zeroPad = version => `${version}`.padStart(5, "0");
-
-exports.zeroPad = zeroPad;
+// export const zeroPad = version => `${version}`.padStart(5, "0");
 
 function initialStats(total) {
   return {
-    [_types.
-    [_types.
-    [_types.
-    [_types.
+    [_types.ImportExportTaskStatus.PENDING]: total,
+    [_types.ImportExportTaskStatus.PROCESSING]: 0,
+    [_types.ImportExportTaskStatus.COMPLETED]: 0,
+    [_types.ImportExportTaskStatus.FAILED]: 0,
    total
  };
 }
-
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
-    const pageZipFilePaths = [];
+    const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
-
-
-
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport);
+    // Make sure DIR exists
     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
-
     _yauzl.default.open(exportFileZipPath, {
       lazyEntries: true
     }, function (err, zipFile) {
       if (err) {
         console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
         reject(err);
+        return;
+      }
+      if (!zipFile) {
+        console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
+        reject("Missing Zip File Resource.");
+        return;
       }
-
       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
           console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
          reject(err);
        }
-
-        resolve(pageZipFilePaths);
+        resolve(zipFilePaths);
      });
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
-
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -445,48 +501,56 @@ function extractZipToDisk(exportFileZipPath) {
          if (err) {
            console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
            reject(err);
+            return;
+          }
+          if (!readStream) {
+            console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
+            reject("Missing Read Stream Resource.");
+            return;
          }
-
          const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
-
          readStream.on("end", function () {
-            pageZipFilePaths.push(filePath);
+            zipFilePaths.push(filePath);
            zipFile.readEntry();
          });
-          streamPipeline(readStream, (0, _fs.createWriteStream)(filePath))
+          streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
+            reject(error);
+          });
        });
      }
    });
  });
  });
 }
-
-function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
  return new Promise((resolve, reject) => {
    const filePaths = [];
    const fileUploadPromises = [];
-    const
+    const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
    let dataMap = {
-      key:
+      key: uniqueKey,
      assets: {},
      data: ""
    };
-
-    _yauzl.default.open(pageDataZipFilePath, {
+    _yauzl.default.open(dataZipFilePath, {
      lazyEntries: true
    }, function (err, zipFile) {
      if (err) {
-        console.warn("ERROR: Failed to extract zip: ",
+        console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
        reject(err);
+        return;
+      }
+      if (!zipFile) {
+        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
+        reject("Missing Zip File Resource.");
+        return;
      }
-
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
-          console.warn('ERROR: Failed on "END" for file: ',
+          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
          reject(err);
        }
-
        Promise.all(fileUploadPromises).then(res => {
          res.forEach(r => {
            console.info("Done uploading... ", r);
@@ -497,7 +561,6 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
      zipFile.readEntry();
      zipFile.on("entry", function (entry) {
        console.info(`Processing entry: "${entry.fileName}"`);
-
        if (/\/$/.test(entry.fileName)) {
          // Directory file names end with '/'.
          // Note that entries for directories themselves are optional.
@@ -509,27 +572,32 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
          if (err) {
            console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
            reject(err);
+            return;
+          }
+          if (!readStream) {
+            console.log("ERROR: Missing Read Stream while importing.");
+            reject("Missing Read Strea Resource.");
+            return;
          }
-
          readStream.on("end", function () {
            filePaths.push(entry.fileName);
            zipFile.readEntry();
          });
-          const newKey = `${uniquePath}/${
-
-          dataMap =
+          const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
+          // Modify in place
+          dataMap = prepareDataDirMap({
            map: dataMap,
            filePath: entry.fileName,
            newKey
          });
-
          const {
            streamPassThrough,
            streamPassThroughUploadPromise: promise
          } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
-
          streamPipeline(readStream, streamPassThrough).then(() => {
            fileUploadPromises.push(promise);
+          }).catch(error => {
+            reject(error);
          });
        });
      }