@webiny/api-page-builder-import-export 0.0.0-unstable.bca7b3e350 → 0.0.0-unstable.c2780f51fe
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- package/export/combine/formsHandler.d.ts +6 -0
- package/export/combine/formsHandler.js +99 -0
- package/export/combine/formsHandler.js.map +1 -0
- package/export/combine/index.js +5 -0
- package/export/combine/index.js.map +1 -1
- package/export/process/formsHandler.d.ts +6 -0
- package/export/process/formsHandler.js +187 -0
- package/export/process/formsHandler.js.map +1 -0
- package/export/process/index.js +5 -0
- package/export/process/index.js.map +1 -1
- package/export/s3Stream.d.ts +5 -2
- package/export/s3Stream.js.map +1 -1
- package/export/utils.d.ts +8 -1
- package/export/utils.js +34 -6
- package/export/utils.js.map +1 -1
- package/export/zipper.js +2 -2
- package/export/zipper.js.map +1 -1
- package/graphql/crud/blocks.crud.js +1 -2
- package/graphql/crud/blocks.crud.js.map +1 -1
- package/graphql/crud/forms.crud.d.ts +4 -0
- package/graphql/crud/forms.crud.js +129 -0
- package/graphql/crud/forms.crud.js.map +1 -0
- package/graphql/crud.js +2 -1
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/forms.gql.d.ts +4 -0
- package/graphql/graphql/forms.gql.js +60 -0
- package/graphql/graphql/forms.gql.js.map +1 -0
- package/graphql/graphql.js +2 -1
- package/graphql/graphql.js.map +1 -1
- package/graphql/types.d.ts +23 -0
- package/graphql/types.js.map +1 -1
- package/import/constants.d.ts +3 -0
- package/import/constants.js +14 -0
- package/import/constants.js.map +1 -0
- package/import/create/blocksHandler.js +4 -3
- package/import/create/blocksHandler.js.map +1 -1
- package/import/create/formsHandler.d.ts +3 -0
- package/import/create/formsHandler.js +103 -0
- package/import/create/formsHandler.js.map +1 -0
- package/import/create/index.js +5 -0
- package/import/create/index.js.map +1 -1
- package/import/create/pagesHandler.js +3 -2
- package/import/create/pagesHandler.js.map +1 -1
- package/import/create/templatesHandler.js +2 -1
- package/import/create/templatesHandler.js.map +1 -1
- package/import/process/blocks/blocksHandler.d.ts +3 -0
- package/import/process/blocks/blocksHandler.js +169 -0
- package/import/process/blocks/blocksHandler.js.map +1 -0
- package/import/process/blocks/importBlock.d.ts +11 -0
- package/import/process/blocks/importBlock.js +92 -0
- package/import/process/blocks/importBlock.js.map +1 -0
- package/import/process/blocksHandler.js +26 -20
- package/import/process/blocksHandler.js.map +1 -1
- package/import/process/forms/formsHandler.d.ts +3 -0
- package/import/process/forms/formsHandler.js +176 -0
- package/import/process/forms/formsHandler.js.map +1 -0
- package/import/process/forms/importForm.d.ts +9 -0
- package/import/process/forms/importForm.js +43 -0
- package/import/process/forms/importForm.js.map +1 -0
- package/import/process/index.js +8 -3
- package/import/process/index.js.map +1 -1
- package/import/process/pages/importPage.d.ts +11 -0
- package/import/process/pages/importPage.js +92 -0
- package/import/process/pages/importPage.js.map +1 -0
- package/import/process/pages/pagesHandler.d.ts +3 -0
- package/import/process/pages/pagesHandler.js +183 -0
- package/import/process/pages/pagesHandler.js.map +1 -0
- package/import/process/pagesHandler.js +29 -26
- package/import/process/pagesHandler.js.map +1 -1
- package/import/process/templates/importTemplate.d.ts +11 -0
- package/import/process/templates/importTemplate.js +66 -0
- package/import/process/templates/importTemplate.js.map +1 -0
- package/import/process/{templatesHandler.d.ts → templates/templatesHandler.d.ts} +2 -2
- package/import/process/{templatesHandler.js → templates/templatesHandler.js} +8 -7
- package/import/process/templates/templatesHandler.js.map +1 -0
- package/import/utils/deleteS3Folder.d.ts +1 -0
- package/import/utils/deleteS3Folder.js +19 -0
- package/import/utils/deleteS3Folder.js.map +1 -0
- package/import/utils/extractAndUploadZipFileContents.d.ts +7 -0
- package/import/utils/extractAndUploadZipFileContents.js +122 -0
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -0
- package/import/utils/extractZipAndUploadToS3.d.ts +2 -0
- package/import/utils/extractZipAndUploadToS3.js +98 -0
- package/import/utils/extractZipAndUploadToS3.js.map +1 -0
- package/import/utils/getFileNameWithoutExt.d.ts +1 -0
- package/import/utils/getFileNameWithoutExt.js +11 -0
- package/import/utils/getFileNameWithoutExt.js.map +1 -0
- package/import/utils/index.d.ts +9 -0
- package/import/utils/index.js +104 -0
- package/import/utils/index.js.map +1 -0
- package/import/utils/initialStats.d.ts +7 -0
- package/import/utils/initialStats.js +16 -0
- package/import/utils/initialStats.js.map +1 -0
- package/import/utils/prepareDataDirMap.d.ts +6 -0
- package/import/utils/prepareDataDirMap.js +29 -0
- package/import/utils/prepareDataDirMap.js.map +1 -0
- package/import/utils/updateFilesInData.d.ts +8 -0
- package/import/utils/updateFilesInData.js +48 -0
- package/import/utils/updateFilesInData.js.map +1 -0
- package/import/utils/uploadAssets.d.ts +10 -0
- package/import/utils/uploadAssets.js +51 -0
- package/import/utils/uploadAssets.js.map +1 -0
- package/import/utils/uploadFilesFromS3.d.ts +3 -0
- package/import/utils/uploadFilesFromS3.js +19 -0
- package/import/utils/uploadFilesFromS3.js.map +1 -0
- package/import/utils.d.ts +1 -8
- package/import/utils.js +137 -103
- package/import/utils.js.map +1 -1
- package/package.json +23 -22
- package/types.d.ts +21 -0
- package/types.js.map +1 -1
- package/import/process/templatesHandler.js.map +0 -1
package/import/utils/uploadFilesFromS3.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.uploadFilesFromS3 = uploadFilesFromS3;
+var _s3Stream = require("../../export/s3Stream");
+async function uploadFilesFromS3(fileMap) {
+  const promises = [];
+  for (const [source, target] of Array.from(fileMap.entries())) {
+    // Read file.
+    const readStream = _s3Stream.s3Stream.readStream(source);
+    const ws = _s3Stream.s3Stream.writeStream(target.key, target.type);
+    readStream.pipe(ws.streamPassThrough);
+    promises.push(ws.streamPassThroughUploadPromise);
+    console.log(`Successfully queued file "${target.key}"`);
+  }
+  return Promise.all(promises);
+}
package/import/utils/uploadFilesFromS3.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"names":["uploadFilesFromS3","fileMap","promises","source","target","Array","from","entries","readStream","s3Stream","ws","writeStream","key","type","pipe","streamPassThrough","push","streamPassThroughUploadPromise","console","log","Promise","all"],"sources":["uploadFilesFromS3.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\n\nexport type UploadFileMap = Map<string, FileInput>;\n\nexport async function uploadFilesFromS3(fileMap: UploadFileMap) {\n    const promises = [];\n    for (const [source, target] of Array.from(fileMap.entries())) {\n        // Read file.\n        const readStream = s3Stream.readStream(source);\n\n        const ws = s3Stream.writeStream(target.key, target.type);\n        readStream.pipe(ws.streamPassThrough);\n        promises.push(ws.streamPassThroughUploadPromise);\n\n        console.log(`Successfully queued file \"${target.key}\"`);\n    }\n\n    return Promise.all(promises);\n}\n"],"mappings":";;;;;;AAAA;AAKO,eAAeA,iBAAiB,CAACC,OAAsB,EAAE;EAC5D,MAAMC,QAAQ,GAAG,EAAE;EACnB,KAAK,MAAM,CAACC,MAAM,EAAEC,MAAM,CAAC,IAAIC,KAAK,CAACC,IAAI,CAACL,OAAO,CAACM,OAAO,EAAE,CAAC,EAAE;IAC1D;IACA,MAAMC,UAAU,GAAGC,kBAAQ,CAACD,UAAU,CAACL,MAAM,CAAC;IAE9C,MAAMO,EAAE,GAAGD,kBAAQ,CAACE,WAAW,CAACP,MAAM,CAACQ,GAAG,EAAER,MAAM,CAACS,IAAI,CAAC;IACxDL,UAAU,CAACM,IAAI,CAACJ,EAAE,CAACK,iBAAiB,CAAC;IACrCb,QAAQ,CAACc,IAAI,CAACN,EAAE,CAACO,8BAA8B,CAAC;IAEhDC,OAAO,CAACC,GAAG,CAAE,6BAA4Bf,MAAM,CAACQ,GAAI,GAAE,CAAC;EAC3D;EAEA,OAAOQ,OAAO,CAACC,GAAG,CAACnB,QAAQ,CAAC;AAChC"}
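The sourcesContent embedded in the map above shows the TypeScript behind the new module: uploadFilesFromS3 takes a Map of source S3 keys to target FileInput records and streams every object to its new key, resolving once all queued uploads finish. A minimal usage sketch, assuming the package's "~" alias resolves as in the embedded source; the map entry and the partial FileInput cast are illustration-only values, not taken from this diff:

import { uploadFilesFromS3, UploadFileMap } from "~/import/utils/uploadFilesFromS3";
import { FileInput } from "@webiny/api-file-manager/types";

async function copyImportedAssets(): Promise<void> {
    // "source S3 key" -> target file input (new key + content type).
    const fileMap: UploadFileMap = new Map([
        // Hypothetical entry for illustration only.
        ["IMPORTS/abc123/assets/logo.png", { key: "xyz789-logo.png", type: "image/png" } as FileInput]
    ]);

    // Pipes each source object into an S3 write stream; the returned
    // promise settles when every streamPassThroughUploadPromise resolves.
    await uploadFilesFromS3(fileMap);
}

Because the helper only queues pipe() calls and collects the upload promises, callers must await the returned Promise.all before removing the source folder.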
package/import/utils.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { File } from "@webiny/api-file-manager/types";
 import { PbImportExportContext } from "../graphql/types";
-import { ExportedPageData, ExportedBlockData
+import { ExportedPageData, ExportedBlockData } from "../export/utils";
 interface UploadAssetsParams {
     context: PbImportExportContext;
     filesData: File[];
@@ -28,13 +28,6 @@ interface ImportBlockParams {
     fileUploadsData: FileUploadsData;
 }
 export declare function importBlock({ blockKey, context, fileUploadsData }: ImportBlockParams): Promise<ExportedBlockData["block"]>;
-interface ImportTemplateParams {
-    key: string;
-    templateKey: string;
-    context: PbImportExportContext;
-    fileUploadsData: FileUploadsData;
-}
-export declare function importTemplate({ templateKey, context, fileUploadsData }: ImportTemplateParams): Promise<ExportedTemplateData["template"]>;
 interface ImportData {
     assets: Record<string, string>;
     data: string;
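The importTemplate declaration removed above is not gone from the package: per the file list at the top, the implementation now lives under package/import/process/templates/importTemplate.js (+66 lines) with its own importTemplate.d.ts. A hedged sketch of the assumed new deep-import path (the relocated module's exact export shape is not shown in this diff):

// Assumed new location, inferred from the file list entry
// "package/import/process/templates/importTemplate.js +66 -0".
import { importTemplate } from "~/import/process/templates/importTemplate";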
package/import/utils.js
CHANGED
@@ -1,34 +1,53 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.importBlock = importBlock;
 exports.importPage = importPage;
-exports.importTemplate = importTemplate;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
 exports.uploadAssets = void 0;
+
 var _uniqid = _interopRequireDefault(require("uniqid"));
+
 var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
+
 var _fs = require("fs");
+
 var _fsExtra = require("fs-extra");
+
 var _util = require("util");
+
 var _stream = require("stream");
+
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
+
 var _path = _interopRequireDefault(require("path"));
+
 var _yauzl = _interopRequireDefault(require("yauzl"));
+
 var _chunk = _interopRequireDefault(require("lodash/chunk"));
+
 var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
+
 var _error = _interopRequireDefault(require("@webiny/error"));
+
 var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+
 var _types = require("../types");
+
 var _s3Stream = require("../export/s3Stream");
+
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
+
 const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
+
 const FILES_COUNT_IN_EACH_BATCH = 15;
+
 function updateImageInPageSettings(params) {
   const {
     settings,
@@ -37,16 +56,22 @@ function updateImageInPageSettings(params) {
   } = params;
   let newSettings = settings;
   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+
   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
     var _settings$general, _settings$general$ima;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
   }
+
   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
     var _settings$social, _settings$social$imag;
+
     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
   }
+
   return newSettings;
 }
+
 function updateBlockPreviewImage(params) {
   const {
     file,
@@ -58,6 +83,7 @@ function updateBlockPreviewImage(params) {
   newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
   return newFile;
 }
+
 function updateFilesInData({
   data,
   fileIdToKeyMap,
@@ -66,8 +92,9 @@ function updateFilesInData({
   // BASE CASE: Termination point
   if (!data || typeof data !== "object") {
     return;
-  }
-
+  } // Recursively call function if data is array
+
+
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
@@ -77,12 +104,16 @@ function updateFilesInData({
         srcPrefix
       });
     }
+
     return;
-  }
-
+  } // Main logic
+
+
   const tuple = Object.entries(data);
+
   for (let i = 0; i < tuple.length; i++) {
     const [key, value] = tuple[i];
+
     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
       value.key = fileIdToKeyMap.get(value.id);
       value.name = fileIdToKeyMap.get(value.id);
@@ -96,49 +127,51 @@ function updateFilesInData({
     }
   }
 }
+
 const uploadAssets = async params => {
   const {
     context,
     filesData,
     fileUploadsData
-  } = params;
-
+  } = params; // Save uploaded file key against static id for later use.
+
   const fileIdToKeyMap = new Map();
   /**
   * This function contains logic of file download from S3.
   * Current we're not mocking zip file download from S3 in tests at the moment.
   * So, we're manually mocking it in case of test just by returning an empty object.
   */
+
   if (process.env.NODE_ENV === "test") {
     return {
       fileIdToKeyMap
     };
-  }
+  } // Save files meta data against old key for later use.
+
+
+  const fileKeyToFileMap = new Map(); // Initialize maps.
 
-  // Save files meta data against old key for later use.
-  const fileKeyToFileMap = new Map();
-  // Initialize maps.
   for (let i = 0; i < filesData.length; i++) {
     const file = filesData[i];
-    fileKeyToFileMap.set(file.key, file);
+    fileKeyToFileMap.set(file.key, file); // Initialize the value
 
-    // Initialize the value
     fileIdToKeyMap.set(file.id, file.type);
   }
+
   const fileUploadResults = await uploadFilesFromS3({
     fileKeyToFileMap,
     oldKeyToNewKeyMap: fileUploadsData.assets
-  });
+  }); // Create files in File Manager
 
-  // Create files in File Manager
   const createFilesInput = fileUploadResults.map(uploadResult => {
     const newKey = uploadResult.Key;
     const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
     if (!file) {
       return null;
-    }
+    } // Update the file map with newly uploaded file.
+
 
-    // Update the file map with newly uploaded file.
     fileIdToKeyMap.set(file.id, newKey);
     return {
       key: newKey,
@@ -149,9 +182,10 @@ const uploadAssets = async params => {
       tags: file.tags
     };
   }).filter(Boolean);
-  const createFilesPromises = [];
-
+  const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
+
   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
+
   for (let i = 0; i < createFilesInputChunks.length; i++) {
     const createFilesInputChunk = createFilesInputChunks[i];
     createFilesPromises.push(
@@ -161,44 +195,49 @@ const uploadAssets = async params => {
    */
    context.fileManager.files.createFilesInBatch(createFilesInputChunk));
  }
+
  await Promise.all(createFilesPromises);
  return {
    fileIdToKeyMap
  };
 };
+
 exports.uploadAssets = uploadAssets;
+
 async function importPage({
   pageKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log; // Making Directory for page in which we're going to extract the page data file.
 
-  // Making Directory for page in which we're going to extract the page data file.
   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
+
   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
+
   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
-
-  // Download and save page data file in disk.
+
+  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`); // Download and save page data file in disk.
+
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  }); // Load the page data file from disk.
 
-  // Load the page data file from disk.
   log(`Load file ${pageDataFileKey}`);
   const {
     page,
     files
-  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
 
-  // Only update page data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload page assets.
     const {
       fileIdToKeyMap
     } = await uploadAssets({
       context,
+
       /**
        * TODO @ts-refactor @ashutosh figure out correct types.
        */
@@ -221,38 +260,41 @@ async function importPage({
       srcPrefix
     });
   }
+
   log("Removing Directory for page...");
   await (0, _downloadInstallFiles.deleteFile)(pageKey);
   log(`Remove page contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return page;
 }
+
 async function importBlock({
   blockKey,
   context,
   fileUploadsData
 }) {
-  const log = console.log;
+  const log = console.log; // Making Directory for block in which we're going to extract the block data file.
 
-  // Making Directory for block in which we're going to extract the block data file.
   const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
   (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
   const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
   const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
-
-  // Download and save block data file in disk.
+
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
   await new Promise((resolve, reject) => {
     _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
+  }); // Load the block data file from disk.
 
-  // Load the block data file from disk.
   log(`Load file ${blockDataFileKey}`);
   const {
     block,
     files
-  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
 
-  // Only update block data if there are files.
   if (files && Array.isArray(files) && files.length > 0) {
     // Upload block assets.
     const {
@@ -277,91 +319,47 @@ async function importBlock({
       srcPrefix
     });
   }
+
   log("Removing Directory for block...");
   await (0, _downloadInstallFiles.deleteFile)(blockKey);
   log(`Remove block contents from S3...`);
   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
   return block;
 }
-async function importTemplate({
-  templateKey,
-  context,
-  fileUploadsData
-}) {
-  const log = console.log;
 
-  // Making Directory for template in which we're going to extract the template data file.
-  const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
-  (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
-  const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
-  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
-  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
-  // Download and save template data file in disk.
-  await new Promise((resolve, reject) => {
-    _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
-  });
-
-  // Load the template data file from disk.
-  log(`Load file ${templateDataFileKey}`);
-  const {
-    template,
-    files
-  } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
-
-  // Only update template data if there are files.
-  if (files && Array.isArray(files) && files.length > 0) {
-    // Upload template assets.
-    const {
-      fileIdToKeyMap
-    } = await uploadAssets({
-      context,
-      filesData: files,
-      fileUploadsData
-    });
-    const settings = await context.fileManager.settings.getSettings();
-    const {
-      srcPrefix = ""
-    } = settings || {};
-    updateFilesInData({
-      data: template.content || {},
-      fileIdToKeyMap,
-      srcPrefix
-    });
-  }
-  log("Removing Directory for template...");
-  await (0, _downloadInstallFiles.deleteFile)(templateKey);
-  log(`Remove template contents from S3...`);
-  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
-  return template;
-}
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
 }) {
   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
-  const promises = [];
-
+  const promises = []; // Upload all assets.
+
   for (let i = 0; i < oldKeysForAssets.length; i++) {
     const oldKey = oldKeysForAssets[i];
-    const tempNewKey = oldKeyToNewKeyMap[oldKey];
+    const tempNewKey = oldKeyToNewKeyMap[oldKey]; // Read file.
+
+    const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
+
 
-    // Read file.
-    const readStream = _s3Stream.s3Stream.readStream(tempNewKey);
-    // Get file meta data.
     const fileMetaData = fileKeyToFileMap.get(oldKey);
+
     if (fileMetaData) {
       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
+
       const {
         streamPassThrough,
         streamPassThroughUploadPromise: promise
       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
+
       readStream.pipe(streamPassThrough);
       promises.push(promise);
       console.log(`Successfully queued file "${newKey}"`);
     }
   }
+
   return Promise.all(promises);
 }
+
 function getOldFileKey(key) {
   /*
   * Because we know the naming convention, we can extract the old key from new key.
@@ -373,10 +371,13 @@ function getOldFileKey(key) {
     return key;
   }
 }
+
 const FILE_CONTENT_TYPE = "application/octet-stream";
+
 function getFileNameWithoutExt(fileName) {
   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
 }
+
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
  * @param zipFileUrl
@@ -385,70 +386,82 @@ function getFileNameWithoutExt(fileName) {
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
   const importDataList = [];
+
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
+
   const response = await (0, _nodeFetch.default)(zipFileUrl);
+
   if (!response.ok) {
     throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
   }
+
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("IMPORTS/");
-
+  const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk
+
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
+
   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
   await streamPipeline(readStream, writeStream);
-  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
+  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
 
-  // Extract the downloaded zip file
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3
 
-  // Extract each page/block zip and upload their content's to S3
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
     importDataList.push(dataMap);
   }
+
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
   return importDataList;
 }
+
 const ASSETS_DIR_NAME = "/assets";
+
 function prepareDataDirMap({
   map,
   filePath,
   newKey
 }) {
   const dirname = _path.default.dirname(filePath);
+
   const fileName = _path.default.basename(filePath);
   /*
   * We want to use dot (.) as part of object key rather than creating nested object(s).
   * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.
   */
+
+
   const oldKey = fileName.replace(/\./g, "\\.");
   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
+
   if (isAsset) {
     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
   } else {
     // We only need to know the newKey for data file.
     map = _dotPropImmutable.default.set(map, `data`, newKey);
   }
+
   return map;
 }
+
 async function deleteS3Folder(key) {
   // Append trailing slash i.e "/" to key to make sure we only delete a specific folder.
   if (!key.endsWith("/")) {
     key = `${key}/`;
   }
+
   const response = await _s3Stream.s3Stream.listObject(key);
   const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
   console.log(`Found ${keys.length} files.`);
   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
   await Promise.all(deleteFilePromises);
   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
+} // export const zeroPad = version => `${version}`.padStart(5, "0");
 
-// export const zeroPad = version => `${version}`.padStart(5, "0");
 
 function initialStats(total) {
   return {
@@ -459,13 +472,17 @@ function initialStats(total) {
     total
   };
 }
+
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
     const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
-
-    // Make sure DIR exists
+
+    const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
+
+
     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
+
     _yauzl.default.open(exportFileZipPath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -474,22 +491,26 @@ function extractZipToDisk(exportFileZipPath) {
         reject(err);
         return;
       }
+
       if (!zipFile) {
         console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
         reject("Missing Zip File Resource.");
         return;
       }
+
       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
           console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
           reject(err);
         }
+
         resolve(zipFilePaths);
       });
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
         console.info(`Processing entry: "${entry.fileName}"`);
+
         if (/\/$/.test(entry.fileName)) {
           // Directory file names end with '/'.
           // Note that entries for directories themselves are optional.
@@ -503,12 +524,15 @@ function extractZipToDisk(exportFileZipPath) {
           reject(err);
           return;
         }
+
        if (!readStream) {
          console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
          reject("Missing Read Stream Resource.");
          return;
        }
+
        const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
+
        readStream.on("end", function () {
          zipFilePaths.push(filePath);
          zipFile.readEntry();
@@ -522,6 +546,7 @@ function extractZipToDisk(exportFileZipPath) {
     });
   });
 }
+
 function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
@@ -532,6 +557,7 @@ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
       assets: {},
       data: ""
     };
+
     _yauzl.default.open(dataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
@@ -540,17 +566,20 @@ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
       reject(err);
       return;
      }
+
      if (!zipFile) {
        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
        reject("Missing Zip File Resource.");
        return;
      }
+
      console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
      zipFile.on("end", function (err) {
        if (err) {
          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
          reject(err);
        }
+
        Promise.all(fileUploadPromises).then(res => {
          res.forEach(r => {
            console.info("Done uploading... ", r);
@@ -561,6 +590,7 @@ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
       zipFile.readEntry();
       zipFile.on("entry", function (entry) {
         console.info(`Processing entry: "${entry.fileName}"`);
+
         if (/\/$/.test(entry.fileName)) {
           // Directory file names end with '/'.
           // Note that entries for directories themselves are optional.
@@ -574,26 +604,30 @@ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
           reject(err);
           return;
         }
+
        if (!readStream) {
          console.log("ERROR: Missing Read Stream while importing.");
          reject("Missing Read Strea Resource.");
          return;
        }
+
        readStream.on("end", function () {
          filePaths.push(entry.fileName);
          zipFile.readEntry();
        });
-       const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
-
+       const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place
+
        dataMap = prepareDataDirMap({
          map: dataMap,
          filePath: entry.fileName,
          newKey
        });
+
        const {
          streamPassThrough,
          streamPassThroughUploadPromise: promise
        } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
+
        streamPipeline(readStream, streamPassThrough).then(() => {
          fileUploadPromises.push(promise);
        }).catch(error => {