@webiny/api-page-builder-import-export 0.0.0-mt-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/LICENSE +21 -0
  2. package/README.md +17 -0
  3. package/exportPages/combine/index.d.ts +19 -0
  4. package/exportPages/combine/index.js +88 -0
  5. package/exportPages/process/index.d.ts +26 -0
  6. package/exportPages/process/index.js +204 -0
  7. package/exportPages/s3Stream.d.ts +29 -0
  8. package/exportPages/s3Stream.js +106 -0
  9. package/exportPages/utils.d.ts +13 -0
  10. package/exportPages/utils.js +113 -0
  11. package/exportPages/zipper.d.ts +35 -0
  12. package/exportPages/zipper.js +137 -0
  13. package/graphql/crud/pageImportExportTasks.crud.d.ts +5 -0
  14. package/graphql/crud/pageImportExportTasks.crud.js +394 -0
  15. package/graphql/crud/pages.crud.d.ts +4 -0
  16. package/graphql/crud/pages.crud.js +162 -0
  17. package/graphql/crud.d.ts +3 -0
  18. package/graphql/crud.js +16 -0
  19. package/graphql/graphql/pageImportExportTasks.gql.d.ts +4 -0
  20. package/graphql/graphql/pageImportExportTasks.gql.js +80 -0
  21. package/graphql/graphql/pages.gql.d.ts +4 -0
  22. package/graphql/graphql/pages.gql.js +72 -0
  23. package/graphql/graphql/utils/resolve.d.ts +3 -0
  24. package/graphql/graphql/utils/resolve.js +18 -0
  25. package/graphql/graphql.d.ts +3 -0
  26. package/graphql/graphql.js +15 -0
  27. package/graphql/index.d.ts +3 -0
  28. package/graphql/index.js +16 -0
  29. package/graphql/types.d.ts +63 -0
  30. package/graphql/types.js +5 -0
  31. package/importPages/client.d.ts +7 -0
  32. package/importPages/client.js +40 -0
  33. package/importPages/create/index.d.ts +27 -0
  34. package/importPages/create/index.js +109 -0
  35. package/importPages/process/index.d.ts +25 -0
  36. package/importPages/process/index.js +183 -0
  37. package/importPages/utils.d.ts +43 -0
  38. package/importPages/utils.js +539 -0
  39. package/mockSecurity.d.ts +2 -0
  40. package/mockSecurity.js +13 -0
  41. package/package.json +80 -0
  42. package/types.d.ts +192 -0
  43. package/types.js +42 -0
package/importPages/utils.js ADDED
@@ -0,0 +1,539 @@
+ "use strict";
+
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.importPage = importPage;
+ exports.initialStats = initialStats;
+ exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
+ exports.zeroPad = exports.uploadPageAssets = void 0;
+
+ var _uniqid = _interopRequireDefault(require("uniqid"));
+
+ var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
+
+ var _fs = require("fs");
+
+ var _fsExtra = require("fs-extra");
+
+ var _util = require("util");
+
+ var _stream = require("stream");
+
+ var _nodeFetch = _interopRequireDefault(require("node-fetch"));
+
+ var _path = _interopRequireDefault(require("path"));
+
+ var _yauzl = _interopRequireDefault(require("yauzl"));
+
+ var _chunk = _interopRequireDefault(require("lodash/chunk"));
+
+ var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
+
+ var _error = _interopRequireDefault(require("@webiny/error"));
+
+ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
+
+ var _types = require("../types");
+
+ var _s3Stream = require("../exportPages/s3Stream");
+
+ const streamPipeline = (0, _util.promisify)(_stream.pipeline);
+ const INSTALL_DIR = "/tmp";
+
+ const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
+
+ const FILES_COUNT_IN_EACH_BATCH = 15;
+ const ZIP_CONTENT_TYPE = "application/zip";
+
+ function updateImageInPageSettings({
+   settings,
+   fileIdToKeyMap,
+   srcPrefix
+ }) {
+   let newSettings = settings;
+   const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+
+   if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
+     newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(settings.general.image.id)}`);
+   }
+
+   if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
+     newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(settings.social.image.id)}`);
+   }
+
+   return newSettings;
+ }
+
+ function updateFilesInPageData({
+   data,
+   fileIdToKeyMap,
+   srcPrefix
+ }) {
+   // BASE CASE: Termination point
+   if (!data || typeof data !== "object") {
+     return;
+   } // Recursively call the function if data is an array
+
+
+   if (Array.isArray(data)) {
+     for (let i = 0; i < data.length; i++) {
+       const element = data[i];
+       updateFilesInPageData({
+         data: element,
+         fileIdToKeyMap,
+         srcPrefix
+       });
+     }
+
+     return;
+   } // Main logic
+
+
+   const tuple = Object.entries(data);
+
+   for (let i = 0; i < tuple.length; i++) {
+     const [key, value] = tuple[i];
+
+     if (key === "file" && value && fileIdToKeyMap.has(value.id)) {
+       value.key = fileIdToKeyMap.get(value.id);
+       value.name = fileIdToKeyMap.get(value.id);
+       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
+     } else {
+       updateFilesInPageData({
+         data: value,
+         srcPrefix,
+         fileIdToKeyMap
+       });
+     }
+   }
+ }
+
+ const uploadPageAssets = async ({
+   context,
+   filesData,
+   fileUploadsData
+ }) => {
+   /**
+    * This function contains the logic for downloading files from S3.
+    * We're currently not mocking the zip file download from S3 in tests.
+    * So in tests we mock it manually by simply returning an empty object.
+    */
+   if (process.env.NODE_ENV === "test") {
+     return {};
+   }
+
+   console.log("INSIDE uploadPageAssets"); // Save the uploaded file key against the static id for later use.
+
+   const fileIdToKeyMap = new Map(); // Save file meta data against the old key for later use.
+
+   const fileKeyToFileMap = new Map(); // Initialize maps.
+
+   for (let i = 0; i < filesData.length; i++) {
+     const file = filesData[i];
+     fileKeyToFileMap.set(file.key, file); // Initialize the value
+
+     fileIdToKeyMap.set(file.id, file.type);
+   }
+
+   const fileUploadResults = await uploadFilesFromS3({
+     fileKeyToFileMap,
+     oldKeyToNewKeyMap: fileUploadsData.assets
+   }); // Create files in File Manager
+
+   const createFilesInput = fileUploadResults.map(uploadResult => {
+     const newKey = uploadResult.Key;
+     const file = fileKeyToFileMap.get(getOldFileKey(newKey)); // Update the file map with the newly uploaded file.
+
+     fileIdToKeyMap.set(file.id, newKey);
+     return {
+       key: newKey,
+       name: file.name,
+       size: file.size,
+       type: file.type,
+       meta: file.meta,
+       tags: file.tags
+     };
+   });
+   const createFilesPromises = []; // Gives an array of chunks (each consisting of FILES_COUNT_IN_EACH_BATCH items).
+
+   const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
+
+   for (let i = 0; i < createFilesInputChunks.length; i++) {
+     const createFilesInputChunk = createFilesInputChunks[i];
+     createFilesPromises.push(
+     /*
+      * We need to break the files into chunks because the
+      * `createFilesInBatch` operation has a limit on the number of files it can handle at once.
+      */
+     context.fileManager.files.createFilesInBatch(createFilesInputChunk));
+   }
+
+   await Promise.all(createFilesPromises);
+   return {
+     fileIdToKeyMap
+   };
+ };
+
+ exports.uploadPageAssets = uploadPageAssets;
+
+ async function importPage({
+   pageKey,
+   context,
+   fileUploadsData
+ }) {
+   const log = console.log; // Make a directory for the page, into which we'll extract the page data file.
+
+   const PAGE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, pageKey);
+
+   (0, _fsExtra.ensureDirSync)(PAGE_EXTRACT_DIR);
+
+   const pageDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+   const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
+
+   log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`); // Download and save the page data file to disk.
+
+   await new Promise((resolve, reject) => {
+     _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+   }); // Load the page data file from disk.
+
+   log(`Load file ${pageDataFileKey}`);
+   const {
+     page,
+     files
+   } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
+
+   if (Array.isArray(files) && files.length) {
+     // Upload page assets.
+     const {
+       fileIdToKeyMap
+     } = await uploadPageAssets({
+       context,
+       filesData: files,
+       fileUploadsData
+     });
+     const {
+       srcPrefix
+     } = await context.fileManager.settings.getSettings();
+     updateFilesInPageData({
+       data: page.content,
+       fileIdToKeyMap,
+       srcPrefix
+     });
+     page.settings = updateImageInPageSettings({
+       settings: page.settings,
+       fileIdToKeyMap,
+       srcPrefix
+     });
+   }
+
+   log("Removing directory for page...");
+   await (0, _downloadInstallFiles.deleteFile)(pageKey);
+   log(`Removing page contents from S3...`);
+   await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+   return page;
+ }
+
+ async function uploadFilesFromS3({
+   fileKeyToFileMap,
+   oldKeyToNewKeyMap
+ }) {
+   const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);
+   const promises = []; // Upload all assets.
+
+   for (let i = 0; i < oldKeysForAssets.length; i++) {
+     const oldKey = oldKeysForAssets[i];
+     const tempNewKey = oldKeyToNewKeyMap[oldKey]; // Read file.
+
+     const readStream = _s3Stream.s3Stream.readStream(tempNewKey); // Get file meta data.
+
+
+     const fileMetaData = fileKeyToFileMap.get(oldKey);
+
+     if (fileMetaData) {
+       const newKey = (0, _uniqid.default)("", `-${fileMetaData.key}`);
+
+       const {
+         streamPassThrough,
+         streamPassThroughUploadPromise: promise
+       } = _s3Stream.s3Stream.writeStream(newKey, fileMetaData.type);
+
+       readStream.pipe(streamPassThrough);
+       promises.push(promise);
+       console.log(`Successfully queued file "${newKey}"`);
+     }
+   }
+
+   return Promise.all(promises);
+ }
+
+ async function getObjectMetaFromS3(Key) {
+   const meta = await _s3Stream.s3Stream.getObjectHead(Key);
+
+   if (meta.ContentType !== ZIP_CONTENT_TYPE) {
+     throw new _error.default(`Unsupported file type: "${meta.ContentType}"`, "UNSUPPORTED_FILE");
+   }
+ }
+
+ function getOldFileKey(key) {
+   /*
+    * Because we know the naming convention, we can extract the old key from the new key.
+    */
+   try {
+     const [, ...rest] = key.split("-");
+     return rest.join("-");
+   } catch (e) {
+     return key;
+   }
+ }
+
+ const FILE_CONTENT_TYPE = "application/octet-stream";
+
+ function getFileNameWithoutExt(fileName) {
+   return _path.default.basename(fileName).replace(_path.default.extname(fileName), "");
+ }
+
+ /**
+  * Reads the given zip file from S3 via a stream, extracts its contents, and uploads them to the S3 bucket.
+  * @param zipFileKey
+  * @return PageImportData S3 file keys for all uploaded assets, grouped by page.
+  */
+ async function readExtractAndUploadZipFileContents(zipFileKey) {
+   const log = console.log;
+   const pageImportDataList = [];
+   let readStream; // Check whether it is a URL.
+
+   if (zipFileKey.startsWith("http")) {
+     const response = await (0, _nodeFetch.default)(zipFileKey);
+
+     if (!response.ok) {
+       throw new _error.default(`Unable to download file: "${zipFileKey}"`, response.statusText);
+     }
+
+     readStream = response.body;
+   } else {
+     // We first retrieve the object's meta data, just to check whether the file is available at the given Key.
+     await getObjectMetaFromS3(zipFileKey);
+     readStream = _s3Stream.s3Stream.readStream(zipFileKey);
+   }
+
+   const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/");
+
+   const zipFileName = _path.default.basename(zipFileKey); // Read the export file and download it to disk.
+
+
+   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
+
+   const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
+   await streamPipeline(readStream, writeStream);
+   log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file.
+
+   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
+   log(`Removing ZIP file "${zipFileKey}" from ${ZIP_FILE_PATH}`);
+   await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload its contents to S3.
+
+   for (let i = 0; i < zipFilePaths.length; i++) {
+     const currentPath = zipFilePaths[i];
+     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
+     pageImportDataList.push(dataMap);
+   }
+
+   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
+   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
+   return pageImportDataList;
+ }
+
+ const ASSETS_DIR_NAME = "/assets";
+
+ function preparePageDataDirMap({
+   map,
+   filePath,
+   newKey
+ }) {
+   const dirname = _path.default.dirname(filePath);
+
+   const fileName = _path.default.basename(filePath);
+   /*
+    * We want to use a dot (.) as part of the object key rather than creating nested object(s).
+    * Also, the file name might contain dots besides the extension, so we escape them all.
+    */
+
+
+   const oldKey = fileName.replace(/\./g, "\\.");
+   const isAsset = dirname.endsWith(ASSETS_DIR_NAME);
+
+   if (isAsset) {
+     map = _dotPropImmutable.default.set(map, `assets.${oldKey}`, newKey);
+   } else {
+     // We only need to know the newKey for the data file.
+     map = _dotPropImmutable.default.set(map, `data`, newKey);
+   }
+
+   return map;
+ }
+
+ async function deleteS3Folder(key) {
+   // Append a trailing slash ("/") to the key to make sure we only delete a specific folder.
+   if (!key.endsWith("/")) {
+     key = `${key}/`;
+   }
+
+   const response = await _s3Stream.s3Stream.listObject(key);
+   const keys = response.Contents.map(c => c.Key);
+   console.log(`Found ${keys.length} files.`);
+   const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
+   await Promise.all(deleteFilePromises);
+   console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
+ }
+
+ const zeroPad = version => `${version}`.padStart(5, "0");
+
+ exports.zeroPad = zeroPad;
+
+ function initialStats(total) {
+   return {
+     [_types.PageImportExportTaskStatus.PENDING]: total,
+     [_types.PageImportExportTaskStatus.PROCESSING]: 0,
+     [_types.PageImportExportTaskStatus.COMPLETED]: 0,
+     [_types.PageImportExportTaskStatus.FAILED]: 0,
+     total
+   };
+ }
+
+ function extractZipToDisk(exportFileZipPath) {
+   return new Promise((resolve, reject) => {
+     const pageZipFilePaths = [];
+     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
+
+     const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
+
+
+     (0, _fsExtra.ensureDirSync)(EXPORT_FILE_EXTRACTION_PATH);
+
+     _yauzl.default.open(exportFileZipPath, {
+       lazyEntries: true
+     }, function (err, zipFile) {
+       if (err) {
+         console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
+         reject(err);
+       }
+
+       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
+       zipFile.on("end", function (err) {
+         if (err) {
+           console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
+           reject(err);
+         }
+
+         resolve(pageZipFilePaths);
+       });
+       zipFile.readEntry();
+       zipFile.on("entry", function (entry) {
+         console.info(`Processing entry: "${entry.fileName}"`);
+
+         if (/\/$/.test(entry.fileName)) {
+           // Directory file names end with '/'.
+           // Note that entries for directories themselves are optional.
+           // An entry's fileName implicitly requires its parent directories to exist.
+           zipFile.readEntry();
+         } else {
+           // file entry
+           zipFile.openReadStream(entry, function (err, readStream) {
+             if (err) {
+               console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
+               reject(err);
+             }
+
+             const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
+
+             readStream.on("end", function () {
+               pageZipFilePaths.push(filePath);
+               zipFile.readEntry();
+             });
+             streamPipeline(readStream, (0, _fs.createWriteStream)(filePath));
+           });
+         }
+       });
+     });
+   });
+ }
+
+ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+   return new Promise((resolve, reject) => {
+     const filePaths = [];
+     const fileUploadPromises = [];
+     const uniquePageKey = getFileNameWithoutExt(pageDataZipFilePath);
+     let dataMap = {
+       key: uniquePageKey,
+       assets: {},
+       data: ""
+     };
+
+     _yauzl.default.open(pageDataZipFilePath, {
+       lazyEntries: true
+     }, function (err, zipFile) {
+       if (err) {
+         console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
+         reject(err);
+       }
+
+       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
+       zipFile.on("end", function (err) {
+         if (err) {
+           console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
+           reject(err);
+         }
+
+         Promise.all(fileUploadPromises).then(res => {
+           res.forEach(r => {
+             console.info("Done uploading... ", r);
+           });
+           resolve(dataMap);
+         });
+       });
+       zipFile.readEntry();
+       zipFile.on("entry", function (entry) {
+         console.info(`Processing entry: "${entry.fileName}"`);
+
+         if (/\/$/.test(entry.fileName)) {
+           // Directory file names end with '/'.
+           // Note that entries for directories themselves are optional.
+           // An entry's fileName implicitly requires its parent directories to exist.
+           zipFile.readEntry();
+         } else {
+           // file entry
+           zipFile.openReadStream(entry, function (err, readStream) {
+             if (err) {
+               console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
+               reject(err);
+             }
+
+             readStream.on("end", function () {
+               filePaths.push(entry.fileName);
+               zipFile.readEntry();
+             });
+             const newKey = `${uniquePath}/${uniquePageKey}/${entry.fileName}`; // Modify in place
+
+             dataMap = preparePageDataDirMap({
+               map: dataMap,
+               filePath: entry.fileName,
+               newKey
+             });
+
+             const {
+               streamPassThrough,
+               streamPassThroughUploadPromise: promise
+             } = _s3Stream.s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);
+
+             streamPipeline(readStream, streamPassThrough).then(() => {
+               fileUploadPromises.push(promise);
+             });
+           });
+         }
+       });
+     });
+   });
+ }
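
Note on the naming convention used above: uploadFilesFromS3() builds each new S3 key as uniqid("", `-${fileMetaData.key}`), i.e. "<uniqueId>-<oldKey>", and getOldFileKey() recovers the old key by dropping everything before the first dash. A minimal sketch of that round-trip, using the same uniqid dependency this package declares (the sample key is hypothetical):

    const uniqid = require("uniqid");

    const oldKey = "8ldbf28ab-cat.png"; // hypothetical File Manager file key
    const newKey = uniqid("", `-${oldKey}`); // e.g. "4n5pxq24kpiob12og9-8ldbf28ab-cat.png"

    // getOldFileKey() drops the segment before the first "-" and re-joins the rest,
    // so old keys that themselves contain dashes survive the round-trip.
    const [, ...rest] = newKey.split("-");
    console.log(rest.join("-") === oldKey); // true
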
package/mockSecurity.d.ts ADDED
@@ -0,0 +1,2 @@
+ import { SecurityContext, SecurityIdentity } from "@webiny/api-security/types";
+ export declare const mockSecurity: (identity: SecurityIdentity, context: SecurityContext) => void;
package/mockSecurity.js ADDED
@@ -0,0 +1,13 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.mockSecurity = void 0;
+
+ const mockSecurity = (identity, context) => {
+   context.security.disableAuthorization();
+   context.security.setIdentity(identity);
+ };
+
+ exports.mockSecurity = mockSecurity;
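
A minimal sketch of how mockSecurity might be used in a test, assuming a handler context that exposes the security API seen above (the identity and the context stub here are hypothetical):

    const { mockSecurity } = require("@webiny/api-page-builder-import-export/mockSecurity");

    const identity = { id: "mocked-id", type: "admin", displayName: "Mocked Identity" };
    const context = {
        security: {
            disableAuthorization: () => console.log("Authorization disabled."),
            setIdentity: id => console.log(`Identity set to "${id.id}".`)
        }
    };

    // Call before invoking CRUD methods that would otherwise fail permission checks.
    mockSecurity(identity, context);
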
package/package.json ADDED
@@ -0,0 +1,80 @@
+ {
+   "name": "@webiny/api-page-builder-import-export",
+   "version": "0.0.0-mt-1",
+   "main": "index.js",
+   "keywords": [
+     "pbie:base"
+   ],
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/webiny/webiny-js.git",
+     "directory": "packages/api-page-builder-import-export"
+   },
+   "description": "The API for the import export feature of the Webiny Page Builder app.",
+   "author": "Webiny Ltd",
+   "license": "MIT",
+   "dependencies": {
+     "@babel/runtime": "7.15.4",
+     "@commodo/fields": "1.1.2-beta.20",
+     "@webiny/api-file-manager": "0.0.0-mt-1",
+     "@webiny/api-page-builder": "0.0.0-mt-1",
+     "@webiny/api-security": "0.0.0-mt-1",
+     "@webiny/error": "0.0.0-mt-1",
+     "@webiny/handler": "0.0.0-mt-1",
+     "@webiny/handler-args": "0.0.0-mt-1",
+     "@webiny/handler-aws": "0.0.0-mt-1",
+     "@webiny/handler-graphql": "0.0.0-mt-1",
+     "@webiny/validation": "0.0.0-mt-1",
+     "archiver": "5.3.0",
+     "commodo-fields-object": "1.0.6",
+     "dot-prop-immutable": "2.1.1",
+     "fs-extra": "7.0.1",
+     "load-json-file": "6.2.0",
+     "lodash": "4.17.21",
+     "mdbid": "1.0.0",
+     "node-fetch": "2.6.5",
+     "stream": "0.0.2",
+     "uniqid": "5.4.0",
+     "yauzl": "2.10.0"
+   },
+   "devDependencies": {
+     "@babel/cli": "^7.5.5",
+     "@babel/core": "^7.5.5",
+     "@babel/plugin-proposal-export-default-from": "^7.5.2",
+     "@babel/preset-env": "^7.5.5",
+     "@babel/preset-typescript": "^7.8.3",
+     "@elastic/elasticsearch": "7.12.0",
+     "@shelf/jest-elasticsearch": "^1.0.0",
+     "@types/puppeteer": "^5.4.2",
+     "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-mt-1",
+     "@webiny/api-file-manager-ddb-es": "^0.0.0-mt-1",
+     "@webiny/api-i18n-ddb": "^0.0.0-mt-1",
+     "@webiny/api-security-so-ddb": "^0.0.0-mt-1",
+     "@webiny/api-tenancy": "^0.0.0-mt-1",
+     "@webiny/api-tenancy-so-ddb": "^0.0.0-mt-1",
+     "@webiny/cli": "^0.0.0-mt-1",
+     "@webiny/db": "^0.0.0-mt-1",
+     "@webiny/project-utils": "^0.0.0-mt-1",
+     "jest": "^26.6.3",
+     "jest-dynalite": "^3.2.0",
+     "rimraf": "^3.0.2",
+     "ttypescript": "^1.5.12",
+     "typescript": "^4.1.3"
+   },
+   "publishConfig": {
+     "access": "public",
+     "directory": "dist"
+   },
+   "scripts": {
+     "build": "yarn webiny run build",
+     "watch": "yarn webiny run watch"
+   },
+   "adio": {
+     "ignore": {
+       "src": [
+         "aws-sdk"
+       ]
+     }
+   },
+   "gitHead": "37736d8456a6ecb342a6c3645060bd9a3f2d4bb0"
+ }
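
A minimal sketch of consuming the compiled helpers shipped in this package; the lowercase status keys assume the PageImportExportTaskStatus enum from package/types.js serializes to "pending", "processing", "completed" and "failed":

    const { initialStats, zeroPad } = require("@webiny/api-page-builder-import-export/importPages/utils");

    console.log(zeroPad(21)); // "00021"
    console.log(initialStats(5)); // { pending: 5, processing: 0, completed: 0, failed: 0, total: 5 }
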