@contentstack/datasync-manager 1.2.3 → 2.0.0
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/.github/workflows/jira.yml +1 -0
- package/dist/api.js +14 -9
- package/dist/config.js +2 -0
- package/dist/core/index.js +96 -59
- package/dist/core/inet.js +11 -7
- package/dist/core/plugins.js +7 -5
- package/dist/core/process.js +8 -6
- package/dist/core/q.js +23 -17
- package/dist/core/token-management.js +28 -23
- package/dist/index.js +53 -40
- package/dist/plugins/helper.js +1 -1
- package/dist/util/build-paths.js +16 -14
- package/dist/util/fs.js +22 -17
- package/dist/util/index.js +63 -43
- package/dist/util/logger.js +4 -2
- package/dist/util/promise.map.js +4 -2
- package/dist/util/series.js +6 -3
- package/dist/util/unprocessible.js +18 -14
- package/dist/util/validations.js +36 -25
- package/package.json +4 -3
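
Most of the `dist/` changes below share a single cause: the compiled output appears to have been regenerated with a newer TypeScript compiler. The telltale signs are the `exports.foo = void 0;` preambles, re-exports turned into `Object.defineProperty` getters, the `adopt` helper added to `__awaiter`, and imported functions being invoked as `(0, mod.fn)(...)`. A minimal sketch of the pattern (the `greet` module is hypothetical, not part of this package):

```js
// Hypothetical TS source: export const greet = (name) => `hi ${name}`;

// Older tsc emit: assign straight onto `exports` and call directly.
exports.greet = (name) => `hi ${name}`;

// Newer tsc (~3.9/4.x) emit: pre-declare the export, define a local
// const, then assign it. Call sites use the comma operator so the callee
// runs with `this === undefined` rather than the module namespace object.
exports.greet = void 0;
const greet = (name) => `hi ${name}`;
exports.greet = greet;
// In a consuming module: (0, greeter_1.greet)('world');
```

None of this changes runtime behaviour by itself; the handful of functional edits are noted after the affected files below.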
package/dist/util/fs.js
CHANGED

@@ -9,9 +9,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirpSync = exports.stat = exports.mkdir = exports.readFileSync = exports.readFile = exports.writeFile = exports.existsSync = void 0;
 const Debug = require("debug");
 const fs_1 = require("fs");
-exports.existsSync = fs_1.existsSync;
+Object.defineProperty(exports, "existsSync", { enumerable: true, get: function () { return fs_1.existsSync; } });
 const mkdirp_1 = __importDefault(require("mkdirp"));
 const path_1 = require("path");
 const write_file_atomic_1 = __importDefault(require("write-file-atomic"));
@@ -22,19 +23,19 @@ const debug = Debug('sm:util-fs');
  * @param {Object} data - Data that's to be written
  * @returns {Promise} Returns a promise
  */
-exports.writeFile = (filePath, data) => {
+const writeFile = (filePath, data) => {
     debug(`Write file called on ${filePath}`);
     return new Promise((resolve, reject) => {
         try {
-            const fileDirectory = path_1.dirname(filePath);
-            if (!fs_1.existsSync(fileDirectory)) {
+            const fileDirectory = (0, path_1.dirname)(filePath);
+            if (!(0, fs_1.existsSync)(fileDirectory)) {
                 mkdirp_1.default.sync(fileDirectory);
             }
-            return write_file_atomic_1.default(filePath, (typeof data === 'object') ? JSON.stringify(data) : data, (wfError) => {
+            return (0, write_file_atomic_1.default)(filePath, (typeof data === 'object') ? JSON.stringify(data) : data, (wfError) => {
                 if (wfError) {
                     return reject(wfError);
                 }
-                return resolve();
+                return resolve('');
             });
         }
         catch (writeFileError) {
@@ -42,21 +43,22 @@ exports.writeFile = (filePath, data) => {
         }
     });
 };
+exports.writeFile = writeFile;
 /**
  * @description A wrapper around nodejs fs module's 'readFile()'
  * @param {String} filePath - Path from where data is to be read
  * @returns {Promise} Returns a promise
  */
-exports.readFile = (filePath) => {
+const readFile = (filePath) => {
     debug(`Read file called on ${filePath}`);
     return new Promise((resolve, reject) => {
         try {
-            return fs_1.stat(filePath, (error, stats) => {
+            return (0, fs_1.stat)(filePath, (error, stats) => {
                 if (error) {
                     return reject(error);
                 }
                 else if (stats.isFile) {
-                    return fs_1.readFile(filePath, { encoding: 'utf-8' }, (rfError, data) => {
+                    return (0, fs_1.readFile)(filePath, { encoding: 'utf-8' }, (rfError, data) => {
                         if (rfError) {
                             return reject(rfError);
                         }
@@ -73,34 +75,36 @@ exports.readFile = (filePath) => {
         }
     });
 };
+exports.readFile = readFile;
 /**
  * @description A wrapper around nodejs fs module's 'readFileSync()'
  * @param filePath - Path from where data is to be read
  * @returns {String} Returns the data that's been read
  */
-exports.readFileSync = (filePath) => {
+const readFileSync = (filePath) => {
     debug(`Read file sync called on ${filePath}`);
-    if (fs_1.existsSync(filePath)) {
-        return fs_1.readFileSync(filePath, { encoding: 'utf-8' });
+    if ((0, fs_1.existsSync)(filePath)) {
+        return (0, fs_1.readFileSync)(filePath, { encoding: 'utf-8' });
     }
     const err = new Error(`Invalid 'read' operation on file. Expected ${filePath} to be of type 'file'!`);
     err.code = 'IOORFS';
     throw err;
 };
+exports.readFileSync = readFileSync;
 /**
  * @description Safely creats a directory at the specified 'path'
  * @param filePath - Path from where directory is to be created
  * @returns {String} Returns a promise
  */
-exports.mkdir = (path) => {
+const mkdir = (path) => {
     debug(`mkdir called on ${path}`);
     return new Promise((resolve, reject) => {
         try {
-            return mkdirp_1.default(path, (error) => {
+            return (0, mkdirp_1.default)(path, (error) => {
                 if (error) {
                     return reject(error);
                 }
-                return resolve();
+                return resolve('');
             });
         }
         catch (error) {
@@ -108,13 +112,14 @@ exports.mkdir = (path) => {
         }
     });
 };
+exports.mkdir = mkdir;
 /**
  * @description exports fs.stat
  */
 var fs_2 = require("fs");
-exports.stat = fs_2.stat;
+Object.defineProperty(exports, "stat", { enumerable: true, get: function () { return fs_2.stat; } });
 /**
  * @description synchnonous way of creating nested folder directory structure
  */
 var mkdirp_2 = require("mkdirp");
-exports.mkdirpSync = mkdirp_2.sync;
+Object.defineProperty(exports, "mkdirpSync", { enumerable: true, get: function () { return mkdirp_2.sync; } });
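
Two changes in `fs.js` go beyond the mechanical re-emit. First, bare `resolve()` calls became `resolve('')`, so the promises returned by `writeFile` and `mkdir` now fulfil with an empty string rather than `undefined` (presumably to satisfy a stricter `Promise<T>` signature). Second, the `existsSync`/`stat`/`mkdirpSync` re-exports became live getters. A small, self-contained sketch of that difference:

```js
const fs_2 = require("fs");

// Old emit: a one-time copy. If fs_2.stat were ever reassigned, the
// export would keep pointing at the original function.
exports.stat = fs_2.stat;

// New emit: a live, enumerable getter, matching ES-module re-export
// semantics - every access reads the current value of fs_2.stat.
Object.defineProperty(exports, "stat", {
    enumerable: true,
    get: function () { return fs_2.stat; },
});
```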
package/dist/util/index.js
CHANGED

@@ -5,10 +5,11 @@
  * MIT Licensed
  */
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
@@ -16,6 +17,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getSchema = exports.filterUnwantedKeys = exports.normalizePluginPath = exports.getOrSetRTEMarkdownAssets = exports.getFile = exports.markCheckpoint = exports.formatItems = exports.groupItems = exports.formatSyncFilters = exports.filterItems = void 0;
 const debug_1 = __importDefault(require("debug"));
 const lodash_1 = require("lodash");
 const marked_1 = __importDefault(require("marked"));
@@ -25,7 +27,7 @@ const fs_1 = require("./fs");
 const logger_1 = require("./logger");
 const unprocessible_1 = require("./unprocessible");
 const validations_1 = require("./validations");
-const debug = debug_1.default('util:index');
+const debug = (0, debug_1.default)('util:index');
 const formattedAssetType = '_assets';
 const formattedContentType = '_content_types';
 const assetType = 'sys_assets';
@@ -35,13 +37,17 @@ const assetType = 'sys_assets';
  * @param {Object} config - Application config
  * @returns {Promise} Returns a promise
  */
-exports.filterItems = (response, config) => __awaiter(this, void 0, void 0, function* () {
-    const locales = lodash_1.map(config.locales, 'code');
-    const filteredObjects = lodash_1.remove(response.items, (item) => {
+const filterItems = (response, config) => __awaiter(void 0, void 0, void 0, function* () {
+    const locales = (0, lodash_1.map)(config.locales, 'code');
+    const filteredObjects = (0, lodash_1.remove)(response.items, (item) => {
         // validate item structure. If the structure is not as expected, filter it out
-        if (!(validations_1.validateItemStructure(item))) {
+        if (!((0, validations_1.validateItemStructure)(item))) {
             return item;
         }
+        // To handle content-type.
+        if (!item.data) {
+            return false;
+        }
         // for published items
         if (item.data.publish_details) {
             return locales.indexOf(item.data.publish_details.locale) !== -1;
@@ -63,10 +69,11 @@ exports.filterItems = (response, config) => __awaiter(this, void 0, void 0, func
     else {
         name = 'sync_token';
     }
-    yield unprocessible_1.saveFilteredItems(filteredObjects, name, response[name]);
+    yield (0, unprocessible_1.saveFilteredItems)(filteredObjects, name, response[name]);
     return;
 });
-exports.formatSyncFilters = (config) => {
+exports.filterItems = filterItems;
+const formatSyncFilters = (config) => {
     if (config.syncManager.filters && typeof config.syncManager.filters === 'object') {
         const filters = config.syncManager.filters;
         for (const filter in filters) {
@@ -86,12 +93,13 @@ exports.formatSyncFilters = (config) => {
     }
     return config;
 };
+exports.formatSyncFilters = formatSyncFilters;
 /**
  * @description Groups items based on their content type
  * @param {Array} items - An array of SYNC API's item
  * @returns {Object} Returns an 'object' who's keys are content type uids
  */
-exports.groupItems = (items) => {
+const groupItems = (items) => {
     const bucket = {};
     items.forEach((item) => {
         if (item._content_type_uid === assetType) {
@@ -106,12 +114,13 @@ exports.groupItems = (items) => {
     });
     return bucket;
 };
+exports.groupItems = groupItems;
 /**
  * @description Formats SYNC API's items into defined standard
  * @param {Array} items - SYNC API's items
  * @param {Object} config - Application config
  */
-exports.formatItems = (items, config) => {
+const formatItems = (items, config) => {
     const time = new Date().toISOString();
     for (let i = 0, j = items.length; i < j; i++) {
         switch (items[i].type) {
@@ -121,20 +130,20 @@ exports.formatItems = (items, config) => {
                 items[i]._type = config.contentstack.actions.publish;
                 // extra keys
                 items[i]._synced_at = time;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 items[i].locale = items[i].data.publish_details.locale;
                 break;
             case 'asset_unpublished':
                 delete items[i].type;
                 items[i]._content_type_uid = formattedAssetType;
                 items[i]._type = config.contentstack.actions.unpublish;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 break;
             case 'asset_deleted':
                 delete items[i].type;
                 items[i]._content_type_uid = formattedAssetType;
                 items[i]._type = config.contentstack.actions.delete;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 break;
             case 'entry_published':
                 delete items[i].type;
@@ -142,20 +151,20 @@ exports.formatItems = (items, config) => {
                 items[i]._content_type_uid = items[i].content_type_uid;
                 // extra keys
                 items[i]._synced_at = time;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 items[i].locale = items[i].data.publish_details.locale;
                 break;
             case 'entry_unpublished':
                 delete items[i].type;
                 items[i]._content_type_uid = items[i].content_type_uid;
                 items[i]._type = config.contentstack.actions.unpublish;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 break;
             case 'entry_deleted':
                 delete items[i].type;
                 items[i]._content_type_uid = items[i].content_type_uid;
                 items[i]._type = config.contentstack.actions.delete;
-                items[i] = lodash_1.merge(items[i], items[i].data);
+                items[i] = (0, lodash_1.merge)(items[i], items[i].data);
                 break;
             case 'content_type_deleted':
                 delete items[i].type;
@@ -174,12 +183,13 @@ exports.formatItems = (items, config) => {
     }
     return items;
 };
+exports.formatItems = formatItems;
 /**
  * @description Add's checkpoint data on the last item found on the 'SYNC API items' collection
  * @param {Object} groupedItems - Grouped items { groupItems(items) - see above } referred by their content type
  * @param {Object} syncResponse - SYNC API's response
 */
-exports.markCheckpoint = (groupedItems, syncResponse) => {
+const markCheckpoint = (groupedItems, syncResponse) => {
     const tokenName = (syncResponse.pagination_token) ? 'pagination_token' : 'sync_token';
     const tokenValue = syncResponse[tokenName];
     const contentTypeUids = Object.keys(groupedItems);
@@ -221,18 +231,19 @@ exports.markCheckpoint = (groupedItems, syncResponse) => {
     }
     return groupedItems;
 };
+exports.markCheckpoint = markCheckpoint;
 /**
  * @description Calcuates filename for ledger and unprocessible files
  * @param {String} file - File to be calculated on
  * @param {Function} rotate - File rotation logic (should return a string)
  * @returns {String} Returns path to a file
 */
-exports.getFile = (file, rotate) => {
+const getFile = (file, rotate) => {
     // tslint:disable-next-line: no-shadowed-variable
     return new Promise((resolve, reject) => {
-        const config = index_1.getConfig();
-        if (fs_1.existsSync(file)) {
-            return fs_1.stat(file, (statError, stats) => {
+        const config = (0, index_1.getConfig)();
+        if ((0, fs_1.existsSync)(file)) {
+            return (0, fs_1.stat)(file, (statError, stats) => {
                 if (statError) {
                     return reject(statError);
                 }
@@ -248,14 +259,15 @@ exports.getFile = (file, rotate) => {
             });
         }
         else {
-            fs_1.mkdirpSync(config.paths.unprocessibleDir);
+            (0, fs_1.mkdirpSync)(config.paths.unprocessibleDir);
            return resolve(file);
         }
     });
 };
+exports.getFile = getFile;
 const findAssets = (parentEntry, key, schema, entry, bucket, isFindNotReplace) => {
     try {
-        const { contentstack } = index_1.getConfig();
+        const { contentstack } = (0, index_1.getConfig)();
         const isMarkdown = (schema.field_metadata.markdown) ? true : false;
         let matches;
         let convertedText;
@@ -280,7 +292,7 @@ const findAssets = (parentEntry, key, schema, entry, bucket, isFindNotReplace) =
             bucket.push(assetObject);
         }
         else {
-            const asset = lodash_1.find(bucket, (item) => {
+            const asset = (0, lodash_1.find)(bucket, (item) => {
                 // tslint:disable-next-line: max-line-length
                 const newRegexp = new RegExp(contentstack.regexp.rte_asset_pattern_2.url, contentstack.regexp.rte_asset_pattern_2.options);
                 let urlparts;
@@ -311,11 +323,11 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
     for (let index = 0; index < parentKeys.length; index++) {
         const parentKey = parentKeys[index];
         const subEntry = entry[parentKey];
-        if (subEntry && !(lodash_1.isEmpty(subEntry)) && index === (parentKeys.length - 1)) {
+        if (subEntry && !((0, lodash_1.isEmpty)(subEntry)) && index === (parentKeys.length - 1)) {
             if (subEntry && subEntry.length) {
                 subEntry.forEach((subEntryItem, idx) => {
                     // tricky!
-                    if (!(lodash_1.isEmpty(subEntryItem))) {
+                    if (!((0, lodash_1.isEmpty)(subEntryItem))) {
                         findAssets(subEntry, idx, schema, subEntryItem, bucket, findNoteReplace);
                     }
                     // iterate(schema, subEntryItem, bucket, findNoteReplace, parentKeys)
@@ -328,10 +340,10 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
             }
         }
         else if (subEntry !== undefined) {
-            const subKeys = lodash_1.cloneDeep(parentKeys).splice(index);
+            const subKeys = (0, lodash_1.cloneDeep)(parentKeys).splice(index);
             if (subEntry && subEntry instanceof Array && subEntry.length) {
                 subEntry.forEach((subEntryItem) => {
-                    iterate(schema, subEntryItem, bucket, findNoteReplace, lodash_1.cloneDeep(subKeys));
+                    iterate(schema, subEntryItem, bucket, findNoteReplace, (0, lodash_1.cloneDeep)(subKeys));
                 });
                 return;
             }
@@ -346,7 +358,7 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
         logger_1.logger.error(error);
     }
 };
-exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplace, parent = []) => {
+const getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplace, parent = []) => {
     for (let i = 0, j = schema.length; i < j; i++) {
         if (schema[i].data_type === 'text' && schema[i].field_metadata && (schema[i].field_metadata.allow_rich_text ||
             schema[i].field_metadata.markdown)) {
@@ -356,14 +368,14 @@ exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplac
         }
         else if ((schema[i].data_type === 'group' || schema[i].data_type === 'global_field') && schema[i].schema) {
             parent.push(schema[i].uid);
-            exports.getOrSetRTEMarkdownAssets(schema[i].schema, entry, bucket, isFindNotReplace, parent);
+            (0, exports.getOrSetRTEMarkdownAssets)(schema[i].schema, entry, bucket, isFindNotReplace, parent);
             parent.pop();
         }
         else if (schema[i].data_type === 'blocks') {
             for (let k = 0, l = schema[i].blocks.length; k < l; k++) {
                 parent.push(schema[i].uid);
                 parent.push(schema[i].blocks[k].uid);
-                exports.getOrSetRTEMarkdownAssets(schema[i].blocks[k].schema, entry, bucket, isFindNotReplace, parent);
+                (0, exports.getOrSetRTEMarkdownAssets)(schema[i].blocks[k].schema, entry, bucket, isFindNotReplace, parent);
                 parent.pop();
                 parent.pop();
             }
@@ -374,36 +386,38 @@ exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplac
     }
     return entry;
 };
-exports.normalizePluginPath = (config, plugin, isInternal) => {
+exports.getOrSetRTEMarkdownAssets = getOrSetRTEMarkdownAssets;
+const normalizePluginPath = (config, plugin, isInternal) => {
     let pluginPath;
     if (plugin.path && typeof plugin.path === 'string' && plugin.path.length > 0) {
-        if (path_1.isAbsolute(plugin.path)) {
-            if (!fs_1.existsSync(plugin.path)) {
+        if ((0, path_1.isAbsolute)(plugin.path)) {
+            if (!(0, fs_1.existsSync)(plugin.path)) {
                 throw new Error(`${plugin.path} does not exist!`);
             }
            return plugin.path;
         }
-        pluginPath = path_1.resolve(path_1.join(config.paths.baseDir, plugin.name, 'index.js'));
-        if (!fs_1.existsSync(pluginPath)) {
+        pluginPath = (0, path_1.resolve)((0, path_1.join)(config.paths.baseDir, plugin.name, 'index.js'));
+        if (!(0, fs_1.existsSync)(pluginPath)) {
            throw new Error(`${pluginPath} does not exist!`);
        }
        return pluginPath;
    }
    if (isInternal) {
-        pluginPath = path_1.join(__dirname, '..', 'plugins', plugin.name.slice(13), 'index.js');
-        if (fs_1.existsSync(pluginPath)) {
+        pluginPath = (0, path_1.join)(__dirname, '..', 'plugins', plugin.name.slice(13), 'index.js');
+        if ((0, fs_1.existsSync)(pluginPath)) {
            return pluginPath;
        }
    }
-    pluginPath = path_1.resolve(path_1.join(config.paths.plugin, plugin.name, 'index.js'));
-    if (!fs_1.existsSync(pluginPath)) {
+    pluginPath = (0, path_1.resolve)((0, path_1.join)(config.paths.plugin, plugin.name, 'index.js'));
+    if (!(0, fs_1.existsSync)(pluginPath)) {
        throw new Error(`Unable to find plugin: ${JSON.stringify(plugin)}`);
    }
    return pluginPath;
 };
-exports.filterUnwantedKeys = (action, data) => {
+exports.normalizePluginPath = normalizePluginPath;
+const filterUnwantedKeys = (action, data) => {
     if (action === 'publish') {
-        const contentStore = index_1.getConfig().contentStore;
+        const contentStore = (0, index_1.getConfig)().contentStore;
         switch (data._content_type_uid) {
             case '_assets':
                 data = filterKeys(data, contentStore.unwanted.asset);
@@ -417,17 +431,22 @@ exports.filterUnwantedKeys = (action, data) => {
     }
     return data;
 };
+exports.filterUnwantedKeys = filterUnwantedKeys;
 // TODO
 // Add option to delete embedded documents
 const filterKeys = (data, unwantedKeys) => {
     for (const key in unwantedKeys) {
+        // We need _content_type for handling asset published/unpublished events in entry object (Wherever it is referenced).
+        if (key === '_content_type') {
+            continue;
+        }
         if (unwantedKeys[key] && data.hasOwnProperty(key)) {
             delete data[key];
         }
     }
     return data;
 };
-exports.getSchema = (action, data) => {
+const getSchema = (action, data) => {
     let schema;
     if (action === 'publish' && data._content_type_uid !== '_assets') {
         schema = data._content_type;
@@ -438,3 +457,4 @@ exports.getSchema = (action, data) => {
     }
     return { schema };
 };
+exports.getSchema = getSchema;
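
Apart from the re-emit, `util/index.js` carries the most substantive behavioural edits in this release: `filterItems` now returns `false` early for items with no `data` payload (the "To handle content-type" guard), and `filterKeys` skips the `_content_type` key so entries keep the schema needed to handle referenced-asset publish/unpublish events. A self-contained sketch of what the new guard prevents (the sample items are hypothetical):

```js
const { remove } = require('lodash');

const items = [
  { _content_type_uid: 'blog', data: { publish_details: { locale: 'en-us' } } },
  { _content_type_uid: '_content_types' }, // content-type item: no `data` key
];

const filteredOut = remove(items, (item) => {
  // New guard: without it, touching item.data.publish_details below
  // throws a TypeError for the content-type item.
  if (!item.data) {
    return false; // falsy => lodash.remove leaves the item in place
  }
  return !item.data.publish_details; // illustrative predicate only
});

console.log(items.length, filteredOut.length); // 2 0
```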
package/dist/util/logger.js
CHANGED

@@ -5,6 +5,7 @@
  * MIT Licensed
  */
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.logger = exports.setLogger = void 0;
 const validations_1 = require("./validations");
 /**
  * @summary Creates a logger instance
@@ -12,11 +13,11 @@ const validations_1 = require("./validations");
  * const log = createLogger(instance)
  * log.info('Hello world!')
  */
-exports.setLogger = (customLogger) => {
+const setLogger = (customLogger) => {
     if (exports.logger) {
         return exports.logger;
     }
-    else if (!validations_1.validateLogger(customLogger) && !customLogger) {
+    else if (!(0, validations_1.validateLogger)(customLogger) && !customLogger) {
         exports.logger = console;
     }
     else {
@@ -24,3 +25,4 @@ exports.setLogger = (customLogger) => {
     }
     return exports.logger;
 };
+exports.setLogger = setLogger;
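
`logger.js` only changes shape, but the new `exports.logger = void 0;` preamble highlights a practical point: `logger` is a mutable property of the module, so destructuring it before `setLogger` runs captures `undefined`. Consumers should read it off the module object. Illustrative usage (the deep require path is an assumption about the published layout):

```js
const loggerModule = require('@contentstack/datasync-manager/dist/util/logger');

loggerModule.setLogger();          // no/invalid custom logger: falls back to console
loggerModule.logger.info('ready'); // reads the live `logger` property
loggerModule.setLogger({});        // ignored: a logger already exists
```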
package/dist/util/promise.map.js
CHANGED

@@ -5,6 +5,7 @@
  * MIT Licensed
  */
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.map = void 0;
 /**
  * @description Custom promisified map - mimicing 'Bluebird.map'
  * @param {Object} arr - List of items to be passed to 'fn'
@@ -13,7 +14,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  * @param {Array} resultBucket - Collection of results returned by 'arr items' passed onto 'fn'
  * @returns {Promise} Returns a promisifed collection result
  */
-exports.map = (arr, fn, concurrency = 1, resultBucket = []) => {
+const map = (arr, fn, concurrency = 1, resultBucket = []) => {
     return new Promise((resolve, reject) => {
         if (arr.length === 0) {
             return resolve(resultBucket);
@@ -26,10 +27,11 @@ exports.map = (arr, fn, concurrency = 1, resultBucket = []) => {
         }
         return Promise.all(resultBucket)
             .then(() => {
-            return exports.map(arr, fn, concurrency, resultBucket)
+            return (0, exports.map)(arr, fn, concurrency, resultBucket)
                 .then(resolve)
                 .catch(reject);
         })
             .catch(reject);
     });
 };
+exports.map = map;
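
The recursion in `map` still dereferences `exports.map` at call time; the `(0, ...)` wrapper only changes the `this` binding, so overriding the export would still affect recursive calls. Judging from the visible hunks, it awaits batches of `concurrency` items via `Promise.all` and recurses until `arr` is drained (the middle of the function, which fills `resultBucket`, is elided from this diff). A usage sketch under those assumptions:

```js
// Deep require path is an assumption about the published layout.
const { map } = require('@contentstack/datasync-manager/dist/util/promise.map');

map(['a', 'b', 'c', 'd'], (item) => Promise.resolve(item.toUpperCase()), 2)
  .then((results) => console.log('processed:', results.length))
  .catch(console.error);
```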
package/dist/util/series.js
CHANGED

@@ -1,17 +1,20 @@
 "use strict";
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.series = (promises, output = [], counter = 0) => __awaiter(this, void 0, void 0, function* () {
+exports.series = void 0;
+const series = (promises, output = [], counter = 0) => __awaiter(void 0, void 0, void 0, function* () {
     if (counter === promises.length) {
         return output;
     }
     output.push(yield promises[counter]());
-    return exports.series(promises, output, ++counter);
+    return (0, exports.series)(promises, output, ++counter);
 });
+exports.series = series;
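
`series` is fully visible in this diff: it awaits each promise-returning function in order, accumulating results in `output`, and recurses via `(0, exports.series)` with an incremented counter. Usage sketch (deep require path assumed, as above):

```js
const { series } = require('@contentstack/datasync-manager/dist/util/series');

// Each element must be a function returning a promise; they run strictly
// one after another, never concurrently.
const tasks = [1, 2, 3].map((n) => () => Promise.resolve(n * 10));

series(tasks).then((output) => console.log(output)); // [10, 20, 30]
```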
package/dist/util/unprocessible.js
CHANGED

@@ -6,14 +6,16 @@
  * MIT Licensed
  */
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.saveFilteredItems = exports.saveFailedItems = void 0;
 const index_1 = require("../index");
 const fs_1 = require("./fs");
 const index_2 = require("./index");
@@ -29,12 +31,13 @@ const counter = {
  * @param {Object} obj - Contains 'error' and 'data' key
  * @returns {Promise} Returns a promisified object
  */
-exports.saveFailedItems = (obj) => {
+const saveFailedItems = (obj) => {
     return new Promise((resolve) => {
         // const path = getConfig().paths.failedItems
         return resolve(obj);
     });
 };
+exports.saveFailedItems = saveFailedItems;
 /**
  * @description Saves items filtered from SYNC API response
  * @param {Object} items - Filtered items
@@ -42,13 +45,13 @@ exports.saveFailedItems = (obj) => {
  * @param {String} token - Page token value
  * @returns {Promise} Returns a promise
  */
-exports.saveFilteredItems = (items, name, token) => {
-    return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+const saveFilteredItems = (items, name, token) => {
+    return new Promise((resolve, reject) => __awaiter(void 0, void 0, void 0, function* () {
         try {
-            const config = index_1.getConfig();
+            const config = (0, index_1.getConfig)();
             let filename;
             if (!config.syncManager.saveFilteredItems) {
-                return resolve();
+                return resolve('');
             }
             const objDetails = {
                 items,
@@ -62,30 +65,30 @@ exports.saveFilteredItems = (items, name, token) => {
             else {
                 filename = `${config.paths.filtered}-${counter.filtered}.json`;
             }
-            const file = yield index_2.getFile(filename, () => {
+            const file = yield (0, index_2.getFile)(filename, () => {
                 counter.filtered++;
                 return `${config.paths.filtered}-${counter.filtered}.json`;
             });
-            if (fs_1.existsSync(file)) {
-                return fs_1.readFile(file).then((data) => {
+            if ((0, fs_1.existsSync)(file)) {
+                return (0, fs_1.readFile)(file).then((data) => {
                     const loggedItems = JSON.parse(data);
                     loggedItems.push(objDetails);
-                    return fs_1.writeFile(file, JSON.stringify(loggedItems)).then(resolve).catch((error) => {
+                    return (0, fs_1.writeFile)(file, JSON.stringify(loggedItems)).then(resolve).catch((error) => {
                         // failed to log failed items
                         logger_1.logger.error(`Failed to write ${JSON.stringify(loggedItems)} at ${error}`);
                         logger_1.logger.error(error);
-                        return resolve();
+                        return resolve('');
                     });
                 }).catch((error) => {
                     logger_1.logger.error(`Failed to read file from path ${fail}`);
                     logger_1.logger.error(error);
-                    return resolve();
+                    return resolve('');
                 });
             }
-            return fs_1.writeFile(file, JSON.stringify([objDetails])).then(resolve).catch((error) => {
+            return (0, fs_1.writeFile)(file, JSON.stringify([objDetails])).then(resolve).catch((error) => {
                 logger_1.logger.error(`Failed while writing ${JSON.stringify(objDetails)} at ${file}`);
                 logger_1.logger.error(error);
-                return resolve();
+                return resolve('');
             });
         }
         catch (error) {
@@ -93,3 +96,4 @@ exports.saveFilteredItems = (items, name, token) => {
         }
     }));
 };
+exports.saveFilteredItems = saveFilteredItems;
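
A closing note on the recurring `resolve()` to `resolve('')` edit (seen here and in `fs.js`): these promises now fulfil with `''` instead of `undefined`, most plausibly so the code type-checks as `Promise<string>` under the stricter compiler. Both values are falsy, so truthiness checks on the fulfil value behave the same, but strict comparisons against `undefined` change:

```js
Promise.resolve('').then((v) => {
  if (!v) console.log('falsy either way'); // still runs
  console.log(v === undefined);            // false (was true before 2.0.0)
});
```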