@contentstack/datasync-manager 1.2.4 → 2.0.1

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/LICENSE +1 -1
  2. package/dist/api.js +16 -10
  3. package/dist/config.js +1 -0
  4. package/dist/core/index.js +33 -26
  5. package/dist/core/inet.js +10 -7
  6. package/dist/core/plugins.js +6 -5
  7. package/dist/core/process.js +7 -6
  8. package/dist/core/q.js +22 -16
  9. package/dist/core/token-management.js +23 -20
  10. package/dist/index.js +48 -37
  11. package/dist/util/build-paths.js +15 -14
  12. package/dist/util/fs.js +19 -15
  13. package/dist/util/index.js +53 -43
  14. package/dist/util/logger.js +3 -2
  15. package/dist/util/promise.map.js +3 -2
  16. package/dist/util/series.js +3 -2
  17. package/dist/util/unprocessible.js +14 -12
  18. package/dist/util/validations.js +34 -24
  19. package/package.json +8 -2
  20. package/.github/ISSUE_TEMPLATE/bug_report.md +0 -31
  21. package/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  22. package/.github/workflows/codeql-analysis.yml +0 -68
  23. package/.github/workflows/jira.yml +0 -28
  24. package/.github/workflows/release.yml +0 -53
  25. package/.github/workflows/sast-scan.yml +0 -11
  26. package/.github/workflows/sca-scan.yml +0 -15
  27. package/.github/workflows/secrets-scan.yml +0 -11
  28. package/.releaserc +0 -9
  29. package/.talismanrc +0 -4
  30. package/CODEOWNERS +0 -1
  31. package/SECURITY.md +0 -27
  32. package/example/config.js +0 -60
  33. package/example/index.js +0 -30
  34. package/test/api.ts +0 -152
  35. package/test/core/filteredItems.ts +0 -58
  36. package/test/core/index.ts +0 -18
  37. package/test/core/inet.ts +0 -23
  38. package/test/core/q.ts +0 -42
  39. package/test/core/sync.ts +0 -19
  40. package/test/core/token-management.ts +0 -40
  41. package/test/dummy/api-responses/404.ts +0 -3
  42. package/test/dummy/api-responses/content-type.ts +0 -30
  43. package/test/dummy/api-responses/delete-asset.ts +0 -9
  44. package/test/dummy/api-responses/delete-content-type.ts +0 -6
  45. package/test/dummy/api-responses/delete-entry.ts +0 -9
  46. package/test/dummy/api-responses/delete.ts +0 -32
  47. package/test/dummy/api-responses/empty.ts +0 -7
  48. package/test/dummy/api-responses/entries.ts +0 -31
  49. package/test/dummy/api-responses/filter-items.ts +0 -32
  50. package/test/dummy/api-responses/global-field.ts +0 -175
  51. package/test/dummy/api-responses/markdown-content-type.ts +0 -202
  52. package/test/dummy/api-responses/markdown-entries.ts +0 -56
  53. package/test/dummy/api-responses/mixed.ts +0 -77
  54. package/test/dummy/api-responses/publish-asset.ts +0 -14
  55. package/test/dummy/api-responses/publish-entry.ts +0 -14
  56. package/test/dummy/api-responses/publish.ts +0 -35
  57. package/test/dummy/api-responses/references-content-type-2.ts +0 -240
  58. package/test/dummy/api-responses/references-content-type.ts +0 -272
  59. package/test/dummy/api-responses/references-entries.ts +0 -156
  60. package/test/dummy/api-responses/unpublish-asset.ts +0 -9
  61. package/test/dummy/api-responses/unpublish-entry.ts +0 -9
  62. package/test/dummy/api-responses/unpublish.ts +0 -26
  63. package/test/dummy/config.ts +0 -34
  64. package/test/dummy/connector-listener-instances.ts +0 -69
  65. package/test/dummy/filter-items.ts +0 -32
  66. package/test/dummy/plugins/myplugin1/index.js +0 -20
  67. package/test/dummy/plugins/myplugin2/index.js +0 -20
  68. package/test/dummy/references-content-type.ts +0 -340
  69. package/test/dummy/references-entry-expected.ts +0 -161
  70. package/test/dummy/references-entry.ts +0 -95
  71. package/test/index.ts +0 -330
  72. package/test/util/fs.ts +0 -92
  73. package/test/util/index.ts +0 -157
  74. package/test/util/log-save-filtered-items.ts +0 -42
  75. package/test/util/validations.ts +0 -158
  76. package/tslint.json +0 -53
@@ -27,7 +27,7 @@ const fs_1 = require("./fs");
  const logger_1 = require("./logger");
  const unprocessible_1 = require("./unprocessible");
  const validations_1 = require("./validations");
- const debug = debug_1.default('util:index');
+ const debug = (0, debug_1.default)('util:index');
  const formattedAssetType = '_assets';
  const formattedContentType = '_content_types';
  const assetType = 'sys_assets';
@@ -37,11 +37,11 @@ const assetType = 'sys_assets';
  * @param {Object} config - Application config
  * @returns {Promise} Returns a promise
  */
- exports.filterItems = (response, config) => __awaiter(void 0, void 0, void 0, function* () {
- const locales = lodash_1.map(config.locales, 'code');
- const filteredObjects = lodash_1.remove(response.items, (item) => {
+ const filterItems = (response, config) => __awaiter(void 0, void 0, void 0, function* () {
+ const locales = (0, lodash_1.map)(config.locales, 'code');
+ const filteredObjects = (0, lodash_1.remove)(response.items, (item) => {
  // validate item structure. If the structure is not as expected, filter it out
- if (!(validations_1.validateItemStructure(item))) {
+ if (!((0, validations_1.validateItemStructure)(item))) {
  return item;
  }
  // To handle content-type.
@@ -69,10 +69,11 @@ exports.filterItems = (response, config) => __awaiter(void 0, void 0, void 0, fu
  else {
  name = 'sync_token';
  }
- yield unprocessible_1.saveFilteredItems(filteredObjects, name, response[name]);
+ yield (0, unprocessible_1.saveFilteredItems)(filteredObjects, name, response[name]);
  return;
  });
- exports.formatSyncFilters = (config) => {
+ exports.filterItems = filterItems;
+ const formatSyncFilters = (config) => {
  if (config.syncManager.filters && typeof config.syncManager.filters === 'object') {
  const filters = config.syncManager.filters;
  for (const filter in filters) {
@@ -92,12 +93,13 @@ exports.formatSyncFilters = (config) => {
  }
  return config;
  };
+ exports.formatSyncFilters = formatSyncFilters;
  /**
  * @description Groups items based on their content type
  * @param {Array} items - An array of SYNC API's item
  * @returns {Object} Returns an 'object' who's keys are content type uids
  */
- exports.groupItems = (items) => {
+ const groupItems = (items) => {
  const bucket = {};
  items.forEach((item) => {
  if (item._content_type_uid === assetType) {
@@ -112,12 +114,13 @@ exports.groupItems = (items) => {
  });
  return bucket;
  };
+ exports.groupItems = groupItems;
  /**
  * @description Formats SYNC API's items into defined standard
  * @param {Array} items - SYNC API's items
  * @param {Object} config - Application config
  */
- exports.formatItems = (items, config) => {
+ const formatItems = (items, config) => {
  const time = new Date().toISOString();
  for (let i = 0, j = items.length; i < j; i++) {
  switch (items[i].type) {
@@ -127,20 +130,20 @@
  items[i]._type = config.contentstack.actions.publish;
  // extra keys
  items[i]._synced_at = time;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  items[i].locale = items[i].data.publish_details.locale;
  break;
  case 'asset_unpublished':
  delete items[i].type;
  items[i]._content_type_uid = formattedAssetType;
  items[i]._type = config.contentstack.actions.unpublish;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  break;
  case 'asset_deleted':
  delete items[i].type;
  items[i]._content_type_uid = formattedAssetType;
  items[i]._type = config.contentstack.actions.delete;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  break;
  case 'entry_published':
  delete items[i].type;
@@ -148,20 +151,20 @@
  items[i]._content_type_uid = items[i].content_type_uid;
  // extra keys
  items[i]._synced_at = time;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  items[i].locale = items[i].data.publish_details.locale;
  break;
  case 'entry_unpublished':
  delete items[i].type;
  items[i]._content_type_uid = items[i].content_type_uid;
  items[i]._type = config.contentstack.actions.unpublish;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  break;
  case 'entry_deleted':
  delete items[i].type;
  items[i]._content_type_uid = items[i].content_type_uid;
  items[i]._type = config.contentstack.actions.delete;
- items[i] = lodash_1.merge(items[i], items[i].data);
+ items[i] = (0, lodash_1.merge)(items[i], items[i].data);
  break;
  case 'content_type_deleted':
  delete items[i].type;
@@ -180,12 +183,13 @@ exports.formatItems = (items, config) => {
  }
  return items;
  };
+ exports.formatItems = formatItems;
  /**
  * @description Add's checkpoint data on the last item found on the 'SYNC API items' collection
  * @param {Object} groupedItems - Grouped items { groupItems(items) - see above } referred by their content type
  * @param {Object} syncResponse - SYNC API's response
  */
- exports.markCheckpoint = (groupedItems, syncResponse) => {
+ const markCheckpoint = (groupedItems, syncResponse) => {
  const tokenName = (syncResponse.pagination_token) ? 'pagination_token' : 'sync_token';
  const tokenValue = syncResponse[tokenName];
  const contentTypeUids = Object.keys(groupedItems);
@@ -227,18 +231,19 @@ exports.markCheckpoint = (groupedItems, syncResponse) => {
  }
  return groupedItems;
  };
+ exports.markCheckpoint = markCheckpoint;
  /**
  * @description Calcuates filename for ledger and unprocessible files
  * @param {String} file - File to be calculated on
  * @param {Function} rotate - File rotation logic (should return a string)
  * @returns {String} Returns path to a file
  */
- exports.getFile = (file, rotate) => {
+ const getFile = (file, rotate) => {
  // tslint:disable-next-line: no-shadowed-variable
  return new Promise((resolve, reject) => {
- const config = index_1.getConfig();
- if (fs_1.existsSync(file)) {
- return fs_1.stat(file, (statError, stats) => {
+ const config = (0, index_1.getConfig)();
+ if ((0, fs_1.existsSync)(file)) {
+ return (0, fs_1.stat)(file, (statError, stats) => {
  if (statError) {
  return reject(statError);
  }
@@ -254,14 +259,15 @@ exports.getFile = (file, rotate) => {
  });
  }
  else {
- fs_1.mkdirpSync(config.paths.unprocessibleDir);
+ (0, fs_1.mkdirpSync)(config.paths.unprocessibleDir);
  return resolve(file);
  }
  });
  };
+ exports.getFile = getFile;
  const findAssets = (parentEntry, key, schema, entry, bucket, isFindNotReplace) => {
  try {
- const { contentstack } = index_1.getConfig();
+ const { contentstack } = (0, index_1.getConfig)();
  const isMarkdown = (schema.field_metadata.markdown) ? true : false;
  let matches;
  let convertedText;
@@ -286,7 +292,7 @@ const findAssets = (parentEntry, key, schema, entry, bucket, isFindNotReplace) =
  bucket.push(assetObject);
  }
  else {
- const asset = lodash_1.find(bucket, (item) => {
+ const asset = (0, lodash_1.find)(bucket, (item) => {
  // tslint:disable-next-line: max-line-length
  const newRegexp = new RegExp(contentstack.regexp.rte_asset_pattern_2.url, contentstack.regexp.rte_asset_pattern_2.options);
  let urlparts;
@@ -317,11 +323,11 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
  for (let index = 0; index < parentKeys.length; index++) {
  const parentKey = parentKeys[index];
  const subEntry = entry[parentKey];
- if (subEntry && !(lodash_1.isEmpty(subEntry)) && index === (parentKeys.length - 1)) {
- if (subEntry && subEntry.length) {
+ if (subEntry && !((0, lodash_1.isEmpty)(subEntry)) && index === (parentKeys.length - 1)) {
+ if (subEntry instanceof Array && subEntry.length) {
  subEntry.forEach((subEntryItem, idx) => {
  // tricky!
- if (!(lodash_1.isEmpty(subEntryItem))) {
+ if (!((0, lodash_1.isEmpty)(subEntryItem))) {
  findAssets(subEntry, idx, schema, subEntryItem, bucket, findNoteReplace);
  }
  // iterate(schema, subEntryItem, bucket, findNoteReplace, parentKeys)
@@ -334,10 +340,10 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
  }
  }
  else if (subEntry !== undefined) {
- const subKeys = lodash_1.cloneDeep(parentKeys).splice(index);
+ const subKeys = (0, lodash_1.cloneDeep)(parentKeys).splice(index);
  if (subEntry && subEntry instanceof Array && subEntry.length) {
  subEntry.forEach((subEntryItem) => {
- iterate(schema, subEntryItem, bucket, findNoteReplace, lodash_1.cloneDeep(subKeys));
+ iterate(schema, subEntryItem, bucket, findNoteReplace, (0, lodash_1.cloneDeep)(subKeys));
  });
  return;
  }
@@ -352,7 +358,7 @@ const iterate = (schema, entry, bucket, findNoteReplace, parentKeys) => {
  logger_1.logger.error(error);
  }
  };
- exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplace, parent = []) => {
+ const getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplace, parent = []) => {
  for (let i = 0, j = schema.length; i < j; i++) {
  if (schema[i].data_type === 'text' && schema[i].field_metadata && (schema[i].field_metadata.allow_rich_text ||
  schema[i].field_metadata.markdown)) {
@@ -362,14 +368,14 @@ exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplac
  }
  else if ((schema[i].data_type === 'group' || schema[i].data_type === 'global_field') && schema[i].schema) {
  parent.push(schema[i].uid);
- exports.getOrSetRTEMarkdownAssets(schema[i].schema, entry, bucket, isFindNotReplace, parent);
+ (0, exports.getOrSetRTEMarkdownAssets)(schema[i].schema, entry, bucket, isFindNotReplace, parent);
  parent.pop();
  }
  else if (schema[i].data_type === 'blocks') {
  for (let k = 0, l = schema[i].blocks.length; k < l; k++) {
  parent.push(schema[i].uid);
  parent.push(schema[i].blocks[k].uid);
- exports.getOrSetRTEMarkdownAssets(schema[i].blocks[k].schema, entry, bucket, isFindNotReplace, parent);
+ (0, exports.getOrSetRTEMarkdownAssets)(schema[i].blocks[k].schema, entry, bucket, isFindNotReplace, parent);
  parent.pop();
  parent.pop();
  }
@@ -380,36 +386,38 @@ exports.getOrSetRTEMarkdownAssets = (schema, entry, bucket = [], isFindNotReplac
  }
  return entry;
  };
- exports.normalizePluginPath = (config, plugin, isInternal) => {
+ exports.getOrSetRTEMarkdownAssets = getOrSetRTEMarkdownAssets;
+ const normalizePluginPath = (config, plugin, isInternal) => {
  let pluginPath;
  if (plugin.path && typeof plugin.path === 'string' && plugin.path.length > 0) {
- if (path_1.isAbsolute(plugin.path)) {
- if (!fs_1.existsSync(plugin.path)) {
+ if ((0, path_1.isAbsolute)(plugin.path)) {
+ if (!(0, fs_1.existsSync)(plugin.path)) {
  throw new Error(`${plugin.path} does not exist!`);
  }
  return plugin.path;
  }
- pluginPath = path_1.resolve(path_1.join(config.paths.baseDir, plugin.name, 'index.js'));
- if (!fs_1.existsSync(pluginPath)) {
+ pluginPath = (0, path_1.resolve)((0, path_1.join)(config.paths.baseDir, plugin.name, 'index.js'));
+ if (!(0, fs_1.existsSync)(pluginPath)) {
  throw new Error(`${pluginPath} does not exist!`);
  }
  return pluginPath;
  }
  if (isInternal) {
- pluginPath = path_1.join(__dirname, '..', 'plugins', plugin.name.slice(13), 'index.js');
- if (fs_1.existsSync(pluginPath)) {
+ pluginPath = (0, path_1.join)(__dirname, '..', 'plugins', plugin.name.slice(13), 'index.js');
+ if ((0, fs_1.existsSync)(pluginPath)) {
  return pluginPath;
  }
  }
- pluginPath = path_1.resolve(path_1.join(config.paths.plugin, plugin.name, 'index.js'));
- if (!fs_1.existsSync(pluginPath)) {
+ pluginPath = (0, path_1.resolve)((0, path_1.join)(config.paths.plugin, plugin.name, 'index.js'));
+ if (!(0, fs_1.existsSync)(pluginPath)) {
  throw new Error(`Unable to find plugin: ${JSON.stringify(plugin)}`);
  }
  return pluginPath;
  };
- exports.filterUnwantedKeys = (action, data) => {
+ exports.normalizePluginPath = normalizePluginPath;
+ const filterUnwantedKeys = (action, data) => {
  if (action === 'publish') {
- const contentStore = index_1.getConfig().contentStore;
+ const contentStore = (0, index_1.getConfig)().contentStore;
  switch (data._content_type_uid) {
  case '_assets':
  data = filterKeys(data, contentStore.unwanted.asset);
@@ -423,6 +431,7 @@ exports.filterUnwantedKeys = (action, data) => {
  }
  return data;
  };
+ exports.filterUnwantedKeys = filterUnwantedKeys;
  // TODO
  // Add option to delete embedded documents
  const filterKeys = (data, unwantedKeys) => {
@@ -437,7 +446,7 @@ const filterKeys = (data, unwantedKeys) => {
  }
  return data;
  };
- exports.getSchema = (action, data) => {
+ const getSchema = (action, data) => {
  let schema;
  if (action === 'publish' && data._content_type_uid !== '_assets') {
  schema = data._content_type;
@@ -448,3 +457,4 @@ exports.getSchema = (action, data) => {
  }
  return { schema };
  };
+ exports.getSchema = getSchema;
@@ -13,11 +13,11 @@ const validations_1 = require("./validations");
  * const log = createLogger(instance)
  * log.info('Hello world!')
  */
- exports.setLogger = (customLogger) => {
+ const setLogger = (customLogger) => {
  if (exports.logger) {
  return exports.logger;
  }
- else if (!validations_1.validateLogger(customLogger) && !customLogger) {
+ else if (!(0, validations_1.validateLogger)(customLogger) && !customLogger) {
  exports.logger = console;
  }
  else {
@@ -25,3 +25,4 @@ exports.setLogger = (customLogger) => {
  }
  return exports.logger;
  };
+ exports.setLogger = setLogger;
@@ -14,7 +14,7 @@ exports.map = void 0;
  * @param {Array} resultBucket - Collection of results returned by 'arr items' passed onto 'fn'
  * @returns {Promise} Returns a promisifed collection result
  */
- exports.map = (arr, fn, concurrency = 1, resultBucket = []) => {
+ const map = (arr, fn, concurrency = 1, resultBucket = []) => {
  return new Promise((resolve, reject) => {
  if (arr.length === 0) {
  return resolve(resultBucket);
@@ -27,10 +27,11 @@ exports.map = (arr, fn, concurrency = 1, resultBucket = []) => {
  }
  return Promise.all(resultBucket)
  .then(() => {
- return exports.map(arr, fn, concurrency, resultBucket)
+ return (0, exports.map)(arr, fn, concurrency, resultBucket)
  .then(resolve)
  .catch(reject);
  })
  .catch(reject);
  });
  };
+ exports.map = map;
@@ -10,10 +10,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.series = void 0;
- exports.series = (promises, output = [], counter = 0) => __awaiter(void 0, void 0, void 0, function* () {
+ const series = (promises, output = [], counter = 0) => __awaiter(void 0, void 0, void 0, function* () {
  if (counter === promises.length) {
  return output;
  }
  output.push(yield promises[counter]());
- return exports.series(promises, output, ++counter);
+ return (0, exports.series)(promises, output, ++counter);
  });
+ exports.series = series;
@@ -31,12 +31,13 @@ const counter = {
  * @param {Object} obj - Contains 'error' and 'data' key
  * @returns {Promise} Returns a promisified object
  */
- exports.saveFailedItems = (obj) => {
+ const saveFailedItems = (obj) => {
  return new Promise((resolve) => {
  // const path = getConfig().paths.failedItems
  return resolve(obj);
  });
  };
+ exports.saveFailedItems = saveFailedItems;
  /**
  * @description Saves items filtered from SYNC API response
  * @param {Object} items - Filtered items
@@ -44,13 +45,13 @@ exports.saveFailedItems = (obj) => {
  * @param {String} token - Page token value
  * @returns {Promise} Returns a promise
  */
- exports.saveFilteredItems = (items, name, token) => {
+ const saveFilteredItems = (items, name, token) => {
  return new Promise((resolve, reject) => __awaiter(void 0, void 0, void 0, function* () {
  try {
- const config = index_1.getConfig();
+ const config = (0, index_1.getConfig)();
  let filename;
  if (!config.syncManager.saveFilteredItems) {
- return resolve();
+ return resolve('');
  }
  const objDetails = {
  items,
@@ -64,30 +65,30 @@ exports.saveFilteredItems = (items, name, token) => {
  else {
  filename = `${config.paths.filtered}-${counter.filtered}.json`;
  }
- const file = yield index_2.getFile(filename, () => {
+ const file = yield (0, index_2.getFile)(filename, () => {
  counter.filtered++;
  return `${config.paths.filtered}-${counter.filtered}.json`;
  });
- if (fs_1.existsSync(file)) {
- return fs_1.readFile(file).then((data) => {
+ if ((0, fs_1.existsSync)(file)) {
+ return (0, fs_1.readFile)(file).then((data) => {
  const loggedItems = JSON.parse(data);
  loggedItems.push(objDetails);
- return fs_1.writeFile(file, JSON.stringify(loggedItems)).then(resolve).catch((error) => {
+ return (0, fs_1.writeFile)(file, JSON.stringify(loggedItems)).then(resolve).catch((error) => {
  // failed to log failed items
  logger_1.logger.error(`Failed to write ${JSON.stringify(loggedItems)} at ${error}`);
  logger_1.logger.error(error);
- return resolve();
+ return resolve('');
  });
  }).catch((error) => {
  logger_1.logger.error(`Failed to read file from path ${fail}`);
  logger_1.logger.error(error);
- return resolve();
+ return resolve('');
  });
  }
- return fs_1.writeFile(file, JSON.stringify([objDetails])).then(resolve).catch((error) => {
+ return (0, fs_1.writeFile)(file, JSON.stringify([objDetails])).then(resolve).catch((error) => {
  logger_1.logger.error(`Failed while writing ${JSON.stringify(objDetails)} at ${file}`);
  logger_1.logger.error(error);
- return resolve();
+ return resolve('');
  });
  }
  catch (error) {
@@ -95,3 +96,4 @@ exports.saveFilteredItems = (items, name, token) => {
  }
  }));
  };
+ exports.saveFilteredItems = saveFilteredItems;