@contentstack/cli-cm-import 1.1.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ const stack = require('../util/contentstack-management-sdk');
18
18
  const extension_suppress = require('../util/extensionsUidReplace');
19
19
  const lookupReplaceAssets = require('../util/lookupReplaceAssets');
20
20
  const lookupReplaceEntries = require('../util/lookupReplaceEntries');
21
- const { getInstalledExtensions } = require('../util/marketplace-app-helper')
21
+ const { getInstalledExtensions } = require('../util/marketplace-app-helper');
22
22
 
23
23
  let client;
24
24
  let config = util.getConfig();
@@ -42,8 +42,10 @@ let masterLanguage;
42
42
 
43
43
  let skipFiles = ['__master.json', '__priority.json', 'schema.json'];
44
44
  let entryBatchLimit = config.rateLimit || 10;
45
+ const importConcurrency = eConfig.importConcurrency || config.importConcurrency;
46
+ const writeConcurrency = eConfig.writeConcurrency || config.writeConcurrency;
45
47
 
46
- function importEntries() {
48
+ function EntriesImport() {
47
49
  let self = this;
48
50
  mappedAssetUidPath = path.resolve(config.data, 'mapper', 'assets', 'uid-mapping.json');
49
51
  mappedAssetUrlPath = path.resolve(config.data, 'mapper', 'assets', 'url-mapping.json');
@@ -58,7 +60,7 @@ function importEntries() {
58
60
 
59
61
  createdEntriesWOUidPath = path.join(entryMapperPath, 'created-entries-wo-uid.json');
60
62
  failedWOPath = path.join(entryMapperPath, 'failedWO.json');
61
-
63
+
62
64
  // Object of Schemas, referred to by their content type uid
63
65
  this.ctSchemas = {};
64
66
  // Array of content type uids, that have reference fields
@@ -80,10 +82,10 @@ function importEntries() {
80
82
  // Entries that failed to get created OR updated
81
83
  this.fails = [];
82
84
  // List of installed extensions to replace uid
83
- this.installedExtensions = []
85
+ this.installedExtensions = [];
84
86
 
85
87
  let files = fs.readdirSync(ctPath);
86
- this.environment = helper.readFile(environmentPath);
88
+ this.environment = helper.readFileSync(environmentPath);
87
89
  for (let index in files) {
88
90
  if (index) {
89
91
  try {
@@ -94,14 +96,14 @@ function importEntries() {
94
96
  }
95
97
  }
96
98
  } catch (error) {
97
- console.error(error);
99
+ addlogs(config, `Failed to read the content types to import entries ${util.formatError(error)}`);
98
100
  process.exit(0);
99
101
  }
100
102
  }
101
103
  }
102
104
  }
103
105
 
104
- importEntries.prototype = {
106
+ EntriesImport.prototype = {
105
107
  /**
106
108
  * Start point for entry import
107
109
  * @return promise
@@ -112,15 +114,15 @@ importEntries.prototype = {
112
114
  client = stack.Client(config);
113
115
  masterLanguage = config.master_locale;
114
116
  addlogs(config, 'Migrating entries', 'success');
115
- let languages = helper.readFile(lPath);
117
+ let languages = helper.readFileSync(lPath);
116
118
  const appMapperFolderPath = path.join(config.data, 'mapper', 'marketplace_apps');
117
119
 
118
120
  if (fs.existsSync(path.join(appMapperFolderPath, 'marketplace-apps.json'))) {
119
- self.installedExtensions = helper.readFile(path.join(appMapperFolderPath, 'marketplace-apps.json')) || {};
121
+ self.installedExtensions = helper.readFileSync(path.join(appMapperFolderPath, 'marketplace-apps.json')) || {};
120
122
  }
121
123
 
122
124
  if (_.isEmpty(self.installedExtensions)) {
123
- self.installedExtensions = await getInstalledExtensions(config)
125
+ self.installedExtensions = await getInstalledExtensions(config);
124
126
  }
125
127
 
126
128
  return new Promise(function (resolve, reject) {
@@ -138,8 +140,10 @@ importEntries.prototype = {
138
140
  return self
139
141
  .supressFields()
140
142
  .then(async function () {
141
- let mappedAssetUids = helper.readFile(mappedAssetUidPath) || {};
142
- let mappedAssetUrls = helper.readFile(mappedAssetUrlPath) || {};
143
+ addlogs(config, 'Completed suppressing content type reference fields', 'success');
144
+
145
+ let mappedAssetUids = helper.readFileSync(mappedAssetUidPath) || {};
146
+ let mappedAssetUrls = helper.readFileSync(mappedAssetUrlPath) || {};
143
147
 
144
148
  // Step 2: Iterate over available languages to create entries in each.
145
149
  let counter = 0;
@@ -151,8 +155,18 @@ importEntries.prototype = {
151
155
  (config.hasOwnProperty('onlylocales') && config.onlylocales.indexOf(lang) !== -1) ||
152
156
  !config.hasOwnProperty('onlylocales')
153
157
  ) {
158
+ addlogs(config, `Starting to create entries in ${lang} locale`, 'info');
154
159
  await self.createEntries(lang, mappedAssetUids, mappedAssetUrls);
155
- await self.getCreatedEntriesWOUid();
160
+ addlogs(config, 'Entries created successfully', 'info');
161
+ try {
162
+ await self.getCreatedEntriesWOUid();
163
+ } catch (error) {
164
+ addlogs(
165
+ config,
166
+ `Failed to get the existing entries to update the mapper ${util.formatError(error)}`, 'error',
167
+ );
168
+ }
169
+ addlogs(config, 'Starting to update entries with references', 'info');
156
170
  await self.repostEntries(lang);
157
171
  addlogs(config, "Successfully imported '" + lang + "' entries!", 'success');
158
172
  counter++;
@@ -166,51 +180,62 @@ importEntries.prototype = {
166
180
  },
167
181
  ).then(async function () {
168
182
  // Step 3: Revert all the changes done in content type in step 1
183
+ addlogs(config, 'Restoring content type changes', 'info');
169
184
  await self.unSuppressFields();
185
+ addlogs(config, 'Removing entries from master language which got created by default', 'info');
170
186
  await self.removeBuggedEntries();
171
- let ct_field_visibility_uid = helper.readFile(path.join(ctPath + '/field_rules_uid.json'));
187
+ addlogs(config, 'Updating the field rules of content type', 'info');
188
+ let ct_field_visibility_uid = helper.readFileSync(path.join(ctPath + '/field_rules_uid.json'));
172
189
  let ct_files = fs.readdirSync(ctPath);
173
190
  if (ct_field_visibility_uid && ct_field_visibility_uid != 'undefined') {
174
191
  for (const element of ct_field_visibility_uid) {
175
192
  if (ct_files.indexOf(element + '.json') > -1) {
176
193
  let schema = require(path.resolve(ctPath, element));
177
- await self.field_rules_update(schema);
194
+ try {
195
+ await self.field_rules_update(schema);
196
+ } catch (error) {
197
+ addlogs(
198
+ config,
199
+ `Failed to update the field rules for content type ${schema.uid} ${util.formatError(error)}`,
200
+ );
201
+ }
178
202
  }
179
203
  }
180
204
  }
181
205
  addlogs(config, chalk.green('Entries have been imported successfully!'), 'success');
182
206
  if (config.entriesPublish) {
207
+ addlogs(config, chalk.green('Publishing entries'), 'success');
183
208
  return self
184
209
  .publish(langs)
185
210
  .then(function () {
186
211
  addlogs(config, chalk.green('All the entries have been published successfully'), 'success');
187
212
  return resolve();
188
213
  })
189
- .catch((errors) => {
190
- addlogs(config, chalk.error('Some entries might have failed to publish.'), 'error');
191
- return reject(errors);
214
+ .catch((error) => {
215
+ addlogs(config, `Error in publishing entries ${util.formatError(error)}`, 'error');
192
216
  });
193
217
  }
194
218
  return resolve();
195
219
  });
196
220
  })
197
221
  .catch(function (error) {
198
- return reject(error);
222
+ addlogs(config, util.formatError(error), 'error');
223
+ reject('Failed import entries');
199
224
  });
200
225
  });
201
226
  },
202
227
 
203
228
  createEntries: function (lang, mappedAssetUids, mappedAssetUrls) {
204
229
  let self = this;
205
- return new Promise(function (resolve, reject) {
230
+ return new Promise(async function (resolve, reject) {
206
231
  let contentTypeUids = Object.keys(self.ctSchemas);
207
232
  if (fs.existsSync(entryUidMapperPath)) {
208
- self.mappedUids = helper.readFile(entryUidMapperPath);
233
+ self.mappedUids = await helper.readLargeFile(entryUidMapperPath);
209
234
  }
210
235
  self.mappedUids = self.mappedUids || {};
211
236
  return Promise.map(
212
237
  contentTypeUids,
213
- function (ctUid) {
238
+ async function (ctUid) {
214
239
  let eLangFolderPath = path.join(entryMapperPath, lang);
215
240
  let eLogFolderPath = path.join(entryMapperPath, lang, ctUid);
216
241
  mkdirp.sync(eLogFolderPath);
@@ -228,11 +253,11 @@ importEntries.prototype = {
228
253
  });
229
254
 
230
255
  if (fs.existsSync(createdEntriesPath)) {
231
- createdEntries = helper.readFile(createdEntriesPath);
256
+ createdEntries = await helper.readLargeFile(createdEntriesPath);
232
257
  createdEntries = createdEntries || {};
233
258
  }
234
259
  if (fs.existsSync(eFilePath)) {
235
- let entries = helper.readFile(eFilePath);
260
+ let entries = await helper.readLargeFile(eFilePath);
236
261
  if (!_.isPlainObject(entries) || _.isEmpty(entries)) {
237
262
  addlogs(
238
263
  config,
@@ -243,43 +268,51 @@ importEntries.prototype = {
243
268
  addlogs(config, `Creating entries for content type ${ctUid} in language ${lang} ...`, 'success');
244
269
  for (let eUid in entries) {
245
270
  if (eUid) {
246
- // check ctUid in self.ctJsonRte array, if ct exists there... only then remove entry references for json rte
247
- // also with json rte, api creates the json-rte field with the same uid as passed in the payload.
248
-
249
- if (self.ctJsonRte.indexOf(ctUid) > -1) {
250
- entries[eUid] = self.removeUidsFromJsonRteFields(entries[eUid], self.ctSchemas[ctUid].schema);
251
- }
252
-
253
- // remove entry references from json-rte fields
254
- if (self.ctJsonRteWithEntryRefs.indexOf(ctUid) > -1) {
255
- entries[eUid] = self.removeEntryRefsFromJSONRTE(entries[eUid], self.ctSchemas[ctUid].schema);
271
+ try {
272
+ // check ctUid in self.ctJsonRte array, if ct exists there... only then remove entry references for json rte
273
+ // also with json rte, api creates the json-rte field with the same uid as passed in the payload.
274
+
275
+ if (self.ctJsonRte.indexOf(ctUid) > -1) {
276
+ entries[eUid] = self.removeUidsFromJsonRteFields(entries[eUid], self.ctSchemas[ctUid].schema);
277
+ }
278
+
279
+ // remove entry references from json-rte fields
280
+ if (self.ctJsonRteWithEntryRefs.indexOf(ctUid) > -1) {
281
+ entries[eUid] = self.removeEntryRefsFromJSONRTE(entries[eUid], self.ctSchemas[ctUid].schema);
282
+ }
283
+ // will replace all old asset uid/urls with new ones
284
+ entries[eUid] = lookupReplaceAssets(
285
+ {
286
+ content_type: self.ctSchemas[ctUid],
287
+ entry: entries[eUid],
288
+ },
289
+ mappedAssetUids,
290
+ mappedAssetUrls,
291
+ eLangFolderPath,
292
+ self.installedExtensions,
293
+ );
294
+ } catch (error) {
295
+ addlogs(config, 'Failed to update entry while creating entry id ' + eUid);
296
+ addlogs(config, util.formatError(error), 'error');
256
297
  }
257
- // will replace all old asset uid/urls with new ones
258
- entries[eUid] = lookupReplaceAssets(
259
- {
260
- content_type: self.ctSchemas[ctUid],
261
- entry: entries[eUid],
262
- },
263
- mappedAssetUids,
264
- mappedAssetUrls,
265
- eLangFolderPath,
266
- self.installedExtensions
267
- );
268
298
  }
269
299
  }
270
300
  let eUids = Object.keys(entries);
271
301
  let batches = [];
272
302
 
303
+ let entryBatchLimit = eConfig.batchLimit || 10;
304
+ let batchSize = Math.round(entryBatchLimit / 3);
305
+
273
306
  // Run entry creation in batches of ~16~ entries
274
- for (let i = 0; i < eUids.length; i += Math.round(entryBatchLimit / 3)) {
275
- batches.push(eUids.slice(i, i + Math.round(entryBatchLimit / 3)));
307
+ for (let i = 0; i < eUids.length; i += batchSize) {
308
+ batches.push(eUids.slice(i, i + batchSize));
276
309
  }
277
310
  return Promise.map(
278
311
  batches,
279
312
  async function (batch) {
280
313
  return Promise.map(
281
314
  batch,
282
- async function (eUid) {
315
+ async function (eUid, batchIndex) {
283
316
  // if entry is already created
284
317
  if (createdEntries.hasOwnProperty(eUid)) {
285
318
  addlogs(
@@ -338,16 +371,15 @@ importEntries.prototype = {
338
371
  }
339
372
  }
340
373
  })
341
- .catch(function (err) {
342
- let error = JSON.parse(err.message);
343
- addlogs(config, chalk.red('Error updating entry', JSON.stringify(error)), 'error');
374
+ .catch(function (error) {
375
+ addlogs(config, `Failed to update an entry ${eUid} ${util.formatError(error)}`, 'error');
344
376
  self.fails.push({
345
377
  content_type: ctUid,
346
378
  locale: lang,
347
379
  entry: entries[eUid],
348
- error: error,
380
+ error: util.formatError(error),
349
381
  });
350
- return err;
382
+ return error;
351
383
  });
352
384
  }
353
385
  delete requestObject.json.entry.publish_details;
@@ -383,11 +415,7 @@ importEntries.prototype = {
383
415
  'error',
384
416
  );
385
417
  } else {
386
- addlogs(
387
- config,
388
- chalk.red('Error creating entry due to: ' + JSON.stringify(error)),
389
- 'error',
390
- );
418
+ addlogs(config, `Failed to create an entry ${eUid} ${util.formatError(error)}`, 'error');
391
419
  }
392
420
  self.createdEntriesWOUid.push({
393
421
  content_type: ctUid,
@@ -395,12 +423,12 @@ importEntries.prototype = {
395
423
  entry: entries[eUid],
396
424
  error: error,
397
425
  });
398
- helper.writeFile(createdEntriesWOUidPath, self.createdEntriesWOUid);
426
+ helper.writeFileSync(createdEntriesWOUidPath, self.createdEntriesWOUid);
399
427
  return;
400
428
  }
401
429
  // TODO: if status code: 422, check the reason
402
430
  // 429 for rate limit
403
- addlogs(config, chalk.red('Error creating entry', JSON.stringify(error)), 'error');
431
+ addlogs(config, `Failed to create an entry ${eUid} ${util.formatError(error)}`, 'error');
404
432
  self.fails.push({
405
433
  content_type: ctUid,
406
434
  locale: lang,
@@ -411,14 +439,14 @@ importEntries.prototype = {
411
439
  // create/update 5 entries at a time
412
440
  },
413
441
  {
414
- concurrency: 1,
442
+ concurrency: importConcurrency,
415
443
  },
416
444
  ).then(function () {
417
- helper.writeFile(successEntryLogPath, self.success[ctUid]);
418
- helper.writeFile(failedEntryLogPath, self.fails[ctUid]);
419
- helper.writeFile(entryUidMapperPath, self.mappedUids);
420
- helper.writeFile(uniqueUidMapperPath, self.uniqueUids);
421
- helper.writeFile(createdEntriesPath, createdEntries);
445
+ helper.writeFileSync(successEntryLogPath, self.success[ctUid]);
446
+ helper.writeFileSync(failedEntryLogPath, self.fails[ctUid]);
447
+ helper.writeFileSync(entryUidMapperPath, self.mappedUids);
448
+ helper.writeFileSync(uniqueUidMapperPath, self.uniqueUids);
449
+ helper.writeFileSync(createdEntriesPath, createdEntries);
422
450
  });
423
451
  // process one batch at a time
424
452
  },
@@ -467,7 +495,7 @@ importEntries.prototype = {
467
495
  }
468
496
  },
469
497
  {
470
- concurrency: reqConcurrency,
498
+ concurrency: 1,
471
499
  },
472
500
  )
473
501
  .then(function () {
@@ -483,7 +511,7 @@ importEntries.prototype = {
483
511
  getCreatedEntriesWOUid: function () {
484
512
  let self = this;
485
513
  return new Promise(function (resolve) {
486
- self.createdEntriesWOUid = helper.readFile(createdEntriesWOUidPath);
514
+ self.createdEntriesWOUid = helper.readFileSync(createdEntriesWOUidPath);
487
515
  self.failedWO = [];
488
516
  if (_.isArray(self.createdEntriesWOUid) && self.createdEntriesWOUid.length > 0) {
489
517
  return Promise.map(
@@ -492,10 +520,10 @@ importEntries.prototype = {
492
520
  return self.fetchEntry(entry);
493
521
  },
494
522
  {
495
- concurrency: reqConcurrency,
523
+ concurrency: importConcurrency,
496
524
  },
497
525
  ).then(function () {
498
- helper.writeFile(failedWOPath, self.failedWO);
526
+ helper.writeFileSync(failedWOPath, self.failedWO);
499
527
  addlogs(config, 'Mapped entries without mapped uid successfully!', 'success');
500
528
  return resolve();
501
529
  });
@@ -506,25 +534,25 @@ importEntries.prototype = {
506
534
  },
507
535
  repostEntries: function (lang) {
508
536
  let self = this;
509
- return new Promise(function (resolve, reject) {
510
- let _mapped_ = helper.readFile(path.join(entryMapperPath, 'uid-mapping.json'));
537
+ return new Promise(async function (resolve, reject) {
538
+ let _mapped_ = await helper.readLargeFile(path.join(entryMapperPath, 'uid-mapping.json'));
511
539
  if (_.isPlainObject(_mapped_)) {
512
540
  self.mappedUids = _.merge(_mapped_, self.mappedUids);
513
541
  }
514
542
  return Promise.map(
515
543
  self.refSchemas,
516
- function (ctUid) {
544
+ async function (ctUid) {
517
545
  let eFolderPath = path.join(entryMapperPath, lang, ctUid);
518
546
  let eSuccessFilePath = path.join(eFolderPath, 'success.json');
519
547
  let eFilePath = path.resolve(ePath, ctUid, lang + '.json');
520
- let sourceStackEntries = helper.readFile(eFilePath);
548
+ let sourceStackEntries = await helper.readLargeFile(eFilePath);
521
549
 
522
550
  if (!fs.existsSync(eSuccessFilePath)) {
523
551
  addlogs(config, 'Success file was not found at: ' + eSuccessFilePath, 'success');
524
552
  return;
525
553
  }
526
554
 
527
- let entries = helper.readFile(eSuccessFilePath);
555
+ let entries = await helper.readLargeFile(eSuccessFilePath, { type: 'array' }); // TBD LARGE
528
556
  entries = entries || [];
529
557
  if (entries.length === 0) {
530
558
  addlogs(config, "No entries were created to be updated in '" + lang + "' language!", 'success');
@@ -532,8 +560,8 @@ importEntries.prototype = {
532
560
  }
533
561
 
534
562
  // Keep track of entries that have their references updated
535
- let refsUpdatedUids = helper.readFile(path.join(eFolderPath, 'refsUpdatedUids.json'));
536
- let refsUpdateFailed = helper.readFile(path.join(eFolderPath, 'refsUpdateFailed.json'));
563
+ let refsUpdatedUids = helper.readFileSync(path.join(eFolderPath, 'refsUpdatedUids.json'));
564
+ let refsUpdateFailed = helper.readFileSync(path.join(eFolderPath, 'refsUpdateFailed.json'));
537
565
  let schema = self.ctSchemas[ctUid];
538
566
 
539
567
  let batches = [];
@@ -544,6 +572,8 @@ importEntries.prototype = {
544
572
  // map failed reference uids @mapper/language/unmapped-uids.json
545
573
  let refUidMapperPath = path.join(entryMapperPath, lang);
546
574
 
575
+ addlogs(config, 'Starting to update the entry for reposting');
576
+
547
577
  entries = _.map(entries, function (entry) {
548
578
  try {
549
579
  let uid = entry.uid;
@@ -569,14 +599,20 @@ importEntries.prototype = {
569
599
  _entry.uid = uid;
570
600
  return _entry;
571
601
  } catch (error) {
572
- console.error(error);
573
- return error;
602
+ addlogs(
603
+ config,
604
+ `Failed to update the entry ${uid} references while reposting ${util.formatError(error)}`,
605
+ );
574
606
  }
575
607
  });
576
608
 
577
- // Run entry creation in batches of ~16~ entries
578
- for (let i = 0; i < entries.length; i += Math.round(entryBatchLimit / 3)) {
579
- batches.push(entries.slice(i, i + Math.round(entryBatchLimit / 3)));
609
+ addlogs(config, 'Starting the reposting process for entries');
610
+
611
+ const entryBatchLimit = eConfig.batchLimit || 10;
612
+ const batchSize = Math.round(entryBatchLimit / 3);
613
+ // Run entry creation in batches
614
+ for (let i = 0; i < entries.length; i += batchSize) {
615
+ batches.push(entries.slice(i, i + batchSize));
580
616
  }
581
617
  return Promise.map(
582
618
  batches,
@@ -610,12 +646,12 @@ importEntries.prototype = {
610
646
  return entryResponse
611
647
  .update({ locale: lang })
612
648
  .then((response) => {
613
- for (let j = 0; j < entries.length; j++) {
614
- if (entries[j].uid === response.uid) {
615
- entries[j] = response;
616
- break;
617
- }
618
- }
649
+ // for (let j = 0; j < entries.length; j++) {
650
+ // if (entries[j].uid === response.uid) {
651
+ // entries[j] = response;
652
+ // break;
653
+ // }
654
+ // }
619
655
  refsUpdatedUids.push(response.uid);
620
656
  return resolveUpdatedUids();
621
657
  })
@@ -633,7 +669,7 @@ importEntries.prototype = {
633
669
  'error',
634
670
  );
635
671
 
636
- addlogs(config, error, 'error');
672
+ addlogs(config, util.formatError(error), 'error');
637
673
  refsUpdateFailed.push({
638
674
  content_type: ctUid,
639
675
  entry: entry,
@@ -646,24 +682,25 @@ importEntries.prototype = {
646
682
  await promiseResult;
647
683
  },
648
684
  {
649
- concurrency: reqConcurrency,
685
+ concurrency: importConcurrency,
650
686
  },
651
687
  )
652
688
  .then(function () {
653
689
  // batch completed successfully
654
- helper.writeFile(path.join(eFolderPath, 'success.json'), entries);
655
- helper.writeFile(path.join(eFolderPath, 'refsUpdatedUids.json'), refsUpdatedUids);
656
- helper.writeFile(path.join(eFolderPath, 'refsUpdateFailed.json'), refsUpdateFailed);
690
+ helper.writeFileSync(path.join(eFolderPath, 'success.json'), entries);
691
+ helper.writeFileSync(path.join(eFolderPath, 'refsUpdatedUids.json'), refsUpdatedUids);
692
+ helper.writeFileSync(path.join(eFolderPath, 'refsUpdateFailed.json'), refsUpdateFailed);
657
693
  addlogs(config, 'Completed re-post entries batch no: ' + (index + 1) + ' successfully!', 'success');
658
694
  })
659
695
  .catch(function (error) {
660
696
  // error while executing entry in batch
661
697
  addlogs(config, chalk.red('Failed re-post entries at batch no: ' + (index + 1)), 'error');
662
- throw error;
698
+ addlogs(config, util.formatError(error), 'error');
699
+ // throw error;
663
700
  });
664
701
  },
665
702
  {
666
- concurrency: reqConcurrency,
703
+ concurrency: 1,
667
704
  },
668
705
  )
669
706
  .then(function () {
@@ -676,22 +713,13 @@ importEntries.prototype = {
676
713
  })
677
714
  .catch(function (error) {
678
715
  // error while updating entries with references
679
- addlogs(
680
- config,
681
- chalk.red(
682
- "Failed while importing entries of Content Type: '" +
683
- ctUid +
684
- "' in language: '" +
685
- lang +
686
- "' successfully!",
687
- ),
688
- 'error',
689
- );
690
- throw error;
716
+ addlogs(config, chalk.red(`Failed re-post entries of content type ${ctUid} locale ${lang}`), 'error');
717
+ addlogs(config, util.formatError(error), 'error');
718
+ // throw error;
691
719
  });
692
720
  },
693
721
  {
694
- concurrency: reqConcurrency,
722
+ concurrency: 1,
695
723
  },
696
724
  )
697
725
  .then(function () {
@@ -708,7 +736,7 @@ importEntries.prototype = {
708
736
  },
709
737
  supressFields: async function () {
710
738
  // it should be spelled as suppressFields
711
- addlogs(config, chalk.white('Suppressing content type fields...'), 'success');
739
+ addlogs(config, 'Suppressing content type reference fields', 'success');
712
740
  let self = this;
713
741
  return new Promise(async function (resolve, reject) {
714
742
  let modifiedSchemas = [];
@@ -771,7 +799,7 @@ importEntries.prototype = {
771
799
  }
772
800
 
773
801
  // write modified schema in backup file
774
- helper.writeFile(modifiedSchemaPath, modifiedSchemas);
802
+ helper.writeFileSync(modifiedSchemaPath, modifiedSchemas);
775
803
 
776
804
  return Promise.map(
777
805
  suppressedSchemas,
@@ -786,25 +814,22 @@ importEntries.prototype = {
786
814
  // empty function
787
815
  })
788
816
  .catch(function (_error) {
789
- addlogs(
790
- config,
791
- chalk.red("Failed to modify mandatory field of '" + schema.uid + "' content type"),
792
- 'error',
793
- );
817
+ addlogs(config, util.formatError(_error), 'error');
818
+ reject(`Failed suppress content type ${schema.uid} reference fields`);
794
819
  });
795
820
  // update 5 content types at a time
796
821
  },
797
822
  {
798
823
  // update reqConcurrency content types at a time
799
- concurrency: reqConcurrency,
824
+ concurrency: importConcurrency,
800
825
  },
801
826
  )
802
827
  .then(function () {
803
828
  return resolve();
804
829
  })
805
830
  .catch(function (error) {
806
- addlogs(config, chalk.red('Error while suppressing mandatory field schemas'), 'error');
807
- return reject(error);
831
+ addlogs(config, util.formatError(error), 'error');
832
+ return reject('Failed to suppress reference fields in content type');
808
833
  });
809
834
  });
810
835
  },
@@ -834,9 +859,9 @@ importEntries.prototype = {
834
859
  }
835
860
  self.mappedUids[query.entry.uid] = response.body.entries[0].uid;
836
861
  let _ePath = path.join(entryMapperPath, query.locale, query.content_type, 'success.json');
837
- let entries = helper.readFile(_ePath);
862
+ let entries = helper.readFileSync(_ePath);
838
863
  entries.push(query.entry);
839
- helper.writeFile(_ePath, entries);
864
+ helper.writeFileSync(_ePath, entries);
840
865
  addlogs(
841
866
  config,
842
867
  'Completed mapping entry wo uid: ' + query.entry.uid + ': ' + response.body.entries[0].uid,
@@ -852,7 +877,7 @@ importEntries.prototype = {
852
877
  unSuppressFields: function () {
853
878
  let self = this;
854
879
  return new Promise(async function (resolve, reject) {
855
- let modifiedSchemas = helper.readFile(modifiedSchemaPath);
880
+ let modifiedSchemas = helper.readFileSync(modifiedSchemaPath);
856
881
  let modifiedSchemasUids = [];
857
882
  let updatedExtensionUidsSchemas = [];
858
883
  for (let uid in modifiedSchemas) {
@@ -912,7 +937,7 @@ importEntries.prototype = {
912
937
  }
913
938
  }
914
939
  // re-write, in case some schemas failed to update
915
- helper.writeFile(modifiedSchemaPath, _.compact(modifiedSchemas));
940
+ helper.writeFileSync(modifiedSchemaPath, _.compact(modifiedSchemas));
916
941
  addlogs(config, 'Re-modified content type schemas to their original form!', 'success');
917
942
  return resolve();
918
943
  })
@@ -925,7 +950,7 @@ importEntries.prototype = {
925
950
  removeBuggedEntries: function () {
926
951
  let self = this;
927
952
  return new Promise(function (resolve, reject) {
928
- let entries = helper.readFile(uniqueUidMapperPath);
953
+ let entries = helper.readFileSync(uniqueUidMapperPath);
929
954
  let bugged = [];
930
955
  let removed = [];
931
956
  for (let uid in entries) {
@@ -951,12 +976,11 @@ importEntries.prototype = {
951
976
  })
952
977
  .catch(function (error) {
953
978
  addlogs(config, chalk.red('Failed to remove bugged entry from master language'), 'error');
954
- addlogs(config, error, 'error');
955
- addlogs(config, JSON.stringify(entry), 'error');
979
+ addlogs(config, util.formatError(error), 'error');
956
980
  });
957
981
  },
958
982
  {
959
- concurrency: reqConcurrency,
983
+ concurrency: importConcurrency,
960
984
  },
961
985
  )
962
986
  .then(function () {
@@ -967,15 +991,15 @@ importEntries.prototype = {
967
991
  }
968
992
  }
969
993
 
970
- helper.writeFile(path.join(entryMapperPath, 'removed-uids.json'), removed);
971
- helper.writeFile(path.join(entryMapperPath, 'pending-uids.json'), bugged);
994
+ helper.writeFileSync(path.join(entryMapperPath, 'removed-uids.json'), removed);
995
+ helper.writeFileSync(path.join(entryMapperPath, 'pending-uids.json'), bugged);
972
996
 
973
997
  addlogs(config, chalk.green('The stack has been eradicated from bugged entries!'), 'success');
974
998
  return resolve();
975
999
  })
976
1000
  .catch(function (error) {
977
1001
  // error while removing bugged entries from stack
978
- return reject(error);
1002
+ addlogs(config, util.formatError(error), 'error');
979
1003
  });
980
1004
  });
981
1005
  },
@@ -992,7 +1016,7 @@ importEntries.prototype = {
992
1016
  let updatedValue = [];
993
1017
  for (const element of fieldRulesArray) {
994
1018
  let splitedFieldRulesValue = element;
995
- let oldUid = helper.readFile(path.join(entryUidMapperPath));
1019
+ let oldUid = helper.readFileSync(path.join(entryUidMapperPath));
996
1020
  if (oldUid.hasOwnProperty(splitedFieldRulesValue)) {
997
1021
  updatedValue.push(oldUid[splitedFieldRulesValue]);
998
1022
  } else {
@@ -1004,7 +1028,7 @@ importEntries.prototype = {
1004
1028
  }
1005
1029
  }
1006
1030
  } else {
1007
- addlogs(config, 'field_rules is not available...', 'error');
1031
+ addlogs(config, 'field_rules is not available', 'error');
1008
1032
  }
1009
1033
 
1010
1034
  client
@@ -1014,13 +1038,13 @@ importEntries.prototype = {
1014
1038
  .then((contentTypeResponse) => {
1015
1039
  // Object.assign(ctObj, _.cloneDeep(schema))
1016
1040
  contentTypeResponse.field_rules = schema.field_rules;
1017
- contentTypeResponse.update();
1041
+ return contentTypeResponse.update();
1018
1042
  })
1019
1043
  .then(() => {
1020
1044
  return resolve();
1021
1045
  })
1022
1046
  .catch(function (error) {
1023
- return reject(error);
1047
+ addlogs(config, `failed to update the field rules ${util.formatError(error)}`);
1024
1048
  });
1025
1049
  });
1026
1050
  },
@@ -1031,7 +1055,7 @@ importEntries.prototype = {
1031
1055
  };
1032
1056
 
1033
1057
  let contentTypeUids = Object.keys(self.ctSchemas);
1034
- let entryMapper = helper.readFile(entryUidMapperPath);
1058
+ let entryMapper = helper.readFileSync(entryUidMapperPath);
1035
1059
 
1036
1060
  return new Promise(function (resolve, reject) {
1037
1061
  return Promise.map(
@@ -1040,16 +1064,20 @@ importEntries.prototype = {
1040
1064
  let lang = langs[counter];
1041
1065
  return Promise.map(
1042
1066
  contentTypeUids,
1043
- function (ctUid) {
1067
+ async function (ctUid) {
1044
1068
  let eFilePath = path.resolve(ePath, ctUid, lang + '.json');
1045
- let entries = helper.readFile(eFilePath);
1069
+ let entries = await helper.readLargeFile(eFilePath);
1046
1070
 
1047
1071
  let eUids = Object.keys(entries);
1048
1072
  let batches = [];
1073
+ let batchSize;
1049
1074
 
1050
1075
  if (eUids.length > 0) {
1051
- for (let i = 0; i < eUids.length; i += entryBatchLimit) {
1052
- batches.push(eUids.slice(i, i + entryBatchLimit));
1076
+ let entryBatchLimit = eConfig.batchLimit || 10;
1077
+ batchSize = Math.round(entryBatchLimit / 3);
1078
+ // Run entry creation in batches
1079
+ for (let i = 0; i < eUids.length; i += batchSize) {
1080
+ batches.push(eUids.slice(i, i + batchSize));
1053
1081
  }
1054
1082
  } else {
1055
1083
  return;
@@ -1057,7 +1085,7 @@ importEntries.prototype = {
1057
1085
 
1058
1086
  return Promise.map(
1059
1087
  batches,
1060
- async function (batch) {
1088
+ async function (batch, index) {
1061
1089
  return Promise.map(
1062
1090
  batch,
1063
1091
  async function (eUid) {
@@ -1093,16 +1121,20 @@ importEntries.prototype = {
1093
1121
  // eslint-disable-next-line max-nested-callbacks
1094
1122
  .then((result) => {
1095
1123
  // addlogs(config, 'Entry ' + eUid + ' published successfully in ' + ctUid + ' content type', 'success')
1096
- console.log('Entry ' + eUid + ' published successfully in ' + ctUid + ' content type');
1124
+ addlogs(
1125
+ config,
1126
+ 'Entry ' + eUid + ' published successfully in ' + ctUid + ' content type',
1127
+ 'success',
1128
+ );
1097
1129
  return resolveEntryPublished(result);
1098
1130
  // eslint-disable-next-line max-nested-callbacks
1099
1131
  })
1100
1132
  .catch(function (err) {
1101
- // addlogs(config, 'Entry ' + eUid + ' not published successfully in ' + ctUid + ' content type', 'error')
1102
- console.log(
1103
- 'Entry ' + eUid + ' not published successfully in ' + ctUid + ' content type',
1133
+ addlogs(
1134
+ config,
1135
+ `failed to publish entry ${eUid} content type ${ctUid} ${util.formatError(err)}`,
1104
1136
  );
1105
- return rejectEntryPublished(err.errorMessage);
1137
+ return resolveEntryPublished('');
1106
1138
  });
1107
1139
  });
1108
1140
  }
@@ -1111,7 +1143,7 @@ importEntries.prototype = {
1111
1143
  }
1112
1144
  },
1113
1145
  {
1114
- concurrency: reqConcurrency,
1146
+ concurrency: 1,
1115
1147
  },
1116
1148
  )
1117
1149
  .then(function () {
@@ -1119,8 +1151,7 @@ importEntries.prototype = {
1119
1151
  })
1120
1152
  .catch(function (error) {
1121
1153
  // error while executing entry in batch
1122
- addlogs(config, error, 'error');
1123
- return error;
1154
+ addlogs(config, util.formatError(error), 'error');
1124
1155
  });
1125
1156
  },
1126
1157
  {
@@ -1129,15 +1160,10 @@ importEntries.prototype = {
1129
1160
  )
1130
1161
  .then(function () {
1131
1162
  // addlogs(config, 'Entries published successfully in ' + ctUid + ' content type', 'success')
1132
- console.log('Entries published successfully in ' + ctUid + ' content type');
1163
+ addlogs('Entries published successfully in ' + ctUid + ' content type');
1133
1164
  })
1134
1165
  .catch(function (error) {
1135
- addlogs(
1136
- config,
1137
- 'Failed some of the Entry publishing in ' + ctUid + ' content type, go through logs for details.',
1138
- 'error',
1139
- );
1140
- return error;
1166
+ addlogs(config, `failed to publish entry in content type ${ctUid} ${util.formatError(error)}`);
1141
1167
  });
1142
1168
  },
1143
1169
  {
@@ -1146,9 +1172,10 @@ importEntries.prototype = {
1146
1172
  )
1147
1173
  .then(function () {
1148
1174
  // empty function
1175
+ // addlogs('Published entries successfully in ' +);
1149
1176
  })
1150
1177
  .catch(function (error) {
1151
- return error;
1178
+ addlogs(`Failed to publish few entries in ${lang} ${util.formatError(error)}`);
1152
1179
  });
1153
1180
  },
1154
1181
  {
@@ -1159,7 +1186,8 @@ importEntries.prototype = {
1159
1186
  return resolve();
1160
1187
  })
1161
1188
  .catch((error) => {
1162
- return reject(error);
1189
+ addlogs(`Failed to publish entries ${util.formatError(error)}`);
1190
+ // return reject(error);
1163
1191
  });
1164
1192
  });
1165
1193
  },
@@ -1227,11 +1255,7 @@ importEntries.prototype = {
1227
1255
 
1228
1256
  if (element.length) {
1229
1257
  for (const item of element) {
1230
- if (
1231
- (item.type === 'p' || item.type === 'a') &&
1232
- item.children &&
1233
- item.children.length > 0
1234
- ) {
1258
+ if ((item.type === 'p' || item.type === 'a') && item.children && item.children.length > 0) {
1235
1259
  return this.doEntryReferencesExist(item.children);
1236
1260
  } else if (this.isEntryRef(item)) {
1237
1261
  return true;
@@ -1249,8 +1273,8 @@ importEntries.prototype = {
1249
1273
  return false;
1250
1274
  },
1251
1275
  restoreJsonRteEntryRefs: function (entry, sourceStackEntry, ctSchema) {
1252
- let mappedAssetUids = helper.readFile(mappedAssetUidPath) || {};
1253
- let mappedAssetUrls = helper.readFile(mappedAssetUrlPath) || {};
1276
+ let mappedAssetUids = helper.readFileSync(mappedAssetUidPath) || {};
1277
+ let mappedAssetUrls = helper.readFileSync(mappedAssetUrlPath) || {};
1254
1278
  for (const element of ctSchema) {
1255
1279
  switch (element.data_type) {
1256
1280
  case 'blocks': {
@@ -1383,16 +1407,27 @@ importEntries.prototype = {
1383
1407
  if (element.multiple) {
1384
1408
  entry[element.uid] = entry[element.uid].map((jsonRteData) => {
1385
1409
  delete jsonRteData.uid; // remove uid
1386
- jsonRteData.attrs.dirty = true;
1387
- jsonRteData.children = jsonRteData.children.map((child) => this.removeUidsFromChildren(child));
1410
+
1411
+ if (_.isObject(jsonRteData.attrs)) {
1412
+ jsonRteData.attrs.dirty = true;
1413
+ }
1414
+
1415
+ if (!_.isEmpty(jsonRteData.children)) {
1416
+ jsonRteData.children = _.map(jsonRteData.children, (child) => this.removeUidsFromChildren(child));
1417
+ }
1418
+
1388
1419
  return jsonRteData;
1389
1420
  });
1390
1421
  } else {
1391
1422
  delete entry[element.uid].uid; // remove uid
1392
- entry[element.uid].attrs.dirty = true;
1393
- entry[element.uid].children = entry[element.uid].children.map((child) =>
1394
- this.removeUidsFromChildren(child),
1395
- );
1423
+ if (entry[element.uid] && _.isObject(entry[element.uid].attrs)) {
1424
+ entry[element.uid].attrs.dirty = true;
1425
+ }
1426
+ if (entry[element.uid] && !_.isEmpty(entry[element.uid].children)) {
1427
+ entry[element.uid].children = _.map(entry[element.uid].children, (child) =>
1428
+ this.removeUidsFromChildren(child),
1429
+ );
1430
+ }
1396
1431
  }
1397
1432
  }
1398
1433
  break;
@@ -1406,7 +1441,10 @@ importEntries.prototype = {
1406
1441
  return children.map((child) => {
1407
1442
  if (child.type && child.type.length > 0) {
1408
1443
  delete child.uid; // remove uid
1409
- child.attrs.dirty = true;
1444
+
1445
+ if (_.isObject(child.attrs)) {
1446
+ child.attrs.dirty = true;
1447
+ }
1410
1448
  }
1411
1449
  if (child.children && child.children.length > 0) {
1412
1450
  child.children = this.removeUidsFromChildren(child.children);
@@ -1416,7 +1454,9 @@ importEntries.prototype = {
1416
1454
  } else {
1417
1455
  if (children.type && children.type.length > 0) {
1418
1456
  delete children.uid; // remove uid
1419
- children.attrs.dirty = true;
1457
+ if (_.isObject(children.attrs)) {
1458
+ children.attrs.dirty = true;
1459
+ }
1420
1460
  }
1421
1461
  if (children.children && children.children.length > 0) {
1422
1462
  children.children = this.removeUidsFromChildren(children.children);
@@ -1427,7 +1467,9 @@ importEntries.prototype = {
1427
1467
  setDirtyTrue: function (jsonRteChild) {
1428
1468
  // also removing uids in this function
1429
1469
  if (jsonRteChild.type) {
1430
- jsonRteChild.attrs['dirty'] = true;
1470
+ if (_.isObject(jsonRteChild.attrs)) {
1471
+ jsonRteChild.attrs['dirty'] = true;
1472
+ }
1431
1473
  delete jsonRteChild.uid;
1432
1474
 
1433
1475
  if (jsonRteChild.children && jsonRteChild.children.length > 0) {
@@ -1467,7 +1509,7 @@ importEntries.prototype = {
1467
1509
  }
1468
1510
 
1469
1511
  return jsonRteChild;
1470
- }
1512
+ },
1471
1513
  };
1472
1514
 
1473
- module.exports = new importEntries();
1515
+ module.exports = EntriesImport;