@contentstack/cli-cm-import 1.25.0 → 1.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/README.md +3 -3
  2. package/lib/commands/cm/stacks/import.d.ts +1 -0
  3. package/lib/commands/cm/stacks/import.js +33 -12
  4. package/lib/import/module-importer.js +1 -1
  5. package/lib/import/modules/assets.d.ts +1 -1
  6. package/lib/import/modules/assets.js +93 -39
  7. package/lib/import/modules/base-class.js +11 -3
  8. package/lib/import/modules/content-types.js +79 -28
  9. package/lib/import/modules/custom-roles.js +95 -19
  10. package/lib/import/modules/entries.js +128 -57
  11. package/lib/import/modules/environments.js +48 -14
  12. package/lib/import/modules/extensions.js +78 -16
  13. package/lib/import/modules/global-fields.js +86 -19
  14. package/lib/import/modules/labels.d.ts +4 -4
  15. package/lib/import/modules/labels.js +60 -18
  16. package/lib/import/modules/locales.js +63 -20
  17. package/lib/import/modules/marketplace-apps.js +160 -31
  18. package/lib/import/modules/personalize.js +33 -7
  19. package/lib/import/modules/stack.js +5 -0
  20. package/lib/import/modules/taxonomies.js +52 -13
  21. package/lib/import/modules/variant-entries.js +21 -3
  22. package/lib/import/modules/webhooks.js +44 -12
  23. package/lib/import/modules/workflows.js +65 -21
  24. package/lib/types/import-config.d.ts +3 -1
  25. package/lib/types/index.d.ts +22 -0
  26. package/lib/utils/asset-helper.js +24 -1
  27. package/lib/utils/backup-handler.js +15 -1
  28. package/lib/utils/common-helper.js +41 -16
  29. package/lib/utils/content-type-helper.js +35 -2
  30. package/lib/utils/entries-helper.js +24 -2
  31. package/lib/utils/extension-helper.js +35 -1
  32. package/lib/utils/global-field-helper.js +1 -1
  33. package/lib/utils/import-config-handler.js +21 -0
  34. package/lib/utils/login-handler.js +8 -4
  35. package/lib/utils/marketplace-app-helper.js +50 -11
  36. package/lib/utils/taxonomies-helper.js +22 -4
  37. package/oclif.manifest.json +2 -2
  38. package/package.json +5 -5
@@ -20,6 +20,7 @@ function validate(req) {
20
20
  const uploadAssetHelper = function (config, req, fsPath, RETRY) {
21
21
  return new bluebird_1.default(function (resolve, reject) {
22
22
  try {
23
+ cli_utilities_1.log.debug(`Starting asset upload helper for path: ${fsPath}`);
23
24
  (0, cli_utilities_1.managementSDKClient)(config)
24
25
  .then((APIClient) => {
25
26
  validate(req);
@@ -27,8 +28,10 @@ const uploadAssetHelper = function (config, req, fsPath, RETRY) {
27
28
  RETRY = 1;
28
29
  }
29
30
  else if (RETRY > MAX_RETRY_LIMIT) {
31
+ cli_utilities_1.log.debug(`Max retry limit exceeded for asset upload: ${fsPath}`);
30
32
  return reject(new Error('Max retry limit exceeded!'));
31
33
  }
34
+ cli_utilities_1.log.debug(`Uploading asset (attempt ${RETRY}/${MAX_RETRY_LIMIT}): ${fsPath}`);
32
35
  req.upload = fsPath;
33
36
  const stackAPIClient = APIClient.stack({
34
37
  api_key: config.target_stack,
@@ -38,16 +41,21 @@ const uploadAssetHelper = function (config, req, fsPath, RETRY) {
38
41
  .asset()
39
42
  .create(req)
40
43
  .then((response) => {
44
+ cli_utilities_1.log.debug(`Asset upload successful: ${fsPath}`);
41
45
  return resolve(response);
42
46
  })
43
47
  .catch((error) => {
48
+ cli_utilities_1.log.error(`Asset upload failed: ${fsPath}`);
44
49
  return reject(error);
45
50
  });
46
51
  })
47
- .catch(reject);
52
+ .catch((error) => {
53
+ reject(error);
54
+ });
48
55
  }
49
56
  catch (error) {
50
57
  debug(error);
58
+ cli_utilities_1.log.debug('Unexpected error in uploadAssetHelper');
51
59
  return reject(error);
52
60
  }
53
61
  });
@@ -55,6 +63,7 @@ const uploadAssetHelper = function (config, req, fsPath, RETRY) {
55
63
  exports.uploadAssetHelper = uploadAssetHelper;
56
64
  // get assets object
57
65
  const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidMapperPath, installedExtensions) {
66
+ var _a, _b, _c;
58
67
  if (!_.has(data, 'entry') ||
59
68
  !_.has(data, 'content_type') ||
60
69
  !_.isPlainObject(mappedAssetUids) ||
@@ -62,6 +71,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
62
71
  typeof assetUidMapperPath !== 'string') {
63
72
  throw new Error('Invalid inputs for lookupAssets!');
64
73
  }
74
+ cli_utilities_1.log.debug(`Starting asset lookup for entry: ${(_a = data.entry) === null || _a === void 0 ? void 0 : _a.uid}, content type: ${(_b = data.content_type) === null || _b === void 0 ? void 0 : _b.uid}`);
65
75
  let parent = [];
66
76
  let assetUids = [];
67
77
  let assetUrls = [];
@@ -114,20 +124,24 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
114
124
  }
115
125
  else if (schema[i].data_type === 'json' && schema[i].field_metadata.extension) {
116
126
  if (installedExtensions && installedExtensions[schema[i].extension_uid]) {
127
+ cli_utilities_1.log.debug(`Mapping extension UID: ${schema[i].extension_uid} to ${installedExtensions[schema[i].extension_uid]}`);
117
128
  schema[i].extension_uid = installedExtensions[schema[i].extension_uid];
118
129
  }
119
130
  }
120
131
  }
121
132
  };
122
133
  function findAssetIdsFromJsonCustomFields(entryObj, ctSchema) {
134
+ cli_utilities_1.log.debug('Processing JSON custom fields for asset references');
123
135
  ctSchema.map((row) => {
124
136
  if (row.data_type === 'json') {
125
137
  if (entryObj[row.uid] && row.field_metadata.extension && row.field_metadata.is_asset) {
126
138
  if (installedExtensions && installedExtensions[row.extension_uid]) {
139
+ cli_utilities_1.log.debug(`Mapping extension UID in custom field: ${row.extension_uid}`);
127
140
  row.extension_uid = installedExtensions[row.extension_uid];
128
141
  }
129
142
  if (entryObj[row.uid].metadata && entryObj[row.uid].metadata.extension_uid) {
130
143
  if (installedExtensions && installedExtensions[entryObj[row.uid].metadata.extension_uid]) {
144
+ cli_utilities_1.log.debug(`Mapping metadata extension UID: ${entryObj[row.uid].metadata.extension_uid}`);
131
145
  entryObj[row.uid].metadata.extension_uid = installedExtensions[entryObj[row.uid].metadata.extension_uid];
132
146
  }
133
147
  }
@@ -137,15 +151,18 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
137
151
  });
138
152
  }
139
153
  function findAssetIdsFromHtmlRte(entryObj, ctSchema) {
154
+ cli_utilities_1.log.debug('Extracting asset UIDs from HTML RTE fields');
140
155
  const regex = /<img asset_uid=\\"([^"]+)\\"/g;
141
156
  let match;
142
157
  const entry = JSON.stringify(entryObj);
143
158
  while ((match = regex.exec(entry)) !== null) {
144
159
  assetUids.push(match[1]);
145
160
  }
161
+ cli_utilities_1.log.debug(`Found ${assetUids.length} asset UIDs in HTML RTE`);
146
162
  }
147
163
  function findAssetIdsFromJsonRte(entryObj, ctSchema) {
148
164
  var _a;
165
+ cli_utilities_1.log.debug('Processing JSON RTE fields for asset references');
149
166
  for (const element of ctSchema) {
150
167
  switch (element.data_type) {
151
168
  case 'blocks': {
@@ -233,6 +250,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
233
250
  updateFileFields(data.entry, data, null, mappedAssetUids, matchedUids, unmatchedUids, mappedAssetUrls);
234
251
  assetUids = _.uniq(assetUids);
235
252
  assetUrls = _.uniq(assetUrls);
253
+ cli_utilities_1.log.debug(`Found ${assetUids.length} unique asset UIDs and ${assetUrls.length} unique asset URLs`);
236
254
  let entry = JSON.stringify(data.entry);
237
255
  assetUrls.forEach(function (assetUrl) {
238
256
  let mappedAssetUrl = mappedAssetUrls[assetUrl];
@@ -256,6 +274,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
256
274
  }
257
275
  });
258
276
  if (matchedUids.length) {
277
+ cli_utilities_1.log.debug(`Successfully mapped ${matchedUids.length} asset UIDs`);
259
278
  let matchedAssetUids = helper.readFileSync(path.join(assetUidMapperPath, 'matched-asset-uids.json'));
260
279
  matchedAssetUids = matchedAssetUids || {};
261
280
  if (matchedAssetUids.hasOwnProperty(data.content_type.uid)) {
@@ -271,6 +290,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
271
290
  }
272
291
  }
273
292
  if (unmatchedUids.length) {
293
+ cli_utilities_1.log.warn(`Found ${unmatchedUids.length} unmatched asset UIDs`);
274
294
  let unmatchedAssetUids = helper.readFileSync(path.join(assetUidMapperPath, 'unmatched-asset-uids.json'));
275
295
  unmatchedAssetUids = unmatchedAssetUids || {};
276
296
  if (unmatchedAssetUids.hasOwnProperty(data.content_type.uid)) {
@@ -284,6 +304,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
284
304
  helper.writeFile(path.join(assetUidMapperPath, 'unmatched-asset-uids.json'));
285
305
  }
286
306
  if (unmatchedUrls.length) {
307
+ cli_utilities_1.log.warn(`Found ${unmatchedUrls.length} unmatched asset URLs`);
287
308
  let unmatchedAssetUrls = helper.readFileSync(path.join(assetUidMapperPath, 'unmatched-asset-urls.json'));
288
309
  unmatchedAssetUrls = unmatchedAssetUrls || {};
289
310
  if (unmatchedAssetUrls.hasOwnProperty(data.content_type.uid)) {
@@ -297,6 +318,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
297
318
  helper.writeFile(path.join(assetUidMapperPath, 'unmatched-asset-urls.json'));
298
319
  }
299
320
  if (matchedUrls.length) {
321
+ cli_utilities_1.log.debug(`Successfully mapped ${matchedUrls.length} asset URLs`);
300
322
  let matchedAssetUrls = helper.readFileSync(path.join(assetUidMapperPath, 'matched-asset-urls.json'));
301
323
  matchedAssetUrls = matchedAssetUrls || {};
302
324
  if (matchedAssetUrls.hasOwnProperty(data.content_type.uid)) {
@@ -309,6 +331,7 @@ const lookupAssets = function (data, mappedAssetUids, mappedAssetUrls, assetUidM
309
331
  }
310
332
  helper.writeFile(path.join(assetUidMapperPath, 'matched-asset-urls.json'));
311
333
  }
334
+ cli_utilities_1.log.debug(`Asset lookup completed for entry: ${(_c = data.entry) === null || _c === void 0 ? void 0 : _c.uid}`);
312
335
  return JSON.parse(entry);
313
336
  };
314
337
  exports.lookupAssets = lookupAssets;
@@ -6,13 +6,16 @@ const fs_extra_1 = require("fs-extra");
6
6
  const cli_utilities_1 = require("@contentstack/cli-utilities");
7
7
  const index_1 = require("./index");
8
8
  async function backupHandler(importConfig) {
9
+ cli_utilities_1.log.debug('Starting backup handler process');
9
10
  if (importConfig.hasOwnProperty('useBackedupDir')) {
11
+ cli_utilities_1.log.debug(`Using existing backup directory: ${importConfig.useBackedupDir}`);
10
12
  return importConfig.useBackedupDir;
11
13
  }
12
14
  let backupDirPath;
13
15
  const subDir = isSubDirectory(importConfig);
14
16
  if (subDir) {
15
17
  backupDirPath = path.resolve((0, cli_utilities_1.sanitizePath)(importConfig.contentDir), '..', '_backup_' + Math.floor(Math.random() * 1000));
18
+ cli_utilities_1.log.debug(`Detected subdirectory configuration, creating backup at: ${backupDirPath}`);
16
19
  if (importConfig.createBackupDir) {
17
20
  cli_utilities_1.cliux.print(`Warning!!! Provided backup directory path is a sub directory of the content directory, Cannot copy to a sub directory. Hence new backup directory created - ${backupDirPath}`, {
18
21
  color: 'yellow',
@@ -22,15 +25,20 @@ async function backupHandler(importConfig) {
22
25
  else {
23
26
  // NOTE: If the backup folder's directory is provided, create it at that location; otherwise, the default path (working directory).
24
27
  backupDirPath = path.join(process.cwd(), '_backup_' + Math.floor(Math.random() * 1000));
28
+ cli_utilities_1.log.debug(`Using default backup directory: ${backupDirPath}`);
25
29
  if (importConfig.createBackupDir) {
30
+ cli_utilities_1.log.debug(`Custom backup directory specified: ${importConfig.createBackupDir}`);
26
31
  if (index_1.fileHelper.fileExistsSync(importConfig.createBackupDir)) {
32
+ cli_utilities_1.log.debug(`Removing existing backup directory: ${importConfig.createBackupDir}`);
27
33
  index_1.fileHelper.removeDirSync(importConfig.createBackupDir);
28
34
  }
35
+ cli_utilities_1.log.debug(`Creating backup directory: ${importConfig.createBackupDir}`);
29
36
  index_1.fileHelper.makeDirectory(importConfig.createBackupDir);
30
37
  backupDirPath = importConfig.createBackupDir;
31
38
  }
32
39
  }
33
40
  if (backupDirPath) {
41
+ cli_utilities_1.log.debug(`Starting content copy to backup directory: ${backupDirPath}`);
34
42
  cli_utilities_1.cliux.print('Copying content to the backup directory...');
35
43
  return new Promise((resolve, reject) => {
36
44
  return (0, fs_extra_1.copy)(importConfig.contentDir, backupDirPath, (error) => {
@@ -38,6 +46,7 @@ async function backupHandler(importConfig) {
38
46
  (0, index_1.trace)(error, 'error', true);
39
47
  return reject(error);
40
48
  }
49
+ cli_utilities_1.log.debug(`Successfully created backup at: ${backupDirPath}`);
41
50
  resolve(backupDirPath);
42
51
  });
43
52
  });
@@ -50,12 +59,17 @@ exports.default = backupHandler;
50
59
  * @returns
51
60
  */
52
61
  function isSubDirectory(importConfig) {
62
+ cli_utilities_1.log.debug('Checking if backup directory is a subdirectory');
53
63
  const parent = importConfig.contentDir;
54
64
  const child = importConfig.createBackupDir ? importConfig.createBackupDir : process.cwd();
55
65
  const relative = path.relative(parent, child);
66
+ cli_utilities_1.log.debug(`Parent directory: ${parent}, Child directory: ${child}, Relative path: ${relative}`);
56
67
  if (relative) {
57
- return !relative.startsWith('..') && !path.isAbsolute(relative);
68
+ const isSubDir = !relative.startsWith('..') && !path.isAbsolute(relative);
69
+ cli_utilities_1.log.debug(`Is subdirectory: ${isSubDir}`);
70
+ return isSubDir;
58
71
  }
59
72
  // true if both parent and child have same path
73
+ cli_utilities_1.log.debug('Parent and child directories are the same');
60
74
  return true;
61
75
  }
@@ -12,8 +12,6 @@ const _ = tslib_1.__importStar(require("lodash"));
12
12
  const path = tslib_1.__importStar(require("path"));
13
13
  const cli_utilities_1 = require("@contentstack/cli-utilities");
14
14
  const file_helper_1 = require("./file-helper");
15
- const chalk_1 = tslib_1.__importDefault(require("chalk"));
16
- const logger_1 = require("./logger");
17
15
  const config_1 = tslib_1.__importDefault(require("../config"));
18
16
  const promise_limit_1 = tslib_1.__importDefault(require("promise-limit"));
19
17
  let config;
@@ -26,8 +24,9 @@ const initialization = (configData) => {
26
24
  };
27
25
  exports.initialization = initialization;
28
26
  const validateConfig = (importConfig) => {
27
+ cli_utilities_1.log.debug('Validating import configuration');
29
28
  if (importConfig.email && importConfig.password && !importConfig.target_stack) {
30
- (0, logger_1.log)(importConfig, chalk_1.default.red('Kindly provide api_token'), 'error');
29
+ cli_utilities_1.log.debug('Target stack API token is required when using email/password authentication');
31
30
  return 'error';
32
31
  }
33
32
  else if (!importConfig.email &&
@@ -35,29 +34,34 @@ const validateConfig = (importConfig) => {
35
34
  !importConfig.management_token &&
36
35
  importConfig.target_stack &&
37
36
  !(0, cli_utilities_1.isAuthenticated)()) {
38
- (0, logger_1.log)(importConfig, chalk_1.default.red('Kindly provide management_token or email and password'), 'error');
37
+ cli_utilities_1.log.debug('Authentication credentials missing - either management token or email/password required');
39
38
  return 'error';
40
39
  }
41
40
  else if (!importConfig.email && !importConfig.password && importConfig.preserveStackVersion) {
42
- (0, logger_1.log)(importConfig, chalk_1.default.red('Kindly provide Email and password for old version stack'), 'error');
41
+ cli_utilities_1.log.debug('Email and password required for stack version preservation');
43
42
  return 'error';
44
43
  }
45
44
  else if ((importConfig.email && !importConfig.password) || (!importConfig.email && importConfig.password)) {
46
- (0, logger_1.log)(importConfig, chalk_1.default.red('Kindly provide Email and password'), 'error');
45
+ cli_utilities_1.log.debug('Both email and password must be provided together');
47
46
  return 'error';
48
47
  }
49
48
  };
50
49
  exports.validateConfig = validateConfig;
51
50
  const buildAppConfig = (importConfig) => {
51
+ cli_utilities_1.log.debug('Building application configuration with defaults');
52
52
  importConfig = _.merge(config_1.default, importConfig);
53
53
  return importConfig;
54
54
  };
55
55
  exports.buildAppConfig = buildAppConfig;
56
56
  const sanitizeStack = (importConfig) => {
57
57
  if (typeof importConfig.preserveStackVersion !== 'boolean' || !importConfig.preserveStackVersion) {
58
+ cli_utilities_1.log.debug('Stack version preservation not enabled, skipping sanitization');
59
+ return Promise.resolve();
60
+ }
61
+ if (importConfig.management_token) {
62
+ cli_utilities_1.log.info('Skipping stack version sanitization: Operation is not supported when using a management token.');
58
63
  return Promise.resolve();
59
64
  }
60
- (0, logger_1.log)(importConfig, 'Running script to maintain stack version.', 'success');
61
65
  try {
62
66
  const httpClient = cli_utilities_1.HttpClient.create();
63
67
  httpClient.headers(importConfig.headers);
@@ -65,20 +69,22 @@ const sanitizeStack = (importConfig) => {
65
69
  if (stackDetails.data && stackDetails.data.stack && stackDetails.data.stack.settings) {
66
70
  const newStackVersion = stackDetails.data.stack.settings.version;
67
71
  const newStackDate = new Date(newStackVersion).toString();
72
+ cli_utilities_1.log.debug(`New stack version: ${newStackVersion} (${newStackDate})`);
68
73
  const stackFilePath = path.join((0, cli_utilities_1.sanitizePath)(importConfig.data), (0, cli_utilities_1.sanitizePath)(importConfig.modules.stack.dirName), (0, cli_utilities_1.sanitizePath)(importConfig.modules.stack.fileName));
74
+ cli_utilities_1.log.debug(`Reading stack file from: ${stackFilePath}`);
69
75
  const oldStackDetails = (0, file_helper_1.readFileSync)(stackFilePath);
70
76
  if (!oldStackDetails || !oldStackDetails.settings || !oldStackDetails.settings.hasOwnProperty('version')) {
71
77
  throw new Error(`${JSON.stringify(oldStackDetails)} is invalid!`);
72
78
  }
73
79
  const oldStackDate = new Date(oldStackDetails.settings.version).toString();
80
+ cli_utilities_1.log.debug(`Old stack version: ${oldStackDetails.settings.version} (${oldStackDate})`);
74
81
  if (oldStackDate > newStackDate) {
75
82
  throw new Error('Migration Error. You cannot migrate data from new stack onto old. Kindly contact support@contentstack.com for more details.');
76
83
  }
77
84
  else if (oldStackDate === newStackDate) {
78
- (0, logger_1.log)(importConfig, 'The version of both the stacks are same.', 'success');
79
85
  return Promise.resolve();
80
86
  }
81
- (0, logger_1.log)(importConfig, 'Updating stack version.', 'success');
87
+ cli_utilities_1.log.debug('Updating stack version to preserve compatibility');
82
88
  return httpClient
83
89
  .put(`https://${importConfig.host}/v3${importConfig.apis.stacks}settings/set-version`, {
84
90
  stack_settings: {
@@ -86,7 +92,7 @@ const sanitizeStack = (importConfig) => {
86
92
  },
87
93
  })
88
94
  .then((response) => {
89
- (0, logger_1.log)(importConfig, `Stack version preserved successfully!\n${JSON.stringify(response.data)}`, 'success');
95
+ cli_utilities_1.log.info(`Stack version preserved successfully!\n${JSON.stringify(response.data)}`);
90
96
  });
91
97
  }
92
98
  throw new Error(`Unexpected stack details ${stackDetails && JSON.stringify(stackDetails.data)}`);
@@ -98,30 +104,39 @@ const sanitizeStack = (importConfig) => {
98
104
  };
99
105
  exports.sanitizeStack = sanitizeStack;
100
106
  const masterLocalDetails = (stackAPIClient) => {
107
+ cli_utilities_1.log.debug('Fetching master locale details');
101
108
  return stackAPIClient
102
109
  .locale()
103
110
  .query({ query: { fallback_locale: null } })
104
111
  .find()
105
- .then(({ items }) => items[0]);
112
+ .then(({ items }) => {
113
+ var _a;
114
+ cli_utilities_1.log.debug(`Found master locale: ${(_a = items[0]) === null || _a === void 0 ? void 0 : _a.code}`);
115
+ return items[0];
116
+ });
106
117
  };
107
118
  exports.masterLocalDetails = masterLocalDetails;
108
119
  const field_rules_update = (importConfig, ctPath) => {
109
120
  return new Promise(async (resolve, reject) => {
121
+ cli_utilities_1.log.debug('Starting field rules update process');
110
122
  let client = await (0, cli_utilities_1.managementSDKClient)(config);
111
123
  (0, file_helper_1.readFile)(path.join(ctPath + '/field_rules_uid.json'))
112
124
  .then(async (data) => {
125
+ cli_utilities_1.log.debug('Processing field rules UID mapping');
113
126
  const ct_field_visibility_uid = JSON.parse(data);
114
127
  let ct_files = (0, file_helper_1.readdirSync)(ctPath);
115
128
  if (ct_field_visibility_uid && ct_field_visibility_uid != 'undefined') {
129
+ cli_utilities_1.log.debug(`Processing ${ct_field_visibility_uid.length} content types with field rules`);
116
130
  for (const ele of ct_field_visibility_uid) {
117
131
  if (ct_files.indexOf(ele + '.json') > -1) {
132
+ cli_utilities_1.log.debug(`Updating field rules for content type: ${ele}`);
118
133
  let schema = require(path.resolve(ctPath, ele));
119
- // await field_rules_update(schema)
120
134
  let fieldRuleLength = schema.field_rules.length;
121
135
  for (let k = 0; k < fieldRuleLength; k++) {
122
136
  let fieldRuleConditionLength = schema.field_rules[k].conditions.length;
123
137
  for (let i = 0; i < fieldRuleConditionLength; i++) {
124
138
  if (schema.field_rules[k].conditions[i].operand_field === 'reference') {
139
+ cli_utilities_1.log.debug(`Processing reference field rule condition`);
125
140
  let entryMapperPath = path.resolve(importConfig.data, 'mapper', 'entries');
126
141
  let entryUidMapperPath = path.join(entryMapperPath, 'uid-mapping.json');
127
142
  let fieldRulesValue = schema.field_rules[k].conditions[i].value;
@@ -131,9 +146,11 @@ const field_rules_update = (importConfig, ctPath) => {
131
146
  let splitedFieldRulesValue = element;
132
147
  let oldUid = (0, file_helper_1.readFileSync)(path.join(entryUidMapperPath));
133
148
  if (oldUid.hasOwnProperty(splitedFieldRulesValue)) {
149
+ cli_utilities_1.log.debug(`Mapped UID: ${splitedFieldRulesValue} -> ${oldUid[splitedFieldRulesValue]}`);
134
150
  updatedValue.push(oldUid[splitedFieldRulesValue]);
135
151
  }
136
152
  else {
153
+ cli_utilities_1.log.debug(`No mapping found for UID: ${splitedFieldRulesValue}`);
137
154
  updatedValue.push(element);
138
155
  }
139
156
  }
@@ -153,9 +170,11 @@ const field_rules_update = (importConfig, ctPath) => {
153
170
  ctObj
154
171
  .update()
155
172
  .then(() => {
173
+ cli_utilities_1.log.debug(`Successfully updated field rules for content type: ${schema.uid}`);
156
174
  return resolve('');
157
175
  })
158
176
  .catch((error) => {
177
+ cli_utilities_1.log.error(`Failed to update field rules for content type: ${schema.uid}`);
159
178
  return reject(error);
160
179
  });
161
180
  }
@@ -163,7 +182,9 @@ const field_rules_update = (importConfig, ctPath) => {
163
182
  }
164
183
  }
165
184
  })
166
- .catch(reject);
185
+ .catch((error) => {
186
+ reject(error);
187
+ });
167
188
  });
168
189
  };
169
190
  exports.field_rules_update = field_rules_update;
@@ -200,7 +221,9 @@ const formatError = (error) => {
200
221
  };
201
222
  exports.formatError = formatError;
202
223
  const executeTask = (tasks = [], handler, options) => {
224
+ cli_utilities_1.log.debug(`Executing ${tasks.length} tasks with concurrency: ${options.concurrency}`);
203
225
  if (typeof handler !== 'function') {
226
+ cli_utilities_1.log.error('Invalid handler function provided for task execution');
204
227
  throw new Error('Invalid handler');
205
228
  }
206
229
  const { concurrency = 1 } = options;
@@ -212,24 +235,26 @@ const executeTask = (tasks = [], handler, options) => {
212
235
  exports.executeTask = executeTask;
213
236
  const validateBranch = async (stackAPIClient, config, branch) => {
214
237
  return new Promise(async (resolve, reject) => {
238
+ cli_utilities_1.log.debug(`Validating branch: ${branch}`);
215
239
  try {
216
240
  const data = await stackAPIClient.branch(branch).fetch();
217
241
  if (data && typeof data === 'object') {
218
242
  if (data.error_message) {
219
- (0, logger_1.log)(config, chalk_1.default.red(data.error_message), 'error');
220
- (0, logger_1.log)(config, chalk_1.default.red('No branch found with the name ' + branch), 'error');
243
+ cli_utilities_1.log.error(`Branch validation failed: ${data.error_message}`);
221
244
  reject({ message: 'No branch found with the name ' + branch, error: data.error_message });
222
245
  }
223
246
  else {
247
+ cli_utilities_1.log.info(`Branch validation successful: ${branch}`);
224
248
  resolve(data);
225
249
  }
226
250
  }
227
251
  else {
252
+ cli_utilities_1.log.error(`Invalid branch data received for: ${branch}`);
228
253
  reject({ message: 'No branch found with the name ' + branch, error: {} });
229
254
  }
230
255
  }
231
256
  catch (error) {
232
- (0, logger_1.log)(config, chalk_1.default.red('No branch found with the name ' + branch), 'error');
257
+ cli_utilities_1.log.error(`No branch found with the name: ${branch}`);
233
258
  reject({ message: 'No branch found with the name ' + branch, error });
234
259
  }
235
260
  });
@@ -7,6 +7,7 @@
7
7
  */
8
8
  Object.defineProperty(exports, "__esModule", { value: true });
9
9
  exports.updateFieldRules = exports.removeReferenceFields = exports.suppressSchemaReference = exports.schemaTemplate = void 0;
10
+ const cli_utilities_1 = require("@contentstack/cli-utilities");
10
11
  exports.schemaTemplate = {
11
12
  content_type: {
12
13
  title: 'Seed',
@@ -52,70 +53,91 @@ exports.schemaTemplate = {
52
53
  */
53
54
  const suppressSchemaReference = function (schema, flag) {
54
55
  var _a, _b, _c, _d;
56
+ cli_utilities_1.log.debug('Starting schema reference suppression process');
55
57
  for (var i in schema) {
56
58
  if (schema[i].data_type === 'group' || schema[i].data_type === 'global_field') {
59
+ cli_utilities_1.log.debug(`Processing ${schema[i].data_type} field: ${schema[i].uid}`);
57
60
  (0, exports.suppressSchemaReference)(schema[i].schema, flag);
58
61
  }
59
62
  else if (schema[i].data_type === 'blocks') {
63
+ cli_utilities_1.log.debug(`Processing blocks field: ${schema[i].uid}`);
60
64
  for (var block in schema[i].blocks) {
61
65
  (0, exports.suppressSchemaReference)(schema[i].blocks[block].schema, flag);
62
66
  }
63
67
  }
64
68
  else if (schema[i].data_type === 'reference') {
69
+ cli_utilities_1.log.debug(`Found reference field: ${schema[i].uid}`);
65
70
  flag.references = true;
66
71
  }
67
72
  else if (schema[i].data_type === 'json' && ((_b = (_a = schema[i]) === null || _a === void 0 ? void 0 : _a.field_metadata) === null || _b === void 0 ? void 0 : _b.rich_text_type)) {
73
+ cli_utilities_1.log.debug(`Found JSON RTE field: ${schema[i].uid}`);
68
74
  flag.jsonRte = true;
69
- if (schema[i].field_metadata.embed_entry === true)
75
+ if (schema[i].field_metadata.embed_entry === true) {
76
+ cli_utilities_1.log.debug(`JSON RTE field has embedded entries: ${schema[i].uid}`);
70
77
  flag.jsonRteEmbeddedEntries = true;
78
+ }
71
79
  }
72
80
  else if (schema[i].data_type === 'text' && ((_d = (_c = schema[i]) === null || _c === void 0 ? void 0 : _c.field_metadata) === null || _d === void 0 ? void 0 : _d.rich_text_type)) {
81
+ cli_utilities_1.log.debug(`Found text RTE field: ${schema[i].uid}`);
73
82
  flag.rte = true;
74
- if (schema[i].field_metadata.embed_entry === true)
83
+ if (schema[i].field_metadata.embed_entry === true) {
84
+ cli_utilities_1.log.debug(`Text RTE field has embedded entries: ${schema[i].uid}`);
75
85
  flag.rteEmbeddedEntries = true;
86
+ }
76
87
  }
77
88
  if ((schema[i].hasOwnProperty('mandatory') && schema[i].mandatory) ||
78
89
  (schema[i].hasOwnProperty('unique') && schema[i].unique)) {
79
90
  if (schema[i].uid !== 'title') {
91
+ cli_utilities_1.log.debug(`Suppressing mandatory/unique constraints for field: ${schema[i].uid}`);
80
92
  schema[i].unique = false;
81
93
  schema[i].mandatory = false;
82
94
  flag.suppressed = true;
83
95
  }
84
96
  }
85
97
  }
98
+ cli_utilities_1.log.debug('Schema reference suppression completed');
86
99
  };
87
100
  exports.suppressSchemaReference = suppressSchemaReference;
88
101
  const removeReferenceFields = async function (schema, flag = { supressed: false }, stackAPIClient) {
89
102
  var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
103
+ cli_utilities_1.log.debug('Starting reference field removal process');
90
104
  if (schema === null || schema === void 0 ? void 0 : schema.length) {
91
105
  for (let i = 0; i < schema.length; i++) {
92
106
  if (schema[i].data_type === 'group') {
107
+ cli_utilities_1.log.debug(`Processing group field: ${schema[i].uid}`);
93
108
  await (0, exports.removeReferenceFields)(schema[i].schema, flag, stackAPIClient);
94
109
  }
95
110
  else if (schema[i].data_type === 'blocks') {
111
+ cli_utilities_1.log.debug(`Processing blocks field: ${schema[i].uid}`);
96
112
  for (var block in schema[i].blocks) {
97
113
  await (0, exports.removeReferenceFields)(schema[i].blocks[block].schema, flag, stackAPIClient);
98
114
  }
99
115
  }
100
116
  else if (schema[i].data_type === 'reference') {
117
+ cli_utilities_1.log.debug(`Processing reference field: ${schema[i].uid}`);
101
118
  flag.supressed = true;
102
119
  // Check if content-type exists
103
120
  // If exists, then no change should be required.
104
121
  let isContentTypeError = false;
105
122
  for (let j = 0; j < schema[i].reference_to.length; j++) {
106
123
  try {
124
+ cli_utilities_1.log.debug(`Checking if content type exists: ${schema[i].reference_to[j]}`);
107
125
  await stackAPIClient.contentType(schema[i].reference_to[j]).fetch();
126
+ cli_utilities_1.log.debug(`Content type exists: ${schema[i].reference_to[j]}`);
108
127
  }
109
128
  catch (error) {
110
129
  // Else warn and modify the schema object.
111
130
  isContentTypeError = true;
131
+ cli_utilities_1.log.warn(`Content type does not exist: ${schema[i].reference_to[j]}`);
112
132
  console.warn(`Content-type ${schema[i].reference_to[j]} does not exist. Removing the field from schema`);
113
133
  }
114
134
  }
115
135
  if (isContentTypeError) {
136
+ cli_utilities_1.log.debug(`Removing reference field due to missing content types: ${schema[i].uid}`);
116
137
  schema.splice(i, 1);
117
138
  --i;
118
139
  if (schema.length < 1) {
140
+ cli_utilities_1.log.debug('Adding dummy field to prevent empty schema');
119
141
  schema.push({
120
142
  data_type: 'text',
121
143
  display_name: 'dummyTest',
@@ -143,6 +165,7 @@ const removeReferenceFields = async function (schema, flag = { supressed: false
143
165
  schema[i].field_metadata.allow_json_rte &&
144
166
  schema[i].field_metadata.embed_entry &&
145
167
  schema[i].reference_to.length > 1) {
168
+ cli_utilities_1.log.debug(`Restricting JSON RTE field to assets only: ${schema[i].uid}`);
146
169
  flag.supressed = true;
147
170
  schema[i].reference_to = ['sys_assets'];
148
171
  }
@@ -152,6 +175,7 @@ const removeReferenceFields = async function (schema, flag = { supressed: false
152
175
  ((_b = (_a = schema[i]) === null || _a === void 0 ? void 0 : _a.field_metadata) === null || _b === void 0 ? void 0 : _b.rich_text_type) &&
153
176
  ((_d = (_c = schema[i]) === null || _c === void 0 ? void 0 : _c.field_metadata) === null || _d === void 0 ? void 0 : _d.embed_entry) &&
154
177
  ((_f = (_e = schema[i]) === null || _e === void 0 ? void 0 : _e.reference_to) === null || _f === void 0 ? void 0 : _f.length) > 1) {
178
+ cli_utilities_1.log.debug(`Restricting JSON RTE field to assets only: ${schema[i].uid}`);
155
179
  flag.supressed = true;
156
180
  schema[i].reference_to = ['sys_assets'];
157
181
  }
@@ -161,21 +185,26 @@ const removeReferenceFields = async function (schema, flag = { supressed: false
161
185
  ((_h = (_g = schema[i]) === null || _g === void 0 ? void 0 : _g.field_metadata) === null || _h === void 0 ? void 0 : _h.rich_text_type) &&
162
186
  ((_k = (_j = schema[i]) === null || _j === void 0 ? void 0 : _j.field_metadata) === null || _k === void 0 ? void 0 : _k.embed_entry) &&
163
187
  ((_m = (_l = schema[i]) === null || _l === void 0 ? void 0 : _l.reference_to) === null || _m === void 0 ? void 0 : _m.length) >= 1) {
188
+ cli_utilities_1.log.debug(`Restricting text RTE field to assets only: ${schema[i].uid}`);
164
189
  flag.supressed = true;
165
190
  schema[i].reference_to = ['sys_assets'];
166
191
  }
167
192
  }
168
193
  }
194
+ cli_utilities_1.log.debug('Reference field removal process completed');
169
195
  };
170
196
  exports.removeReferenceFields = removeReferenceFields;
171
197
  const updateFieldRules = function (contentType) {
198
+ cli_utilities_1.log.debug(`Starting field rules update for content type: ${contentType.uid}`);
172
199
  const fieldDataTypeMap = {};
173
200
  for (let i = 0; i < contentType.schema.length; i++) {
174
201
  const field = contentType.schema[i];
175
202
  fieldDataTypeMap[field.uid] = field.data_type;
176
203
  }
204
+ cli_utilities_1.log.debug(`Created field data type mapping for ${Object.keys(fieldDataTypeMap).length} fields`);
177
205
  const fieldRules = [...contentType.field_rules];
178
206
  let len = fieldRules.length;
207
+ let removedRules = 0;
179
208
  // Looping backwards as we need to delete elements as we move.
180
209
  for (let i = len - 1; i >= 0; i--) {
181
210
  const conditions = fieldRules[i].conditions;
@@ -183,13 +212,17 @@ const updateFieldRules = function (contentType) {
183
212
  for (let j = 0; j < conditions.length; j++) {
184
213
  const field = conditions[j].operand_field;
185
214
  if (fieldDataTypeMap[field] === 'reference') {
215
+ cli_utilities_1.log.debug(`Found reference field in rule condition: ${field}`);
186
216
  isReference = true;
187
217
  }
188
218
  }
189
219
  if (isReference) {
220
+ cli_utilities_1.log.debug(`Removing field rule with reference condition`);
190
221
  fieldRules.splice(i, 1);
222
+ removedRules++;
191
223
  }
192
224
  }
225
+ cli_utilities_1.log.debug(`Field rules update completed. Removed ${removedRules} rules with reference conditions`);
193
226
  return fieldRules;
194
227
  };
195
228
  exports.updateFieldRules = updateFieldRules;