@ibm-cloud/cd-tools 1.2.1 → 1.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -49,11 +49,15 @@ async function main(options) {
   for (let i = 0; i < getToolsRes.tools.length; i++) {
     const tool = getToolsRes.tools[i];

+    // Skip iff it's GitHub/GitLab/GRIT integration with OAuth
+    if (['githubconsolidated', 'github_integrated', 'gitlab', 'hostedgit'].includes(tool.tool_type_id) && (tool.parameters?.auth_type === '' || tool.parameters?.auth_type === 'oauth'))
+      continue;
+
     // Check tool integrations for any plain text secret values
     if (SECRET_KEYS_MAP[tool.tool_type_id]) {
       SECRET_KEYS_MAP[tool.tool_type_id].forEach((entry) => {
         const updateableSecretParam = entry.key;
-        if (tool.parameters[updateableSecretParam] && !isSecretReference(tool.parameters[updateableSecretParam])) {
+        if (tool.parameters[updateableSecretParam] && !isSecretReference(tool.parameters[updateableSecretParam]) && tool.parameters[updateableSecretParam].length > 0) {
           toolResults.push({
             'Tool ID': tool.id,
             'Tool Type': tool.tool_type_id,
@@ -68,7 +72,7 @@ async function main(options) {
     const pipelineData = await getPipelineData(token, tool.id, region);

     pipelineData?.properties.forEach((prop) => {
-      if (prop.type === 'secure' && !isSecretReference(prop.value)) {
+      if (prop.type === 'secure' && !isSecretReference(prop.value) && prop.value.length > 0) {
         pipelineResults.push({
           'Pipeline ID': pipelineData.id,
           'Trigger Name': '-',
@@ -79,7 +83,7 @@ async function main(options) {

     pipelineData?.triggers.forEach((trigger) => {
       trigger.properties?.forEach((prop) => {
-        if (prop.type === 'secure' && !isSecretReference(prop.value)) {
+        if (prop.type === 'secure' && !isSecretReference(prop.value) && prop.value.length > 0) {
           pipelineResults.push({
             'Pipeline ID': pipelineData.id,
             'Trigger Name': trigger.name,
@@ -16,7 +16,7 @@ import { Command, Option } from 'commander';
 import { parseEnvVar } from './utils/utils.js';
 import { logger, LOG_STAGES } from './utils/logger.js';
 import { setTerraformEnv, initProviderFile, setupTerraformFiles, runTerraformInit, getNumResourcesPlanned, runTerraformApply, getNumResourcesCreated, getNewToolchainId } from './utils/terraform.js';
-import { getAccountId, getBearerToken, getIamAuthPolicies, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
+import { getAccountId, getBearerToken, getCdInstanceByRegion, getIamAuthPolicies, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
 import { validatePrereqsVersions, validateTag, validateToolchainId, validateToolchainName, validateTools, validateOAuth, warnDuplicateName, validateGritUrl } from './utils/validate.js';
 import { importTerraform } from './utils/import-terraform.js';

@@ -26,11 +26,12 @@ process.on('exit', (code) => {
   if (code !== 0) logger.print(`Need help? Visit ${MIGRATION_DOC_URL} for more troubleshooting information.`);
 });

+const TIME_SUFFIX = new Date().getTime();
 const LOGS_DIR = '.logs';
-const TEMP_DIR = '.migration-temp'
+const TEMP_DIR = '.migration-temp-' + TIME_SUFFIX;
 const LOG_DUMP = process.env['LOG_DUMP'] === 'false' ? false : true; // when true or not specified, logs are also written to a log file in LOGS_DIR
 const DEBUG_MODE = process.env['DEBUG_MODE'] === 'true' ? true : false; // when true, temp folder is preserved
-const OUTPUT_DIR = 'output-' + new Date().getTime();
+const OUTPUT_DIR = 'output-' + TIME_SUFFIX;
 const DRY_RUN = false; // when true, terraform apply does not run


@@ -100,6 +101,16 @@ async function main(options) {
   bearer = await getBearerToken(apiKey);
   const accountId = await getAccountId(bearer, apiKey);

+  // check for continuous delivery instance in target region
+  if (!await getCdInstanceByRegion(bearer, accountId, targetRegion)) throw Error(`Could not find a Continuous Delivery instance in the target region '${targetRegion}', please create one before proceeding.`);
+
+  // check for existing .tf files in output directory
+  if (fs.existsSync(outputDir)) {
+    let files = fs.readdirSync(outputDir, { recursive: true });
+    files = files.filter((f) => f.endsWith('.tf'));
+    if (files.length > 0) throw Error(`Output directory already has ${files.length} '.tf' files, please specify a different output directory`);
+  }
+
   if (options.gritMappingFile) {
     gritMapping = JSON.parse(fs.readFileSync(resolve(options.gritMappingFile)));
     const gritPromises = [];
@@ -238,7 +249,8 @@ async function main(options) {
       LOG_STAGES.import
     );

-    if (nonSecretRefs.length > 0) logger.warn(`\nWarning! The following generated terraform resource contains a hashed secret, applying without changes may result in error(s):\n${nonSecretRefs.map((entry) => `- ${entry}\n`).join('')}`, '', true);
+    if (nonSecretRefs.length > 0) logger.warn(`\nWarning! The following generated terraform resource contains hashed secret(s) that cannot be migrated, applying without changes may result in error(s):`);
+    logger.table(nonSecretRefs);

   } catch (err) {
     if (err.message && err.stack) {
@@ -8,7 +8,6 @@
  */

 import fs from 'node:fs';
-import { promisify } from 'node:util';

 import { parse as tfToJson } from '@cdktf/hcl2json'
 import { jsonToTf } from 'json-to-tf';
@@ -19,8 +18,6 @@ import { getRandChars, isSecretReference, normalizeName } from './utils.js';

 import { SECRET_KEYS_MAP, SUPPORTED_TOOLS_MAP } from '../../config.js';

-const writeFilePromise = promisify(fs.writeFile);
-
 export async function importTerraform(token, apiKey, region, toolchainId, toolchainName, policyIds, dir, isCompact, verbosity) {
   // STEP 1/2: set up terraform file with import blocks
   const importBlocks = []; // an array of objects representing import blocks, used in importBlocksToTf
@@ -73,8 +70,15 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
         if (isSecretReference(tool.parameters[key])) {
           additionalProps[block.name].push({ param: tfKey, value: tool.parameters[key] });
         } else {
-          nonSecretRefs.push(block.name);
-          if (required) additionalProps[block.name].push({ param: tfKey, value: `<${tfKey}>` });
+          const newFileName = SUPPORTED_TOOLS_MAP[tool.tool_type_id].split('ibm_')[1];
+          if (required) {
+            nonSecretRefs.push({
+              resource_name: block.name,
+              property_name: tfKey,
+              file_name: isCompact ? 'resources.tf' : `${newFileName}.tf`
+            });
+            additionalProps[block.name].push({ param: tfKey, value: `<${tfKey}>` });
+          }
         }
       });
     }
@@ -150,7 +154,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
     }
   }

-  await importBlocksToTf(importBlocks, dir);
+  importBlocksToTf(importBlocks, dir);

   if (!fs.existsSync(`${dir}/generated`)) fs.mkdirSync(`${dir}/generated`);

@@ -318,7 +322,7 @@ function importBlock(id, name, resourceType) {
 }

 // importBlocks array to tf file
-async function importBlocksToTf(blocks, dir) {
+function importBlocksToTf(blocks, dir) {
   let fileContent = '';

   blocks.forEach((block) => {
@@ -329,5 +333,5 @@ async function importBlocksToTf(blocks, dir) {
     fileContent += template;
   });

-  return await writeFilePromise(`${dir}/import.tf`, fileContent);
+  return fs.writeFileSync(`${dir}/import.tf`, fileContent);
 }
@@ -141,6 +141,31 @@ async function getToolchainsByName(bearer, accountId, toolchainName) {
   }
 }

+async function getCdInstanceByRegion(bearer, accountId, region) {
+  const apiBaseUrl = 'https://api.global-search-tagging.cloud.ibm.com/v3';
+  const options = {
+    url: apiBaseUrl + '/resources/search',
+    method: 'POST',
+    headers: {
+      'Authorization': `Bearer ${bearer}`,
+      'Content-Type': 'application/json',
+    },
+    data: {
+      'query': `service_name:continuous-delivery AND region:"${region}" AND doc.state:ACTIVE`,
+      'fields': ['doc.resource_group_id', 'doc.region_id']
+    },
+    params: { account_id: accountId },
+    validateStatus: () => true
+  };
+  const response = await axios(options);
+  switch (response.status) {
+    case 200:
+      return response.data.items.length > 0;
+    default:
+      throw Error('Get CD instance failed');
+  }
+}
+
 async function getToolchainTools(bearer, toolchainId, region) {
   const apiBaseUrl = `https://api.${region}.devops.cloud.ibm.com/toolchain/v2`;
   const options = {
@@ -388,6 +413,7 @@ async function deleteToolchain(bearer, toolchainId, region) {
 export {
   getBearerToken,
   getAccountId,
+  getCdInstanceByRegion,
   getToolchain,
   getToolchainsByName,
   getToolchainTools,
@@ -9,7 +9,6 @@

 import child_process from 'node:child_process';
 import fs from 'node:fs';
-import { randomInt } from 'node:crypto';
 import { promisify } from 'node:util';

 import { parse as tfToJson } from '@cdktf/hcl2json'
@@ -302,7 +301,7 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,
 }

 async function runTerraformPlanGenerate(dir, fileName) {
-  return await execPromise(`terraform plan -generate-config-out=${fileName}`, { cwd: dir });
+  return await execPromise(`terraform plan -generate-config-out="${fileName}"`, { cwd: dir });
 }

 async function runTerraformInit(dir) {
@@ -11,7 +11,7 @@ import { execSync } from 'child_process';
 import { logger, LOG_STAGES } from './logger.js'
 import { RESERVED_GRIT_PROJECT_NAMES, RESERVED_GRIT_GROUP_NAMES, RESERVED_GRIT_SUBGROUP_NAME, TERRAFORM_REQUIRED_VERSION, SECRET_KEYS_MAP } from '../../config.js';
 import { getToolchainsByName, getToolchainTools, getPipelineData, getAppConfigHealthcheck, getSecretsHealthcheck, getGitOAuth, getGritUserProject, getGritGroup, getGritGroupProject } from './requests.js';
-import { promptUserConfirmation, promptUserInput } from './utils.js';
+import { promptUserConfirmation, promptUserInput, isSecretReference } from './utils.js';


 function validatePrereqsVersions() {
@@ -146,7 +146,6 @@ async function validateTools(token, tcId, region, skipPrompt) {
   const toolsWithHashedParams = [];
   const patTools = [];
   const classicPipelines = [];
-  const secretPattern = /^hash:SHA3-512:[a-zA-Z0-9]{128}$/;

   for (const tool of allTools.tools) {
     const toolName = (tool.name || tool.parameters?.name || tool.parameters?.label || '').replace(/\s+/g, '+');
@@ -203,7 +202,7 @@ async function validateTools(token, tcId, region, skipPrompt) {
         url: toolUrl
       });
     }
-    else if (['githubconsolidated', 'github_integrated', 'gitlab'].includes(tool.tool_type_id) && (tool.parameters?.auth_type === '' || tool.parameters?.auth_type === 'oauth')) { // Skip secret check iff it's GitHub/GitLab integration with OAuth
+    else if (['githubconsolidated', 'github_integrated', 'gitlab', 'hostedgit'].includes(tool.tool_type_id) && (tool.parameters?.auth_type === '' || tool.parameters?.auth_type === 'oauth')) { // Skip secret check iff it's GitHub/GitLab/GRIT integration with OAuth
       continue;
     }
     else {
@@ -212,20 +211,23 @@ async function validateTools(token, tcId, region, skipPrompt) {
       const pipelineData = await getPipelineData(token, tool.id, region);

       pipelineData.properties.forEach((prop) => {
-        if (prop.type === 'secure' && secretPattern.test(prop.value)) secrets.push(['properties', prop.name].join('.').replace(/\s+/g, '+'));
+        if (prop.type === 'secure' && !isSecretReference(prop.value) && prop.value.length > 0)
+          secrets.push(['properties', prop.name].join('.').replace(/\s+/g, '+'));
       });

       pipelineData.triggers.forEach((trigger) => {
-        if ((trigger?.secret?.type === 'token_matches' || trigger?.secret?.type === 'digest_matches') && secretPattern.test(trigger.secret.value)) secrets.push([trigger.name, trigger.secret.key_name].join('.').replace(/\s+/g, '+'));
+        if ((trigger?.secret?.type === 'token_matches' || trigger?.secret?.type === 'digest_matches') && !isSecretReference(trigger.secret.value) && trigger.secret.value.length > 0)
+          secrets.push([trigger.name, trigger.secret.key_name].join('.').replace(/\s+/g, '+'));
         trigger.properties.forEach((prop) => {
-          if (prop.type === 'secure' && secretPattern.test(prop.value)) secrets.push([trigger.name, 'properties', prop.name].join('.').replace(/\s+/g, '+'));
+          if (prop.type === 'secure' && !isSecretReference(prop.value) && prop.value.length > 0)
+            secrets.push([trigger.name, 'properties', prop.name].join('.').replace(/\s+/g, '+'));
         });
       });
     }
     else {
       const secretsToCheck = (SECRET_KEYS_MAP[tool.tool_type_id] || []).map((entry) => entry.key); // Check for secrets in the rest of the tools
       Object.entries(tool.parameters).forEach(([key, value]) => {
-        if (secretPattern.test(value) && secretsToCheck.includes(key)) secrets.push(key);
+        if (!isSecretReference(value) && value.length > 0 && secretsToCheck.includes(key)) secrets.push(key);
       });
     }
     if (secrets.length > 0) {
@@ -260,7 +262,7 @@ async function validateTools(token, tcId, region, skipPrompt) {
   }

   if (toolsWithHashedParams.length > 0) {
-    logger.warn('Warning! The following tools contain secrets that cannot be migrated, please use the \'check-secret\' command to export the secrets: \n', LOG_STAGES.setup, true);
+    logger.warn('Warning! The following tools contain secrets that cannot be migrated, please use the \'check-secrets\' command to export the secrets: \n', LOG_STAGES.setup, true);
     logger.table(toolsWithHashedParams);
   }

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ibm-cloud/cd-tools",
-  "version": "1.2.1",
+  "version": "1.2.3",
   "description": "Tools and utilities for the IBM Cloud Continuous Delivery service and resources",
   "repository": {
     "type": "git",