@ibm-cloud/cd-tools 1.1.2 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -22,7 +22,6 @@ Provides tools to work with IBM Cloud Continuous Delivery resources, including *
  ## Prerequisites
  - Node.js v20 (or later)
  - Terraform v1.13.3 (or later)
- - Terraformer v0.8.30 (or later)
  - An **IBM Cloud API key** with the following IAM access permissions:
    - **Viewer** for the source Toolchain(s) being copied
    - **Editor** to create new Toolchains in the target region
@@ -30,20 +29,18 @@ Provides tools to work with IBM Cloud Continuous Delivery resources, including *
  - For Git Repos and Issue Tracking projects, Personal Access Tokens (PAT) for the source and destination regions are required, with the `api` scope.

  ## Install
- ### Install Node.js, Terraform, Terraformer
+ ### Install Node.js, Terraform

  #### MacOS
  ```sh
  brew install node
  brew tap hashicorp/tap
  brew install hashicorp/tap/terraform
- brew install terraformer
  ```

  #### Other platforms
  - Node.js [install instructions](https://nodejs.org/en/download)
  - Terraform [install instructions](https://developer.hashicorp.com/terraform/install)
- - Terraformer [install instructions](https://github.com/GoogleCloudPlatform/terraformer?tab=readme-ov-file#installation)

  ## Usage

@@ -65,3 +62,6 @@ Commands:
  copy-toolchain [options] Copies a toolchain, including tool integrations and Tekton pipelines, to another region or resource group.
  help [command] display help for command
  ```
+
+ ## Test
+ All test setup and usage instructions are documented in [test/README.md](./test/README.md).
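
The Usage block above lists the available commands. For orientation, a hypothetical invocation could look like the following sketch; the `npx @ibm-cloud/cd-tools` entry point is an assumption, not taken from the package:

```sh
# Hypothetical: inspect the copy command's options before running it.
# The "npx @ibm-cloud/cd-tools" entry point is an assumption.
npx @ibm-cloud/cd-tools help copy-toolchain
```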
@@ -13,15 +13,15 @@ import { Command } from 'commander';
  import { parseEnvVar, decomposeCrn, isSecretReference } from './utils/utils.js';
  import { logger, LOG_STAGES } from './utils/logger.js';
  import { getBearerToken, getToolchainTools, getPipelineData } from './utils/requests.js';
- import { UPDATEABLE_SECRET_PROPERTIES_BY_TOOL_TYPE } from '../config.js';
+ import { SECRET_KEYS_MAP } from '../config.js';

  const command = new Command('check-secrets')
-   .description('Checks if you have any stored secrets in your toolchain or pipelines')
-   .requiredOption('-c, --toolchain-crn <crn>', 'The CRN of the source toolchain to check')
-   .option('-a --apikey <api key>', 'IBM Cloud IAM API key with permissions to read the toolchain.')
-   .showHelpAfterError()
-   .hook('preAction', cmd => cmd.showHelpAfterError(false)) // only show help during validation
-   .action(main);
+   .description('Checks if you have any stored secrets in your toolchain or pipelines')
+   .requiredOption('-c, --toolchain-crn <crn>', 'The CRN of the source toolchain to check')
+   .option('-a --apikey <api key>', 'IBM Cloud IAM API key with permissions to read the toolchain.')
+   .showHelpAfterError()
+   .hook('preAction', cmd => cmd.showHelpAfterError(false)) // only show help during validation
+   .action(main);

  async function main(options) {
    const toolchainCrn = options.toolchainCrn;
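
Based on the options declared above, a run of this command might look like the following sketch; the entry-point name and the CRN value are placeholders, not taken from the package:

```sh
# Hypothetical check-secrets invocation; entry point and CRN are placeholders.
npx @ibm-cloud/cd-tools check-secrets \
  -c "crn:v1:bluemix:public:toolchain:us-south:a/<account-id>:<toolchain-guid>::" \
  -a "$IBMCLOUD_API_KEY"
```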
@@ -50,8 +50,9 @@ async function main(options) {
    const tool = getToolsRes.tools[i];

    // Check tool integrations for any plain text secret values
-   if (UPDATEABLE_SECRET_PROPERTIES_BY_TOOL_TYPE[tool.tool_type_id]) {
-     UPDATEABLE_SECRET_PROPERTIES_BY_TOOL_TYPE[tool.tool_type_id].forEach((updateableSecretParam) => {
+   if (SECRET_KEYS_MAP[tool.tool_type_id]) {
+     SECRET_KEYS_MAP[tool.tool_type_id].forEach((entry) => {
+       const updateableSecretParam = entry.key;
        if (tool.parameters[updateableSecretParam] && !isSecretReference(tool.parameters[updateableSecretParam])) {
          toolResults.push({
            'Tool ID': tool.id,
@@ -64,13 +65,13 @@ async function main(options) {

    // For tekton pipelines, check for any plain text secret properties
    if (tool.tool_type_id === 'pipeline' && tool.parameters?.type === 'tekton') {
-     const pipelineData = await getPipelineData (token, tool.id, region);
+     const pipelineData = await getPipelineData(token, tool.id, region);

      pipelineData?.properties.forEach((prop) => {
        if (prop.type === 'secure' && !isSecretReference(prop.value)) {
          pipelineResults.push({
            'Pipeline ID': pipelineData.id,
-           'Trigger Name': '-',
+           'Trigger Name': '-',
            'Property Name': prop.name
          });
        };
@@ -81,7 +82,7 @@ async function main(options) {
        if (prop.type === 'secure' && !isSecretReference(prop.value)) {
          pipelineResults.push({
            'Pipeline ID': pipelineData.id,
-           'Trigger Name': trigger.name,
+           'Trigger Name': trigger.name,
            'Property Name': prop.name
          });
        };
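
Both this command and the new `import-terraform.js` (added below) read `SECRET_KEYS_MAP` from `config.js`, but the diff never shows the map itself. Here is a minimal sketch of the shape implied by the two call sites (`entry.key` here; `{ key, tfKey, prereq, required }` in `import-terraform.js`); every concrete name below is an illustrative assumption:

```js
// Hypothetical SECRET_KEYS_MAP shape inferred from its call sites; the tool
// type and property names are placeholders, not the package's real config.
export const SECRET_KEYS_MAP = {
  '<tool_type_id>': [
    {
      key: 'api_token',     // parameter name on the tool integration (API response)
      tfKey: 'api_token',   // matching parameter name on the Terraform resource
      required: true,       // when true, a "<tfKey>" placeholder is written if no secret reference exists
      prereq: {             // optional: entry applies only when tool[prereq.key] is one of the values
        key: '<tool-field>',
        values: ['<allowed-value>']
      }
    }
  ]
};
```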
@@ -15,9 +15,10 @@ import { Command, Option } from 'commander';

  import { parseEnvVar } from './utils/utils.js';
  import { logger, LOG_STAGES } from './utils/logger.js';
- import { setTerraformerEnv, setTerraformEnv, initProviderFile, runTerraformerImport, setupTerraformFiles, runTerraformInit, getNumResourcesPlanned, runTerraformApply, getNumResourcesCreated, getNewToolchainId } from './utils/terraform.js';
- import { getAccountId, getBearerToken, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
+ import { setTerraformEnv, initProviderFile, setupTerraformFiles, runTerraformInit, getNumResourcesPlanned, runTerraformApply, getNumResourcesCreated, getNewToolchainId } from './utils/terraform.js';
+ import { getAccountId, getBearerToken, getIamAuthPolicies, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
  import { validatePrereqsVersions, validateTag, validateToolchainId, validateToolchainName, validateTools, validateOAuth, warnDuplicateName, validateGritUrl } from './utils/validate.js';
+ import { importTerraform } from './utils/import-terraform.js';

  import { COPY_TOOLCHAIN_DESC, MIGRATION_DOC_URL, TARGET_REGIONS, SOURCE_REGIONS } from '../config.js';

@@ -27,7 +28,8 @@ process.on('exit', (code) => {

  const LOGS_DIR = '.logs';
  const TEMP_DIR = '.migration-temp'
- const DEBUG_MODE = false; // when true, temp folder is preserved
+ const LOG_DUMP = process.env['LOG_DUMP'] === 'false' ? false : true; // when true or not specified, logs are also written to a log file in LOGS_DIR
+ const DEBUG_MODE = process.env['DEBUG_MODE'] === 'true' ? true : false; // when true, temp folder is preserved
  const OUTPUT_DIR = 'output-' + new Date().getTime();
  const DRY_RUN = false; // when true, terraform apply does not run

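Both constants are plain environment toggles, so they can be flipped per run without editing the source; the entry-point name below is an assumption:

```sh
# Hypothetical: preserve the temp folder and skip the log-file dump for one run.
DEBUG_MODE=true LOG_DUMP=false npx @ibm-cloud/cd-tools copy-toolchain --help
```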
@@ -74,7 +76,7 @@ async function main(options) {
    const verbosity = options.silent ? 0 : options.verbose ? 2 : 1;

    logger.setVerbosity(verbosity);
-   logger.createLogStream(`${LOGS_DIR}/copy-toolchain-${new Date().getTime()}.log`);
+   if (LOG_DUMP) logger.createLogStream(`${LOGS_DIR}/copy-toolchain-${new Date().getTime()}.log`);
    logger.dump(`Options: ${JSON.stringify(options)}\n`);

    let bearer;
@@ -86,10 +88,11 @@ async function main(options) {
    let targetRgId;
    let targetRgName;
    let apiKey = options.apikey;
+   let policyIds; // used to include s2s auth policies
    let moreTfResources = {};
    let gritMapping = {};

-   // Validate arguments and check if Terraformer and Terraform are installed appropriately
+   // Validate arguments and check if Terraform is installed appropriately
    try {
      validatePrereqsVersions();

@@ -168,6 +171,7 @@ async function main(options) {
    // collect instances of legacy GHE tool integrations
    const collectGHE = () => {
      moreTfResources['github_integrated'] = [];
+
      allTools.forEach((t) => {
        if (t.tool_type_id === 'github_integrated') {
          moreTfResources['github_integrated'].push(t);
@@ -177,6 +181,26 @@ async function main(options) {

    collectGHE();

+   const collectPolicyIds = async () => {
+     moreTfResources['iam_authorization_policy'] = [];
+
+     const res = await getIamAuthPolicies(bearer, accountId);
+
+     policyIds = res['policies'].filter((p) => p.subjects[0].attributes.find(
+       (a) => a.name === 'serviceInstance' && a.value === sourceToolchainId)
+     );
+     policyIds = policyIds.map((p) => p.id);
+   };
+
+   if (includeS2S) {
+     try {
+       await collectPolicyIds();
+     } catch (e) {
+       logger.error('Something went wrong while fetching service-to-service auth policies', LOG_STAGES.setup);
+       throw e;
+     }
+   }
+
    logger.info('Arguments and required packages verified, proceeding with copying toolchain...', LOG_STAGES.setup);

    // Set up temp folder
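
`collectPolicyIds` assumes a particular response shape from `getIamAuthPolicies`. A sketch reduced to the fields the filter actually reads; all values are placeholders:

```js
// Hypothetical getIamAuthPolicies response, trimmed to the fields collectPolicyIds touches.
const exampleResponse = {
  policies: [
    {
      id: '<policy-id>',
      subjects: [
        { attributes: [{ name: 'serviceInstance', value: '<source-toolchain-id>' }] }
      ]
    }
  ]
};
```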
@@ -194,24 +218,28 @@ async function main(options) {
    }

    try {
-     const runTerraformer = async () => {
+     let nonSecretRefs;
+
+     const importTerraformWrapper = async () => {
        setTimeout(() => {
          logger.updateSpinnerMsg('Still importing toolchain...');
        }, 5000);

-       setTerraformerEnv(apiKey, sourceToolchainId, includeS2S);
-
        await initProviderFile(sourceRegion, TEMP_DIR);
        await runTerraformInit(TEMP_DIR);

-       await runTerraformerImport(sourceRegion, TEMP_DIR, isCompact, verbosity);
+       nonSecretRefs = await importTerraform(bearer, apiKey, sourceRegion, sourceToolchainId, targetToolchainName, policyIds, TEMP_DIR, isCompact, verbosity);
      };
+
      await logger.withSpinner(
-       runTerraformer,
+       importTerraformWrapper,
        'Importing toolchain...',
-       'Toolchain successfully imported using Terraformer',
-       LOG_STAGES.terraformer
+       'Toolchain successfully imported',
+       LOG_STAGES.import
      );
+
+     if (nonSecretRefs.length > 0) logger.warn(`\nWarning! The following generated terraform resource contains a hashed secret, applying without changes may result in error(s):\n${nonSecretRefs.map((entry) => `- ${entry}\n`).join('')}`, '', true);
+
    } catch (err) {
      if (err.message && err.stack) {
        const errMsg = verbosity > 1 ? err.stack : err.message;
@@ -224,10 +252,10 @@ async function main(options) {
    // Prepare for Terraform
    try {
      if (!fs.existsSync(outputDir)) {
-       logger.info(`Creating output directory "${outputDir}"...`, LOG_STAGES.terraformer);
+       logger.info(`Creating output directory "${outputDir}"...`, LOG_STAGES.import);
        fs.mkdirSync(outputDir);
      } else {
-       logger.info(`Output directory "${outputDir}" already exists`, LOG_STAGES.terraformer);
+       logger.info(`Output directory "${outputDir}" already exists`, LOG_STAGES.import);
      }

      await setupTerraformFiles({
@@ -248,7 +276,7 @@ async function main(options) {
    } catch (err) {
      if (err.message && err.stack) {
        const errMsg = verbosity > 1 ? err.stack : err.message;
-       logger.error(errMsg, LOG_STAGES.terraformer);
+       logger.error(errMsg, LOG_STAGES.import);
      }
      await handleCleanup();
      exit(1);
@@ -257,7 +285,7 @@ async function main(options) {
    // Run Terraform
    try {
      if (!dryRun) {
-       setTerraformEnv(verbosity);
+       setTerraformEnv(apiKey, verbosity);

        await logger.withSpinner(runTerraformInit,
          'Running terraform init...',
@@ -0,0 +1,333 @@
+ /**
+  * Licensed Materials - Property of IBM
+  * (c) Copyright IBM Corporation 2025. All Rights Reserved.
+  *
+  * Note to U.S. Government Users Restricted Rights:
+  * Use, duplication or disclosure restricted by GSA ADP Schedule
+  * Contract with IBM Corp.
+  */
+
+ import fs from 'node:fs';
+ import { promisify } from 'node:util';
+
+ import { parse as tfToJson } from '@cdktf/hcl2json'
+ import { jsonToTf } from 'json-to-tf';
+
+ import { getPipelineData, getToolchainTools } from './requests.js';
+ import { runTerraformPlanGenerate, setTerraformEnv } from './terraform.js';
+ import { getRandChars, isSecretReference, normalizeName } from './utils.js';
+
+ import { SECRET_KEYS_MAP, SUPPORTED_TOOLS_MAP } from '../../config.js';
+
+ const writeFilePromise = promisify(fs.writeFile);
+
+ export async function importTerraform(token, apiKey, region, toolchainId, toolchainName, policyIds, dir, isCompact, verbosity) {
+   // STEP 1/2: set up terraform file with import blocks
+   const importBlocks = []; // an array of objects representing import blocks, used in importBlocksToTf
+   const additionalProps = {}; // maps resource name to array of { property/param, value }, used to override terraform import
+
+   const toolIdMap = {}; // maps tool ids to { type, name }, used to add references
+
+   const repoUrlMap = {}; // maps repo urls to { type, name }, used to add references
+   const repoResources = [
+     'ibm_cd_toolchain_tool_bitbucketgit',
+     'ibm_cd_toolchain_tool_hostedgit',
+     'ibm_cd_toolchain_tool_gitlab',
+     'ibm_cd_toolchain_tool_githubconsolidated'
+   ];
+
+   const nonSecretRefs = [];
+
+   let block = importBlock(toolchainId, toolchainName, 'ibm_cd_toolchain');
+   importBlocks.push(block);
+
+   const toolchainResName = block.name;
+   let pipelineResName;
+
+   // get list of tools
+   const allTools = await getToolchainTools(token, toolchainId, region);
+   for (const tool of allTools.tools) {
+     const toolName = tool.parameters?.name ?? tool.tool_type_id;
+
+     if (tool.tool_type_id in SUPPORTED_TOOLS_MAP) {
+       block = importBlock(`${toolchainId}/${tool.id}`, toolName, SUPPORTED_TOOLS_MAP[tool.tool_type_id]);
+       importBlocks.push(block);
+
+       const toolResName = block.name;
+       pipelineResName = block.name; // used below
+
+       toolIdMap[tool.id] = { type: SUPPORTED_TOOLS_MAP[tool.tool_type_id], name: toolResName };
+
+       // overwrite hard-coded id with reference
+       additionalProps[block.name] = [
+         { property: 'toolchain_id', value: `\${ibm_cd_toolchain.${toolchainResName}.id}` },
+       ];
+
+       // check and add secret refs
+       if (tool.tool_type_id in SECRET_KEYS_MAP) {
+         SECRET_KEYS_MAP[tool.tool_type_id].forEach(({ key, tfKey, prereq, required }) => {
+           if (prereq) {
+             if (!prereq.values.includes(tool[prereq.key])) return;
+           }
+
+           if (isSecretReference(tool.parameters[key])) {
+             additionalProps[block.name].push({ param: tfKey, value: tool.parameters[key] });
+           } else {
+             nonSecretRefs.push(block.name);
+             if (required) additionalProps[block.name].push({ param: tfKey, value: `<${tfKey}>` });
+           }
+         });
+       }
+     }
+
+     if (tool.tool_type_id === 'pipeline' && tool.parameters?.type === 'tekton') {
+       const pipelineData = await getPipelineData(token, tool.id, region);
+
+       block = importBlock(pipelineData.id, toolName, 'ibm_cd_tekton_pipeline');
+       importBlocks.push(block);
+
+       // overwrite hard-coded id with reference
+       additionalProps[block.name] = [
+         { property: 'pipeline_id', value: `\${ibm_cd_toolchain_tool_pipeline.${pipelineResName}.tool_id}` },
+       ];
+
+
+       pipelineData.definitions.forEach((def) => {
+         block = importBlock(`${pipelineData.id}/${def.id}`, 'definition', 'ibm_cd_tekton_pipeline_definition');
+         importBlocks.push(block);
+
+         // overwrite hard-coded id with reference
+         additionalProps[block.name] = [
+           { property: 'pipeline_id', value: `\${ibm_cd_toolchain_tool_pipeline.${pipelineResName}.tool_id}` },
+         ];
+       });
+
+       pipelineData.properties.forEach((prop) => {
+         block = importBlock(`${pipelineData.id}/${prop.name}`, prop.name, 'ibm_cd_tekton_pipeline_property');
+         importBlocks.push(block);
+
+         // overwrite hard-coded id with reference
+         additionalProps[block.name] = [
+           { property: 'pipeline_id', value: `\${ibm_cd_toolchain_tool_pipeline.${pipelineResName}.tool_id}` },
+         ];
+       });
+
+       pipelineData.triggers.forEach((trig) => {
+         block = importBlock(`${pipelineData.id}/${trig.id}`, trig.name, 'ibm_cd_tekton_pipeline_trigger');
+         importBlocks.push(block);
+
+         // overwrite hard-coded id with reference
+         additionalProps[block.name] = [
+           { property: 'pipeline_id', value: `\${ibm_cd_toolchain_tool_pipeline.${pipelineResName}.tool_id}` },
+         ];
+
+         const triggerResName = block.name;
+
+         trig.properties.forEach((trigProp) => {
+           block = importBlock(`${pipelineData.id}/${trig.id}/${trigProp.name}`, trigProp.name, 'ibm_cd_tekton_pipeline_trigger_property');
+           importBlocks.push(block);
+
+           // overwrite hard-coded id with reference
+           additionalProps[block.name] = [
+             { property: 'pipeline_id', value: `\${ibm_cd_toolchain_tool_pipeline.${pipelineResName}.tool_id}` },
+             { property: 'trigger_id', value: `\${ibm_cd_tekton_pipeline_trigger.${triggerResName}.trigger_id}` }
+           ];
+         });
+       });
+     }
+   }
+
+   // include s2s
+   if (policyIds) {
+     for (const policyId of policyIds) {
+       block = importBlock(policyId, 'iam_authorization_policy', 'ibm_iam_authorization_policy');
+       importBlocks.push(block);
+
+       // overwrite hard-coded id with reference
+       additionalProps[block.name] = [
+         { property: 'source_resource_instance_id', value: `\${ibm_cd_toolchain.${toolchainResName}.id}` },
+       ];
+     }
+   }
+
+   await importBlocksToTf(importBlocks, dir);
+
+   if (!fs.existsSync(`${dir}/generated`)) fs.mkdirSync(`${dir}/generated`);
+
+   // STEP 2/2: run terraform import and post-processing
+   setTerraformEnv(apiKey, verbosity);
+   await runTerraformPlanGenerate(dir, 'generated/draft.tf').catch(() => { }); // temp fix for errors due to bugs in the provider
+
+   const generatedFile = fs.readFileSync(`${dir}/generated/draft.tf`);
+   const generatedFileJson = await tfToJson('draft.tf', generatedFile.toString());
+
+   const newTfFileObj = { 'resource': {} }
+
+   for (const [key, value] of Object.entries(generatedFileJson['resource'])) {
+     for (const [k, v] of Object.entries(value)) {
+       newTfFileObj['resource'][key] = { ...(newTfFileObj['resource'][key] ?? []), [k]: v[0] };
+
+       // remove empty tool, which breaks jsonToTf
+       try {
+         if (Object.keys(newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['tool'][0]).length < 1) {
+           delete newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['tool'];
+         }
+       } catch {
+         // do nothing
+       }
+
+       // ignore null values
+       for (const [k2, v2] of Object.entries(v[0])) {
+         if (v2 === null) delete newTfFileObj['resource'][key][k][k2];
+       }
+
+       // ignore null values in parameters
+       try {
+         if (Object.keys(v[0]['parameters'][0]).length > 0) {
+           for (const [k2, v2] of Object.entries(v[0]['parameters'][0])) {
+             if (v2 === null) delete newTfFileObj['resource'][key][k]['parameters'][0][k2];
+           }
+         }
+       } catch {
+         // do nothing
+       }
+
+       // ignore null values in source properties
+       try {
+         if (Object.keys(v[0]['source'][0]['properties'][0]).length > 0) {
+           for (const [k2, v2] of Object.entries(v[0]['source'][0]['properties'][0])) {
+             if (v2 === null) delete newTfFileObj['resource'][key][k]['source'][0]['properties'][0][k2];
+           }
+         }
+       } catch {
+         // do nothing
+       }
+
+       // add/overwrite additional props
+       if (k in additionalProps) {
+         additionalProps[k].forEach(({ param, property, value }) => {
+           if (property) newTfFileObj['resource'][key][k][property] = value;
+           if (param) {
+             newTfFileObj['resource'][key][k]['parameters'][0][param] = value;
+           }
+         })
+       }
+
+       // add relevant references and depends_on
+       if (key === 'ibm_cd_tekton_pipeline') {
+         const workerId = newTfFileObj['resource'][key][k]['worker'][0]['id'];
+         if (workerId != null && workerId != 'public' && workerId in toolIdMap) {
+           newTfFileObj['resource'][key][k]['worker'][0]['id'] = `\${${toolIdMap[workerId].type}.${toolIdMap[workerId].name}.tool_id}`;
+         }
+       } else if (key === 'ibm_cd_tekton_pipeline_property' || key === 'ibm_cd_tekton_pipeline_trigger_property') {
+         const propValue = newTfFileObj['resource'][key][k]['value'];
+         if (newTfFileObj['resource'][key][k]['type'] === 'integration' && propValue in toolIdMap) {
+           newTfFileObj['resource'][key][k]['depends_on'] = [`\${${toolIdMap[propValue].type}.${toolIdMap[propValue].name}}`];
+         }
+       }
+
+       // clean up unused/misplaced params
+       if (key === 'ibm_iam_authorization_policy') {
+         const deleteKeys = [
+           'subject_attributes',
+           'resource_attributes',
+           'source_service_account',
+           'transaction_id'
+         ];
+
+         for (const toDelete of deleteKeys) {
+           delete newTfFileObj['resource'][key][k][toDelete];
+         }
+       }
+
+       if (repoResources.includes(key)) {
+         const paramsMap = newTfFileObj['resource'][key][k]['parameters'][0];
+
+         // collect repo url references to be added on second pass
+         const repoUrl = paramsMap['repo_url'];
+         repoUrlMap[repoUrl] = { type: key, name: k };
+
+         // set up initialization
+         const initializationMap = {
+           git_id: paramsMap['git_id'],
+           type: paramsMap['type'],
+           repo_url: paramsMap['repo_url'],
+           private_repo: paramsMap['private_repo'],
+         };
+         newTfFileObj['resource'][key][k]['initialization'] = [initializationMap];
+
+         // clean up parameters
+         const newParamsMap = {};
+         const paramsToInclude = ['api_token', 'auth_type', 'enable_traceability', 'integration_owner', 'toolchain_issues_enabled'];
+         for (const param of paramsToInclude) {
+           newParamsMap[param] = paramsMap[param];
+         }
+         newTfFileObj['resource'][key][k]['parameters'][0] = newParamsMap;
+       }
+     }
+   }
+
+   // add repo url depends_on on second pass
+   for (const [key, value] of Object.entries(generatedFileJson['resource'])) {
+     for (const [k, _] of Object.entries(value)) {
+       if (key === 'ibm_cd_tekton_pipeline_definition' || key === 'ibm_cd_tekton_pipeline_trigger') {
+         try {
+           const thisUrl = newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['url'];
+
+           if (thisUrl in repoUrlMap) {
+             newTfFileObj['resource'][key][k]['depends_on'] = [`\${${repoUrlMap[thisUrl].type}.${repoUrlMap[thisUrl].name}}`];
+           }
+         } catch {
+           // do nothing
+         }
+       }
+     }
+   }
+
+   if (!isCompact) {
+     for (const [key, value] of Object.entries(newTfFileObj['resource'])) {
+       if (!key.startsWith('ibm_')) continue;
+       const newFileName = key.split('ibm_')[1];
+
+       const newFileContents = { 'resource': { [key]: value } };
+       const newFileContentsJson = jsonToTf(JSON.stringify(newFileContents));
+
+       fs.writeFileSync(`${dir}/generated/${newFileName}.tf`, newFileContentsJson);
+     }
+   } else {
+     const generatedFileNew = jsonToTf(JSON.stringify(newTfFileObj));
+     fs.writeFileSync(`${dir}/generated/resources.tf`, generatedFileNew);
+   }
+
+   // remove draft
+   if (fs.existsSync(`${dir}/generated/draft.tf`)) fs.rmSync(`${dir}/generated/draft.tf`, { recursive: true });
+
+   return nonSecretRefs;
+ }
+
+ // returned objects have "id" and "to" keys for the import block, plus the bare resource "name"
+ // e.g. { id: 'bc3d05f1-e6f7-4b5e-8647-8119d8037039', to: 'ibm_cd_toolchain.my_everything_toolchain_e22c' }
+ function importBlock(id, name, resourceType) {
+   const newName = `${normalizeName(name)}_${getRandChars(4)}`;
+
+   return {
+     id: id,
+     to: `${resourceType}.${newName}`,
+     name: newName
+   }
+ }
+
+ // importBlocks array to tf file
+ async function importBlocksToTf(blocks, dir) {
+   let fileContent = '';
+
+   blocks.forEach((block) => {
+     const template = `import {
+ id = "${block.id}"
+ to = ${block.to}
+ }\n\n`;
+     fileContent += template;
+   });
+
+   return await writeFilePromise(`${dir}/import.tf`, fileContent);
+ }
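
For illustration, plugging the sample values from the comment above `importBlock` into `importBlocksToTf` yields an `import.tf` of this form:

```tf
import {
  id = "bc3d05f1-e6f7-4b5e-8647-8119d8037039"
  to = ibm_cd_toolchain.my_everything_toolchain_e22c
}
```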
@@ -14,6 +14,8 @@ import stripAnsi from 'strip-ansi';

  import fs from 'node:fs';

+ const DISABLE_SPINNER = process.env.DISABLE_SPINNER === 'true';
+
  const COLORS = {
    reset: '\x1b[0m',
    gray: '\x1b[90m',
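
Like `LOG_DUMP` and `DEBUG_MODE` in `copy-toolchain`, this is a plain environment toggle; a sketch of a CI-friendly run (the entry-point name is an assumption):

```sh
# Hypothetical: suppress the ora spinner so CI logs stay clean.
DISABLE_SPINNER=true npx @ibm-cloud/cd-tools copy-toolchain --help
```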
@@ -84,14 +86,15 @@ class Logger {

    close() {
      return new Promise((resolve, reject) => {
-       this.logStream?.on('finish', resolve);
-       this.logStream?.on('error', reject);
-       this.logStream?.end();
+       if (!this.logStream) return resolve();
+       this.logStream.on('finish', resolve);
+       this.logStream.on('error', reject);
+       this.logStream.end();
      });
    }

    startSpinner(msg, prefix = '') {
-     if (this.verbosity < 1) return;
+     if (this.verbosity < 1 || DISABLE_SPINNER) return;
      this.spinner = ora({
        prefixText: this.#getFullPrefix(prefix),
        text: msg
@@ -103,7 +106,7 @@ class Logger {
    resetSpinner() { if (this.verbosity >= 1) this.spinner = null; }

    async withSpinner(asyncFn, loadingMsg, successMsg, prefix, ...args) {
-     if (this.verbosity < 1) {
+     if (this.verbosity < 1 || DISABLE_SPINNER) {
        try {
          return await asyncFn(...args);
        }
@@ -167,7 +170,7 @@ export const logger = new Logger();

  export const LOG_STAGES = {
    setup: 'setup',
-   terraformer: 'terraformer',
+   import: 'import',
    tf: 'terraform',
    info: 'info'
  };