@ibm-cloud/cd-tools 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/release.yml +35 -0
- package/LICENSE +201 -0
- package/README.md +2 -0
- package/cmd/check-secrets.js +106 -0
- package/cmd/copy-toolchain.js +333 -0
- package/cmd/direct-transfer.js +288 -0
- package/cmd/index.js +13 -0
- package/cmd/utils/logger.js +173 -0
- package/cmd/utils/requests.js +359 -0
- package/cmd/utils/terraform.js +441 -0
- package/cmd/utils/utils.js +128 -0
- package/cmd/utils/validate.js +503 -0
- package/config.js +202 -0
- package/index.js +24 -0
- package/package.json +28 -0
package/cmd/utils/terraform.js
@@ -0,0 +1,441 @@
/**
 * Licensed Materials - Property of IBM
 * (c) Copyright IBM Corporation 2025. All Rights Reserved.
 *
 * Note to U.S. Government Users Restricted Rights:
 * Use, duplication or disclosure restricted by GSA ADP Schedule
 * Contract with IBM Corp.
 */

import child_process from 'node:child_process';
import fs from 'node:fs';
import { randomInt } from 'node:crypto';
import { promisify } from 'node:util';

import { parse as tfToJson } from '@cdktf/hcl2json'
import { jsonToTf } from 'json-to-tf';

import { validateToolchainId, validateGritUrl } from './validate.js';
import { logger } from './logger.js';
import { promptUserInput, replaceUrlRegion } from './utils.js';

// promisify
const readFilePromise = promisify(fs.readFile);
const readDirPromise = promisify(fs.readdir);
const writeFilePromise = promisify(fs.writeFile)
async function execPromise(command, options) {
  try {
    const exec = promisify(child_process.exec);
    const { stdout, stderr } = await exec(command, options);
    return stdout.trim();
  } catch (err) {
    throw new Error(`Command failed: ${command} \n${err.stderr || err.stdout}`);
  }
}
function setTerraformerEnv(apiKey, tcId, includeS2S) {
  process.env['IC_API_KEY'] = apiKey;
  process.env['IBM_CD_TOOLCHAIN_TARGET'] = tcId;
  if (includeS2S) process.env['IBM_CD_TOOLCHAIN_INCLUDE_S2S'] = 1;
}

function setTerraformEnv(verbosity) {
  if (verbosity >= 2) process.env['TF_LOG'] = 'DEBUG';
  process.env['TF_VAR_ibmcloud_api_key'] = process.env.IC_API_KEY;
}
async function initProviderFile(targetRegion, dir) {
  const newProviderTf = { 'provider': {}, 'terraform': { 'required_providers': {} }, 'variable': {} };

  newProviderTf['provider']['ibm'] = [{ 'ibmcloud_api_key': '${var.ibmcloud_api_key}', 'region': targetRegion }];
  newProviderTf['terraform']['required_providers']['ibm'] = { 'source': 'IBM-Cloud/ibm' };
  newProviderTf['variable']['ibmcloud_api_key'] = {};

  const newProviderTfStr = JSON.stringify(newProviderTf)

  return writeFilePromise(`${dir}/provider.tf`, jsonToTf(newProviderTfStr), () => { });
}
async function runTerraformerImport(srcRegion, tempDir, isCompact, verbosity) {
  const stdout = await execPromise(`terraformer import ibm --resources=ibm_cd_toolchain --region=${srcRegion} -S ${isCompact ? '--compact' : ''} ${verbosity >= 2 ? '--verbose' : ''}`, { cwd: tempDir });
  if (verbosity >= 2) logger.print(stdout);
  return stdout;
}
async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag, targetToolchainName, targetRgId, disableTriggers, isCompact, outputDir, tempDir, moreTfResources, gritMapping, skipUserConfirmation }) {
  const promises = [];

  const writeProviderPromise = await initProviderFile(targetRegion, outputDir);
  promises.push(writeProviderPromise);

  // Get toolchain resource
  const toolchainLocation = isCompact ? 'resources.tf' : 'cd_toolchain.tf'
  const resources = await readFilePromise(`${tempDir}/generated/ibm/ibm_cd_toolchain/${toolchainLocation}`, 'utf8');
  const resourcesObj = await tfToJson('output.tf', resources);
  const newTcId = Object.keys(resourcesObj['resource']['ibm_cd_toolchain'])[0];

  // output newly created toolchain ID
  const newOutputTf =
`output "ibm_cd_toolchain_${newTcId}_id" {
  value = "$\{ibm_cd_toolchain.${newTcId}.id}"
}`;
  const writeOutputPromise = writeFilePromise(`${outputDir}/output.tf`, newOutputTf);
  promises.push(writeOutputPromise);

  // Copy over cd_*.tf
  let files = await readDirPromise(`${tempDir}/generated/ibm/ibm_cd_toolchain`);
  if (isCompact) {
    files = files.filter((f) => f === 'resources.tf');
    if (files.length != 1) throw new Error('Something went wrong, resources.tf was not generated...');
  } else {
    const prefix = 'cd_';
    files = files.filter((f) => f.slice(0, prefix.length) === prefix || f === 'iam_authorization_policy.tf');

    // should be processing GRIT first in non-compact case
    if (files.find((f) => f === 'cd_toolchain_tool_hostedgit.tf')) {
      files = [
        'cd_toolchain_tool_hostedgit.tf',
        ...files.filter((f) => f != 'cd_toolchain_tool_hostedgit.tf')
      ];
    }
  }

  // for converting legacy GHE tool integrations
  const hasGHE = moreTfResources['github_integrated'].length > 0;
  const repoToTfName = {};
  const newConvertedTf = {};
  if (hasGHE) {
    const getRandChars = (size) => {
      const charSet = 'abcdefghijklmnopqrstuvwxyz0123456789';
      let res = '';

      for (let i = 0; i < size; i++) {
        const pos = randomInt(charSet.length);
        res += charSet[pos];
      }
      return res;
    };

    moreTfResources['github_integrated'].forEach(t => {
      const gitUrl = t['parameters']['repo_url'];
      const tfName = `converted--githubconsolidated_${getRandChars(4)}`;

      repoToTfName[gitUrl] = tfName;
      newConvertedTf[tfName] = {
        toolchain_id: `\${ibm_cd_toolchain.${newTcId}.id}`,
        initialization: [{
          auto_init: 'false',
          blind_connection: 'false',
          git_id: 'integrated',
          private_repo: 'false',
          repo_url: gitUrl,
          type: 'link',
        }],
        parameters: [{
          ...(t['parameters']['auth_type'] === 'pat' ? { api_token: t['parameters']['api_token'] } : {}),
          auth_type: t['parameters']['auth_type'],
          enable_traceability: t['parameters']['enable_traceability'],
          integration_owner: t['parameters']['integration_owner'],
          toolchain_issues_enabled: t['parameters']['has_issues'],
        }]
      };
    });
  }
  for (const fileName of files) {
    const tfFile = await readFilePromise(`${tempDir}/generated/ibm/ibm_cd_toolchain/${fileName}`, 'utf8');
    const tfFileObj = await tfToJson(fileName, tfFile);

    const newTfFileObj = { 'resource': {} }
    for (const [key, value] of Object.entries(tfFileObj['resource'])) {
      for (const [k, v] of Object.entries(value)) {
        newTfFileObj['resource'][key] = { ...(newTfFileObj['resource'][key] ?? []), [k]: v[0] };
      }
    }

    const resourceName = `ibm_${fileName.split('.tf')[0]}`;

    const usedGritUrls = new Set(Object.values(gritMapping));
    const attemptAddUsedGritUrl = (url) => {
      if (usedGritUrls.has(url)) throw Error(`"${url}" has already been used in another mapping entry`);
      usedGritUrls.add(url);
    };

    let firstGritPrompt = false;

    // should be processed first in non-compact case
    if (isCompact || resourceName === 'ibm_cd_toolchain_tool_hostedgit') {
      if (newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit']) {
        for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'])) {
          try {
            const thisUrl = v['initialization'][0]['repo_url'];
            if (thisUrl in gritMapping) {
              newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'][k]['initialization'][0]['repo_url'] = gritMapping[thisUrl];
              continue;
            }

            let newUrl = replaceUrlRegion(thisUrl, srcRegion, targetRegion);

            // check if same group/project exists, if yes, don't prompt user
            if (skipUserConfirmation || (newUrl && !usedGritUrls.has(newUrl) && await validateGritUrl(token, targetRegion, newUrl, true).catch(() => { return false }))) {
              newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'][k]['initialization'][0]['repo_url'] = newUrl;
              attemptAddUsedGritUrl(newUrl);
              gritMapping[thisUrl] = newUrl;
            } else {
              // prompt user
              const validateGritUrlPrompt = async (str) => {
                const newUrl = `https://${targetRegion}.git.cloud.ibm.com/${str}.git`;
                if (usedGritUrls.has(newUrl)) throw Error(`"${newUrl}" has already been used in another mapping entry`);
                return validateGritUrl(token, targetRegion, str, false);
              }

              if (!firstGritPrompt) {
                firstGritPrompt = true;
                logger.print('Please enter the new URLs for the following GRIT tool(s):\n');
              }

              const newRepoSlug = await promptUserInput(`Old URL: ${thisUrl.slice(0, thisUrl.length - 4)}\nNew URL: https://${targetRegion}.git.cloud.ibm.com/`, '', validateGritUrlPrompt);

              newUrl = `https://${targetRegion}.git.cloud.ibm.com/${newRepoSlug}.git`;
              newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'][k]['initialization'][0]['repo_url'] = newUrl;
              attemptAddUsedGritUrl(newUrl);
              gritMapping[thisUrl] = newUrl;
              logger.print('\n');
            }
          }
          catch (e) {
            logger.error(`Could not verify/replace URL for the following GRIT tool resource: "${k}", ${e}`);
          }
        }
      }
    }
    if (isCompact || resourceName === 'ibm_cd_toolchain') {
      if (targetTag) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['tags'] = [
        ...newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['tags'] ?? [],
        targetTag
      ];
      if (targetToolchainName) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['name'] = targetToolchainName;
      if (targetRgId) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['resource_group_id'] = targetRgId;
    }
    if (isCompact || resourceName === 'ibm_cd_tekton_pipeline_trigger') {
      // by default, disable triggers
      if (disableTriggers && newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger']) {
        for (const key of Object.keys(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'])) {
          if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][key]['type'] === 'manual') continue; // skip manual triggers
          newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][key]['enabled'] = false;
        }
      }

      // set depends_on for references to legacy GHE integrations
      if (hasGHE && newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger']) {
        for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'])) {
          try {
            const thisUrl = v['source'][0]['properties'][0]['url'];
            if (!v['depends_on'] && thisUrl) {
              newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][k]['depends_on'] = [`ibm_cd_toolchain_tool_githubconsolidated.${repoToTfName[thisUrl]}`]
            }
          }
          catch {
            // do nothing
          }
        }
      }

      // update GRIT urls
      if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger']) {
        for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'])) {
          try {
            const thisUrl = v['source'][0]['properties'][0]['url'];
            const newUrl = gritMapping[thisUrl];

            if (newUrl) {
              newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][k]['source'][0]['properties'][0]['url'] = newUrl;
            }
          }
          catch {
            // do nothing
          }
        }
      }
    }
    if (isCompact || resourceName === 'ibm_cd_tekton_pipeline_definition') {
      // set depends_on for references to legacy GHE integrations
      if (hasGHE && newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition']) {
        for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'])) {
          try {
            const thisUrl = v['source'][0]['properties'][0]['url'];
            if (!v['depends_on'] && thisUrl) {
              newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'][k]['depends_on'] = [`ibm_cd_toolchain_tool_githubconsolidated.${repoToTfName[thisUrl]}`]
            }
          }
          catch {
            // do nothing
          }
        }
      }

      // update GRIT urls
      if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition']) {
        for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'])) {
          try {
            const thisUrl = v['source'][0]['properties'][0]['url'];
            const newUrl = gritMapping[thisUrl];

            if (newUrl) {
              newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'][k]['source'][0]['properties'][0]['url'] = newUrl;
            }
          }
          catch {
            // do nothing
          }
        }
      }
    }
    if (isCompact || resourceName === 'ibm_cd_toolchain_tool_githubconsolidated') {
      if (hasGHE) {
        newTfFileObj['resource']['ibm_cd_toolchain_tool_githubconsolidated'] = {
          ...(newTfFileObj['resource']['ibm_cd_toolchain_tool_githubconsolidated'] ?? {}),
          ...newConvertedTf
        };
      }
    }

    const newTfFileObjStr = JSON.stringify(newTfFileObj);
    const newTfFile = replaceDependsOn(jsonToTf(newTfFileObjStr));
    const copyResourcesPromise = writeFilePromise(`${outputDir}/${fileName}`, newTfFile);
    promises.push(copyResourcesPromise);
  }

  // handle case where there is no GH tool integrations, and not compact
  if (hasGHE && !isCompact && !files.includes('cd_toolchain_tool_githubconsolidated.tf')) {
    const newTfFileObj = { 'resource': { ['ibm_cd_toolchain_tool_githubconsolidated']: newConvertedTf } };
    const newTfFileObjStr = JSON.stringify(newTfFileObj);
    const newTfFile = jsonToTf(newTfFileObjStr);
    const copyResourcesPromise = writeFilePromise(`${outputDir}/cd_toolchain_tool_githubconsolidated.tf`, newTfFile);
    promises.push(copyResourcesPromise);
  }

  return Promise.all(promises);
}
async function runTerraformInit(dir) {
  return await execPromise('terraform init', { cwd: dir });
}

// primarily used to get number of resources to be used
async function getNumResourcesPlanned(dir) {
  const planOutput = await execPromise('terraform plan -json', { cwd: dir });
  const planLines = planOutput.split('\n');

  for (const p of planLines) {
    const jsonLine = JSON.parse(p);

    if (jsonLine.type === 'change_summary') {
      return jsonLine.changes.add;
    }
  };
}
async function runTerraformApply(skipTfConfirmation, outputDir, verbosity) {
  let command = 'terraform apply';
  if (skipTfConfirmation || verbosity === 0) {
    command = 'terraform apply -auto-approve';
  }

  const child = child_process.spawn(command, {
    cwd: `${outputDir}`,
    stdio: ['inherit', 'pipe', 'pipe'], // to pass stdin from the parent process, and pipe the stdout and stderr
    shell: true,
    env: process.env,
  });

  let stdoutData = '';
  let stderrData = '';

  child.stdout.on('data', (chunk) => {
    const text = chunk.toString();
    stdoutData += text;
    if (verbosity >= 1) {
      process.stdout.write(text);
      logger.dump(text);
    }
  });

  child.stderr.on('data', (chunk) => {
    const text = chunk.toString();
    stderrData += text;
    if (verbosity >= 1) {
      process.stderr.write(text);
      logger.dump(text);
    }
  });

  return await new Promise((resolve, reject) => {
    child.on('close', (code) => {
      if (code === 0) {
        resolve(stdoutData.trim());
      } else {
        reject(new Error(`Terraform apply failed with code ${code}`));
      }
    });
  });
}
async function getNumResourcesCreated(dir) {
  try {
    // prints a line for every resource in the state file
    const resourcesListStr = await execPromise('terraform state list', { cwd: dir });
    return resourcesListStr.split('\n').length;
  } catch {
    return 0;
  }
}
// get new toolchain link from terraform output
async function getNewToolchainId(dir) {
  try {
    const output = await execPromise('terraform output', { cwd: dir });

    // should look something like: ibm_cd_toolchain_tfer--<toolchain_resource>_id = "<new_toolchain_id>"
    const lineSplit = output.split('"');
    const newTcId = lineSplit[lineSplit.length - 2];

    return validateToolchainId(newTcId);
  } catch {
    return '';
  }
}
// fix quoted references warning for depends_on
function replaceDependsOn(str) {
  try {
    if (typeof str === 'string') {
      const pattern = /^ depends_on = \[\n ("[a-z0-9_\-.]*")\n ]$/gm;

      // get rid of the quotes
      return str.replaceAll(pattern, (match, s) => ` depends_on = \[\n ${s.slice(1, s.length - 1)}\n ]`);
    }
  } catch {
    return str;
  }
}
export {
  setTerraformerEnv,
  setTerraformEnv,
  initProviderFile,
  runTerraformerImport,
  setupTerraformFiles,
  runTerraformInit,
  getNumResourcesPlanned,
  runTerraformApply,
  getNewToolchainId,
  getNumResourcesCreated
}
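
The helpers above are exported for the copy/transfer commands in this package (cmd/copy-toolchain.js and cmd/direct-transfer.js, whose bodies are not reproduced in this diff excerpt). The following is an illustrative sketch of how they appear intended to compose, not the package's actual orchestration; the regions, IDs, directories, and option values are placeholders, and the real commands may wire things differently.

// Sketch only -- not part of the published package. Placeholder values throughout.
import fs from 'node:fs';
import {
  setTerraformerEnv, setTerraformEnv, runTerraformerImport, setupTerraformFiles,
  runTerraformInit, getNumResourcesPlanned, runTerraformApply,
  getNumResourcesCreated, getNewToolchainId
} from './terraform.js';

async function copyToolchainSketch() {
  const tempDir = fs.mkdtempSync('cd-tools-terraformer-'); // scratch dir for terraformer output
  const outputDir = fs.mkdtempSync('cd-tools-output-');    // dir holding the rewritten .tf files

  // 1. Export the source toolchain with terraformer.
  setTerraformerEnv(process.env.IC_API_KEY, '<source-toolchain-id>', true);
  await runTerraformerImport('us-south', tempDir, /* isCompact */ true, /* verbosity */ 1);

  // 2. Rewrite the generated HCL for the target region, name, and resource group.
  await setupTerraformFiles({
    token: '<iam-token>', srcRegion: 'us-south', targetRegion: 'eu-de',
    targetTag: 'copied', targetToolchainName: 'my-toolchain-copy', targetRgId: '<resource-group-id>',
    disableTriggers: true, isCompact: true, outputDir, tempDir,
    moreTfResources: { github_integrated: [] }, gritMapping: {}, skipUserConfirmation: false,
  });

  // 3. Apply the rewritten configuration and read back the new toolchain ID.
  setTerraformEnv(1);
  await runTerraformInit(outputDir);
  console.log('resources to create:', await getNumResourcesPlanned(outputDir));
  await runTerraformApply(false, outputDir, 1);
  console.log('resources created:', await getNumResourcesCreated(outputDir));
  console.log('new toolchain id:', await getNewToolchainId(outputDir));
}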
package/cmd/utils/utils.js
@@ -0,0 +1,128 @@
/**
 * Licensed Materials - Property of IBM
 * (c) Copyright IBM Corporation 2025. All Rights Reserved.
 *
 * Note to U.S. Government Users Restricted Rights:
 * Use, duplication or disclosure restricted by GSA ADP Schedule
 * Contract with IBM Corp.
 */

import * as readline from 'node:readline/promises';
import { logger } from './logger.js';
import { VAULT_REGEX } from '../../config.js';

export function parseEnvVar(name) {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Environment variable '${name}' is required but not set`);
  }
  return value;
};
export async function promptUserConfirmation(question, expectedAns, exitMsg) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });

  const fullPrompt = question + `\n\nOnly '${expectedAns}' will be accepted to proceed.\n\nEnter a value: `;
  const answer = await rl.question(fullPrompt);

  logger.dump(fullPrompt + '\n' + answer + '\n');

  if (answer.toLowerCase().trim() !== expectedAns) {
    logger.print('\n' + exitMsg);
    rl.close();
    await logger.close();
    process.exit(1);
  }

  rl.close();
  logger.print('\n');
}
export async function promptUserInput(question, initialInput, validationFn) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
    prompt: question
  });

  let answer;

  rl.on('SIGINT', async () => {
    logger.print('\n' + 'Received SIGINT signal');
    await logger.close();
    process.exit(1);
  });

  rl.prompt(true);
  rl.write(initialInput);

  for await (const ans of rl) {
    try {
      logger.dump(question + ans + '\n\n');
      await validationFn(ans.trim());
      answer = ans;
      break;
    } catch (e) {
      // loop
      logger.print('Validation failed...', e.message, '\n');

      rl.prompt(true);
      rl.write(initialInput);
    }
  }

  rl.close();
  return answer.trim();
}
export function replaceUrlRegion(inputUrl, srcRegion, targetRegion) {
  if (!inputUrl) return '';

  try {
    const url = new URL(inputUrl);

    url.host = url.host.split('.').map(i => i === srcRegion ? targetRegion : i).join('.');
    return url.toString();
  } catch {
    return '';
  }
}
/**
 * Decomposes a CRN into its parts from the defined structure:
 * crn:v1:{cname}:{ctype}:{service-name}:{location}:a/{IBM-account}:{service-instance}:{resource-type}:{resource}
 *
 * @param {String} crn - The crn to decompose.
 **/
export function decomposeCrn (crn) {
  const crnParts = crn.split(':');

  // Remove the 'a/' segment.
  let accountId = crnParts[6];
  if(accountId) {
    accountId = accountId.split('/')[1];
  }

  return {
    cname: crnParts[2],
    ctype: crnParts[3],
    serviceName: crnParts[4],
    location: crnParts[5],
    accountId: accountId,
    serviceInstance: crnParts[7],
    resourceType: crnParts[8],
    resource: crnParts[9]
  };
};
/**
 * Verifies that a value is a secret reference.
 *
 * @param {String} value - The value to verify.
 **/
export function isSecretReference (value) {
  return !!(VAULT_REGEX.find(r => r.test(value)));
};
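
For reference, the pure helpers above can be exercised directly. This is an illustrative sketch, not package code; the URL and CRN values are made up, and isSecretReference is omitted because it depends on the VAULT_REGEX patterns defined in config.js, which are not shown in this excerpt.

// Sketch only -- not part of the published package. Example values are made up.
import { decomposeCrn, replaceUrlRegion } from './utils.js';

// Swap the region segment of a Git Repos and Issue Tracking (GRIT) host.
replaceUrlRegion('https://us-south.git.cloud.ibm.com/my-group/my-repo.git', 'us-south', 'eu-de');
// -> 'https://eu-de.git.cloud.ibm.com/my-group/my-repo.git'

// Split a toolchain CRN into its named parts.
const parts = decomposeCrn('crn:v1:bluemix:public:toolchain:us-south:a/0123456789abcdef:my-toolchain-id::');
// parts.serviceName     === 'toolchain'
// parts.location        === 'us-south'
// parts.accountId       === '0123456789abcdef'
// parts.serviceInstance === 'my-toolchain-id'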