@ibm-cloud/cd-tools 1.3.3 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@
  */

  import { exit } from 'node:process';
- import { resolve } from 'node:path'
+ import { resolve } from 'node:path';
  import fs from 'node:fs';

  import { Command, Option } from 'commander';
@@ -16,7 +16,7 @@ import { Command, Option } from 'commander';
  import { parseEnvVar } from './utils/utils.js';
  import { logger, LOG_STAGES } from './utils/logger.js';
  import { setTerraformEnv, initProviderFile, setupTerraformFiles, runTerraformInit, getNumResourcesPlanned, runTerraformApply, getNumResourcesCreated, getNewToolchainId } from './utils/terraform.js';
- import { getAccountId, getBearerToken, getCdInstanceByRegion, getIamAuthPolicies, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
+ import { getAccountId, getBearerToken, getCdInstanceByRegion, getResourceGroupIdAndName, getToolchain } from './utils/requests.js';
  import { validatePrereqsVersions, validateTag, validateToolchainId, validateToolchainName, validateTools, validateOAuth, warnDuplicateName, validateGritUrl } from './utils/validate.js';
  import { importTerraform } from './utils/import-terraform.js';

@@ -54,7 +54,7 @@ const command = new Command('copy-toolchain')
  .option('-d, --terraform-dir <path>', '(Optional) The target local directory to store the generated Terraform (.tf) files')
  .option('-D, --dry-run', '(Optional) Skip running terraform apply; only generate the Terraform (.tf) files')
  .option('-f, --force', '(Optional) Force the copy toolchain command to run without user confirmation')
- .option('-S, --skip-s2s', '(Optional) Skip importing toolchain-generated service-to-service authorizations')
+ .option('-S, --skip-s2s', '(Optional) Skip creating toolchain-generated service-to-service authorizations')
  .option('-T, --skip-disable-triggers', '(Optional) Skip disabling Tekton pipeline Git or timed triggers. Note: This may result in duplicate pipeline runs')
  .option('-C, --compact', '(Optional) Generate all resources in a single resources.tf file')
  .option('-v, --verbose', '(Optional) Increase log output')
@@ -92,7 +92,6 @@ async function main(options) {
  let targetRgId;
  let targetRgName;
  let apiKey = options.apikey;
- let policyIds; // used to include s2s auth policies
  let moreTfResources = {};
  let gritMapping = {};

@@ -195,26 +194,6 @@ async function main(options) {

  collectGHE();

- const collectPolicyIds = async () => {
- moreTfResources['iam_authorization_policy'] = [];
-
- const res = await getIamAuthPolicies(bearer, accountId);
-
- policyIds = res['policies'].filter((p) => p.subjects[0].attributes.find(
- (a) => a.name === 'serviceInstance' && a.value === sourceToolchainId)
- );
- policyIds = policyIds.map((p) => p.id);
- };
-
- if (includeS2S) {
- try {
- collectPolicyIds();
- } catch (e) {
- logger.error('Something went wrong while fetching service-to-service auth policies', LOG_STAGES.setup);
- throw e;
- }
- }
-
  logger.info('Arguments and required packages verified, proceeding with copying toolchain...', LOG_STAGES.setup);

  // Set up temp folder
@@ -231,6 +210,9 @@ async function main(options) {
  exit(1);
  }

+ let toolchainTfName; // to target creating toolchain first
+ let s2sAuthTools; // to create s2s auth with script
+
  try {
  let nonSecretRefs;

@@ -242,7 +224,7 @@ async function main(options) {
  await initProviderFile(sourceRegion, TEMP_DIR);
  await runTerraformInit(TEMP_DIR, verbosity);

- nonSecretRefs = await importTerraform(bearer, apiKey, sourceRegion, sourceToolchainId, targetToolchainName, policyIds, TEMP_DIR, isCompact, verbosity);
+ [toolchainTfName, nonSecretRefs, s2sAuthTools] = await importTerraform(bearer, apiKey, sourceRegion, sourceToolchainId, targetToolchainName, TEMP_DIR, isCompact, verbosity);
  };

  await logger.withSpinner(
@@ -286,7 +268,8 @@ async function main(options) {
  tempDir: TEMP_DIR,
  moreTfResources: moreTfResources,
  gritMapping: gritMapping,
- skipUserConfirmation: skipUserConfirmation
+ skipUserConfirmation: skipUserConfirmation,
+ includeS2S: includeS2S
  });
  } catch (err) {
  if (err.message && err.stack) {
@@ -317,6 +300,27 @@ async function main(options) {

  let applyErrors = false;

+ if (includeS2S) {
+ const s2sRequests = s2sAuthTools.map((item) => {
+ return {
+ parameters: item['parameters'],
+ serviceId: item.tool_type_id,
+ env_id: `ibm:yp:${targetRegion}`
+ };
+ });
+ fs.writeFileSync(resolve(`${outputDir}/create-s2s.json`), JSON.stringify(s2sRequests));
+
+ // copy script
+ fs.copyFileSync(resolve('create-s2s-script.js'), resolve(`${outputDir}/create-s2s-script.js`), fs.constants.COPYFILE_EXCL);
+ }
+
+ // create toolchain, which invokes script to create s2s if applicable
+ await runTerraformApply(true, outputDir, verbosity, `ibm_cd_toolchain.${toolchainTfName}`).catch((err) => {
+ logger.error(err, LOG_STAGES.tf);
+ applyErrors = true;
+ });
+
+ // create the rest
  await runTerraformApply(skipUserConfirmation, outputDir, verbosity).catch((err) => {
  logger.error(err, LOG_STAGES.tf);
  applyErrors = true;
@@ -18,7 +18,7 @@ import { getRandChars, isSecretReference, normalizeName } from './utils.js';

  import { SECRET_KEYS_MAP, SUPPORTED_TOOLS_MAP } from '../../config.js';

- export async function importTerraform(token, apiKey, region, toolchainId, toolchainName, policyIds, dir, isCompact, verbosity) {
+ export async function importTerraform(token, apiKey, region, toolchainId, toolchainName, dir, isCompact, verbosity) {
  // STEP 1/2: set up terraform file with import blocks
  const importBlocks = []; // an array of objects representing import blocks, used in importBlocksToTf
  const additionalProps = {}; // maps resource name to array of { property/param, value }, used to override terraform import
@@ -41,6 +41,14 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  const toolchainResName = block.name;
  let pipelineResName;

+ const requiresS2S = [
+ 'ibm_cd_toolchain_tool_appconfig',
+ 'ibm_cd_toolchain_tool_eventnotifications',
+ 'ibm_cd_toolchain_tool_keyprotect',
+ 'ibm_cd_toolchain_tool_secretsmanager'
+ ];
+ let s2sAuthTools = [];
+
  // get list of tools
  const allTools = await getToolchainTools(token, toolchainId, region);
  for (const tool of allTools.tools) {
@@ -55,6 +63,10 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch

  toolIdMap[tool.id] = { type: SUPPORTED_TOOLS_MAP[tool.tool_type_id], name: toolResName };

+ if (requiresS2S.includes(SUPPORTED_TOOLS_MAP[tool.tool_type_id])) {
+ s2sAuthTools.push(tool);
+ }
+
  // overwrite hard-coded id with reference
  additionalProps[block.name] = [
  { property: 'toolchain_id', value: `\${ibm_cd_toolchain.${toolchainResName}.id}` },
@@ -63,19 +75,17 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  // check and add secret refs
  if (tool.tool_type_id in SECRET_KEYS_MAP) {
  SECRET_KEYS_MAP[tool.tool_type_id].forEach(({ key, tfKey, prereq, required }) => {
- if (prereq) {
- if (!prereq.values.includes(tool[prereq.key])) return;
- }
+ if (prereq && !prereq.values.includes(tool.parameters[prereq.key])) return; // missing prereq

  if (isSecretReference(tool.parameters[key])) {
  additionalProps[block.name].push({ param: tfKey, value: tool.parameters[key] });
  } else {
  const newFileName = SUPPORTED_TOOLS_MAP[tool.tool_type_id].split('ibm_')[1];
- if (required) {
+ if (required || prereq) {
  nonSecretRefs.push({
- resource_name: block.name,
- property_name: tfKey,
- file_name: isCompact ? 'resources.tf' : `${newFileName}.tf`
+ resource_name: block.name,
+ property_name: tfKey,
+ file_name: isCompact ? 'resources.tf' : `${newFileName}.tf`
  });
  additionalProps[block.name].push({ param: tfKey, value: `<${tfKey}>` });
  }
@@ -141,19 +151,6 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  }
  }

- // include s2s
- if (policyIds) {
- for (const policyId of policyIds) {
- block = importBlock(policyId, 'iam_authorization_policy', 'ibm_iam_authorization_policy');
- importBlocks.push(block);
-
- // overwrite hard-coded id with reference
- additionalProps[block.name] = [
- { property: 'source_resource_instance_id', value: `\${ibm_cd_toolchain.${toolchainResName}.id}` },
- ];
- }
- }
-
  importBlocksToTf(importBlocks, dir);

  if (!fs.existsSync(`${dir}/generated`)) fs.mkdirSync(`${dir}/generated`);
@@ -180,6 +177,15 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  // do nothing
  }

+ // handle missing worker, which breaks terraform
+ try {
+ if (newTfFileObj['resource'][key][k]['worker'][0]['id'] === null) {
+ delete newTfFileObj['resource'][key][k]['worker'];
+ }
+ } catch {
+ // do nothing
+ }
+
  // ignore null values
  for (const [k2, v2] of Object.entries(v[0])) {
  if (v2 === null) delete newTfFileObj['resource'][key][k][k2];
@@ -218,7 +224,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  }

  // add relevent references and depends_on
- if (key === 'ibm_cd_tekton_pipeline') {
+ if (key === 'ibm_cd_tekton_pipeline' && newTfFileObj['resource'][key][k]['worker']) {
  const workerId = newTfFileObj['resource'][key][k]['worker'][0]['id'];
  if (workerId != null && workerId != 'public' && workerId in toolIdMap) {
  newTfFileObj['resource'][key][k]['worker'][0]['id'] = `\${${toolIdMap[workerId].type}.${toolIdMap[workerId].name}.tool_id}`;
@@ -226,7 +232,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  } else if (key === 'ibm_cd_tekton_pipeline_property' || key === 'ibm_cd_tekton_pipeline_trigger_property') {
  const propValue = newTfFileObj['resource'][key][k]['value'];
  if (newTfFileObj['resource'][key][k]['type'] === 'integration' && propValue in toolIdMap) {
- newTfFileObj['resource'][key][k]['depends_on'] = [`\${${toolIdMap[propValue].type}.${toolIdMap[propValue].name}}`];
+ newTfFileObj['resource'][key][k]['value'] = `\${${toolIdMap[propValue].type}.${toolIdMap[propValue].name}.tool_id}`;
  }
  }

@@ -306,7 +312,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
  // remove draft
  if (fs.existsSync(`${dir}/generated/draft.tf`)) fs.rmSync(`${dir}/generated/draft.tf`, { recursive: true });

- return nonSecretRefs;
+ return [toolchainResName, nonSecretRefs, s2sAuthTools];
  }

  // objects have two keys, "id" and "to"
@@ -373,27 +373,6 @@ async function getGritGroupProject(privToken, region, groupId, projectName) {
  }
  }

- async function getIamAuthPolicies(bearer, accountId) {
- const apiBaseUrl = 'https://iam.cloud.ibm.com/v1';
- const options = {
- url: apiBaseUrl + '/policies',
- method: 'GET',
- headers: {
- 'Authorization': `Bearer ${bearer}`,
- 'Content-Type': 'application/json',
- },
- params: { account_id: accountId, type: 'authorization' },
- validateStatus: () => true
- };
- const response = await axios(options);
- switch (response.status) {
- case 200:
- return response.data;
- default:
- throw Error('Get auth policies failed');
- }
- }
-
  async function deleteToolchain(bearer, toolchainId, region) {
  const apiBaseUrl = `https://api.${region}.devops.cloud.ibm.com/toolchain/v2`;
  const options = {
@@ -430,6 +409,5 @@ export {
  getGritUserProject,
  getGritGroup,
  getGritGroupProject,
- getIamAuthPolicies,
  deleteToolchain
  }
@@ -50,7 +50,7 @@ async function initProviderFile(targetRegion, dir) {
  return writeFilePromise(`${dir}/provider.tf`, jsonToTf(newProviderTfStr));
  }

- async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag, targetToolchainName, targetRgId, disableTriggers, isCompact, outputDir, tempDir, moreTfResources, gritMapping, skipUserConfirmation }) {
+ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag, targetToolchainName, targetRgId, disableTriggers, isCompact, outputDir, tempDir, moreTfResources, gritMapping, skipUserConfirmation, includeS2S }) {
  const promises = [];

  const writeProviderPromise = await initProviderFile(targetRegion, outputDir);
@@ -92,6 +92,7 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,
  // for converting legacy GHE tool integrations
  const hasGHE = moreTfResources['github_integrated'].length > 0;
  const repoToTfName = {};
+ const toolIdToTfName = {};
  const newConvertedTf = {};

  if (hasGHE) {
@@ -100,8 +101,10 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,

  const tfName = `converted--githubconsolidated_${getRandChars(4)}`;

  repoToTfName[gitUrl] = tfName;
+ toolIdToTfName[t['id']] = tfName;
  newConvertedTf[tfName] = {
  toolchain_id: `\${ibm_cd_toolchain.${newTcId}.id}`,
+ name: t['name'],
  initialization: [{
  auto_init: 'false',
  blind_connection: 'false',
@@ -203,8 +206,9 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,

  if (isCompact || resourceName === 'ibm_cd_toolchain') {
  if (targetTag) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['tags'] = [
- ...newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['tags'] ?? [],
- targetTag
+ Array.from(new Set( // uniqueness
+ (newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['tags'] ?? []).concat([targetTag])
+ ))
  ];
  if (targetToolchainName) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['name'] = targetToolchainName;
  if (targetRgId) newTfFileObj['resource']['ibm_cd_toolchain'][newTcId]['resource_group_id'] = targetRgId;
@@ -286,6 +290,46 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,
  }
  }

+ // add references to converted GHE integrations
+ if (isCompact || resourceName === 'ibm_cd_tekton_pipeline_property') {
+ for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_property'])) {
+ try {
+ if (v['type'] === 'integration') {
+ const thisValue = v['value'];
+
+ if (thisValue in toolIdToTfName) {
+ const thisTfName = toolIdToTfName[thisValue];
+ newTfFileObj['resource']['ibm_cd_tekton_pipeline_property'][k]['value'] = `\${ibm_cd_toolchain_tool_githubconsolidated.${thisTfName}.tool_id}`;
+ }
+ }
+ }
+ catch {
+ // do nothing
+ }
+
+ }
+ }
+
+ // add references to converted GHE integrations
+ if (isCompact || resourceName === 'ibm_cd_tekton_pipeline_trigger_property') {
+ for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger_property'])) {
+ try {
+ if (v['type'] === 'integration') {
+ const thisValue = v['value'];
+
+ if (thisValue in toolIdToTfName) {
+ const thisTfName = toolIdToTfName[thisValue];
+ newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger_property'][k]['value'] = `\${ibm_cd_toolchain_tool_githubconsolidated.${thisTfName}.tool_id}`;
+ }
+ }
+ }
+ catch {
+ // do nothing
+ }
+
+ }
+ }
+
  if (isCompact || resourceName === 'ibm_cd_toolchain_tool_githubconsolidated') {
  if (hasGHE) {
  newTfFileObj['resource']['ibm_cd_toolchain_tool_githubconsolidated'] = {
@@ -296,7 +340,8 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,
  }

  const newTfFileObjStr = JSON.stringify(newTfFileObj);
- const newTfFile = replaceDependsOn(jsonToTf(newTfFileObjStr));
+ let newTfFile = replaceDependsOn(jsonToTf(newTfFileObjStr));
+ if (includeS2S && (isCompact || resourceName === 'ibm_cd_toolchain')) newTfFile = addS2sScriptToToolchainTf(newTfFile);
  const copyResourcesPromise = writeFilePromise(`${outputDir}/${fileName}`, newTfFile);
  promises.push(copyResourcesPromise);
  }
@@ -339,11 +384,14 @@ async function getNumResourcesPlanned(dir) {
  };
  }

- async function runTerraformApply(skipTfConfirmation, outputDir, verbosity) {
+ async function runTerraformApply(skipTfConfirmation, outputDir, verbosity, target) {
  let command = 'terraform apply';
  if (skipTfConfirmation || verbosity === 0) {
  command = 'terraform apply -auto-approve';
  }
+ if (target) {
+ command += ` -target="${target}"`
+ }

  const child = child_process.spawn(command, {
  cwd: `${outputDir}`,
@@ -423,6 +471,25 @@ function replaceDependsOn(str) {
  }
  }

+ function addS2sScriptToToolchainTf(str) {
+ const provisionerStr = (tfName) => `\n\n provisioner "local-exec" {
+ command = "node create-s2s-script.js"
+ environment = {
+ IBMCLOUD_API_KEY = var.ibmcloud_api_key
+ TARGET_TOOLCHAIN_ID = ibm_cd_toolchain.${tfName}.id
+ }\n }`
+ try {
+ if (typeof str === 'string') {
+ const pattern = /^resource "ibm_cd_toolchain" "([a-z0-9_-]*)" \{$\n((.|\n)*)\n^\}$/gm;
+
+ // get rid of the quotes
+ return str.replace(pattern, (match, s1, s2) => `resource "ibm_cd_toolchain" "${s1}" {\n${s2}${provisionerStr(s1)}\n}`);
+ }
+ } catch {
+ return str;
+ }
+ }
+
  export {
  setTerraformEnv,
  initProviderFile,
package/config.js CHANGED
@@ -167,9 +167,9 @@ const SECRET_KEYS_MAP = {
  'nexus': [
  { key: 'token', tfKey: 'token' },
  ],
- 'pagerduty': [
- { key: 'service_key', tfKey: 'service_key', required: true },
- ],
+ // 'pagerduty': [
+ // { key: 'service_key', tfKey: 'service_key', required: true },
+ // ],
  'private_worker': [
  { key: 'workerQueueCredentials', tfKey: 'worker_queue_credentials', required: true },
  ],
@@ -205,7 +205,7 @@ const SUPPORTED_TOOLS_MAP = {
  'keyprotect': 'ibm_cd_toolchain_tool_keyprotect',
  'nexus': 'ibm_cd_toolchain_tool_nexus',
  'customtool': 'ibm_cd_toolchain_tool_custom',
- 'pagerduty': 'ibm_cd_toolchain_tool_pagerduty',
+ // 'pagerduty': 'ibm_cd_toolchain_tool_pagerduty',
  'saucelabs': 'ibm_cd_toolchain_tool_saucelabs',
  'secretsmanager': 'ibm_cd_toolchain_tool_secretsmanager',
  'security_compliance': 'ibm_cd_toolchain_tool_securitycompliance',
@@ -0,0 +1,119 @@
+ /**
+ * Licensed Materials - Property of IBM
+ * (c) Copyright IBM Corporation 2025. All Rights Reserved.
+ *
+ * Note to U.S. Government Users Restricted Rights:
+ * Use, duplication or disclosure restricted by GSA ADP Schedule
+ * Contract with IBM Corp.
+ */
+
+ import fs from 'node:fs';
+ import { resolve } from 'node:path';
+
+ const API_KEY = process.env['IBMCLOUD_API_KEY'];
+ if (!API_KEY) throw Error(`Missing 'IBMCLOUD_API_KEY'`);
+
+ const TC_ID = process.env['TARGET_TOOLCHAIN_ID'];
+ if (!TC_ID) throw Error(`Missing 'TARGET_TOOLCHAIN_ID'`);
+
+ const INPUT_PATH = 'create-s2s.json';
+ const CLOUD_PLATFORM = 'https://cloud.ibm.com';
+ const IAM_BASE_URL = 'https://iam.cloud.ibm.com';
+
+ async function getBearer() {
+ const url = `${IAM_BASE_URL}/identity/token`;
+
+ const params = new URLSearchParams();
+ params.append('grant_type', 'urn:ibm:params:oauth:grant-type:apikey');
+ params.append('apikey', API_KEY);
+ params.append('response_type', 'cloud_iam');
+
+ try {
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/x-www-form-urlencoded'
+ },
+ body: params
+ });
+
+ if (!response.ok) {
+ throw new Error(`Response status: ${response.status}, ${response.statusText}`);
+ }
+
+ console.log(`GETTING BEARER TOKEN... ${response.status}, ${response.statusText}`);
+
+ return (await response.json()).access_token;
+ } catch (error) {
+ console.error(error.message);
+ }
+ }
+
+ /* expecting item as an object with the format of:
+ {
+ "parameters": {
+ "name": "",
+ "integration-status": "",
+ "instance-id-type": "",
+ "region": "",
+ "resource-group": "",
+ "instance-name": "",
+ "instance-crn": "",
+ "setup-authorization-type": ""
+ },
+ "toolchainId": "",
+ "serviceId": "",
+ "env_id": ""
+ }
+ */
+
+ async function createS2sAuthPolicy(item) {
+ const url = `${CLOUD_PLATFORM}/devops/setup/api/v2/s2s_authorization?${new URLSearchParams({
+ toolchainId: TC_ID,
+ serviceId: item['serviceId'],
+ env_id: item['env_id']
+ }).toString()}`;
+
+ const data = JSON.stringify({
+ 'parameters': {
+ 'name': item['parameters']['name'],
+ 'integration-status': '',
+ 'instance-id-type': item['parameters']['instance-id-type'],
+ 'region': item['parameters']['region'],
+ 'resource-group': item['parameters']['resource-group'],
+ 'instance-name': item['parameters']['instance-name'],
+ 'instance-crn': item['parameters']['instance-crn'],
+ 'setup-authorization-type': 'select'
+ }
+ });
+
+ try {
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ 'Authorization': `Bearer ${bearer}`,
+ 'Content-Type': 'application/json',
+ },
+ body: data,
+ });
+
+ if (!response.ok) {
+ throw new Error(`Response status: ${response.status}, ${response.statusText}`);
+ }
+
+ console.log(`CREATING AUTH POLICY... ${response.status}, ${response.statusText}`);
+ } catch (error) {
+ console.error(error.message);
+ }
+ }
+
+ // main
+
+ const bearer = await getBearer();
+
+ const inputArr = JSON.parse(fs.readFileSync(resolve(INPUT_PATH)));
+
+ inputArr.forEach(async (item) => {
+ await createS2sAuthPolicy(item);
+ });
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ibm-cloud/cd-tools",
- "version": "1.3.3",
+ "version": "1.4.0",
  "description": "Tools and utilities for the IBM Cloud Continuous Delivery service and resources",
  "repository": {
  "type": "git",