@ibm-cloud/cd-tools 1.8.1 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cmd/copy-toolchain.js +4 -0
- package/cmd/direct-transfer.js +101 -3
- package/cmd/utils/terraform.js +1 -1
- package/create-s2s-script.js +14 -10
- package/package.json +1 -1
package/cmd/copy-toolchain.js
CHANGED
@@ -324,6 +324,9 @@ async function main(options) {
     // create toolchain, which invokes script to create s2s if applicable
     await runTerraformApply(true, outputDir, verbosity, `ibm_cd_toolchain.${toolchainTfName}`);
 
+    const hasS2SFailures = fs.existsSync(resolve(`${outputDir}/.s2s-script-failures`));
+    if (hasS2SFailures) logger.warn('\nWarning! One or more service-to-service auth policies could not be created!\n');
+
     // create the rest
     await runTerraformApply(skipUserConfirmation, outputDir, verbosity).catch((err) => {
       logger.error(err, LOG_STAGES.tf);
@@ -335,6 +338,7 @@ async function main(options) {
 
     logger.print('\n');
     logger.info(`Toolchain "${sourceToolchainData['name']}" from ${sourceRegion} was cloned to "${targetToolchainName ?? sourceToolchainData['name']}" in ${targetRegion} ${applyErrors ? 'with some errors' : 'successfully'}, with ${numResourcesCreated} / ${numResourcesPlanned} resources created!`, LOG_STAGES.info);
+    if (hasS2SFailures) logger.warn('One or more service-to-service auth policies could not be created, see .s2s-script-failures for more details.');
    if (newTcId) logger.info(`See cloned toolchain: https://${CLOUD_PLATFORM}/devops/toolchains/${newTcId}?env_id=ibm:yp:${targetRegion}`, LOG_STAGES.info, true);
   } else {
     logger.info(`DRY_RUN: ${dryRun}, skipping terraform apply...`, LOG_STAGES.tf);
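
The change above is one half of a marker-file handshake: the s2s script (create-s2s-script.js, further down in this diff) writes .s2s-script-failures when any policy fails, and the copy command simply probes for that file after the first apply. A minimal sketch of the check, with the output directory and logger stubbed (the stubs are illustrative, not package code):

const fs = require('fs');
const { resolve } = require('path');

// Stand-in for the CLI's logger; the real one comes from the package's utilities.
const logger = { warn: (msg) => console.warn(msg) };

function warnOnS2sFailures(outputDir) {
  // The s2s script leaves this marker behind when one or more policies fail.
  const hasS2SFailures = fs.existsSync(resolve(`${outputDir}/.s2s-script-failures`));
  if (hasS2SFailures) logger.warn('\nWarning! One or more service-to-service auth policies could not be created!\n');
  return hasS2SFailures;
}

warnOnS2sFailures('.');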
package/cmd/direct-transfer.js
CHANGED
@@ -207,6 +207,20 @@ class GitLabClient {
 
     return all;
   }
+
+  async getGroupByFullPath(fullPath) {
+    const encoded = encodeURIComponent(fullPath);
+    const resp = await this.client.get(`/groups/${encoded}`);
+    return resp.data;
+  }
+
+  async listBulkImports({ page = 1, perPage = 50 } = {}) {
+    const resp = await getWithRetry(this.client, `/bulk_imports`, { page, per_page: perPage });
+    return {
+      imports: resp.data || [],
+      nextPage: Number(resp.headers?.['x-next-page'] || 0),
+    };
+  }
 }
 
 async function promptUser(name) {
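
listBulkImports follows GitLab's offset pagination: the next page number comes from the x-next-page response header, which is empty on the last page (hence nextPage becomes 0). A short consumer sketch under that assumption, given a GitLabClient-like destination (the helper name and page limit are illustrative, not package code):

// Walks up to `maxPages` pages of bulk imports and collects the in-flight ones.
async function findActiveBulkImports(destination, maxPages = 3) {
  const active = [];
  let page = 1;
  for (let p = 0; p < maxPages; p++) {
    const { imports, nextPage } = await destination.listBulkImports({ page, perPage: 50 });
    active.push(...imports.filter((bi) => ['created', 'started'].includes(bi?.status)));
    if (!nextPage) break; // x-next-page was empty: last page reached
    page = nextPage;
  }
  return active;
}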
@@ -361,6 +375,84 @@ function formatBulkImportProgressLine(importStatus, summary) {
   return parts.join(' | ');
 }
 
+function buildGroupUrl(base, path) {
+  try {
+    return new URL(path.replace(/^\//, ''), base).toString();
+  } catch {
+    return null;
+  }
+}
+
+function isGroupEntity(e) {
+  return e?.source_type === 'group_entity' || e?.entity_type === 'group_entity' || e?.entity_type === 'group';
+}
+
+async function handleBulkImportConflict({destination, destUrl, sourceGroupFullPath, destinationGroupPath, importResErr}) {
+  const historyUrl = buildGroupImportHistoryUrl(destUrl);
+  const groupUrl = buildGroupUrl(destUrl, `/groups/${destinationGroupPath}`);
+  const fallback = () => {
+    console.log(`\nDestination group already exists.`);
+    if (groupUrl) console.log(`Group: ${groupUrl}`);
+    if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+    process.exit(0);
+  };
+
+  try {
+    await destination.getGroupByFullPath(destinationGroupPath);
+  } catch {
+    fallback();
+  }
+
+  try {
+    const IMPORT_PAGES = 3;
+    const ENTITY_PAGES = 2;
+
+    let page = 1;
+    for (let p = 0; p < IMPORT_PAGES; p++) {
+      const { imports, nextPage } = await destination.listBulkImports({ page, perPage: 50 });
+
+      for (const bi of imports) {
+        if (!bi?.id) continue;
+
+        const status = bi.status;
+        if (!['created', 'started', 'finished'].includes(status)) continue;
+
+        const entities = await destination.getBulkImportEntitiesAll(bi.id, { perPage: 100, maxPages: ENTITY_PAGES });
+
+        const matchesThisGroup = entities.some(e =>
+          isGroupEntity(e) &&
+          e.source_full_path === sourceGroupFullPath &&
+          (e.destination_full_path === destinationGroupPath || e.destination_slug === destinationGroupPath)
+        );
+
+        if (!matchesThisGroup) continue;
+
+        if (status === 'created' || status === 'started') {
+          console.log(`\nGroup is already in migration...`);
+          console.log(`Bulk import ID: ${bi.id}`);
+          if (groupUrl) console.log(`Migrated group: ${groupUrl}`);
+          if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+          process.exit(0);
+        }
+
+        console.log(`\nConflict detected: ${importResErr}`);
+        console.log(`Please specify a new group name using -n, --new-name <n> when trying again`);
+        console.log(`\nGroup already migrated.`);
+        if (groupUrl) console.log(`Migrated group: ${groupUrl}`);
+        if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+        process.exit(0);
+      }
+
+      if (!nextPage) break;
+      page = nextPage;
+    }
+
+    fallback();
+  } catch {
+    fallback();
+  }
+}
+
 async function directTransfer(options) {
   const sourceUrl = validateAndConvertRegion(options.sourceRegion);
   const destUrl = validateAndConvertRegion(options.destRegion);
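
buildGroupUrl joins via the WHATWG URL constructor, treating the path as relative to the base (hence the leading-slash strip) and returning null when the base is not a valid URL. A self-contained usage sketch (the host is illustrative):

function buildGroupUrl(base, path) {
  try {
    return new URL(path.replace(/^\//, ''), base).toString();
  } catch {
    return null; // e.g. the base is not a valid absolute URL
  }
}

console.log(buildGroupUrl('https://gitlab.example.com/', '/groups/my-team'));
// -> https://gitlab.example.com/groups/my-team
console.log(buildGroupUrl('not a url', '/groups/my-team'));
// -> null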
@@ -418,9 +510,13 @@ async function directTransfer(options) {
       console.log(`Bulk import request succeeded!`);
       console.log(`Bulk import initiated successfully (ID: ${importRes.data?.id})`);
     } else if (importRes.conflict) {
-
-
-
+      await handleBulkImportConflict({
+        destination,
+        destUrl,
+        sourceGroupFullPath: sourceGroup.full_path,
+        destinationGroupPath,
+        importResErr: importRes.error
+      });
     }
   } catch (error) {
     console.log(`Bulk import request failed - ${error.message}`);
@@ -506,6 +602,8 @@ async function directTransfer(options) {
       console.log(`${e.source_type}: ${e.source_full_path} (${e.status})`);
     });
   }
+  const migratedGroupUrl = buildGroupUrl(destUrl, `/groups/${destinationGroupPath}`);
+  if (migratedGroupUrl) console.log(`\nMigrated group: ${migratedGroupUrl}`);
 
   return 0;
 } else {
package/cmd/utils/terraform.js
CHANGED
@@ -492,7 +492,7 @@ function replaceDependsOn(str) {
 function addS2sScriptToToolchainTf(str) {
   const provisionerStr = (tfName) => `\n\n  provisioner "local-exec" {
     command = "node create-s2s-script.cjs"
-    on_failure =
+    on_failure = continue
     environment = {
       IBMCLOUD_API_KEY = var.ibmcloud_api_key
       TARGET_TOOLCHAIN_ID = ibm_cd_toolchain.${tfName}.id
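
The one-word fix matters because Terraform's local-exec provisioner fails the resource by default when the command exits non-zero; with on_failure = continue the toolchain resource still succeeds and the failure surfaces through the .s2s-script-failures marker instead. A sketch of roughly what the generated block renders to for a hypothetical toolchain resource name (the package's full environment map is longer; this is abbreviated):

// Renders an abbreviated provisioner block, mirroring addS2sScriptToToolchainTf.
const provisionerStr = (tfName) => `
  provisioner "local-exec" {
    command    = "node create-s2s-script.cjs"
    on_failure = continue
    environment = {
      IBMCLOUD_API_KEY    = var.ibmcloud_api_key
      TARGET_TOOLCHAIN_ID = ibm_cd_toolchain.${tfName}.id
    }
  }`;

console.log(provisionerStr('toolchain_instance')); // 'toolchain_instance' is a hypothetical resource name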
package/create-s2s-script.js
CHANGED
@@ -23,7 +23,8 @@ if (!CLOUD_PLATFORM) throw Error(`Missing 'IBMCLOUD_PLATFORM'`);
 const IAM_BASE_URL = process.env['IAM_BASE_URL'] || 'https://iam.cloud.ibm.com';
 if (!IAM_BASE_URL) throw Error(`Missing 'IAM_BASE_URL'`);
 
-const INPUT_PATH = 'create-s2s.json';
+const INPUT_PATH = resolve('create-s2s.json');
+const ERROR_PATH = resolve('.s2s-script-failures');
 
 async function getBearer() {
   const url = `${IAM_BASE_URL}/identity/token`;
@@ -44,14 +45,12 @@ async function getBearer() {
     });
 
     if (!response.ok) {
-      throw new Error(`
+      throw new Error(`Failed to get bearer token with status: ${response.status}, ${response.statusText}`);
     }
 
-    console.log(`GETTING BEARER TOKEN... ${response.status}, ${response.statusText}`);
-
     return (await response.json()).access_token;
   } catch (error) {
-    console.error(error.message);
+    console.error(`Failed to get bearer token: ${error.message}`);
   }
 }
 
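
For context on what getBearer wraps: IBM Cloud IAM exchanges an API key for a bearer token via POST to {IAM_BASE_URL}/identity/token using the apikey grant type. A hedged, self-contained sketch of that exchange with the improved error message from the diff; the request body details are an assumption based on the public IAM API, since the package's request code is not part of this diff:

const IAM_BASE_URL = process.env['IAM_BASE_URL'] || 'https://iam.cloud.ibm.com';

async function getBearerSketch(apiKey) {
  const response = await fetch(`${IAM_BASE_URL}/identity/token`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded', Accept: 'application/json' },
    // Standard IBM Cloud IAM apikey grant; assumed here, not taken from the diff.
    body: new URLSearchParams({
      grant_type: 'urn:ibm:params:oauth:grant-type:apikey',
      apikey: apiKey,
    }),
  });
  if (!response.ok) {
    throw new Error(`Failed to get bearer token with status: ${response.status}, ${response.statusText}`);
  }
  return (await response.json()).access_token;
}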
@@ -106,17 +105,20 @@ async function createS2sAuthPolicy(bearer, item) {
     if (!response.ok) {
       return Promise.reject(`Failed to create service-to-service authorization policy for ${item['serviceId']} '${item['parameters']['label'] ?? item['parameters']['name']}' with status: ${response.status} ${response.statusText}`);
     }
-
-    console.log(`CREATING AUTH POLICY... ${response.status}, ${response.statusText}`);
   } catch (error) {
-    return Promise.reject(error.message);
+    return Promise.reject(`Failed to create service-to-service authorization policy for ${item['serviceId']} '${error.message}`);
   }
 }
 
 // main
 
 getBearer().then(async (bearer) => {
-
+  // remove temp file from previous runs
+  if (fs.existsSync(ERROR_PATH)) {
+    fs.rmSync(ERROR_PATH);
+  }
+
+  const inputArr = JSON.parse(fs.readFileSync(INPUT_PATH));
 
   const promises = [];
   inputArr.forEach((item) => {
@@ -126,7 +128,9 @@ getBearer().then(async (bearer) => {
   try {
     await Promise.all(promises);
   } catch (e) {
-    console.error(e)
+    console.error(e);
+    // create temp file on error
+    fs.writeFileSync(ERROR_PATH, e);
     exit(1);
   }
 });
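
Taken together, the s2s failure path now works like this: a rejected policy bubbles its message up through Promise.all, the script writes that message to .s2s-script-failures and exits 1, on_failure = continue keeps terraform apply going, and copy-toolchain.js warns when it finds the marker. A minimal, self-contained sketch of the write-then-exit step with the policy calls stubbed (the stubs are illustrative, not package code):

const fs = require('fs');
const { resolve } = require('path');
const { exit } = require('process');

const ERROR_PATH = resolve('.s2s-script-failures');

// Stand-ins for createS2sAuthPolicy calls; one of them fails.
const promises = [
  Promise.resolve(),
  Promise.reject(`Failed to create service-to-service authorization policy for toolchain 'sample'`),
];

(async () => {
  if (fs.existsSync(ERROR_PATH)) fs.rmSync(ERROR_PATH); // clear a stale marker from a previous run
  try {
    await Promise.all(promises);
  } catch (e) {
    console.error(e);
    fs.writeFileSync(ERROR_PATH, e); // marker later picked up by copy-toolchain.js
    exit(1);
  }
})();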