@ibm-cloud/cd-tools 1.12.0 → 1.13.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -283,7 +283,8 @@ async function main(options) {
      moreTfResources: moreTfResources,
      gritMapping: gritMapping,
      skipUserConfirmation: skipUserConfirmation,
-     includeS2S: includeS2S
+     includeS2S: includeS2S,
+     timeSuffix: TIME_SUFFIX
    });
  } catch (err) {
    if (err.message && err.stack) {
@@ -332,8 +333,11 @@ async function main(options) {
     // create toolchain, which invokes script to create s2s if applicable
     await runTerraformApply(true, outputDir, verbosity, `ibm_cd_toolchain.${toolchainTfName}`);

-    const hasS2SFailures = fs.existsSync(resolve(`${outputDir}/.s2s-script-failures`));
-    if (hasS2SFailures) logger.warn('\nWarning! One or more service-to-service auth policies could not be created!\n', LOG_STAGES.setup, true);
+    const hasS2SFailures = fs.existsSync(resolve(`${outputDir}/.s2s-script-failures-${TIME_SUFFIX}`));
+    if (hasS2SFailures) {
+      logger.print(''); // newline for spacing
+      logger.warn(`Warning! One or more service-to-service auth policies could not be created! See ${outputDir}/.s2s-script-failures-${TIME_SUFFIX} for more details.\n`, LOG_STAGES.setup, true);
+    }

     // create the rest
     await runTerraformApply(skipUserConfirmation, outputDir, verbosity).catch((err) => {
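Note: TIME_SUFFIX is defined outside the hunks shown in this diff. A minimal sketch of how such a run-scoped suffix could be produced (the exact format here is an assumption, not the package's actual code):

    // Hypothetical: one suffix per CLI run, so each clone attempt writes its own
    // failure log instead of overwriting a shared '.s2s-script-failures' file.
    const TIME_SUFFIX = new Date().toISOString().replace(/[:.]/g, '-'); // e.g. '2025-01-30T12-30-00-000Z'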
@@ -346,7 +350,7 @@ async function main(options) {

     if (verbosity >= 1) logger.print(''); // newline for spacing
     logger.info(`Toolchain "${sourceToolchainData['name']}" from ${sourceRegion} was cloned to "${targetToolchainName ?? sourceToolchainData['name']}" in ${targetRegion} ${applyErrors ? 'with some errors' : 'successfully'}, with ${numResourcesCreated} / ${numResourcesPlanned} resources created!`, LOG_STAGES.info, true);
-    if (hasS2SFailures) logger.warn('One or more service-to-service auth policies could not be created, see .s2s-script-failures for more details.');
+    if (hasS2SFailures) logger.warn(`One or more service-to-service auth policies could not be created, see ${outputDir}/.s2s-script-failures-${TIME_SUFFIX} for more details.`, '', true);
     if (newTcId) logger.info(`Cloned toolchain: https://${CLOUD_PLATFORM}/devops/toolchains/${newTcId}?env_id=ibm:yp:${targetRegion}`, LOG_STAGES.info, true);
   } else {
     logger.info(`DRY_RUN: ${dryRun}, skipping terraform apply...`, LOG_STAGES.tf);
@@ -9,10 +9,11 @@

 import { Command } from 'commander';
 import axios from 'axios';
-import readline from 'readline/promises';
 import { writeFile } from 'fs/promises';
 import { COPY_PROJECT_GROUP_DESC, SOURCE_REGIONS } from '../config.js';
 import { getWithRetry } from './utils/requests.js';
+import { logger, LOG_STAGES } from './utils/logger.js';
+import { promptUserYesNo } from './utils/utils.js';

 const HTTP_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes default

@@ -39,8 +40,9 @@ class GitLabClient {
     const toVisit = [groupId];
     const visited = new Set();

-    console.log(
-      `[DEBUG] Starting BFS project listing from group ${groupId} (maxProjects=${maxProjects}, maxRequests=${maxRequests})`
+    logger.debug(
+      `Starting BFS project listing from group ${groupId} (maxProjects=${maxProjects}, maxRequests=${maxRequests})`,
+      LOG_STAGES.setup
     );

     while (toVisit.length > 0) {
@@ -48,7 +50,7 @@ class GitLabClient {
       if (visited.has(currentGroupId)) continue;
       visited.add(currentGroupId);

-      console.log(`[DEBUG] Visiting group ${currentGroupId}. Remaining groups in queue: ${toVisit.length}`);
+      logger.debug(`Visiting group ${currentGroupId}. Remaining groups in queue: ${toVisit.length}`, LOG_STAGES.setup);

       // List projects for THIS group (no include_subgroups!)
       let projPage = 1;
@@ -56,7 +58,7 @@ class GitLabClient {

       while (hasMoreProjects) {
         if (requestCount >= maxRequests || projects.length >= maxProjects) {
-          console.warn(`[WARN] Stopping project traversal: requestCount=${requestCount}, projects=${projects.length}`);
+          logger.warn(`Stopping project traversal early: requestCount=${requestCount}, projects=${projects.length}`, LOG_STAGES.setup);
           return projects;
         }

@@ -82,9 +84,7 @@ class GitLabClient {

       while (hasMoreSubgroups) {
         if (requestCount >= maxRequests) {
-          console.warn(
-            `[WARN] Stopping subgroup traversal: requestCount=${requestCount}`
-          );
+          logger.warn(`Stopping subgroup traversal early: requestCount=${requestCount}`, LOG_STAGES.setup);
           return projects;
         }

@@ -110,7 +110,7 @@ class GitLabClient {
       }
     }

-    console.log(`[DEBUG] Finished BFS project listing. Total projects=${projects.length}, total requests=${requestCount}`);
+    logger.debug(`Finished BFS project listing. Total projects=${projects.length}, total requests=${requestCount}`, LOG_STAGES.setup);
     return projects;
   }

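For context, the listing these logging hunks instrument is a breadth-first traversal: each group's own projects are paged through (deliberately without include_subgroups), then its direct subgroups are queued for later visits. A standalone sketch of that shape against GitLab's documented REST endpoints (the get helper and caps are simplified assumptions, not this package's client):

    // Sketch of the BFS, assuming get(path, params) returns a parsed JSON array
    // from the GitLab REST API (the real client adds auth, retries, and a cap on
    // total requests, as the warnings above show).
    async function listProjectsBfs(get, rootGroupId, maxProjects = 10000) {
      const projects = [];
      const toVisit = [rootGroupId];
      const visited = new Set();

      while (toVisit.length > 0) {
        const groupId = toVisit.shift();
        if (visited.has(groupId)) continue;
        visited.add(groupId);

        // Page through this group's direct projects only (no include_subgroups).
        for (let page = 1; ; page++) {
          const batch = await get(`/groups/${groupId}/projects`, { page, per_page: 100 });
          projects.push(...batch);
          if (projects.length >= maxProjects) return projects;
          if (batch.length < 100) break; // last page
        }

        // Queue direct subgroups for later visits.
        for (let page = 1; ; page++) {
          const subs = await get(`/groups/${groupId}/subgroups`, { page, per_page: 100 });
          subs.forEach(s => toVisit.push(s.id));
          if (subs.length < 100) break; // last page
        }
      }
      return projects;
    }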
@@ -271,23 +271,6 @@ class GitLabClient {
   }
 }

-async function promptUser(name) {
-  const rl = readline.createInterface({
-    input: process.stdin,
-    output: process.stdout,
-  });
-
-  const answer = await rl.question(`Your new group name is ${name}. Are you sure? (Yes/No)`);
-
-  rl.close();
-
-  if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {
-    console.log("Proceeding...");
-  } else {
-    process.exit(0);
-  }
-}
-
 function validateAndConvertRegion(region) {
   if (!SOURCE_REGIONS.includes(region)) {
     throw new Error(
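The inline promptUser above moves to a shared promptUserYesNo helper in ./utils/utils.js, whose implementation is not part of this diff. Judging by the call site later in the file, it resolves to a boolean instead of exiting the process itself; a plausible sketch under that assumption:

    // Hypothetical shape of the shared helper: the caller decides what a "no"
    // means, rather than the prompt calling process.exit directly.
    import readline from 'readline/promises';

    export async function promptUserYesNo(question) {
      const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
      try {
        const answer = (await rl.question(`${question} (Yes/No) `)).trim().toLowerCase();
        return answer === 'yes' || answer === 'y';
      } finally {
        rl.close();
      }
    }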
@@ -322,8 +305,9 @@ async function generateUrlMappingFile({ destUrl, sourceGroup, destinationGroupPa
     encoding: 'utf8',
   });

-  console.log(`\nURL mapping JSON generated at: ${mappingFile}`);
-  console.log(`Total mapped projects: ${sourceProjects.length}`);
+  logger.print();
+  logger.info(`Created file mapping old project urls to new urls at: ${mappingFile}`, LOG_STAGES.info);
+  logger.info(`Total mapped projects: ${sourceProjects.length}`, LOG_STAGES.info);
 }

 function buildGroupImportHistoryUrl(destUrl) {
@@ -386,10 +370,12 @@ function summarizeBulkImportProgress(entities = []) {

   return {
     entityTotal,
+    entityFinished,
     entityDone,
     entityFailed,
     entityPct,
     projectTotal,
+    projectFinished,
     projectDone,
     projectFailed,
     projectPct,
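entityFinished and projectFinished are newly exposed on the summary object; the tallying itself happens above this hunk. A sketch of how such counts are typically derived from GitLab's bulk-import entity list (status and source_type values follow GitLab's bulk import API; the helper names are illustrative, not this package's code):

    // Sketch: tally bulk-import entities by status. In GitLab's bulk import API,
    // 'finished' and 'failed' are terminal statuses, and project entities carry
    // source_type 'project_entity'.
    function summarize(entities = []) {
      const countBy = (list, status) => list.filter(e => e.status === status).length;
      const projects = entities.filter(e => e.source_type === 'project_entity');
      return {
        entityTotal: entities.length,
        entityFinished: countBy(entities, 'finished'),
        entityFailed: countBy(entities, 'failed'),
        projectTotal: projects.length,
        projectFinished: countBy(projects, 'finished'),
        projectFailed: countBy(projects, 'failed'),
      };
    }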
@@ -435,9 +421,10 @@ async function handleBulkImportConflict({ destination, destUrl, sourceGroupFullP
   const historyUrl = buildGroupImportHistoryUrl(destUrl);
   const groupUrl = buildGroupUrl(destUrl, `/groups/${destinationGroupPath}`);
   const fallback = () => {
-    console.log(`\nDestination group already exists.`);
-    if (groupUrl) console.log(`Group: ${groupUrl}`);
-    if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+    logger.print();
+    logger.warn(`Destination group already exists.`, LOG_STAGES.import);
+    if (groupUrl) logger.info(`Group: ${groupUrl}`, LOG_STAGES.import);
+    if (historyUrl) logger.info(`Group import history: ${historyUrl}`, LOG_STAGES.import);
     process.exit(0);
   };

@@ -472,18 +459,21 @@ async function handleBulkImportConflict({ destination, destUrl, sourceGroupFullP
     if (!matchesThisGroup) continue;

     if (status === 'created' || status === 'started') {
-      console.log(`\nGroup is already in migration...`);
-      console.log(`Bulk import ID: ${bi.id}`);
-      if (groupUrl) console.log(`Migrated group: ${groupUrl}`);
-      if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+      logger.print();
+      logger.warn(`Group is already in migration...`, LOG_STAGES.import);
+      logger.info(`Bulk import ID: ${bi.id}`, LOG_STAGES.import);
+      if (groupUrl) logger.info(`Group URL: ${groupUrl}`, LOG_STAGES.import);
+      if (historyUrl) logger.info(`Group import history: ${historyUrl}`, LOG_STAGES.import);
       process.exit(0);
     }

-    console.log(`\nConflict detected: ${importResErr}`);
-    console.log(`Please specify a new group name using -n, --new-group-slug <n> when trying again`);
-    console.log(`\nGroup already migrated.`);
-    if (groupUrl) console.log(`Migrated group: ${groupUrl}`);
-    if (historyUrl) console.log(`Group import history: ${historyUrl}`);
+    logger.print();
+    logger.warn(`Conflict detected: ${importResErr}`, LOG_STAGES.import);
+    logger.info(`Tip: specify a new group name using -n, --new-group-slug <slug> and try again.`, LOG_STAGES.import);
+    logger.print();
+    logger.info(`Group already migrated.`, LOG_STAGES.import);
+    if (groupUrl) logger.info(`Group URL: ${groupUrl}`, LOG_STAGES.import);
+    if (historyUrl) logger.info(`Group import history: ${historyUrl}`, LOG_STAGES.import);
     process.exit(0);
   }

@@ -504,20 +494,20 @@ async function directTransfer(options) {
   const destination = new GitLabClient(destUrl, options.destToken);

   try {
-    console.log(`Fetching source group from ID: ${options.groupId}...`);
+    logger.info(`Fetching source group from ID: ${options.groupId}...`, LOG_STAGES.setup);
     let sourceGroup;
     try {
       sourceGroup = await source.getGroup(options.groupId);
     } catch (err) {
       if (err?.response?.status === 404) {
-        console.error(
+        logger.error(
           `Error: group "${options.groupId}" not found in source region "${options.sourceRegion}".\n` +
           `Tip: -g accepts numeric ID or full group path like "parent/subgroup".`
         );
         return 1;
       }

-      console.error(`Error: failed to fetch group "${options.groupId}": ${err?.message || err}`);
+      logger.error(`Error: failed to fetch group "${options.groupId}": ${err?.message || err}`, LOG_STAGES.setup);
       return 1;
     }

@@ -531,18 +521,20 @@ async function directTransfer(options) {
       maxProjects: 10000,
     });
   } catch (e) {
-    console.warn(`[WARN] GraphQL listing failed (${e.message}). Falling back to REST safe listing...`);
+    logger.warn(`GraphQL listing failed. Falling back to REST project listing...`, LOG_STAGES.setup);
+    logger.debug(`GraphQL error: ${e.message}`, LOG_STAGES.setup);
     sourceProjects = await source.getGroupProjects(sourceGroup.id);
   }

-  console.log(`Found ${sourceProjects.length} projects in source group`);
+  logger.info(`Found ${sourceProjects.length} projects in source group`, LOG_STAGES.setup);
   if (sourceProjects.length > 0) {
-    console.log('Projects to be migrated:');
-    sourceProjects.forEach(p => console.log(p.name_with_namespace || p.nameWithNamespace || p.fullPath));
+    logger.info('Projects to be migrated:', LOG_STAGES.setup);
+    sourceProjects.forEach(p => logger.print(p.name_with_namespace || p.nameWithNamespace || p.fullPath));
   }

   if (options.newGroupSlug) {
-    await promptUser(options.newGroupSlug);
+    const ok = await promptUserYesNo(`Your new group slug is "${options.newGroupSlug}". Proceed?`);
+    if (!ok) return 0;
   }

   // Generate URL mapping JSON before starting the migration
@@ -571,11 +563,12 @@ async function directTransfer(options) {
   let importRes = null;

   try {
+    logger.print();
+    logger.info(`Requesting bulk import request in '${options.destRegion}'...`, LOG_STAGES.request);
     importRes = await destination.bulkImport(requestPayload);
     if (importRes.success) {
       bulkImport = importRes.data;
-      console.log(`Bulk import request succeeded!`);
-      console.log(`Bulk import initiated successfully (ID: ${importRes.data?.id})`);
+      logger.success(`✔ Bulk import initiated successfully (ID: ${importRes.data?.id})`, LOG_STAGES.request);
     } else if (importRes.conflict) {
       await handleBulkImportConflict({
         destination,
@@ -586,11 +579,17 @@ async function directTransfer(options) {
       });
     }
   } catch (error) {
-    console.log(`Bulk import request failed - ${error.message}`);
+    logger.error(`✖ Bulk import request failed - ${error.message}`, LOG_STAGES.request);
     process.exit(0);
   }
+
+  logger.print();
+  const spinnerOff = process.env.DISABLE_SPINNER === 'true';
+  if (spinnerOff) {
+    logger.info('Waiting for bulk project import to complete...', LOG_STAGES.import);
+    logger.info('This may take time depending on the number and size of projects.', LOG_STAGES.import);
+  }

-  console.log('\nPolling bulk import status (adaptive: 1m→2m→3m→4m→5m, max 60 checks)...');
   const MAX_ATTEMPTS = 60;
   const POLLS_PER_STEP = 5;
   const MIN_INTERVAL_MIN = 1;
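These constants drive the adaptive schedule the removed log line described (1m→2m→3m→4m→5m, max 60 checks): the wait grows by a minute every POLLS_PER_STEP polls until it caps out. Assuming MAX_INTERVAL_MIN is 5, as that log line implies, the schedule works out as:

    // Wait before poll N (attempts are 0-based; no wait before the first poll):
    // attempts 1-4 wait 1 min, 5-9 wait 2 min, 10-14 wait 3 min, 15-19 wait 4 min,
    // and 20-59 are capped at 5 min — roughly 250 minutes of waiting across 60 polls.
    const waitMinutes = (attempts) => Math.min(1 + Math.floor(attempts / 5), 5);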
@@ -598,94 +597,121 @@ async function directTransfer(options) {

   let importStatus = 'created';
   let attempts = 0;
+  let entitiesAll = [];

-  while (!['finished', 'failed', 'timeout'].includes(importStatus) && attempts < MAX_ATTEMPTS) {
-    if (attempts > 0) {
-      const step = Math.floor(attempts / POLLS_PER_STEP);
-      const waitMin = Math.min(MIN_INTERVAL_MIN + step, MAX_INTERVAL_MIN);
+  const emit = (msg) => {
+    if (spinnerOff) logger.info(msg, LOG_STAGES.import);
+    else logger.updateSpinnerMsg(msg);
+  };
+
+  const waitStep = async () => {
+    const step = Math.floor(attempts / POLLS_PER_STEP);
+    const waitMin = Math.min(MIN_INTERVAL_MIN + step, MAX_INTERVAL_MIN);
+
+    if (options.verbose) emit(`Waiting ${waitMin} minute before next status check...`);
+    await new Promise(r => setTimeout(r, waitMin * 60000));
+  };
+
+  const pollBulkImport = async () => {
+    while (!['finished', 'failed', 'timeout'].includes(importStatus) && attempts < MAX_ATTEMPTS) {
+      if (attempts > 0) await waitStep();

-      console.log(`Waiting ${waitMin} minute before next status check...`);
-      await new Promise(resolve => setTimeout(resolve, waitMin * 60000));
-    }
-    try {
       const importDetails = await destination.getBulkImport(bulkImport.id);
       importStatus = importDetails.status;
+
       let progressLine;
       try {
-        const entitiesAll = await destination.getBulkImportEntitiesAll(bulkImport.id);
-        const summary = summarizeBulkImportProgress(entitiesAll);
-        progressLine = formatBulkImportProgressLine(importStatus, summary);
+        entitiesAll = await destination.getBulkImportEntitiesAll(bulkImport.id);
+        progressLine = formatBulkImportProgressLine(importStatus, summarizeBulkImportProgress(entitiesAll));
       } catch {
         progressLine = `Import status: ${importStatus} | Progress: (unable to fetch entity details)`;
       }

-      console.log(`[${new Date().toLocaleTimeString()}] ${progressLine}`);
+      emit(progressLine);

-      if (importStatus === 'finished') {
-        console.log('Bulk import completed successfully!');
-        break;
-      } else if (importStatus === 'failed') {
-        console.log('Bulk import failed!');
-        break;
-      }
-    } catch (e) {
-      console.error(`Error checking import status: ${e.message}`);
-      if (e.response?.status === 404) {
-        throw new Error('Bulk import not found - it may have been deleted');
-      }
+      if (importStatus === 'finished') return { importStatus, entitiesAll };
+      if (importStatus === 'failed') throw new Error('GitLab bulk import failed');
+
+      attempts++;
     }
-    attempts++;
-  }

-  if (attempts >= MAX_ATTEMPTS) {
-    const historyUrl = buildGroupImportHistoryUrl(destUrl);
+    if (attempts >= MAX_ATTEMPTS) {
+      const err = new Error('POLLING_TIMEOUT');
+      err.code = 'POLLING_TIMEOUT';
+      err.importStatus = importStatus;
+      throw err;
+    }

-    console.error('\nThe CLI has stopped polling for the GitLab bulk import.');
-    console.error('The migration itself may still be running inside GitLab — the CLI only waits for a limited time.');
-    console.error(`Last reported status for bulk import ${bulkImport.id}: ${importStatus}`);
+    return { importStatus, entitiesAll };
+  };

-    if (historyUrl) {
-      console.error('\nYou can continue monitoring this migration in the GitLab UI.');
-      console.error(`Group import history: ${historyUrl}`);
-    } else {
-      console.error('\nYou can continue monitoring this migration from the Group import history page in the GitLab UI.');
-    }
-    process.exit(0);
-  }
+  let pollResult;
+  try {
+    pollResult = await logger.withSpinner(
+      pollBulkImport,
+      'Waiting for bulk project import to complete... (may take some time)',
+      'Bulk import completed successfully!',
+      LOG_STAGES.import
+    );

-  const entities = await destination.getBulkImportEntities(bulkImport.id);
-  const finishedEntities = entities.filter(e => e.status === 'finished');
-  const failedEntities = entities.filter(e => e.status === 'failed');
+    if (spinnerOff) logger.success('Bulk import completed successfully!', LOG_STAGES.import);
+    importStatus = pollResult.importStatus;
+    entitiesAll = pollResult.entitiesAll?.length ? pollResult.entitiesAll : entitiesAll;
+  } catch (e) {
+    logger.failSpinner('✖ Bulk import did not complete');
+    logger.resetSpinner();
+
+    if (e?.code === 'POLLING_TIMEOUT') {
+      const historyUrl = buildGroupImportHistoryUrl(destUrl);
+
+      logger.print();
+      logger.error('The CLI has stopped polling for the GitLab bulk import.', LOG_STAGES.import);
+      logger.error('The migration itself may still be running inside GitLab — the CLI only waits for a limited time.', LOG_STAGES.import);
+      logger.error(`Last reported status for bulk import ${bulkImport.id}: ${e.importStatus}`, LOG_STAGES.import);
+
+      logger.print();
+      if (historyUrl) {
+        logger.info('You can continue monitoring this migration in the GitLab UI:', LOG_STAGES.import);
+        logger.info(`Group import history: ${historyUrl}`, LOG_STAGES.import);
+      } else {
+        logger.info('You can continue monitoring this migration from the Group import history page in the GitLab UI.', LOG_STAGES.import);
+      }
+      process.exit(0);
+    }

-  if (importStatus === 'finished' && finishedEntities.length > 0) {
-    console.log(`\nGroup migration completed successfully!`);
-    console.log(`Migration Results:`);
-    console.log(`Successfully migrated: ${finishedEntities.length} entities`);
-    console.log(`Failed: ${failedEntities.length} entities`);
+    throw e;
+  }

-    if (failedEntities.length > 0) {
-      console.log(`\nFailed entities:\n`);
-      failedEntities.forEach(e => {
-        console.log(`${e.source_type}: ${e.source_full_path} (${e.status})`);
+  const summary = summarizeBulkImportProgress(entitiesAll);
+
+  if (importStatus === 'finished' && summary.entityFinished > 0) {
+    const newGroupUrl = buildGroupUrl(destUrl, `/groups/${destinationGroupPath}`);
+
+    logger.print();
+    logger.success('✔ Project group copy completed successfully.', LOG_STAGES.import);
+    logger.info('Summary:', LOG_STAGES.import);
+    logger.info(`${sourceProjects.length} projects copied successfully`, LOG_STAGES.import);
+    logger.info(`${summary.entityFinished} entities copied successfully`, LOG_STAGES.import);
+    logger.info(`${summary.entityFailed} entities failed to copy`, LOG_STAGES.import);
+    if (newGroupUrl) logger.info(`New group URL: ${newGroupUrl}`, LOG_STAGES.import);
+
+    // show failed list only in verbose (or if failures exist)
+    if (summary.entityFailed > 0) {
+      logger.print();
+      logger.warn('Failed entities:', LOG_STAGES.import);
+      entitiesAll.filter(e => e.status === 'failed').forEach(e => {
+        logger.print(`- ${e.source_type}: ${e.source_full_path} (${e.status})`);
       });
     }
-    const migratedGroupUrl = buildGroupUrl(destUrl, `/groups/${destinationGroupPath}`);
-    if (migratedGroupUrl) console.log(`\nMigrated group: ${migratedGroupUrl}`);
-
     return 0;
   } else {
-    console.error('\nBulk import failed!');
-    if (failedEntities.length > 0) {
-      console.error('Failed entities:');
-      failedEntities.forEach(e => {
-        console.error(`${e.source_type}: ${e.source_full_path} (${e.status})`);
-      });
-    }
+    logger.print();
+    logger.error('✖ Bulk import failed!', LOG_STAGES.import);
     throw new Error('GitLab bulk import failed');
   }

 } catch (error) {
-  console.error(`Group migration failed: ${error.message}`);
+  logger.error(`Project group copy failed: ${error.message}`, LOG_STAGES.import);
   throw error;
 }
 }
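logger.withSpinner is internal to this package and not shown in the diff; the call site suggests a wrapper that runs an async function behind a spinner, marks success or failure, and resolves with the function's result. A hypothetical sketch of that shape, using the ora library purely for illustration (the real logger may differ):

    import ora from 'ora';

    // Hypothetical withSpinner-style wrapper: spin while fn runs, then mark the
    // outcome and propagate fn's result or error to the caller.
    async function withSpinner(fn, runningMsg, successMsg) {
      const spinner = ora(runningMsg).start();
      try {
        const result = await fn();
        spinner.succeed(successMsg);
        return result;
      } catch (err) {
        spinner.fail('Operation did not complete');
        throw err;
      }
    }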
@@ -699,9 +725,11 @@ const command = new Command('copy-project-group')
   .requiredOption('--dt, --dest-token <token>', 'A Git Repos and Issue Tracking personal access token from the target region. The api scope is required on the token.')
   .requiredOption('-g, --group-id <id>', 'The id of the group to copy from the source region (e.g. "1796019"), or the group name (e.g. "mygroup") for top-level groups. For sub-groups, a path is also allowed, e.g. "mygroup/subgroup"')
   .option('-n, --new-group-slug <slug>', '(Optional) Destination group URL slug (single path segment, e.g. "mygroup-copy"). Must be unique. Group display name remains the same as source.')
+  .option('-v, --verbose', 'Enable verbose output (debug logs + wait details)')
   .showHelpAfterError()
   .hook('preAction', cmd => cmd.showHelpAfterError(false)) // only show help during validation
   .action(async (options) => {
+    logger.setVerbosity(options.verbose ? 2 : 1);
     await directTransfer(options);
   });

@@ -280,7 +280,7 @@ async function main(options) {
   const commonProps = {
     toolchain_id: toolchainId,
     destination: {
-      is_private: false, // TODO: set this back to 'true' once 'otc-api' has the 'export_secret' endpoint, should always use SM private endpoint
+      is_private: true,
       is_production: CLOUD_PLATFORM === 'cloud.ibm.com',
       secrets_manager_crn: smInstance.crn,
       secret_name: smSecretName,
@@ -68,7 +68,7 @@ async function initProviderFile(targetRegion, dir) {
   return writeFilePromise(`${dir}/provider.tf`, jsonToTf(newProviderTfStr));
 }

-async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag, targetToolchainName, targetRgId, disableTriggers, isCompact, outputDir, tempDir, moreTfResources, gritMapping, skipUserConfirmation, includeS2S }) {
+async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag, targetToolchainName, targetRgId, disableTriggers, isCompact, outputDir, tempDir, moreTfResources, gritMapping, skipUserConfirmation, includeS2S, timeSuffix }) {
   const promises = [];

   const writeProviderPromise = await initProviderFile(targetRegion, outputDir);
@@ -359,7 +359,9 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,

   const newTfFileObjStr = JSON.stringify(newTfFileObj);
   let newTfFile = replaceDependsOn(jsonToTf(newTfFileObjStr));
-  if (includeS2S && (isCompact || resourceName === 'ibm_cd_toolchain')) newTfFile = addS2sScriptToToolchainTf(newTfFile);
+  if (includeS2S && (isCompact || resourceName === 'ibm_cd_toolchain')) {
+    newTfFile = addS2sScriptToToolchainTf(newTfFile, timeSuffix);
+  }
   const copyResourcesPromise = writeFilePromise(`${outputDir}/${fileName}`, newTfFile);
   promises.push(copyResourcesPromise);
 }
@@ -487,7 +489,7 @@ function replaceDependsOn(str) {
   }
 }

-function addS2sScriptToToolchainTf(str) {
+function addS2sScriptToToolchainTf(str, timeSuffix) {
   const provisionerStr = (tfName) => `\n\n  provisioner "local-exec" {
     command = "node create-s2s-script.cjs"
     on_failure = continue
@@ -496,6 +498,7 @@ function addS2sScriptToToolchainTf(str) {
       TARGET_TOOLCHAIN_ID = ibm_cd_toolchain.${tfName}.id
       IBMCLOUD_PLATFORM = "${CLOUD_PLATFORM}"
       IAM_BASE_URL = "${IAM_BASE_URL}"
+      GENERATED_TIME = "${timeSuffix}" # corresponds with error log
     }\n  }`
   try {
     if (typeof str === 'string') {
@@ -23,8 +23,11 @@ if (!CLOUD_PLATFORM) throw Error(`Missing 'IBMCLOUD_PLATFORM'`);
 const IAM_BASE_URL = process.env['IAM_BASE_URL'] || 'https://iam.cloud.ibm.com';
 if (!IAM_BASE_URL) throw Error(`Missing 'IAM_BASE_URL'`);

+const GENERATED_TIME = process.env['GENERATED_TIME'];
+if (!GENERATED_TIME) throw Error(`Missing 'GENERATED_TIME'`);
+
 const INPUT_PATH = resolve('create-s2s.json');
-const ERROR_PATH = resolve('.s2s-script-failures');
+const ERROR_PATH = resolve(`.s2s-script-failures-${GENERATED_TIME}`);

 async function getBearer() {
   const url = `${IAM_BASE_URL}/identity/token`;
@@ -125,12 +128,23 @@ getBearer().then(async (bearer) => {
     promises.push(createS2sAuthPolicy(bearer, item));
   });

-  try {
-    await Promise.all(promises);
-  } catch (e) {
-    console.error(e);
-    // create temp file on error
-    fs.writeFileSync(ERROR_PATH, e);
-    exit(1);
-  }
+  await Promise.allSettled(promises).then((res) => {
+    const rejectReasons = res.filter(r => r.status === 'rejected').map(r => r.reason);
+
+    if (rejectReasons.length > 0) {
+      let errFileContents = '';
+      rejectReasons.forEach((reason) => {
+        console.error(reason);
+        // create temp file on error
+        errFileContents += reason;
+        errFileContents += '\n';
+      });
+      fs.writeFileSync(ERROR_PATH, errFileContents);
+      exit(1);
+    }
+  });
+}).catch((reason) => {
+  console.error(reason);
+  // create temp file on error
+  fs.writeFileSync(ERROR_PATH, reason + '\n');
 });
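The switch from Promise.all to Promise.allSettled means one rejected policy no longer short-circuits the rest: every policy attempt runs to completion, and all failures are aggregated into the time-suffixed error file. The pattern in isolation (names are illustrative, not this script's code):

    import fs from 'fs';

    // Run every task to completion, then collect all rejections at once —
    // Promise.all would have stopped reporting after the first failure.
    async function runAllAndLogFailures(tasks, errorPath) {
      const results = await Promise.allSettled(tasks.map((t) => t()));
      const failures = results
        .filter((r) => r.status === 'rejected')
        .map((r) => String(r.reason));

      if (failures.length > 0) {
        fs.writeFileSync(errorPath, failures.join('\n') + '\n');
        process.exit(1);
      }
    }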
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ibm-cloud/cd-tools",
-  "version": "1.12.0",
+  "version": "1.13.1",
   "description": "Tools and utilities for the IBM Cloud Continuous Delivery service and resources",
   "repository": {
     "type": "git",