@ibm-cloud/cd-tools 1.6.1 → 1.8.1

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/README.md CHANGED
@@ -132,9 +132,9 @@ Copies a toolchain, including tool integrations and Tekton pipelines, to another
132
132
 
133
133
  Examples:
134
134
  export IBMCLOUD_API_KEY='...'
135
- npx @ibm-cloud/cd-migration-tools copy-toolchain -c ${TOOLCHAIN_CRN} -r us-south
135
+ npx @ibm-cloud/cd-tools copy-toolchain -c ${TOOLCHAIN_CRN} -r us-south
136
136
  Copy a toolchain to the Dallas region with the same name, in the same resource group.
137
- npx @ibm-cloud/cd-migration-tools copy-toolchain -c ${TOOLCHAIN_CRN} -r eu-de -n new-toolchain-name -g new-resource-group --apikey ${APIKEY}
137
+ npx @ibm-cloud/cd-tools copy-toolchain -c ${TOOLCHAIN_CRN} -r eu-de -n new-toolchain-name -g new-resource-group --apikey ${APIKEY}
138
138
  Copy a toolchain to the Frankfurt region with the specified name and target resource group, using the given API key
139
139
 
140
140
  Environment Variables:
@@ -7,15 +7,20 @@
7
7
  * Contract with IBM Corp.
8
8
  */
9
9
 
10
- import { Command, Option } from 'commander';
10
+ import { Command } from 'commander';
11
11
  import axios from 'axios';
12
12
  import readline from 'readline/promises';
13
+ import { writeFile } from 'fs/promises';
13
14
  import { TARGET_REGIONS, SOURCE_REGIONS } from '../config.js';
15
+ import { getWithRetry } from './utils/requests.js';
16
+
17
+ const HTTP_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes default
14
18
 
15
19
  class GitLabClient {
16
20
  constructor(baseURL, token) {
17
21
  this.client = axios.create({
18
22
  baseURL: baseURL.endsWith('/') ? `${baseURL}api/v4` : `${baseURL}/api/v4`,
23
+ timeout: HTTP_TIMEOUT_MS,
19
24
  headers: {
20
25
  'Authorization': `Bearer ${token}`,
21
26
  'Content-Type': 'application/json'
@@ -23,24 +28,88 @@ class GitLabClient {
23
28
  });
24
29
  }
25
30
 
26
- async getGroupProjects(groupId) {
31
+ // List all projects in a group + all its subgroups using BFS.
32
+ async getGroupProjects(groupId, { maxProjects = 1000, maxRequests = 2000 } = {}) {
33
+ let requestCount = 0;
27
34
  const projects = [];
28
- let page = 1;
29
- let hasMore = true;
30
-
31
- while (hasMore) {
32
- const response = await this.client.get(`/groups/${groupId}/projects`, {
33
- params: { page, per_page: 100, include_subgroups: true }
34
- });
35
-
36
- projects.push(...response.data);
37
- hasMore = response.data.length === 100;
38
- page++;
35
+ const toVisit = [groupId];
36
+ const visited = new Set();
37
+
38
+ console.log(
39
+ `[DEBUG] Starting BFS project listing from group ${groupId} (maxProjects=${maxProjects}, maxRequests=${maxRequests})`
40
+ );
41
+
42
+ while (toVisit.length > 0) {
43
+ const currentGroupId = toVisit.shift();
44
+ if (visited.has(currentGroupId)) continue;
45
+ visited.add(currentGroupId);
46
+
47
+ console.log(`[DEBUG] Visiting group ${currentGroupId}. Remaining groups in queue: ${toVisit.length}`);
48
+
49
+ // List projects for THIS group (no include_subgroups!)
50
+ let projPage = 1;
51
+ let hasMoreProjects = true;
52
+
53
+ while (hasMoreProjects) {
54
+ if (requestCount >= maxRequests || projects.length >= maxProjects) {
55
+ console.warn(`[WARN] Stopping project traversal: requestCount=${requestCount}, projects=${projects.length}`);
56
+ return projects;
57
+ }
58
+
59
+ const projRes = await getWithRetry(
60
+ this.client,
61
+ `/groups/${currentGroupId}/projects`,
62
+ { page: projPage, per_page: 100 }
63
+ );
64
+
65
+ requestCount++;
66
+ const pageProjects = projRes.data || [];
67
+ if (pageProjects.length > 0) {
68
+ projects.push(...pageProjects);
69
+ }
70
+
71
+ hasMoreProjects = pageProjects.length === 100;
72
+ projPage++;
73
+ }
74
+
75
+ // List DIRECT subgroups and enqueue them
76
+ let subgroupPage = 1;
77
+ let hasMoreSubgroups = true;
78
+
79
+ while (hasMoreSubgroups) {
80
+ if (requestCount >= maxRequests) {
81
+ console.warn(
82
+ `[WARN] Stopping subgroup traversal: requestCount=${requestCount}`
83
+ );
84
+ return projects;
85
+ }
86
+
87
+ const subgroupRes = await getWithRetry(
88
+ this.client,
89
+ `/groups/${currentGroupId}/subgroups`,
90
+ { page: subgroupPage, per_page: 100 }
91
+ );
92
+
93
+ requestCount++;
94
+ const subgroups = subgroupRes.data || [];
95
+
96
+ if (subgroups.length > 0) {
97
+ for (const sg of subgroups) {
98
+ if (!visited.has(sg.id)) {
99
+ toVisit.push(sg.id);
100
+ }
101
+ }
102
+ }
103
+
104
+ hasMoreSubgroups = subgroups.length === 100;
105
+ subgroupPage++;
106
+ }
39
107
  }
40
-
108
+
109
+ console.log(`[DEBUG] Finished BFS project listing. Total projects=${projects.length}, total requests=${requestCount}`);
41
110
  return projects;
42
111
  }
43
-
112
+
44
113
  async getGroup(groupId) {
45
114
  const response = await this.client.get(`/groups/${groupId}`);
46
115
  return response.data;
@@ -116,6 +185,28 @@ class GitLabClient {
116
185
  throw new Error(`Bulk import API call failed: ${error.response?.status} ${error.response?.statusText} - ${JSON.stringify(error.response?.data)}`);
117
186
  }
118
187
  }
188
+
189
+ async getBulkImportEntitiesAll(importId, { perPage = 100, maxPages = 200 } = {}) {
190
+ const all = [];
191
+ let page = 1;
192
+
193
+ while (page <= maxPages) {
194
+ const resp = await getWithRetry(
195
+ this.client,
196
+ `/bulk_imports/${importId}/entities`,
197
+ { page, per_page: perPage }
198
+ );
199
+
200
+ all.push(...(resp.data || []));
201
+
202
+ const nextPage = Number(resp.headers?.['x-next-page'] || 0);
203
+ if (!nextPage) break;
204
+
205
+ page = nextPage;
206
+ }
207
+
208
+ return all;
209
+ }
119
210
  }
120
211
 
121
212
  async function promptUser(name) {
@@ -126,7 +217,7 @@ async function promptUser(name) {
126
217
 
127
218
  const answer = await rl.question(`Your new group name is ${name}. Are you sure? (Yes/No)`);
128
219
 
129
- rl.close();
220
+ rl.close();
130
221
 
131
222
  if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {
132
223
  console.log("Proceeding...");
@@ -144,6 +235,132 @@ function validateAndConvertRegion(region) {
144
235
  return `https://${region}.git.cloud.ibm.com/`;
145
236
  }
146
237
 
238
+ // Build a mapping of: old http_url_to_repo -> new http_url_to_repo
239
+ async function generateUrlMappingFile({sourceUrl, destUrl, sourceGroup, destinationGroupPath, sourceProjects}) {
240
+ const destBase = destUrl.endsWith('/') ? destUrl.slice(0, -1) : destUrl;
241
+ const urlMapping = {};
242
+
243
+ const groupPrefix = `${sourceGroup.full_path}/`;
244
+
245
+ for (const project of sourceProjects) {
246
+ const oldRepoUrl = project.http_url_to_repo; // ends with .git
247
+
248
+ // path_with_namespace is like "group/subgroup/project-1"
249
+ let relativePath;
250
+ if (project.path_with_namespace.startsWith(groupPrefix)) {
251
+ relativePath = project.path_with_namespace.slice(groupPrefix.length);
252
+ } else {
253
+ // Fallback if for some reason full_path is not a prefix
254
+ relativePath = project.path_with_namespace;
255
+ }
256
+
257
+ const newRepoUrl = `${destBase}/${destinationGroupPath}/${relativePath}.git`;
258
+ urlMapping[oldRepoUrl] = newRepoUrl;
259
+ }
260
+
261
+ const mappingFile = 'grit-url-map.json';
262
+
263
+ await writeFile(mappingFile, JSON.stringify(urlMapping, null, 2), {
264
+ encoding: 'utf8',
265
+ });
266
+
267
+ console.log(`\nURL mapping JSON generated at: ${mappingFile}`);
268
+ console.log(`Total mapped projects: ${sourceProjects.length}`);
269
+ }
270
+
271
+ function buildGroupImportHistoryUrl(destUrl) {
272
+ try {
273
+ return new URL('import/bulk_imports/history', destUrl).toString();
274
+ } catch {
275
+ return null;
276
+ }
277
+ }
278
+
279
+ function summarizeBulkImportProgress(entities = []) {
280
+ let entityTotal = 0;
281
+ let entityFinished = 0;
282
+ let entityFailed = 0;
283
+
284
+ let projectTotal = 0;
285
+ let projectFinished = 0;
286
+ let projectFailed = 0;
287
+
288
+ let lastCompleted = null;
289
+ let lastCompletedTs = 0;
290
+
291
+ for (const e of entities) {
292
+ entityTotal++;
293
+
294
+ const status = e.status;
295
+ const isFinished = status === 'finished';
296
+ const isFailed = status === 'failed';
297
+
298
+ if (isFinished) entityFinished++;
299
+ if (isFailed) entityFailed++;
300
+
301
+ const isProjectEntity =
302
+ e.source_type === 'project_entity' ||
303
+ e.entity_type === 'project_entity' ||
304
+ e.entity_type === 'project';
305
+
306
+ if (isProjectEntity) {
307
+ projectTotal++;
308
+ if (isFinished) projectFinished++;
309
+ if (isFailed) projectFailed++;
310
+ }
311
+
312
+ if (isFinished) {
313
+ const ts = new Date(e.updated_at || e.created_at || 0).getTime();
314
+ if (ts > lastCompletedTs) {
315
+ lastCompletedTs = ts;
316
+ lastCompleted = e;
317
+ }
318
+ }
319
+ }
320
+
321
+ const entityDone = entityFinished + entityFailed;
322
+ const entityPct = entityTotal ? Math.floor((entityDone / entityTotal) * 100) : 0;
323
+
324
+ const projectDone = projectFinished + projectFailed;
325
+ const projectPct = projectTotal ? Math.floor((projectDone / projectTotal) * 100) : 0;
326
+
327
+ const lastCompletedLabel = lastCompleted?.source_full_path || '';
328
+
329
+ return {
330
+ entityTotal,
331
+ entityDone,
332
+ entityFailed,
333
+ entityPct,
334
+ projectTotal,
335
+ projectDone,
336
+ projectFailed,
337
+ projectPct,
338
+ lastCompletedLabel,
339
+ };
340
+ }
341
+
342
+ function formatBulkImportProgressLine(importStatus, summary) {
343
+ if (!summary || summary.entityTotal === 0) {
344
+ return `Import status: ${importStatus} | Progress: initializing...`;
345
+ }
346
+
347
+ const parts = [`Import status: ${importStatus}`];
348
+
349
+ if (summary.projectTotal > 0) {
350
+ parts.push(`Projects: ${summary.projectDone}/${summary.projectTotal} (${summary.projectPct}%)`);
351
+ if (summary.projectFailed > 0) parts.push(`Project failed: ${summary.projectFailed}`);
352
+ }
353
+
354
+ parts.push(`Entities: ${summary.entityDone}/${summary.entityTotal} (${summary.entityPct}%)`);
355
+ if (summary.entityFailed > 0) parts.push(`Failed: ${summary.entityFailed}`);
356
+
357
+ if (summary.lastCompletedLabel) {
358
+ parts.push(`Last completed: ${summary.lastCompletedLabel}`);
359
+ }
360
+
361
+ return parts.join(' | ');
362
+ }
363
+
147
364
  async function directTransfer(options) {
148
365
  const sourceUrl = validateAndConvertRegion(options.sourceRegion);
149
366
  const destUrl = validateAndConvertRegion(options.destRegion);
@@ -168,6 +385,15 @@ async function directTransfer(options) {
168
385
  await promptUser(options.newName);
169
386
  }
170
387
 
388
+ // Generate URL mapping JSON before starting the migration
389
+ await generateUrlMappingFile({
390
+ sourceUrl,
391
+ destUrl,
392
+ sourceGroup,
393
+ destinationGroupPath,
394
+ sourceProjects,
395
+ });
396
+
171
397
  let bulkImport = null;
172
398
 
173
399
  const requestPayload = {
@@ -181,10 +407,10 @@ async function directTransfer(options) {
181
407
  destination_slug: destinationGroupPath,
182
408
  destination_namespace: ""
183
409
  }]
184
- }
410
+ };
185
411
 
186
412
  let importRes = null;
187
-
413
+
188
414
  try {
189
415
  importRes = await destination.bulkImport(requestPayload);
190
416
  if (importRes.success) {
@@ -192,28 +418,45 @@ async function directTransfer(options) {
192
418
  console.log(`Bulk import request succeeded!`);
193
419
  console.log(`Bulk import initiated successfully (ID: ${importRes.data?.id})`);
194
420
  } else if (importRes.conflict) {
195
- console.log(`Conflict detected: ${importRes.error}`);
196
- console.log(`Please specify a new group name using -n, --new-name <n> when trying again`);
197
- process.exit(0);
421
+ console.log(`Conflict detected: ${importRes.error}`);
422
+ console.log(`Please specify a new group name using -n, --new-name <n> when trying again`);
423
+ process.exit(0);
198
424
  }
199
425
  } catch (error) {
200
426
  console.log(`Bulk import request failed - ${error.message}`);
201
427
  process.exit(0);
202
428
  }
203
429
 
204
- console.log('\nPolling bulk import status (checking every 5 minute)...');
430
+ console.log('\nPolling bulk import status (adaptive: 1m→2m→3m→4m→5m, max 60 checks)...');
431
+ const MAX_ATTEMPTS = 60;
432
+ const POLLS_PER_STEP = 5;
433
+ const MIN_INTERVAL_MIN = 1;
434
+ const MAX_INTERVAL_MIN = 5;
435
+
205
436
  let importStatus = 'created';
206
437
  let attempts = 0;
207
-
208
- while (!['finished', 'failed', 'timeout'].includes(importStatus) && attempts < 60) {
438
+
439
+ while (!['finished', 'failed', 'timeout'].includes(importStatus) && attempts < MAX_ATTEMPTS) {
209
440
  if (attempts > 0) {
210
- console.log(`Waiting 5 minute before next status check...`);
211
- await new Promise(resolve => setTimeout(resolve, 5*60000));
441
+ const step = Math.floor(attempts / POLLS_PER_STEP);
442
+ const waitMin = Math.min(MIN_INTERVAL_MIN + step, MAX_INTERVAL_MIN);
443
+
444
+ console.log(`Waiting ${waitMin} minute before next status check...`);
445
+ await new Promise(resolve => setTimeout(resolve, waitMin * 60000));
212
446
  }
213
447
  try {
214
448
  const importDetails = await destination.getBulkImport(bulkImport.id);
215
449
  importStatus = importDetails.status;
216
- console.log(`[${new Date().toLocaleTimeString()}] Import status: ${importStatus}`);
450
+ let progressLine;
451
+ try {
452
+ const entitiesAll = await destination.getBulkImportEntitiesAll(bulkImport.id);
453
+ const summary = summarizeBulkImportProgress(entitiesAll);
454
+ progressLine = formatBulkImportProgressLine(importStatus, summary);
455
+ } catch {
456
+ progressLine = `Import status: ${importStatus} | Progress: (unable to fetch entity details)`;
457
+ }
458
+
459
+ console.log(`[${new Date().toLocaleTimeString()}] ${progressLine}`);
217
460
 
218
461
  if (importStatus === 'finished') {
219
462
  console.log('Bulk import completed successfully!');
@@ -230,29 +473,40 @@ async function directTransfer(options) {
230
473
  }
231
474
  attempts++;
232
475
  }
233
-
234
- if (attempts >= 60) {
235
- console.error(`Bulk import either timed out or is still running in the background`);
476
+
477
+ if (attempts >= MAX_ATTEMPTS) {
478
+ const historyUrl = buildGroupImportHistoryUrl(destUrl);
479
+
480
+ console.error('\nThe CLI has stopped polling for the GitLab bulk import.');
481
+ console.error('The migration itself may still be running inside GitLab — the CLI only waits for a limited time.');
482
+ console.error(`Last reported status for bulk import ${bulkImport.id}: ${importStatus}`);
483
+
484
+ if (historyUrl) {
485
+ console.error('\nYou can continue monitoring this migration in the GitLab UI.');
486
+ console.error(`Group import history: ${historyUrl}`);
487
+ } else {
488
+ console.error('\nYou can continue monitoring this migration from the Group import history page in the GitLab UI.');
489
+ }
236
490
  process.exit(0);
237
491
  }
238
492
 
239
493
  const entities = await destination.getBulkImportEntities(bulkImport.id);
240
494
  const finishedEntities = entities.filter(e => e.status === 'finished');
241
495
  const failedEntities = entities.filter(e => e.status === 'failed');
242
-
496
+
243
497
  if (importStatus === 'finished' && finishedEntities.length > 0) {
244
498
  console.log(`\nGroup migration completed successfully!`);
245
499
  console.log(`Migration Results:`);
246
500
  console.log(`Successfully migrated: ${finishedEntities.length} entities`);
247
501
  console.log(`Failed: ${failedEntities.length} entities`);
248
-
502
+
249
503
  if (failedEntities.length > 0) {
250
504
  console.log(`\nFailed entities:\n`);
251
505
  failedEntities.forEach(e => {
252
506
  console.log(`${e.source_type}: ${e.source_full_path} (${e.status})`);
253
507
  });
254
508
  }
255
-
509
+
256
510
  return 0;
257
511
  } else {
258
512
  console.error('\nBulk import failed!');
@@ -282,7 +536,7 @@ const command = new Command('copy-project-group')
282
536
  .showHelpAfterError()
283
537
  .hook('preAction', cmd => cmd.showHelpAfterError(false)) // only show help during validation
284
538
  .action(async (options) => {
285
- await directTransfer(options);
539
+ await directTransfer(options);
286
540
  });
287
541
 
288
542
  export default command;
@@ -477,6 +477,28 @@ async function migrateToolchainSecrets(bearer, data, region) {
477
477
  }
478
478
  }
479
479
 
480
+ // GET with retry for flaky 5xx/520 errors (Cloudflare / origin issues)
481
+ async function getWithRetry(client, path, params = {}, { retries = 3, retryDelayMs = 2000 } = {}) {
482
+ let lastError;
483
+ for (let attempt = 1; attempt <= retries; attempt++) {
484
+ try {
485
+ return await client.get(path, { params });
486
+ } catch (error) {
487
+ const status = error.response?.status;
488
+ if (attempt < retries && status && status >= 500) {
489
+ console.warn(
490
+ `[WARN] GET ${path} failed with status ${status} (attempt ${attempt}/${retries}). Retrying...`
491
+ );
492
+ await new Promise(resolve => setTimeout(resolve, retryDelayMs * attempt));
493
+ lastError = error;
494
+ continue;
495
+ }
496
+ throw error; // Non-5xx or out of retries: rethrow
497
+ }
498
+ }
499
+ throw lastError;
500
+ }
501
+
480
502
  export {
481
503
  getBearerToken,
482
504
  getAccountId,
@@ -495,5 +517,6 @@ export {
495
517
  deleteToolchain,
496
518
  createTool,
497
519
  getSmInstances,
498
- migrateToolchainSecrets
520
+ migrateToolchainSecrets,
521
+ getWithRetry
499
522
  }
@@ -205,7 +205,7 @@ async function setupTerraformFiles({ token, srcRegion, targetRegion, targetTag,
205
205
  logger.print('Please enter the new URLs for the following GRIT tool(s) (or submit empty input to skip):\n');
206
206
  }
207
207
 
208
- const newRepoSlug = await promptUserInput(`Old URL: ${thisUrl.slice(0, thisUrl.length - 4)}\nNew URL: ${GIT_BASE_URL || 'https://' + targetRegion + '.git.cloud.ibm.com'}`, '', validateGritUrlPrompt);
208
+ const newRepoSlug = await promptUserInput(`Old URL: ${thisUrl.slice(0, thisUrl.length - 4)}\nNew URL: ${GIT_BASE_URL || 'https://' + targetRegion + '.git.cloud.ibm.com'}/`, '', validateGritUrlPrompt);
209
209
 
210
210
  if (newRepoSlug) {
211
211
  newUrl = (GIT_BASE_URL || `https://${targetRegion}.git.cloud.ibm.com`) + `/${newRepoSlug}.git`;
@@ -405,14 +405,17 @@ async function validateOAuth(token, tools, targetRegion, skipPrompt) {
405
405
 
406
406
  async function validateGritUrl(token, region, url, validateFull) {
407
407
  if (typeof url != 'string') throw Error('Provided GRIT url is not a string');
408
- let trimmed;
408
+ let trimmed = url.trim();
409
409
 
410
410
  if (validateFull) {
411
411
  const baseUrl = (GIT_BASE_URL || `https://${region}.git.cloud.ibm.com`) + '/';
412
- if (!url.startsWith(baseUrl) || !url.endsWith('.git')) throw Error('Provided full GRIT url is not valid');
413
- trimmed = url.slice(baseUrl.length, url.length - '.git'.length);
414
- } else {
415
- trimmed = url.trim();
412
+ if (!trimmed.startsWith(baseUrl)) throw Error('Provided full GRIT url is not valid');
413
+
414
+ if (trimmed.endsWith('.git')) {
415
+ trimmed = trimmed.slice(baseUrl.length, trimmed.length - '.git'.length);
416
+ } else {
417
+ trimmed = trimmed.slice(baseUrl.length);
418
+ }
416
419
  }
417
420
 
418
421
  // split into two parts, user/group/subgroup and project
@@ -468,7 +471,7 @@ async function validateGritUrl(token, region, url, validateFull) {
468
471
  await getGritGroupProject(accessToken, region, urlStart, projectName);
469
472
  return trimmed;
470
473
  } catch {
471
- throw Error('Provided GRIT url not found');
474
+ throw Error(`Provided GRIT url not found: ${url}`);
472
475
  }
473
476
  }
474
477
 
package/config.js CHANGED
@@ -11,9 +11,9 @@ const COPY_TOOLCHAIN_DESC = `Copies a toolchain, including tool integrations and
11
11
 
12
12
  Examples:
13
13
  export IBMCLOUD_API_KEY='...'
14
- npx @ibm-cloud/cd-migration-tools copy-toolchain -c \${TOOLCHAIN_CRN} -r us-south
14
+ npx @ibm-cloud/cd-tools copy-toolchain -c \${TOOLCHAIN_CRN} -r us-south
15
15
  Copy a toolchain to the Dallas region with the same name, in the same resource group.
16
- npx @ibm-cloud/cd-migration-tools copy-toolchain -c \${TOOLCHAIN_CRN} -r eu-de -n new-toolchain-name -g new-resource-group --apikey \${APIKEY}
16
+ npx @ibm-cloud/cd-tools copy-toolchain -c \${TOOLCHAIN_CRN} -r eu-de -n new-toolchain-name -g new-resource-group --apikey \${APIKEY}
17
17
  Copy a toolchain to the Frankfurt region with the specified name and target resource group, using the given API key
18
18
 
19
19
  Environment Variables:
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ibm-cloud/cd-tools",
3
- "version": "1.6.1",
3
+ "version": "1.8.1",
4
4
  "description": "Tools and utilities for the IBM Cloud Continuous Delivery service and resources",
5
5
  "repository": {
6
6
  "type": "git",