@ibm-cloud/cd-tools 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,288 @@
1
+ /**
2
+ * Licensed Materials - Property of IBM
3
+ * (c) Copyright IBM Corporation 2025. All Rights Reserved.
4
+ *
5
+ * Note to U.S. Government Users Restricted Rights:
6
+ * Use, duplication or disclosure restricted by GSA ADP Schedule
7
+ * Contract with IBM Corp.
8
+ */
9
+
10
+ import { Command, Option } from 'commander';
11
+ import axios from 'axios';
12
+ import readline from 'readline/promises';
13
+ import { TARGET_REGIONS, SOURCE_REGIONS } from '../config.js';
14
+
15
/**
 * Minimal GitLab REST API (v4) client used for group bulk migrations.
 * Wraps a preconfigured axios instance carrying the instance base URL and
 * a bearer token.
 */
class GitLabClient {
  /**
   * @param {string} baseURL - GitLab instance root URL, with or without a trailing slash.
   * @param {string} token - Access token sent as an Authorization: Bearer header.
   */
  constructor(baseURL, token) {
    this.client = axios.create({
      baseURL: baseURL.endsWith('/') ? `${baseURL}api/v4` : `${baseURL}/api/v4`,
      headers: {
        'Authorization': `Bearer ${token}`,
        'Content-Type': 'application/json'
      }
    });
  }

  /**
   * Fetches every project in a group (subgroups included), following
   * pagination 100 records at a time.
   * @param {number|string} groupId
   * @returns {Promise<object[]>} all project records
   */
  async getGroupProjects(groupId) {
    const projects = [];
    let page = 1;
    let hasMore = true;

    while (hasMore) {
      const response = await this.client.get(`/groups/${groupId}/projects`, {
        params: { page, per_page: 100, include_subgroups: true }
      });

      projects.push(...response.data);
      // A page shorter than per_page means we just read the last page.
      hasMore = response.data.length === 100;
      page++;
    }

    return projects;
  }

  /**
   * Fetches a single group record.
   * @param {number|string} groupId
   * @returns {Promise<object>}
   */
  async getGroup(groupId) {
    const response = await this.client.get(`/groups/${groupId}`);
    return response.data;
  }

  /**
   * Starts a bulk import (direct transfer). Unlike bulkImport(), this
   * variant does not translate conflicts and lets axios errors propagate.
   * @param {object} importData - bulk_imports request payload
   * @returns {Promise<object>} created bulk import record
   */
  async createBulkImport(importData) {
    const response = await this.client.post('/bulk_imports', importData);
    return response.data;
  }

  /**
   * Fetches the current state of a bulk import.
   * @param {number|string} importId
   * @returns {Promise<object>}
   */
  async getBulkImport(importId) {
    const response = await this.client.get(`/bulk_imports/${importId}`);
    return response.data;
  }

  /**
   * Lists the entities (groups/projects) tracked by a bulk import.
   * @param {number|string} importId
   * @returns {Promise<object[]>}
   */
  async getBulkImportEntities(importId) {
    const response = await this.client.get(`/bulk_imports/${importId}/entities`);
    return response.data;
  }

  /**
   * Fetches a single entity of a bulk import.
   * @param {number|string} importId
   * @param {number|string} entityId
   * @returns {Promise<object>}
   */
  async getBulkImportEntity(importId, entityId) {
    const response = await this.client.get(`/bulk_imports/${importId}/entities/${entityId}`);
    return response.data;
  }

  /**
   * Lists a project's custom attributes; a 404 is treated as "no
   * attributes" rather than an error.
   * @param {number|string} projectId
   * @returns {Promise<object[]>}
   */
  async getCustomAttributes(projectId) {
    try {
      const response = await this.client.get(`/projects/${projectId}/custom_attributes`);
      return response.data;
    } catch (error) {
      if (error.response?.status === 404) {
        return []; // No custom attributes
      }
      throw error;
    }
  }

  /**
   * Deletes one custom attribute by key.
   * @param {number|string} projectId
   * @param {string} key
   * @returns {Promise<boolean>} true if deleted, false if it did not exist (404)
   */
  async deleteCustomAttribute(projectId, key) {
    try {
      await this.client.delete(`/projects/${projectId}/custom_attributes/${key}`);
      return true;
    } catch (error) {
      if (error.response?.status === 404) {
        return false; // custom attribute doesn't exist
      }
      throw error;
    }
  }

  /**
   * Deletes every custom attribute on a project, collecting a per-key
   * result instead of failing fast.
   * @param {number|string} projectId
   * @returns {Promise<Array<{key: string, deleted: boolean, error?: string}>>}
   */
  async deleteAllCustomAttributes(projectId) {
    const attributes = await this.getCustomAttributes(projectId);
    const results = [];
    for (const attr of attributes) {
      try {
        // Bug fix: previously this reported deleted: true unconditionally,
        // even when deleteCustomAttribute returned false (attribute already
        // gone / 404). Propagate the actual outcome.
        const deleted = await this.deleteCustomAttribute(projectId, attr.key);
        results.push({ key: attr.key, deleted });
      } catch (error) {
        results.push({ key: attr.key, deleted: false, error: error.message });
      }
    }
    return results;
  }

  /**
   * Starts a bulk import, translating "already exists" conflicts into a
   * structured result instead of an exception.
   * @param {object} importData - bulk_imports request payload
   * @returns {Promise<{success: boolean, data?: object, conflict?: boolean, error?: string}>}
   * @throws {Error} on any non-conflict API failure
   */
  async bulkImport(importData) {
    try {
      const response = await this.client.post('/bulk_imports', importData);
      return { success: true, data: response.data };
    } catch (error) {
      // name/path already exists
      if (error.response?.status === 409 || error.response?.data?.message?.includes("already exists")) {
        return { success: false, conflict: true, error: error.response?.data?.message };
      }
      throw new Error(`Bulk import API call failed: ${error.response?.status} ${error.response?.statusText} - ${JSON.stringify(error.response?.data)}`);
    }
  }
}
120
+
121
/**
 * Asks the user to confirm the proposed group name on stdin.
 * Continues on "yes"/"y" (case-insensitive, surrounding whitespace
 * ignored); otherwise exits the process with code 0.
 * @param {string} name - Proposed destination group name.
 * @returns {Promise<void>}
 */
async function promptUser(name) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  let answer;
  try {
    answer = await rl.question(`Your new group name is ${name}. Are you sure? (Yes/No)`);
  } finally {
    // Always release stdin, even if the prompt is interrupted or throws;
    // previously an error here leaked the readline interface.
    rl.close();
  }

  // Trim so input like "yes " or " y" is accepted.
  const normalized = answer.trim().toLowerCase();
  if (normalized === 'yes' || normalized === 'y') {
    console.log("Proceeding...");
  } else {
    process.exit(0);
  }
}
137
+
138
/**
 * Validates that a region code is a known source region and maps it to
 * the corresponding IBM-hosted GitLab base URL.
 * @param {string} region - Region code supplied on the command line.
 * @returns {string} HTTPS base URL for the region's GitLab instance.
 * @throws {Error} when the region is not listed in SOURCE_REGIONS.
 */
function validateAndConvertRegion(region) {
  const isKnown = SOURCE_REGIONS.includes(region);
  if (isKnown) {
    return `https://${region}.git.cloud.ibm.com/`;
  }
  throw new Error(
    `Invalid region: ${region}. Must be one of: ${SOURCE_REGIONS.join(', ')}`
  );
}
146
+
147
/**
 * Migrates a GitLab group (and all of its projects) from one region's
 * instance to another using GitLab's bulk import ("direct transfer") API.
 *
 * Flow: resolve the source group -> list its projects -> optionally
 * confirm a rename with the user -> start a bulk import on the
 * destination -> poll status every 5 minutes (up to 60 checks / 5 hours)
 * -> report per-entity results.
 *
 * @param {object} options - Parsed CLI options: sourceRegion, destRegion,
 *   sourceToken, destToken, groupId, and optional newName.
 * @returns {Promise<number>} 0 on success. May call process.exit(0) on
 *   name conflicts, request failures, or poll timeout.
 * @throws {Error} when the bulk import finishes in a failed state.
 */
async function directTransfer(options) {
  const sourceUrl = validateAndConvertRegion(options.sourceRegion);
  const destUrl = validateAndConvertRegion(options.destRegion);
  const source = new GitLabClient(sourceUrl, options.sourceToken);
  const destination = new GitLabClient(destUrl, options.destToken);

  try {
    console.log(`Fetching source group from ID: ${options.groupId}...`);
    const sourceGroup = await source.getGroup(options.groupId);

    // Destination path defaults to the source path unless a rename was
    // requested. (A separate unused "name" variable was removed.)
    const destinationGroupPath = options.newName || sourceGroup.path;

    const sourceProjects = await source.getGroupProjects(sourceGroup.id);
    console.log(`Found ${sourceProjects.length} projects in source group`);
    if (sourceProjects.length > 0) {
      console.log('Projects to be migrated:');
      sourceProjects.forEach(p => console.log(`${p.name_with_namespace}`));
    }

    // Renames are irreversible from the user's point of view — confirm first.
    if (options.newName) {
      await promptUser(options.newName);
    }

    let bulkImport = null;

    const requestPayload = {
      configuration: {
        url: sourceUrl,
        access_token: options.sourceToken
      },
      entities: [{
        source_full_path: sourceGroup.full_path,
        source_type: 'group_entity',
        destination_slug: destinationGroupPath,
        destination_namespace: ""
      }]
    };

    try {
      const importRes = await destination.bulkImport(requestPayload);
      if (importRes.success) {
        bulkImport = importRes.data;
        console.log(`Bulk import request succeeded!`);
        console.log(`Bulk import initiated successfully (ID: ${importRes.data?.id})`);
      } else if (importRes.conflict) {
        // Destination group name/path already exists; user must pick a new one.
        console.log(`Conflict detected: ${importRes.error}`);
        console.log(`Please specify a new group name using -n, --new-name <n> when trying again`);
        process.exit(0);
      }
    } catch (error) {
      console.log(`Bulk import request failed - ${error.message}`);
      process.exit(0);
    }

    // Poll until the import reaches a terminal state or we give up:
    // 60 checks x 5 minutes = 5 hours.
    const POLL_INTERVAL_MS = 5 * 60000;
    const MAX_ATTEMPTS = 60;
    console.log('\nPolling bulk import status (checking every 5 minutes)...');
    let importStatus = 'created';
    let attempts = 0;

    while (!['finished', 'failed', 'timeout'].includes(importStatus) && attempts < MAX_ATTEMPTS) {
      if (attempts > 0) {
        console.log(`Waiting 5 minutes before next status check...`);
        await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL_MS));
      }
      try {
        const importDetails = await destination.getBulkImport(bulkImport.id);
        importStatus = importDetails.status;
        console.log(`[${new Date().toLocaleTimeString()}] Import status: ${importStatus}`);

        if (importStatus === 'finished') {
          console.log('Bulk import completed successfully!');
          break;
        } else if (importStatus === 'failed') {
          console.log('Bulk import failed!');
          break;
        }
      } catch (e) {
        // Transient status-check errors are retried; a 404 means the import
        // record disappeared, which is unrecoverable.
        console.error(`Error checking import status: ${e.message}`);
        if (e.response?.status === 404) {
          throw new Error('Bulk import not found - it may have been deleted');
        }
      }
      attempts++;
    }

    if (attempts >= MAX_ATTEMPTS) {
      console.error(`Bulk import either timed out or is still running in the background`);
      process.exit(0);
    }

    const entities = await destination.getBulkImportEntities(bulkImport.id);
    const finishedEntities = entities.filter(e => e.status === 'finished');
    const failedEntities = entities.filter(e => e.status === 'failed');

    if (importStatus === 'finished' && finishedEntities.length > 0) {
      console.log(`\nGroup migration completed successfully!`);
      console.log(`Migration Results:`);
      console.log(`Successfully migrated: ${finishedEntities.length} entities`);
      console.log(`Failed: ${failedEntities.length} entities`);

      // A finished import can still contain individual failed entities.
      if (failedEntities.length > 0) {
        console.log(`\nFailed entities:\n`);
        failedEntities.forEach(e => {
          console.log(`${e.source_type}: ${e.source_full_path} (${e.status})`);
        });
      }

      return 0;
    } else {
      console.error('\nBulk import failed!');
      if (failedEntities.length > 0) {
        console.error('Failed entities:');
        failedEntities.forEach(e => {
          console.error(`${e.source_type}: ${e.source_full_path} (${e.status})`);
        });
      }
      throw new Error('GitLab bulk import failed');
    }

  } catch (error) {
    console.error(`Group migration failed: ${error.message}`);
    throw error;
  }
}
273
+
274
/**
 * CLI command definition: bulk-migrates a GitLab group's projects between
 * regional instances via directTransfer().
 */
const command = new Command('copy-project-group');

command
  .description('Bulk migrate GitLab group projects')
  .requiredOption('-s, --source-region <region>', 'Source GitLab instance region')
  .requiredOption('-d, --dest-region <region>', 'Destination GitLab instance region')
  .requiredOption('--st, --source-token <token>', 'Source GitLab access token')
  .requiredOption('--dt, --dest-token <token>', 'Destination GitLab access token')
  .requiredOption('-g, --group-id <id>', 'Source group ID to migrate')
  .option('-n, --new-name <n>', 'New group path (optional)')
  .showHelpAfterError()
  .hook('preAction', cmd => cmd.showHelpAfterError(false)) // only show help during validation
  .action(async (options) => {
    await directTransfer(options);
  });

export default command;
package/cmd/index.js ADDED
@@ -0,0 +1,13 @@
1
+ /**
2
+ * Licensed Materials - Property of IBM
3
+ * (c) Copyright IBM Corporation 2025. All Rights Reserved.
4
+ *
5
+ * Note to U.S. Government Users Restricted Rights:
6
+ * Use, duplication or disclosure restricted by GSA ADP Schedule
7
+ * Contract with IBM Corp.
8
+ */
9
+
10
+ import checkSecrets from './check-secrets.js';
11
+ import copyToolchain from './copy-toolchain.js';
12
+ import directTransfer from './direct-transfer.js';
13
+ export { checkSecrets, copyToolchain, directTransfer };
@@ -0,0 +1,173 @@
1
+ /**
2
+ * Licensed Materials - Property of IBM
3
+ * (c) Copyright IBM Corporation 2025. All Rights Reserved.
4
+ *
5
+ * Note to U.S. Government Users Restricted Rights:
6
+ * Use, duplication or disclosure restricted by GSA ADP Schedule
7
+ * Contract with IBM Corp.
8
+ */
9
+
10
+ import ora from 'ora';
11
+ import path from 'path';
12
+ import Table from 'cli-table3';
13
+ import stripAnsi from 'strip-ansi';
14
+
15
+ import fs from 'node:fs';
16
+
17
// ANSI escape sequences used to colorize console output.
// Frozen: these are shared module-level constants and must not be
// mutated at runtime.
const COLORS = Object.freeze({
  reset: '\x1b[0m',
  gray: '\x1b[90m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  red: '\x1b[31m',
  blue: '\x1b[34m',
  white: '\x1b[37m'
});

// Maps each log level to its display color and the console method used
// to emit it. Frozen for the same reason as COLORS.
const LEVELS = Object.freeze({
  log: { color: COLORS.gray, method: 'log' },
  info: { color: COLORS.white, method: 'info' },
  success: { color: COLORS.green, method: 'info' },
  warn: { color: COLORS.yellow, method: 'warn' },
  error: { color: COLORS.red, method: 'error' },
  debug: { color: COLORS.blue, method: 'debug' }
});
35
+
36
/**
 * Console + log-file logger with leveled output, optional ora spinners,
 * and cli-table3 table rendering.
 *
 * Verbosity levels: 0 = quiet, 1 = normal (info/warn/spinners),
 * 2 = verbose (also log/debug).
 */
class Logger {
  constructor() {
    this.spinner = null;  // active ora spinner, if any
    this.verbosity = 1;   // default: normal output
  }

  /** Sets the verbosity level (0 quiet, 1 normal, 2 verbose). */
  setVerbosity(level) {
    this.verbosity = level;
  }

  /**
   * Opens the log file at logPath in append mode, creating parent
   * directories as needed. Subsequent log calls mirror their (ANSI-
   * stripped) output into this stream.
   * @param {string} logPath
   */
  createLogStream(logPath) {
    const logsDir = path.dirname(logPath);
    if (!fs.existsSync(logsDir)) fs.mkdirSync(logsDir, { recursive: true });
    this.logStream = fs.createWriteStream(logPath, { flags: 'a' });
  }

  // Builds the gray "HH:MM:SS [PREFIX]" banner; empty string when no prefix.
  #getFullPrefix(prefix) {
    if (!prefix) return '';
    const timestamp = new Date().toLocaleTimeString();
    const upperPrefix = prefix.toUpperCase();
    return `${COLORS.gray}${timestamp} [${upperPrefix}]${COLORS.reset}`
  }

  // Core sink: colorized console output plus ANSI-stripped file output
  // tagged with the level name.
  #baseLog(type, msg, prefix) {
    const level = LEVELS[type] || LEVELS.log;
    const formatted = this.#getFullPrefix(prefix) + ' ' + `${level.color}${msg}${COLORS.reset}`;
    console[level.method](formatted);
    this.logStream?.write(stripAnsi(this.#getFullPrefix(prefix) + ` [${type.toUpperCase()}] ` + msg) + '\n');
  }

  info(msg, prefix = '', force = false) { if (this.verbosity >= 1 || force) this.#baseLog('info', msg, prefix); }
  success(msg, prefix = '') { this.#baseLog('success', msg, prefix); }
  warn(msg, prefix = '', force = false) { if (this.verbosity >= 1 || force) this.#baseLog('warn', msg, prefix); }
  error(msg, prefix = '') { this.#baseLog('error', msg, prefix); }

  // Only writes to console and log file in verbose mode or force === true
  log(msg, prefix = '', force = false) { if (this.verbosity >= 2 || force) this.#baseLog('log', msg, prefix); }
  debug(msg, prefix = '', force = false) { if (this.verbosity >= 2 || force) this.#baseLog('debug', msg, prefix); }

  /** Prints raw space-joined text to the console and the log file. */
  print(...msg) {
    const message = msg.join(' ');
    console.log(message);
    // NOTE(review): unlike #baseLog, no trailing '\n' is appended to the
    // file stream here — confirm whether that is intentional.
    this.logStream?.write(stripAnsi(message));
  }

  /** Writes raw text to the log file only (no console output). */
  dump(msg) {
    this.logStream?.write(stripAnsi(msg));
  }

  /**
   * Flushes and closes the log file stream.
   * Bug fix: previously, when no stream had been created, the returned
   * promise never settled (optional-chained listeners were no-ops), so
   * awaiting close() hung forever. Now it resolves immediately.
   * @returns {Promise<void>}
   */
  close() {
    return new Promise((resolve, reject) => {
      if (!this.logStream) {
        resolve();
        return;
      }
      this.logStream.on('finish', resolve);
      this.logStream.on('error', reject);
      this.logStream.end();
    });
  }

  /** Starts a spinner with the given message; no-op in quiet mode. */
  startSpinner(msg, prefix = '') {
    if (this.verbosity < 1) return;
    this.spinner = ora({
      prefixText: this.#getFullPrefix(prefix),
      text: msg
    }).start();
  }
  updateSpinnerMsg(msg) { if (this.verbosity >= 1 && this.spinner) this.spinner.text = msg; }
  succeedSpinner(msg) { if (this.verbosity >= 1) this.spinner?.succeed(msg); }
  failSpinner(msg) { if (this.verbosity >= 1) this.spinner?.fail(msg); }
  resetSpinner() { if (this.verbosity >= 1) this.spinner = null; }

  /**
   * Runs asyncFn(...args) while showing a spinner, succeeding it with
   * successMsg on completion. In quiet mode the function simply runs with
   * no spinner UI.
   * @returns {Promise<*>} whatever asyncFn resolves to
   * @throws rethrows any error from asyncFn (spinner is cleared first)
   */
  async withSpinner(asyncFn, loadingMsg, successMsg, prefix, ...args) {
    if (this.verbosity < 1) {
      // Removed a pointless try/catch-rethrow around this call.
      return asyncFn(...args);
    }

    this.spinner = ora({
      prefixText: this.#getFullPrefix(prefix),
      text: loadingMsg
    });
    this.spinner.start();
    let res;
    try {
      res = await asyncFn(...args);
    }
    catch (err) {
      this.spinner?.clear(); // allows the outer try-catch block to handle error and log it out, avoiding duplicate error messages
      throw (err);
    }
    this.spinner?.succeed(successMsg);
    return res;
  }

  /**
   * Renders an array of uniform objects as a table. The rowSpanField
   * (default 'url') is not shown as a column; instead its value is
   * rendered as a full-width follow-up row under each record.
   * @param {object[]} data
   * @param {string} [rowSpanField='url']
   */
  table(data, rowSpanField = 'url') {
    if (!Array.isArray(data) || data.length < 1) return;
    // Deep-copy: rows are mutated (keys deleted) while building the table.
    const tableData = structuredClone(data);
    const headers = Object.keys(tableData[0]).filter(key => key !== rowSpanField);
    const t = new Table({
      head: headers,
      style: { head: ['cyan'] }
    });
    for (const row of tableData) {
      const tableRow = [];
      let rowSpanFieldVal = '';
      if (rowSpanField in row) {
        // First column spans two rows so the row-span value lines up under it.
        const rowKey = Object.keys(row)[0];
        tableRow.push({ content: row[rowKey], rowSpan: 2 });
        rowSpanFieldVal = row[rowSpanField];
        delete row[rowSpanField];
        delete row[rowKey];
      }
      tableRow.push(
        ...Object.values(row).map(val => {
          if (Array.isArray(val))
            return val.map((item, idx) => `${idx + 1}: ${item}`).join('\n');
          else if (typeof val === 'string')
            return val;
          return JSON.stringify(val);
        })
      );
      t.push(tableRow);
      if (rowSpanFieldVal !== '') t.push([{ content: rowSpanFieldVal, colSpan: headers.length - 1 }]);
    }
    this.print(t.toString(), '\n');
  }
}
165
+
166
+ export const logger = new Logger();
167
+
168
// Stage names used as log prefixes throughout the tool.
// Frozen: this is a shared exported constant and must not be mutated.
export const LOG_STAGES = Object.freeze({
  setup: 'setup',
  terraformer: 'terraformer',
  tf: 'terraform',
  info: 'info'
});