dependency-change-report 1.0.3 โ 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/generate-html.mjs +1 -1
- package/lib/index.mjs +409 -292
- package/package.json +5 -2
package/lib/generate-html.mjs
CHANGED
package/lib/index.mjs
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
|
-
import { spawn } from 'child_process';
|
|
4
3
|
import { mkdir, writeFile, readFile, rm } from 'fs/promises';
|
|
5
4
|
import { join, basename } from 'path';
|
|
6
5
|
import { fileURLToPath } from 'url';
|
|
@@ -8,11 +7,103 @@ import { dirname } from 'path';
|
|
|
8
7
|
import semver from 'semver';
|
|
9
8
|
import os from 'os';
|
|
10
9
|
import https from 'https';
|
|
10
|
+
import { execa } from 'execa';
|
|
11
|
+
import cliProgress from 'cli-progress';
|
|
12
|
+
import PQueue from 'p-queue';
|
|
13
|
+
|
|
14
|
+
const time_10min = 10 * 60 * 1000; // 10 minutes in milliseconds
|
|
15
|
+
const time_5min = 5 * 60 * 1000; // 5 minutes in milliseconds
|
|
16
|
+
const time_2min = 2 * 60 * 1000; // 2 minutes in milliseconds
|
|
17
|
+
const time_1min = 60 * 1000; // 1 minute in milliseconds
|
|
18
|
+
|
|
19
|
+
// Global cleanup state
|
|
20
|
+
let globalCleanupState = {
|
|
21
|
+
multibar: null,
|
|
22
|
+
tempDirs: new Set(),
|
|
23
|
+
isShuttingDown: false
|
|
24
|
+
};
|
|
11
25
|
|
|
12
26
|
// Get the current directory
|
|
13
27
|
const __filename = fileURLToPath(import.meta.url);
|
|
14
28
|
const __dirname = dirname(__filename);
|
|
15
29
|
|
|
30
|
+
/**
|
|
31
|
+
* Setup signal handlers for graceful shutdown
|
|
32
|
+
*/
|
|
33
|
+
const setupSignalHandlers = () => {
|
|
34
|
+
const cleanup = async (signal) => {
|
|
35
|
+
if (globalCleanupState.isShuttingDown) {
|
|
36
|
+
return;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
globalCleanupState.isShuttingDown = true;
|
|
40
|
+
console.log(`\n\n๐ Received ${signal}, cleaning up...`);
|
|
41
|
+
|
|
42
|
+
// Stop progress bars and restore cursor
|
|
43
|
+
if (globalCleanupState.multibar) {
|
|
44
|
+
try {
|
|
45
|
+
globalCleanupState.multibar.stop();
|
|
46
|
+
} catch (error) {
|
|
47
|
+
// Ignore errors during cleanup
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// Restore cursor and clear any progress bar artifacts
|
|
52
|
+
process.stdout.write('\x1b[?25h'); // Show cursor
|
|
53
|
+
process.stdout.write('\x1b[0m'); // Reset colors
|
|
54
|
+
|
|
55
|
+
// Clean up temporary directories
|
|
56
|
+
const cleanupPromises = Array.from(globalCleanupState.tempDirs).map(async (dir) => {
|
|
57
|
+
try {
|
|
58
|
+
await rm(dir, { recursive: true, force: true });
|
|
59
|
+
console.log(`๐๏ธ Cleaned up: ${dir}`);
|
|
60
|
+
} catch (error) {
|
|
61
|
+
console.warn(`โ ๏ธ Failed to clean up ${dir}: ${error.message}`);
|
|
62
|
+
}
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
if (cleanupPromises.length > 0) {
|
|
66
|
+
console.log(`๐งน Cleaning up ${cleanupPromises.length} temporary directories...`);
|
|
67
|
+
await Promise.all(cleanupPromises);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
console.log('โ
Cleanup complete');
|
|
71
|
+
process.exit(signal === 'SIGTERM' ? 0 : 1);
|
|
72
|
+
};
|
|
73
|
+
|
|
74
|
+
// Handle various termination signals
|
|
75
|
+
process.on('SIGINT', () => cleanup('SIGINT')); // Ctrl+C
|
|
76
|
+
process.on('SIGTERM', () => cleanup('SIGTERM')); // Termination request
|
|
77
|
+
process.on('SIGHUP', () => cleanup('SIGHUP')); // Terminal closed
|
|
78
|
+
|
|
79
|
+
// Handle uncaught exceptions and unhandled rejections
|
|
80
|
+
process.on('uncaughtException', async (error) => {
|
|
81
|
+
console.error('\n๐ฅ Uncaught Exception:', error);
|
|
82
|
+
await cleanup('uncaughtException');
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
process.on('unhandledRejection', async (reason, promise) => {
|
|
86
|
+
console.error('\n๐ฅ Unhandled Rejection at:', promise, 'reason:', reason);
|
|
87
|
+
await cleanup('unhandledRejection');
|
|
88
|
+
});
|
|
89
|
+
};
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Register a temporary directory for cleanup
|
|
93
|
+
* @param {string} dir - Directory path to register for cleanup
|
|
94
|
+
*/
|
|
95
|
+
const registerTempDir = (dir) => {
|
|
96
|
+
globalCleanupState.tempDirs.add(dir);
|
|
97
|
+
};
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Unregister a temporary directory from cleanup (when manually cleaned)
|
|
101
|
+
* @param {string} dir - Directory path to unregister
|
|
102
|
+
*/
|
|
103
|
+
const unregisterTempDir = (dir) => {
|
|
104
|
+
globalCleanupState.tempDirs.delete(dir);
|
|
105
|
+
};
|
|
106
|
+
|
|
16
107
|
/**
|
|
17
108
|
* Execute a command and return its output
|
|
18
109
|
* @param {string} command - The command to execute
|
|
@@ -21,109 +112,29 @@ const __dirname = dirname(__filename);
|
|
|
21
112
|
* @param {number} timeout - Timeout in milliseconds (default: 5 minutes)
|
|
22
113
|
* @returns {Promise<string>} - Command output
|
|
23
114
|
*/
|
|
24
|
-
const executeCommand = (command, args, cwd, timeout =
|
|
25
|
-
|
|
26
|
-
const
|
|
115
|
+
const executeCommand = async (command, args, cwd, timeout = time_5min) => {
|
|
116
|
+
try {
|
|
117
|
+
const result = await execa(command, args, {
|
|
27
118
|
cwd,
|
|
28
|
-
|
|
29
|
-
|
|
119
|
+
timeout,
|
|
120
|
+
cleanup: true,
|
|
121
|
+
killSignal: 'SIGTERM',
|
|
122
|
+
forceKillAfterTimeout: 5000, // Force kill after 5 seconds if SIGTERM doesn't work
|
|
123
|
+
stdio: 'pipe'
|
|
30
124
|
});
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
// Remove all listeners to prevent memory leaks and hanging
|
|
44
|
-
childProcess.removeAllListeners();
|
|
45
|
-
|
|
46
|
-
// Ensure streams are properly closed
|
|
47
|
-
if (childProcess.stdout) {
|
|
48
|
-
childProcess.stdout.removeAllListeners();
|
|
49
|
-
}
|
|
50
|
-
if (childProcess.stderr) {
|
|
51
|
-
childProcess.stderr.removeAllListeners();
|
|
52
|
-
}
|
|
53
|
-
};
|
|
54
|
-
|
|
55
|
-
// Kill process function
|
|
56
|
-
const killProcess = () => {
|
|
57
|
-
try {
|
|
58
|
-
// Just kill the process directly, don't try process group
|
|
59
|
-
childProcess.kill('SIGTERM');
|
|
60
|
-
|
|
61
|
-
// If SIGTERM doesn't work after a short delay, use SIGKILL
|
|
62
|
-
setTimeout(() => {
|
|
63
|
-
try {
|
|
64
|
-
if (!childProcess.killed) {
|
|
65
|
-
childProcess.kill('SIGKILL');
|
|
66
|
-
}
|
|
67
|
-
} catch (e) {
|
|
68
|
-
// Process might already be dead
|
|
69
|
-
}
|
|
70
|
-
}, 1000);
|
|
71
|
-
} catch (e) {
|
|
72
|
-
// Process might already be dead
|
|
73
|
-
}
|
|
74
|
-
};
|
|
75
|
-
|
|
76
|
-
// Set up timeout with proper cleanup
|
|
77
|
-
timeoutId = setTimeout(() => {
|
|
78
|
-
if (!isResolved) {
|
|
79
|
-
isResolved = true;
|
|
80
|
-
killProcess();
|
|
81
|
-
cleanup();
|
|
82
|
-
reject(new Error(`Command timed out after ${timeout}ms: ${command} ${args.join(' ')}`));
|
|
83
|
-
}
|
|
84
|
-
}, timeout);
|
|
85
|
-
|
|
86
|
-
// Handle stdout data
|
|
87
|
-
if (childProcess.stdout) {
|
|
88
|
-
childProcess.stdout.on('data', (data) => {
|
|
89
|
-
stdout += data.toString();
|
|
90
|
-
});
|
|
91
|
-
}
|
|
92
|
-
|
|
93
|
-
// Handle stderr data
|
|
94
|
-
if (childProcess.stderr) {
|
|
95
|
-
childProcess.stderr.on('data', (data) => {
|
|
96
|
-
stderr += data.toString();
|
|
97
|
-
});
|
|
125
|
+
|
|
126
|
+
return result.stdout;
|
|
127
|
+
} catch (error) {
|
|
128
|
+
if (error.timedOut) {
|
|
129
|
+
throw new Error(`Command timed out after ${timeout}ms: ${command} ${args.join(' ')}`);
|
|
130
|
+
} else if (error.killed) {
|
|
131
|
+
throw new Error(`Command was killed: ${command} ${args.join(' ')}`);
|
|
132
|
+
} else if (error.exitCode !== 0) {
|
|
133
|
+
throw new Error(`Command failed with code ${error.exitCode}: ${error.stderr}`);
|
|
134
|
+
} else {
|
|
135
|
+
throw error;
|
|
98
136
|
}
|
|
99
|
-
|
|
100
|
-
// Handle process close (preferred over exit)
|
|
101
|
-
childProcess.on('close', (code, signal) => {
|
|
102
|
-
if (!isResolved) {
|
|
103
|
-
isResolved = true;
|
|
104
|
-
cleanup();
|
|
105
|
-
|
|
106
|
-
if (signal) {
|
|
107
|
-
reject(new Error(`Command was killed with signal ${signal}: ${command} ${args.join(' ')}`));
|
|
108
|
-
} else if (code !== 0) {
|
|
109
|
-
console.warn(`Warning: Command ${command} ${args.join(' ')} failed with code ${code}`);
|
|
110
|
-
console.warn(`Error: ${stderr}`);
|
|
111
|
-
reject(new Error(`Command failed with code ${code}: ${stderr}`));
|
|
112
|
-
} else {
|
|
113
|
-
resolve(stdout);
|
|
114
|
-
}
|
|
115
|
-
}
|
|
116
|
-
});
|
|
117
|
-
|
|
118
|
-
// Handle process errors
|
|
119
|
-
childProcess.on('error', (error) => {
|
|
120
|
-
if (!isResolved) {
|
|
121
|
-
isResolved = true;
|
|
122
|
-
cleanup();
|
|
123
|
-
reject(error);
|
|
124
|
-
}
|
|
125
|
-
});
|
|
126
|
-
});
|
|
137
|
+
}
|
|
127
138
|
};
|
|
128
139
|
|
|
129
140
|
/**
|
|
@@ -135,13 +146,19 @@ const executeCommand = (command, args, cwd, timeout = 300000) => {
|
|
|
135
146
|
*/
|
|
136
147
|
const cloneRepo = async (repoUrl, ref, targetDir) => {
|
|
137
148
|
try {
|
|
138
|
-
|
|
149
|
+
// Use shallow clone with depth=1 and single-branch for faster cloning
|
|
139
150
|
// Use --quiet to avoid printing credentials in logs
|
|
140
|
-
|
|
141
|
-
await executeCommand('git', ['
|
|
151
|
+
// 2 minute timeout for very large repositories
|
|
152
|
+
await executeCommand('git', ['clone', '--quiet', '--depth=1', '--single-branch', '--branch', ref, repoUrl, targetDir], undefined, time_2min);
|
|
142
153
|
} catch (error) {
|
|
143
|
-
|
|
144
|
-
|
|
154
|
+
// If shallow clone with specific branch fails, try traditional approach
|
|
155
|
+
try {
|
|
156
|
+
// Full clone with 5 minute timeout for very large repos
|
|
157
|
+
await executeCommand('git', ['clone', '--quiet', repoUrl, targetDir], undefined, time_5min);
|
|
158
|
+
await executeCommand('git', ['checkout', ref], targetDir, time_1min);
|
|
159
|
+
} catch (fallbackError) {
|
|
160
|
+
throw fallbackError;
|
|
161
|
+
}
|
|
145
162
|
}
|
|
146
163
|
};
|
|
147
164
|
|
|
@@ -152,10 +169,8 @@ const cloneRepo = async (repoUrl, ref, targetDir) => {
|
|
|
152
169
|
*/
|
|
153
170
|
const installDependencies = async (dir) => {
|
|
154
171
|
try {
|
|
155
|
-
console.log(`Installing dependencies in ${dir}...`);
|
|
156
172
|
await executeCommand('npm', ['install'], dir);
|
|
157
173
|
} catch (error) {
|
|
158
|
-
console.warn(`Warning: Failed to install dependencies: ${error.message}`);
|
|
159
174
|
throw error;
|
|
160
175
|
}
|
|
161
176
|
};
|
|
@@ -168,7 +183,6 @@ const installDependencies = async (dir) => {
|
|
|
168
183
|
*/
|
|
169
184
|
const getDependencies = async (dir, namespace = null) => {
|
|
170
185
|
try {
|
|
171
|
-
console.log(`Getting dependency list from ${dir}...`);
|
|
172
186
|
const output = await executeCommand('npm', ['ls', '--all', '--omit=dev', '--json'], dir);
|
|
173
187
|
const dependencies = JSON.parse(output).dependencies || {};
|
|
174
188
|
|
|
@@ -214,7 +228,7 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
214
228
|
}
|
|
215
229
|
}
|
|
216
230
|
} catch (err) {
|
|
217
|
-
|
|
231
|
+
// Silently skip nested dependencies we can't read
|
|
218
232
|
}
|
|
219
233
|
}
|
|
220
234
|
}
|
|
@@ -225,13 +239,13 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
225
239
|
}
|
|
226
240
|
}
|
|
227
241
|
} catch (err) {
|
|
228
|
-
|
|
242
|
+
// Silently skip dependencies we can't read
|
|
229
243
|
}
|
|
230
244
|
}
|
|
231
245
|
|
|
232
246
|
return dependencies;
|
|
233
247
|
} catch (error) {
|
|
234
|
-
|
|
248
|
+
// Silently return empty object if we can't get dependencies
|
|
235
249
|
// Return empty object if we can't get dependencies
|
|
236
250
|
return {};
|
|
237
251
|
}
|
|
@@ -246,34 +260,26 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
246
260
|
*/
|
|
247
261
|
const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
248
262
|
try {
|
|
249
|
-
console.log(`[CI] Checking GitHub Actions for ${repoUrl} at version ${version}`);
|
|
250
|
-
|
|
251
263
|
// Check if it's a GitHub repository
|
|
252
264
|
if (!repoUrl.includes('github.com')) {
|
|
253
|
-
console.log(`[CI] Not a GitHub repository: ${repoUrl}`);
|
|
254
265
|
return null;
|
|
255
266
|
}
|
|
256
267
|
|
|
257
268
|
// Extract owner and repo from URL
|
|
258
269
|
const match = repoUrl.match(/github\.com[:/]([^/]+)\/([^/.]+)/);
|
|
259
270
|
if (!match) {
|
|
260
|
-
console.log(`[CI] Could not extract owner/repo from URL: ${repoUrl}`);
|
|
261
271
|
return null;
|
|
262
272
|
}
|
|
263
273
|
|
|
264
274
|
const [, owner, repo] = match;
|
|
265
|
-
console.log(`[CI] Extracted GitHub repo: ${owner}/${repo}`);
|
|
266
275
|
|
|
267
276
|
// Use provided commit SHA or try to resolve it
|
|
268
|
-
if (commitSha) {
|
|
269
|
-
console.log(`[CI] Using provided commit SHA: ${commitSha.substring(0, 7)}`);
|
|
270
|
-
} else {
|
|
271
|
-
console.log(`[CI] No commit SHA provided, trying to resolve version ${version} via GitHub API`);
|
|
277
|
+
if (!commitSha) {
|
|
272
278
|
|
|
273
279
|
// Try to get commit SHA from GitHub API for the tag/ref
|
|
280
|
+
|
|
274
281
|
try {
|
|
275
282
|
const refUrl = `https://api.github.com/repos/${owner}/${repo}/git/refs/tags/${version}`;
|
|
276
|
-
console.log(`[CI] Requesting tag info: ${refUrl}`);
|
|
277
283
|
const refData = await makeGitHubApiRequest(refUrl);
|
|
278
284
|
|
|
279
285
|
if (refData && refData.object) {
|
|
@@ -304,7 +310,6 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
304
310
|
}
|
|
305
311
|
|
|
306
312
|
if (!commitSha) {
|
|
307
|
-
console.warn(`[CI] Could not find commit SHA for ${owner}/${repo} at version ${version}`);
|
|
308
313
|
return {
|
|
309
314
|
status: 'unknown',
|
|
310
315
|
error: 'Could not find commit SHA for version'
|
|
@@ -314,19 +319,15 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
314
319
|
|
|
315
320
|
// Get workflow runs for the commit
|
|
316
321
|
const runsUrl = `https://api.github.com/repos/${owner}/${repo}/actions/runs?head_sha=${commitSha}`;
|
|
317
|
-
console.log(`[CI] Requesting workflow runs: ${runsUrl}`);
|
|
318
322
|
const runsData = await makeGitHubApiRequest(runsUrl);
|
|
319
323
|
|
|
320
324
|
if (!runsData || !runsData.workflow_runs || runsData.workflow_runs.length === 0) {
|
|
321
|
-
console.log(`[CI] No workflow runs found for ${owner}/${repo} at commit ${commitSha.substring(0, 7)}`);
|
|
322
325
|
return {
|
|
323
326
|
status: 'no_workflows',
|
|
324
327
|
message: 'No GitHub Actions workflows found for this commit'
|
|
325
328
|
};
|
|
326
329
|
}
|
|
327
330
|
|
|
328
|
-
console.log(`[CI] Found ${runsData.workflow_runs.length} workflow runs for ${owner}/${repo}`);
|
|
329
|
-
|
|
330
331
|
// Analyze the workflow runs
|
|
331
332
|
const runs = runsData.workflow_runs;
|
|
332
333
|
const latestRun = runs[0]; // Most recent run
|
|
@@ -396,7 +397,6 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
396
397
|
};
|
|
397
398
|
|
|
398
399
|
} catch (error) {
|
|
399
|
-
console.warn(`Warning: Could not get GitHub Actions status for ${repoUrl} at ${version}: ${error.message}`);
|
|
400
400
|
return {
|
|
401
401
|
status: 'error',
|
|
402
402
|
error: error.message
|
|
@@ -487,7 +487,6 @@ const getRepositoryUrl = async (packageDir) => {
|
|
|
487
487
|
|
|
488
488
|
return null;
|
|
489
489
|
} catch (error) {
|
|
490
|
-
console.warn(`Warning: Could not get repository URL for ${packageDir}: ${error.message}`);
|
|
491
490
|
return null;
|
|
492
491
|
}
|
|
493
492
|
};
|
|
@@ -501,18 +500,23 @@ const getRepositoryUrl = async (packageDir) => {
|
|
|
501
500
|
* @returns {Promise<Array>} - Array of commit objects
|
|
502
501
|
*/
|
|
503
502
|
const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
503
|
+
let tempDir = null;
|
|
504
|
+
|
|
504
505
|
try {
|
|
505
506
|
// Create a directory for the repository within the repos directory
|
|
506
507
|
const packageName = basename(repoUrl, '.git');
|
|
507
|
-
|
|
508
|
+
tempDir = join(reposDir, `${packageName}-history`);
|
|
508
509
|
await mkdir(tempDir, { recursive: true });
|
|
509
510
|
|
|
511
|
+
// Register this temp directory for cleanup
|
|
512
|
+
registerTempDir(tempDir);
|
|
513
|
+
|
|
510
514
|
// Clone the repository with optimizations for faster cloning
|
|
511
|
-
console.log(`Cloning ${repoUrl} into ${tempDir} to get commit history...`);
|
|
512
515
|
// Use --quiet to avoid printing credentials in logs
|
|
513
516
|
// Use --depth=1 and --single-branch for faster cloning, then fetch what we need
|
|
514
517
|
try {
|
|
515
|
-
|
|
518
|
+
// 2 minute timeout for very large repositories
|
|
519
|
+
await executeCommand('git', ['clone', '--quiet', '--depth=1', '--single-branch', repoUrl, tempDir], undefined, time_2min);
|
|
516
520
|
} catch (error) {
|
|
517
521
|
// If the repository doesn't exist or can't be accessed, throw a more specific error
|
|
518
522
|
if (error.message.includes("Repository not found") ||
|
|
@@ -525,9 +529,9 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
525
529
|
|
|
526
530
|
// Fetch all tags to ensure we have the version references
|
|
527
531
|
try {
|
|
528
|
-
|
|
532
|
+
// 2 minute timeout for fetching tags from large repositories
|
|
533
|
+
await executeCommand('git', ['fetch', '--tags', '--force', '--unshallow'], tempDir, time_2min);
|
|
529
534
|
} catch (error) {
|
|
530
|
-
console.warn(`Warning: Failed to fetch tags: ${error.message}`);
|
|
531
535
|
// Continue without tags if fetch fails
|
|
532
536
|
}
|
|
533
537
|
|
|
@@ -545,9 +549,8 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
545
549
|
const checkRef = async (ref) => {
|
|
546
550
|
// Make sure we're in the right directory and have fetched everything
|
|
547
551
|
try {
|
|
548
|
-
await executeCommand('git', ['fetch', '--all'], tempDir,
|
|
552
|
+
await executeCommand('git', ['fetch', '--all'], tempDir, time_1min); // 1 minute timeout
|
|
549
553
|
} catch (error) {
|
|
550
|
-
console.warn(`Warning: Failed to fetch all refs: ${error.message}`);
|
|
551
554
|
// Continue without full fetch
|
|
552
555
|
}
|
|
553
556
|
|
|
@@ -568,7 +571,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
568
571
|
for (const pattern of uniquePatterns) {
|
|
569
572
|
try {
|
|
570
573
|
const result = await executeCommand('git', ['rev-parse', '--verify', pattern], tempDir);
|
|
571
|
-
console.log(`Found reference ${ref} as ${pattern}`);
|
|
572
574
|
return { ref: pattern, hash: result.trim() };
|
|
573
575
|
} catch (error) {
|
|
574
576
|
// Continue to next pattern
|
|
@@ -583,8 +585,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
583
585
|
// Find commit with version bump
|
|
584
586
|
const findVersionCommit = async (version) => {
|
|
585
587
|
try {
|
|
586
|
-
console.log(`Looking for commit that bumps version to ${version}...`);
|
|
587
|
-
|
|
588
588
|
// Look for version in commit messages (common patterns)
|
|
589
589
|
const patterns = [
|
|
590
590
|
`version bump to ${version}`,
|
|
@@ -605,7 +605,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
605
605
|
);
|
|
606
606
|
|
|
607
607
|
if (result.trim()) {
|
|
608
|
-
console.log(`Found commit for version ${version} using pattern: ${pattern}`);
|
|
609
608
|
return { ref: version, hash: result.trim() };
|
|
610
609
|
}
|
|
611
610
|
} catch (e) {
|
|
@@ -622,7 +621,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
622
621
|
);
|
|
623
622
|
|
|
624
623
|
if (result.trim()) {
|
|
625
|
-
console.log(`Found commit that changes package.json version to ${version}`);
|
|
626
624
|
return { ref: version, hash: result.trim() };
|
|
627
625
|
}
|
|
628
626
|
} catch (e) {
|
|
@@ -631,7 +629,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
631
629
|
|
|
632
630
|
return null;
|
|
633
631
|
} catch (error) {
|
|
634
|
-
console.warn(`Error finding version commit: ${error.message}`);
|
|
635
632
|
return null;
|
|
636
633
|
}
|
|
637
634
|
};
|
|
@@ -642,18 +639,15 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
642
639
|
|
|
643
640
|
// If direct references not found, try to find commits with version bumps
|
|
644
641
|
if (!resolvedOldRef) {
|
|
645
|
-
console.log(`Reference ${oldVersion} not found directly, looking for version bump commit...`);
|
|
646
642
|
resolvedOldRef = await findVersionCommit(oldVersion);
|
|
647
643
|
}
|
|
648
644
|
|
|
649
645
|
if (!resolvedNewRef) {
|
|
650
|
-
console.log(`Reference ${newVersion} not found directly, looking for version bump commit...`);
|
|
651
646
|
resolvedNewRef = await findVersionCommit(newVersion);
|
|
652
647
|
}
|
|
653
648
|
|
|
654
649
|
// If still no references found, try to get all tags and find closest matches
|
|
655
650
|
if (!resolvedOldRef || !resolvedNewRef) {
|
|
656
|
-
console.log('Trying to find closest version matches from available tags...');
|
|
657
651
|
try {
|
|
658
652
|
const tagsOutput = await executeCommand('git', ['tag', '-l'], tempDir);
|
|
659
653
|
const availableTags = tagsOutput.split('\n').filter(tag => tag.trim());
|
|
@@ -669,7 +663,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
669
663
|
try {
|
|
670
664
|
const result = await executeCommand('git', ['rev-parse', '--verify', oldMatch], tempDir);
|
|
671
665
|
resolvedOldRef = { ref: oldMatch, hash: result.trim() };
|
|
672
|
-
console.log(`Found closest match for ${oldVersion}: ${oldMatch}`);
|
|
673
666
|
} catch (e) {
|
|
674
667
|
// Continue to fallback
|
|
675
668
|
}
|
|
@@ -687,20 +680,18 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
687
680
|
try {
|
|
688
681
|
const result = await executeCommand('git', ['rev-parse', '--verify', newMatch], tempDir);
|
|
689
682
|
resolvedNewRef = { ref: newMatch, hash: result.trim() };
|
|
690
|
-
console.log(`Found closest match for ${newVersion}: ${newMatch}`);
|
|
691
683
|
} catch (e) {
|
|
692
684
|
// Continue to fallback
|
|
693
685
|
}
|
|
694
686
|
}
|
|
695
687
|
}
|
|
696
688
|
} catch (error) {
|
|
697
|
-
|
|
689
|
+
// Continue to fallback
|
|
698
690
|
}
|
|
699
691
|
}
|
|
700
692
|
|
|
701
693
|
// Last resort: if we can't find specific versions, use default branch for newer and first commit for older
|
|
702
694
|
if (!resolvedOldRef && !resolvedNewRef) {
|
|
703
|
-
console.warn(`Warning: Could not find references for both ${oldVersion} and ${newVersion}. Using first and latest commits instead.`);
|
|
704
695
|
try {
|
|
705
696
|
// Get the first commit
|
|
706
697
|
const firstCommit = await executeCommand('git', ['rev-list', '--max-parents=0', 'HEAD'], tempDir);
|
|
@@ -709,40 +700,27 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
709
700
|
// Get the latest commit on default branch
|
|
710
701
|
const latestCommit = await executeCommand('git', ['rev-parse', 'HEAD'], tempDir);
|
|
711
702
|
resolvedNewRef = { ref: 'latest-commit', hash: latestCommit.trim() };
|
|
712
|
-
|
|
713
|
-
console.log(`Using first commit (${resolvedOldRef.hash.substring(0, 7)}) and latest commit (${resolvedNewRef.hash.substring(0, 7)}) as fallback`);
|
|
714
703
|
} catch (error) {
|
|
715
|
-
console.warn(`Warning: Failed to get first and latest commits: ${error.message}`);
|
|
716
704
|
return [];
|
|
717
705
|
}
|
|
718
706
|
} else if (!resolvedOldRef) {
|
|
719
|
-
console.warn(`Warning: Could not find reference for ${oldVersion}. Using first commit instead.`);
|
|
720
707
|
try {
|
|
721
708
|
// Get the first commit
|
|
722
709
|
const firstCommit = await executeCommand('git', ['rev-list', '--max-parents=0', 'HEAD'], tempDir);
|
|
723
710
|
resolvedOldRef = { ref: 'first-commit', hash: firstCommit.trim() };
|
|
724
|
-
console.log(`Using first commit (${resolvedOldRef.hash.substring(0, 7)}) as fallback for ${oldVersion}`);
|
|
725
711
|
} catch (error) {
|
|
726
|
-
console.warn(`Warning: Failed to get first commit: ${error.message}`);
|
|
727
712
|
return [];
|
|
728
713
|
}
|
|
729
714
|
} else if (!resolvedNewRef) {
|
|
730
|
-
console.warn(`Warning: Could not find reference for ${newVersion}. Using latest commit instead.`);
|
|
731
715
|
try {
|
|
732
716
|
// Get the latest commit on default branch
|
|
733
717
|
const latestCommit = await executeCommand('git', ['rev-parse', 'HEAD'], tempDir);
|
|
734
718
|
resolvedNewRef = { ref: 'latest-commit', hash: latestCommit.trim() };
|
|
735
|
-
console.log(`Using latest commit (${resolvedNewRef.hash.substring(0, 7)}) as fallback for ${newVersion}`);
|
|
736
719
|
} catch (error) {
|
|
737
|
-
console.warn(`Warning: Failed to get latest commit: ${error.message}`);
|
|
738
720
|
return [];
|
|
739
721
|
}
|
|
740
722
|
}
|
|
741
723
|
|
|
742
|
-
// Get commit history between versions
|
|
743
|
-
// Format: hash,author,date,message
|
|
744
|
-
console.log(`Getting commits between ${resolvedOldRef.ref} (${resolvedOldRef.hash.substring(0, 7)}) and ${resolvedNewRef.ref} (${resolvedNewRef.hash.substring(0, 7)})...`);
|
|
745
|
-
|
|
746
724
|
// Check if the order is correct (older should come before newer)
|
|
747
725
|
try {
|
|
748
726
|
// Try to determine which commit came first
|
|
@@ -758,13 +736,11 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
758
736
|
|
|
759
737
|
if (mergeBase.trim() === resolvedNewRef.hash.trim()) {
|
|
760
738
|
// Order is reversed, swap them
|
|
761
|
-
console.log('Detected reversed version order, swapping references...');
|
|
762
739
|
const temp = resolvedOldRef;
|
|
763
740
|
resolvedOldRef = resolvedNewRef;
|
|
764
741
|
resolvedNewRef = temp;
|
|
765
742
|
}
|
|
766
743
|
} catch (error) {
|
|
767
|
-
console.warn(`Warning: Could not determine commit order: ${error.message}`);
|
|
768
744
|
// Continue with original order
|
|
769
745
|
}
|
|
770
746
|
|
|
@@ -776,17 +752,14 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
776
752
|
tempDir
|
|
777
753
|
);
|
|
778
754
|
} catch (error) {
|
|
779
|
-
console.warn(`Warning: Failed to get commit log: ${error.message}`);
|
|
780
755
|
// Try with a different approach - get all commits and filter
|
|
781
756
|
try {
|
|
782
|
-
console.log('Trying alternative approach to get commit history...');
|
|
783
757
|
output = await executeCommand(
|
|
784
758
|
'git',
|
|
785
759
|
['log', '--pretty=format:%H,%an,%ad,%s'],
|
|
786
760
|
tempDir
|
|
787
761
|
);
|
|
788
762
|
} catch (e) {
|
|
789
|
-
console.warn(`Warning: Alternative approach also failed: ${e.message}`);
|
|
790
763
|
return [];
|
|
791
764
|
}
|
|
792
765
|
}
|
|
@@ -805,11 +778,22 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
805
778
|
});
|
|
806
779
|
|
|
807
780
|
// Clean up
|
|
808
|
-
|
|
781
|
+
if (tempDir) {
|
|
782
|
+
await rm(tempDir, { recursive: true, force: true });
|
|
783
|
+
unregisterTempDir(tempDir);
|
|
784
|
+
}
|
|
809
785
|
|
|
810
786
|
return commits;
|
|
811
787
|
} catch (error) {
|
|
812
|
-
|
|
788
|
+
// Clean up on error
|
|
789
|
+
if (tempDir) {
|
|
790
|
+
try {
|
|
791
|
+
await rm(tempDir, { recursive: true, force: true });
|
|
792
|
+
unregisterTempDir(tempDir);
|
|
793
|
+
} catch (cleanupError) {
|
|
794
|
+
// Ignore cleanup errors
|
|
795
|
+
}
|
|
796
|
+
}
|
|
813
797
|
return [];
|
|
814
798
|
}
|
|
815
799
|
};
|
|
@@ -830,12 +814,9 @@ const getPackageLockChanges = async (olderVersionDir, newerVersionDir) => {
|
|
|
830
814
|
await readFile(oldLockPath);
|
|
831
815
|
await readFile(newLockPath);
|
|
832
816
|
} catch (error) {
|
|
833
|
-
console.log('package-lock.json not found in one or both versions, skipping lock file analysis');
|
|
834
817
|
return { changedPackages: [], packageVersions: {} };
|
|
835
818
|
}
|
|
836
819
|
|
|
837
|
-
console.log('Found package-lock.json in both versions, analyzing changes...');
|
|
838
|
-
|
|
839
820
|
// Read and parse both lock files
|
|
840
821
|
const oldLock = JSON.parse(await readFile(oldLockPath, 'utf8'));
|
|
841
822
|
const newLock = JSON.parse(await readFile(newLockPath, 'utf8'));
|
|
@@ -882,14 +863,12 @@ const getPackageLockChanges = async (olderVersionDir, newerVersionDir) => {
|
|
|
882
863
|
}
|
|
883
864
|
}
|
|
884
865
|
|
|
885
|
-
console.log(`Found ${changedPackages.size} packages with changes in package-lock.json`);
|
|
886
866
|
return {
|
|
887
867
|
changedPackages: Array.from(changedPackages),
|
|
888
868
|
packageVersions
|
|
889
869
|
};
|
|
890
870
|
|
|
891
871
|
} catch (error) {
|
|
892
|
-
console.warn(`Warning: Failed to analyze package-lock.json changes: ${error.message}`);
|
|
893
872
|
return { changedPackages: [], packageVersions: {} };
|
|
894
873
|
}
|
|
895
874
|
};
|
|
@@ -949,107 +928,220 @@ const extractFromDependencies = (dependencies, packages) => {
|
|
|
949
928
|
};
|
|
950
929
|
|
|
951
930
|
/**
|
|
952
|
-
*
|
|
953
|
-
* @param {
|
|
931
|
+
* Process a single dependency for changelog and CI status with progress updates
|
|
932
|
+
* @param {Object} dep - Dependency object
|
|
954
933
|
* @param {string} newerVersionDir - Directory of the newer version
|
|
955
934
|
* @param {string} reposDir - Repository directory
|
|
956
|
-
* @
|
|
935
|
+
/**
 * Process a single dependency: resolve its repository URL, collect the commit
 * history between the old and new versions, and fetch GitHub Actions CI status.
 * Progress is reported on an individual bar attached to the shared multibar.
 * @param {Object} dep - Dependency descriptor ({ name, oldVersion, newVersion })
 * @param {string} newerVersionDir - Directory of the newer version checkout
 * @param {string} reposDir - Directory used for cloned repositories
 * @param {Object} multibar - CLI multi progress bar instance
 * @param {number} maxNameLength - Maximum name length for consistent padding
 * @returns {Promise<Object>} - Object with changelog, error, and CI status
 */
const processSingleDependency = async (dep, newerVersionDir, reposDir, multibar, maxNameLength) => {
  const result = {
    name: dep.name,
    changelog: null,
    error: null,
    ciStatus: null
  };

  // Format the name with consistent padding, truncating if necessary
  let displayName = dep.name;
  if (displayName.length > maxNameLength) {
    displayName = displayName.substring(0, maxNameLength - 3) + '...';
  }
  displayName = displayName.padEnd(maxNameLength);

  // Create individual progress bar for this dependency
  const depBar = multibar.create(100, 0, { name: displayName, status: 'Starting...' });

  depBar.update(10, { status: 'Getting repo URL...' });
  const packageDir = join(newerVersionDir, 'node_modules', dep.name);
  const repoUrl = await getRepositoryUrl(packageDir);

  if (!repoUrl) {
    depBar.update(100, { status: '❌ No repo URL' });
    result.error = {
      repoUrl: null,
      oldVersion: dep.oldVersion,
      newVersion: dep.newVersion,
      error: "No repository URL found"
    };
    return result;
  }

  depBar.update(20, { status: 'Cleaning repo URL...' });

  // Clean the repository URL and convert to git URL for authentication
  let cleanRepoUrl = repoUrl.replace(/^git\+/, '');

  // Remove .git extension if present (we'll add it back later if needed)
  cleanRepoUrl = cleanRepoUrl.replace(/\.git$/, '');

  // Handle hosting shorthands (github:user/repo, gitlab:user/repo, bitbucket:user/repo).
  // BUG FIX: the previous code matched gitlab:/bitbucket: shorthands but rewrote
  // them all to git@github.com:, pointing at the wrong host entirely.
  const shorthand = cleanRepoUrl.match(/^(github|gitlab|bitbucket):(.+)$/);
  if (shorthand) {
    const hostByShorthand = {
      github: 'github.com',
      gitlab: 'gitlab.com',
      bitbucket: 'bitbucket.org'
    };
    cleanRepoUrl = `git@${hostByShorthand[shorthand[1]]}:${shorthand[2]}`;
  }
  // Convert https GitHub URLs to git URLs
  else if (cleanRepoUrl.match(/^https?:\/\/github\.com\//)) {
    cleanRepoUrl = `git@github.com:${cleanRepoUrl.replace(/^https?:\/\/github\.com\//, '')}`;
  }
  // Handle git:// protocol URLs
  else if (cleanRepoUrl.match(/^git:\/\/github\.com\//)) {
    cleanRepoUrl = `git@github.com:${cleanRepoUrl.replace(/^git:\/\/github\.com\//, '')}`;
  }
  // Ensure URL is in the correct format for GitHub
  else if (!cleanRepoUrl.match(/^git@github\.com:/)) {
    // If it's not already in the git@github.com format, try to convert it
    const parts = cleanRepoUrl.split('/');
    const repoName = parts.pop();
    const orgName = parts.pop();
    if (orgName && repoName) {
      cleanRepoUrl = `git@github.com:${orgName}/${repoName}`;
    }
  }

  // Add .git extension if not present
  if (!cleanRepoUrl.endsWith('.git')) {
    cleanRepoUrl += '.git';
  }

  depBar.update(30, { status: 'Getting commits...' });

  let commits = [];
  try {
    commits = await getCommitHistory(cleanRepoUrl, dep.oldVersion, dep.newVersion, reposDir);
    if (commits.length > 0) {
      depBar.update(70, { status: `Found ${commits.length} commits` });
      result.changelog = {
        repoUrl: cleanRepoUrl,
        oldVersion: dep.oldVersion,
        newVersion: dep.newVersion,
        commits
      };
    } else {
      depBar.update(70, { status: '⚠️ No commits found' });
      result.error = {
        repoUrl: cleanRepoUrl,
        oldVersion: dep.oldVersion,
        newVersion: dep.newVersion,
        error: "No commits found between versions"
      };
    }
  } catch (error) {
    depBar.update(70, { status: '❌ Commit error' });
    result.error = {
      repoUrl: cleanRepoUrl,
      oldVersion: dep.oldVersion,
      newVersion: dep.newVersion,
      error: error.message
    };
  }

  // Get GitHub Actions status for the new version
  depBar.update(80, { status: 'Getting CI status...' });
  try {
    // Guard against an empty commit list (the old code dereferenced commits[0] blindly)
    const best = commits.length > 0 ? commits[0].hash : null;
    const actionsStatus = await getGitHubActionsStatus(cleanRepoUrl, dep.newVersion, best);
    if (actionsStatus) {
      result.ciStatus = actionsStatus;
      depBar.update(100, { status: `✅ Complete (CI: ${actionsStatus.status})` });
    } else {
      depBar.update(100, { status: '✅ Complete (no CI)' });
    }
  } catch (error) {
    // Silently ignore CI status errors
    depBar.update(100, { status: '✅ Complete (CI error)' });
  }

  return result;
};
|
|
1058
|
+
|
|
1059
|
+
/**
 * Process dependencies in parallel with concurrency limit using a proper queue.
 * Renders one progress bar per dependency on a shared multibar.
 * @param {Array} dependencies - Array of dependencies to process
 * @param {string} newerVersionDir - Directory of the newer version
 * @param {string} reposDir - Repository directory
 * @param {number} concurrency - Maximum number of concurrent operations
 * @returns {Promise<Object>} - Object with changelogs, errors, and CI status
 */
const processInParallel = async (dependencies, newerVersionDir, reposDir, concurrency = 5) => {
  const changelogs = {};
  const errors = {};
  const ciStatus = {};

  if (dependencies.length === 0) {
    return { changelogs, errors, ciStatus };
  }

  // Calculate the maximum name length for consistent padding
  const maxNameLength = Math.min(
    Math.max(...dependencies.map(dep => dep.name.length)),
    40 // Reasonable maximum to prevent extremely long lines
  );

  // Create multi progress bar
  const multibar = new cliProgress.MultiBar({
    clearOnComplete: false,
    hideCursor: true,
    format: '{name} |{bar}| {percentage}% | {status}'
  }, {
    barCompleteChar: '\u2588',
    barIncompleteChar: '\u2591'
  });

  // Register multibar for cleanup
  globalCleanupState.multibar = multibar;

  console.log(`\nProcessing ${dependencies.length} dependencies with concurrency limit of ${concurrency}:\n`);

  try {
    // Create queue with concurrency limit
    const queue = new PQueue({ concurrency });

    // Add all dependencies to the queue
    const promises = dependencies.map(dep =>
      queue.add(() => processSingleDependency(dep, newerVersionDir, reposDir, multibar, maxNameLength))
    );

    // Wait for all tasks to complete
    const results = await Promise.all(promises);

    // Collect results
    for (const result of results) {
      if (result.changelog) {
        changelogs[result.name] = result.changelog;
      }
      if (result.error) {
        errors[result.name] = result.error;
      }
      if (result.ciStatus) {
        ciStatus[result.name] = result.ciStatus;
      }
    }
  } finally {
    // BUG FIX: the old code only ran this teardown on the success path. If any
    // queued task rejected, Promise.all threw and the terminal was left with a
    // hidden cursor, a live multibar, and a stale cleanup-state reference.
    multibar.stop();

    // Unregister multibar from cleanup
    globalCleanupState.multibar = null;

    // Ensure cursor is visible after progress bars
    process.stdout.write('\x1b[?25h'); // Show cursor
  }

  console.log(`\n✅ Completed processing ${dependencies.length} dependencies\n`);

  return { changelogs, errors, ciStatus };
};
|
|
1052
1133
|
|
|
1134
|
+
/**
|
|
1135
|
+
* Get changelog and CI status for upgraded dependencies
|
|
1136
|
+
* @param {Array} upgradedDeps - Array of upgraded dependencies
|
|
1137
|
+
* @param {string} newerVersionDir - Directory of the newer version
|
|
1138
|
+
* @param {string} reposDir - Repository directory
|
|
1139
|
+
* @returns {Promise<Object>} - Object mapping package names to changelogs and CI status
|
|
1140
|
+
*/
|
|
1141
|
+
const getChangelogs = async (upgradedDeps, newerVersionDir, reposDir) => {
|
|
1142
|
+
return await processInParallel(upgradedDeps, newerVersionDir, reposDir, 5);
|
|
1143
|
+
};
|
|
1144
|
+
|
|
1053
1145
|
/**
|
|
1054
1146
|
* Compare dependencies between two versions
|
|
1055
1147
|
* @param {Object} oldDeps - Old dependencies
|
|
@@ -1100,7 +1192,7 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1100
1192
|
}
|
|
1101
1193
|
}
|
|
1102
1194
|
} catch (error) {
|
|
1103
|
-
|
|
1195
|
+
// Silently continue with unknown change type
|
|
1104
1196
|
}
|
|
1105
1197
|
|
|
1106
1198
|
upgraded.push({
|
|
@@ -1195,7 +1287,7 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1195
1287
|
}
|
|
1196
1288
|
}
|
|
1197
1289
|
} catch (error) {
|
|
1198
|
-
|
|
1290
|
+
// Silently continue with unknown change type
|
|
1199
1291
|
}
|
|
1200
1292
|
|
|
1201
1293
|
nestedUpgraded.push({
|
|
@@ -1252,6 +1344,9 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1252
1344
|
* @returns {Promise<Object>} - Analysis report
|
|
1253
1345
|
*/
|
|
1254
1346
|
const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, workingDir = process.cwd(), namespace = null) => {
|
|
1347
|
+
// Setup signal handlers for graceful shutdown
|
|
1348
|
+
setupSignalHandlers();
|
|
1349
|
+
|
|
1255
1350
|
// Extract project name from repo URL
|
|
1256
1351
|
const projectName = basename(repoUrl, '.git');
|
|
1257
1352
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
@@ -1264,6 +1359,9 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1264
1359
|
// Create the repos directory
|
|
1265
1360
|
await mkdir(reposDir, { recursive: true });
|
|
1266
1361
|
|
|
1362
|
+
// Register temp directory for cleanup
|
|
1363
|
+
registerTempDir(reposDir);
|
|
1364
|
+
|
|
1267
1365
|
// Clone both versions
|
|
1268
1366
|
await cloneRepo(repoUrl, olderVersion, olderVersionDir);
|
|
1269
1367
|
await cloneRepo(repoUrl, newerVersion, newerVersionDir);
|
|
@@ -1310,7 +1408,7 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1310
1408
|
}
|
|
1311
1409
|
}
|
|
1312
1410
|
} catch (e) {
|
|
1313
|
-
|
|
1411
|
+
// Silently skip packages we can't read
|
|
1314
1412
|
}
|
|
1315
1413
|
|
|
1316
1414
|
if (repoUrl) {
|
|
@@ -1348,44 +1446,48 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1348
1446
|
// Also add to the comparison.upgraded array so it appears in the report
|
|
1349
1447
|
comparison.upgraded.push(lockFileDep);
|
|
1350
1448
|
|
|
1351
|
-
|
|
1449
|
+
// Added package from lock file analysis
|
|
1352
1450
|
}
|
|
1353
1451
|
} catch (error) {
|
|
1354
|
-
|
|
1452
|
+
// Silently skip packages we can't get info for
|
|
1355
1453
|
}
|
|
1356
1454
|
}
|
|
1357
1455
|
}
|
|
1358
1456
|
}
|
|
1359
1457
|
|
|
1360
1458
|
// Get changelogs for upgraded dependencies
|
|
1361
|
-
console.log(
|
|
1459
|
+
console.log(`\nGenerating changelogs for ${allChangedPackages.length} dependencies...`);
|
|
1362
1460
|
const { changelogs, errors, ciStatus } = await getChangelogs(allChangedPackages, newerVersionDir, reposDir);
|
|
1363
1461
|
|
|
1364
1462
|
// Get changelogs for modified dependencies (namespace changes)
|
|
1365
|
-
|
|
1366
|
-
|
|
1367
|
-
|
|
1368
|
-
|
|
1369
|
-
|
|
1370
|
-
|
|
1371
|
-
|
|
1372
|
-
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
|
|
1376
|
-
|
|
1377
|
-
|
|
1378
|
-
|
|
1463
|
+
if (comparison.modified.length > 0) {
|
|
1464
|
+
console.log(`Generating changelogs for ${comparison.modified.length} modified dependencies...`);
|
|
1465
|
+
const modifiedDepsForChangelog = comparison.modified.map(dep => ({
|
|
1466
|
+
name: dep.newName,
|
|
1467
|
+
oldVersion: dep.oldVersion,
|
|
1468
|
+
newVersion: dep.newVersion,
|
|
1469
|
+
changeType: 'namespace'
|
|
1470
|
+
}));
|
|
1471
|
+
const { changelogs: modifiedChangelogs, errors: modifiedErrors, ciStatus: modifiedCiStatus } =
|
|
1472
|
+
await getChangelogs(modifiedDepsForChangelog, newerVersionDir, reposDir);
|
|
1473
|
+
|
|
1474
|
+
// Merge changelogs, errors, and CI status
|
|
1475
|
+
Object.assign(changelogs, modifiedChangelogs);
|
|
1476
|
+
Object.assign(errors, modifiedErrors);
|
|
1477
|
+
Object.assign(ciStatus, modifiedCiStatus);
|
|
1478
|
+
}
|
|
1379
1479
|
|
|
1380
1480
|
// Get changelogs for nested upgraded dependencies
|
|
1381
|
-
|
|
1382
|
-
|
|
1383
|
-
|
|
1384
|
-
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
|
|
1388
|
-
|
|
1481
|
+
if (comparison.nested.upgraded.length > 0) {
|
|
1482
|
+
console.log(`Generating changelogs for ${comparison.nested.upgraded.length} nested upgraded dependencies...`);
|
|
1483
|
+
const { changelogs: nestedChangelogs, errors: nestedErrors, ciStatus: nestedCiStatus } =
|
|
1484
|
+
await getChangelogs(comparison.nested.upgraded, newerVersionDir, reposDir);
|
|
1485
|
+
|
|
1486
|
+
// Merge nested changelogs, errors, and CI status
|
|
1487
|
+
Object.assign(changelogs, nestedChangelogs);
|
|
1488
|
+
Object.assign(errors, nestedErrors);
|
|
1489
|
+
Object.assign(ciStatus, nestedCiStatus);
|
|
1490
|
+
}
|
|
1389
1491
|
|
|
1390
1492
|
// Write report to file
|
|
1391
1493
|
const reportPath = join(reposDir, 'report.json');
|
|
@@ -1408,10 +1510,25 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1408
1510
|
// Add reportPath after writing the file
|
|
1409
1511
|
report.reportPath = reportPath;
|
|
1410
1512
|
|
|
1411
|
-
console.log(
|
|
1513
|
+
console.log(`\n๐ Report generated: ${reportPath}`);
|
|
1514
|
+
|
|
1515
|
+
// Don't auto-cleanup on success - user might want to examine the files
|
|
1516
|
+
// But unregister from emergency cleanup since we completed successfully
|
|
1517
|
+
unregisterTempDir(reposDir);
|
|
1518
|
+
|
|
1412
1519
|
return report;
|
|
1413
1520
|
} catch (error) {
|
|
1414
|
-
console.error(
|
|
1521
|
+
console.error(`\nโ Error analyzing dependency changes: ${error.message}`);
|
|
1522
|
+
|
|
1523
|
+
// Clean up on error
|
|
1524
|
+
try {
|
|
1525
|
+
await rm(reposDir, { recursive: true, force: true });
|
|
1526
|
+
unregisterTempDir(reposDir);
|
|
1527
|
+
console.log(`๐๏ธ Cleaned up temporary directory: ${reposDir}`);
|
|
1528
|
+
} catch (cleanupError) {
|
|
1529
|
+
console.warn(`โ ๏ธ Failed to clean up ${reposDir}: ${cleanupError.message}`);
|
|
1530
|
+
}
|
|
1531
|
+
|
|
1415
1532
|
throw error;
|
|
1416
1533
|
}
|
|
1417
1534
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "dependency-change-report",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.5",
|
|
4
4
|
"main": "index.mjs",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
@@ -14,6 +14,9 @@
|
|
|
14
14
|
"keywords": [],
|
|
15
15
|
"description": "Generate a dependency change report between different versions of a project",
|
|
16
16
|
"dependencies": {
|
|
17
|
-
"
|
|
17
|
+
"cli-progress": "^3.12.0",
|
|
18
|
+
"execa": "^9.6.0",
|
|
19
|
+
"semver": "^7.7.1",
|
|
20
|
+
"p-queue": "^8.0.1"
|
|
18
21
|
}
|
|
19
22
|
}
|