dependency-change-report 1.0.4 โ 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/generate-html.mjs +1 -1
- package/lib/index.mjs +389 -194
- package/package.json +4 -2
package/lib/generate-html.mjs
CHANGED
package/lib/index.mjs
CHANGED
|
@@ -8,11 +8,102 @@ import semver from 'semver';
|
|
|
8
8
|
import os from 'os';
|
|
9
9
|
import https from 'https';
|
|
10
10
|
import { execa } from 'execa';
|
|
11
|
+
import cliProgress from 'cli-progress';
|
|
12
|
+
import PQueue from 'p-queue';
|
|
13
|
+
|
|
14
|
+
const time_10min = 10 * 60 * 1000; // 10 minutes in milliseconds
|
|
15
|
+
const time_5min = 5 * 60 * 1000; // 5 minutes in milliseconds
|
|
16
|
+
const time_2min = 2 * 60 * 1000; // 2 minutes in milliseconds
|
|
17
|
+
const time_1min = 60 * 1000; // 1 minute in milliseconds
|
|
18
|
+
|
|
19
|
+
// Global cleanup state
|
|
20
|
+
let globalCleanupState = {
|
|
21
|
+
multibar: null,
|
|
22
|
+
tempDirs: new Set(),
|
|
23
|
+
isShuttingDown: false
|
|
24
|
+
};
|
|
11
25
|
|
|
12
26
|
// Get the current directory
|
|
13
27
|
const __filename = fileURLToPath(import.meta.url);
|
|
14
28
|
const __dirname = dirname(__filename);
|
|
15
29
|
|
|
30
|
+
/**
|
|
31
|
+
* Setup signal handlers for graceful shutdown
|
|
32
|
+
*/
|
|
33
|
+
const setupSignalHandlers = () => {
|
|
34
|
+
const cleanup = async (signal) => {
|
|
35
|
+
if (globalCleanupState.isShuttingDown) {
|
|
36
|
+
return;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
globalCleanupState.isShuttingDown = true;
|
|
40
|
+
console.log(`\n\n๐ Received ${signal}, cleaning up...`);
|
|
41
|
+
|
|
42
|
+
// Stop progress bars and restore cursor
|
|
43
|
+
if (globalCleanupState.multibar) {
|
|
44
|
+
try {
|
|
45
|
+
globalCleanupState.multibar.stop();
|
|
46
|
+
} catch (error) {
|
|
47
|
+
// Ignore errors during cleanup
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// Restore cursor and clear any progress bar artifacts
|
|
52
|
+
process.stdout.write('\x1b[?25h'); // Show cursor
|
|
53
|
+
process.stdout.write('\x1b[0m'); // Reset colors
|
|
54
|
+
|
|
55
|
+
// Clean up temporary directories
|
|
56
|
+
const cleanupPromises = Array.from(globalCleanupState.tempDirs).map(async (dir) => {
|
|
57
|
+
try {
|
|
58
|
+
await rm(dir, { recursive: true, force: true });
|
|
59
|
+
console.log(`๐๏ธ Cleaned up: ${dir}`);
|
|
60
|
+
} catch (error) {
|
|
61
|
+
console.warn(`โ ๏ธ Failed to clean up ${dir}: ${error.message}`);
|
|
62
|
+
}
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
if (cleanupPromises.length > 0) {
|
|
66
|
+
console.log(`๐งน Cleaning up ${cleanupPromises.length} temporary directories...`);
|
|
67
|
+
await Promise.all(cleanupPromises);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
console.log('โ
Cleanup complete');
|
|
71
|
+
process.exit(signal === 'SIGTERM' ? 0 : 1);
|
|
72
|
+
};
|
|
73
|
+
|
|
74
|
+
// Handle various termination signals
|
|
75
|
+
process.on('SIGINT', () => cleanup('SIGINT')); // Ctrl+C
|
|
76
|
+
process.on('SIGTERM', () => cleanup('SIGTERM')); // Termination request
|
|
77
|
+
process.on('SIGHUP', () => cleanup('SIGHUP')); // Terminal closed
|
|
78
|
+
|
|
79
|
+
// Handle uncaught exceptions and unhandled rejections
|
|
80
|
+
process.on('uncaughtException', async (error) => {
|
|
81
|
+
console.error('\n๐ฅ Uncaught Exception:', error);
|
|
82
|
+
await cleanup('uncaughtException');
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
process.on('unhandledRejection', async (reason, promise) => {
|
|
86
|
+
console.error('\n๐ฅ Unhandled Rejection at:', promise, 'reason:', reason);
|
|
87
|
+
await cleanup('unhandledRejection');
|
|
88
|
+
});
|
|
89
|
+
};
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Register a temporary directory for cleanup
|
|
93
|
+
* @param {string} dir - Directory path to register for cleanup
|
|
94
|
+
*/
|
|
95
|
+
const registerTempDir = (dir) => {
|
|
96
|
+
globalCleanupState.tempDirs.add(dir);
|
|
97
|
+
};
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Unregister a temporary directory from cleanup (when manually cleaned)
|
|
101
|
+
* @param {string} dir - Directory path to unregister
|
|
102
|
+
*/
|
|
103
|
+
const unregisterTempDir = (dir) => {
|
|
104
|
+
globalCleanupState.tempDirs.delete(dir);
|
|
105
|
+
};
|
|
106
|
+
|
|
16
107
|
/**
|
|
17
108
|
* Execute a command and return its output
|
|
18
109
|
* @param {string} command - The command to execute
|
|
@@ -21,7 +112,7 @@ const __dirname = dirname(__filename);
|
|
|
21
112
|
* @param {number} timeout - Timeout in milliseconds (default: 5 minutes)
|
|
22
113
|
* @returns {Promise<string>} - Command output
|
|
23
114
|
*/
|
|
24
|
-
const executeCommand = async (command, args, cwd, timeout =
|
|
115
|
+
const executeCommand = async (command, args, cwd, timeout = time_5min) => {
|
|
25
116
|
try {
|
|
26
117
|
const result = await execa(command, args, {
|
|
27
118
|
cwd,
|
|
@@ -39,8 +130,6 @@ const executeCommand = async (command, args, cwd, timeout = 300000) => {
|
|
|
39
130
|
} else if (error.killed) {
|
|
40
131
|
throw new Error(`Command was killed: ${command} ${args.join(' ')}`);
|
|
41
132
|
} else if (error.exitCode !== 0) {
|
|
42
|
-
console.warn(`Warning: Command ${command} ${args.join(' ')} failed with code ${error.exitCode}`);
|
|
43
|
-
console.warn(`Error: ${error.stderr}`);
|
|
44
133
|
throw new Error(`Command failed with code ${error.exitCode}: ${error.stderr}`);
|
|
45
134
|
} else {
|
|
46
135
|
throw error;
|
|
@@ -57,13 +146,19 @@ const executeCommand = async (command, args, cwd, timeout = 300000) => {
|
|
|
57
146
|
*/
|
|
58
147
|
const cloneRepo = async (repoUrl, ref, targetDir) => {
|
|
59
148
|
try {
|
|
60
|
-
|
|
149
|
+
// Use shallow clone with depth=1 and single-branch for faster cloning
|
|
61
150
|
// Use --quiet to avoid printing credentials in logs
|
|
62
|
-
|
|
63
|
-
await executeCommand('git', ['
|
|
151
|
+
// 2 minute timeout for very large repositories
|
|
152
|
+
await executeCommand('git', ['clone', '--quiet', '--depth=1', '--single-branch', '--branch', ref, repoUrl, targetDir], undefined, time_2min);
|
|
64
153
|
} catch (error) {
|
|
65
|
-
|
|
66
|
-
|
|
154
|
+
// If shallow clone with specific branch fails, try traditional approach
|
|
155
|
+
try {
|
|
156
|
+
// Full clone with 5 minute timeout for very large repos
|
|
157
|
+
await executeCommand('git', ['clone', '--quiet', repoUrl, targetDir], undefined, time_5min);
|
|
158
|
+
await executeCommand('git', ['checkout', ref], targetDir, time_1min);
|
|
159
|
+
} catch (fallbackError) {
|
|
160
|
+
throw fallbackError;
|
|
161
|
+
}
|
|
67
162
|
}
|
|
68
163
|
};
|
|
69
164
|
|
|
@@ -74,10 +169,8 @@ const cloneRepo = async (repoUrl, ref, targetDir) => {
|
|
|
74
169
|
*/
|
|
75
170
|
const installDependencies = async (dir) => {
|
|
76
171
|
try {
|
|
77
|
-
console.log(`Installing dependencies in ${dir}...`);
|
|
78
172
|
await executeCommand('npm', ['install'], dir);
|
|
79
173
|
} catch (error) {
|
|
80
|
-
console.warn(`Warning: Failed to install dependencies: ${error.message}`);
|
|
81
174
|
throw error;
|
|
82
175
|
}
|
|
83
176
|
};
|
|
@@ -90,7 +183,6 @@ const installDependencies = async (dir) => {
|
|
|
90
183
|
*/
|
|
91
184
|
const getDependencies = async (dir, namespace = null) => {
|
|
92
185
|
try {
|
|
93
|
-
console.log(`Getting dependency list from ${dir}...`);
|
|
94
186
|
const output = await executeCommand('npm', ['ls', '--all', '--omit=dev', '--json'], dir);
|
|
95
187
|
const dependencies = JSON.parse(output).dependencies || {};
|
|
96
188
|
|
|
@@ -136,7 +228,7 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
136
228
|
}
|
|
137
229
|
}
|
|
138
230
|
} catch (err) {
|
|
139
|
-
|
|
231
|
+
// Silently skip nested dependencies we can't read
|
|
140
232
|
}
|
|
141
233
|
}
|
|
142
234
|
}
|
|
@@ -147,13 +239,13 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
147
239
|
}
|
|
148
240
|
}
|
|
149
241
|
} catch (err) {
|
|
150
|
-
|
|
242
|
+
// Silently skip dependencies we can't read
|
|
151
243
|
}
|
|
152
244
|
}
|
|
153
245
|
|
|
154
246
|
return dependencies;
|
|
155
247
|
} catch (error) {
|
|
156
|
-
|
|
248
|
+
// Silently return empty object if we can't get dependencies
|
|
157
249
|
// Return empty object if we can't get dependencies
|
|
158
250
|
return {};
|
|
159
251
|
}
|
|
@@ -168,34 +260,26 @@ const getDependencies = async (dir, namespace = null) => {
|
|
|
168
260
|
*/
|
|
169
261
|
const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
170
262
|
try {
|
|
171
|
-
console.log(`[CI] Checking GitHub Actions for ${repoUrl} at version ${version}`);
|
|
172
|
-
|
|
173
263
|
// Check if it's a GitHub repository
|
|
174
264
|
if (!repoUrl.includes('github.com')) {
|
|
175
|
-
console.log(`[CI] Not a GitHub repository: ${repoUrl}`);
|
|
176
265
|
return null;
|
|
177
266
|
}
|
|
178
267
|
|
|
179
268
|
// Extract owner and repo from URL
|
|
180
269
|
const match = repoUrl.match(/github\.com[:/]([^/]+)\/([^/.]+)/);
|
|
181
270
|
if (!match) {
|
|
182
|
-
console.log(`[CI] Could not extract owner/repo from URL: ${repoUrl}`);
|
|
183
271
|
return null;
|
|
184
272
|
}
|
|
185
273
|
|
|
186
274
|
const [, owner, repo] = match;
|
|
187
|
-
console.log(`[CI] Extracted GitHub repo: ${owner}/${repo}`);
|
|
188
275
|
|
|
189
276
|
// Use provided commit SHA or try to resolve it
|
|
190
|
-
if (commitSha) {
|
|
191
|
-
console.log(`[CI] Using provided commit SHA: ${commitSha.substring(0, 7)}`);
|
|
192
|
-
} else {
|
|
193
|
-
console.log(`[CI] No commit SHA provided, trying to resolve version ${version} via GitHub API`);
|
|
277
|
+
if (!commitSha) {
|
|
194
278
|
|
|
195
279
|
// Try to get commit SHA from GitHub API for the tag/ref
|
|
280
|
+
|
|
196
281
|
try {
|
|
197
282
|
const refUrl = `https://api.github.com/repos/${owner}/${repo}/git/refs/tags/${version}`;
|
|
198
|
-
console.log(`[CI] Requesting tag info: ${refUrl}`);
|
|
199
283
|
const refData = await makeGitHubApiRequest(refUrl);
|
|
200
284
|
|
|
201
285
|
if (refData && refData.object) {
|
|
@@ -226,7 +310,6 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
226
310
|
}
|
|
227
311
|
|
|
228
312
|
if (!commitSha) {
|
|
229
|
-
console.warn(`[CI] Could not find commit SHA for ${owner}/${repo} at version ${version}`);
|
|
230
313
|
return {
|
|
231
314
|
status: 'unknown',
|
|
232
315
|
error: 'Could not find commit SHA for version'
|
|
@@ -236,19 +319,15 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
236
319
|
|
|
237
320
|
// Get workflow runs for the commit
|
|
238
321
|
const runsUrl = `https://api.github.com/repos/${owner}/${repo}/actions/runs?head_sha=${commitSha}`;
|
|
239
|
-
console.log(`[CI] Requesting workflow runs: ${runsUrl}`);
|
|
240
322
|
const runsData = await makeGitHubApiRequest(runsUrl);
|
|
241
323
|
|
|
242
324
|
if (!runsData || !runsData.workflow_runs || runsData.workflow_runs.length === 0) {
|
|
243
|
-
console.log(`[CI] No workflow runs found for ${owner}/${repo} at commit ${commitSha.substring(0, 7)}`);
|
|
244
325
|
return {
|
|
245
326
|
status: 'no_workflows',
|
|
246
327
|
message: 'No GitHub Actions workflows found for this commit'
|
|
247
328
|
};
|
|
248
329
|
}
|
|
249
330
|
|
|
250
|
-
console.log(`[CI] Found ${runsData.workflow_runs.length} workflow runs for ${owner}/${repo}`);
|
|
251
|
-
|
|
252
331
|
// Analyze the workflow runs
|
|
253
332
|
const runs = runsData.workflow_runs;
|
|
254
333
|
const latestRun = runs[0]; // Most recent run
|
|
@@ -318,7 +397,6 @@ const getGitHubActionsStatus = async (repoUrl, version, commitSha = null) => {
|
|
|
318
397
|
};
|
|
319
398
|
|
|
320
399
|
} catch (error) {
|
|
321
|
-
console.warn(`Warning: Could not get GitHub Actions status for ${repoUrl} at ${version}: ${error.message}`);
|
|
322
400
|
return {
|
|
323
401
|
status: 'error',
|
|
324
402
|
error: error.message
|
|
@@ -409,7 +487,6 @@ const getRepositoryUrl = async (packageDir) => {
|
|
|
409
487
|
|
|
410
488
|
return null;
|
|
411
489
|
} catch (error) {
|
|
412
|
-
console.warn(`Warning: Could not get repository URL for ${packageDir}: ${error.message}`);
|
|
413
490
|
return null;
|
|
414
491
|
}
|
|
415
492
|
};
|
|
@@ -423,18 +500,23 @@ const getRepositoryUrl = async (packageDir) => {
|
|
|
423
500
|
* @returns {Promise<Array>} - Array of commit objects
|
|
424
501
|
*/
|
|
425
502
|
const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
503
|
+
let tempDir = null;
|
|
504
|
+
|
|
426
505
|
try {
|
|
427
506
|
// Create a directory for the repository within the repos directory
|
|
428
507
|
const packageName = basename(repoUrl, '.git');
|
|
429
|
-
|
|
508
|
+
tempDir = join(reposDir, `${packageName}-history`);
|
|
430
509
|
await mkdir(tempDir, { recursive: true });
|
|
431
510
|
|
|
511
|
+
// Register this temp directory for cleanup
|
|
512
|
+
registerTempDir(tempDir);
|
|
513
|
+
|
|
432
514
|
// Clone the repository with optimizations for faster cloning
|
|
433
|
-
console.log(`Cloning ${repoUrl} into ${tempDir} to get commit history...`);
|
|
434
515
|
// Use --quiet to avoid printing credentials in logs
|
|
435
516
|
// Use --depth=1 and --single-branch for faster cloning, then fetch what we need
|
|
436
517
|
try {
|
|
437
|
-
|
|
518
|
+
// 2 minute timeout for very large repositories
|
|
519
|
+
await executeCommand('git', ['clone', '--quiet', '--depth=1', '--single-branch', repoUrl, tempDir], undefined, time_2min);
|
|
438
520
|
} catch (error) {
|
|
439
521
|
// If the repository doesn't exist or can't be accessed, throw a more specific error
|
|
440
522
|
if (error.message.includes("Repository not found") ||
|
|
@@ -447,9 +529,9 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
447
529
|
|
|
448
530
|
// Fetch all tags to ensure we have the version references
|
|
449
531
|
try {
|
|
450
|
-
|
|
532
|
+
// 2 minute timeout for fetching tags from large repositories
|
|
533
|
+
await executeCommand('git', ['fetch', '--tags', '--force', '--unshallow'], tempDir, time_2min);
|
|
451
534
|
} catch (error) {
|
|
452
|
-
console.warn(`Warning: Failed to fetch tags: ${error.message}`);
|
|
453
535
|
// Continue without tags if fetch fails
|
|
454
536
|
}
|
|
455
537
|
|
|
@@ -467,9 +549,8 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
467
549
|
const checkRef = async (ref) => {
|
|
468
550
|
// Make sure we're in the right directory and have fetched everything
|
|
469
551
|
try {
|
|
470
|
-
await executeCommand('git', ['fetch', '--all'], tempDir,
|
|
552
|
+
await executeCommand('git', ['fetch', '--all'], tempDir, time_1min); // 1 minute timeout
|
|
471
553
|
} catch (error) {
|
|
472
|
-
console.warn(`Warning: Failed to fetch all refs: ${error.message}`);
|
|
473
554
|
// Continue without full fetch
|
|
474
555
|
}
|
|
475
556
|
|
|
@@ -490,7 +571,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
490
571
|
for (const pattern of uniquePatterns) {
|
|
491
572
|
try {
|
|
492
573
|
const result = await executeCommand('git', ['rev-parse', '--verify', pattern], tempDir);
|
|
493
|
-
console.log(`Found reference ${ref} as ${pattern}`);
|
|
494
574
|
return { ref: pattern, hash: result.trim() };
|
|
495
575
|
} catch (error) {
|
|
496
576
|
// Continue to next pattern
|
|
@@ -505,8 +585,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
505
585
|
// Find commit with version bump
|
|
506
586
|
const findVersionCommit = async (version) => {
|
|
507
587
|
try {
|
|
508
|
-
console.log(`Looking for commit that bumps version to ${version}...`);
|
|
509
|
-
|
|
510
588
|
// Look for version in commit messages (common patterns)
|
|
511
589
|
const patterns = [
|
|
512
590
|
`version bump to ${version}`,
|
|
@@ -527,7 +605,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
527
605
|
);
|
|
528
606
|
|
|
529
607
|
if (result.trim()) {
|
|
530
|
-
console.log(`Found commit for version ${version} using pattern: ${pattern}`);
|
|
531
608
|
return { ref: version, hash: result.trim() };
|
|
532
609
|
}
|
|
533
610
|
} catch (e) {
|
|
@@ -544,7 +621,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
544
621
|
);
|
|
545
622
|
|
|
546
623
|
if (result.trim()) {
|
|
547
|
-
console.log(`Found commit that changes package.json version to ${version}`);
|
|
548
624
|
return { ref: version, hash: result.trim() };
|
|
549
625
|
}
|
|
550
626
|
} catch (e) {
|
|
@@ -553,7 +629,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
553
629
|
|
|
554
630
|
return null;
|
|
555
631
|
} catch (error) {
|
|
556
|
-
console.warn(`Error finding version commit: ${error.message}`);
|
|
557
632
|
return null;
|
|
558
633
|
}
|
|
559
634
|
};
|
|
@@ -564,18 +639,15 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
564
639
|
|
|
565
640
|
// If direct references not found, try to find commits with version bumps
|
|
566
641
|
if (!resolvedOldRef) {
|
|
567
|
-
console.log(`Reference ${oldVersion} not found directly, looking for version bump commit...`);
|
|
568
642
|
resolvedOldRef = await findVersionCommit(oldVersion);
|
|
569
643
|
}
|
|
570
644
|
|
|
571
645
|
if (!resolvedNewRef) {
|
|
572
|
-
console.log(`Reference ${newVersion} not found directly, looking for version bump commit...`);
|
|
573
646
|
resolvedNewRef = await findVersionCommit(newVersion);
|
|
574
647
|
}
|
|
575
648
|
|
|
576
649
|
// If still no references found, try to get all tags and find closest matches
|
|
577
650
|
if (!resolvedOldRef || !resolvedNewRef) {
|
|
578
|
-
console.log('Trying to find closest version matches from available tags...');
|
|
579
651
|
try {
|
|
580
652
|
const tagsOutput = await executeCommand('git', ['tag', '-l'], tempDir);
|
|
581
653
|
const availableTags = tagsOutput.split('\n').filter(tag => tag.trim());
|
|
@@ -591,7 +663,6 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
591
663
|
try {
|
|
592
664
|
const result = await executeCommand('git', ['rev-parse', '--verify', oldMatch], tempDir);
|
|
593
665
|
resolvedOldRef = { ref: oldMatch, hash: result.trim() };
|
|
594
|
-
console.log(`Found closest match for ${oldVersion}: ${oldMatch}`);
|
|
595
666
|
} catch (e) {
|
|
596
667
|
// Continue to fallback
|
|
597
668
|
}
|
|
@@ -609,20 +680,18 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
609
680
|
try {
|
|
610
681
|
const result = await executeCommand('git', ['rev-parse', '--verify', newMatch], tempDir);
|
|
611
682
|
resolvedNewRef = { ref: newMatch, hash: result.trim() };
|
|
612
|
-
console.log(`Found closest match for ${newVersion}: ${newMatch}`);
|
|
613
683
|
} catch (e) {
|
|
614
684
|
// Continue to fallback
|
|
615
685
|
}
|
|
616
686
|
}
|
|
617
687
|
}
|
|
618
688
|
} catch (error) {
|
|
619
|
-
|
|
689
|
+
// Continue to fallback
|
|
620
690
|
}
|
|
621
691
|
}
|
|
622
692
|
|
|
623
693
|
// Last resort: if we can't find specific versions, use default branch for newer and first commit for older
|
|
624
694
|
if (!resolvedOldRef && !resolvedNewRef) {
|
|
625
|
-
console.warn(`Warning: Could not find references for both ${oldVersion} and ${newVersion}. Using first and latest commits instead.`);
|
|
626
695
|
try {
|
|
627
696
|
// Get the first commit
|
|
628
697
|
const firstCommit = await executeCommand('git', ['rev-list', '--max-parents=0', 'HEAD'], tempDir);
|
|
@@ -631,40 +700,27 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
631
700
|
// Get the latest commit on default branch
|
|
632
701
|
const latestCommit = await executeCommand('git', ['rev-parse', 'HEAD'], tempDir);
|
|
633
702
|
resolvedNewRef = { ref: 'latest-commit', hash: latestCommit.trim() };
|
|
634
|
-
|
|
635
|
-
console.log(`Using first commit (${resolvedOldRef.hash.substring(0, 7)}) and latest commit (${resolvedNewRef.hash.substring(0, 7)}) as fallback`);
|
|
636
703
|
} catch (error) {
|
|
637
|
-
console.warn(`Warning: Failed to get first and latest commits: ${error.message}`);
|
|
638
704
|
return [];
|
|
639
705
|
}
|
|
640
706
|
} else if (!resolvedOldRef) {
|
|
641
|
-
console.warn(`Warning: Could not find reference for ${oldVersion}. Using first commit instead.`);
|
|
642
707
|
try {
|
|
643
708
|
// Get the first commit
|
|
644
709
|
const firstCommit = await executeCommand('git', ['rev-list', '--max-parents=0', 'HEAD'], tempDir);
|
|
645
710
|
resolvedOldRef = { ref: 'first-commit', hash: firstCommit.trim() };
|
|
646
|
-
console.log(`Using first commit (${resolvedOldRef.hash.substring(0, 7)}) as fallback for ${oldVersion}`);
|
|
647
711
|
} catch (error) {
|
|
648
|
-
console.warn(`Warning: Failed to get first commit: ${error.message}`);
|
|
649
712
|
return [];
|
|
650
713
|
}
|
|
651
714
|
} else if (!resolvedNewRef) {
|
|
652
|
-
console.warn(`Warning: Could not find reference for ${newVersion}. Using latest commit instead.`);
|
|
653
715
|
try {
|
|
654
716
|
// Get the latest commit on default branch
|
|
655
717
|
const latestCommit = await executeCommand('git', ['rev-parse', 'HEAD'], tempDir);
|
|
656
718
|
resolvedNewRef = { ref: 'latest-commit', hash: latestCommit.trim() };
|
|
657
|
-
console.log(`Using latest commit (${resolvedNewRef.hash.substring(0, 7)}) as fallback for ${newVersion}`);
|
|
658
719
|
} catch (error) {
|
|
659
|
-
console.warn(`Warning: Failed to get latest commit: ${error.message}`);
|
|
660
720
|
return [];
|
|
661
721
|
}
|
|
662
722
|
}
|
|
663
723
|
|
|
664
|
-
// Get commit history between versions
|
|
665
|
-
// Format: hash,author,date,message
|
|
666
|
-
console.log(`Getting commits between ${resolvedOldRef.ref} (${resolvedOldRef.hash.substring(0, 7)}) and ${resolvedNewRef.ref} (${resolvedNewRef.hash.substring(0, 7)})...`);
|
|
667
|
-
|
|
668
724
|
// Check if the order is correct (older should come before newer)
|
|
669
725
|
try {
|
|
670
726
|
// Try to determine which commit came first
|
|
@@ -680,13 +736,11 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
680
736
|
|
|
681
737
|
if (mergeBase.trim() === resolvedNewRef.hash.trim()) {
|
|
682
738
|
// Order is reversed, swap them
|
|
683
|
-
console.log('Detected reversed version order, swapping references...');
|
|
684
739
|
const temp = resolvedOldRef;
|
|
685
740
|
resolvedOldRef = resolvedNewRef;
|
|
686
741
|
resolvedNewRef = temp;
|
|
687
742
|
}
|
|
688
743
|
} catch (error) {
|
|
689
|
-
console.warn(`Warning: Could not determine commit order: ${error.message}`);
|
|
690
744
|
// Continue with original order
|
|
691
745
|
}
|
|
692
746
|
|
|
@@ -698,17 +752,14 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
698
752
|
tempDir
|
|
699
753
|
);
|
|
700
754
|
} catch (error) {
|
|
701
|
-
console.warn(`Warning: Failed to get commit log: ${error.message}`);
|
|
702
755
|
// Try with a different approach - get all commits and filter
|
|
703
756
|
try {
|
|
704
|
-
console.log('Trying alternative approach to get commit history...');
|
|
705
757
|
output = await executeCommand(
|
|
706
758
|
'git',
|
|
707
759
|
['log', '--pretty=format:%H,%an,%ad,%s'],
|
|
708
760
|
tempDir
|
|
709
761
|
);
|
|
710
762
|
} catch (e) {
|
|
711
|
-
console.warn(`Warning: Alternative approach also failed: ${e.message}`);
|
|
712
763
|
return [];
|
|
713
764
|
}
|
|
714
765
|
}
|
|
@@ -727,11 +778,22 @@ const getCommitHistory = async (repoUrl, oldVersion, newVersion, reposDir) => {
|
|
|
727
778
|
});
|
|
728
779
|
|
|
729
780
|
// Clean up
|
|
730
|
-
|
|
781
|
+
if (tempDir) {
|
|
782
|
+
await rm(tempDir, { recursive: true, force: true });
|
|
783
|
+
unregisterTempDir(tempDir);
|
|
784
|
+
}
|
|
731
785
|
|
|
732
786
|
return commits;
|
|
733
787
|
} catch (error) {
|
|
734
|
-
|
|
788
|
+
// Clean up on error
|
|
789
|
+
if (tempDir) {
|
|
790
|
+
try {
|
|
791
|
+
await rm(tempDir, { recursive: true, force: true });
|
|
792
|
+
unregisterTempDir(tempDir);
|
|
793
|
+
} catch (cleanupError) {
|
|
794
|
+
// Ignore cleanup errors
|
|
795
|
+
}
|
|
796
|
+
}
|
|
735
797
|
return [];
|
|
736
798
|
}
|
|
737
799
|
};
|
|
@@ -752,12 +814,9 @@ const getPackageLockChanges = async (olderVersionDir, newerVersionDir) => {
|
|
|
752
814
|
await readFile(oldLockPath);
|
|
753
815
|
await readFile(newLockPath);
|
|
754
816
|
} catch (error) {
|
|
755
|
-
console.log('package-lock.json not found in one or both versions, skipping lock file analysis');
|
|
756
817
|
return { changedPackages: [], packageVersions: {} };
|
|
757
818
|
}
|
|
758
819
|
|
|
759
|
-
console.log('Found package-lock.json in both versions, analyzing changes...');
|
|
760
|
-
|
|
761
820
|
// Read and parse both lock files
|
|
762
821
|
const oldLock = JSON.parse(await readFile(oldLockPath, 'utf8'));
|
|
763
822
|
const newLock = JSON.parse(await readFile(newLockPath, 'utf8'));
|
|
@@ -804,14 +863,12 @@ const getPackageLockChanges = async (olderVersionDir, newerVersionDir) => {
|
|
|
804
863
|
}
|
|
805
864
|
}
|
|
806
865
|
|
|
807
|
-
console.log(`Found ${changedPackages.size} packages with changes in package-lock.json`);
|
|
808
866
|
return {
|
|
809
867
|
changedPackages: Array.from(changedPackages),
|
|
810
868
|
packageVersions
|
|
811
869
|
};
|
|
812
870
|
|
|
813
871
|
} catch (error) {
|
|
814
|
-
console.warn(`Warning: Failed to analyze package-lock.json changes: ${error.message}`);
|
|
815
872
|
return { changedPackages: [], packageVersions: {} };
|
|
816
873
|
}
|
|
817
874
|
};
|
|
@@ -871,107 +928,220 @@ const extractFromDependencies = (dependencies, packages) => {
|
|
|
871
928
|
};
|
|
872
929
|
|
|
873
930
|
/**
|
|
874
|
-
*
|
|
875
|
-
* @param {
|
|
931
|
+
* Process a single dependency for changelog and CI status with progress updates
|
|
932
|
+
* @param {Object} dep - Dependency object
|
|
876
933
|
* @param {string} newerVersionDir - Directory of the newer version
|
|
877
934
|
* @param {string} reposDir - Repository directory
|
|
878
|
-
* @
|
|
935
|
+
* @param {Object} multibar - CLI multi progress bar instance
|
|
936
|
+
* @param {number} maxNameLength - Maximum name length for consistent padding
|
|
937
|
+
* @returns {Promise<Object>} - Object with changelog, error, and CI status
|
|
879
938
|
*/
|
|
880
|
-
const
|
|
939
|
+
const processSingleDependency = async (dep, newerVersionDir, reposDir, multibar, maxNameLength) => {
|
|
940
|
+
const result = {
|
|
941
|
+
name: dep.name,
|
|
942
|
+
changelog: null,
|
|
943
|
+
error: null,
|
|
944
|
+
ciStatus: null
|
|
945
|
+
};
|
|
946
|
+
|
|
947
|
+
// Format the name with consistent padding, truncating if necessary
|
|
948
|
+
let displayName = dep.name;
|
|
949
|
+
if (displayName.length > maxNameLength) {
|
|
950
|
+
displayName = displayName.substring(0, maxNameLength - 3) + '...';
|
|
951
|
+
}
|
|
952
|
+
displayName = displayName.padEnd(maxNameLength);
|
|
953
|
+
|
|
954
|
+
// Create individual progress bar for this dependency
|
|
955
|
+
const depBar = multibar.create(100, 0, { name: displayName, status: 'Starting...' });
|
|
956
|
+
|
|
957
|
+
depBar.update(10, { status: 'Getting repo URL...' });
|
|
958
|
+
const packageDir = join(newerVersionDir, 'node_modules', dep.name);
|
|
959
|
+
const repoUrl = await getRepositoryUrl(packageDir);
|
|
960
|
+
|
|
961
|
+
if (!repoUrl) {
|
|
962
|
+
depBar.update(100, { status: 'โ No repo URL' });
|
|
963
|
+
result.error = {
|
|
964
|
+
repoUrl: null,
|
|
965
|
+
oldVersion: dep.oldVersion,
|
|
966
|
+
newVersion: dep.newVersion,
|
|
967
|
+
error: "No repository URL found"
|
|
968
|
+
};
|
|
969
|
+
return result;
|
|
970
|
+
}
|
|
971
|
+
|
|
972
|
+
depBar.update(20, { status: 'Cleaning repo URL...' });
|
|
973
|
+
|
|
974
|
+
// Clean the repository URL and convert to git URL for authentication
|
|
975
|
+
let cleanRepoUrl = repoUrl.replace(/^git\+/, '');
|
|
976
|
+
|
|
977
|
+
// Remove .git extension if present (we'll add it back later if needed)
|
|
978
|
+
cleanRepoUrl = cleanRepoUrl.replace(/\.git$/, '');
|
|
979
|
+
|
|
980
|
+
// Handle GitHub shorthand (github:user/repo)
|
|
981
|
+
if (cleanRepoUrl.match(/^(github|gitlab|bitbucket):/)) {
|
|
982
|
+
cleanRepoUrl = `git@github.com:${cleanRepoUrl.split(':')[1]}`;
|
|
983
|
+
}
|
|
984
|
+
// Convert https GitHub URLs to git URLs
|
|
985
|
+
else if (cleanRepoUrl.match(/^https?:\/\/github\.com\//)) {
|
|
986
|
+
cleanRepoUrl = `git@github.com:${cleanRepoUrl.replace(/^https?:\/\/github\.com\//, '')}`;
|
|
987
|
+
}
|
|
988
|
+
// Handle git:// protocol URLs
|
|
989
|
+
else if (cleanRepoUrl.match(/^git:\/\/github\.com\//)) {
|
|
990
|
+
cleanRepoUrl = `git@github.com:${cleanRepoUrl.replace(/^git:\/\/github\.com\//, '')}`;
|
|
991
|
+
}
|
|
992
|
+
// Ensure URL is in the correct format for GitHub
|
|
993
|
+
else if (!cleanRepoUrl.match(/^git@github\.com:/)) {
|
|
994
|
+
// If it's not already in the git@github.com format, try to convert it
|
|
995
|
+
const parts = cleanRepoUrl.split('/');
|
|
996
|
+
const repoName = parts.pop();
|
|
997
|
+
const orgName = parts.pop();
|
|
998
|
+
if (orgName && repoName) {
|
|
999
|
+
cleanRepoUrl = `git@github.com:${orgName}/${repoName}`;
|
|
1000
|
+
}
|
|
1001
|
+
}
|
|
1002
|
+
|
|
1003
|
+
// Add .git extension if not present
|
|
1004
|
+
if (!cleanRepoUrl.endsWith('.git')) {
|
|
1005
|
+
cleanRepoUrl += '.git';
|
|
1006
|
+
}
|
|
1007
|
+
|
|
1008
|
+
depBar.update(30, { status: 'Getting commits...' });
|
|
1009
|
+
|
|
1010
|
+
let commits = [];
|
|
1011
|
+
try {
|
|
1012
|
+
commits = await getCommitHistory(cleanRepoUrl, dep.oldVersion, dep.newVersion, reposDir);
|
|
1013
|
+
if (commits.length > 0) {
|
|
1014
|
+
depBar.update(70, { status: `Found ${commits.length} commits` });
|
|
1015
|
+
result.changelog = {
|
|
1016
|
+
repoUrl: cleanRepoUrl,
|
|
1017
|
+
oldVersion: dep.oldVersion,
|
|
1018
|
+
newVersion: dep.newVersion,
|
|
1019
|
+
commits
|
|
1020
|
+
};
|
|
1021
|
+
} else {
|
|
1022
|
+
depBar.update(70, { status: 'โ ๏ธ No commits found' });
|
|
1023
|
+
result.error = {
|
|
1024
|
+
repoUrl: cleanRepoUrl,
|
|
1025
|
+
oldVersion: dep.oldVersion,
|
|
1026
|
+
newVersion: dep.newVersion,
|
|
1027
|
+
error: "No commits found between versions"
|
|
1028
|
+
};
|
|
1029
|
+
}
|
|
1030
|
+
} catch (error) {
|
|
1031
|
+
depBar.update(70, { status: 'โ Commit error' });
|
|
1032
|
+
result.error = {
|
|
1033
|
+
repoUrl: cleanRepoUrl,
|
|
1034
|
+
oldVersion: dep.oldVersion,
|
|
1035
|
+
newVersion: dep.newVersion,
|
|
1036
|
+
error: error.message
|
|
1037
|
+
};
|
|
1038
|
+
}
|
|
1039
|
+
|
|
1040
|
+
// Get GitHub Actions status for the new version
|
|
1041
|
+
depBar.update(80, { status: 'Getting CI status...' });
|
|
1042
|
+
try {
|
|
1043
|
+
const best = commits.length > 0 ? commits[0].hash : null;
|
|
1044
|
+
const actionsStatus = await getGitHubActionsStatus(cleanRepoUrl, dep.newVersion, best);
|
|
1045
|
+
if (actionsStatus) {
|
|
1046
|
+
result.ciStatus = actionsStatus;
|
|
1047
|
+
depBar.update(100, { status: `โ
Complete (CI: ${actionsStatus.status})` });
|
|
1048
|
+
} else {
|
|
1049
|
+
depBar.update(100, { status: 'โ
Complete (no CI)' });
|
|
1050
|
+
}
|
|
1051
|
+
} catch (error) {
|
|
1052
|
+
// Silently ignore CI status errors
|
|
1053
|
+
depBar.update(100, { status: 'โ
Complete (CI error)' });
|
|
1054
|
+
}
|
|
1055
|
+
|
|
1056
|
+
return result;
|
|
1057
|
+
};
|
|
1058
|
+
|
|
1059
|
+
/**
 * Process dependencies in parallel with a concurrency limit using a proper queue.
 * Each dependency gets its own progress bar in a shared MultiBar; per-dependency
 * results are folded into three name-keyed maps.
 * @param {Array} dependencies - Array of dependencies to process (each has at least a `name`)
 * @param {string} newerVersionDir - Directory of the newer version
 * @param {string} reposDir - Repository directory
 * @param {number} concurrency - Maximum number of concurrent operations
 * @returns {Promise<Object>} - `{ changelogs, errors, ciStatus }`, each keyed by dependency name
 */
const processInParallel = async (dependencies, newerVersionDir, reposDir, concurrency = 5) => {
  const changelogs = {};
  const errors = {};
  const ciStatus = {};

  if (dependencies.length === 0) {
    return { changelogs, errors, ciStatus };
  }

  // Calculate the maximum name length for consistent progress-bar padding.
  const maxNameLength = Math.min(
    Math.max(...dependencies.map(dep => dep.name.length)),
    40 // Reasonable maximum to prevent extremely long lines
  );

  // Create the multi progress bar (one bar per dependency).
  const multibar = new cliProgress.MultiBar({
    clearOnComplete: false,
    hideCursor: true,
    format: '{name} |{bar}| {percentage}% | {status}'
  }, {
    barCompleteChar: '\u2588',
    barIncompleteChar: '\u2591'
  });

  // Register the multibar so the SIGINT/SIGTERM handler can stop it mid-run.
  globalCleanupState.multibar = multibar;

  console.log(`\nProcessing ${dependencies.length} dependencies with concurrency limit of ${concurrency}:\n`);

  try {
    // The queue enforces the concurrency limit across all dependency tasks.
    const queue = new PQueue({ concurrency });

    // Enqueue every dependency and wait for all of them to complete.
    const results = await Promise.all(
      dependencies.map(dep =>
        queue.add(() => processSingleDependency(dep, newerVersionDir, reposDir, multibar, maxNameLength))
      )
    );

    // Fold per-dependency results into the aggregate maps.
    for (const result of results) {
      if (result.changelog) {
        changelogs[result.name] = result.changelog;
      }
      if (result.error) {
        errors[result.name] = result.error;
      }
      if (result.ciStatus) {
        ciStatus[result.name] = result.ciStatus;
      }
    }
  } finally {
    // BUGFIX: always tear down, even if a queued task rejected. Without this,
    // a rejection skipped multibar.stop() and the cursor restore, leaving the
    // terminal with a hidden cursor and a stale multibar in the cleanup state.
    multibar.stop();
    globalCleanupState.multibar = null;
    process.stdout.write('\x1b[?25h'); // Show cursor
  }

  console.log(`\n✅ Completed processing ${dependencies.length} dependencies\n`);

  return { changelogs, errors, ciStatus };
};
|
|
974
1133
|
|
|
1134
|
+
/**
 * Get changelog and CI status for upgraded dependencies.
 * Thin convenience wrapper around processInParallel with a fixed
 * concurrency of 5.
 * @param {Array} upgradedDeps - Array of upgraded dependencies
 * @param {string} newerVersionDir - Directory of the newer version
 * @param {string} reposDir - Repository directory
 * @returns {Promise<Object>} - Object mapping package names to changelogs and CI status
 */
const getChangelogs = (upgradedDeps, newerVersionDir, reposDir) =>
  processInParallel(upgradedDeps, newerVersionDir, reposDir, 5);
|
|
1144
|
+
|
|
975
1145
|
/**
|
|
976
1146
|
* Compare dependencies between two versions
|
|
977
1147
|
* @param {Object} oldDeps - Old dependencies
|
|
@@ -1022,7 +1192,7 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1022
1192
|
}
|
|
1023
1193
|
}
|
|
1024
1194
|
} catch (error) {
|
|
1025
|
-
|
|
1195
|
+
// Silently continue with unknown change type
|
|
1026
1196
|
}
|
|
1027
1197
|
|
|
1028
1198
|
upgraded.push({
|
|
@@ -1117,7 +1287,7 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1117
1287
|
}
|
|
1118
1288
|
}
|
|
1119
1289
|
} catch (error) {
|
|
1120
|
-
|
|
1290
|
+
// Silently continue with unknown change type
|
|
1121
1291
|
}
|
|
1122
1292
|
|
|
1123
1293
|
nestedUpgraded.push({
|
|
@@ -1174,6 +1344,9 @@ const compareDependencies = (oldDeps, newDeps) => {
|
|
|
1174
1344
|
* @returns {Promise<Object>} - Analysis report
|
|
1175
1345
|
*/
|
|
1176
1346
|
const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, workingDir = process.cwd(), namespace = null) => {
|
|
1347
|
+
// Setup signal handlers for graceful shutdown
|
|
1348
|
+
setupSignalHandlers();
|
|
1349
|
+
|
|
1177
1350
|
// Extract project name from repo URL
|
|
1178
1351
|
const projectName = basename(repoUrl, '.git');
|
|
1179
1352
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
@@ -1186,6 +1359,9 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1186
1359
|
// Create the repos directory
|
|
1187
1360
|
await mkdir(reposDir, { recursive: true });
|
|
1188
1361
|
|
|
1362
|
+
// Register temp directory for cleanup
|
|
1363
|
+
registerTempDir(reposDir);
|
|
1364
|
+
|
|
1189
1365
|
// Clone both versions
|
|
1190
1366
|
await cloneRepo(repoUrl, olderVersion, olderVersionDir);
|
|
1191
1367
|
await cloneRepo(repoUrl, newerVersion, newerVersionDir);
|
|
@@ -1232,7 +1408,7 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1232
1408
|
}
|
|
1233
1409
|
}
|
|
1234
1410
|
} catch (e) {
|
|
1235
|
-
|
|
1411
|
+
// Silently skip packages we can't read
|
|
1236
1412
|
}
|
|
1237
1413
|
|
|
1238
1414
|
if (repoUrl) {
|
|
@@ -1270,44 +1446,48 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1270
1446
|
// Also add to the comparison.upgraded array so it appears in the report
|
|
1271
1447
|
comparison.upgraded.push(lockFileDep);
|
|
1272
1448
|
|
|
1273
|
-
|
|
1449
|
+
// Added package from lock file analysis
|
|
1274
1450
|
}
|
|
1275
1451
|
} catch (error) {
|
|
1276
|
-
|
|
1452
|
+
// Silently skip packages we can't get info for
|
|
1277
1453
|
}
|
|
1278
1454
|
}
|
|
1279
1455
|
}
|
|
1280
1456
|
}
|
|
1281
1457
|
|
|
1282
1458
|
// Get changelogs for upgraded dependencies
|
|
1283
|
-
console.log(
|
|
1459
|
+
console.log(`\nGenerating changelogs for ${allChangedPackages.length} dependencies...`);
|
|
1284
1460
|
const { changelogs, errors, ciStatus } = await getChangelogs(allChangedPackages, newerVersionDir, reposDir);
|
|
1285
1461
|
|
|
1286
1462
|
// Get changelogs for modified dependencies (namespace changes)
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1463
|
+
if (comparison.modified.length > 0) {
|
|
1464
|
+
console.log(`Generating changelogs for ${comparison.modified.length} modified dependencies...`);
|
|
1465
|
+
const modifiedDepsForChangelog = comparison.modified.map(dep => ({
|
|
1466
|
+
name: dep.newName,
|
|
1467
|
+
oldVersion: dep.oldVersion,
|
|
1468
|
+
newVersion: dep.newVersion,
|
|
1469
|
+
changeType: 'namespace'
|
|
1470
|
+
}));
|
|
1471
|
+
const { changelogs: modifiedChangelogs, errors: modifiedErrors, ciStatus: modifiedCiStatus } =
|
|
1472
|
+
await getChangelogs(modifiedDepsForChangelog, newerVersionDir, reposDir);
|
|
1473
|
+
|
|
1474
|
+
// Merge changelogs, errors, and CI status
|
|
1475
|
+
Object.assign(changelogs, modifiedChangelogs);
|
|
1476
|
+
Object.assign(errors, modifiedErrors);
|
|
1477
|
+
Object.assign(ciStatus, modifiedCiStatus);
|
|
1478
|
+
}
|
|
1301
1479
|
|
|
1302
1480
|
// Get changelogs for nested upgraded dependencies
|
|
1303
|
-
|
|
1304
|
-
|
|
1305
|
-
|
|
1306
|
-
|
|
1307
|
-
|
|
1308
|
-
|
|
1309
|
-
|
|
1310
|
-
|
|
1481
|
+
if (comparison.nested.upgraded.length > 0) {
|
|
1482
|
+
console.log(`Generating changelogs for ${comparison.nested.upgraded.length} nested upgraded dependencies...`);
|
|
1483
|
+
const { changelogs: nestedChangelogs, errors: nestedErrors, ciStatus: nestedCiStatus } =
|
|
1484
|
+
await getChangelogs(comparison.nested.upgraded, newerVersionDir, reposDir);
|
|
1485
|
+
|
|
1486
|
+
// Merge nested changelogs, errors, and CI status
|
|
1487
|
+
Object.assign(changelogs, nestedChangelogs);
|
|
1488
|
+
Object.assign(errors, nestedErrors);
|
|
1489
|
+
Object.assign(ciStatus, nestedCiStatus);
|
|
1490
|
+
}
|
|
1311
1491
|
|
|
1312
1492
|
// Write report to file
|
|
1313
1493
|
const reportPath = join(reposDir, 'report.json');
|
|
@@ -1330,10 +1510,25 @@ const analyzeDependencyChanges = async (repoUrl, olderVersion, newerVersion, wor
|
|
|
1330
1510
|
// Add reportPath after writing the file
|
|
1331
1511
|
report.reportPath = reportPath;
|
|
1332
1512
|
|
|
1333
|
-
console.log(
|
|
1513
|
+
console.log(`\n๐ Report generated: ${reportPath}`);
|
|
1514
|
+
|
|
1515
|
+
// Don't auto-cleanup on success - user might want to examine the files
|
|
1516
|
+
// But unregister from emergency cleanup since we completed successfully
|
|
1517
|
+
unregisterTempDir(reposDir);
|
|
1518
|
+
|
|
1334
1519
|
return report;
|
|
1335
1520
|
} catch (error) {
|
|
1336
|
-
console.error(
|
|
1521
|
+
console.error(`\nโ Error analyzing dependency changes: ${error.message}`);
|
|
1522
|
+
|
|
1523
|
+
// Clean up on error
|
|
1524
|
+
try {
|
|
1525
|
+
await rm(reposDir, { recursive: true, force: true });
|
|
1526
|
+
unregisterTempDir(reposDir);
|
|
1527
|
+
console.log(`๐๏ธ Cleaned up temporary directory: ${reposDir}`);
|
|
1528
|
+
} catch (cleanupError) {
|
|
1529
|
+
console.warn(`โ ๏ธ Failed to clean up ${reposDir}: ${cleanupError.message}`);
|
|
1530
|
+
}
|
|
1531
|
+
|
|
1337
1532
|
throw error;
|
|
1338
1533
|
}
|
|
1339
1534
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "dependency-change-report",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.5",
|
|
4
4
|
"main": "index.mjs",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
@@ -14,7 +14,9 @@
|
|
|
14
14
|
"keywords": [],
|
|
15
15
|
"description": "Generate a dependency change report between different versions of a project",
|
|
16
16
|
"dependencies": {
|
|
17
|
+
"cli-progress": "^3.12.0",
|
|
17
18
|
"execa": "^9.6.0",
|
|
18
|
-
"semver": "^7.7.1"
|
|
19
|
+
"semver": "^7.7.1",
|
|
20
|
+
"p-queue": "^8.0.1"
|
|
19
21
|
}
|
|
20
22
|
}
|