depopsy 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,298 @@
1
// Well-known low-level utility packages — never actionable as root causes.
const LEAF_BLACKLIST = new Set([
  // Terminal styling
  'chalk', 'ansi-styles', 'ansi-regex', 'strip-ansi', 'wrap-ansi', 'string-width',
  'color-convert', 'color-name', 'supports-color', 'has-flag', 'kleur', 'picocolors',
  // Character / string utilities
  'is-fullwidth-code-point', 'emoji-regex', 'escape-string-regexp', 'string-length',
  'widest-line', 'wrap-ansi-cjs',
  // Core Node.js polyfills / util
  'inherits', 'safe-buffer', 'once', 'wrappy', 'inflight', 'ee-first', 'depd',
  'ms', 'debug', 'signal-exit',
  // Glob / file matching
  'minimatch', 'glob', 'brace-expansion', 'balanced-match', 'concat-map',
  'path-is-absolute', 'path-type', 'picomatch', 'micromatch', 'fast-glob',
  'glob-parent', 'is-glob', 'is-extglob', 'fill-range', 'to-regex-range',
  // Version / semver
  'semver', 'semver-compare', 'compare-versions',
  // MIME
  'mime', 'mime-types', 'mime-db',
  // Collection / iteration utilities
  'lru-cache', 'yallist', 'minipass',
  // Argument parsing
  'camelcase', 'decamelize', 'minimist', 'yargs-parser',
  // Config / rc
  'ini', 'js-ini', 'strip-json-comments',
  // Process
  'cross-spawn', 'execa', 'which', 'shebang-command', 'shebang-regex', 'isexe',
  // Misc
  'p-limit', 'p-locate', 'locate-path', 'path-exists', 'yocto-queue',
  'queue-microtask', 'run-parallel', 'reusify', 'fastq',
]);

// Name-pattern fallbacks for low-level families not enumerated above.
const LEAF_PREFIX_RE = /^(ansi|color|strip|wrap|supports|is-|has-|get-|set-)/;
const LEAF_SUFFIX_RE = /(-regex|-styles|-utils|-compat|-ify|-cjs)$/;

/**
 * True when `name` is a low-level leaf utility (explicit blacklist match,
 * or a recognized prefix/suffix family).
 */
function isLeafPackage(name) {
  return (
    LEAF_BLACKLIST.has(name) ||
    LEAF_PREFIX_RE.test(name) ||
    LEAF_SUFFIX_RE.test(name)
  );
}
40
+
41
/**
 * Formatter-facing predicate: true when a package name should be rendered as
 * a low-level utility rather than an actionable root cause. Falsy names and
 * the "⚠️"-prefixed misc bucket also count as low-level.
 */
export function isLowLevelPackage(name) {
  if (!name) return true;
  if (name.startsWith('⚠️')) return true;

  // Drop a leading "@scope/" so scoped wrappers match their bare family name.
  const bare = name.startsWith('@')
    ? name.split('/').slice(1).join('/')
    : name;

  if (isLeafPackage(bare)) return true;
  return /(ansi|regex|string|width|wrap|strip|minimatch|semver|color|glob)/i.test(bare);
}
48
+
49
/**
 * Extract the bare package name from a "name@version" string.
 * Scoped packages like "@babel/core@7.0.0" keep their "@scope/" prefix;
 * only the trailing "@version" separator is stripped.
 */
function pkgName(nameAtVersion) {
  if (!nameAtVersion) return '';
  // For scoped names, skip the leading "@" so only the version separator matches.
  const searchFrom = nameAtVersion.startsWith('@') ? 1 : 0;
  const sep = nameAtVersion.indexOf('@', searchFrom);
  return sep > 0 ? nameAtVersion.substring(0, sep) : nameAtVersion;
}
62
+
63
/**
 * SOURCE PRIORITY — the single source of truth for which packages a
 * duplicate is attributed to.
 *
 * Priority (highest → lowest):
 *   1. roots[]   — pre-computed top-level introducers (pnpm ownership DFS or
 *                  npm graph-traversal root nodes). Preferred because they
 *                  represent INTENTIONAL dependencies.
 *   2. parents[] — immediate dependents; closer to the signal and avoids
 *                  "ancestor soup" dilution.
 *   3. ancestors — the full flattened chain (detector maps inst.allParents
 *                  → ancestors). Last resort: very noisy, since low-level
 *                  packages appear in nearly every chain.
 *
 * NEVER consult ancestors first — on large graphs (Next.js) the ancestor
 * list runs to hundreds of entries and utilities like `semver` show up in
 * almost every chain, masquerading as root causes.
 */
function getSources(detail) {
  if (detail.roots?.length) return detail.roots;
  if (detail.parents?.length) return detail.parents;
  return detail.ancestors || detail.allParents || [];
}
88
+
89
/**
 * Likelihood that a group can be auto-fixed, from the share of SAFE entries:
 * all SAFE → HIGH, at least half SAFE → MEDIUM, otherwise (or empty) → LOW.
 */
function computeFixLikelihood(safeties) {
  const total = safeties.length;
  if (total === 0) return 'LOW';

  let safe = 0;
  for (const s of safeties) {
    if (s === 'SAFE') safe += 1;
  }

  if (safe === total) return 'HIGH';
  // safe / total >= 0.5, kept in integer arithmetic.
  return safe * 2 >= total ? 'MEDIUM' : 'LOW';
}
97
+
98
/**
 * Compute confidence based on per-package version spread.
 *
 * Rather than pooling ALL version strings together (which always yields
 * 3+ majors for large groups like lerna→164 packages), each affected
 * package is scored individually:
 *   - singleMajor: the package's duplicate versions share 1 major → easy fix
 *   - twoMajors:   2 distinct majors → likely fixable
 *   - manyMajors:  3+ distinct majors → hard / risky
 *
 * Confidence is the fraction of affected packages in each bucket:
 *   HIGH   → >60% are singleMajor
 *   MEDIUM → >30% are singleMajor (or >50% are singleMajor+twoMajors)
 *   LOW    → everything else
 *
 * Falls back to computeFixLikelihood() when there is no per-package data.
 */
function computeConfidence(group, scoredDuplicates) {
  if (!group.affectedPackages || group.affectedPackages.length === 0) {
    return computeFixLikelihood(group.safeties || []);
  }

  let singleMajor = 0;
  let twoMajors = 0;
  let manyMajors = 0;

  // BUGFIX: the loop variable was named `pkgName`, shadowing the module-level
  // pkgName() helper — any future call to the helper inside this loop would
  // have silently invoked a string. Renamed to `affectedName`.
  for (const affectedName of group.affectedPackages) {
    const dup = scoredDuplicates.find(d => d.name === affectedName);
    if (!dup || !dup.versions || dup.versions.length === 0) {
      continue; // no version data for this package — excluded from the ratio
    }
    // Distinct major versions, e.g. "^1.2.3" → "1" (non-digits stripped).
    const majors = new Set(
      dup.versions
        .map(v => (v || '').split('.')[0].replace(/[^0-9]/g, ''))
        .filter(Boolean)
    );
    if (majors.size <= 1) singleMajor++;
    else if (majors.size <= 2) twoMajors++;
    else manyMajors++;
  }

  const total = singleMajor + twoMajors + manyMajors;
  if (total === 0) return computeFixLikelihood(group.safeties || []);

  const pctSingle = singleMajor / total;
  const pctEasy = (singleMajor + twoMajors) / total;

  if (pctSingle > 0.6) return 'HIGH';
  if (pctSingle > 0.3 || pctEasy > 0.5) return 'MEDIUM';
  return 'LOW';
}
150
+
151
/**
 * Build an introducer attribution map: name → { name, affectedPackages, count, safeties }.
 * `filterFn(name)` → true means the name is an acceptable introducer.
 * Each duplicate is credited at most once per introducer name.
 */
function buildIntroducerMap(scoredDuplicates, filterFn) {
  const introducerMap = {};

  for (const dup of scoredDuplicates) {
    // Introducer names already credited for this particular duplicate.
    const credited = new Set();

    for (const detail of dup.details) {
      for (const source of getSources(detail)) {
        const name = pkgName(source);
        if (!name || !filterFn(name)) continue;
        if (credited.has(name)) continue;
        credited.add(name);

        if (!introducerMap[name]) {
          introducerMap[name] = { name, affectedPackages: [], count: 0, safeties: [] };
        }
        const entry = introducerMap[name];
        entry.count += dup.totalInstances;
        if (!entry.affectedPackages.includes(dup.name)) {
          entry.affectedPackages.push(dup.name);
        }
        entry.safeties.push(dup.safety);
      }
    }
  }

  return introducerMap;
}
185
+
186
/**
 * Format an introducer map into the final, sorted group list.
 * Sort order: most affected packages first, then total instance count.
 * @param {object} map              name → { name, affectedPackages, count, safeties }
 * @param {number} limit            maximum number of groups returned
 * @param {Array}  scoredDuplicates used for per-package version-spread confidence
 */
function formatGroups(map, limit = 8, scoredDuplicates = []) {
  // Degradation guard intentionally absent: "each root maps to 1 package" is a
  // valid (though non-ideal) state that the formatter handles with the
  // low-level fallback — no need to bail out here.
  const byRelevance = (a, b) =>
    b.affectedPackages.length - a.affectedPackages.length || b.count - a.count;

  return Object.values(map)
    .sort(byRelevance)
    .slice(0, limit)
    .map((group) => ({
      name: group.name,
      affectedPackages: group.affectedPackages,
      count: group.count,
      fixLikelihood: computeFixLikelihood(group.safeties),
      confidence: computeConfidence(group, scoredDuplicates),
    }));
}
210
+
211
/**
 * Build a coverage map: candidate name → { dups: Set<dupName>, weight }.
 * Coverage says how many distinct duplicate packages each candidate name
 * covers. Low-level leaf packages get a soft 0.3 weight penalty instead of
 * being removed. Uses getSources() so roots are preferred over ancestors.
 */
function buildCoverageMap(scoredDuplicates) {
  const coverageMap = {};

  for (const dup of scoredDuplicates) {
    // Count each candidate name at most once per duplicate.
    const seenNames = new Set();

    for (const detail of dup.details) {
      for (const source of getSources(detail)) {
        const name = pkgName(source);
        if (!name || seenNames.has(name)) continue;
        seenNames.add(name);

        if (!coverageMap[name]) {
          coverageMap[name] = { dups: new Set(), weight: 0 };
        }
        coverageMap[name].dups.add(dup.name);
        // Soft penalty for low-level packages instead of removing them.
        coverageMap[name].weight += isLeafPackage(name) ? 0.3 : 1.0;
      }
    }
  }

  return coverageMap;
}
250
+
251
/**
 * Group scored duplicates by their most likely root-cause introducers.
 *
 * LAYER 1 — explicit top-level matching (pnpm importers): when topLevelDeps
 *   is provided, only names in that set qualify. getSources() reads roots[]
 *   first, so pnpm ownership DFS results bypass the noisy ancestor chain.
 * LAYER 2 — coverage-based grouping (npm / yarn / pnpm without importers):
 *   candidates covering the most distinct duplicates win; a strict ≥2-dups
 *   threshold is tried first, then relaxed to 1.
 * LAYER 3 — misc fallback when no graph data exists at all.
 *
 * @param {Array} scoredDuplicates Output of scoreDuplicates()
 * @param {Set<string>} topLevelDeps Optional explicit top-level dependency names
 * @returns {Array} Formatted groups (same shape as formatGroups() output)
 */
export function groupRootCauses(scoredDuplicates, topLevelDeps = new Set()) {
  // Attribute duplicates to the accepted names; null when nothing matched.
  const tryAttribution = (accept) => {
    const map = buildIntroducerMap(scoredDuplicates, accept);
    return Object.keys(map).length > 0 ? formatGroups(map, 8, scoredDuplicates) : null;
  };

  // ── LAYER 1: Explicit top-level matching (pnpm importers) ──────────────────
  if (topLevelDeps && topLevelDeps.size > 0) {
    const groups = tryAttribution(name => topLevelDeps.has(name));
    if (groups) return groups;
  }

  // ── LAYER 2: Coverage-based grouping ───────────────────────────────────────
  const coverageMap = buildCoverageMap(scoredDuplicates);
  const candidates = Object.entries(coverageMap)
    .sort((a, b) =>
      b[1].weight - a[1].weight ||
      b[1].dups.size - a[1].dups.size
    );

  // Try the strict threshold first (candidate must cover ≥2 duplicates).
  const strict = candidates.filter(([, data]) => data.dups.size >= 2);
  if (strict.length > 0) {
    const topNames = new Set(strict.slice(0, 10).map(([n]) => n));
    const groups = tryAttribution(name => topNames.has(name));
    if (groups) return groups;
  }

  // Relax to threshold=1 if nothing met ≥2.
  if (candidates.length > 0) {
    const topNames = new Set(candidates.slice(0, 10).map(([n]) => n));
    const groups = tryAttribution(name => topNames.has(name));
    if (groups) return groups;
  }

  // ── LAYER 3: Misc fallback (truly no graph data) ───────────────────────────
  return [{
    name: '⚠️ Misc / Low-level dependencies',
    affectedPackages: scoredDuplicates.map(d => d.name),
    count: scoredDuplicates.reduce((s, d) => s + d.totalInstances, 0),
    fixLikelihood: 'LOW',
    // BUGFIX: include `confidence` so the fallback matches the shape produced
    // by formatGroups() — consumers no longer read `undefined` here.
    confidence: 'LOW',
  }];
}
@@ -0,0 +1,40 @@
1
+ import semver from 'semver';
2
+
3
/**
 * Attach a severity level, a fix-confidence level and a numeric sort score
 * to every duplicate.
 * @param {Array} duplicates The raw duplicate analysis
 * @returns {Array} Duplicates with `severity`, `confidence` and `score`
 *   embedded, sorted by descending score.
 */
export function scoreDuplicates(duplicates) {
  const SEVERITY_WEIGHT = { HIGH: 100, MEDIUM: 50, LOW: 10 };

  const scored = duplicates.map((dup) => {
    // Severity from instance count / wasted bytes (4+ instances → HIGH).
    let severity;
    if (dup.totalInstances >= 4 || dup.wastedBytes > 200 * 1024) {
      severity = 'HIGH';
    } else if (dup.totalInstances >= 2 || dup.wastedBytes > 50 * 1024) {
      severity = 'MEDIUM';
    } else {
      severity = 'LOW';
    }

    // Confidence: SAFE duplicates are highly fixable; otherwise MEDIUM when
    // one version already accounts for ≥80% of the instances.
    let confidence = 'LOW';
    if (dup.safety === 'SAFE') {
      confidence = 'HIGH';
    } else if (dup.details?.length > 0 && dup.details[0].count >= dup.totalInstances * 0.8) {
      confidence = 'MEDIUM';
    }

    const score = dup.totalInstances * 10 + SEVERITY_WEIGHT[severity];

    return { ...dup, severity, confidence, score };
  });

  return scored.sort((a, b) => b.score - a.score);
}
@@ -0,0 +1,57 @@
1
+ import chalk from 'chalk';
2
+ import { parseLockfile } from '../parser/index.js';
3
+ import { buildDependencyGraph } from '../graph/builder.js';
4
+ import { detectDuplicates } from '../analyze/detector.js';
5
+ import { scoreDuplicates } from '../analyze/scorer.js';
6
+ import { groupRootCauses } from '../analyze/grouper.js';
7
+ import { printTextReport, printJsonReport, printCiReport } from '../report/formatter.js';
8
+ import { detectWorkspaces } from '../utils/workspace.js';
9
+ import fs from 'fs/promises';
10
+ import path from 'path';
11
+
12
/**
 * `depopsy analyze` — parse the lockfile, detect and score duplicate
 * packages, group root causes and print a report in text, JSON or CI format.
 * Exits 1 when duplicates exist, 0 when clean, 2 on error.
 * @param {object} options CLI flags: json, ci, verbose, simple, top
 */
export async function commandAnalyze(options) {
  const projectDir = process.cwd();

  try {
    if (!options.json && !options.ci) {
      console.log(chalk.dim('Analyzing large dependency graph...'));
    }

    // Best-effort read of package.json for workspace detection; a missing or
    // malformed file is deliberately non-fatal.
    let pkg = null;
    try {
      const pkgStr = await fs.readFile(path.join(projectDir, 'package.json'), 'utf-8');
      pkg = JSON.parse(pkgStr);
    } catch (e) {
      // ignored — analysis proceeds without package.json
    }

    // We can use the workspace config eventually to refine ignored internal packages
    await detectWorkspaces(projectDir, pkg);

    const { type, map: rawPackagesMap, topLevelDeps = new Set() } = await parseLockfile(projectDir);
    const cleanMap = buildDependencyGraph(rawPackagesMap);
    const duplicates = await detectDuplicates(cleanMap, projectDir);
    const scoredDuplicates = scoreDuplicates(duplicates);
    const rootCauses = groupRootCauses(scoredDuplicates, topLevelDeps);

    if (options.ci) {
      printCiReport(scoredDuplicates);
    } else if (options.json) {
      printJsonReport(scoredDuplicates, rootCauses);
    } else {
      // Human-readable report: announce the detected package manager first.
      // (Collapses three identical `if` statements into one lookup; unknown
      // lockfile types print nothing, exactly as before.)
      const knownManagers = { npm: 'npm', yarn: 'yarn', pnpm: 'pnpm' };
      if (knownManagers[type]) {
        console.log(chalk.dim(`Detected Package Manager: ${knownManagers[type]}`));
      }
      printTextReport(scoredDuplicates, rootCauses, options);
    }

    // Identical exit rule for every mode, hoisted out of the three branches:
    // non-zero signals duplicates to CI pipelines.
    process.exit(scoredDuplicates.length > 0 ? 1 : 0);
  } catch (error) {
    if (options.json || options.ci) {
      console.log(JSON.stringify({ error: error.message }));
    } else {
      console.error(chalk.red(`❌ Error: ${error.message}`));
    }
    process.exit(2);
  }
}
@@ -0,0 +1,37 @@
1
+ import chalk from 'chalk';
2
+ import { parseLockfile } from '../parser/index.js';
3
+ import { buildDependencyGraph } from '../graph/builder.js';
4
+ import { detectDuplicates } from '../analyze/detector.js';
5
+ import { scoreDuplicates } from '../analyze/scorer.js';
6
+ import { applyFixes } from '../fix/fixer.js';
7
+ import { detectWorkspaces } from '../utils/workspace.js';
8
+ import fs from 'fs/promises';
9
+ import path from 'path';
10
+
11
/**
 * `depopsy fix` — analyze duplicates and apply (or preview) SAFE fixes via
 * package.json overrides. Runs as a dry-run unless --yes is passed.
 * Exits 1 on error.
 * @param {object} options CLI flags: yes
 */
export async function commandFix(options) {
  const projectDir = process.cwd();
  // Writes only happen with an explicit --yes; everything else is a preview.
  const isDryRun = !options.yes;

  try {
    console.log(chalk.dim('Analyzing dependencies to prepare fix plan...'));

    // Best-effort package.json read for workspace detection.
    let pkg = null;
    try {
      const raw = await fs.readFile(path.join(projectDir, 'package.json'), 'utf-8');
      pkg = JSON.parse(raw);
    } catch (e) {
      // ignored — fixing proceeds without workspace info
    }

    await detectWorkspaces(projectDir, pkg);

    const { type, map: rawPackagesMap } = await parseLockfile(projectDir);
    const cleanMap = buildDependencyGraph(rawPackagesMap);
    const duplicates = await detectDuplicates(cleanMap, projectDir);
    const scoredDuplicates = scoreDuplicates(duplicates);

    await applyFixes(scoredDuplicates, projectDir, isDryRun, type);
  } catch (error) {
    console.error(chalk.red(`❌ Error: ${error.message}`));
    process.exit(1);
  }
}
@@ -0,0 +1,27 @@
1
+ import { commandAnalyze } from './analyze-command.js';
2
+ import { commandFix } from './fix-command.js';
3
+ import { commandTrace } from './trace-command.js';
4
+
5
/**
 * Register the depopsy CLI commands on the given commander program.
 * `analyze` is the default command when none is specified.
 */
export function setupCommands(program) {
  // Add default behavior when no command is passed to run 'analyze'
  const analyze = program
    .command('analyze', { isDefault: true })
    .description('Analyze the lockfile and output a duplicate dependencies report');
  analyze.option('--json', 'Output report as JSON');
  analyze.option('--ci', 'Run in CI mode (minimal JSON, correct exit codes, no formatting)');
  analyze.option('--verbose', 'Show full breakdown of all root causes and packages');
  analyze.option('--simple', 'Show only top 3 root causes — great for a quick overview');
  analyze.option('--top <number>', 'Limit results to top N root cause groups (default 5)', parseInt);
  analyze.action(commandAnalyze);

  // Write-capable command; defaults to a dry-run inside commandFix.
  program
    .command('fix')
    .description('Safely automatically consolidate duplicate dependencies via package.json overrides')
    .option('--yes', 'Confirm to actually write the changes (defaults to dry-run)')
    .action(commandFix);

  // Read-only introspection of a single package's introducers.
  program
    .command('trace <package>')
    .description('Trace which top-level dependencies introduce a given package')
    .action(commandTrace);
}
@@ -0,0 +1,104 @@
1
+ import chalk from 'chalk';
2
+ import { parseLockfile } from '../parser/index.js';
3
+ import { buildDependencyGraph } from '../graph/builder.js';
4
+ import { detectDuplicates } from '../analyze/detector.js';
5
+ import { scoreDuplicates } from '../analyze/scorer.js';
6
+
7
/**
 * `depopsy trace <package>` — show which top-level dependencies and parent
 * chains introduce the given package.
 * @param {string} pkgArg Package name, optionally with an "@version" suffix.
 * @param {object} options CLI options (currently unused).
 */
export async function commandTrace(pkgArg, options) {
  const projectDir = process.cwd();

  // Strip an accidental "@version" suffix while preserving a leading scope.
  // BUGFIX: the previous `pkgArg.replace(/@.*/, '')` collapsed scoped names
  // like "@babel/core" to an empty string, making scoped traces impossible.
  const versionSep = pkgArg.indexOf('@', pkgArg.startsWith('@') ? 1 : 0);
  const targetName = versionSep > 0 ? pkgArg.slice(0, versionSep) : pkgArg;

  try {
    console.log(chalk.dim(`\nTracing "${targetName}" through dependency graph...\n`));

    const { map: rawPackagesMap } = await parseLockfile(projectDir);
    const cleanMap = buildDependencyGraph(rawPackagesMap);
    const duplicates = await detectDuplicates(cleanMap, projectDir);
    const scored = scoreDuplicates(duplicates);

    // Find the target duplicate (if any)
    const target = scored.find(d => d.name === targetName);

    if (!target) {
      console.log(chalk.yellow(` ⚠ "${targetName}" is not in the duplicate list.`));
      console.log(chalk.dim(` It might exist at a single version (no conflict) or not be installed.\n`));
      return;
    }

    // (Removed: a `rootSet` map was previously built here but never read —
    // dead code whose comment also mis-described its contents.)

    // Full flattened ancestor set, used when no explicit roots are available.
    const allAncestors = new Set();
    for (const detail of target.details) {
      for (const a of (detail.ancestors || detail.allParents || detail.roots || [])) {
        allAncestors.add(a);
      }
    }

    console.log(chalk.bold.white(`🔍 Trace: ${chalk.cyan(targetName)}`));
    console.log(chalk.dim('─'.repeat(50)));
    console.log('');

    // Show versions found
    console.log(` ${chalk.bold('Versions found:')} ${target.versions.map(v => chalk.yellow(v)).join(', ')}`);
    console.log(` ${chalk.bold('Safety:')} ${target.safety === 'SAFE' ? chalk.green('SAFE') : chalk.red('RISKY')}`);
    console.log('');

    // Prefer explicit top-level introducers (roots); fall back to ancestors.
    const roots = [...new Set(target.details.flatMap(d => d.roots || []))].filter(Boolean);
    if (roots.length > 0) {
      console.log(chalk.bold.white(` ${targetName} is introduced by:`));
      for (const r of roots) {
        console.log(` ${chalk.dim('▶')} ${chalk.cyan(r)}`);
      }
    } else if (allAncestors.size > 0) {
      console.log(chalk.bold.white(` ${targetName} appears via these ancestors:`));
      const ancestors = [...allAncestors].slice(0, 15);
      for (const a of ancestors) {
        console.log(` ${chalk.dim('→')} ${a}`);
      }
      if (allAncestors.size > 15) {
        console.log(chalk.dim(` (+ ${allAncestors.size - 15} more)`));
      }
    } else {
      console.log(chalk.yellow(` No parent chain found.`));
      console.log(chalk.dim(` "${targetName}" may be a direct project dependency.\n`));
    }

    // Show immediate parents per version
    console.log('');
    console.log(chalk.bold.white(' Per-version breakdown:'));
    for (const detail of target.details) {
      const immediateParents = (detail.parents || []).slice(0, 5);
      const more = (detail.parents || []).length - immediateParents.length;
      console.log(` ${chalk.yellow(detail.version)} — required by: ${
        immediateParents.length > 0
          ? immediateParents.join(', ') + (more > 0 ? chalk.dim(` (+${more} more)`) : '')
          : chalk.dim('(unknown)')
      }`);
    }

    console.log('');
    console.log(chalk.dim('─'.repeat(50)));
    if (target.suggestedVersion) {
      console.log(` 💡 Suggested fix: align all to ${chalk.green(target.suggestedVersion)}`);
      console.log(chalk.dim(` Run: npx depopsy fix\n`));
    } else {
      console.log(chalk.dim(` ⚠ No auto-fix available — versions span multiple majors.\n`));
    }
  } catch (err) {
    console.error(chalk.red(`❌ Error: ${err.message}`));
    process.exit(2);
  }
}
@@ -0,0 +1,90 @@
1
+ import fs from 'fs/promises';
2
+ import path from 'path';
3
+ import chalk from 'chalk';
4
+
5
/**
 * Consolidate SAFE duplicates to a single version by writing
 * overrides/resolutions entries into package.json.
 * @param {Array} scoredDuplicates Output of scoreDuplicates()
 * @param {string} projectDir Project root containing package.json
 * @param {boolean} isDryRun When true, only print the plan — no writes
 * @param {string} packageManagerType 'npm' | 'yarn' | 'pnpm'
 * @throws {Error} When package.json cannot be read
 */
export async function applyFixes(scoredDuplicates, projectDir, isDryRun, packageManagerType) {
  const packageJsonPath = path.join(projectDir, 'package.json');

  let pkgContent;
  try {
    pkgContent = await fs.readFile(packageJsonPath, 'utf-8');
  } catch (e) {
    throw new Error('Could not read package.json');
  }

  const pkg = JSON.parse(pkgContent);

  // Only SAFE duplicates with a concrete suggested version are auto-fixable.
  // BUGFIX: use a loose null check so entries whose suggestedVersion is
  // `undefined` (not just `null`) are also excluded — the strict `!== null`
  // let undefined versions through and wrote `"pkg": undefined` overrides.
  const targets = scoredDuplicates.filter(dup => dup.safety === 'SAFE' && dup.suggestedVersion != null);
  const riskyTargets = scoredDuplicates.filter(dup => dup.safety === 'RISKY');

  if (targets.length === 0) {
    console.log(chalk.green('✅ No auto-fixes available. Semver-compatible SAFE duplicates not found.'));
    return;
  }

  // Build the overrides object
  const newOverrides = {};
  for (const target of targets) {
    newOverrides[target.name] = target.suggestedVersion;
  }

  console.log(chalk.bold.underline(`\n🛠️ Deduplication Fix Plan (${packageManagerType}):`));
  console.log(chalk.dim('Applying SAFE fixes only...'));

  for (const target of targets) {
    console.log(` ✔ ${chalk.cyan(target.name)} (SAFE) -> aligns to ${chalk.green(target.suggestedVersion)}`);
  }
  for (const target of riskyTargets) {
    console.log(` ⚠️ skipped ${chalk.cyan(target.name)} (RISKY)`);
  }

  if (isDryRun) {
    console.log(chalk.yellow('\n⚠️ This is a DRY RUN. No files have been modified.'));
    console.log(`To apply these changes and update your package.json, run:\n ${chalk.cyan('npx depopsy fix --yes')}\n`);
    return;
  }

  // Property name reported to the user: npm "overrides", yarn "resolutions",
  // pnpm the nested "pnpm.overrides".
  let propToUpdate = 'overrides';
  if (packageManagerType === 'yarn') {
    propToUpdate = 'resolutions';
  } else if (packageManagerType === 'pnpm') {
    propToUpdate = 'pnpm.overrides';
  }

  // Apply fixes (pnpm nests its overrides under the "pnpm" key).
  if (packageManagerType === 'pnpm') {
    if (!pkg.pnpm) pkg.pnpm = {};
    if (!pkg.pnpm.overrides) pkg.pnpm.overrides = {};
    Object.assign(pkg.pnpm.overrides, newOverrides);
  } else {
    if (!pkg[propToUpdate]) pkg[propToUpdate] = {};
    Object.assign(pkg[propToUpdate], newOverrides);
  }

  // Back up the original package.json before rewriting it (best-effort).
  try {
    const backupDir = path.join(projectDir, '.depopsy-backup');
    await fs.mkdir(backupDir, { recursive: true });
    await fs.writeFile(path.join(backupDir, 'package.json.bak'), pkgContent);
    console.log(chalk.dim(`\nBacked up package.json to ${backupDir}`));
  } catch (e) {
    console.log(chalk.dim(`Warning: Failed to create backups.`));
  }

  // Write new package.json
  await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
  console.log(chalk.green(`\n✅ Successfully updated package.json "${propToUpdate}".`));

  // Remind the user that an install is required for the lockfile to change
  // (three near-identical branches collapsed into one lookup).
  const installCmd = { npm: 'npm install', yarn: 'yarn install', pnpm: 'pnpm install' }[packageManagerType];
  if (installCmd) {
    console.log(chalk.bold(`You MUST now run ${chalk.cyan(installCmd)} to apply the deduplication to your lockfile!\n`));
  }
}
@@ -0,0 +1,30 @@
1
+ import { isLocalVersion } from '../utils/workspace.js';
2
+
3
/**
 * Normalize the raw lockfile package map for duplicate detection.
 * In the future this can stitch together full traversal trees; for duplicate
 * bloat detection it only needs to drop local/workspace-internal instances
 * and discard packages left with no real registry versions.
 * @param {Map<string, {instances: Array}>} rawPackagesMap
 * @returns {Map<string, {versions: Set<string>, instances: Array}>}
 */
export function buildDependencyGraph(rawPackagesMap) {
  const cleanMap = new Map();

  for (const [name, data] of rawPackagesMap.entries()) {
    // Keep only instances whose version is a real registry version.
    const externalInstances = data.instances.filter(
      (instance) => !isLocalVersion(instance.version)
    );
    if (externalInstances.length === 0) continue;

    cleanMap.set(name, {
      versions: new Set(externalInstances.map((instance) => instance.version)),
      instances: externalInstances,
    });
  }

  return cleanMap;
}