prunify 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,1174 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli.ts
4
+ import { Command } from "commander";
5
+ import chalk6 from "chalk";
6
+ import Table5 from "cli-table3";
7
+ import fs8 from "fs";
8
+ import path8 from "path";
9
+ import { fileURLToPath } from "url";
10
+ import readline from "readline";
11
+
12
+ // src/core/parser.ts
13
+ import fs2 from "fs";
14
+ import path2 from "path";
15
+ import { Project, Node, SyntaxKind } from "ts-morph";
16
+
17
+ // src/utils/file.ts
18
+ import fs from "fs";
19
+ import path from "path";
20
+ import { minimatch } from "minimatch";
21
// Recursively match files under `dir` against glob `patterns`,
// skipping anything matched by the `ignore` globs. Returns absolute paths.
function glob(dir, patterns, ignore = []) {
  const matched = [];
  collect(dir, dir, patterns, ignore, matched);
  return matched;
}
26
// Depth-first directory walk backing glob(): pushes files matching
// `patterns` (paths relative to `base`, "/"-separated) into `results`,
// pruning anything that matches one of the `ignore` globs.
function collect(base, current, patterns, ignore, results) {
  let dirEntries;
  try {
    dirEntries = fs.readdirSync(current, { withFileTypes: true });
  } catch {
    // Unreadable directory (permissions, race) — skip it silently.
    return;
  }
  for (const dirEntry of dirEntries) {
    const fullPath = path.join(current, dirEntry.name);
    const relativePath = path.relative(base, fullPath).replace(/\\/g, "/");
    const ignored = ignore.some((pat) => minimatch(relativePath, pat));
    if (ignored) continue;
    if (dirEntry.isDirectory()) {
      collect(base, fullPath, patterns, ignore, results);
    } else if (dirEntry.isFile() && patterns.some((pat) => minimatch(relativePath, pat))) {
      results.push(fullPath);
    }
  }
}
48
+
49
+ // src/core/parser.ts
50
// Globs always excluded from analysis: dependencies, build output,
// coverage, tests, stories and type declarations. Each directory is
// listed both bare and with "/**" because the ignore check is applied
// to the directory entry itself as well as to paths inside it.
var DEFAULT_IGNORE = [
  "node_modules",
  "node_modules/**",
  "dist",
  "dist/**",
  ".next",
  ".next/**",
  "coverage",
  "coverage/**",
  "**/*.test.ts",
  "**/*.spec.ts",
  "**/*.stories.tsx",
  "**/*.d.ts"
];
// File globs treated as analyzable source.
var SOURCE_PATTERNS = ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"];
// Extensions tried, in order, when resolving an import specifier; the
// empty string comes first so specifiers that already carry an
// extension resolve as-is.
var RESOLVE_EXTENSIONS = [
  "",
  ".ts",
  ".tsx",
  ".js",
  ".jsx"
];
// Barrel-file names tried when a specifier points at a directory.
var INDEX_FILES = ["index.ts", "index.tsx", "index.js", "index.jsx"];
73
// Find all analyzable source files under `rootDir`, honoring the
// built-in ignore list plus any caller-supplied `ignore` globs.
function discoverFiles(rootDir, ignore = []) {
  const excluded = DEFAULT_IGNORE.concat(ignore);
  return glob(rootDir, SOURCE_PATTERNS, excluded);
}
76
// Create a ts-morph Project holding exactly `files`. Compiler options
// come from `tsconfigPath` when given, else from the nearest
// tsconfig.json above the first file; with no tsconfig, permissive
// options (allowJs, resolveJsonModule) let plain-JS projects be
// analyzed. Files referenced by the tsconfig itself are deliberately
// NOT added (skipAddingFilesFromTsConfig) — only `files` are loaded.
function buildProject(files, tsconfigPath) {
  const resolved = tsconfigPath ?? (files.length > 0 ? findTsconfig(files[0]) : void 0);
  const project = resolved ? new Project({ tsConfigFilePath: resolved, skipAddingFilesFromTsConfig: true }) : new Project({
    compilerOptions: {
      allowJs: true,
      resolveJsonModule: true
    }
  });
  project.addSourceFilesAtPaths(files);
  return project;
}
87
// Collect absolute paths of all project files `sourceFile` depends on,
// via static imports, re-exports and dynamic import() calls with
// string-literal arguments. node_modules targets, "node:" built-ins
// and self-imports are excluded from the result.
function getImportsForFile(sourceFile) {
  const result = /* @__PURE__ */ new Set();
  const fileDir = path2.dirname(sourceFile.getFilePath());
  const project = sourceFile.getProject();
  const compilerOptions = project.getCompilerOptions();
  const pathAliases = compilerOptions.paths ?? {};
  const baseUrl = compilerOptions.baseUrl;
  // Record a file ts-morph already resolved, unless it lives in
  // node_modules or is the importing file itself.
  function addResolved(sf) {
    if (!sf) return;
    const p = path2.normalize(sf.getFilePath());
    if (!p.includes(`${path2.sep}node_modules${path2.sep}`) && !p.includes("/node_modules/") && path2.normalize(sourceFile.getFilePath()) !== p) {
      result.add(p);
    }
  }
  // Fallback for specifiers ts-morph could not resolve: relative paths
  // resolve against this file's directory, other specifiers go through
  // tsconfig path aliases. "node:" URLs are built-ins and skipped.
  function resolveAndAdd(specifier) {
    if (!specifier) return;
    const isRelative = specifier.startsWith("./") || specifier.startsWith("../");
    if (isRelative) {
      const p = resolveRelativePath(fileDir, specifier, project);
      if (p) result.add(p);
    } else if (!specifier.startsWith("node:")) {
      const p = resolvePathAlias(specifier, pathAliases, baseUrl, project);
      if (p) result.add(p);
    }
  }
  // import ... from "x"
  for (const decl of sourceFile.getImportDeclarations()) {
    const sf = decl.getModuleSpecifierSourceFile();
    sf ? addResolved(sf) : resolveAndAdd(decl.getModuleSpecifierValue());
  }
  // export ... from "x"; exports without a module specifier carry no
  // dependency and are skipped.
  for (const decl of sourceFile.getExportDeclarations()) {
    const specifier = decl.getModuleSpecifierValue();
    if (!specifier) continue;
    const sf = decl.getModuleSpecifierSourceFile();
    sf ? addResolved(sf) : resolveAndAdd(specifier);
  }
  // Dynamic import("x") — only string-literal arguments are statically
  // resolvable; anything else is ignored.
  for (const call of sourceFile.getDescendantsOfKind(SyntaxKind.CallExpression)) {
    if (call.getExpression().getKind() !== SyntaxKind.ImportKeyword) continue;
    const args = call.getArguments();
    if (args.length > 0 && Node.isStringLiteral(args[0])) {
      resolveAndAdd(args[0].getLiteralValue());
    }
  }
  return [...result];
}
131
// Resolve a relative import specifier against `fromDir` to a source
// file already loaded in `project`: first the path with each known
// extension appended, then as a directory holding a barrel file.
// Returns the normalized absolute path, or null when nothing matches.
function resolveRelativePath(fromDir, specifier, project) {
  const base = path2.resolve(fromDir, specifier);
  const candidates = [
    ...RESOLVE_EXTENSIONS.map((ext) => base + ext),
    ...INDEX_FILES.map((index) => path2.join(base, index))
  ];
  for (const candidate of candidates) {
    const sf = project.getSourceFile(candidate);
    if (sf) return path2.normalize(sf.getFilePath());
  }
  return null;
}
143
// Resolve a non-relative specifier through tsconfig "paths" aliases:
// the wildcard capture from the first matching alias is substituted
// into each target pattern (resolved against baseUrl when set) until
// one maps to a loaded source file. Returns null when no alias applies.
function resolvePathAlias(specifier, pathAliases, baseUrl, project) {
  for (const [alias, targets] of Object.entries(pathAliases)) {
    const match = matchAlias(alias, specifier);
    if (!match) continue;
    const capture = match[1] ?? "";
    for (const target of targets) {
      const substituted = target.replaceAll("*", capture);
      const absolute = baseUrl ? path2.resolve(baseUrl, substituted) : path2.resolve(substituted);
      const hit = tryResolveAbsolute(absolute, project);
      if (hit) return hit;
    }
  }
  return null;
}
157
// Test `specifier` against a tsconfig path-alias pattern where "*" is
// a wildcard. Returns the RegExp match (capture 1 holds the wildcard
// text) or null when the alias does not apply.
function matchAlias(alias, specifier) {
  // Escape regex metacharacters, but leave "*" to become a capture group.
  const escaped = alias.replace(/[.+^${}()|[\]\\]/g, "\\$&");
  const re = new RegExp(`^${escaped.replaceAll("*", "(.*)")}$`);
  return re.exec(specifier);
}
162
// Map an absolute path (possibly extensionless, possibly a directory)
// to a source file loaded in `project`: known extensions first, then
// barrel files inside the directory. Null when nothing matches.
function tryResolveAbsolute(absolute, project) {
  const candidates = [
    ...RESOLVE_EXTENSIONS.map((ext) => absolute + ext),
    ...INDEX_FILES.map((index) => path2.join(absolute, index))
  ];
  for (const candidate of candidates) {
    const sf = project.getSourceFile(candidate);
    if (sf) return path2.normalize(sf.getFilePath());
  }
  return null;
}
173
// Walk upward from the directory containing `fromFile`, returning the
// nearest tsconfig.json found, or undefined after reaching the
// filesystem root without a hit.
function findTsconfig(fromFile) {
  const startDir = path2.dirname(fromFile);
  const root = path2.parse(startDir).root;
  for (let dir = startDir; dir !== root; ) {
    const candidate = path2.join(dir, "tsconfig.json");
    if (fs2.existsSync(candidate)) return candidate;
    const parent = path2.dirname(dir);
    if (parent === dir) break;
    dir = parent;
  }
  return void 0;
}
185
+
186
+ // src/core/graph.ts
187
+ import fs3 from "fs";
188
+ import path3 from "path";
189
// Build the forward import graph: file -> set of files it imports.
// Imported files not in `files` still get a node (with no out-edges)
// so traversals can visit them safely.
function buildGraph(files, getImports) {
  const graph = new Map(files.map((file) => [file, new Set()]));
  for (const file of files) {
    const edges = graph.get(file);
    for (const imported of getImports(file)) {
      edges?.add(imported);
      if (!graph.has(imported)) graph.set(imported, new Set());
    }
  }
  return graph;
}
202
// Determine the root files for reachability analysis: Next.js
// pages/app files plus package.json "main"/"module" targets, falling
// back to a conventional src entry file when neither yields anything.
// The result is deduplicated.
function findEntryPoints(rootDir, packageJson) {
  const entries = resolveNextJsEntries(rootDir).concat(
    resolvePkgFieldEntries(rootDir, packageJson)
  );
  if (entries.length === 0) {
    const fallback = resolveFallbackEntry(rootDir);
    if (fallback) entries.push(fallback);
  }
  return [...new Set(entries)];
}
213
// Iterative depth-first traversal from `entryPoints` over the import
// graph; returns the set of reachable (live) nodes.
function runDFS(graph, entryPoints) {
  const reachable = new Set();
  const pending = [...entryPoints];
  for (let current = pending.pop(); current !== void 0; current = pending.pop()) {
    if (reachable.has(current)) continue;
    reachable.add(current);
    for (const neighbor of graph.get(current) ?? []) {
      if (!reachable.has(neighbor)) pending.push(neighbor);
    }
  }
  return reachable;
}
226
// For every dead root file, compute the chain of additional files that
// become removable once that root is deleted.
function findDeadChains(graph, deadFiles) {
  const reverseGraph = buildReverseGraph(graph);
  return new Map(
    [...deadFiles].map((deadRoot) => [
      deadRoot,
      dfsDeadChain(deadRoot, graph, deadFiles, reverseGraph)
    ])
  );
}
234
// Find all unique import cycles in the graph. Delegates to an
// explicit-stack DFS (dfsForCycles) so deep graphs cannot overflow the
// call stack; `acc.seenKeys` deduplicates rotated copies of a cycle.
function detectCycles(graph) {
  const cycles = [];
  const acc = { seenKeys: new Set(), cycles };
  const visited = new Set();
  const inStack = new Set();
  const currentPath = [];
  for (const start of graph.keys()) {
    if (visited.has(start)) continue;
    dfsForCycles(start, graph, visited, inStack, currentPath, acc);
  }
  return cycles;
}
248
// If the project looks like a Next.js app (a next.config.* exists),
// treat every source file under pages/ and app/ as an entry point,
// since Next.js loads those by convention rather than by import.
function resolveNextJsEntries(rootDir) {
  const configNames = ["next.config.js", "next.config.ts", "next.config.mjs"];
  const isNext = configNames.some((name) => fs3.existsSync(path3.join(rootDir, name)));
  if (!isNext) return [];
  const entries = [];
  for (const dir of ["pages", "app"]) {
    const dirPath = path3.join(rootDir, dir);
    if (fs3.existsSync(dirPath)) entries.push(...collectSourceFiles(dirPath));
  }
  return entries;
}
258
// Resolve package.json "main" / "module" fields to absolute paths,
// keeping only string values whose targets exist on disk.
function resolvePkgFieldEntries(rootDir, packageJson) {
  return ["main", "module"]
    .map((field) => packageJson?.[field])
    .filter((value) => typeof value === "string")
    .map((value) => path3.resolve(rootDir, value))
    .filter((abs) => fs3.existsSync(abs));
}
268
// Conventional entry files checked (in priority order) when no
// explicit entry point was found. Undefined when none exists.
function resolveFallbackEntry(rootDir) {
  const candidates = ["src/main.ts", "src/main.tsx", "src/index.ts", "src/index.tsx"];
  const hit = candidates
    .map((rel) => path3.join(rootDir, rel))
    .find((abs) => fs3.existsSync(abs));
  return hit ?? void 0;
}
276
// Create a DFS stack frame: the node, a live iterator over its
// outgoing edges, and a flag marking whether the frame was entered.
function mkFrame(node, graph) {
  const edges = graph.get(node) ?? new Set();
  return { node, neighbors: edges.values(), entered: false };
}
279
// Explicit-stack DFS used by detectCycles(). Each frame holds a node,
// a live iterator over its neighbors, and an `entered` flag telling
// whether the node is already on the current path. Nodes on the path
// are tracked in both `inStack` (O(1) membership) and `path9`
// (ordering, needed to extract the cycle slice). Fully explored nodes
// move to `visited`. Iterative on purpose: deep import chains must
// not overflow the call stack.
function dfsForCycles(start, graph, visited, inStack, path9, acc) {
  const stack = [mkFrame(start, graph)];
  while (stack.length > 0) {
    const frame = stack.at(-1);
    if (!frame) break;
    if (!frame.entered) {
      // First time we see this frame: skip if another traversal already
      // finished the node, otherwise push it onto the current path.
      if (visited.has(frame.node)) {
        stack.pop();
        continue;
      }
      frame.entered = true;
      inStack.add(frame.node);
      path9.push(frame.node);
    }
    const { done, value: neighbor } = frame.neighbors.next();
    if (done) {
      // All neighbors explored — unwind the node from the current path
      // and mark it permanently visited.
      stack.pop();
      path9.pop();
      inStack.delete(frame.node);
      visited.add(frame.node);
    } else {
      handleCycleNeighbor(neighbor, stack, path9, inStack, visited, acc, graph);
    }
  }
}
304
// One DFS step: a neighbor already on the current path closes a cycle;
// an unvisited neighbor is pushed for exploration; a fully visited one
// needs nothing.
function handleCycleNeighbor(neighbor, stack, path9, inStack, visited, acc, graph) {
  if (inStack.has(neighbor)) {
    recordCycle(neighbor, path9, acc);
    return;
  }
  if (!visited.has(neighbor)) stack.push(mkFrame(neighbor, graph));
}
311
// Record the cycle that closes at `cycleStart`: slice it off the
// current path, normalize its rotation, and store it unless an
// equivalent cycle was already seen (keyed on the NUL-joined form).
function recordCycle(cycleStart, path9, acc) {
  const idx = path9.indexOf(cycleStart);
  if (idx === -1) return;
  const cycle = normalizeCycle(path9.slice(idx));
  const key = cycle.join("\0");
  if (acc.seenKeys.has(key)) return;
  acc.seenKeys.add(key);
  acc.cycles.push(cycle);
}
321
// Starting from a dead root, walk its imports collecting every file
// that is itself dead or only imported by dead files — i.e. everything
// that becomes deletable together with the root.
function dfsDeadChain(deadRoot, graph, deadFiles, reverseGraph) {
  const chain = [];
  const seen = new Set();
  const pending = [...graph.get(deadRoot) ?? []];
  for (let node = pending.pop(); node !== void 0; node = pending.pop()) {
    if (seen.has(node) || node === deadRoot) continue;
    seen.add(node);
    const removable = deadFiles.has(node) || isOnlyImportedByDead(node, deadFiles, reverseGraph);
    if (!removable) continue;
    chain.push(node);
    for (const next of graph.get(node) ?? []) {
      if (!seen.has(next)) pending.push(next);
    }
  }
  return chain;
}
338
// True when nothing imports `file`, or every importer is itself dead.
// (every() on an empty list is true, covering the no-importers case.)
function isOnlyImportedByDead(file, deadFiles, reverseGraph) {
  const importers = [...reverseGraph.get(file) ?? []];
  return importers.every((imp) => deadFiles.has(imp));
}
342
// Invert the import graph: file -> set of files that import it.
// Every node of the forward graph is guaranteed an entry.
function buildReverseGraph(graph) {
  const rev = new Map([...graph.keys()].map((file) => [file, new Set()]));
  for (const [file, imports] of graph) {
    for (const imp of imports) {
      if (!rev.has(imp)) rev.set(imp, new Set());
      rev.get(imp).add(file);
    }
  }
  return rev;
}
355
// Rotate a cycle so its lexicographically smallest node comes first,
// making rotated copies of the same cycle compare equal.
function normalizeCycle(cycle) {
  if (cycle.length === 0) return cycle;
  let minIdx = 0;
  for (let i = 1; i < cycle.length; i++) {
    if (cycle[i] < cycle[minIdx]) minIdx = i;
  }
  return cycle.slice(minIdx).concat(cycle.slice(0, minIdx));
}
363
// Recursively gather every .ts/.tsx/.js/.jsx file under `dir`,
// in depth-first readdir order. Unreadable directories are skipped
// rather than aborting the walk.
function collectSourceFiles(dir) {
  const SOURCE_RE = /\.(tsx?|jsx?)$/;
  const results = [];
  const walk = (current) => {
    let entries;
    try {
      entries = fs3.readdirSync(current, { withFileTypes: true });
    } catch {
      return;
    }
    for (const entry of entries) {
      const full = path3.join(current, entry.name);
      if (entry.isDirectory()) {
        walk(full);
      } else if (entry.isFile() && SOURCE_RE.test(entry.name)) {
        results.push(full);
      }
    }
  };
  walk(dir);
  return results;
}
385
+
386
+ // src/modules/dead-code.ts
387
+ import ora2 from "ora";
388
+ import chalk from "chalk";
389
+ import Table from "cli-table3";
390
+ import fs5 from "fs";
391
+ import path5 from "path";
392
+ import { Node as Node2 } from "ts-morph";
393
+
394
+ // src/core/reporter.ts
395
+ import fs4 from "fs";
396
+ import path4 from "path";
397
+ import ora from "ora";
398
// Thin wrapper over ora: create a spinner with `text` and start it
// immediately, so callers always receive a running spinner.
function createSpinner(text) {
  return ora(text).start();
}
401
// Name of the directory into which generated reports are collected.
var REPORTS_DIR_NAME = "prunify-reports";
// Resolve (creating it if missing) the reports directory: under
// `outDir` when given and non-empty, otherwise under the project root.
function ensureReportsDir(rootDir, outDir) {
  const base = outDir ? path4.resolve(outDir) : path4.resolve(rootDir);
  const reportsDir = path4.join(base, REPORTS_DIR_NAME);
  // Recursive mkdir is a no-op when the directory already exists.
  fs4.mkdirSync(reportsDir, { recursive: true });
  return reportsDir;
}
410
// Write one report file into `reportsDir` (creating it when needed)
// and echo the destination path to the console.
function writeReport(reportsDir, filename, content) {
  ensureDir(reportsDir);
  const destination = path4.join(reportsDir, filename);
  fs4.writeFileSync(destination, content, "utf-8");
  console.log(` Report saved \u2192 ${destination}`);
}
416
// Ensure "prunify-reports/" is listed in the project's .gitignore:
// create the file when absent, append when the entry is not already
// present on a line of its own.
function appendToGitignore(rootDir) {
  const gitignorePath = path4.join(rootDir, ".gitignore");
  const entry = `${REPORTS_DIR_NAME}/`;
  if (!fs4.existsSync(gitignorePath)) {
    fs4.writeFileSync(gitignorePath, `${entry}\n`, "utf-8");
    return;
  }
  const contents = fs4.readFileSync(gitignorePath, "utf-8");
  const alreadyListed = contents.split("\n").some((line) => line.trim() === entry);
  if (!alreadyListed) {
    fs4.appendFileSync(gitignorePath, `\n${entry}\n`, "utf-8");
  }
}
430
// Render a structured report as a Markdown document at `outputPath`.
// Each section becomes a "##" heading with an optional table; empty
// sections render a "_Nothing found._" placeholder instead.
function writeMarkdown(report, outputPath) {
  const lines = [
    `# ${report.title}`,
    "",
    `> ${report.summary}`,
    "",
    `_Generated: ${report.generatedAt.toISOString()}_`,
    ""
  ];
  for (const section of report.sections) {
    lines.push(`## ${section.title}`, "");
    if (section.rows.length === 0) {
      lines.push("_Nothing found._", "");
      continue;
    }
    if (section.headers?.length) {
      lines.push(
        `| ${section.headers.join(" | ")} |`,
        `| ${section.headers.map(() => "---").join(" | ")} |`
      );
    }
    lines.push(...section.rows.map((row) => `| ${row.join(" | ")} |`), "");
  }
  ensureDir(path4.dirname(outputPath));
  fs4.writeFileSync(outputPath, lines.join("\n"), "utf-8");
}
457
// Serialize `data` as pretty-printed JSON at `outputPath`, creating
// the parent directory when needed.
function writeJson(data, outputPath) {
  const parent = path4.dirname(outputPath);
  ensureDir(parent);
  fs4.writeFileSync(outputPath, JSON.stringify(data, null, 2), "utf-8");
}
461
// Create `dir` (and any missing parents); no-op for a falsy path or
// one that already exists.
function ensureDir(dir) {
  if (!dir) return;
  if (fs4.existsSync(dir)) return;
  fs4.mkdirSync(dir, { recursive: true });
}
466
+
467
+ // src/modules/dead-code.ts
468
// Core dead-code analysis: everything reachable from the entry points
// is live, the rest is dead; dead roots then get removable chains and
// live files are scanned for unused exports. When no entry point was
// detected, the first discovered file is used as a best-effort root.
function runDeadCodeModule(project, graph, entryPoints, rootDir) {
  const allFiles = [...graph.keys()];
  const effectiveEntries = entryPoints.length > 0 ? entryPoints : allFiles.slice(0, 1);
  const liveFiles = runDFS(graph, effectiveEntries);
  const deadFiles = allFiles.filter((f) => !liveFiles.has(f));
  const deadSet = new Set(deadFiles);
  const chains = findDeadChains(graph, deadSet);
  // Dead exports are only searched within live files — dead files are
  // already reported wholesale.
  const deadExports = findDeadExports(project, liveFiles);
  const report = buildDeadCodeReport(deadFiles, chains, deadExports, rootDir);
  return { deadFiles, liveFiles, chains, deadExports, report };
}
479
// Find exports declared in live files that no other live file imports.
function findDeadExports(project, liveFiles) {
  const importedNames = buildImportedNameMap(project, liveFiles);
  const dead = [];
  liveFiles.forEach((filePath) => {
    collectFileDeadExports(filePath, project, importedNames, dead);
  });
  return dead;
}
487
// Append to `dead` every export of `filePath` that no live file was
// recorded as importing. A recorded "*" (namespace import/re-export)
// marks all of the file's exports as used, so the file is skipped.
function collectFileDeadExports(filePath, project, importedNames, dead) {
  const sf = project.getSourceFile(filePath);
  if (!sf) return;
  const usedNames = importedNames.get(filePath) ?? /* @__PURE__ */ new Set();
  if (usedNames.has("*")) return;
  for (const [exportName, declarations] of sf.getExportedDeclarations()) {
    if (usedNames.has(exportName)) continue;
    // Report the first declaration's line (an export may have several).
    const line = getExportLine(declarations[0]);
    dead.push({ filePath, exportName, line });
  }
}
498
// Best-effort 1-based line number of an exported declaration; returns
// 0 when the declaration is missing or not an AST node.
function getExportLine(decl) {
  if (!decl || !Node2.isNode(decl)) return 0;
  return decl.getStartLineNumber();
}
503
// Size of `filePath` in bytes; 0 when the file cannot be stat'ed.
function getFileSize(filePath) {
  try {
    const { size } = fs5.statSync(filePath);
    return size;
  } catch {
    return 0;
  }
}
510
// Render the plain-text dead-code report: a summary banner, one entry
// per dead file (with its removable chain and combined size), and one
// entry per dead export. Paths are shown relative to `rootDir` with
// "/" separators.
function buildDeadCodeReport(deadFiles, chains, deadExports, rootDir) {
  const rel = (p) => path5.relative(rootDir, p).replaceAll("\\", "/");
  // Total recoverable size: each dead root plus its dependent chain.
  // NOTE(review): a file appearing in more than one chain is counted
  // once per chain, so this total can overstate the real savings.
  const totalBytes = deadFiles.reduce((sum, f) => {
    const chain = chains.get(f) ?? [];
    return sum + getFileSize(f) + chain.reduce((s, c) => s + getFileSize(c), 0);
  }, 0);
  const totalKb = (totalBytes / 1024).toFixed(1);
  const lines = [
    "========================================",
    " DEAD CODE REPORT",
    ` Dead files : ${deadFiles.length}`,
    ` Dead exports: ${deadExports.length}`,
    ` Recoverable : ~${totalKb} KB`,
    "========================================",
    ""
  ];
  if (deadFiles.length > 0) {
    lines.push("\u2500\u2500 DEAD FILES \u2500\u2500", "");
    for (const filePath of deadFiles) {
      const chain = chains.get(filePath) ?? [];
      const allFiles = [filePath, ...chain];
      const sizeBytes = allFiles.reduce((s, f) => s + getFileSize(f), 0);
      const sizeKb = (sizeBytes / 1024).toFixed(1);
      const chainStr = chain.length > 0 ? [rel(filePath), ...chain.map(rel)].join(" \u2192 ") : rel(filePath);
      const plural = allFiles.length === 1 ? "" : "s";
      lines.push(
        `DEAD FILE \u2014 ${rel(filePath)}`,
        `Reason: Not imported anywhere in the codebase`,
        `Chain: ${chainStr}`,
        `Size: ~${sizeKb} KB removable across ${allFiles.length} file${plural}`,
        `Action: Safe to delete all ${allFiles.length} file${plural}`,
        ""
      );
    }
  }
  if (deadExports.length > 0) {
    lines.push("\u2500\u2500 DEAD EXPORTS \u2500\u2500", "");
    for (const entry of deadExports) {
      lines.push(
        `DEAD EXPORT \u2014 ${rel(entry.filePath)} \u2192 ${entry.exportName}() [line ${entry.line}]`,
        `Reason: Exported but never imported`,
        `Action: Remove the export (file itself is still live)`,
        ""
      );
    }
  }
  return lines.join("\n");
}
558
// CLI "dead code" command: discover source files, build the import
// graph, run the core analysis, then print a table and optionally
// write a report. Returns the flat list of findings (whole dead files
// plus individual dead exports). Rethrows any failure after marking
// the spinner as failed.
async function runDeadCode(dir, opts) {
  const spinner = ora2(chalk.cyan("Scanning for dead code\u2026")).start();
  try {
    const fileList = discoverFiles(dir, []);
    const project = buildProject(fileList);
    const graph = buildGraph(fileList, (f) => {
      const sf = project.getSourceFile(f);
      return sf ? getImportsForFile(sf) : [];
    });
    const packageJson = loadPackageJson(dir);
    const entries = findEntryPoints(dir, packageJson);
    const result = runDeadCodeModule(project, graph, entries, dir);
    // Flatten both kinds of findings into one displayable list.
    const dead = [
      ...result.deadFiles.map((f) => ({ file: f, exportName: "(entire file)" })),
      ...result.deadExports.map((e) => ({ file: e.filePath, exportName: e.exportName }))
    ];
    spinner.succeed(chalk.green(`Dead code scan complete \u2014 ${dead.length} item(s) found`));
    if (dead.length === 0) {
      console.log(chalk.green(" No dead code detected."));
      return dead;
    }
    printDeadTable(dead);
    writeDeadOutput(result, opts);
    return dead;
  } catch (err) {
    spinner.fail(chalk.red("Dead code scan failed"));
    throw err;
  }
}
587
// Map each file to the set of export names that live files import from
// it. "*" marks a namespace import/re-export, meaning every export of
// that file counts as used.
function buildImportedNameMap(project, liveFiles) {
  const importedNames = new Map();
  const touch = (file, name) => {
    let names = importedNames.get(file);
    if (!names) {
      names = new Set();
      importedNames.set(file, names);
    }
    names.add(name);
  };
  for (const filePath of liveFiles) {
    const sf = project.getSourceFile(filePath);
    if (!sf) continue;
    processImports(sf, touch);
    processReExports(sf, touch);
  }
  return importedNames;
}
601
// Record which export names `sf` consumes from other project files,
// keyed by the resolved target file. A namespace import counts as "*"
// (everything used); a default import as "default".
function processImports(sf, touch) {
  for (const decl of sf.getImportDeclarations()) {
    const resolved = decl.getModuleSpecifierSourceFile();
    if (!resolved) continue;
    const target = resolved.getFilePath();
    if (decl.getNamespaceImport()) {
      touch(target, "*");
      continue;
    }
    const defaultImport = decl.getDefaultImport();
    if (defaultImport) touch(target, "default");
    for (const named of decl.getNamedImports()) {
      // Fix: record the exported name, not the local alias. For
      // `import { a as b } from "./m"` the target's export is "a";
      // recording the alias "b" (as the previous
      // `getAliasNode()?.getText() ?? getName()` did) left "a"
      // falsely flagged as a dead export.
      touch(target, named.getName());
    }
  }
}
617
// Record names that `sf` re-exports from other project files — a
// re-export counts as a use of the target file's export.
// `export * from "x"` is recorded as "*" (all names used).
function processReExports(sf, touch) {
  for (const decl of sf.getExportDeclarations()) {
    const resolved = decl.getModuleSpecifierSourceFile();
    if (!resolved) continue;
    const target = resolved.getFilePath();
    if (decl.isNamespaceExport()) {
      touch(target, "*");
      continue;
    }
    for (const named of decl.getNamedExports()) {
      touch(target, named.getName());
    }
  }
}
631
// Read and parse <dir>/package.json. Returns null when the file is
// missing, unreadable, or contains invalid JSON — previously a
// malformed package.json threw out of JSON.parse and aborted the
// whole scan; callers (findEntryPoints) already handle null.
function loadPackageJson(dir) {
  const pkgPath = path5.join(dir, "package.json");
  if (!fs5.existsSync(pkgPath)) return null;
  try {
    return JSON.parse(fs5.readFileSync(pkgPath, "utf-8"));
  } catch {
    return null;
  }
}
636
// Print dead-code findings as a two-column console table.
function printDeadTable(dead) {
  const table = new Table({ head: ["File", "Export"] });
  dead.forEach((row) => table.push([row.file, row.exportName]));
  console.log(table.toString());
}
643
// Persist dead-code results when `opts.output` is set: raw JSON when
// `opts.json` is truthy, otherwise a Markdown report with one section
// for dead files (and their chains) and one for dead exports.
function writeDeadOutput(result, opts) {
  if (!opts.output) return;
  if (opts.json) {
    writeJson(
      { deadFiles: result.deadFiles, deadExports: result.deadExports },
      opts.output
    );
    console.log(chalk.cyan(` JSON written to ${opts.output}`));
    return;
  }
  writeMarkdown(
    {
      title: "Dead Code Report",
      summary: `${result.deadFiles.length} dead file(s), ${result.deadExports.length} dead export(s)`,
      sections: [
        {
          title: "Dead Files",
          headers: ["File", "Chain"],
          rows: result.deadFiles.map((f) => [
            f,
            (result.chains.get(f) ?? []).join(" \u2192 ") || "\u2014"
          ])
        },
        {
          title: "Dead Exports",
          headers: ["File", "Export", "Line"],
          rows: result.deadExports.map((e) => [e.filePath, e.exportName, String(e.line)])
        }
      ],
      generatedAt: /* @__PURE__ */ new Date()
    },
    opts.output
  );
  console.log(chalk.cyan(` Report written to ${opts.output}`));
}
678
+
679
+ // src/modules/dupe-finder.ts
680
+ import ora3 from "ora";
681
+ import chalk2 from "chalk";
682
+ import Table2 from "cli-table3";
683
+ import fs6 from "fs";
684
+ import { SyntaxKind as SyntaxKind3, VariableDeclarationKind } from "ts-morph";
685
+
686
+ // src/utils/ast.ts
687
+ import crypto from "crypto";
688
+ import {
689
+ Node as Node3,
690
+ SyntaxKind as SyntaxKind2
691
+ } from "ts-morph";
692
+
693
+ // src/modules/dupe-finder.ts
694
// CLI "dupe" command: scan for blocks of `minLines` consecutive
// non-empty (trimmed) lines that occur in more than one place,
// printing a summary table and optionally a Markdown report.
//
// Fix: occurrences previously reported `i + 1` — the index within the
// blank-line-FILTERED list — as "startLine", so every reported
// location was wrong for files containing blank lines. Each kept line
// now remembers its original 1-based line number, which is used for
// the occurrence's startLine.
async function runDupeFinder(dir, opts) {
  const minLines = parseInt(opts.minLines ?? "5", 10);
  const spinner = ora3(chalk2.cyan(`Scanning for duplicate blocks (\u2265${minLines} lines)\u2026`)).start();
  try {
    const files = glob(dir, ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"], ["node_modules", "dist"]);
    const blockMap = /* @__PURE__ */ new Map();
    for (const filePath of files) {
      const content = fs6.readFileSync(filePath, "utf-8");
      // Keep trimmed non-empty lines paired with their original line numbers.
      const lines = [];
      content.split("\n").forEach((raw, idx) => {
        const text = raw.trim();
        if (text) lines.push({ text, lineNo: idx + 1 });
      });
      for (let i = 0; i <= lines.length - minLines; i++) {
        const block = lines.slice(i, i + minLines).map((l) => l.text).join("\n");
        if (!blockMap.has(block)) blockMap.set(block, []);
        blockMap.get(block).push({ file: filePath, startLine: lines[i].lineNo });
      }
    }
    const dupes = [];
    for (const [block, occurrences] of blockMap) {
      if (occurrences.length > 1) {
        dupes.push({
          hash: hashString(block),
          lines: minLines,
          occurrences
        });
      }
    }
    spinner.succeed(chalk2.green(`Duplicate scan complete \u2014 ${dupes.length} duplicate block(s) found`));
    if (dupes.length === 0) {
      console.log(chalk2.green(" No duplicate blocks detected."));
      return dupes;
    }
    const table = new Table2({ head: ["Hash", "Lines", "Count", "First Occurrence"] });
    for (const d of dupes) {
      table.push([
        chalk2.gray(d.hash.slice(0, 8)),
        String(d.lines),
        chalk2.yellow(String(d.occurrences.length)),
        `${d.occurrences[0].file}:${d.occurrences[0].startLine}`
      ]);
    }
    console.log(table.toString());
    if (opts.output) {
      const rows = dupes.flatMap(
        (d) => d.occurrences.map((o) => [d.hash.slice(0, 8), String(d.lines), o.file, String(o.startLine)])
      );
      writeMarkdown(
        {
          title: "Duplicate Code Report",
          summary: `${dupes.length} duplicate block(s) found (min lines: ${minLines})`,
          sections: [{ title: "Duplicates", headers: ["Hash", "Lines", "File", "Start Line"], rows }],
          generatedAt: /* @__PURE__ */ new Date()
        },
        opts.output
      );
      console.log(chalk2.cyan(` Report written to ${opts.output}`));
    }
    return dupes;
  } catch (err) {
    spinner.fail(chalk2.red("Duplicate scan failed"));
    throw err;
  }
}
755
// djb2-xor string hash (h = h * 33 ^ code unit), rendered as a
// zero-padded 8-digit hex string. Not cryptographic — only a cheap
// fingerprint for grouping duplicate blocks.
function hashString(str) {
  let h = 5381;
  for (let i = 0; i < str.length; i++) {
    h = ((h << 5) + h) ^ str.charCodeAt(i);
  }
  return (h >>> 0).toString(16).padStart(8, "0");
}
762
+
763
+ // src/modules/dep-check.ts
764
+ import ora4 from "ora";
765
+ import chalk3 from "chalk";
766
+ import Table3 from "cli-table3";
767
+ import fs7 from "fs";
768
+ import path6 from "path";
769
+ import { SyntaxKind as SyntaxKind4 } from "ts-morph";
770
// CLI "dep check" command: compare packages declared in package.json
// (dependencies + devDependencies) against packages actually imported
// under src/. Declared-but-unimported packages are reported as
// "unused"; imported-but-undeclared non-builtin packages as "missing".
async function runDepCheck(opts) {
  const spinner = ora4(chalk3.cyan("Auditing dependencies\u2026")).start();
  try {
    const pkgPath = path6.join(opts.cwd, "package.json");
    if (!fs7.existsSync(pkgPath)) {
      spinner.fail(chalk3.red(`No package.json found at ${opts.cwd}`));
      return [];
    }
    const pkg = JSON.parse(fs7.readFileSync(pkgPath, "utf-8"));
    const declared = /* @__PURE__ */ new Set([
      ...Object.keys(pkg.dependencies ?? {}),
      ...Object.keys(pkg.devDependencies ?? {})
    ]);
    const srcDir = path6.join(opts.cwd, "src");
    const files = glob(srcDir, ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"], ["node_modules", "dist"]);
    const usedPackages = /* @__PURE__ */ new Set();
    for (const filePath of files) {
      const content = fs7.readFileSync(filePath, "utf-8");
      // NOTE(review): only `from "pkg"` imports are matched; bare
      // side-effect imports (`import "pkg"`) and require() calls are
      // not seen, so packages used only that way get flagged "unused".
      const importRegex = /from\s+['"]([^'"./][^'"]*)['"]/g;
      let match;
      while ((match = importRegex.exec(content)) !== null) {
        const specifier = match[1];
        // Scoped packages keep their first two path segments
        // ("@scope/name"); others keep only the first.
        const pkgName = specifier.startsWith("@") ? specifier.split("/").slice(0, 2).join("/") : specifier.split("/")[0];
        usedPackages.add(pkgName);
      }
    }
    const issues = [];
    for (const dep of declared) {
      if (!usedPackages.has(dep)) {
        issues.push({ name: dep, type: "unused" });
      }
    }
    for (const pkg2 of usedPackages) {
      if (!declared.has(pkg2) && !isBuiltin(pkg2)) {
        issues.push({ name: pkg2, type: "missing" });
      }
    }
    spinner.succeed(chalk3.green(`Dependency audit complete \u2014 ${issues.length} issue(s) found`));
    if (issues.length === 0) {
      console.log(chalk3.green(" All dependencies look healthy."));
      return issues;
    }
    const table = new Table3({ head: ["Package", "Issue"] });
    for (const issue of issues) {
      // NOTE(review): issues are only ever created with type "unused"
      // or "missing" above, so the "unlisted dev dep" branch is
      // currently unreachable.
      const label = issue.type === "unused" ? chalk3.yellow("unused") : issue.type === "missing" ? chalk3.red("missing from package.json") : chalk3.magenta("unlisted dev dep");
      table.push([chalk3.gray(issue.name), label]);
    }
    console.log(table.toString());
    if (opts.output) {
      writeMarkdown(
        {
          title: "Dependency Audit Report",
          summary: `${issues.length} dependency issue(s) found`,
          sections: [
            {
              title: "Issues",
              headers: ["Package", "Type"],
              rows: issues.map((i) => [i.name, i.type])
            }
          ],
          generatedAt: /* @__PURE__ */ new Date()
        },
        opts.output
      );
      console.log(chalk3.cyan(` Report written to ${opts.output}`));
    }
    return issues;
  } catch (err) {
    spinner.fail(chalk3.red("Dependency audit failed"));
    throw err;
  }
}
842
// Node.js built-in modules (bare and "node:"-prefixed forms) that must
// never be reported as missing dependencies. Not exhaustive — see
// isBuiltin(), which additionally treats any "node:*" specifier as
// built-in.
var NODE_BUILTINS = /* @__PURE__ */ new Set([
  "node:fs",
  "node:path",
  "node:os",
  "node:url",
  "node:crypto",
  "node:util",
  "node:stream",
  "node:events",
  "node:child_process",
  "node:process",
  "fs",
  "path",
  "os",
  "url",
  "crypto",
  "util",
  "stream",
  "events",
  "child_process"
]);
863
// True for Node built-ins: anything in the known list, or any
// specifier using the "node:" protocol prefix.
function isBuiltin(name) {
  if (NODE_BUILTINS.has(name)) return true;
  return name.startsWith("node:");
}
866
+
867
+ // src/modules/health-report.ts
868
+ import chalk5 from "chalk";
869
+ import path7 from "path";
870
+
871
+ // src/modules/circular.ts
872
+ import ora5 from "ora";
873
+ import chalk4 from "chalk";
874
+ import Table4 from "cli-table3";
875
// CLI "circular" command: build the import graph for `dir` and report
// every unique import cycle, as a console table and optionally as a
// Markdown report. Returns the list of cycles (arrays of file paths).
async function runCircular(dir, opts) {
  const spinner = ora5(chalk4.cyan("Scanning for circular imports\u2026")).start();
  try {
    const fileList = discoverFiles(dir, []);
    const project = buildProject(fileList);
    const graph = buildGraph(fileList, (f) => {
      const sf = project.getSourceFile(f);
      return sf ? getImportsForFile(sf) : [];
    });
    const cycles = detectCycles(graph);
    spinner.succeed(
      chalk4.green(`Circular import scan complete \u2014 ${cycles.length} cycle(s) found`)
    );
    if (cycles.length === 0) {
      console.log(chalk4.green(" No circular imports detected."));
      return cycles;
    }
    const table = new Table4({ head: ["Cycle #", "Files involved"] });
    cycles.forEach((cycle, i) => {
      table.push([chalk4.yellow(String(i + 1)), cycle.map((f) => chalk4.gray(f)).join("\n \u2192 ")]);
    });
    console.log(table.toString());
    if (opts.output) {
      writeMarkdown(
        {
          title: "Circular Imports Report",
          summary: `${cycles.length} circular import chain(s) found`,
          sections: [
            {
              title: "Circular Chains",
              headers: ["Cycle #", "Files"],
              rows: cycles.map((cycle, i) => [String(i + 1), cycle.join(" \u2192 ")])
            }
          ],
          generatedAt: /* @__PURE__ */ new Date()
        },
        opts.output
      );
      console.log(chalk4.cyan(` Report written to ${opts.output}`));
    }
    return cycles;
  } catch (err) {
    spinner.fail(chalk4.red("Circular import scan failed"));
    throw err;
  }
}
921
+
922
+ // src/modules/health-report.ts
923
/**
 * Aggregates every analysis module into one markdown health report.
 * All modules run in parallel; a module that throws contributes an empty
 * result set instead of aborting the report.
 */
async function runHealthReport(dir, opts) {
  console.log(chalk5.bold.cyan("\n prunify \u2014 Codebase Health Report\n"));
  const emptyOnFailure = () => [];
  // NOTE(review): dep-check runs against the PARENT of `dir` here, while the
  // CLI entry point runs it against the project root itself \u2014 confirm
  // that ".." is intentional before relying on these dependency results.
  const [deadExports, dupes, cycles, depIssues] = await Promise.all([
    runDeadCode(dir, {}).catch(emptyOnFailure),
    runDupeFinder(dir, {}).catch(emptyOnFailure),
    runCircular(dir, {}).catch(emptyOnFailure),
    runDepCheck({ cwd: path7.resolve(dir, ".."), output: void 0 }).catch(emptyOnFailure)
  ]);
  const deadSection = {
    title: "\u{1F6A8} Dead Exports",
    headers: ["File", "Export"],
    rows: deadExports.map((item) => [item.file, item.exportName])
  };
  const dupeSection = {
    title: "\u{1F501} Duplicate Blocks",
    headers: ["Hash", "Lines", "Occurrences"],
    rows: dupes.map((item) => [item.hash.slice(0, 8), String(item.lines), String(item.occurrences.length)])
  };
  const cycleSection = {
    title: "\u267B\uFE0F Circular Imports",
    headers: ["Cycle #", "Chain"],
    rows: cycles.map((chain, index) => [String(index + 1), chain.join(" \u2192 ")])
  };
  const depSection = {
    title: "\u{1F4E6} Dependency Issues",
    headers: ["Package", "Issue"],
    rows: depIssues.map((issue) => [issue.name, issue.type])
  };
  const totalIssues = deadExports.length + dupes.length + cycles.length + depIssues.length;
  writeMarkdown(
    {
      title: "Codebase Health Report",
      summary: `Analysed: ${path7.resolve(dir)} | Total issues: ${totalIssues}`,
      sections: [deadSection, dupeSection, cycleSection, depSection],
      generatedAt: new Date()
    },
    opts.output
  );
  console.log(chalk5.bold("\n Health report written to ") + chalk5.cyan(opts.output));
  const issueBadge = totalIssues > 0 ? chalk5.red(String(totalIssues)) : chalk5.green("0");
  console.log(chalk5.dim(" Total issues found: ") + issueBadge);
}
969
+
970
+ // src/cli.ts
971
/**
 * Reads this package's version from the package.json one level above the
 * bundle. Works in both module systems of the dual build: the ESM branch
 * derives the directory from import.meta.url; the CJS fallback uses
 * __dirname (referencing it bare throws in ESM, which the catch absorbs).
 * Returns "0.0.0" when no readable package.json is found.
 */
function readPkgVersion() {
  const versionBeside = (dir) =>
    JSON.parse(fs8.readFileSync(path8.resolve(dir, "..", "package.json"), "utf-8")).version;
  try {
    if (typeof import.meta !== "undefined" && import.meta.url) {
      return versionBeside(path8.dirname(fileURLToPath(import.meta.url)));
    }
  } catch {
    // ESM lookup failed; fall through to the CJS-style lookup below.
  }
  try {
    return versionBeside(globalThis.__dirname ?? __dirname);
  } catch {
    return "0.0.0";
  }
}
988
// Resolved once at startup; surfaced by commander's -v/--version flag.
var PKG_VERSION = readPkgVersion();
// Modules that run by default. "health" is opt-in via --only (see resolveModules).
var ALL_MODULES = ["dead-code", "dupes", "circular", "deps"];
var program = new Command();
program
  .name("prunify")
  .description("npm run clean. ship with confidence.")
  .version(PKG_VERSION, "-v, --version");
program.option("--dir <path>", "Root directory to analyze", process.cwd());
program.option("--entry <path>", "Override entry point");
program.option("--only <modules>", "Comma-separated: dead-code,dupes,circular,deps,health");
// Repeatable flag: each occurrence appends to the accumulated array.
program.option(
  "--ignore <pattern>",
  "Glob pattern to ignore (repeatable)",
  (val, acc) => [...acc, val],
  []
);
program.option("--out <path>", "Output directory for reports");
program.option("--html", "Also generate code_health.html");
program.option("--delete", "Prompt to delete dead files after analysis");
program.option("--ci", "CI mode: exit 1 if issues found, no interactive prompts");
program.action(main);
program.parse();
998
// CLI entry point: validates the target directory, parses the codebase once,
// runs each selected module against the shared project/graph, prints a summary
// table, then handles the optional --delete prompt and --ci exit code.
async function main(opts) {
  const rootDir = path8.resolve(opts.dir);
  // Refuse to run outside a package root; later steps assume package.json exists.
  if (!fs8.existsSync(path8.join(rootDir, "package.json"))) {
    console.error(chalk6.red(`\u2717 No package.json found in ${rootDir}`));
    console.error(chalk6.dim(" Use --dir <path> to point to your project root."));
    process.exit(1);
  }
  const modules = resolveModules(opts.only);
  console.log();
  console.log(chalk6.bold.cyan("\u{1F9F9} prunify \u2014 npm run clean. ship with confidence."));
  console.log();
  // Parse all source files up front; the modules below reuse `project` and `graph`.
  const parseSpinner = createSpinner(chalk6.cyan("Parsing codebase\u2026"));
  const files = discoverFiles(rootDir, opts.ignore);
  parseSpinner.succeed(chalk6.green(`Parsed codebase \u2014 ${files.length} file(s) found`));
  const graphSpinner = createSpinner(chalk6.cyan("Building import graph\u2026"));
  const project = buildProject(files);
  const graph = buildGraph(files, (f) => {
    const sf = project.getSourceFile(f);
    return sf ? getImportsForFile(sf) : [];
  });
  // Total edge count across all adjacency sets, for the spinner message only.
  const edgeCount = [...graph.values()].reduce((n, s) => n + s.size, 0);
  graphSpinner.succeed(chalk6.green(`Import graph built \u2014 ${edgeCount} edge(s)`));
  const packageJson = loadPackageJson2(rootDir);
  // --entry overrides entry-point auto-detection from package.json.
  const entryPoints = opts.entry ? [path8.resolve(opts.entry)] : findEntryPoints(rootDir, packageJson);
  const reportsDir = ensureReportsDir(rootDir, opts.out);
  appendToGitignore(rootDir);
  console.log();
  // Per-module tallies and report filenames feeding the summary table below.
  let deadFileCount = 0; // counts dead FILES plus dead EXPORTS combined
  let dupeCount = 0;
  let unusedPkgCount = 0;
  let circularCount = 0;
  let deadReportFile = "";
  let dupesReportFile = "";
  let depsReportFile = "";
  let circularReportFile = "";
  const deadFilePaths = [];
  if (modules.includes("dead-code")) {
    const spinner = createSpinner(chalk6.cyan("Analysing dead code\u2026"));
    const result = runDeadCodeModule(project, graph, entryPoints, rootDir);
    deadFileCount = result.deadFiles.length + result.deadExports.length;
    // Only whole dead files are candidates for --delete; dead exports are not.
    deadFilePaths.push(...result.deadFiles);
    spinner.succeed(chalk6.green(`Dead code analysis complete \u2014 ${deadFileCount} item(s) found`));
    if (result.report) {
      deadReportFile = "dead-code.txt";
      writeReport(reportsDir, deadReportFile, result.report);
    }
  }
  if (modules.includes("dupes")) {
    const outputPath = path8.join(reportsDir, "dupes.md");
    const dupes = await runDupeFinder(rootDir, { output: outputPath });
    dupeCount = dupes.length;
    if (dupeCount > 0) dupesReportFile = "dupes.md";
  }
  if (modules.includes("circular")) {
    const spinner = createSpinner(chalk6.cyan("Analysing circular imports\u2026"));
    // Reuses the already-built graph rather than re-scanning like runCircular does.
    const cycles = detectCycles(graph);
    circularCount = cycles.length;
    spinner.succeed(chalk6.green(`Circular import analysis complete \u2014 ${circularCount} cycle(s) found`));
    if (circularCount > 0) {
      circularReportFile = "circular.txt";
      const cycleText = cycles.map((c, i) => `Cycle ${i + 1}: ${c.join(" \u2192 ")}`).join("\n");
      writeReport(reportsDir, circularReportFile, cycleText);
    }
  }
  if (modules.includes("deps")) {
    const outputPath = path8.join(reportsDir, "deps.md");
    const issues = await runDepCheck({ cwd: rootDir, output: outputPath });
    // Only "unused" issues count toward the summary tally (not missing deps).
    unusedPkgCount = issues.filter((i) => i.type === "unused").length;
    if (issues.length > 0) depsReportFile = "deps.md";
  }
  if (modules.includes("health")) {
    const outputPath = path8.join(reportsDir, "health-report.md");
    await runHealthReport(rootDir, { output: outputPath });
  }
  if (opts.html) {
    const htmlPath = path8.join(reportsDir, "code_health.html");
    // Note: the HTML "Dead Files / Exports" row reflects deadFilePaths.length
    // (files only), while the CLI table below uses deadFileCount (files+exports).
    writeHtmlReport(htmlPath, rootDir, deadFilePaths, circularCount, dupeCount, unusedPkgCount);
    console.log(chalk6.cyan(` HTML report written to ${htmlPath}`));
  }
  console.log();
  console.log(chalk6.bold("Summary"));
  console.log();
  const table = new Table5({
    head: [chalk6.bold("Check"), chalk6.bold("Found"), chalk6.bold("Output File")],
    style: { head: [], border: [] }
  });
  // Non-zero counts render yellow; clean checks render green.
  const fmt = (n) => n > 0 ? chalk6.yellow(String(n)) : chalk6.green("0");
  table.push(
    ["Dead Files / Exports", fmt(deadFileCount), deadReportFile || "\u2014"],
    ["Duplicate Clusters", fmt(dupeCount), dupesReportFile || "\u2014"],
    ["Unused Packages", fmt(unusedPkgCount), depsReportFile || "\u2014"],
    ["Circular Deps", fmt(circularCount), circularReportFile || "\u2014"]
  );
  console.log(table.toString());
  console.log();
  if (opts.delete && deadFilePaths.length > 0) {
    console.log(chalk6.yellow(`Dead files (${deadFilePaths.length}):`));
    for (const f of deadFilePaths) {
      console.log(chalk6.dim(` ${path8.relative(rootDir, f)}`));
    }
    console.log();
    // In --ci mode the list is printed but never deleted (no interactive prompt).
    if (!opts.ci) {
      const confirmed = await confirmPrompt("Delete these files? (y/N) ");
      if (confirmed) {
        for (const f of deadFilePaths) {
          fs8.rmSync(f, { force: true });
        }
        console.log(chalk6.green(` Deleted ${deadFilePaths.length} file(s).`));
      } else {
        console.log(chalk6.dim(" Skipped."));
      }
    }
  }
  if (opts.ci) {
    // CI contract: any finding in any module fails the build.
    const hasIssues = deadFileCount > 0 || dupeCount > 0 || unusedPkgCount > 0 || circularCount > 0;
    if (hasIssues) process.exit(1);
  }
}
1116
/**
 * Translates the --only flag into a module list. With no flag, the default
 * set runs ("health" stays opt-in); otherwise unknown names are dropped
 * silently and the remaining requested modules are returned in order.
 */
function resolveModules(only) {
  if (!only) return ALL_MODULES;
  const recognised = new Set([...ALL_MODULES, "health"]);
  const requested = only.split(",").map((name) => name.trim());
  return requested.filter((name) => recognised.has(name));
}
1121
/**
 * Loads and parses `dir`/package.json.
 * Returns the parsed object, or null when the file is missing or unparsable
 * (callers treat null as "no manifest available").
 */
function loadPackageJson2(dir) {
  const manifestPath = path8.join(dir, "package.json");
  try {
    const raw = fs8.readFileSync(manifestPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
1128
/**
 * Asks a yes/no question on stdin and resolves to a boolean.
 * Only an explicit "y"/"Y" (after trimming) counts as yes; anything else,
 * including an empty answer, resolves false. The readline interface is
 * closed as soon as the answer arrives so the process can exit cleanly.
 */
function confirmPrompt(question) {
  return new Promise((resolve) => {
    const prompt = readline.createInterface({ input: process.stdin, output: process.stdout });
    prompt.question(question, (reply) => {
      prompt.close();
      const accepted = reply.trim().toLowerCase() === "y";
      resolve(accepted);
    });
  });
}
1137
/**
 * Renders a standalone HTML health summary to `outputPath`, creating parent
 * directories as needed. The summary table shows the four headline counts;
 * dead files are listed individually, relative to `rootDir`.
 * Note: the "Dead Files / Exports" row reflects `deadFiles.length` only
 * (whole files), mirroring what the caller passes in.
 */
function writeHtmlReport(outputPath, rootDir, deadFiles, circularCount, dupeCount, unusedPkgCount) {
  const summaryRows = [
    ["Dead Files / Exports", String(deadFiles.length)],
    ["Duplicate Clusters", String(dupeCount)],
    ["Circular Dependencies", String(circularCount)],
    ["Unused Packages", String(unusedPkgCount)]
  ]
    .map(([label, val]) => `  <tr><td>${label}</td><td>${val}</td></tr>`)
    .join("\n");
  const deadFileList =
    deadFiles.length > 0
      ? `<ul>${deadFiles.map((f) => `<li>${path8.relative(rootDir, f)}</li>`).join("")}</ul>`
      : "<p>None</p>";
  const html = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>prunify \u2014 Code Health Report</title>
<style>
body { font-family: system-ui, sans-serif; max-width: 800px; margin: 2rem auto; padding: 0 1rem; }
h1 { color: #0ea5e9; }
table { border-collapse: collapse; width: 100%; margin-bottom: 2rem; }
th, td { border: 1px solid #e2e8f0; padding: .5rem 1rem; text-align: left; }
th { background: #f8fafc; }
small { color: #94a3b8; }
</style>
</head>
<body>
<h1>\u{1F9F9} prunify \u2014 Code Health Report</h1>
<small>Generated ${new Date().toISOString()}</small>
<h2>Summary</h2>
<table>
<tr><th>Check</th><th>Found</th></tr>
${summaryRows}
</table>
<h2>Dead Files</h2>
${deadFileList}
</body>
</html>`;
  fs8.mkdirSync(path8.dirname(outputPath), { recursive: true });
  fs8.writeFileSync(outputPath, html, "utf-8");
}
1174
+ //# sourceMappingURL=cli.js.map