pi-lens 2.0.38 → 2.0.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,22 @@
2
2
 
3
3
  All notable changes to pi-lens will be documented in this file.
4
4
 
5
+ ## [2.0.39] - 2026-03-27
6
+
7
+ ### Added
8
+ - **Historical metrics tracking**: New `clients/metrics-history.ts` module captures complexity snapshots per commit. Tracks MI, cognitive complexity, and nesting depth across sessions.
9
+ - **Trend analysis in `/lens-metrics`**: New "Trend" column shows 📈/📉/➡️ with MI delta. "Trend Summary" section aggregates improving/stable/regressing counts with worst regressions.
10
+ - **Passive capture**: Snapshots captured on session start cache + every `/lens-metrics` run. Max 20 snapshots per file (sliding window).
11
+
12
+ ## [2.0.38] - 2026-03-27
13
+
14
+ ### Changed
15
+ - **Refactored 4 client files** via `/lens-booboo-refactor` loop:
16
+ - `biome-client.ts`: Extracted `withValidatedPath()` guard pattern (4 methods consolidated)
17
+ - `complexity-client.ts`: Extracted `analyzeFile()` pipeline into `readAndParse()`, `computeMetrics()`, `aggregateFunctionStats()`
18
+ - `dependency-checker.ts`: Simplified `importsChanged()` — replaced 3 for-loops with `setsEqual()` helper
19
+ - `ast-grep-client.ts`: Simplified `groupSimilarFunctions()` with `filter().map()` pattern + `extractFunctionName()` helper
20
+
5
21
  ## [2.0.29] - 2026-03-26
6
22
 
7
23
  ### Added
package/README.md CHANGED
@@ -77,7 +77,7 @@ On every new session, scans run silently in the background. Data is cached for r
77
77
  | `/lens-booboo-fix [path]` | Iterative automated fix loop. Runs Biome/Ruff autofix, then scans for fixable issues (ast-grep agent rules, dead code). Generates a fix plan for the agent to execute. Re-run for up to 3 iterations, then reset. |
78
78
  | `/lens-booboo-refactor [path]` | Interactive architectural refactor. Scans for worst offender by combined debt score (ast-grep skip rules + complexity metrics). Opens a browser interview with impact metrics — agent proposes refactoring options with rationale, user picks one, agent implements and shows a post-change report. |
79
79
  | `/lens-format [file\|--all]` | Apply Biome formatting |
80
- | `/lens-metrics [path]` | Measure complexity metrics for all files. Exports `report.md` with grades (A-F), summary stats, and top 10 worst files |
80
+ | `/lens-metrics [path]` | Measure complexity metrics for all files. Exports `report.md` with grades (A-F), summary stats, top 10 worst files, and **historical trends** (📈📉 per file) |
81
81
 
82
82
  ### On-demand tools
83
83
 
@@ -101,30 +101,26 @@ message: found
101
101
  return this.groupSimilarFunctions(matches);
102
102
  }
103
103
  groupSimilarFunctions(matches) {
104
- const normalized = new Map();
104
+ const grouped = new Map();
105
105
  for (const item of matches) {
106
- const text = item.text || "";
107
- const nameMatch = text.match(/function\s+(\w+)/);
108
- if (!nameMatch?.[1])
106
+ const name = this.extractFunctionName(item.text);
107
+ if (!name)
109
108
  continue;
110
- const signature = this.normalizeFunction(text);
111
- if (!normalized.has(signature)) {
112
- normalized.set(signature, []);
113
- }
114
- const line = item.range?.start?.line || item.labels?.[0]?.range?.start?.line || 0;
115
- normalized.get(signature)?.push({
116
- name: nameMatch[1],
117
- file: item.file,
118
- line: line + 1,
119
- });
109
+ const signature = this.normalizeFunction(item.text);
110
+ const line = (item.range?.start?.line || item.labels?.[0]?.range?.start?.line || 0) + 1;
111
+ const group = grouped.get(signature) ?? [];
112
+ group.push({ name, file: item.file, line });
113
+ grouped.set(signature, group);
120
114
  }
121
- const result_groups = [];
122
- for (const [pattern, functions] of normalized) {
123
- if (functions.length > 1) {
124
- result_groups.push({ pattern, functions });
125
- }
126
- }
127
- return result_groups;
115
+ return Array.from(grouped.entries())
116
+ .filter(([_, functions]) => functions.length > 1)
117
+ .map(([pattern, functions]) => ({ pattern, functions }));
118
+ }
119
+ /**
120
+ * Extract function name from match text
121
+ */
122
+ extractFunctionName(text) {
123
+ return text.match(/function\s+(\w+)/)?.[1] ?? null;
128
124
  }
129
125
  normalizeFunction(text) {
130
126
  const normalizedText = text
@@ -56,14 +56,24 @@ export class BiomeClient {
56
56
  ".cjs",
57
57
  ].includes(ext);
58
58
  }
59
+ // --- Internal helpers ---
59
60
  /**
60
- * Run biome check (format + lint) without fixing returns diagnostics
61
+ * Validate path and availability — returns path or null on failure
61
62
  */
62
- checkFile(filePath) {
63
+ withValidatedPath(filePath) {
63
64
  if (!this.isAvailable())
64
- return [];
65
+ return null;
65
66
  const absolutePath = path.resolve(filePath);
66
67
  if (!fs.existsSync(absolutePath))
68
+ return null;
69
+ return absolutePath;
70
+ }
71
+ /**
72
+ * Run biome check (format + lint) without fixing — returns diagnostics
73
+ */
74
+ checkFile(filePath) {
75
+ const absolutePath = this.withValidatedPath(filePath);
76
+ if (!absolutePath)
67
77
  return [];
68
78
  try {
69
79
  const result = spawnSync("npx", [
@@ -92,11 +102,13 @@ export class BiomeClient {
92
102
  * Format a file (writes to disk)
93
103
  */
94
104
  formatFile(filePath) {
95
- if (!this.isAvailable())
96
- return { success: false, changed: false, error: "Biome not available" };
97
- const absolutePath = path.resolve(filePath);
98
- if (!fs.existsSync(absolutePath))
99
- return { success: false, changed: false, error: "File not found" };
105
+ const absolutePath = this.withValidatedPath(filePath);
106
+ if (!absolutePath)
107
+ return {
108
+ success: false,
109
+ changed: false,
110
+ error: this.isAvailable() ? "File not found" : "Biome not available",
111
+ };
100
112
  const content = fs.readFileSync(absolutePath, "utf-8");
101
113
  try {
102
114
  const result = spawnSync("npx", ["@biomejs/biome", "format", "--write", absolutePath], {
@@ -123,20 +135,13 @@ export class BiomeClient {
123
135
  * Fix both formatting and linting issues (writes to disk)
124
136
  */
125
137
  fixFile(filePath) {
126
- if (!this.isAvailable())
138
+ const absolutePath = this.withValidatedPath(filePath);
139
+ if (!absolutePath)
127
140
  return {
128
141
  success: false,
129
142
  changed: false,
130
143
  fixed: 0,
131
- error: "Biome not available",
132
- };
133
- const absolutePath = path.resolve(filePath);
134
- if (!fs.existsSync(absolutePath))
135
- return {
136
- success: false,
137
- changed: false,
138
- fixed: 0,
139
- error: "File not found",
144
+ error: this.isAvailable() ? "File not found" : "Biome not available",
140
145
  };
141
146
  const content = fs.readFileSync(absolutePath, "utf-8");
142
147
  try {
@@ -211,10 +216,8 @@ export class BiomeClient {
211
216
  * Generate a diff-like summary of formatting changes
212
217
  */
213
218
  getFormatDiff(filePath) {
214
- if (!this.isAvailable())
215
- return "";
216
- const absolutePath = path.resolve(filePath);
217
- if (!fs.existsSync(absolutePath))
219
+ const absolutePath = this.withValidatedPath(filePath);
220
+ if (!absolutePath)
218
221
  return "";
219
222
  const content = fs.readFileSync(absolutePath, "utf-8");
220
223
  try {
@@ -126,70 +126,88 @@ export class ComplexityClient {
126
126
  * Analyze complexity metrics for a file
127
127
  */
128
128
  analyzeFile(filePath) {
129
- const absolutePath = path.resolve(filePath);
130
- if (!fs.existsSync(absolutePath))
129
+ const parsed = this.readAndParse(filePath);
130
+ if (!parsed)
131
131
  return null;
132
132
  try {
133
- const content = fs.readFileSync(absolutePath, "utf-8");
134
- const lines = content.split("\n");
135
- const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);
136
- // Count lines of code (non-empty, non-comment)
137
- const { codeLines, commentLines } = this.countLines(sourceFile, lines);
138
- // Collect function metrics
139
- const functions = [];
140
- this.collectFunctionMetrics(sourceFile, sourceFile, functions, 0);
141
- // Calculate file-level metrics
142
- const maxNestingDepth = this.calculateMaxNesting(sourceFile, 0);
143
- const _cyclomatic = this.calculateCyclomaticComplexity(sourceFile);
144
- const cognitive = this.calculateCognitiveComplexity(sourceFile);
145
- const halstead = this.calculateHalsteadVolume(sourceFile);
146
- // Function length stats
147
- const funcLengths = functions.map((f) => f.length);
148
- const avgFunctionLength = funcLengths.length > 0
149
- ? Math.round(funcLengths.reduce((a, b) => a + b, 0) / funcLengths.length)
150
- : 0;
151
- const maxFunctionLength = funcLengths.length > 0 ? Math.max(...funcLengths) : 0;
152
- // Function cyclomatic stats
153
- const cyclomatics = functions.map((f) => f.cyclomatic);
154
- const avgCyclomatic = cyclomatics.length > 0
155
- ? Math.round(cyclomatics.reduce((a, b) => a + b, 0) / cyclomatics.length)
156
- : 1;
157
- const maxCyclomatic = cyclomatics.length > 0 ? Math.max(...cyclomatics) : 1;
158
- // Maintainability Index (simplified Microsoft formula)
159
- // MI = max(0, (171 - 5.2 * ln(Halstead) - 0.23 * Cyclomatic - 16.2 * ln(LOC)) * 100 / 171)
160
- const maintainabilityIndex = this.calculateMaintainabilityIndex(halstead, avgCyclomatic, codeLines, commentLines);
161
- // Code Entropy (Shannon entropy of code tokens)
162
- const codeEntropy = this.calculateCodeEntropy(content);
163
- // AI slop indicators
164
- const maxParamsInFunction = this.calculateMaxParams(functions);
165
- const aiCommentPatterns = this.countAICommentPatterns(sourceFile);
166
- const singleUseFunctions = this.countSingleUseFunctions(functions);
167
- const tryCatchCount = this.countTryCatch(sourceFile);
168
- return {
169
- filePath: path.relative(process.cwd(), absolutePath),
170
- maxNestingDepth,
171
- avgFunctionLength,
172
- maxFunctionLength,
173
- functionCount: functions.length,
174
- cyclomaticComplexity: avgCyclomatic,
175
- maxCyclomaticComplexity: maxCyclomatic,
176
- cognitiveComplexity: cognitive,
177
- halsteadVolume: Math.round(halstead * 10) / 10,
178
- maintainabilityIndex: Math.round(maintainabilityIndex * 10) / 10,
179
- linesOfCode: codeLines,
180
- commentLines,
181
- codeEntropy: Math.round(codeEntropy * 100) / 100,
182
- maxParamsInFunction,
183
- aiCommentPatterns,
184
- singleUseFunctions,
185
- tryCatchCount,
186
- };
133
+ return this.computeMetrics(parsed);
187
134
  }
188
135
  catch (err) {
189
136
  this.log(`Analysis error for ${filePath}: ${err.message}`);
190
137
  return null;
191
138
  }
192
139
  }
140
+ /**
141
+ * Read file and parse to TypeScript AST
142
+ */
143
+ readAndParse(filePath) {
144
+ const absolutePath = path.resolve(filePath);
145
+ if (!fs.existsSync(absolutePath))
146
+ return null;
147
+ const content = fs.readFileSync(absolutePath, "utf-8");
148
+ const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);
149
+ return { absolutePath, content, sourceFile };
150
+ }
151
+ /**
152
+ * Compute all metrics from parsed source
153
+ */
154
+ computeMetrics(parsed) {
155
+ const { absolutePath, content, sourceFile } = parsed;
156
+ const lines = content.split("\n");
157
+ // Line counts and function collection
158
+ const { codeLines, commentLines } = this.countLines(sourceFile, lines);
159
+ const functions = this.collectFunctionMetrics(sourceFile);
160
+ // File-level complexity metrics
161
+ const maxNestingDepth = this.calculateMaxNesting(sourceFile, 0);
162
+ const cognitive = this.calculateCognitiveComplexity(sourceFile);
163
+ const halstead = this.calculateHalsteadVolume(sourceFile);
164
+ // Aggregate function statistics
165
+ const funcStats = this.aggregateFunctionStats(functions);
166
+ // Derived metrics
167
+ const maintainabilityIndex = this.calculateMaintainabilityIndex(halstead, funcStats.avgCyclomatic, codeLines, commentLines);
168
+ const codeEntropy = this.calculateCodeEntropy(content);
169
+ // AI slop indicators
170
+ const maxParamsInFunction = this.calculateMaxParams(functions);
171
+ const aiCommentPatterns = this.countAICommentPatterns(sourceFile);
172
+ const singleUseFunctions = this.countSingleUseFunctions(functions);
173
+ const tryCatchCount = this.countTryCatch(sourceFile);
174
+ return {
175
+ filePath: path.relative(process.cwd(), absolutePath),
176
+ maxNestingDepth,
177
+ avgFunctionLength: funcStats.avgLength,
178
+ maxFunctionLength: funcStats.maxLength,
179
+ functionCount: functions.length,
180
+ cyclomaticComplexity: funcStats.avgCyclomatic,
181
+ maxCyclomaticComplexity: funcStats.maxCyclomatic,
182
+ cognitiveComplexity: cognitive,
183
+ halsteadVolume: Math.round(halstead * 10) / 10,
184
+ maintainabilityIndex: Math.round(maintainabilityIndex * 10) / 10,
185
+ linesOfCode: codeLines,
186
+ commentLines,
187
+ codeEntropy: Math.round(codeEntropy * 100) / 100,
188
+ maxParamsInFunction,
189
+ aiCommentPatterns,
190
+ singleUseFunctions,
191
+ tryCatchCount,
192
+ };
193
+ }
194
+ /**
195
+ * Aggregate function metrics into summary statistics
196
+ */
197
+ aggregateFunctionStats(functions) {
198
+ if (functions.length === 0) {
199
+ return { avgLength: 0, maxLength: 0, avgCyclomatic: 1, maxCyclomatic: 1 };
200
+ }
201
+ const lengths = functions.map((f) => f.length);
202
+ const cyclomatics = functions.map((f) => f.cyclomatic);
203
+ const sum = (arr) => arr.reduce((a, b) => a + b, 0);
204
+ return {
205
+ avgLength: Math.round(sum(lengths) / lengths.length),
206
+ maxLength: Math.max(...lengths),
207
+ avgCyclomatic: Math.max(1, Math.round(sum(cyclomatics) / cyclomatics.length)),
208
+ maxCyclomatic: Math.max(1, Math.max(...cyclomatics)),
209
+ };
210
+ }
193
211
  /**
194
212
  * Format metrics for display
195
213
  */
@@ -401,7 +419,15 @@ export class ComplexityClient {
401
419
  return { codeLines, commentLines };
402
420
  }
403
421
  // --- Private: Function Metrics Collection ---
404
- collectFunctionMetrics(node, sourceFile, functions, nestingLevel) {
422
+ /**
423
+ * Collect metrics for all functions in the source file
424
+ */
425
+ collectFunctionMetrics(sourceFile) {
426
+ const functions = [];
427
+ this.visitFunctionMetrics(sourceFile, sourceFile, functions, 0);
428
+ return functions;
429
+ }
430
+ visitFunctionMetrics(node, sourceFile, functions, nestingLevel) {
405
431
  if (FUNCTION_LIKE_NODES.has(node.kind)) {
406
432
  const funcNode = node;
407
433
  const startLine = sourceFile.getLineAndCharacterOfPosition(node.getStart()).line;
@@ -427,7 +453,7 @@ export class ComplexityClient {
427
453
  ? nestingLevel + 1
428
454
  : nestingLevel;
429
455
  ts.forEachChild(node, (child) => {
430
- this.collectFunctionMetrics(child, sourceFile, functions, newNesting);
456
+ this.visitFunctionMetrics(child, sourceFile, functions, newNesting);
431
457
  });
432
458
  }
433
459
  // --- Private: Max Nesting Depth ---
@@ -95,46 +95,32 @@ export class DependencyChecker {
95
95
  importsChanged(filePath) {
96
96
  const normalized = path.resolve(filePath);
97
97
  if (!fs.existsSync(normalized)) {
98
- // File deleted, remove from cache
99
98
  this.importCache.delete(normalized);
100
99
  return true;
101
100
  }
102
101
  const stat = fs.statSync(normalized);
103
- const mtime = stat.mtimeMs;
104
102
  const cached = this.importCache.get(normalized);
105
- // If timestamp hasn't changed, imports haven't changed
106
- if (cached && cached.timestamp >= mtime) {
103
+ // Fast path: timestamp hasn't changed
104
+ if (cached && cached.timestamp >= stat.mtimeMs) {
107
105
  return false;
108
106
  }
109
- // Parse new imports
107
+ // Compare actual imports
110
108
  const newImports = this.extractImports(normalized);
111
- const newEntry = {
112
- imports: newImports,
113
- timestamp: mtime,
114
- };
115
- // Check if imports actually changed
116
- if (cached) {
117
- if (cached.imports.size !== newImports.size) {
118
- this.importCache.set(normalized, newEntry);
119
- return true;
120
- }
121
- for (const imp of newImports) {
122
- if (!cached.imports.has(imp)) {
123
- this.importCache.set(normalized, newEntry);
124
- return true;
125
- }
126
- }
127
- for (const imp of cached.imports) {
128
- if (!newImports.has(imp)) {
129
- this.importCache.set(normalized, newEntry);
130
- return true;
131
- }
132
- }
133
- // Imports are the same, just update timestamp
134
- this.importCache.set(normalized, newEntry);
109
+ const hasChanged = !cached || !this.setsEqual(cached.imports, newImports);
110
+ // Update cache
111
+ this.importCache.set(normalized, { imports: newImports, timestamp: stat.mtimeMs });
112
+ return hasChanged;
113
+ }
114
+ /**
115
+ * Check if two sets have the same elements
116
+ */
117
+ setsEqual(a, b) {
118
+ if (a.size !== b.size)
135
119
  return false;
120
+ for (const item of a) {
121
+ if (!b.has(item))
122
+ return false;
136
123
  }
137
- this.importCache.set(normalized, newEntry);
138
124
  return true;
139
125
  }
140
126
  /**
@@ -0,0 +1,215 @@
1
+ /**
2
+ * Metrics History Tracker for pi-lens
3
+ *
4
+ * Persists complexity metrics per commit to track trends over time.
5
+ * Captures snapshots passively (session start) and explicitly (/lens-metrics).
6
+ *
7
+ * Storage: .pi-lens/metrics-history.json
8
+ */
9
+ import * as fs from "node:fs";
10
+ import * as path from "node:path";
11
+ // --- Constants ---
12
+ const HISTORY_FILE = ".pi-lens/metrics-history.json";
13
+ const MAX_HISTORY_PER_FILE = 20;
14
+ // --- Git Helpers ---
15
+ /**
16
+ * Get current git commit hash (short)
17
+ */
18
+ function getCurrentCommit() {
19
+ try {
20
+ const { execSync } = require("node:child_process");
21
+ return execSync("git rev-parse --short HEAD", {
22
+ encoding: "utf-8",
23
+ timeout: 5000,
24
+ }).trim();
25
+ }
26
+ catch {
27
+ return "unknown";
28
+ }
29
+ }
30
+ // --- History Management ---
31
+ /**
32
+ * Load history from disk (or return empty)
33
+ */
34
+ export function loadHistory() {
35
+ const historyPath = path.join(process.cwd(), HISTORY_FILE);
36
+ if (!fs.existsSync(historyPath)) {
37
+ return {
38
+ version: 1,
39
+ files: {},
40
+ capturedAt: new Date().toISOString(),
41
+ };
42
+ }
43
+ try {
44
+ const content = fs.readFileSync(historyPath, "utf-8");
45
+ return JSON.parse(content);
46
+ }
47
+ catch {
48
+ return {
49
+ version: 1,
50
+ files: {},
51
+ capturedAt: new Date().toISOString(),
52
+ };
53
+ }
54
+ }
55
+ /**
56
+ * Save history to disk
57
+ */
58
+ export function saveHistory(history) {
59
+ const historyDir = path.join(process.cwd(), ".pi-lens");
60
+ if (!fs.existsSync(historyDir)) {
61
+ fs.mkdirSync(historyDir, { recursive: true });
62
+ }
63
+ history.capturedAt = new Date().toISOString();
64
+ const historyPath = path.join(historyDir, "metrics-history.json");
65
+ fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
66
+ }
67
+ /**
68
+ * Capture a snapshot for a file's current metrics
69
+ */
70
+ export function captureSnapshot(filePath, metrics, history) {
71
+ const hist = history ?? loadHistory();
72
+ const relativePath = path.relative(process.cwd(), filePath);
73
+ const commit = getCurrentCommit();
74
+ const snapshot = {
75
+ commit,
76
+ timestamp: new Date().toISOString(),
77
+ mi: Math.round(metrics.maintainabilityIndex * 10) / 10,
78
+ cognitive: metrics.cognitiveComplexity,
79
+ nesting: metrics.maxNestingDepth,
80
+ lines: metrics.linesOfCode,
81
+ };
82
+ const existing = hist.files[relativePath];
83
+ if (existing) {
84
+ // Append to history (cap at MAX_HISTORY_PER_FILE)
85
+ existing.history.push(snapshot);
86
+ if (existing.history.length > MAX_HISTORY_PER_FILE) {
87
+ existing.history = existing.history.slice(-MAX_HISTORY_PER_FILE);
88
+ }
89
+ existing.latest = snapshot;
90
+ existing.trend = computeTrend(existing.history);
91
+ }
92
+ else {
93
+ // New file
94
+ hist.files[relativePath] = {
95
+ latest: snapshot,
96
+ history: [snapshot],
97
+ trend: "stable",
98
+ };
99
+ }
100
+ return hist;
101
+ }
102
+ /**
103
+ * Capture snapshots for multiple files
104
+ */
105
+ export function captureSnapshots(files) {
106
+ let history = loadHistory();
107
+ for (const file of files) {
108
+ history = captureSnapshot(file.filePath, file.metrics, history);
109
+ }
110
+ saveHistory(history);
111
+ return history;
112
+ }
113
+ // --- Trend Analysis ---
114
+ /**
115
+ * Compute trend direction from history snapshots
116
+ * Uses last 3 snapshots for stability (or 2 if only 2 available)
117
+ */
118
+ export function computeTrend(history) {
119
+ if (history.length < 2)
120
+ return "stable";
121
+ const recent = history.slice(-3);
122
+ const first = recent[0];
123
+ const last = recent[recent.length - 1];
124
+ // Use MI as primary indicator, cognitive as secondary
125
+ const miDelta = last.mi - first.mi;
126
+ const cogDelta = last.cognitive - first.cognitive;
127
+ // Thresholds (MI changes < 2 are noise)
128
+ if (miDelta > 2)
129
+ return "improving";
130
+ if (miDelta < -2)
131
+ return "regressing";
132
+ // If MI is stable, check cognitive
133
+ if (cogDelta < -10)
134
+ return "improving";
135
+ if (cogDelta > 10)
136
+ return "regressing";
137
+ return "stable";
138
+ }
139
+ /**
140
+ * Get delta between current snapshot and previous
141
+ */
142
+ export function getDelta(history) {
143
+ if (!history || history.history.length < 2)
144
+ return null;
145
+ const current = history.history[history.history.length - 1];
146
+ const previous = history.history[history.history.length - 2];
147
+ return {
148
+ mi: Math.round((current.mi - previous.mi) * 10) / 10,
149
+ cognitive: current.cognitive - previous.cognitive,
150
+ trend: history.trend,
151
+ };
152
+ }
153
+ /**
154
+ * Get trend emoji for display
155
+ */
156
+ export function getTrendEmoji(trend) {
157
+ switch (trend) {
158
+ case "improving":
159
+ return "📈";
160
+ case "regressing":
161
+ return "📉";
162
+ default:
163
+ return "➡️";
164
+ }
165
+ }
166
+ /**
167
+ * Get trend summary across all files
168
+ */
169
+ export function getTrendSummary(history) {
170
+ let improving = 0;
171
+ let regressing = 0;
172
+ let stable = 0;
173
+ const regressions = [];
174
+ for (const [file, fileHistory] of Object.entries(history.files)) {
175
+ switch (fileHistory.trend) {
176
+ case "improving":
177
+ improving++;
178
+ break;
179
+ case "regressing":
180
+ regressing++;
181
+ const delta = getDelta(fileHistory);
182
+ if (delta) {
183
+ regressions.push({ file, miDelta: delta.mi });
184
+ }
185
+ break;
186
+ default:
187
+ stable++;
188
+ }
189
+ }
190
+ // Sort regressions by MI delta (worst first)
191
+ regressions.sort((a, b) => a.miDelta - b.miDelta);
192
+ return {
193
+ improving,
194
+ regressing,
195
+ stable,
196
+ worstRegressions: regressions.slice(0, 5),
197
+ };
198
+ }
199
+ /**
200
+ * Format trend for metrics table
201
+ */
202
+ export function formatTrendCell(filePath, history) {
203
+ const relativePath = path.relative(process.cwd(), filePath);
204
+ const fileHistory = history.files[relativePath];
205
+ if (!fileHistory || fileHistory.history.length < 2) {
206
+ return "—"; // No history
207
+ }
208
+ const delta = getDelta(fileHistory);
209
+ if (!delta)
210
+ return "—";
211
+ const emoji = getTrendEmoji(delta.trend);
212
+ const miSign = delta.mi > 0 ? "+" : "";
213
+ const miColor = delta.mi > 0 ? "🟢" : delta.mi < 0 ? "🔴" : "⚪";
214
+ return `${emoji} ${miColor}${miSign}${delta.mi}`;
215
+ }
@@ -0,0 +1,300 @@
1
+ /**
2
+ * Metrics History Tracker for pi-lens
3
+ *
4
+ * Persists complexity metrics per commit to track trends over time.
5
+ * Captures snapshots passively (session start) and explicitly (/lens-metrics).
6
+ *
7
+ * Storage: .pi-lens/metrics-history.json
8
+ */
9
+
10
+ import * as fs from "node:fs";
11
+ import * as path from "node:path";
12
+
13
+ // --- Types ---
14
+
15
+ export interface MetricSnapshot {
16
+ commit: string;
17
+ timestamp: string;
18
+ mi: number;
19
+ cognitive: number;
20
+ nesting: number;
21
+ lines: number;
22
+ }
23
+
24
+ export interface FileHistory {
25
+ latest: MetricSnapshot;
26
+ history: MetricSnapshot[];
27
+ trend: "improving" | "stable" | "regressing";
28
+ }
29
+
30
+ export interface MetricsHistory {
31
+ version: number;
32
+ files: Record<string, FileHistory>;
33
+ capturedAt: string;
34
+ }
35
+
36
+ export type TrendDirection = "improving" | "stable" | "regressing";
37
+
38
+ // --- Constants ---
39
+
40
+ const HISTORY_FILE = ".pi-lens/metrics-history.json";
41
+ const MAX_HISTORY_PER_FILE = 20;
42
+
43
+ // --- Git Helpers ---
44
+
45
+ /**
46
+ * Get current git commit hash (short)
47
+ */
48
+ function getCurrentCommit(): string {
49
+ try {
50
+ const { execSync } = require("node:child_process");
51
+ return execSync("git rev-parse --short HEAD", {
52
+ encoding: "utf-8",
53
+ timeout: 5000,
54
+ }).trim();
55
+ } catch {
56
+ return "unknown";
57
+ }
58
+ }
59
+
60
+ // --- History Management ---
61
+
62
+ /**
63
+ * Load history from disk (or return empty)
64
+ */
65
+ export function loadHistory(): MetricsHistory {
66
+ const historyPath = path.join(process.cwd(), HISTORY_FILE);
67
+
68
+ if (!fs.existsSync(historyPath)) {
69
+ return {
70
+ version: 1,
71
+ files: {},
72
+ capturedAt: new Date().toISOString(),
73
+ };
74
+ }
75
+
76
+ try {
77
+ const content = fs.readFileSync(historyPath, "utf-8");
78
+ return JSON.parse(content);
79
+ } catch {
80
+ return {
81
+ version: 1,
82
+ files: {},
83
+ capturedAt: new Date().toISOString(),
84
+ };
85
+ }
86
+ }
87
+
88
+ /**
89
+ * Save history to disk
90
+ */
91
+ export function saveHistory(history: MetricsHistory): void {
92
+ const historyDir = path.join(process.cwd(), ".pi-lens");
93
+ if (!fs.existsSync(historyDir)) {
94
+ fs.mkdirSync(historyDir, { recursive: true });
95
+ }
96
+
97
+ history.capturedAt = new Date().toISOString();
98
+ const historyPath = path.join(historyDir, "metrics-history.json");
99
+ fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
100
+ }
101
+
102
+ /**
103
+ * Capture a snapshot for a file's current metrics
104
+ */
105
+ export function captureSnapshot(
106
+ filePath: string,
107
+ metrics: {
108
+ maintainabilityIndex: number;
109
+ cognitiveComplexity: number;
110
+ maxNestingDepth: number;
111
+ linesOfCode: number;
112
+ },
113
+ history?: MetricsHistory,
114
+ ): MetricsHistory {
115
+ const hist = history ?? loadHistory();
116
+ const relativePath = path.relative(process.cwd(), filePath);
117
+ const commit = getCurrentCommit();
118
+
119
+ const snapshot: MetricSnapshot = {
120
+ commit,
121
+ timestamp: new Date().toISOString(),
122
+ mi: Math.round(metrics.maintainabilityIndex * 10) / 10,
123
+ cognitive: metrics.cognitiveComplexity,
124
+ nesting: metrics.maxNestingDepth,
125
+ lines: metrics.linesOfCode,
126
+ };
127
+
128
+ const existing = hist.files[relativePath];
129
+
130
+ if (existing) {
131
+ // Append to history (cap at MAX_HISTORY_PER_FILE)
132
+ existing.history.push(snapshot);
133
+ if (existing.history.length > MAX_HISTORY_PER_FILE) {
134
+ existing.history = existing.history.slice(-MAX_HISTORY_PER_FILE);
135
+ }
136
+ existing.latest = snapshot;
137
+ existing.trend = computeTrend(existing.history);
138
+ } else {
139
+ // New file
140
+ hist.files[relativePath] = {
141
+ latest: snapshot,
142
+ history: [snapshot],
143
+ trend: "stable",
144
+ };
145
+ }
146
+
147
+ return hist;
148
+ }
149
+
150
+ /**
151
+ * Capture snapshots for multiple files
152
+ */
153
+ export function captureSnapshots(
154
+ files: Array<{
155
+ filePath: string;
156
+ metrics: {
157
+ maintainabilityIndex: number;
158
+ cognitiveComplexity: number;
159
+ maxNestingDepth: number;
160
+ linesOfCode: number;
161
+ };
162
+ }>,
163
+ ): MetricsHistory {
164
+ let history = loadHistory();
165
+
166
+ for (const file of files) {
167
+ history = captureSnapshot(file.filePath, file.metrics, history);
168
+ }
169
+
170
+ saveHistory(history);
171
+ return history;
172
+ }
173
+
174
+ // --- Trend Analysis ---
175
+
176
+ /**
177
+ * Compute trend direction from history snapshots
178
+ * Uses last 3 snapshots for stability (or 2 if only 2 available)
179
+ */
180
+ export function computeTrend(history: MetricSnapshot[]): TrendDirection {
181
+ if (history.length < 2) return "stable";
182
+
183
+ const recent = history.slice(-3);
184
+ const first = recent[0];
185
+ const last = recent[recent.length - 1];
186
+
187
+ // Use MI as primary indicator, cognitive as secondary
188
+ const miDelta = last.mi - first.mi;
189
+ const cogDelta = last.cognitive - first.cognitive;
190
+
191
+ // Thresholds (MI changes < 2 are noise)
192
+ if (miDelta > 2) return "improving";
193
+ if (miDelta < -2) return "regressing";
194
+
195
+ // If MI is stable, check cognitive
196
+ if (cogDelta < -10) return "improving";
197
+ if (cogDelta > 10) return "regressing";
198
+
199
+ return "stable";
200
+ }
201
+
202
+ /**
203
+ * Get delta between current snapshot and previous
204
+ */
205
+ export function getDelta(history: FileHistory | null): {
206
+ mi: number;
207
+ cognitive: number;
208
+ trend: TrendDirection;
209
+ } | null {
210
+ if (!history || history.history.length < 2) return null;
211
+
212
+ const current = history.history[history.history.length - 1];
213
+ const previous = history.history[history.history.length - 2];
214
+
215
+ return {
216
+ mi: Math.round((current.mi - previous.mi) * 10) / 10,
217
+ cognitive: current.cognitive - previous.cognitive,
218
+ trend: history.trend,
219
+ };
220
+ }
221
+
222
+ /**
223
+ * Get trend emoji for display
224
+ */
225
+ export function getTrendEmoji(trend: TrendDirection): string {
226
+ switch (trend) {
227
+ case "improving":
228
+ return "📈";
229
+ case "regressing":
230
+ return "📉";
231
+ default:
232
+ return "➡️";
233
+ }
234
+ }
235
+
236
+ /**
237
+ * Get trend summary across all files
238
+ */
239
+ export function getTrendSummary(history: MetricsHistory): {
240
+ improving: number;
241
+ regressing: number;
242
+ stable: number;
243
+ worstRegressions: Array<{ file: string; miDelta: number }>;
244
+ } {
245
+ let improving = 0;
246
+ let regressing = 0;
247
+ let stable = 0;
248
+ const regressions: Array<{ file: string; miDelta: number }> = [];
249
+
250
+ for (const [file, fileHistory] of Object.entries(history.files)) {
251
+ switch (fileHistory.trend) {
252
+ case "improving":
253
+ improving++;
254
+ break;
255
+ case "regressing":
256
+ regressing++;
257
+ const delta = getDelta(fileHistory);
258
+ if (delta) {
259
+ regressions.push({ file, miDelta: delta.mi });
260
+ }
261
+ break;
262
+ default:
263
+ stable++;
264
+ }
265
+ }
266
+
267
+ // Sort regressions by MI delta (worst first)
268
+ regressions.sort((a, b) => a.miDelta - b.miDelta);
269
+
270
+ return {
271
+ improving,
272
+ regressing,
273
+ stable,
274
+ worstRegressions: regressions.slice(0, 5),
275
+ };
276
+ }
277
+
278
+ /**
279
+ * Format trend for metrics table
280
+ */
281
+ export function formatTrendCell(
282
+ filePath: string,
283
+ history: MetricsHistory,
284
+ ): string {
285
+ const relativePath = path.relative(process.cwd(), filePath);
286
+ const fileHistory = history.files[relativePath];
287
+
288
+ if (!fileHistory || fileHistory.history.length < 2) {
289
+ return "—"; // No history
290
+ }
291
+
292
+ const delta = getDelta(fileHistory);
293
+ if (!delta) return "—";
294
+
295
+ const emoji = getTrendEmoji(delta.trend);
296
+ const miSign = delta.mi > 0 ? "+" : "";
297
+ const miColor = delta.mi > 0 ? "🟢" : delta.mi < 0 ? "🔴" : "⚪";
298
+
299
+ return `${emoji} ${miColor}${miSign}${delta.mi}`;
300
+ }
package/index.ts CHANGED
@@ -14,6 +14,7 @@ import { buildInterviewer } from "./clients/interviewer.js";
14
14
  import { JscpdClient } from "./clients/jscpd-client.js";
15
15
  import { KnipClient } from "./clients/knip-client.js";
16
16
  import { MetricsClient } from "./clients/metrics-client.js";
17
+ import { captureSnapshots, getTrendSummary, formatTrendCell } from "./clients/metrics-history.js";
17
18
  import { RuffClient } from "./clients/ruff-client.js";
18
19
  import { RustClient } from "./clients/rust-client.js";
19
20
  import { getSourceFiles } from "./clients/scan-utils.js";
@@ -366,6 +367,19 @@ export default function (pi: ExtensionAPI) {
366
367
  gradeCount[g.letter as keyof typeof gradeCount]++;
367
368
  }
368
369
 
370
+ // Capture snapshots for history tracking
371
+ const history = captureSnapshots(
372
+ results.map((r) => ({
373
+ filePath: r.filePath,
374
+ metrics: {
375
+ maintainabilityIndex: r.maintainabilityIndex,
376
+ cognitiveComplexity: r.cognitiveComplexity,
377
+ maxNestingDepth: r.maxNestingDepth,
378
+ linesOfCode: r.linesOfCode,
379
+ },
380
+ })),
381
+ );
382
+
369
383
  // Build report
370
384
  let report = `# Code Metrics Report: ${projectName}\n\n`;
371
385
  report += `**Generated:** ${new Date().toISOString()}\n\n`;
@@ -438,8 +452,8 @@ export default function (pi: ExtensionAPI) {
438
452
 
439
453
  // All files table (sorted by MI ascending)
440
454
  report += `## All Files\n\n`;
441
- report += `| Grade | File | MI | Cognitive | Cyclomatic | Nesting | Functions | LOC | Entropy |\n`;
442
- report += `|-------|------|-----|-----------|------------|---------|-----------|-----|--------|\n`;
455
+ report += `| Grade | File | MI | Cognitive | Nesting | LOC | Trend |\n`;
456
+ report += `|-------|------|-----|-----------|---------|-----|-------|\n`;
443
457
 
444
458
  const sorted = [...results].sort(
445
459
  (a, b) => a.maintainabilityIndex - b.maintainabilityIndex,
@@ -455,11 +469,30 @@ export default function (pi: ExtensionAPI) {
455
469
 
456
470
  // Make path relative for readability
457
471
  const relPath = path.relative(targetPath, f.filePath);
472
+ const trendCell = formatTrendCell(f.filePath, history);
458
473
 
459
- report += `| ${grade} | ${relPath} | ${mi.toFixed(1)} | ${f.cognitiveComplexity} | ${f.cyclomaticComplexity.toFixed(1)} | ${f.maxNestingDepth} | ${f.functionCount} | ${f.linesOfCode} | ${f.codeEntropy.toFixed(2)} |\n`;
474
+ report += `| ${grade} | ${relPath} | ${mi.toFixed(1)} | ${f.cognitiveComplexity} | ${f.maxNestingDepth} | ${f.linesOfCode} | ${trendCell} |\n`;
460
475
  }
461
476
  report += `\n`;
462
477
 
478
+ // Trend Summary
479
+ const trendSummary = getTrendSummary(history);
480
+ report += `## Trend Summary\n\n`;
481
+ report += `| Trend | Count |\n`;
482
+ report += `|-------|-------|\n`;
483
+ report += `| 📈 Improving | ${trendSummary.improving} |\n`;
484
+ report += `| ➡️ Stable | ${trendSummary.stable} |\n`;
485
+ report += `| 📉 Regressing | ${trendSummary.regressing} |\n\n`;
486
+
487
+ if (trendSummary.worstRegressions.length > 0) {
488
+ report += `### Top Regressions\n\n`;
489
+ report += `Files with largest MI decline since last scan:\n\n`;
490
+ for (const r of trendSummary.worstRegressions) {
491
+ report += `- **${r.file}**: MI ${r.miDelta > 0 ? "+" : ""}${r.miDelta}\n`;
492
+ }
493
+ report += `\n`;
494
+ }
495
+
463
496
  // Top 10 worst files (actionable)
464
497
  report += `## Top 10 Files Needing Attention\n\n`;
465
498
  report += `These files have the lowest maintainability scores:\n\n`;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pi-lens",
3
- "version": "2.0.38",
3
+ "version": "2.0.39",
4
4
  "description": "Real-time code quality feedback for pi — TypeScript LSP, Biome, ast-grep, Ruff, complexity metrics, duplicate detection. Includes automated fix loop (/lens-booboo-fix) and interactive architectural refactoring (/lens-booboo-refactor) with browser-based interviews.",
5
5
  "repository": {
6
6
  "type": "git",