ctrlcode-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- ctrlcode/__init__.py +8 -0
- ctrlcode/agents/__init__.py +29 -0
- ctrlcode/agents/cleanup.py +388 -0
- ctrlcode/agents/communication.py +439 -0
- ctrlcode/agents/observability.py +421 -0
- ctrlcode/agents/react_loop.py +297 -0
- ctrlcode/agents/registry.py +211 -0
- ctrlcode/agents/result_parser.py +242 -0
- ctrlcode/agents/workflow.py +723 -0
- ctrlcode/analysis/__init__.py +28 -0
- ctrlcode/analysis/ast_diff.py +163 -0
- ctrlcode/analysis/bug_detector.py +149 -0
- ctrlcode/analysis/code_graphs.py +329 -0
- ctrlcode/analysis/semantic.py +205 -0
- ctrlcode/analysis/static.py +183 -0
- ctrlcode/analysis/synthesizer.py +281 -0
- ctrlcode/analysis/tests.py +189 -0
- ctrlcode/cleanup/__init__.py +16 -0
- ctrlcode/cleanup/auto_merge.py +350 -0
- ctrlcode/cleanup/doc_gardening.py +388 -0
- ctrlcode/cleanup/pr_automation.py +330 -0
- ctrlcode/cleanup/scheduler.py +356 -0
- ctrlcode/config.py +380 -0
- ctrlcode/embeddings/__init__.py +6 -0
- ctrlcode/embeddings/embedder.py +192 -0
- ctrlcode/embeddings/vector_store.py +213 -0
- ctrlcode/fuzzing/__init__.py +24 -0
- ctrlcode/fuzzing/analyzer.py +280 -0
- ctrlcode/fuzzing/budget.py +112 -0
- ctrlcode/fuzzing/context.py +665 -0
- ctrlcode/fuzzing/context_fuzzer.py +506 -0
- ctrlcode/fuzzing/derived_orchestrator.py +732 -0
- ctrlcode/fuzzing/oracle_adapter.py +135 -0
- ctrlcode/linters/__init__.py +11 -0
- ctrlcode/linters/hand_rolled_utils.py +221 -0
- ctrlcode/linters/yolo_parsing.py +217 -0
- ctrlcode/metrics/__init__.py +6 -0
- ctrlcode/metrics/dashboard.py +283 -0
- ctrlcode/metrics/tech_debt.py +663 -0
- ctrlcode/paths.py +68 -0
- ctrlcode/permissions.py +179 -0
- ctrlcode/providers/__init__.py +15 -0
- ctrlcode/providers/anthropic.py +138 -0
- ctrlcode/providers/base.py +77 -0
- ctrlcode/providers/openai.py +197 -0
- ctrlcode/providers/parallel.py +104 -0
- ctrlcode/server.py +871 -0
- ctrlcode/session/__init__.py +6 -0
- ctrlcode/session/baseline.py +57 -0
- ctrlcode/session/manager.py +967 -0
- ctrlcode/skills/__init__.py +10 -0
- ctrlcode/skills/builtin/commit.toml +29 -0
- ctrlcode/skills/builtin/docs.toml +25 -0
- ctrlcode/skills/builtin/refactor.toml +33 -0
- ctrlcode/skills/builtin/review.toml +28 -0
- ctrlcode/skills/builtin/test.toml +28 -0
- ctrlcode/skills/loader.py +111 -0
- ctrlcode/skills/registry.py +139 -0
- ctrlcode/storage/__init__.py +19 -0
- ctrlcode/storage/history_db.py +708 -0
- ctrlcode/tools/__init__.py +220 -0
- ctrlcode/tools/bash.py +112 -0
- ctrlcode/tools/browser.py +352 -0
- ctrlcode/tools/executor.py +153 -0
- ctrlcode/tools/explore.py +486 -0
- ctrlcode/tools/mcp.py +108 -0
- ctrlcode/tools/observability.py +561 -0
- ctrlcode/tools/registry.py +193 -0
- ctrlcode/tools/todo.py +291 -0
- ctrlcode/tools/update.py +266 -0
- ctrlcode/tools/webfetch.py +147 -0
- ctrlcode-0.1.0.dist-info/METADATA +93 -0
- ctrlcode-0.1.0.dist-info/RECORD +75 -0
- ctrlcode-0.1.0.dist-info/WHEEL +4 -0
- ctrlcode-0.1.0.dist-info/entry_points.txt +3 -0
ctrlcode/metrics/tech_debt.py
@@ -0,0 +1,663 @@
"""Technical debt metrics tracking and dashboard generation."""

import json
import sqlite3
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any
from dataclasses import dataclass


@dataclass
class TechDebtSnapshot:
    """Snapshot of technical debt at a point in time."""

    timestamp: str
    total_violations: int
    violations_by_type: dict[str, int]
    violations_by_file: dict[str, int]
    stale_docs_count: int
    code_smells: dict[str, int]
    cleanup_prs: dict[str, int]  # created, merged, rejected


class TechDebtMetrics:
    """Track and analyze technical debt over time."""

    def __init__(self, storage_path: Path | str, project_path: Path | str | None = None):
        """
        Initialize tech debt metrics tracker.

        Args:
            storage_path: Path to SQLite database or JSON storage directory
            project_path: Base directory of the project being tracked (defaults to cwd)
        """
        self.storage_path = Path(storage_path)
        self.storage_path.mkdir(parents=True, exist_ok=True)

        # Store project path for filtering
        self.project_path = str(Path(project_path).resolve()) if project_path else str(Path.cwd().resolve())

        # Use SQLite for efficient querying
        self.db_path = self.storage_path / "tech_debt.db"
        self._init_database()

    def _get_schema_version(self, cursor) -> int:
        """Get current schema version."""
        try:
            cursor.execute("SELECT version FROM schema_version ORDER BY version DESC LIMIT 1")
            row = cursor.fetchone()
            return row[0] if row else 0
        except sqlite3.OperationalError:
            # schema_version table doesn't exist yet
            return 0

    def _set_schema_version(self, cursor, version: int):
        """Set schema version."""
        cursor.execute("INSERT INTO schema_version (version) VALUES (?)", (version,))

    def _run_migrations(self, conn, cursor):
        """Run database migrations to bring schema up to date."""
        current_version = self._get_schema_version(cursor)

        # Migration 1: Initial schema
        if current_version < 1:
            # Schema version table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    version INTEGER PRIMARY KEY,
                    applied_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Snapshots table (without project_path - old schema)
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS snapshots (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT NOT NULL,
                    total_violations INTEGER NOT NULL,
                    stale_docs_count INTEGER NOT NULL,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Violations table (without project_path - old schema)
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS violations (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    snapshot_id INTEGER NOT NULL,
                    violation_type TEXT NOT NULL,
                    file_path TEXT NOT NULL,
                    line_number INTEGER,
                    message TEXT,
                    severity TEXT,
                    FOREIGN KEY (snapshot_id) REFERENCES snapshots (id)
                )
            """)

            # Code smells table (without project_path - old schema)
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS code_smells (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    snapshot_id INTEGER NOT NULL,
                    smell_type TEXT NOT NULL,
                    count INTEGER NOT NULL,
                    FOREIGN KEY (snapshot_id) REFERENCES snapshots (id)
                )
            """)

            # Cleanup PRs table (without project_path - old schema)
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS cleanup_prs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    pr_number INTEGER,
                    created_at TEXT,
                    merged_at TEXT,
                    status TEXT NOT NULL,
                    files_changed INTEGER,
                    violations_fixed INTEGER
                )
            """)

            self._set_schema_version(cursor, 1)
            conn.commit()

        # Migration 2: Add project_path columns
        if current_version < 2:
            # Add project_path to all tables
            for table in ["snapshots", "violations", "code_smells", "cleanup_prs"]:
                cursor.execute(f"PRAGMA table_info({table})")
                columns = {row[1] for row in cursor.fetchall()}
                if "project_path" not in columns:
                    cursor.execute(f"ALTER TABLE {table} ADD COLUMN project_path TEXT NOT NULL DEFAULT ''")

            # Create indexes
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_snapshots_project
                ON snapshots(project_path)
            """)
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_violations_project
                ON violations(project_path)
            """)
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_code_smells_project
                ON code_smells(project_path)
            """)
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_cleanup_prs_project
                ON cleanup_prs(project_path)
            """)

            self._set_schema_version(cursor, 2)
            conn.commit()

    def _init_database(self):
        """Initialize SQLite database schema with migrations."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        try:
            self._run_migrations(conn, cursor)
        finally:
            conn.close()

    def record_snapshot(self, snapshot: TechDebtSnapshot) -> int:
        """
        Record a tech debt snapshot.

        Args:
            snapshot: TechDebtSnapshot to record

        Returns:
            Snapshot ID
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        # Insert snapshot
        cursor.execute("""
            INSERT INTO snapshots (project_path, timestamp, total_violations, stale_docs_count)
            VALUES (?, ?, ?, ?)
        """, (self.project_path, snapshot.timestamp, snapshot.total_violations, snapshot.stale_docs_count))

        snapshot_id = cursor.lastrowid

        # Insert violations by type and file
        for violation_type, count in snapshot.violations_by_type.items():
            # Store aggregated by type
            cursor.execute("""
                INSERT INTO violations (snapshot_id, project_path, violation_type, file_path, message)
                VALUES (?, ?, ?, ?, ?)
            """, (snapshot_id, self.project_path, violation_type, "aggregated", f"{count} violations"))

        # Insert code smells
        for smell_type, count in snapshot.code_smells.items():
            cursor.execute("""
                INSERT INTO code_smells (snapshot_id, project_path, smell_type, count)
                VALUES (?, ?, ?, ?)
            """, (snapshot_id, self.project_path, smell_type, count))

        conn.commit()
        conn.close()

        return snapshot_id

    def record_cleanup_pr(
        self,
        pr_number: int,
        status: str,
        files_changed: int,
        violations_fixed: int,
        created_at: str | None = None,
        merged_at: str | None = None,
    ):
        """
        Record a cleanup PR.

        Args:
            pr_number: PR number
            status: PR status (open, merged, closed)
            files_changed: Number of files changed
            violations_fixed: Number of violations fixed
            created_at: When PR was created (ISO format)
            merged_at: When PR was merged (ISO format)
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        cursor.execute("""
            INSERT INTO cleanup_prs
            (project_path, pr_number, created_at, merged_at, status, files_changed, violations_fixed)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        """, (self.project_path, pr_number, created_at, merged_at, status, files_changed, violations_fixed))

        conn.commit()
        conn.close()

    def get_current_state(self) -> dict[str, Any]:
        """
        Get current tech debt state for this project.

        Returns:
            Dict with current metrics
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        # Get latest snapshot for this project
        cursor.execute("""
            SELECT timestamp, total_violations, stale_docs_count
            FROM snapshots
            WHERE project_path = ?
            ORDER BY id DESC
            LIMIT 1
        """, (self.project_path,))

        latest = cursor.fetchone()

        if not latest:
            conn.close()
            return {
                "total_violations": 0,
                "stale_docs_count": 0,
                "timestamp": None,
            }

        timestamp, total_violations, stale_docs = latest

        # Get snapshot ID
        cursor.execute("""
            SELECT id FROM snapshots
            WHERE project_path = ? AND timestamp = ?
        """, (self.project_path, timestamp))
        snapshot_id = cursor.fetchone()[0]

        # Get violations by type
        cursor.execute("""
            SELECT violation_type, COUNT(*)
            FROM violations
            WHERE snapshot_id = ? AND project_path = ?
            GROUP BY violation_type
        """, (snapshot_id, self.project_path))

        violations_by_type = dict(cursor.fetchall())

        # Get code smells
        cursor.execute("""
            SELECT smell_type, SUM(count)
            FROM code_smells
            WHERE snapshot_id = ? AND project_path = ?
            GROUP BY smell_type
        """, (snapshot_id, self.project_path))

        code_smells = dict(cursor.fetchall())

        conn.close()

        return {
            "timestamp": timestamp,
            "total_violations": total_violations,
            "stale_docs_count": stale_docs,
            "violations_by_type": violations_by_type,
            "code_smells": code_smells,
        }

    def get_trend(self, days: int = 30) -> list[dict[str, Any]]:
        """
        Get tech debt trend over time for this project.

        Args:
            days: Number of days to look back

        Returns:
            List of snapshots ordered by time
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        cutoff = (datetime.now() - timedelta(days=days)).isoformat()

        cursor.execute("""
            SELECT timestamp, total_violations, stale_docs_count
            FROM snapshots
            WHERE project_path = ? AND timestamp > ?
            ORDER BY timestamp ASC
        """, (self.project_path, cutoff))

        snapshots = []
        for row in cursor.fetchall():
            snapshots.append({
                "timestamp": row[0],
                "total_violations": row[1],
                "stale_docs_count": row[2],
            })

        conn.close()
        return snapshots

    def get_cleanup_velocity(self, days: int = 30) -> dict[str, Any]:
        """
        Get cleanup PR velocity metrics for this project.

        Args:
            days: Number of days to look back

        Returns:
            Dict with velocity metrics
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        cutoff = (datetime.now() - timedelta(days=days)).isoformat()

        # Count PRs by status for this project
        cursor.execute("""
            SELECT status, COUNT(*), SUM(violations_fixed)
            FROM cleanup_prs
            WHERE project_path = ? AND created_at > ?
            GROUP BY status
        """, (self.project_path, cutoff))

        velocity = {
            "created": 0,
            "merged": 0,
            "closed": 0,
            "violations_fixed": 0,
        }

        for row in cursor.fetchall():
            status, count, fixed = row
            if status == "merged":
                velocity["merged"] = count
                velocity["violations_fixed"] = fixed or 0
            elif status == "open":
                velocity["created"] = count
            elif status == "closed":
                velocity["closed"] = count

        conn.close()
        return velocity

    def get_hot_spots(self, limit: int = 10) -> list[dict[str, Any]]:
        """
        Get files with most violations (hot spots) for this project.

        Args:
            limit: Maximum number of hot spots to return

        Returns:
            List of files with violation counts
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        # Get latest snapshot for this project
        cursor.execute("""
            SELECT id FROM snapshots
            WHERE project_path = ?
            ORDER BY id DESC
            LIMIT 1
        """, (self.project_path,))

        snapshot_result = cursor.fetchone()
        if not snapshot_result:
            conn.close()
            return []

        snapshot_id = snapshot_result[0]

        cursor.execute("""
            SELECT file_path, COUNT(*) as count
            FROM violations
            WHERE snapshot_id = ? AND project_path = ? AND file_path != 'aggregated'
            GROUP BY file_path
            ORDER BY count DESC
            LIMIT ?
        """, (snapshot_id, self.project_path, limit))

        hot_spots = []
        for row in cursor.fetchall():
            hot_spots.append({
                "file": row[0],
                "violations": row[1],
            })

        conn.close()
        return hot_spots

    def generate_dashboard_html(self) -> str:
        """
        Generate HTML dashboard for tech debt metrics.

        Returns:
            HTML string
        """
        current = self.get_current_state()
        trend = self.get_trend(days=30)
        velocity = self.get_cleanup_velocity(days=30)
        hot_spots = self.get_hot_spots(limit=10)

        # Generate trend chart data
        trend_labels = [t["timestamp"][:10] for t in trend]  # Date only
        trend_values = [t["total_violations"] for t in trend]

        html = f"""<!DOCTYPE html>
<html>
<head>
    <title>Tech Debt Dashboard - ctrl+code</title>
    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
    <style>
        body {{
            font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
            margin: 0;
            padding: 20px;
            background: #1e1e1e;
            color: #d4d4d4;
        }}
        .container {{
            max-width: 1200px;
            margin: 0 auto;
        }}
        h1 {{
            color: #fff;
            margin-bottom: 10px;
        }}
        .subtitle {{
            color: #888;
            margin-bottom: 30px;
        }}
        .grid {{
            display: grid;
            grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
            gap: 20px;
            margin-bottom: 30px;
        }}
        .card {{
            background: #252526;
            border: 1px solid #3e3e42;
            border-radius: 8px;
            padding: 20px;
        }}
        .card h2 {{
            margin-top: 0;
            font-size: 18px;
            color: #fff;
        }}
        .metric {{
            font-size: 36px;
            font-weight: bold;
            color: #4ec9b0;
            margin: 10px 0;
        }}
        .metric.warning {{
            color: #ce9178;
        }}
        .metric.error {{
            color: #f48771;
        }}
        .chart-container {{
            position: relative;
            height: 300px;
            margin-top: 20px;
        }}
        .list {{
            list-style: none;
            padding: 0;
            margin: 0;
        }}
        .list li {{
            padding: 10px;
            border-bottom: 1px solid #3e3e42;
            display: flex;
            justify-content: space-between;
        }}
        .list li:last-child {{
            border-bottom: none;
        }}
        .badge {{
            background: #3e3e42;
            padding: 4px 8px;
            border-radius: 4px;
            font-size: 12px;
        }}
        .timestamp {{
            color: #888;
            font-size: 14px;
        }}
    </style>
</head>
<body>
    <div class="container">
        <h1>📊 Tech Debt Dashboard</h1>
        <p class="subtitle">Last updated: {current.get('timestamp', 'Never')}</p>

        <div class="grid">
            <div class="card">
                <h2>Total Violations</h2>
<div class="metric {'warning' if current['total_violations'] > 50 else 'error' if current['total_violations'] > 100 else ''}">
                    {current['total_violations']}
                </div>
                <p class="timestamp">Golden principle violations</p>
            </div>

            <div class="card">
                <h2>Stale Documentation</h2>
                <div class="metric {'warning' if current['stale_docs_count'] > 5 else ''}">
                    {current['stale_docs_count']}
                </div>
                <p class="timestamp">Docs needing updates</p>
            </div>

            <div class="card">
                <h2>Cleanup Velocity</h2>
                <div class="metric">
                    {velocity['merged']}
                </div>
                <p class="timestamp">PRs merged (30 days)</p>
            </div>

            <div class="card">
                <h2>Violations Fixed</h2>
                <div class="metric">
                    {velocity['violations_fixed']}
                </div>
                <p class="timestamp">Via cleanup PRs (30 days)</p>
            </div>
        </div>

        <div class="card">
            <h2>Violation Trend (30 Days)</h2>
            <div class="chart-container">
                <canvas id="trendChart"></canvas>
            </div>
        </div>

        <div class="grid" style="margin-top: 20px;">
            <div class="card">
                <h2>Violations by Type</h2>
                <ul class="list">
                    {''.join(f'<li><span>{vtype}</span><span class="badge">{count}</span></li>'
                             for vtype, count in current.get('violations_by_type', {}).items())}
                    {'' if current.get('violations_by_type') else '<li><span>No violations</span></li>'}
                </ul>
            </div>

            <div class="card">
                <h2>Hot Spots (Top Files)</h2>
                <ul class="list">
                    {''.join(f'<li><span>{h["file"]}</span><span class="badge">{h["violations"]}</span></li>'
                             for h in hot_spots)}
                    {'' if hot_spots else '<li><span>No hot spots</span></li>'}
                </ul>
            </div>
        </div>
    </div>

    <script>
        const ctx = document.getElementById('trendChart').getContext('2d');
        new Chart(ctx, {{
            type: 'line',
            data: {{
                labels: {json.dumps(trend_labels)},
                datasets: [{{
                    label: 'Total Violations',
                    data: {json.dumps(trend_values)},
                    borderColor: '#4ec9b0',
                    backgroundColor: 'rgba(78, 201, 176, 0.1)',
                    tension: 0.4
                }}]
            }},
            options: {{
                responsive: true,
                maintainAspectRatio: false,
                plugins: {{
                    legend: {{
                        labels: {{
                            color: '#d4d4d4'
                        }}
                    }}
                }},
                scales: {{
                    y: {{
                        beginAtZero: true,
                        ticks: {{
                            color: '#d4d4d4'
                        }},
                        grid: {{
                            color: '#3e3e42'
                        }}
                    }},
                    x: {{
                        ticks: {{
                            color: '#d4d4d4'
                        }},
                        grid: {{
                            color: '#3e3e42'
                        }}
                    }}
                }}
            }}
        }});
    </script>
</body>
</html>
"""
        return html

    def save_dashboard(self, output_path: Path | str | None = None):
        """
        Save dashboard HTML to file.

        Args:
            output_path: Where to save dashboard (defaults to storage_path/dashboard.html)
        """
        if output_path is None:
            output_path = self.storage_path / "dashboard.html"
        else:
            output_path = Path(output_path)

        html = self.generate_dashboard_html()
        output_path.write_text(html)

        return output_path
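
For illustration only (not part of the packaged file above): a minimal usage sketch of the TechDebtMetrics API defined in ctrlcode/metrics/tech_debt.py. The storage directory, violation type names, counts, and PR number below are hypothetical placeholders, not values shipped with the package.

from datetime import datetime
from ctrlcode.metrics.tech_debt import TechDebtMetrics, TechDebtSnapshot

# Hypothetical storage directory and project root, chosen only to exercise the API.
metrics = TechDebtMetrics(storage_path=".ctrlcode/metrics", project_path=".")

# Record one snapshot; the type names echo the package's linter modules but are placeholders.
snapshot = TechDebtSnapshot(
    timestamp=datetime.now().isoformat(),
    total_violations=12,
    violations_by_type={"hand_rolled_utils": 7, "yolo_parsing": 5},
    violations_by_file={"ctrlcode/server.py": 4},
    stale_docs_count=2,
    code_smells={"long_function": 3},
    cleanup_prs={"created": 1, "merged": 0, "rejected": 0},
)
metrics.record_snapshot(snapshot)

# Record a cleanup PR; the PR number is a placeholder.
metrics.record_cleanup_pr(
    pr_number=42,
    status="merged",
    files_changed=3,
    violations_fixed=5,
    created_at=datetime.now().isoformat(),
    merged_at=datetime.now().isoformat(),
)

print(metrics.get_current_state())     # latest snapshot with violations by type and code smells
print(metrics.get_cleanup_velocity())  # cleanup-PR counts and violations fixed over the last 30 days
metrics.save_dashboard()               # writes dashboard.html under the storage path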