ctrlcode-0.1.0-py3-none-any.whl
This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.
- ctrlcode/__init__.py +8 -0
- ctrlcode/agents/__init__.py +29 -0
- ctrlcode/agents/cleanup.py +388 -0
- ctrlcode/agents/communication.py +439 -0
- ctrlcode/agents/observability.py +421 -0
- ctrlcode/agents/react_loop.py +297 -0
- ctrlcode/agents/registry.py +211 -0
- ctrlcode/agents/result_parser.py +242 -0
- ctrlcode/agents/workflow.py +723 -0
- ctrlcode/analysis/__init__.py +28 -0
- ctrlcode/analysis/ast_diff.py +163 -0
- ctrlcode/analysis/bug_detector.py +149 -0
- ctrlcode/analysis/code_graphs.py +329 -0
- ctrlcode/analysis/semantic.py +205 -0
- ctrlcode/analysis/static.py +183 -0
- ctrlcode/analysis/synthesizer.py +281 -0
- ctrlcode/analysis/tests.py +189 -0
- ctrlcode/cleanup/__init__.py +16 -0
- ctrlcode/cleanup/auto_merge.py +350 -0
- ctrlcode/cleanup/doc_gardening.py +388 -0
- ctrlcode/cleanup/pr_automation.py +330 -0
- ctrlcode/cleanup/scheduler.py +356 -0
- ctrlcode/config.py +380 -0
- ctrlcode/embeddings/__init__.py +6 -0
- ctrlcode/embeddings/embedder.py +192 -0
- ctrlcode/embeddings/vector_store.py +213 -0
- ctrlcode/fuzzing/__init__.py +24 -0
- ctrlcode/fuzzing/analyzer.py +280 -0
- ctrlcode/fuzzing/budget.py +112 -0
- ctrlcode/fuzzing/context.py +665 -0
- ctrlcode/fuzzing/context_fuzzer.py +506 -0
- ctrlcode/fuzzing/derived_orchestrator.py +732 -0
- ctrlcode/fuzzing/oracle_adapter.py +135 -0
- ctrlcode/linters/__init__.py +11 -0
- ctrlcode/linters/hand_rolled_utils.py +221 -0
- ctrlcode/linters/yolo_parsing.py +217 -0
- ctrlcode/metrics/__init__.py +6 -0
- ctrlcode/metrics/dashboard.py +283 -0
- ctrlcode/metrics/tech_debt.py +663 -0
- ctrlcode/paths.py +68 -0
- ctrlcode/permissions.py +179 -0
- ctrlcode/providers/__init__.py +15 -0
- ctrlcode/providers/anthropic.py +138 -0
- ctrlcode/providers/base.py +77 -0
- ctrlcode/providers/openai.py +197 -0
- ctrlcode/providers/parallel.py +104 -0
- ctrlcode/server.py +871 -0
- ctrlcode/session/__init__.py +6 -0
- ctrlcode/session/baseline.py +57 -0
- ctrlcode/session/manager.py +967 -0
- ctrlcode/skills/__init__.py +10 -0
- ctrlcode/skills/builtin/commit.toml +29 -0
- ctrlcode/skills/builtin/docs.toml +25 -0
- ctrlcode/skills/builtin/refactor.toml +33 -0
- ctrlcode/skills/builtin/review.toml +28 -0
- ctrlcode/skills/builtin/test.toml +28 -0
- ctrlcode/skills/loader.py +111 -0
- ctrlcode/skills/registry.py +139 -0
- ctrlcode/storage/__init__.py +19 -0
- ctrlcode/storage/history_db.py +708 -0
- ctrlcode/tools/__init__.py +220 -0
- ctrlcode/tools/bash.py +112 -0
- ctrlcode/tools/browser.py +352 -0
- ctrlcode/tools/executor.py +153 -0
- ctrlcode/tools/explore.py +486 -0
- ctrlcode/tools/mcp.py +108 -0
- ctrlcode/tools/observability.py +561 -0
- ctrlcode/tools/registry.py +193 -0
- ctrlcode/tools/todo.py +291 -0
- ctrlcode/tools/update.py +266 -0
- ctrlcode/tools/webfetch.py +147 -0
- ctrlcode-0.1.0.dist-info/METADATA +93 -0
- ctrlcode-0.1.0.dist-info/RECORD +75 -0
- ctrlcode-0.1.0.dist-info/WHEEL +4 -0
- ctrlcode-0.1.0.dist-info/entry_points.txt +3 -0
ctrlcode/cleanup/doc_gardening.py
@@ -0,0 +1,388 @@
+"""Documentation gardening automation."""
+
+import logging
+import re
+import subprocess
+from dataclasses import dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class DocHealthReport:
+    """Documentation health report."""
+
+    total_docs: int
+    stale_docs: list[dict[str, Any]]
+    broken_links: list[dict[str, Any]]
+    orphaned_docs: list[dict[str, Any]]
+    undocumented_apis: list[dict[str, Any]]
+    overall_health_score: float
+
+
+class DocGardener:
+    """Automated documentation maintenance."""
+
+    def __init__(self, workspace_root: Path, docs_dir: str = "docs"):
+        """
+        Initialize doc gardener.
+
+        Args:
+            workspace_root: Root directory of workspace
+            docs_dir: Documentation directory (relative to workspace)
+        """
+        self.workspace_root = Path(workspace_root)
+        self.docs_dir = self.workspace_root / docs_dir
+        self.stale_threshold_days = 90
+
+    def scan_documentation(self) -> DocHealthReport:
+        """
+        Scan documentation and generate health report.
+
+        Returns:
+            Documentation health report
+        """
+        logger.info("Scanning documentation...")
+
+        stale_docs = self._find_stale_docs()
+        broken_links = self._find_broken_links()
+        orphaned_docs = self._find_orphaned_docs()
+        undocumented_apis = self._find_undocumented_apis()
+
+        total_docs = sum(1 for _ in self.docs_dir.rglob("*.md"))
+
+        # Calculate health score (0-100)
+        health_score = self._calculate_health_score(
+            total_docs, stale_docs, broken_links, orphaned_docs, undocumented_apis
+        )
+
+        return DocHealthReport(
+            total_docs=total_docs,
+            stale_docs=stale_docs,
+            broken_links=broken_links,
+            orphaned_docs=orphaned_docs,
+            undocumented_apis=undocumented_apis,
+            overall_health_score=health_score,
+        )
+
+    def _find_stale_docs(self) -> list[dict[str, Any]]:
+        """Find documentation files that haven't been updated recently."""
+        stale_docs = []
+
+        for md_file in self.docs_dir.rglob("*.md"):
+            try:
+                # Get last modified time from git
+                result = subprocess.run(
+                    ["git", "log", "-1", "--format=%ct", str(md_file)],
+                    cwd=self.workspace_root,
+                    capture_output=True,
+                    text=True,
+                    timeout=5,
+                )
+
+                if result.returncode == 0 and result.stdout.strip():
+                    last_modified = int(result.stdout.strip())
+                    last_modified_date = datetime.fromtimestamp(last_modified)
+                    days_old = (datetime.now() - last_modified_date).days
+
+                    if days_old > self.stale_threshold_days:
+                        stale_docs.append({
+                            "file": str(md_file.relative_to(self.workspace_root)),
+                            "days_old": days_old,
+                            "last_modified": last_modified_date.isoformat(),
+                        })
+            except Exception as e:
+                logger.debug(f"Error checking {md_file}: {e}")
+                continue
+
+        return sorted(stale_docs, key=lambda x: x["days_old"], reverse=True)
+
+    def _find_broken_links(self) -> list[dict[str, Any]]:
+        """Find broken cross-references in documentation."""
+        broken_links = []
+
+        for md_file in self.docs_dir.rglob("*.md"):
+            try:
+                content = md_file.read_text()
+
+                # Find all markdown links
+                links = re.findall(r'\[([^\]]+)\]\(([^\)]+)\)', content)
+
+                for link_text, link_url in links:
+                    # Skip external links
+                    if link_url.startswith(("http://", "https://", "#")):
+                        continue
+
+                    # Resolve relative link
+                    link_path = (md_file.parent / link_url).resolve()
+
+                    # Check if target exists
+                    if not link_path.exists():
+                        broken_links.append({
+                            "file": str(md_file.relative_to(self.workspace_root)),
+                            "link_text": link_text,
+                            "target": link_url,
+                            "line": self._find_line_number(content, link_url),
+                        })
+
+            except Exception as e:
+                logger.debug(f"Error checking links in {md_file}: {e}")
+                continue
+
+        return broken_links
+
+    def _find_orphaned_docs(self) -> list[dict[str, Any]]:
+        """Find documentation files that aren't linked from anywhere."""
+        all_docs = set(self.docs_dir.rglob("*.md"))
+        linked_docs = set()
+
+        # Find all links from all docs
+        for md_file in self.docs_dir.rglob("*.md"):
+            try:
+                content = md_file.read_text()
+                links = re.findall(r'\[([^\]]+)\]\(([^\)]+)\)', content)
+
+                for _, link_url in links:
+                    if not link_url.startswith(("http://", "https://", "#")):
+                        link_path = (md_file.parent / link_url).resolve()
+                        if link_path in all_docs:
+                            linked_docs.add(link_path)
+            except Exception:
+                continue
+
+        # Also consider README.md and index files as non-orphans
+        entry_points = {
+            self.docs_dir / "README.md",
+            self.docs_dir / "index.md",
+            self.docs_dir / "INDEX.md",
+        }
+        linked_docs.update(p for p in entry_points if p.exists())
+
+        orphaned = all_docs - linked_docs
+
+        return [
+            {"file": str(doc.relative_to(self.workspace_root))}
+            for doc in sorted(orphaned)
+        ]
+
+    def _find_undocumented_apis(self) -> list[dict[str, Any]]:
+        """Find public APIs that lack documentation."""
+        undocumented = []
+
+        # Scan Python files for public classes/functions
+        src_dir = self.workspace_root / "src"
+        if not src_dir.exists():
+            return []
+
+        for py_file in src_dir.rglob("*.py"):
+            # Skip test files and private modules
+            if "test" in str(py_file) or py_file.name.startswith("_"):
+                continue
+
+            try:
+                content = py_file.read_text()
+
+                # Find public classes
+                classes = re.findall(r'^class\s+([A-Z][A-Za-z0-9_]*)', content, re.MULTILINE)
+
+                # Find public functions
+                functions = re.findall(r'^def\s+([a-z][a-z0-9_]*)', content, re.MULTILINE)
+
+                # Check if documented (has docstring)
+                for name in classes + functions:
+                    # Simple heuristic: look for docstring after definition
+                    pattern = rf'(class|def)\s+{re.escape(name)}.*?:\s*("""|\'\'\')(.*?)("""|\'\'\')'
+                    if not re.search(pattern, content, re.DOTALL):
+                        undocumented.append({
+                            "file": str(py_file.relative_to(self.workspace_root)),
+                            "name": name,
+                            "type": "class" if name[0].isupper() else "function",
+                        })
+
+            except Exception as e:
+                logger.debug(f"Error checking {py_file}: {e}")
+                continue
+
+        return undocumented[:20]  # Limit to first 20
+
+    def _calculate_health_score(
+        self,
+        total_docs: int,
+        stale_docs: list,
+        broken_links: list,
+        orphaned_docs: list,
+        undocumented_apis: list,
+    ) -> float:
+        """
+        Calculate overall documentation health score.
+
+        Args:
+            total_docs: Total number of doc files
+            stale_docs: List of stale docs
+            broken_links: List of broken links
+            orphaned_docs: List of orphaned docs
+            undocumented_apis: List of undocumented APIs
+
+        Returns:
+            Health score (0-100)
+        """
+        if total_docs == 0:
+            return 100.0
+
+        # Start with perfect score
+        score = 100.0
+
+        # Deduct for issues (with different weights)
+        score -= (len(stale_docs) / total_docs) * 30  # Max 30 points
+        score -= min(len(broken_links) * 5, 20)  # Max 20 points
+        score -= (len(orphaned_docs) / total_docs) * 15  # Max 15 points
+        score -= min(len(undocumented_apis) * 2, 35)  # Max 35 points
+
+        return max(0.0, score)
+
+    def _find_line_number(self, content: str, search_text: str) -> int:
+        """Find line number of text in content."""
+        for i, line in enumerate(content.split("\n"), start=1):
+            if search_text in line:
+                return i
+        return 0
+
+    def generate_report_markdown(self, report: DocHealthReport) -> str:
+        """
+        Generate markdown report from health report.
+
+        Args:
+            report: Documentation health report
+
+        Returns:
+            Markdown formatted report
+        """
+        md = f"""# Documentation Health Report
+
+Generated: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
+
+## Summary
+
+- **Total Documentation Files**: {report.total_docs}
+- **Overall Health Score**: {report.overall_health_score:.1f}/100
+
+## Issues Found
+
+### Stale Documentation ({len(report.stale_docs)})
+
+Documentation not updated in >{self.stale_threshold_days} days:
+
+"""
+
+        for doc in report.stale_docs[:10]:  # Show top 10
+            md += f"- `{doc['file']}` - {doc['days_old']} days old\n"
+
+        if len(report.stale_docs) > 10:
+            md += f"\n...and {len(report.stale_docs) - 10} more\n"
+
+        md += f"""
+
+### Broken Links ({len(report.broken_links)})
+
+"""
+
+        for link in report.broken_links[:10]:
+            md += f"- `{link['file']}:{link['line']}` - [{link['link_text']}]({link['target']})\n"
+
+        if len(report.broken_links) > 10:
+            md += f"\n...and {len(report.broken_links) - 10} more\n"
+
+        md += f"""
+
+### Orphaned Documentation ({len(report.orphaned_docs)})
+
+Documentation files not linked from anywhere:
+
+"""
+
+        for doc in report.orphaned_docs[:10]:
+            md += f"- `{doc['file']}`\n"
+
+        if len(report.orphaned_docs) > 10:
+            md += f"\n...and {len(report.orphaned_docs) - 10} more\n"
+
+        md += f"""
+
+### Undocumented APIs ({len(report.undocumented_apis)})
+
+Public classes/functions without docstrings:
+
+"""
+
+        for api in report.undocumented_apis[:10]:
+            md += f"- `{api['file']}` - {api['type']} `{api['name']}`\n"
+
+        if len(report.undocumented_apis) > 10:
+            md += f"\n...and {len(report.undocumented_apis) - 10} more\n"
+
+        md += """
+
+## Recommendations
+
+"""
+
+        if report.stale_docs:
+            md += "1. Review and update stale documentation\n"
+        if report.broken_links:
+            md += "2. Fix broken cross-references\n"
+        if report.orphaned_docs:
+            md += "3. Link orphaned docs from index or archive them\n"
+        if report.undocumented_apis:
+            md += "4. Add docstrings to public APIs\n"
+
+        if report.overall_health_score < 70:
+            md += "\n⚠️ **Documentation health is below 70%** - prioritize improvements\n"
+        elif report.overall_health_score >= 90:
+            md += "\n✅ **Documentation health is excellent!**\n"
+
+        return md
+
+    def auto_fix_stale_dates(self, dry_run: bool = False) -> list[dict[str, Any]]:
+        """
+        Auto-fix stale dates by updating "last reviewed" markers.
+
+        Args:
+            dry_run: If True, don't actually modify files
+
+        Returns:
+            List of files updated
+        """
+        updated_files = []
+
+        for md_file in self.docs_dir.rglob("*.md"):
+            try:
+                content = md_file.read_text()
+
+                # Look for "last reviewed" marker
+                pattern = r'(Last reviewed|Last updated):\s*(\d{4}-\d{2}-\d{2})'
+                match = re.search(pattern, content, re.IGNORECASE)
+
+                if match:
+                    old_date = match.group(2)
+                    new_date = datetime.now().strftime("%Y-%m-%d")
+
+                    if old_date != new_date:
+                        new_content = content.replace(match.group(0), f"{match.group(1)}: {new_date}")
+
+                        if not dry_run:
+                            md_file.write_text(new_content)
+
+                        updated_files.append({
+                            "file": str(md_file.relative_to(self.workspace_root)),
+                            "old_date": old_date,
+                            "new_date": new_date,
+                        })
+
+            except Exception as e:
+                logger.debug(f"Error updating {md_file}: {e}")
+                continue
+
+        return updated_files
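
For orientation, here is a minimal usage sketch of the module above. `DocGardener`, its constructor signature, and the method names come straight from the diff; the import path follows the wheel's layout (`ctrlcode/cleanup/doc_gardening.py`), but this driver itself is an assumption, since the package may wire the gardener up differently (e.g. via `ctrlcode/cleanup/scheduler.py`).

```python
# Hypothetical driver for DocGardener; a sketch, not ctrlcode's documented CLI.
from pathlib import Path

from ctrlcode.cleanup.doc_gardening import DocGardener

gardener = DocGardener(Path("."), docs_dir="docs")  # run from a git checkout
report = gardener.scan_documentation()

# Worked scoring example: with 10 docs, 2 stale, 1 broken link, 1 orphan,
# and 3 undocumented APIs, the deductions are (2/10)*30 + min(1*5, 20)
# + (1/10)*15 + min(3*2, 35) = 6 + 5 + 1.5 + 6, giving a score of 81.5.
print(f"Health: {report.overall_health_score:.1f}/100")
print(gardener.generate_report_markdown(report))

# Preview "Last reviewed"/"Last updated" date bumps without writing files.
for change in gardener.auto_fix_stale_dates(dry_run=True):
    print(f"{change['file']}: {change['old_date']} -> {change['new_date']}")
```

Note that staleness is derived from `git log` timestamps, so `_find_stale_docs` only reports anything when the workspace is a git checkout; outside one, the per-file errors are swallowed at debug level.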