anvil-dev-framework 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. package/README.md +719 -0
  2. package/VERSION +1 -0
  3. package/docs/ANVIL-REPO-IMPLEMENTATION-PLAN.md +441 -0
  4. package/docs/FIRST-SKILL-TUTORIAL.md +408 -0
  5. package/docs/INSTALLATION-RETRO-NOTES.md +458 -0
  6. package/docs/INSTALLATION.md +984 -0
  7. package/docs/anvil-hud.md +469 -0
  8. package/docs/anvil-init.md +255 -0
  9. package/docs/anvil-state.md +210 -0
  10. package/docs/boris-cherny-ralph-wiggum-insights.md +608 -0
  11. package/docs/command-reference.md +2022 -0
  12. package/docs/hooks-tts.md +368 -0
  13. package/docs/implementation-guide.md +810 -0
  14. package/docs/linear-github-integration.md +247 -0
  15. package/docs/local-issues.md +677 -0
  16. package/docs/patterns/README.md +419 -0
  17. package/docs/planning-responsibilities.md +139 -0
  18. package/docs/session-workflow.md +573 -0
  19. package/docs/simplification-plan-template.md +297 -0
  20. package/docs/simplification-principles.md +129 -0
  21. package/docs/specifications/CCS-RALPH-INTEGRATION-DESIGN.md +633 -0
  22. package/docs/specifications/CCS-RESEARCH-REPORT.md +169 -0
  23. package/docs/specifications/PLAN-ANV-verification-ralph-wiggum.md +403 -0
  24. package/docs/specifications/PLAN-parallel-tracks-anvil-memory-ccs.md +494 -0
  25. package/docs/specifications/SPEC-ANV-VRW/component-01-verify.md +208 -0
  26. package/docs/specifications/SPEC-ANV-VRW/component-02-stop-gate.md +226 -0
  27. package/docs/specifications/SPEC-ANV-VRW/component-03-posttooluse.md +209 -0
  28. package/docs/specifications/SPEC-ANV-VRW/component-04-ralph-wiggum.md +604 -0
  29. package/docs/specifications/SPEC-ANV-VRW/component-05-atomic-actions.md +311 -0
  30. package/docs/specifications/SPEC-ANV-VRW/component-06-verify-subagent.md +264 -0
  31. package/docs/specifications/SPEC-ANV-VRW/component-07-claude-md.md +363 -0
  32. package/docs/specifications/SPEC-ANV-VRW/index.md +182 -0
  33. package/docs/specifications/SPEC-ANV-anvil-memory.md +573 -0
  34. package/docs/specifications/SPEC-ANV-context-checkpoints.md +781 -0
  35. package/docs/specifications/SPEC-ANV-verification-ralph-wiggum.md +789 -0
  36. package/docs/sync.md +122 -0
  37. package/global/CLAUDE.md +140 -0
  38. package/global/agents/verify-app.md +164 -0
  39. package/global/commands/anvil-settings.md +527 -0
  40. package/global/commands/anvil-sync.md +121 -0
  41. package/global/commands/change.md +197 -0
  42. package/global/commands/clarify.md +252 -0
  43. package/global/commands/cleanup.md +292 -0
  44. package/global/commands/commit-push-pr.md +207 -0
  45. package/global/commands/decay-review.md +127 -0
  46. package/global/commands/discover.md +158 -0
  47. package/global/commands/doc-coverage.md +122 -0
  48. package/global/commands/evidence.md +307 -0
  49. package/global/commands/explore.md +121 -0
  50. package/global/commands/force-exit.md +135 -0
  51. package/global/commands/handoff.md +191 -0
  52. package/global/commands/healthcheck.md +302 -0
  53. package/global/commands/hud.md +84 -0
  54. package/global/commands/insights.md +319 -0
  55. package/global/commands/linear-setup.md +184 -0
  56. package/global/commands/lint-fix.md +198 -0
  57. package/global/commands/orient.md +510 -0
  58. package/global/commands/plan.md +228 -0
  59. package/global/commands/ralph.md +346 -0
  60. package/global/commands/ready.md +182 -0
  61. package/global/commands/release.md +305 -0
  62. package/global/commands/retro.md +96 -0
  63. package/global/commands/shard.md +166 -0
  64. package/global/commands/spec.md +227 -0
  65. package/global/commands/sprint.md +184 -0
  66. package/global/commands/tasks.md +228 -0
  67. package/global/commands/test-and-commit.md +151 -0
  68. package/global/commands/validate.md +132 -0
  69. package/global/commands/verify.md +251 -0
  70. package/global/commands/weekly-review.md +156 -0
  71. package/global/hooks/__pycache__/ralph_context_monitor.cpython-314.pyc +0 -0
  72. package/global/hooks/__pycache__/statusline_agent_sync.cpython-314.pyc +0 -0
  73. package/global/hooks/anvil_memory_observe.ts +322 -0
  74. package/global/hooks/anvil_memory_session.ts +166 -0
  75. package/global/hooks/anvil_memory_stop.ts +187 -0
  76. package/global/hooks/parse_transcript.py +116 -0
  77. package/global/hooks/post_merge_cleanup.sh +132 -0
  78. package/global/hooks/post_tool_format.sh +215 -0
  79. package/global/hooks/ralph_context_monitor.py +240 -0
  80. package/global/hooks/ralph_stop.sh +502 -0
  81. package/global/hooks/statusline.sh +1110 -0
  82. package/global/hooks/statusline_agent_sync.py +224 -0
  83. package/global/hooks/stop_gate.sh +250 -0
  84. package/global/lib/.claude/anvil-state.json +21 -0
  85. package/global/lib/__pycache__/agent_registry.cpython-314.pyc +0 -0
  86. package/global/lib/__pycache__/claim_service.cpython-314.pyc +0 -0
  87. package/global/lib/__pycache__/coderabbit_service.cpython-314.pyc +0 -0
  88. package/global/lib/__pycache__/config_service.cpython-314.pyc +0 -0
  89. package/global/lib/__pycache__/coordination_service.cpython-314.pyc +0 -0
  90. package/global/lib/__pycache__/doc_coverage_service.cpython-314.pyc +0 -0
  91. package/global/lib/__pycache__/gate_logger.cpython-314.pyc +0 -0
  92. package/global/lib/__pycache__/github_service.cpython-314.pyc +0 -0
  93. package/global/lib/__pycache__/hygiene_service.cpython-314.pyc +0 -0
  94. package/global/lib/__pycache__/issue_models.cpython-314.pyc +0 -0
  95. package/global/lib/__pycache__/issue_provider.cpython-314.pyc +0 -0
  96. package/global/lib/__pycache__/linear_data_service.cpython-314.pyc +0 -0
  97. package/global/lib/__pycache__/linear_provider.cpython-314.pyc +0 -0
  98. package/global/lib/__pycache__/local_provider.cpython-314.pyc +0 -0
  99. package/global/lib/__pycache__/quality_service.cpython-314.pyc +0 -0
  100. package/global/lib/__pycache__/ralph_state.cpython-314.pyc +0 -0
  101. package/global/lib/__pycache__/state_manager.cpython-314.pyc +0 -0
  102. package/global/lib/__pycache__/transcript_parser.cpython-314.pyc +0 -0
  103. package/global/lib/__pycache__/verification_runner.cpython-314.pyc +0 -0
  104. package/global/lib/__pycache__/verify_iteration.cpython-314.pyc +0 -0
  105. package/global/lib/__pycache__/verify_subagent.cpython-314.pyc +0 -0
  106. package/global/lib/agent_registry.py +995 -0
  107. package/global/lib/anvil-state.sh +435 -0
  108. package/global/lib/claim_service.py +515 -0
  109. package/global/lib/coderabbit_service.py +314 -0
  110. package/global/lib/config_service.py +423 -0
  111. package/global/lib/coordination_service.py +331 -0
  112. package/global/lib/doc_coverage_service.py +1305 -0
  113. package/global/lib/gate_logger.py +316 -0
  114. package/global/lib/github_service.py +310 -0
  115. package/global/lib/handoff_generator.py +775 -0
  116. package/global/lib/hygiene_service.py +712 -0
  117. package/global/lib/issue_models.py +257 -0
  118. package/global/lib/issue_provider.py +339 -0
  119. package/global/lib/linear_data_service.py +210 -0
  120. package/global/lib/linear_provider.py +987 -0
  121. package/global/lib/linear_provider.py.backup +671 -0
  122. package/global/lib/local_provider.py +486 -0
  123. package/global/lib/orient_fast.py +457 -0
  124. package/global/lib/quality_service.py +470 -0
  125. package/global/lib/ralph_prompt_generator.py +563 -0
  126. package/global/lib/ralph_state.py +1202 -0
  127. package/global/lib/state_manager.py +417 -0
  128. package/global/lib/transcript_parser.py +597 -0
  129. package/global/lib/verification_runner.py +557 -0
  130. package/global/lib/verify_iteration.py +490 -0
  131. package/global/lib/verify_subagent.py +250 -0
  132. package/global/skills/README.md +155 -0
  133. package/global/skills/quality-gates/SKILL.md +252 -0
  134. package/global/skills/skill-template/SKILL.md +109 -0
  135. package/global/skills/testing-strategies/SKILL.md +337 -0
  136. package/global/templates/CHANGE-template.md +105 -0
  137. package/global/templates/HANDOFF-template.md +63 -0
  138. package/global/templates/PLAN-template.md +111 -0
  139. package/global/templates/SPEC-template.md +93 -0
  140. package/global/templates/ralph/PROMPT.md.template +89 -0
  141. package/global/templates/ralph/fix_plan.md.template +31 -0
  142. package/global/templates/ralph/progress.txt.template +23 -0
  143. package/global/tests/__pycache__/test_doc_coverage.cpython-314.pyc +0 -0
  144. package/global/tests/test_doc_coverage.py +520 -0
  145. package/global/tests/test_issue_models.py +299 -0
  146. package/global/tests/test_local_provider.py +323 -0
  147. package/global/tools/README.md +178 -0
  148. package/global/tools/__pycache__/anvil-hud.cpython-314.pyc +0 -0
  149. package/global/tools/anvil-hud.py +3622 -0
  150. package/global/tools/anvil-hud.py.bak +3318 -0
  151. package/global/tools/anvil-issue.py +432 -0
  152. package/global/tools/anvil-memory/CLAUDE.md +49 -0
  153. package/global/tools/anvil-memory/README.md +42 -0
  154. package/global/tools/anvil-memory/bun.lock +25 -0
  155. package/global/tools/anvil-memory/bunfig.toml +9 -0
  156. package/global/tools/anvil-memory/package.json +23 -0
  157. package/global/tools/anvil-memory/src/__tests__/ccs/context-monitor.test.ts +535 -0
  158. package/global/tools/anvil-memory/src/__tests__/ccs/edge-cases.test.ts +645 -0
  159. package/global/tools/anvil-memory/src/__tests__/ccs/fixtures.ts +363 -0
  160. package/global/tools/anvil-memory/src/__tests__/ccs/index.ts +8 -0
  161. package/global/tools/anvil-memory/src/__tests__/ccs/integration.test.ts +417 -0
  162. package/global/tools/anvil-memory/src/__tests__/ccs/prompt-generator.test.ts +571 -0
  163. package/global/tools/anvil-memory/src/__tests__/ccs/ralph-stop.test.ts +440 -0
  164. package/global/tools/anvil-memory/src/__tests__/ccs/test-utils.ts +252 -0
  165. package/global/tools/anvil-memory/src/__tests__/commands.test.ts +657 -0
  166. package/global/tools/anvil-memory/src/__tests__/db.test.ts +641 -0
  167. package/global/tools/anvil-memory/src/__tests__/hooks.test.ts +272 -0
  168. package/global/tools/anvil-memory/src/__tests__/performance.test.ts +427 -0
  169. package/global/tools/anvil-memory/src/__tests__/test-utils.ts +113 -0
  170. package/global/tools/anvil-memory/src/commands/checkpoint.ts +197 -0
  171. package/global/tools/anvil-memory/src/commands/get.ts +115 -0
  172. package/global/tools/anvil-memory/src/commands/init.ts +94 -0
  173. package/global/tools/anvil-memory/src/commands/observe.ts +163 -0
  174. package/global/tools/anvil-memory/src/commands/search.ts +112 -0
  175. package/global/tools/anvil-memory/src/db.ts +638 -0
  176. package/global/tools/anvil-memory/src/index.ts +205 -0
  177. package/global/tools/anvil-memory/src/types.ts +122 -0
  178. package/global/tools/anvil-memory/tsconfig.json +29 -0
  179. package/global/tools/ralph-loop.sh +359 -0
  180. package/package.json +45 -0
  181. package/scripts/anvil +822 -0
  182. package/scripts/extract_patterns.py +222 -0
  183. package/scripts/init-project.sh +541 -0
  184. package/scripts/install.sh +229 -0
  185. package/scripts/postinstall.js +41 -0
  186. package/scripts/rollback.sh +188 -0
  187. package/scripts/sync.sh +623 -0
  188. package/scripts/test-statusline.sh +248 -0
  189. package/scripts/update_claude_md.py +224 -0
  190. package/scripts/verify.sh +255 -0
@@ -0,0 +1,1305 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ doc_coverage_service.py - Documentation Coverage System (ANV-31)
4
+
5
+ Provides intelligent tracking and enforcement of documentation coverage:
6
+ - Maps source files to expected documentation
7
+ - Extracts public exports from Python/TypeScript files
8
+ - Calculates documentation coverage metrics
9
+ - Detects gaps when code changes without doc updates
10
+
11
+ Usage:
12
+ from doc_coverage_service import DocCoverageService
13
+
14
+ service = DocCoverageService()
15
+ coverage = service.calculate_coverage()
16
+ print(f"Coverage: {coverage.percent}%")
17
+
18
+ CLI:
19
+ python doc_coverage_service.py --report
20
+ python doc_coverage_service.py --check
21
+ python doc_coverage_service.py --gaps
22
+ """
23
+
24
+ import ast
25
+ import fnmatch
26
+ import json
27
+ import os
28
+ import re
29
+ from dataclasses import dataclass, field
30
+ from datetime import datetime, timezone
31
+ from pathlib import Path
32
+ from typing import Any, ClassVar, Dict, List, Literal, Optional, Set
33
+
34
+ # Import state manager for anvil-state.json integration
35
+ try:
36
+ from state_manager import on_doc_coverage as _sync_state
37
+ STATE_MANAGER_AVAILABLE = True
38
+ except ImportError:
39
+ try:
40
+ from .state_manager import on_doc_coverage as _sync_state
41
+ STATE_MANAGER_AVAILABLE = True
42
+ except ImportError:
43
+ STATE_MANAGER_AVAILABLE = False
44
+
45
+
46
+ # =============================================================================
47
+ # Data Classes
48
+ # =============================================================================
49
+
50
@dataclass
class DocMapping:
    """Maps a glob of source files to the documentation expected to cover them."""
    source: str            # Glob pattern for source files (e.g., "global/lib/*.py")
    docs: str              # Doc path template (e.g., "docs/api/{basename}.md")
    doc_type: str = "api"  # Kind of documentation: api, command, hook, skill

    def matches(self, source_path: str) -> bool:
        """Return True when *source_path* falls under this mapping's glob."""
        return fnmatch.fnmatch(source_path, self.source)

    def get_doc_path(self, source_path: str) -> str:
        """Expand the doc template using pieces of *source_path*."""
        src = Path(source_path)
        substitutions = {
            "basename": src.stem,
            "dirname": src.parent.name,
            "filename": src.name,
            "ext": src.suffix,
        }
        return self.docs.format(**substitutions)
70
+
71
+
72
+ try:
73
+ import yaml
74
+ HAS_YAML = True
75
+ except ImportError:
76
+ HAS_YAML = False
77
+
78
+
79
@dataclass
class DocCoverageConfig:
    """Configuration for documentation coverage checking.

    Attributes mirror the optional keys of the YAML config file; ``from_file``
    loads overrides on top of ``default()``.
    """
    enabled: bool = True
    # ANV-218: gap detection can be disabled independently of coverage checks
    gap_detection_enabled: bool = True
    # Coverage percentages at which status degrades (see CoverageResult.status)
    thresholds: Dict[str, int] = field(default_factory=lambda: {
        "warning": 80,
        "critical": 60,
    })
    mappings: List["DocMapping"] = field(default_factory=list)
    # Glob patterns for files that never require documentation
    exclude: List[str] = field(default_factory=lambda: [
        "**/test_*.py",
        "**/__pycache__/**",
        "**/node_modules/**",
        "**/*.test.ts",
        "**/*.spec.ts",
    ])
    sensitivity: Literal["aggressive", "balanced", "quiet"] = "balanced"

    # Accepted sensitivity values. A typo in the config file is ignored rather
    # than accepted: an unknown value would later raise KeyError in
    # GapDetector's SENSITIVITY_THRESHOLDS lookup.
    VALID_SENSITIVITIES: ClassVar[Set[str]] = {"aggressive", "balanced", "quiet"}

    @classmethod
    def from_file(cls, config_path: str) -> "DocCoverageConfig":
        """Load configuration from a YAML file.

        Args:
            config_path: Path to YAML configuration file

        Returns:
            DocCoverageConfig loaded from file, or the default configuration
            if the file doesn't exist, PyYAML is unavailable, or the file is
            malformed (loading is deliberately best-effort).
        """
        path = Path(config_path)
        if not path.exists() or not HAS_YAML:
            return cls.default()

        try:
            with open(path, "r") as f:
                data = yaml.safe_load(f) or {}

            config = cls.default()

            if "enabled" in data:
                config.enabled = bool(data["enabled"])

            if "gap_detection_enabled" in data:
                config.gap_detection_enabled = bool(data["gap_detection_enabled"])

            if "thresholds" in data:
                config.thresholds.update(data["thresholds"])

            # Fix: only accept known sensitivity levels; previously any string
            # was assigned and would crash GapDetector later.
            if data.get("sensitivity") in cls.VALID_SENSITIVITIES:
                config.sensitivity = data["sensitivity"]

            if "exclude" in data:
                config.exclude = data["exclude"]

            if "mappings" in data:
                config.mappings = [
                    DocMapping(
                        source=m.get("source", ""),
                        docs=m.get("docs", ""),
                        doc_type=m.get("type", "api"),
                    )
                    for m in data["mappings"]
                ]

            return config
        except Exception:
            # Config loading is best-effort; silently fall back to defaults
            # to allow the tool to work even with malformed config files
            return cls.default()

    @classmethod
    def default(cls) -> "DocCoverageConfig":
        """Create default configuration with standard source->doc mappings."""
        return cls(
            mappings=[
                DocMapping(
                    source="global/lib/*.py",
                    docs="docs/api/{basename}.md",
                    doc_type="api",
                ),
                DocMapping(
                    source=".claude/commands/*.md",
                    docs="docs/command-reference.md",
                    doc_type="command",
                ),
                DocMapping(
                    source="global/commands/*.md",
                    docs="docs/command-reference.md",
                    doc_type="command",
                ),
                DocMapping(
                    source=".claude/hooks/*.py",
                    docs=".claude/hooks/README.md",
                    doc_type="hook",
                ),
                DocMapping(
                    source="global/hooks/*.py",
                    docs="global/hooks/README.md",
                    doc_type="hook",
                ),
                DocMapping(
                    # Fix: skill files are named SKILL.md (uppercase) in this
                    # repo; the previous lowercase "skill.md" pattern never
                    # matched on case-sensitive filesystems.
                    source="global/skills/*/SKILL.md",
                    docs="docs/skills/{dirname}.md",
                    doc_type="skill",
                ),
            ]
        )
186
+
187
+
188
@dataclass
class DocExport:
    """A public symbol extracted from a source file."""
    name: str                        # Export name (function, class, etc.)
    export_type: str                 # "function", "class", "variable", "command"
    source_file: str                 # Source file path
    line_number: int                 # Line number in source
    docstring: Optional[str] = None  # Extracted docstring, when present
    signature: Optional[str] = None  # Function/method signature, when known

    @property
    def qualified_name(self) -> str:
        """Fully qualified name in ``file:export`` form."""
        return "{}:{}".format(self.source_file, self.name)
202
+
203
+
204
@dataclass
class DocGap:
    """A single documentation gap: an export lacking adequate documentation."""
    export: "DocExport"     # The undocumented export
    expected_doc: str       # Where the documentation is expected to live
    doc_type: str           # Type of documentation needed
    severity: Literal["missing", "stale", "incomplete"] = "missing"
    suggestion: Optional[str] = None  # Suggested remediation, when available
212
+
213
+
214
@dataclass
class CoverageResult:
    """Outcome of a documentation-coverage calculation."""
    total_exports: int
    documented_exports: int
    coverage_percent: float
    gaps: List["DocGap"]
    by_type: Dict[str, Dict[str, int]]  # Coverage counters keyed by doc type
    timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())

    @property
    def status(self) -> Literal["healthy", "warning", "critical"]:
        """Health status derived from coverage_percent.

        Thresholds (matches DocCoverageConfig defaults):
        - healthy: >= 80%
        - warning: >= 60%
        - critical: < 60%
        """
        if self.coverage_percent < 60:
            return "critical"
        if self.coverage_percent < 80:
            return "warning"
        return "healthy"

    @property
    def status_emoji(self) -> str:
        """Emoji matching ``status``."""
        emoji_for = {"healthy": "✅", "warning": "⚠️", "critical": "❌"}
        return emoji_for[self.status]
243
+
244
+
245
@dataclass
class StaleDocWarning:
    """Flags documentation that is older than the source it describes."""
    source_file: str     # Source file that changed
    doc_file: str        # Related documentation file
    source_mtime: float  # Source file modification time
    doc_mtime: float     # Doc file modification time
    days_stale: int      # Whole days the doc lags behind the source
    severity: Literal["info", "warning", "critical"] = "warning"

    @property
    def message(self) -> str:
        """Human-readable warning message."""
        return (f"Documentation may be stale: {self.doc_file} "
                f"(last updated {self.days_stale} days ago, "
                f"source changed more recently)")
263
+
264
+
265
@dataclass
class GapDetectionResult:
    """Aggregated output of a gap-detection pass."""
    stale_docs: List["StaleDocWarning"]
    changed_files_without_docs: List[str]
    total_warnings: int
    sensitivity: Literal["aggressive", "balanced", "quiet"]
    timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())

    def to_context_block(self) -> str:
        """Render all warnings as a markdown block for session-context injection.

        Returns an empty string when there is nothing to report; otherwise the
        changed-files list is capped at five entries with an overflow note.
        """
        if not self.total_warnings:
            return ""

        out = ["## Documentation Coverage Warnings\n"]

        if self.stale_docs:
            out.append("### Potentially Stale Documentation\n")
            out.extend(f"- {w.message}" for w in self.stale_docs)
            out.append("")

        changed = self.changed_files_without_docs
        if changed:
            out.append("### Changed Files Without Documentation\n")
            out.extend(f"- `{f}`" for f in changed[:5])
            if len(changed) > 5:
                out.append(f"- ... and {len(changed) - 5} more")
            out.append("")

        out.append("Run `/doc-coverage --gaps` for details.\n")
        return "\n".join(out)
297
+
298
+
299
+ # =============================================================================
300
+ # GapDetector - Detects Stale and Missing Documentation
301
+ # =============================================================================
302
+
303
class GapDetector:
    """Detects documentation gaps by analyzing file changes and timestamps.

    Sensitivity levels:
    - aggressive: Warn on any source change without doc update
    - balanced: Warn if source is >7 days newer than doc (default)
    - quiet: Warn if source is >30 days newer than doc
    """

    SENSITIVITY_THRESHOLDS: ClassVar[Dict[str, int]] = {
        "aggressive": 0,
        "balanced": 7,
        "quiet": 30,
    }

    def __init__(
        self,
        config: "DocCoverageConfig",
        sensitivity: Literal["aggressive", "balanced", "quiet"] = "balanced",
    ):
        """Initialize the gap detector.

        Args:
            config: Documentation coverage configuration
            sensitivity: How sensitive to be about stale docs

        Raises:
            KeyError: If sensitivity is not a recognized level.
        """
        self.config = config
        self.sensitivity = sensitivity
        self.threshold_days = self.SENSITIVITY_THRESHOLDS[sensitivity]

    def _is_excluded(self, rel_source: str) -> bool:
        """Return True when *rel_source* matches any configured exclude glob.

        Bug fix: exclusion previously used substring containment
        (``excl in rel_source``), which can essentially never match glob
        patterns such as ``**/test_*.py`` — exclusions were silently ignored.
        """
        return any(fnmatch.fnmatch(rel_source, pattern)
                   for pattern in self.config.exclude)

    def detect_stale_docs(self, project_path: str = ".") -> List["StaleDocWarning"]:
        """Detect documentation that may be stale based on file timestamps.

        Args:
            project_path: Root path of the project

        Returns:
            List of warnings about potentially stale documentation
        """
        warnings: List["StaleDocWarning"] = []
        project = Path(project_path)

        for mapping in self.config.mappings:
            for source_path in project.glob(mapping.source):
                if not source_path.is_file():
                    continue

                rel_source = str(source_path.relative_to(project))
                if self._is_excluded(rel_source):
                    continue

                doc_path_str = mapping.get_doc_path(rel_source)
                doc_path = project / doc_path_str
                if not doc_path.exists():
                    # Missing docs are a coverage gap, not a staleness issue.
                    continue

                source_mtime = source_path.stat().st_mtime
                doc_mtime = doc_path.stat().st_mtime
                if source_mtime <= doc_mtime:
                    continue

                days_stale = int((source_mtime - doc_mtime) / 86400)
                if days_stale < self.threshold_days:
                    continue

                # Severity escalates with how far the doc lags behind.
                if days_stale > 30:
                    sev = "critical"
                elif days_stale > 7:
                    sev = "warning"
                else:
                    sev = "info"

                warnings.append(StaleDocWarning(
                    source_file=rel_source,
                    doc_file=doc_path_str,
                    source_mtime=source_mtime,
                    doc_mtime=doc_mtime,
                    days_stale=days_stale,
                    severity=sev,
                ))

        return warnings

    def detect_changed_without_docs(
        self,
        project_path: str = ".",
        since_days: int = 7,
    ) -> List[str]:
        """Detect recently changed source files without documentation.

        Uses ``git log`` to list files touched within *since_days*; returns an
        empty list when git is unavailable, times out, or fails.

        NOTE(review): the ``not f.startswith(".")`` filter below also drops
        paths under dot-directories such as ``.claude/`` even though some
        default mappings target them — confirm whether that is intended.

        Args:
            project_path: Root path of the project
            since_days: Look at changes within this many days

        Returns:
            List of source file paths that changed without doc updates
        """
        import subprocess

        project = Path(project_path)

        try:
            result = subprocess.run(
                ["git", "log", f"--since={since_days} days ago",
                 "--name-only", "--pretty=format:"],
                cwd=project,
                capture_output=True,
                text=True,
                timeout=30,
            )
            if result.returncode != 0:
                return []

            git_changed = {
                f.strip() for f in result.stdout.split("\n")
                if f.strip() and not f.startswith(".")
            }
        except (subprocess.TimeoutExpired, FileNotFoundError):
            return []

        changed_files: List[str] = []
        for mapping in self.config.mappings:
            for source_path in project.glob(mapping.source):
                if not source_path.is_file():
                    continue

                rel_source = str(source_path.relative_to(project))
                if rel_source not in git_changed:
                    continue
                if self._is_excluded(rel_source):
                    continue

                doc_path = project / mapping.get_doc_path(rel_source)
                if not doc_path.exists():
                    changed_files.append(rel_source)

        return changed_files

    def analyze(self, project_path: str = ".") -> "GapDetectionResult":
        """Run full gap detection analysis.

        Args:
            project_path: Root path of the project

        Returns:
            Complete gap detection result with all warnings
        """
        stale_docs = self.detect_stale_docs(project_path)
        changed_without_docs = self.detect_changed_without_docs(project_path)

        return GapDetectionResult(
            stale_docs=stale_docs,
            changed_files_without_docs=changed_without_docs,
            total_warnings=len(stale_docs) + len(changed_without_docs),
            sensitivity=self.sensitivity,
        )
467
+
468
+
469
+ # =============================================================================
470
+ # DocIndexer - Scans and Extracts Exports
471
+ # =============================================================================
472
+
473
+ class DocIndexer:
474
+ """Scans source files and extracts public exports."""
475
+
476
+ def __init__(self, config: DocCoverageConfig):
477
+ """Initialize the indexer with configuration.
478
+
479
+ Args:
480
+ config: Documentation coverage configuration
481
+ """
482
+ self.config = config
483
+ self._exports_cache: Dict[str, List[DocExport]] = {}
484
+
485
+ def scan_directory(self, root_path: str) -> List[DocExport]:
486
+ """Scan a directory for source files and extract exports.
487
+
488
+ Args:
489
+ root_path: Root directory to scan
490
+
491
+ Returns:
492
+ List of discovered exports
493
+ """
494
+ all_exports: List[DocExport] = []
495
+ root = Path(root_path)
496
+
497
+ # Find all matching source files
498
+ for mapping in self.config.mappings:
499
+ # Handle glob pattern
500
+ for source_file in root.glob(mapping.source):
501
+ if self._is_excluded(str(source_file)):
502
+ continue
503
+
504
+ relative_path = str(source_file.relative_to(root))
505
+ exports = self.extract_exports(str(source_file), relative_path)
506
+ all_exports.extend(exports)
507
+
508
+ return all_exports
509
+
510
+ def extract_exports(self, file_path: str, relative_path: str) -> List[DocExport]:
511
+ """Extract public exports from a file.
512
+
513
+ Args:
514
+ file_path: Absolute path to the file
515
+ relative_path: Relative path for display
516
+
517
+ Returns:
518
+ List of exports from the file
519
+ """
520
+ # Check cache
521
+ if relative_path in self._exports_cache:
522
+ return self._exports_cache[relative_path]
523
+
524
+ exports: List[DocExport] = []
525
+
526
+ if file_path.endswith(".py"):
527
+ exports = self._extract_python_exports(file_path, relative_path)
528
+ elif file_path.endswith(".md"):
529
+ exports = self._extract_markdown_command(file_path, relative_path)
530
+ elif file_path.endswith((".ts", ".tsx", ".js", ".jsx")):
531
+ exports = self._extract_typescript_exports(file_path, relative_path)
532
+
533
+ self._exports_cache[relative_path] = exports
534
+ return exports
535
+
536
+ def _extract_python_exports(self, file_path: str, relative_path: str) -> List[DocExport]:
537
+ """Extract public exports from a Python file using AST.
538
+
539
+ Args:
540
+ file_path: Path to Python file
541
+ relative_path: Relative path for display
542
+
543
+ Returns:
544
+ List of public exports
545
+ """
546
+ exports: List[DocExport] = []
547
+
548
+ try:
549
+ with open(file_path, "r", encoding="utf-8") as f:
550
+ content = f.read()
551
+
552
+ tree = ast.parse(content, filename=file_path)
553
+
554
+ for node in ast.iter_child_nodes(tree):
555
+ # Skip private (underscore-prefixed) names
556
+ name = getattr(node, "name", None)
557
+ if name and name.startswith("_") and not name.startswith("__"):
558
+ continue
559
+
560
+ if isinstance(node, ast.FunctionDef):
561
+ # Public function
562
+ docstring = ast.get_docstring(node)
563
+ signature = self._get_function_signature(node)
564
+ exports.append(DocExport(
565
+ name=node.name,
566
+ export_type="function",
567
+ source_file=relative_path,
568
+ line_number=node.lineno,
569
+ docstring=docstring,
570
+ signature=signature
571
+ ))
572
+
573
+ elif isinstance(node, ast.AsyncFunctionDef):
574
+ # Async function
575
+ docstring = ast.get_docstring(node)
576
+ signature = self._get_function_signature(node, async_func=True)
577
+ exports.append(DocExport(
578
+ name=node.name,
579
+ export_type="function",
580
+ source_file=relative_path,
581
+ line_number=node.lineno,
582
+ docstring=docstring,
583
+ signature=signature
584
+ ))
585
+
586
+ elif isinstance(node, ast.ClassDef):
587
+ # Public class
588
+ if not node.name.startswith("_"):
589
+ docstring = ast.get_docstring(node)
590
+ exports.append(DocExport(
591
+ name=node.name,
592
+ export_type="class",
593
+ source_file=relative_path,
594
+ line_number=node.lineno,
595
+ docstring=docstring,
596
+ signature=f"class {node.name}"
597
+ ))
598
+
599
+ elif isinstance(node, ast.Assign):
600
+ # Module-level variable (potential export)
601
+ for target in node.targets:
602
+ if isinstance(target, ast.Name):
603
+ if not target.id.startswith("_") and target.id.isupper():
604
+ # CONSTANT style variables
605
+ exports.append(DocExport(
606
+ name=target.id,
607
+ export_type="variable",
608
+ source_file=relative_path,
609
+ line_number=node.lineno
610
+ ))
611
+
612
+ except SyntaxError:
613
+ pass # Skip files with syntax errors (common in WIP code)
614
+ except Exception:
615
+ pass # Skip files that can't be parsed (encoding issues, etc.)
616
+
617
+ return exports
618
+
619
+ def _get_function_signature(self, node: ast.FunctionDef, async_func: bool = False) -> str:
620
+ """Extract function signature from AST node.
621
+
622
+ Args:
623
+ node: AST function definition node
624
+ async_func: Whether this is an async function
625
+
626
+ Returns:
627
+ Function signature string
628
+ """
629
+ args = []
630
+
631
+ # Regular arguments
632
+ for arg in node.args.args:
633
+ arg_str = arg.arg
634
+ if arg.annotation:
635
+ arg_str += f": {ast.unparse(arg.annotation)}"
636
+ args.append(arg_str)
637
+
638
+ # *args
639
+ if node.args.vararg:
640
+ args.append(f"*{node.args.vararg.arg}")
641
+
642
+ # **kwargs
643
+ if node.args.kwarg:
644
+ args.append(f"**{node.args.kwarg.arg}")
645
+
646
+ prefix = "async def" if async_func else "def"
647
+ return_annotation = ""
648
+ if node.returns:
649
+ return_annotation = f" -> {ast.unparse(node.returns)}"
650
+
651
+ return f"{prefix} {node.name}({', '.join(args)}){return_annotation}"
652
+
653
+ def _extract_markdown_command(self, file_path: str, relative_path: str) -> List[DocExport]:
654
+ """Extract command info from a markdown command file.
655
+
656
+ Args:
657
+ file_path: Path to markdown file
658
+ relative_path: Relative path for display
659
+
660
+ Returns:
661
+ List containing single command export
662
+ """
663
+ exports: List[DocExport] = []
664
+
665
+ try:
666
+ with open(file_path, "r", encoding="utf-8") as f:
667
+ content = f.read()
668
+
669
+ # Extract command name from filename or first heading
670
+ path = Path(file_path)
671
+ command_name = path.stem
672
+
673
+ # Try to get description from first line after heading
674
+ lines = content.split("\n")
675
+ description = None
676
+ for i, line in enumerate(lines):
677
+ if line.startswith("# "):
678
+ # Found heading, look for description
679
+ if i + 1 < len(lines) and lines[i + 1].startswith("> "):
680
+ description = lines[i + 1][2:].strip()
681
+ break
682
+
683
+ exports.append(DocExport(
684
+ name=f"/{command_name}",
685
+ export_type="command",
686
+ source_file=relative_path,
687
+ line_number=1,
688
+ docstring=description
689
+ ))
690
+
691
+ except Exception:
692
+ pass # Best-effort extraction; skip unreadable files
693
+
694
+ return exports
695
+
696
+ def _extract_typescript_exports(self, file_path: str, relative_path: str) -> List[DocExport]:
697
+ """Extract exports from TypeScript/JavaScript file using regex.
698
+
699
+ Note: This is a simplified implementation. For full accuracy,
700
+ use TypeScript compiler API.
701
+
702
+ Args:
703
+ file_path: Path to TS/JS file
704
+ relative_path: Relative path for display
705
+
706
+ Returns:
707
+ List of exports
708
+ """
709
+ exports: List[DocExport] = []
710
+
711
+ try:
712
+ with open(file_path, "r", encoding="utf-8") as f:
713
+ content = f.read()
714
+
715
+ # Match export function/const/class declarations
716
+ patterns = [
717
+ # export function name(
718
+ (r"export\s+(?:async\s+)?function\s+(\w+)", "function"),
719
+ # export const name =
720
+ (r"export\s+const\s+(\w+)\s*=", "variable"),
721
+ # export class Name
722
+ (r"export\s+class\s+(\w+)", "class"),
723
+ # export default function/class
724
+ (r"export\s+default\s+(?:async\s+)?function\s+(\w+)", "function"),
725
+ (r"export\s+default\s+class\s+(\w+)", "class"),
726
+ ]
727
+
728
+ for pattern, export_type in patterns:
729
+ for match in re.finditer(pattern, content):
730
+ name = match.group(1)
731
+ # Find line number
732
+ line_num = content[:match.start()].count("\n") + 1
733
+
734
+ exports.append(DocExport(
735
+ name=name,
736
+ export_type=export_type,
737
+ source_file=relative_path,
738
+ line_number=line_num
739
+ ))
740
+
741
+ except Exception:
742
+ pass # Best-effort extraction; skip unreadable files
743
+
744
+ return exports
745
+
746
+ def _is_excluded(self, file_path: str) -> bool:
747
+ """Check if a file should be excluded.
748
+
749
+ Args:
750
+ file_path: File path to check
751
+
752
+ Returns:
753
+ True if file should be excluded
754
+ """
755
+ # Normalize path separators
756
+ normalized = file_path.replace("\\", "/")
757
+
758
+ for pattern in self.config.exclude:
759
+ # Handle ** patterns by checking directory containment
760
+ if pattern.startswith("**/") and pattern.endswith("/**"):
761
+ # Pattern like **/node_modules/** - check if dir is in path
762
+ dir_name = pattern[3:-3] # Extract "node_modules"
763
+ if f"/{dir_name}/" in f"/{normalized}/" or normalized.startswith(f"{dir_name}/"):
764
+ return True
765
+ elif pattern.startswith("**/"):
766
+ # Pattern like **/test_*.py - check filename pattern
767
+ suffix_pattern = pattern[3:]
768
+ if fnmatch.fnmatch(Path(normalized).name, suffix_pattern):
769
+ return True
770
+ # Also check if any parent dir + filename matches
771
+ if fnmatch.fnmatch(normalized, f"*/{suffix_pattern}"):
772
+ return True
773
+ else:
774
+ # Simple fnmatch pattern
775
+ if fnmatch.fnmatch(normalized, pattern):
776
+ return True
777
+
778
+ return False
779
+
780
    def clear_cache(self) -> None:
        """Clear the exports cache.

        Drops all memoized scan results so the next scan re-reads files
        from disk.
        """
        self._exports_cache.clear()
783
+
784
+
785
+ # =============================================================================
786
+ # DocCoverageService - Main Service
787
+ # =============================================================================
788
+
789
class DocCoverageService:
    """Service for documentation coverage tracking and analysis.

    Provides methods to:
    - Index source files and extract exports
    - Check which exports have documentation
    - Calculate coverage metrics
    - Detect documentation gaps
    """

    def __init__(self, project_path: Optional[str] = None, config: Optional[DocCoverageConfig] = None):
        """Initialize the documentation coverage service.

        Args:
            project_path: Root path of the project (defaults to cwd)
            config: Coverage configuration (defaults to standard config)
        """
        self.project_path = Path(project_path) if project_path else Path.cwd()
        self.config = config or DocCoverageConfig.default()
        # Indexer shared by all coverage runs; scans sources for exports.
        self.indexer = DocIndexer(self.config)
        # Populated by index_documentation(): doc_path -> set of documented items
        self._doc_index: Dict[str, Set[str]] = {}
810
+
811
+ def index_documentation(self) -> Dict[str, Set[str]]:
812
+ """Index all documentation files to find what's documented.
813
+
814
+ Returns:
815
+ Dict mapping doc paths to sets of documented export names
816
+ """
817
+ self._doc_index.clear()
818
+
819
+ for mapping in self.config.mappings:
820
+ # Get all potential doc files
821
+ doc_pattern = mapping.docs.replace("{basename}", "*").replace("{dirname}", "*")
822
+
823
+ for doc_file in self.project_path.glob(doc_pattern):
824
+ if not doc_file.exists():
825
+ continue
826
+
827
+ relative_path = str(doc_file.relative_to(self.project_path))
828
+ documented_items = self._extract_documented_items(str(doc_file), mapping.doc_type)
829
+ self._doc_index[relative_path] = documented_items
830
+
831
+ # Also check README files and index files
832
+ readme_patterns = [
833
+ ".claude/hooks/README.md",
834
+ "global/hooks/README.md",
835
+ "docs/command-reference.md",
836
+ "docs/api/*.md"
837
+ ]
838
+
839
+ for pattern in readme_patterns:
840
+ for doc_file in self.project_path.glob(pattern):
841
+ if doc_file.exists():
842
+ relative_path = str(doc_file.relative_to(self.project_path))
843
+ if relative_path not in self._doc_index:
844
+ documented_items = self._extract_documented_items(str(doc_file), "mixed")
845
+ self._doc_index[relative_path] = documented_items
846
+
847
+ return self._doc_index
848
+
849
+ def _extract_documented_items(self, doc_path: str, doc_type: str) -> Set[str]:
850
+ """Extract names of documented items from a doc file.
851
+
852
+ Args:
853
+ doc_path: Path to documentation file
854
+ doc_type: Type of documentation (api, command, hook, etc.)
855
+
856
+ Returns:
857
+ Set of documented item names
858
+ """
859
+ documented: Set[str] = set()
860
+
861
+ try:
862
+ with open(doc_path, "r", encoding="utf-8") as f:
863
+ content = f.read()
864
+
865
+ # Look for documented items based on doc type
866
+ if doc_type in ("api", "mixed"):
867
+ # Look for function/class references
868
+ # Pattern: ## FunctionName or ### `function_name`
869
+ patterns = [
870
+ r"^##\s+(\w+)", # ## FunctionName
871
+ r"^###\s+`?(\w+)`?", # ### `function_name`
872
+ r"^\*\*(\w+)\*\*", # **function_name**
873
+ r"`(\w+)\(\)`", # `function_name()`
874
+ r"def\s+(\w+)\(", # def function_name(
875
+ r"class\s+(\w+)", # class ClassName
876
+ ]
877
+ for pattern in patterns:
878
+ for match in re.finditer(pattern, content, re.MULTILINE):
879
+ documented.add(match.group(1))
880
+
881
+ if doc_type in ("command", "mixed"):
882
+ # Look for command references
883
+ patterns = [
884
+ r"^##\s+/(\w+)", # ## /command
885
+ r"^\| `/(\w+)`", # | `/command`
886
+ r"`/(\w+)`", # `/command`
887
+ ]
888
+ for pattern in patterns:
889
+ for match in re.finditer(pattern, content, re.MULTILINE):
890
+ documented.add(f"/{match.group(1)}")
891
+
892
+ if doc_type in ("hook", "mixed"):
893
+ # Look for hook references
894
+ patterns = [
895
+ r"(\w+)\.py", # hook_name.py
896
+ r"^##\s+(\w+)", # ## HookName
897
+ ]
898
+ for pattern in patterns:
899
+ for match in re.finditer(pattern, content, re.MULTILINE):
900
+ documented.add(match.group(1))
901
+
902
+ except Exception:
903
+ pass # Best-effort extraction; skip unreadable doc files
904
+
905
+ return documented
906
+
907
+ def calculate_coverage(self) -> CoverageResult:
908
+ """Calculate documentation coverage for the project.
909
+
910
+ Returns:
911
+ CoverageResult with coverage metrics and gaps
912
+ """
913
+ # Index documentation first
914
+ self.index_documentation()
915
+
916
+ # Scan for all exports
917
+ all_exports = self.indexer.scan_directory(str(self.project_path))
918
+
919
+ # Check which exports are documented
920
+ documented_count = 0
921
+ gaps: List[DocGap] = []
922
+ by_type: Dict[str, Dict[str, int]] = {}
923
+
924
+ for export in all_exports:
925
+ # Find the mapping for this export
926
+ mapping = self._find_mapping(export.source_file)
927
+ if not mapping:
928
+ continue
929
+
930
+ # Initialize type counters
931
+ if mapping.doc_type not in by_type:
932
+ by_type[mapping.doc_type] = {"total": 0, "documented": 0}
933
+ by_type[mapping.doc_type]["total"] += 1
934
+
935
+ # Check if documented
936
+ expected_doc = mapping.get_doc_path(export.source_file)
937
+ is_documented = self._is_documented(export, expected_doc)
938
+
939
+ if is_documented:
940
+ documented_count += 1
941
+ by_type[mapping.doc_type]["documented"] += 1
942
+ else:
943
+ gaps.append(DocGap(
944
+ export=export,
945
+ expected_doc=expected_doc,
946
+ doc_type=mapping.doc_type,
947
+ severity="missing",
948
+ suggestion=f"Add documentation for {export.name} in {expected_doc}"
949
+ ))
950
+
951
+ total = len(all_exports)
952
+ percent = (documented_count / total * 100) if total > 0 else 100.0
953
+
954
+ return CoverageResult(
955
+ total_exports=total,
956
+ documented_exports=documented_count,
957
+ coverage_percent=round(percent, 1),
958
+ gaps=gaps,
959
+ by_type=by_type
960
+ )
961
+
962
+ def _find_mapping(self, source_path: str) -> Optional[DocMapping]:
963
+ """Find the mapping configuration for a source file.
964
+
965
+ Args:
966
+ source_path: Relative path to source file
967
+
968
+ Returns:
969
+ Matching DocMapping or None
970
+ """
971
+ for mapping in self.config.mappings:
972
+ if mapping.matches(source_path):
973
+ return mapping
974
+ return None
975
+
976
+ def _is_documented(self, export: DocExport, _expected_doc: str) -> bool:
977
+ """Check if an export is documented.
978
+
979
+ Args:
980
+ export: The export to check
981
+ _expected_doc: Expected documentation path (reserved for future use)
982
+
983
+ Returns:
984
+ True if the export appears to be documented
985
+ """
986
+ # Check if export has a docstring (self-documenting)
987
+ if export.docstring and len(export.docstring) > 20:
988
+ return True
989
+
990
+ # Check indexed documentation
991
+ for _doc_path, documented_items in self._doc_index.items():
992
+ # Check if the export name appears in documented items
993
+ if export.name in documented_items:
994
+ return True
995
+
996
+ # For commands, check with slash prefix
997
+ if export.export_type == "command":
998
+ cmd_name = export.name.lstrip("/")
999
+ if cmd_name in documented_items or f"/{cmd_name}" in documented_items:
1000
+ return True
1001
+
1002
+ return False
1003
+
1004
+ def get_gaps(self) -> List[DocGap]:
1005
+ """Get list of documentation gaps.
1006
+
1007
+ Returns:
1008
+ List of DocGap objects
1009
+ """
1010
+ result = self.calculate_coverage()
1011
+ return result.gaps
1012
+
1013
    def generate_report(self, output_format: Literal["markdown", "json"] = "markdown") -> str:
        """Generate a coverage report.

        Args:
            output_format: Output format (markdown or json)

        Returns:
            Report string (JSON document or markdown with YAML front matter)
        """
        result = self.calculate_coverage()

        if output_format == "json":
            # Machine-readable form: headline metrics plus one record per gap.
            return json.dumps({
                "total_exports": result.total_exports,
                "documented_exports": result.documented_exports,
                "coverage_percent": result.coverage_percent,
                "status": result.status,
                "by_type": result.by_type,
                "gaps": [
                    {
                        "name": gap.export.name,
                        "type": gap.export.export_type,
                        "source": gap.export.source_file,
                        "line": gap.export.line_number,
                        "expected_doc": gap.expected_doc,
                        "suggestion": gap.suggestion
                    }
                    for gap in result.gaps
                ],
                "timestamp": result.timestamp
            }, indent=2)

        # Markdown report
        # YAML-style front matter lets other tools parse the headline numbers.
        lines = [
            "---",
            f"generated: {result.timestamp}",
            f"coverage: {result.coverage_percent}%",
            f"status: {result.status}",
            "---",
            "",
            "# Documentation Coverage Report",
            "",
            "## Summary",
            "",
            "| Metric | Value | Status |",
            "|--------|-------|--------|",
            f"| Total Exports | {result.total_exports} | — |",
            f"| Documented | {result.documented_exports} | — |",
            f"| Coverage | {result.coverage_percent}% | {result.status_emoji} {result.status.title()} |",
            "",
        ]

        # Coverage by type
        if result.by_type:
            lines.extend([
                "## Coverage by Type",
                "",
                "| Type | Total | Documented | Coverage |",
                "|------|-------|------------|----------|",
            ])
            for doc_type, counts in result.by_type.items():
                # Empty categories count as fully covered (avoids div-by-zero).
                type_pct = (counts["documented"] / counts["total"] * 100) if counts["total"] > 0 else 100
                lines.append(f"| {doc_type} | {counts['total']} | {counts['documented']} | {type_pct:.1f}% |")
            lines.append("")

        # Gaps
        if result.gaps:
            lines.extend([
                "## Undocumented Exports",
                "",
                "| Export | Type | Source | Line | Suggested Doc |",
                "|--------|------|--------|------|---------------|",
            ])
            for gap in result.gaps[:20]:  # Limit to 20 for readability
                lines.append(
                    f"| `{gap.export.name}` | {gap.export.export_type} | "
                    f"{gap.export.source_file} | {gap.export.line_number} | {gap.expected_doc} |"
                )

            if len(result.gaps) > 20:
                # Summarize the overflow instead of listing every gap.
                lines.append(f"| ... | ... | ... | ... | ({len(result.gaps) - 20} more) |")
            lines.append("")
        else:
            lines.extend([
                "## Undocumented Exports",
                "",
                "No documentation gaps found! 🎉",
                ""
            ])

        return "\n".join(lines)
1104
+
1105
+ def check(self, threshold: Optional[int] = None) -> bool:
1106
+ """Check if coverage meets threshold.
1107
+
1108
+ Args:
1109
+ threshold: Coverage threshold (defaults to config warning threshold)
1110
+
1111
+ Returns:
1112
+ True if coverage meets threshold
1113
+ """
1114
+ if threshold is None:
1115
+ threshold = self.config.thresholds.get("warning", 80)
1116
+
1117
+ result = self.calculate_coverage()
1118
+ return result.coverage_percent >= threshold
1119
+
1120
+ def detect_stale_docs(
1121
+ self,
1122
+ sensitivity: Optional[Literal["aggressive", "balanced", "quiet"]] = None,
1123
+ ) -> GapDetectionResult:
1124
+ """Detect potentially stale documentation using GapDetector.
1125
+
1126
+ Args:
1127
+ sensitivity: Detection sensitivity level (defaults to config value):
1128
+ - aggressive: Flag any timestamp difference
1129
+ - balanced: Flag docs >7 days stale
1130
+ - quiet: Flag docs >30 days stale
1131
+
1132
+ Returns:
1133
+ GapDetectionResult with stale docs and changed files,
1134
+ or empty result if gap detection is disabled
1135
+ """
1136
+ # Check if gap detection is disabled via config
1137
+ if not self.config.gap_detection_enabled:
1138
+ return GapDetectionResult(
1139
+ stale_docs=[],
1140
+ changed_files_without_docs=[],
1141
+ total_warnings=0,
1142
+ sensitivity=sensitivity or self.config.sensitivity,
1143
+ )
1144
+
1145
+ sens = sensitivity or self.config.sensitivity
1146
+ detector = GapDetector(self.config, sensitivity=sens)
1147
+ return detector.analyze(str(self.project_path))
1148
+
1149
+ def get_session_warnings(
1150
+ self,
1151
+ sensitivity: Optional[Literal["aggressive", "balanced", "quiet"]] = None,
1152
+ ) -> str:
1153
+ """Get documentation warnings formatted for session context injection.
1154
+
1155
+ This is the main entry point for SessionStart hooks to get warnings
1156
+ that can be injected into the session context.
1157
+
1158
+ Args:
1159
+ sensitivity: Detection sensitivity level (defaults to config value)
1160
+
1161
+ Returns:
1162
+ Formatted markdown block for session context, or empty string if
1163
+ no warnings or gap detection is disabled
1164
+ """
1165
+ result = self.detect_stale_docs(sensitivity=sensitivity)
1166
+ return result.to_context_block()
1167
+
1168
+
1169
+ # =============================================================================
1170
+ # State Integration
1171
+ # =============================================================================
1172
+
1173
def _sync_coverage_to_state(result: CoverageResult) -> None:
    """Best-effort sync of a coverage result into anvil-state.json."""
    if STATE_MANAGER_AVAILABLE:
        try:
            _sync_state(
                percent=result.coverage_percent,
                total=result.total_exports,
                documented=result.documented_exports,
                status=result.status,
            )
        except Exception:
            # Never let state-tracking failures break the CLI.
            pass
1188
+
1189
+
1190
+ # =============================================================================
1191
+ # CLI Interface
1192
+ # =============================================================================
1193
+
1194
def main():
    """CLI entry point for doc coverage service.

    Returns:
        Process exit code: 0 on success, 1 when --check fails the threshold.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Documentation Coverage System (ANV-31)",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
    python doc_coverage_service.py --report
    python doc_coverage_service.py --check
    python doc_coverage_service.py --gaps
    python doc_coverage_service.py --json
"""
    )

    parser.add_argument(
        "--report",
        action="store_true",
        help="Generate full coverage report"
    )
    parser.add_argument(
        "--check",
        action="store_true",
        help="Check coverage against threshold (exits 1 if below)"
    )
    parser.add_argument(
        "--gaps",
        action="store_true",
        help="Show only documentation gaps"
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output in JSON format"
    )
    parser.add_argument(
        "--threshold",
        type=int,
        default=80,
        help="Coverage threshold for --check (default: 80)"
    )
    parser.add_argument(
        "--project",
        type=str,
        default=".",
        help="Project path (default: current directory)"
    )

    args = parser.parse_args()

    # Initialize service (uses default config for the given project root)
    service = DocCoverageService(project_path=args.project)

    if args.check:
        # Check mode - exit with code 1 if below threshold
        # Calculate once and use result for both check and display
        result = service.calculate_coverage()
        _sync_coverage_to_state(result)  # Sync to anvil-state.json
        passed = result.coverage_percent >= args.threshold

        if passed:
            print(f"✅ Coverage: {result.coverage_percent}% (threshold: {args.threshold}%)")
            return 0
        else:
            print(f"❌ Coverage: {result.coverage_percent}% (threshold: {args.threshold}%)")
            print(f"   {len(result.gaps)} undocumented exports")
            return 1

    elif args.gaps:
        # Show only gaps (note: recomputes coverage; no state sync here)
        gaps = service.get_gaps()

        if not gaps:
            print("No documentation gaps found! 🎉")
            return 0

        if args.json:
            # Machine-readable gap list
            print(json.dumps([
                {
                    "name": g.export.name,
                    "type": g.export.export_type,
                    "source": g.export.source_file,
                    "line": g.export.line_number,
                    "expected_doc": g.expected_doc
                }
                for g in gaps
            ], indent=2))
        else:
            # Human-readable gap list
            print(f"Found {len(gaps)} documentation gaps:\n")
            for gap in gaps:
                print(f"  • {gap.export.name} ({gap.export.export_type})")
                print(f"    Source: {gap.export.source_file}:{gap.export.line_number}")
                print(f"    Expected: {gap.expected_doc}")
                print()
        return 0

    else:
        # Full report (default)
        # Calculate coverage for state sync, then generate report
        result = service.calculate_coverage()
        _sync_coverage_to_state(result)  # Sync to anvil-state.json

        output_format = "json" if args.json else "markdown"
        report = service.generate_report(output_format=output_format)
        print(report)
        return 0
1301
+
1302
+
1303
if __name__ == "__main__":
    import sys
    # Propagate main()'s return value as the process exit code
    # (0 = ok, 1 = coverage below threshold in --check mode).
    sys.exit(main())