spec_kitty_cli-0.12.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spec_kitty_cli-0.12.1.dist-info/METADATA +1767 -0
- spec_kitty_cli-0.12.1.dist-info/RECORD +242 -0
- spec_kitty_cli-0.12.1.dist-info/WHEEL +4 -0
- spec_kitty_cli-0.12.1.dist-info/entry_points.txt +2 -0
- spec_kitty_cli-0.12.1.dist-info/licenses/LICENSE +21 -0
- specify_cli/__init__.py +171 -0
- specify_cli/acceptance.py +627 -0
- specify_cli/agent_utils/README.md +157 -0
- specify_cli/agent_utils/__init__.py +9 -0
- specify_cli/agent_utils/status.py +356 -0
- specify_cli/cli/__init__.py +6 -0
- specify_cli/cli/commands/__init__.py +46 -0
- specify_cli/cli/commands/accept.py +189 -0
- specify_cli/cli/commands/agent/__init__.py +22 -0
- specify_cli/cli/commands/agent/config.py +382 -0
- specify_cli/cli/commands/agent/context.py +191 -0
- specify_cli/cli/commands/agent/feature.py +1057 -0
- specify_cli/cli/commands/agent/release.py +11 -0
- specify_cli/cli/commands/agent/tasks.py +1253 -0
- specify_cli/cli/commands/agent/workflow.py +801 -0
- specify_cli/cli/commands/context.py +246 -0
- specify_cli/cli/commands/dashboard.py +85 -0
- specify_cli/cli/commands/implement.py +973 -0
- specify_cli/cli/commands/init.py +827 -0
- specify_cli/cli/commands/init_help.py +62 -0
- specify_cli/cli/commands/merge.py +755 -0
- specify_cli/cli/commands/mission.py +240 -0
- specify_cli/cli/commands/ops.py +265 -0
- specify_cli/cli/commands/orchestrate.py +640 -0
- specify_cli/cli/commands/repair.py +175 -0
- specify_cli/cli/commands/research.py +165 -0
- specify_cli/cli/commands/sync.py +364 -0
- specify_cli/cli/commands/upgrade.py +249 -0
- specify_cli/cli/commands/validate_encoding.py +186 -0
- specify_cli/cli/commands/validate_tasks.py +186 -0
- specify_cli/cli/commands/verify.py +310 -0
- specify_cli/cli/helpers.py +123 -0
- specify_cli/cli/step_tracker.py +91 -0
- specify_cli/cli/ui.py +192 -0
- specify_cli/core/__init__.py +53 -0
- specify_cli/core/agent_context.py +311 -0
- specify_cli/core/config.py +96 -0
- specify_cli/core/context_validation.py +362 -0
- specify_cli/core/dependency_graph.py +351 -0
- specify_cli/core/git_ops.py +129 -0
- specify_cli/core/multi_parent_merge.py +323 -0
- specify_cli/core/paths.py +260 -0
- specify_cli/core/project_resolver.py +110 -0
- specify_cli/core/stale_detection.py +263 -0
- specify_cli/core/tool_checker.py +79 -0
- specify_cli/core/utils.py +43 -0
- specify_cli/core/vcs/__init__.py +114 -0
- specify_cli/core/vcs/detection.py +341 -0
- specify_cli/core/vcs/exceptions.py +85 -0
- specify_cli/core/vcs/git.py +1304 -0
- specify_cli/core/vcs/jujutsu.py +1208 -0
- specify_cli/core/vcs/protocol.py +285 -0
- specify_cli/core/vcs/types.py +249 -0
- specify_cli/core/version_checker.py +261 -0
- specify_cli/core/worktree.py +506 -0
- specify_cli/dashboard/__init__.py +28 -0
- specify_cli/dashboard/diagnostics.py +204 -0
- specify_cli/dashboard/handlers/__init__.py +17 -0
- specify_cli/dashboard/handlers/api.py +143 -0
- specify_cli/dashboard/handlers/base.py +65 -0
- specify_cli/dashboard/handlers/features.py +390 -0
- specify_cli/dashboard/handlers/router.py +81 -0
- specify_cli/dashboard/handlers/static.py +50 -0
- specify_cli/dashboard/lifecycle.py +541 -0
- specify_cli/dashboard/scanner.py +437 -0
- specify_cli/dashboard/server.py +123 -0
- specify_cli/dashboard/static/dashboard/dashboard.css +722 -0
- specify_cli/dashboard/static/dashboard/dashboard.js +1424 -0
- specify_cli/dashboard/static/spec-kitty.png +0 -0
- specify_cli/dashboard/templates/__init__.py +36 -0
- specify_cli/dashboard/templates/index.html +258 -0
- specify_cli/doc_generators.py +621 -0
- specify_cli/doc_state.py +408 -0
- specify_cli/frontmatter.py +384 -0
- specify_cli/gap_analysis.py +915 -0
- specify_cli/gitignore_manager.py +300 -0
- specify_cli/guards.py +145 -0
- specify_cli/legacy_detector.py +83 -0
- specify_cli/manifest.py +286 -0
- specify_cli/merge/__init__.py +63 -0
- specify_cli/merge/executor.py +653 -0
- specify_cli/merge/forecast.py +215 -0
- specify_cli/merge/ordering.py +126 -0
- specify_cli/merge/preflight.py +230 -0
- specify_cli/merge/state.py +185 -0
- specify_cli/merge/status_resolver.py +354 -0
- specify_cli/mission.py +654 -0
- specify_cli/missions/documentation/command-templates/implement.md +309 -0
- specify_cli/missions/documentation/command-templates/plan.md +275 -0
- specify_cli/missions/documentation/command-templates/review.md +344 -0
- specify_cli/missions/documentation/command-templates/specify.md +206 -0
- specify_cli/missions/documentation/command-templates/tasks.md +189 -0
- specify_cli/missions/documentation/mission.yaml +113 -0
- specify_cli/missions/documentation/templates/divio/explanation-template.md +192 -0
- specify_cli/missions/documentation/templates/divio/howto-template.md +168 -0
- specify_cli/missions/documentation/templates/divio/reference-template.md +179 -0
- specify_cli/missions/documentation/templates/divio/tutorial-template.md +146 -0
- specify_cli/missions/documentation/templates/generators/jsdoc.json.template +18 -0
- specify_cli/missions/documentation/templates/generators/sphinx-conf.py.template +36 -0
- specify_cli/missions/documentation/templates/plan-template.md +269 -0
- specify_cli/missions/documentation/templates/release-template.md +222 -0
- specify_cli/missions/documentation/templates/spec-template.md +172 -0
- specify_cli/missions/documentation/templates/task-prompt-template.md +140 -0
- specify_cli/missions/documentation/templates/tasks-template.md +159 -0
- specify_cli/missions/research/command-templates/merge.md +388 -0
- specify_cli/missions/research/command-templates/plan.md +125 -0
- specify_cli/missions/research/command-templates/review.md +144 -0
- specify_cli/missions/research/command-templates/tasks.md +225 -0
- specify_cli/missions/research/mission.yaml +115 -0
- specify_cli/missions/research/templates/data-model-template.md +33 -0
- specify_cli/missions/research/templates/plan-template.md +161 -0
- specify_cli/missions/research/templates/research/evidence-log.csv +18 -0
- specify_cli/missions/research/templates/research/source-register.csv +18 -0
- specify_cli/missions/research/templates/research-template.md +35 -0
- specify_cli/missions/research/templates/spec-template.md +64 -0
- specify_cli/missions/research/templates/task-prompt-template.md +148 -0
- specify_cli/missions/research/templates/tasks-template.md +114 -0
- specify_cli/missions/software-dev/command-templates/accept.md +75 -0
- specify_cli/missions/software-dev/command-templates/analyze.md +183 -0
- specify_cli/missions/software-dev/command-templates/checklist.md +286 -0
- specify_cli/missions/software-dev/command-templates/clarify.md +157 -0
- specify_cli/missions/software-dev/command-templates/constitution.md +432 -0
- specify_cli/missions/software-dev/command-templates/dashboard.md +101 -0
- specify_cli/missions/software-dev/command-templates/implement.md +41 -0
- specify_cli/missions/software-dev/command-templates/merge.md +383 -0
- specify_cli/missions/software-dev/command-templates/plan.md +171 -0
- specify_cli/missions/software-dev/command-templates/review.md +32 -0
- specify_cli/missions/software-dev/command-templates/specify.md +321 -0
- specify_cli/missions/software-dev/command-templates/tasks.md +566 -0
- specify_cli/missions/software-dev/mission.yaml +100 -0
- specify_cli/missions/software-dev/templates/plan-template.md +132 -0
- specify_cli/missions/software-dev/templates/spec-template.md +116 -0
- specify_cli/missions/software-dev/templates/task-prompt-template.md +140 -0
- specify_cli/missions/software-dev/templates/tasks-template.md +159 -0
- specify_cli/orchestrator/__init__.py +75 -0
- specify_cli/orchestrator/agent_config.py +224 -0
- specify_cli/orchestrator/agents/__init__.py +170 -0
- specify_cli/orchestrator/agents/augment.py +112 -0
- specify_cli/orchestrator/agents/base.py +243 -0
- specify_cli/orchestrator/agents/claude.py +112 -0
- specify_cli/orchestrator/agents/codex.py +106 -0
- specify_cli/orchestrator/agents/copilot.py +137 -0
- specify_cli/orchestrator/agents/cursor.py +139 -0
- specify_cli/orchestrator/agents/gemini.py +115 -0
- specify_cli/orchestrator/agents/kilocode.py +94 -0
- specify_cli/orchestrator/agents/opencode.py +132 -0
- specify_cli/orchestrator/agents/qwen.py +96 -0
- specify_cli/orchestrator/config.py +455 -0
- specify_cli/orchestrator/executor.py +642 -0
- specify_cli/orchestrator/integration.py +1230 -0
- specify_cli/orchestrator/monitor.py +898 -0
- specify_cli/orchestrator/scheduler.py +832 -0
- specify_cli/orchestrator/state.py +508 -0
- specify_cli/orchestrator/testing/__init__.py +122 -0
- specify_cli/orchestrator/testing/availability.py +346 -0
- specify_cli/orchestrator/testing/fixtures.py +684 -0
- specify_cli/orchestrator/testing/paths.py +218 -0
- specify_cli/plan_validation.py +107 -0
- specify_cli/scripts/debug-dashboard-scan.py +61 -0
- specify_cli/scripts/tasks/acceptance_support.py +695 -0
- specify_cli/scripts/tasks/task_helpers.py +506 -0
- specify_cli/scripts/tasks/tasks_cli.py +848 -0
- specify_cli/scripts/validate_encoding.py +180 -0
- specify_cli/task_metadata_validation.py +274 -0
- specify_cli/tasks_support.py +447 -0
- specify_cli/template/__init__.py +47 -0
- specify_cli/template/asset_generator.py +206 -0
- specify_cli/template/github_client.py +334 -0
- specify_cli/template/manager.py +193 -0
- specify_cli/template/renderer.py +99 -0
- specify_cli/templates/AGENTS.md +190 -0
- specify_cli/templates/POWERSHELL_SYNTAX.md +229 -0
- specify_cli/templates/agent-file-template.md +35 -0
- specify_cli/templates/checklist-template.md +42 -0
- specify_cli/templates/claudeignore-template +58 -0
- specify_cli/templates/command-templates/accept.md +141 -0
- specify_cli/templates/command-templates/analyze.md +253 -0
- specify_cli/templates/command-templates/checklist.md +352 -0
- specify_cli/templates/command-templates/clarify.md +224 -0
- specify_cli/templates/command-templates/constitution.md +432 -0
- specify_cli/templates/command-templates/dashboard.md +175 -0
- specify_cli/templates/command-templates/implement.md +190 -0
- specify_cli/templates/command-templates/merge.md +374 -0
- specify_cli/templates/command-templates/plan.md +171 -0
- specify_cli/templates/command-templates/research.md +88 -0
- specify_cli/templates/command-templates/review.md +510 -0
- specify_cli/templates/command-templates/specify.md +321 -0
- specify_cli/templates/command-templates/status.md +92 -0
- specify_cli/templates/command-templates/tasks.md +199 -0
- specify_cli/templates/git-hooks/pre-commit +22 -0
- specify_cli/templates/git-hooks/pre-commit-agent-check +37 -0
- specify_cli/templates/git-hooks/pre-commit-encoding-check +142 -0
- specify_cli/templates/plan-template.md +108 -0
- specify_cli/templates/spec-template.md +118 -0
- specify_cli/templates/task-prompt-template.md +165 -0
- specify_cli/templates/tasks-template.md +161 -0
- specify_cli/templates/vscode-settings.json +13 -0
- specify_cli/text_sanitization.py +225 -0
- specify_cli/upgrade/__init__.py +18 -0
- specify_cli/upgrade/detector.py +239 -0
- specify_cli/upgrade/metadata.py +182 -0
- specify_cli/upgrade/migrations/__init__.py +65 -0
- specify_cli/upgrade/migrations/base.py +80 -0
- specify_cli/upgrade/migrations/m_0_10_0_python_only.py +359 -0
- specify_cli/upgrade/migrations/m_0_10_12_constitution_cleanup.py +99 -0
- specify_cli/upgrade/migrations/m_0_10_14_update_implement_slash_command.py +176 -0
- specify_cli/upgrade/migrations/m_0_10_1_populate_slash_commands.py +174 -0
- specify_cli/upgrade/migrations/m_0_10_2_update_slash_commands.py +172 -0
- specify_cli/upgrade/migrations/m_0_10_6_workflow_simplification.py +174 -0
- specify_cli/upgrade/migrations/m_0_10_8_fix_memory_structure.py +252 -0
- specify_cli/upgrade/migrations/m_0_10_9_repair_templates.py +168 -0
- specify_cli/upgrade/migrations/m_0_11_0_workspace_per_wp.py +182 -0
- specify_cli/upgrade/migrations/m_0_11_1_improved_workflow_templates.py +173 -0
- specify_cli/upgrade/migrations/m_0_11_1_update_implement_slash_command.py +160 -0
- specify_cli/upgrade/migrations/m_0_11_2_improved_workflow_templates.py +173 -0
- specify_cli/upgrade/migrations/m_0_11_3_workflow_agent_flag.py +114 -0
- specify_cli/upgrade/migrations/m_0_12_0_documentation_mission.py +155 -0
- specify_cli/upgrade/migrations/m_0_12_1_remove_kitty_specs_from_gitignore.py +183 -0
- specify_cli/upgrade/migrations/m_0_2_0_specify_to_kittify.py +80 -0
- specify_cli/upgrade/migrations/m_0_4_8_gitignore_agents.py +118 -0
- specify_cli/upgrade/migrations/m_0_5_0_encoding_hooks.py +141 -0
- specify_cli/upgrade/migrations/m_0_6_5_commands_rename.py +169 -0
- specify_cli/upgrade/migrations/m_0_6_7_ensure_missions.py +228 -0
- specify_cli/upgrade/migrations/m_0_7_2_worktree_commands_dedup.py +89 -0
- specify_cli/upgrade/migrations/m_0_7_3_update_scripts.py +114 -0
- specify_cli/upgrade/migrations/m_0_8_0_remove_active_mission.py +82 -0
- specify_cli/upgrade/migrations/m_0_8_0_worktree_agents_symlink.py +148 -0
- specify_cli/upgrade/migrations/m_0_9_0_frontmatter_only_lanes.py +346 -0
- specify_cli/upgrade/migrations/m_0_9_1_complete_lane_migration.py +656 -0
- specify_cli/upgrade/migrations/m_0_9_2_research_mission_templates.py +221 -0
- specify_cli/upgrade/registry.py +121 -0
- specify_cli/upgrade/runner.py +284 -0
- specify_cli/validators/__init__.py +14 -0
- specify_cli/validators/paths.py +154 -0
- specify_cli/validators/research.py +428 -0
- specify_cli/verify_enhanced.py +270 -0
- specify_cli/workspace_context.py +224 -0

specify_cli/gap_analysis.py (new file, +915 lines)
@@ -0,0 +1,915 @@
"""Gap analysis for documentation missions.

This module provides functionality to audit existing documentation, classify
docs into Divio types, build coverage matrices, and identify gaps.

The multi-strategy approach:
1. Detect documentation framework from file structure
2. Parse frontmatter for explicit type classification
3. Apply content heuristics if no explicit type
4. Build coverage matrix showing what exists vs what's needed
5. Prioritize gaps by user impact
"""

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from ruamel.yaml import YAML


class DocFramework(Enum):
    """Supported documentation frameworks."""

    SPHINX = "sphinx"
    MKDOCS = "mkdocs"
    DOCUSAURUS = "docusaurus"
    JEKYLL = "jekyll"
    HUGO = "hugo"
    PLAIN_MARKDOWN = "plain-markdown"
    UNKNOWN = "unknown"


def detect_doc_framework(docs_dir: Path) -> DocFramework:
    """Detect documentation framework from file structure.

    Args:
        docs_dir: Directory containing documentation

    Returns:
        Detected framework or UNKNOWN if cannot determine
    """
    # Sphinx: conf.py is definitive indicator
    if (docs_dir / "conf.py").exists():
        return DocFramework.SPHINX

    # MkDocs: mkdocs.yml is definitive
    if (docs_dir / "mkdocs.yml").exists():
        return DocFramework.MKDOCS

    # Docusaurus: docusaurus.config.js
    if (docs_dir / "docusaurus.config.js").exists():
        return DocFramework.DOCUSAURUS

    # Jekyll: _config.yml
    if (docs_dir / "_config.yml").exists():
        return DocFramework.JEKYLL

    # Hugo: config.toml or config.yaml
    if (docs_dir / "config.toml").exists() or (docs_dir / "config.yaml").exists():
        return DocFramework.HUGO

    # Check for markdown files without framework
    if list(docs_dir.rglob("*.md")):
        return DocFramework.PLAIN_MARKDOWN

    return DocFramework.UNKNOWN


class DivioType(Enum):
    """Divio documentation types."""

    TUTORIAL = "tutorial"
    HOWTO = "how-to"
    REFERENCE = "reference"
    EXPLANATION = "explanation"
    UNCLASSIFIED = "unclassified"


def parse_frontmatter(content: str) -> Optional[Dict[str, Any]]:
    """Parse YAML frontmatter from markdown file.

    Args:
        content: File content

    Returns:
        Frontmatter dict if present, None otherwise
    """
    if not content.startswith("---"):
        return None

    # Find closing ---
    lines = content.split("\n")
    end_idx = None
    for i, line in enumerate(lines[1:], start=1):
        if line.strip() == "---":
            end_idx = i
            break

    if end_idx is None:
        return None

    # Parse YAML frontmatter
    yaml = YAML()
    yaml.preserve_quotes = True
    try:
        frontmatter_text = "\n".join(lines[1:end_idx])
        return yaml.load(frontmatter_text)
    except Exception:
        return None


def classify_by_content_heuristics(content: str) -> DivioType:
    """Classify document by analyzing content patterns.

    Args:
        content: Document content (without frontmatter)

    Returns:
        Best-guess Divio type based on content analysis
    """
    content_lower = content.lower()

    # Tutorial markers
    tutorial_markers = [
        "step 1",
        "step 2",
        "first,",
        "next,",
        "now,",
        "you should see",
        "let's",
        "you'll learn",
        "by the end",
        "what you'll build",
    ]
    tutorial_score = sum(1 for marker in tutorial_markers if marker in content_lower)

    # How-to markers
    howto_markers = [
        "how to",
        "to do",
        "follow these steps",
        "problem:",
        "solution:",
        "before you begin",
        "prerequisites:",
        "verification:",
    ]
    howto_score = sum(1 for marker in howto_markers if marker in content_lower)

    # Reference markers
    reference_markers = [
        "parameters:",
        "returns:",
        "arguments:",
        "options:",
        "methods:",
        "properties:",
        "attributes:",
        "class:",
        "function:",
        "api",
    ]
    reference_score = sum(1 for marker in reference_markers if marker in content_lower)

    # Explanation markers
    explanation_markers = [
        "why",
        "background",
        "concepts",
        "architecture",
        "design decision",
        "alternatives",
        "trade-offs",
        "how it works",
        "understanding",
    ]
    explanation_score = sum(
        1 for marker in explanation_markers if marker in content_lower
    )

    # Determine type by highest score
    scores = {
        DivioType.TUTORIAL: tutorial_score,
        DivioType.HOWTO: howto_score,
        DivioType.REFERENCE: reference_score,
        DivioType.EXPLANATION: explanation_score,
    }

    max_score = max(scores.values())
    if max_score == 0:
        return DivioType.UNCLASSIFIED

    # Return type with highest score
    for divio_type, score in scores.items():
        if score == max_score:
            return divio_type

    return DivioType.UNCLASSIFIED


def classify_divio_type(content: str) -> Tuple[DivioType, float]:
    """Classify document into Divio type.

    Uses multi-strategy approach:
    1. Check frontmatter for explicit 'type' field (confidence: 1.0)
    2. Apply content heuristics (confidence: 0.7)

    Args:
        content: Full document content including frontmatter

    Returns:
        Tuple of (DivioType, confidence_score)
    """
    # Strategy 1: Frontmatter (explicit classification)
    frontmatter = parse_frontmatter(content)
    if frontmatter and "type" in frontmatter:
        type_str = frontmatter["type"].lower()
        type_map = {
            "tutorial": DivioType.TUTORIAL,
            "how-to": DivioType.HOWTO,
            "howto": DivioType.HOWTO,
            "reference": DivioType.REFERENCE,
            "explanation": DivioType.EXPLANATION,
        }
        if type_str in type_map:
            return (type_map[type_str], 1.0)  # High confidence

    # Strategy 2: Content heuristics
    divio_type = classify_by_content_heuristics(content)
    confidence = 0.7 if divio_type != DivioType.UNCLASSIFIED else 0.0

    return (divio_type, confidence)


@dataclass
class CoverageMatrix:
    """Documentation coverage matrix showing Divio type coverage by project area.

    The matrix shows which project areas (features, modules, components) have
    documentation for each Divio type (tutorial, how-to, reference, explanation).
    """

    project_areas: List[str] = field(default_factory=list)  # e.g., ["auth", "api", "cli"]
    divio_types: List[str] = field(
        default_factory=lambda: ["tutorial", "how-to", "reference", "explanation"]
    )

    # Maps (area, type) to doc file path (None if missing)
    cells: Dict[Tuple[str, str], Optional[Path]] = field(default_factory=dict)

    def get_coverage_for_area(self, area: str) -> Dict[str, Optional[Path]]:
        """Get all Divio type coverage for one project area.

        Args:
            area: Project area name

        Returns:
            Dict mapping Divio type to doc file path (or None if missing)
        """
        return {dtype: self.cells.get((area, dtype)) for dtype in self.divio_types}

    def get_coverage_for_type(self, divio_type: str) -> Dict[str, Optional[Path]]:
        """Get all project area coverage for one Divio type.

        Args:
            divio_type: Divio type name

        Returns:
            Dict mapping project area to doc file path (or None if missing)
        """
        return {
            area: self.cells.get((area, divio_type)) for area in self.project_areas
        }

    def get_gaps(self) -> List[Tuple[str, str]]:
        """Return list of (area, type) tuples with missing documentation.

        Returns:
            List of (area, divio_type) tuples where documentation is missing
        """
        gaps = []
        for area in self.project_areas:
            for dtype in self.divio_types:
                if self.cells.get((area, dtype)) is None:
                    gaps.append((area, dtype))
        return gaps

    def get_coverage_percentage(self) -> float:
        """Calculate percentage of cells with documentation.

        Returns:
            Coverage percentage (0.0 to 1.0)
        """
        total_cells = len(self.project_areas) * len(self.divio_types)
        if total_cells == 0:
            return 0.0

        filled_cells = sum(1 for path in self.cells.values() if path is not None)

        return filled_cells / total_cells

    def to_markdown_table(self) -> str:
        """Generate Markdown table representation of coverage.

        Returns:
            Markdown table showing coverage matrix
        """
        if not self.project_areas:
            return "No project areas identified."

        # Build table header
        header = "| Area | " + " | ".join(self.divio_types) + " |"
        separator = "|" + "|".join(["---"] * (len(self.divio_types) + 1)) + "|"

        # Build table rows
        rows = []
        for area in self.project_areas:
            cells = []
            for dtype in self.divio_types:
                doc_path = self.cells.get((area, dtype))
                if doc_path:
                    cells.append("✓")
                else:
                    cells.append("✗")
            row = f"| {area} | " + " | ".join(cells) + " |"
            rows.append(row)

        # Combine
        table_lines = [header, separator] + rows

        # Add coverage percentage
        coverage_pct = self.get_coverage_percentage() * 100
        summary = f"\n**Coverage**: {len([c for c in self.cells.values() if c])}/{len(self.cells)} cells = {coverage_pct:.1f}%"

        return "\n".join(table_lines) + summary


class GapPriority(Enum):
    """Priority levels for documentation gaps."""

    HIGH = "high"
    MEDIUM = "medium"
    LOW = "low"


@dataclass
class DocumentationGap:
    """Represents a missing piece of documentation.

    Attributes:
        area: Project area missing documentation
        divio_type: Which Divio type is missing
        priority: How important this gap is (high/medium/low)
        reason: Why this gap matters
    """

    area: str
    divio_type: str
    priority: GapPriority
    reason: str

    def __repr__(self) -> str:
        return f"[{self.priority.value.upper()}] {self.area} → {self.divio_type}: {self.reason}"


def prioritize_gaps(
    gaps: List[Tuple[str, str]],
    project_areas: List[str],
    existing_docs: Dict[Path, DivioType],
) -> List[DocumentationGap]:
    """Assign priorities to documentation gaps based on user impact.

    Prioritization rules (from research):
    - HIGH: Missing tutorials (blocks new users)
    - HIGH: Missing reference for core features (users can't find APIs)
    - MEDIUM: Missing how-tos for common tasks (users struggle with problems)
    - MEDIUM: Missing tutorials for advanced features
    - LOW: Missing explanations (nice-to-have, not blocking)

    Args:
        gaps: List of (area, divio_type) tuples with missing docs
        project_areas: All project areas
        existing_docs: Map of doc paths to classified types (for context)

    Returns:
        List of DocumentationGap objects with priorities assigned
    """
    prioritized = []

    for area, divio_type in gaps:
        # Determine if this is a core area (heuristic: alphabetically first areas are core)
        is_core_area = project_areas.index(area) < len(project_areas) // 2

        # Prioritization logic
        if divio_type == "tutorial":
            if is_core_area:
                priority = GapPriority.HIGH
                reason = "New users need tutorials to get started with core functionality"
            else:
                priority = GapPriority.MEDIUM
                reason = "Users need tutorials for advanced features"

        elif divio_type == "reference":
            if is_core_area:
                priority = GapPriority.HIGH
                reason = "Users need API reference to use core features"
            else:
                priority = GapPriority.MEDIUM
                reason = "API reference helps users discover all capabilities"

        elif divio_type == "how-to":
            priority = GapPriority.MEDIUM
            reason = "Users need how-tos to solve common problems and tasks"

        elif divio_type == "explanation":
            priority = GapPriority.LOW
            reason = "Explanations aid understanding but are not blocking"

        else:
            priority = GapPriority.LOW
            reason = "Unknown Divio type"

        prioritized.append(
            DocumentationGap(
                area=area, divio_type=divio_type, priority=priority, reason=reason
            )
        )

    # Sort by priority (high first)
    priority_order = {GapPriority.HIGH: 0, GapPriority.MEDIUM: 1, GapPriority.LOW: 2}
    prioritized.sort(key=lambda gap: priority_order[gap.priority])

    return prioritized


def extract_public_api_from_python(source_dir: Path) -> List[str]:
    """Extract public API elements from Python source.

    Finds:
    - Public functions (not starting with _)
    - Public classes (not starting with _)

    Args:
        source_dir: Directory containing Python source

    Returns:
        List of API element names (e.g., ["ClassName", "function_name"])
    """
    import ast

    api_elements = []

    for py_file in source_dir.rglob("*.py"):
        try:
            source = py_file.read_text()
            tree = ast.parse(source)

            for node in ast.walk(tree):
                # Extract public functions
                if isinstance(node, ast.FunctionDef):
                    if not node.name.startswith("_"):
                        api_elements.append(node.name)

                # Extract public classes
                elif isinstance(node, ast.ClassDef):
                    if not node.name.startswith("_"):
                        api_elements.append(node.name)

        except Exception:
            # Skip files that can't be parsed
            continue

    return sorted(set(api_elements))  # Unique, sorted


def extract_documented_api_from_sphinx(docs_dir: Path) -> List[str]:
    """Extract documented API elements from Sphinx documentation.

    Parses generated Sphinx HTML or source .rst files for documented APIs.

    Args:
        docs_dir: Directory containing Sphinx documentation

    Returns:
        List of documented API element names
    """
    # Look for autodoc-generated files or .rst source
    documented = []

    # Check Sphinx build output
    build_dir = docs_dir / "_build" / "html"
    if build_dir.exists():
        # Parse HTML for documented classes/functions
        for html_file in build_dir.rglob("*.html"):
            content = html_file.read_text()
            # Simple heuristic: look for Sphinx autodoc class/function markers
            # Example: <dt class="sig sig-object py" id="ClassName">
            import re

            matches = re.findall(r'id="([a-zA-Z_][a-zA-Z0-9_]*)"', content)
            documented.extend(matches)

    return sorted(set(documented))  # Unique, sorted


def detect_version_mismatch(
    code_dir: Path, docs_dir: Path, language: str = "python"
) -> List[str]:
    """Detect API elements in code that are missing from documentation.

    Args:
        code_dir: Directory containing source code
        docs_dir: Directory containing documentation
        language: Programming language (currently only "python" supported)

    Returns:
        List of API element names present in code but missing from docs
    """
    if language == "python":
        code_api = extract_public_api_from_python(code_dir)
        docs_api = extract_documented_api_from_sphinx(docs_dir)
    else:
        # Other languages not yet supported
        return []

    missing = set(code_api) - set(docs_api)
    return sorted(missing)


@dataclass
class GapAnalysis:
    """Complete gap analysis results.

    Attributes:
        project_name: Project being analyzed
        analysis_date: When analysis was performed
        framework: Detected documentation framework
        coverage_matrix: Coverage matrix showing existing docs
        gaps: Prioritized list of documentation gaps
        outdated: List of outdated documentation files
        existing: Map of existing doc files to their classified types
    """

    project_name: str
    analysis_date: datetime
    framework: DocFramework
    coverage_matrix: CoverageMatrix
    gaps: List[DocumentationGap]
    outdated: List[Tuple[Path, str]] = field(
        default_factory=list
    )  # (file, reason)
    existing: Dict[Path, Tuple[DivioType, float]] = field(
        default_factory=dict
    )  # (type, confidence)

    def to_markdown(self) -> str:
        """Generate Markdown report of gap analysis.

        Returns:
            Full gap analysis report as Markdown
        """
        lines = [
            f"# Gap Analysis: {self.project_name}",
            "",
            f"**Analysis Date**: {self.analysis_date.strftime('%Y-%m-%d %H:%M:%S')}",
            f"**Documentation Framework**: {self.framework.value}",
            f"**Coverage**: {self.coverage_matrix.get_coverage_percentage() * 100:.1f}%",
            "",
            "## Coverage Matrix",
            "",
            self.coverage_matrix.to_markdown_table(),
            "",
            "## Identified Gaps",
            "",
        ]

        if not self.gaps:
            lines.append("No gaps identified - documentation coverage is complete!")
        else:
            lines.append(f"Found {len(self.gaps)} documentation gaps:")
            lines.append("")

        # Group by priority
        high_gaps = [g for g in self.gaps if g.priority == GapPriority.HIGH]
        medium_gaps = [g for g in self.gaps if g.priority == GapPriority.MEDIUM]
        low_gaps = [g for g in self.gaps if g.priority == GapPriority.LOW]

        if high_gaps:
            lines.append("### High Priority")
            lines.append("")
            for gap in high_gaps:
                lines.append(
                    f"- **{gap.area} → {gap.divio_type}**: {gap.reason}"
                )
            lines.append("")

        if medium_gaps:
            lines.append("### Medium Priority")
            lines.append("")
            for gap in medium_gaps:
                lines.append(
                    f"- **{gap.area} → {gap.divio_type}**: {gap.reason}"
                )
            lines.append("")

        if low_gaps:
            lines.append("### Low Priority")
            lines.append("")
            for gap in low_gaps:
                lines.append(
                    f"- **{gap.area} → {gap.divio_type}**: {gap.reason}"
                )
            lines.append("")

        # Existing documentation inventory
        lines.extend(
            [
                "## Existing Documentation",
                "",
            ]
        )

        if not self.existing:
            lines.append("No existing documentation found.")
        else:
            lines.append(f"Found {len(self.existing)} documentation files:")
            lines.append("")

        # Group by Divio type
        by_type: Dict[DivioType, List[Tuple[Path, float]]] = {}
        for path, (dtype, confidence) in self.existing.items():
            if dtype not in by_type:
                by_type[dtype] = []
            by_type[dtype].append((path, confidence))

        for dtype in DivioType:
            if dtype in by_type and dtype != DivioType.UNCLASSIFIED:
                lines.append(f"### {dtype.value.title()}")
                lines.append("")
                for path, confidence in by_type[dtype]:
                    conf_str = (
                        f"({confidence * 100:.0f}% confidence)"
                        if confidence < 1.0
                        else ""
                    )
                    lines.append(f"- {path} {conf_str}")
                lines.append("")

        # Unclassified docs
        if DivioType.UNCLASSIFIED in by_type:
            lines.append("### Unclassified")
            lines.append("")
            for path, _ in by_type[DivioType.UNCLASSIFIED]:
                lines.append(f"- {path}")
            lines.append("")

        # Outdated documentation
        if self.outdated:
            lines.extend(
                [
                    "## Outdated Documentation",
                    "",
                    f"Found {len(self.outdated)} outdated documentation files:",
                    "",
                ]
            )
            for path, reason in self.outdated:
                lines.append(f"- **{path}**: {reason}")
            lines.append("")

        # Recommendations
        lines.extend(
            [
                "## Recommendations",
                "",
            ]
        )

        if high_gaps:
            lines.append("**Immediate action needed**:")
            for gap in high_gaps[:3]:  # Top 3 high-priority gaps
                lines.append(
                    f"1. Create {gap.divio_type} for {gap.area} - {gap.reason}"
                )
            lines.append("")

        if medium_gaps:
            lines.append("**Should address soon**:")
            for gap in medium_gaps[:3]:  # Top 3 medium-priority gaps
                lines.append(f"- Add {gap.divio_type} for {gap.area}")
            lines.append("")

        if low_gaps:
            lines.append(
                f"**Nice to have**: {len(low_gaps)} low-priority gaps (see above)"
            )
            lines.append("")

        return "\n".join(lines)


def detect_project_areas(docs_dir: Path, project_root: Path) -> List[str]:
    """Detect project areas from directory structure.

    Heuristics:
    - Check docs/ subdirectories (e.g., docs/tutorials/auth/ → "auth" area)
    - Check source code directories (e.g., src/api/ → "api" area)
    - Fallback: Single area named after project

    Args:
        docs_dir: Documentation directory
        project_root: Project root directory

    Returns:
        List of project area names
    """
    areas = set()

    # Check docs subdirectories
    for item in docs_dir.iterdir():
        if item.is_dir() and item.name not in ["_build", "_static", "_templates"]:
            areas.add(item.name)

    # Check source code directories
    src_dir = project_root / "src"
    if src_dir.exists():
        for item in src_dir.iterdir():
            if item.is_dir():
                areas.add(item.name)

    # Fallback: project name as single area
    if not areas:
        areas.add(project_root.name)

    return sorted(areas)


def infer_area_from_path(doc_path: Path, project_areas: List[str]) -> Optional[str]:
    """Infer which project area a doc file belongs to.

    Args:
        doc_path: Path to documentation file
        project_areas: Known project areas

    Returns:
        Area name if match found, None otherwise
    """
    # Check if any area name appears in path
    path_str = str(doc_path).lower()
    for area in project_areas:
        if area.lower() in path_str:
            return area

    # Fallback: use first area (generic)
    return project_areas[0] if project_areas else None


def build_coverage_matrix(
    classified: Dict[Path, Tuple[DivioType, float]], project_areas: List[str]
) -> CoverageMatrix:
    """Build coverage matrix from classified documents.

    Args:
        classified: Map of doc paths to (DivioType, confidence)
        project_areas: List of project area names

    Returns:
        CoverageMatrix showing coverage by area and type
    """
    matrix = CoverageMatrix(project_areas=project_areas)

    # Map each classified doc to (area, type) cell
    for doc_path, (divio_type, _) in classified.items():
        if divio_type == DivioType.UNCLASSIFIED:
            continue

        # Infer area from path (heuristic: directory name or filename prefix)
        area = infer_area_from_path(doc_path, project_areas)
        if area:
            matrix.cells[(area, divio_type.value)] = doc_path

    return matrix


def analyze_documentation_gaps(
    docs_dir: Path, project_root: Optional[Path] = None
) -> GapAnalysis:
    """Analyze documentation directory and identify gaps.

    Args:
        docs_dir: Directory containing documentation
        project_root: Project root (for code analysis), defaults to docs_dir.parent

    Returns:
        GapAnalysis object with coverage matrix, gaps, and recommendations
    """
    if project_root is None:
        project_root = docs_dir.parent

    project_name = project_root.name

    # Detect framework
    framework = detect_doc_framework(docs_dir)

    # Discover all markdown files
    doc_files = list(docs_dir.rglob("*.md"))

    # Classify each file
    classified = {}
    for doc_file in doc_files:
        try:
            content = doc_file.read_text()
            divio_type, confidence = classify_divio_type(content)
            classified[doc_file] = (divio_type, confidence)
        except Exception:
            # Skip files that can't be read/classified
            classified[doc_file] = (DivioType.UNCLASSIFIED, 0.0)

    # Detect project areas from directory structure or code
    project_areas = detect_project_areas(docs_dir, project_root)

    # Build coverage matrix
    coverage_matrix = build_coverage_matrix(classified, project_areas)

    # Identify gaps
    gap_tuples = coverage_matrix.get_gaps()

    # Prioritize gaps
    prioritized_gaps = prioritize_gaps(gap_tuples, project_areas, classified)

    # Detect version mismatches (Python only for now)
    outdated = []
    # TODO: Implement version mismatch detection (T038)

    return GapAnalysis(
        project_name=project_name,
        analysis_date=datetime.now(),
        framework=framework,
        coverage_matrix=coverage_matrix,
        gaps=prioritized_gaps,
        outdated=outdated,
        existing=classified,
    )


def generate_gap_analysis_report(
    docs_dir: Path, output_file: Path, project_root: Optional[Path] = None
) -> GapAnalysis:
    """Analyze documentation and generate gap analysis report.

    This is the main entry point for gap analysis. It:
    1. Detects documentation framework
    2. Classifies existing docs into Divio types
    3. Builds coverage matrix
    4. Identifies gaps
    5. Prioritizes gaps by impact
    6. Detects outdated documentation
    7. Generates comprehensive report

    Args:
        docs_dir: Directory containing documentation to analyze
        output_file: Path where gap-analysis.md should be written
        project_root: Project root directory (for code analysis)

    Returns:
        GapAnalysis object with full results

    Raises:
        FileNotFoundError: If docs_dir doesn't exist
    """
    if not docs_dir.exists():
        raise FileNotFoundError(f"Documentation directory not found: {docs_dir}")

    # Run analysis
    analysis = analyze_documentation_gaps(docs_dir, project_root)

    # Generate report
    report_content = analysis.to_markdown()

    # Write to file
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_text(report_content)

    return analysis


def run_gap_analysis_for_feature(feature_dir: Path) -> GapAnalysis:
    """Run gap analysis for a documentation mission feature.

    Assumes standard paths:
    - Documentation: {project_root}/docs/
    - Output: {feature_dir}/gap-analysis.md

    Args:
        feature_dir: Feature directory (kitty-specs/###-doc-feature/)

    Returns:
        GapAnalysis results
    """
    # Find project root (walk up from feature_dir to find docs/)
    project_root = feature_dir
    while project_root != project_root.parent:
        if (project_root / "docs").exists():
            break
        project_root = project_root.parent

    docs_dir = project_root / "docs"
    output_file = feature_dir / "gap-analysis.md"

    return generate_gap_analysis_report(docs_dir, output_file, project_root)