code2docs 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- code2docs/__init__.py +32 -0
- code2docs/__main__.py +6 -0
- code2docs/analyzers/__init__.py +13 -0
- code2docs/analyzers/dependency_scanner.py +159 -0
- code2docs/analyzers/docstring_extractor.py +111 -0
- code2docs/analyzers/endpoint_detector.py +116 -0
- code2docs/analyzers/project_scanner.py +45 -0
- code2docs/cli.py +226 -0
- code2docs/config.py +158 -0
- code2docs/formatters/__init__.py +7 -0
- code2docs/formatters/badges.py +52 -0
- code2docs/formatters/markdown.py +73 -0
- code2docs/formatters/toc.py +63 -0
- code2docs/generators/__init__.py +42 -0
- code2docs/generators/api_reference_gen.py +150 -0
- code2docs/generators/architecture_gen.py +192 -0
- code2docs/generators/changelog_gen.py +121 -0
- code2docs/generators/examples_gen.py +194 -0
- code2docs/generators/module_docs_gen.py +204 -0
- code2docs/generators/readme_gen.py +229 -0
- code2docs/sync/__init__.py +6 -0
- code2docs/sync/differ.py +125 -0
- code2docs/sync/updater.py +77 -0
- code2docs/sync/watcher.py +75 -0
- code2docs/templates/api_module.md.j2 +62 -0
- code2docs/templates/architecture.md.j2 +45 -0
- code2docs/templates/example_usage.py.j2 +12 -0
- code2docs/templates/index.md.j2 +31 -0
- code2docs/templates/readme.md.j2 +85 -0
- code2docs-0.1.1.dist-info/METADATA +228 -0
- code2docs-0.1.1.dist-info/RECORD +35 -0
- code2docs-0.1.1.dist-info/WHEEL +5 -0
- code2docs-0.1.1.dist-info/entry_points.txt +2 -0
- code2docs-0.1.1.dist-info/licenses/LICENSE +201 -0
- code2docs-0.1.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
"""Per-module detailed documentation generator."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Dict, List
|
|
5
|
+
|
|
6
|
+
from code2llm.core.models import AnalysisResult, ModuleInfo, FunctionInfo, ClassInfo
|
|
7
|
+
|
|
8
|
+
from ..config import Code2DocsConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ModuleDocsGenerator:
    """Generate docs/modules/ — detailed per-module documentation.

    Produces one Markdown file per analyzed module, covering classes,
    functions, dependencies, and simple complexity metrics.
    """

    # Root names treated as standard library when partitioning a module's
    # imports in the Dependencies section.
    _STDLIB_ROOTS = frozenset({"os", "sys", "re", "json", "typing"})

    def __init__(self, config: Code2DocsConfig, result: AnalysisResult):
        self.config = config
        self.result = result

    def generate_all(self) -> Dict[str, str]:
        """Generate documentation for all modules. Returns {filename: content}."""
        files: Dict[str, str] = {}

        for mod_name, mod_info in sorted(self.result.modules.items()):
            # Dotted/path separators are not filesystem-safe in a flat dir.
            safe_name = mod_name.replace(".", "_").replace("/", "_")
            filename = f"{safe_name}.md"
            files[filename] = self._generate_module(mod_name, mod_info)

        return files

    def _generate_module(self, mod_name: str, mod_info: ModuleInfo) -> str:
        """Generate detailed Markdown documentation for a single module."""
        lines: List[str] = []

        # Header
        lines.append(f"# {mod_name}\n")

        # Source metadata (path, line count, average complexity)
        file_lines = self._count_file_lines(mod_info.file)
        avg_cc = self._calc_module_avg_cc(mod_name)
        meta_parts = [f"Source: `{mod_info.file}`"]
        if file_lines:
            meta_parts.append(f"{file_lines} lines")
        if avg_cc:
            meta_parts.append(f"CC avg: {avg_cc}")
        lines.append(f"> {' | '.join(meta_parts)}\n")

        # Overview (module docstring)
        module_doc = self._get_module_docstring(mod_info)
        if module_doc:
            lines.append("## Overview\n")
            lines.append(f"{module_doc}\n")

        # Classes
        module_classes = self._get_module_classes(mod_name)
        if module_classes:
            lines.append("## Classes\n")
            for cls_name, cls_info in module_classes.items():
                lines.append(f"### {cls_info.name}\n")
                if cls_info.bases:
                    lines.append(f"*Bases:* {', '.join(f'`{b}`' for b in cls_info.bases)}\n")
                if cls_info.docstring:
                    lines.append(f"{cls_info.docstring.strip()}\n")

                methods = self._get_class_methods(cls_info)
                if methods:
                    lines.append("#### Methods\n")
                    lines.append("| Method | Args | Returns | CC |")
                    lines.append("|--------|------|---------|----|")
                    for m in methods:
                        # Truncate long signatures to keep the table readable.
                        args = ", ".join(m.args[:4])
                        if len(m.args) > 4:
                            args += ", ..."
                        ret = m.returns or "—"
                        cc = m.complexity.get("cyclomatic", "—")
                        # Flag high-complexity methods (CC > 10).
                        warn = " ⚠️" if isinstance(cc, (int, float)) and cc > 10 else ""
                        lines.append(f"| `{m.name}` | `{args}` | `{ret}` | {cc}{warn} |")
                    lines.append("")

        # Functions (module-level only; methods are listed with their class)
        module_functions = self._get_module_functions(mod_name)
        if module_functions:
            lines.append("## Functions\n")
            for func_name, func_info in module_functions.items():
                args_str = ", ".join(func_info.args)
                ret = f" → {func_info.returns}" if func_info.returns else ""
                lines.append(f"### `{func_info.name}({args_str}){ret}`\n")
                if func_info.docstring:
                    lines.append(f"{func_info.docstring.strip()}\n")
                if func_info.calls:
                    lines.append(f"**Calls:** {', '.join(f'`{c}`' for c in func_info.calls[:10])}\n")
                if func_info.called_by:
                    lines.append(f"**Called by:** {', '.join(f'`{c}`' for c in func_info.called_by[:10])}\n")

        # Dependencies — split on the dotted-root module name. The previous
        # str.startswith prefix test misclassified third-party packages such
        # as "requests" (prefix "re") or "jsonschema" (prefix "json") as
        # standard library.
        if mod_info.imports:
            lines.append("## Dependencies\n")
            internal = [i for i in mod_info.imports if i.split(".")[0] not in self._STDLIB_ROOTS]
            external = [i for i in mod_info.imports if i.split(".")[0] in self._STDLIB_ROOTS]
            if internal:
                lines.append("**Internal imports:**")
                for imp in sorted(internal):
                    lines.append(f"- `{imp}`")
                lines.append("")
            if external:
                lines.append("**Standard library:**")
                for imp in sorted(external):
                    lines.append(f"- `{imp}`")
                lines.append("")

        # Metrics table
        metrics = self._get_module_metrics(mod_name, mod_info)
        if metrics:
            lines.append("## Metrics\n")
            lines.append("| Metric | Value |")
            lines.append("|--------|-------|")
            for k, v in metrics.items():
                lines.append(f"| {k} | {v} |")
            lines.append("")

        return "\n".join(lines)

    def _count_file_lines(self, file_path: str) -> int:
        """Count lines in the source file; 0 if missing or unreadable."""
        try:
            path = Path(file_path)
            if path.exists():
                return len(path.read_text(encoding="utf-8").splitlines())
        except (OSError, UnicodeDecodeError):
            pass
        return 0

    def _calc_module_avg_cc(self, mod_name: str) -> float:
        """Calculate average cyclomatic complexity for module functions."""
        complexities = []
        for func in self.result.functions.values():
            if func.module == mod_name:
                cc = func.complexity.get("cyclomatic", 0)
                if cc > 0:
                    complexities.append(cc)
        return round(sum(complexities) / len(complexities), 1) if complexities else 0.0

    def _get_module_docstring(self, mod_info: ModuleInfo) -> str:
        """Try to extract the module-level docstring; "" on any failure."""
        try:
            import ast
            path = Path(mod_info.file)
            if path.exists():
                tree = ast.parse(path.read_text(encoding="utf-8"))
                return ast.get_docstring(tree) or ""
        except Exception:
            # Best-effort: syntax errors / IO problems simply yield no overview.
            pass
        return ""

    def _get_module_classes(self, mod_name: str) -> Dict[str, ClassInfo]:
        """Return classes belonging to the module (by attribute or key prefix)."""
        return {
            k: v for k, v in self.result.classes.items()
            if v.module == mod_name or k.startswith(mod_name + ".")
        }

    def _get_module_functions(self, mod_name: str) -> Dict[str, FunctionInfo]:
        """Return free functions of the module (methods are excluded)."""
        return {
            k: v for k, v in self.result.functions.items()
            if (v.module == mod_name or k.startswith(mod_name + ".")) and not v.is_method
        }

    def _get_class_methods(self, cls_info: ClassInfo) -> List[FunctionInfo]:
        """Resolve a class's method names to FunctionInfo records."""
        methods = []
        for method_name in cls_info.methods:
            # Methods may be keyed bare or fully qualified; try both.
            for key in [method_name, f"{cls_info.qualified_name}.{method_name}"]:
                if key in self.result.functions:
                    methods.append(self.result.functions[key])
                    break
        return methods

    def _get_module_metrics(self, mod_name: str, mod_info: ModuleInfo) -> Dict[str, str]:
        """Collect simple size/complexity/coupling metrics for the module."""
        metrics = {}
        lines = self._count_file_lines(mod_info.file)
        if lines:
            metrics["Lines"] = str(lines)
        avg_cc = self._calc_module_avg_cc(mod_name)
        if avg_cc:
            metrics["Complexity (avg)"] = str(avg_cc)
        metrics["Functions"] = str(len(mod_info.functions))
        metrics["Classes"] = str(len(mod_info.classes))

        # Fan-in / fan-out, summed over the module's functions.
        fan_in = 0
        fan_out = 0
        for func in self.result.functions.values():
            if func.module == mod_name:
                fan_out += len(func.calls)
                fan_in += len(func.called_by)
        if fan_in:
            metrics["Fan-in"] = str(fan_in)
        if fan_out:
            metrics["Fan-out"] = str(fan_out)

        return metrics

    def write_all(self, output_dir: str, files: Dict[str, str]) -> None:
        """Write all generated module docs into output_dir (created if needed)."""
        out = Path(output_dir)
        out.mkdir(parents=True, exist_ok=True)
        for filename, content in files.items():
            (out / filename).write_text(content, encoding="utf-8")
|
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
"""README.md generator from AnalysisResult."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Dict, List, Optional
|
|
6
|
+
|
|
7
|
+
from jinja2 import Environment, PackageLoader, select_autoescape
|
|
8
|
+
|
|
9
|
+
from code2llm.core.models import AnalysisResult, FunctionInfo, ClassInfo
|
|
10
|
+
|
|
11
|
+
from ..config import Code2DocsConfig
|
|
12
|
+
from ..analyzers.dependency_scanner import DependencyScanner
|
|
13
|
+
from ..analyzers.endpoint_detector import EndpointDetector
|
|
14
|
+
from ..formatters.badges import generate_badges
|
|
15
|
+
from ..formatters.toc import generate_toc
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
MARKER_START = "<!-- code2docs:start -->"
|
|
19
|
+
MARKER_END = "<!-- code2docs:end -->"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ReadmeGenerator:
    """Generate README.md from AnalysisResult.

    Renders via the packaged Jinja2 template when possible; falls back to a
    manual Markdown builder if template rendering fails.
    """

    def __init__(self, config: Code2DocsConfig, result: AnalysisResult):
        self.config = config
        self.result = result
        # Autoescape disabled on purpose: output is Markdown, not HTML.
        self.env = Environment(
            loader=PackageLoader("code2docs", "templates"),
            autoescape=select_autoescape([]),
            trim_blocks=True,
            lstrip_blocks=True,
        )

    def generate(self) -> str:
        """Generate full README content."""
        sections = self.config.readme.sections
        project_name = self.config.project_name or Path(self.result.project_path).name

        context = self._build_context(project_name)

        try:
            template = self.env.get_template("readme.md.j2")
            return template.render(**context, sections=sections)
        except Exception:
            # Fallback: build manually if template loading/rendering fails.
            return self._build_manual(project_name, sections, context)

    def _build_context(self, project_name: str) -> Dict:
        """Build template context from the analysis result."""
        # Dependencies
        dep_scanner = DependencyScanner()
        deps = dep_scanner.scan(self.result.project_path)

        # Endpoints
        endpoint_detector = EndpointDetector()
        endpoints = endpoint_detector.detect(self.result, self.result.project_path)

        # Public API: non-private free functions; all classes are exposed.
        public_functions = {
            k: v for k, v in self.result.functions.items()
            if not v.is_private and not v.is_method
        }
        public_classes = {
            k: v for k, v in self.result.classes.items()
        }

        # Entry points
        entry_points = self.result.entry_points or []

        # Metrics
        stats = self.result.stats or {}
        avg_complexity = self._calc_avg_complexity()

        # Module tree
        module_tree = self._build_module_tree()

        return {
            "project_name": project_name,
            "project_path": self.result.project_path,
            "badges": generate_badges(project_name, self.config.readme.badges, stats, deps),
            "stats": stats,
            "avg_complexity": avg_complexity,
            "dependencies": deps,
            "endpoints": endpoints,
            "public_functions": public_functions,
            "public_classes": public_classes,
            "entry_points": entry_points,
            "module_tree": module_tree,
            "modules": self.result.modules,
            "sync_markers": self.config.readme.sync_markers,
        }

    def _calc_avg_complexity(self) -> float:
        """Calculate average cyclomatic complexity over all functions."""
        complexities = []
        for func in self.result.functions.values():
            cc = func.complexity.get("cyclomatic", 0)
            if cc > 0:
                complexities.append(cc)
        return round(sum(complexities) / len(complexities), 1) if complexities else 0.0

    def _build_module_tree(self) -> str:
        """Build a text-based module tree listing each module with counts."""
        if not self.result.modules:
            return ""

        lines = []
        sorted_modules = sorted(self.result.modules.keys())
        for mod_name in sorted_modules:
            mod = self.result.modules[mod_name]
            prefix = "📦" if mod.is_package else "📄"
            func_count = len(mod.functions)
            class_count = len(mod.classes)
            detail = []
            if func_count:
                detail.append(f"{func_count} functions")
            if class_count:
                detail.append(f"{class_count} classes")
            detail_str = f" ({', '.join(detail)})" if detail else ""
            lines.append(f"{prefix} `{mod_name}`{detail_str}")

        return "\n".join(lines)

    def _build_manual(self, project_name: str, sections: List[str], context: Dict) -> str:
        """Fallback manual README builder (no Jinja2 template required)."""
        parts: List[str] = []

        if context.get("sync_markers"):
            parts.append(MARKER_START)

        if "overview" in sections:
            parts.append(f"# {project_name}\n")
            if context.get("badges"):
                parts.append(context["badges"] + "\n")
            stats = context.get("stats", {})
            if stats:
                parts.append(
                    f"> **{stats.get('functions_found', 0)}** functions | "
                    f"**{stats.get('classes_found', 0)}** classes | "
                    f"**{stats.get('files_processed', 0)}** files | "
                    f"CC̄ = {context.get('avg_complexity', 0)}\n"
                )

        if "install" in sections:
            deps = context.get("dependencies")
            if deps and deps.install_command:
                parts.append("## Installation\n")
                parts.append(f"```bash\n{deps.install_command}\n```\n")
                if deps.python_version:
                    parts.append(f"Requires Python {deps.python_version}\n")

        if "quickstart" in sections:
            parts.append("## Quick Start\n")
            entry_points = context.get("entry_points", [])
            if entry_points:
                parts.append("```python")
                parts.append(f"# Entry points: {', '.join(entry_points[:3])}")
                parts.append("```\n")

        if "api" in sections:
            parts.append("## API Overview\n")
            # Cap the listings so huge projects don't swamp the README.
            for name, cls in list(context.get("public_classes", {}).items())[:20]:
                doc = f" — {cls.docstring.splitlines()[0]}" if cls.docstring else ""
                parts.append(f"- **`{cls.name}`**{doc}")
            parts.append("")
            for name, func in list(context.get("public_functions", {}).items())[:30]:
                args_str = ", ".join(func.args[:5])
                ret = f" → {func.returns}" if func.returns else ""
                parts.append(f"- `{func.name}({args_str}){ret}`")
            parts.append("")

        if "structure" in sections:
            tree = context.get("module_tree", "")
            if tree:
                parts.append("## Project Structure\n")
                parts.append(tree + "\n")

        if "endpoints" in sections:
            endpoints = context.get("endpoints", [])
            if endpoints:
                parts.append("## Endpoints\n")
                parts.append("| Method | Path | Function | Framework |")
                parts.append("|--------|------|----------|-----------|")
                for ep in endpoints:
                    parts.append(f"| {ep.method} | `{ep.path}` | `{ep.function_name}` | {ep.framework} |")
                parts.append("")

        if context.get("sync_markers"):
            parts.append(MARKER_END)

        return "\n".join(parts)

    def write(self, path: str, content: str) -> None:
        """Write README, respecting sync markers if the existing file has them."""
        readme_path = Path(path)

        if readme_path.exists():
            existing = readme_path.read_text(encoding="utf-8")
            if MARKER_START in existing and MARKER_END in existing:
                # Replace only the span between markers.
                pattern = re.compile(
                    re.escape(MARKER_START) + r".*?" + re.escape(MARKER_END),
                    re.DOTALL,
                )
                # Use a callable replacement: passing `content` directly as the
                # repl string would make re.sub process backslashes and \g<...>
                # escapes inside the generated README, corrupting the output or
                # raising re.error.
                content = pattern.sub(lambda _m: content, existing)

        readme_path.parent.mkdir(parents=True, exist_ok=True)
        readme_path.write_text(content, encoding="utf-8")
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def generate_readme(project_path: str = "./", output: str = "README.md",
                    sections: Optional[List[str]] = None, sync_markers: bool = True,
                    config: Optional[Code2DocsConfig] = None) -> str:
    """Analyze a project and write its README in one call.

    Convenience wrapper: scans `project_path`, renders the README with
    `ReadmeGenerator`, writes it to `output`, and returns the content.
    """
    from ..analyzers.project_scanner import ProjectScanner

    cfg = config if config is not None else Code2DocsConfig()
    if sections:
        cfg.readme.sections = sections
    cfg.readme.sync_markers = sync_markers

    analysis = ProjectScanner(cfg).analyze(project_path)

    generator = ReadmeGenerator(cfg, analysis)
    readme_text = generator.generate()
    generator.write(output, readme_text)
    return readme_text
|
code2docs/sync/differ.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
"""Detect changes in source code for selective documentation regeneration."""
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
import json
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Dict, List, Optional
|
|
8
|
+
|
|
9
|
+
from ..config import Code2DocsConfig
|
|
10
|
+
|
|
11
|
+
STATE_FILE = ".code2docs.state"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class ChangeInfo:
    """A single detected source change for selective doc regeneration."""
    module: str
    file: str
    change_type: str  # one of: added, modified, deleted
    old_hash: str = ""
    new_hash: str = ""

    def __str__(self) -> str:
        # Human-readable one-liner, e.g. "[modified] pkg.mod (pkg/mod.py)"
        return "[{}] {} ({})".format(self.change_type, self.module, self.file)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Differ:
    """Detect changes between current source and a previously saved state.

    State is a JSON map of relative file path -> short SHA-256 hash, stored
    in the project root under STATE_FILE.
    """

    def __init__(self, config: Optional[Code2DocsConfig] = None):
        self.config = config or Code2DocsConfig()

    def detect_changes(self, project_path: str) -> List[ChangeInfo]:
        """Compare current file hashes with saved state. Return list of changes."""
        project = Path(project_path).resolve()
        state_path = project / STATE_FILE

        old_state = self._load_state(state_path)
        new_state = self._compute_state(project)

        changes: List[ChangeInfo] = []

        # Detect modified and added files.
        for filepath, new_hash in new_state.items():
            old_hash = old_state.get(filepath, "")
            if not old_hash:
                changes.append(ChangeInfo(
                    module=self._file_to_module(filepath, project),
                    file=filepath,
                    change_type="added",
                    new_hash=new_hash,
                ))
            elif old_hash != new_hash:
                changes.append(ChangeInfo(
                    module=self._file_to_module(filepath, project),
                    file=filepath,
                    change_type="modified",
                    old_hash=old_hash,
                    new_hash=new_hash,
                ))

        # Detect deleted files (present in old state, gone now).
        for filepath, old_hash in old_state.items():
            if filepath not in new_state:
                changes.append(ChangeInfo(
                    module=self._file_to_module(filepath, project),
                    file=filepath,
                    change_type="deleted",
                    old_hash=old_hash,
                ))

        return changes

    def save_state(self, project_path: str) -> None:
        """Save current file hashes as the new state."""
        project = Path(project_path).resolve()
        state = self._compute_state(project)
        state_path = project / STATE_FILE
        state_path.write_text(json.dumps(state, indent=2), encoding="utf-8")

    def _load_state(self, state_path: Path) -> Dict[str, str]:
        """Load previous state from file; {} if missing or unreadable."""
        if not state_path.exists():
            return {}
        try:
            return json.loads(state_path.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError):
            return {}

    def _compute_state(self, project: Path) -> Dict[str, str]:
        """Compute short content hashes for all Python files in the project."""
        state: Dict[str, str] = {}
        ignore_patterns = self.config.sync.ignore if self.config else []

        for py_file in project.rglob("*.py"):
            rel = str(py_file.relative_to(project))

            # Skip files matching any ignore pattern (substring match).
            if any(pattern.rstrip("/") in rel for pattern in ignore_patterns):
                continue

            try:
                content = py_file.read_bytes()
                # 16 hex chars of SHA-256 is plenty for change detection.
                file_hash = hashlib.sha256(content).hexdigest()[:16]
                state[rel] = file_hash
            except OSError:
                continue

        return state

    @staticmethod
    def _file_to_module(filepath: str, project: Path) -> str:
        """Convert a file path to a dotted module name."""
        p = Path(filepath)
        if p.is_absolute():
            try:
                p = p.relative_to(project)
            except ValueError:
                pass  # outside the project root; keep the absolute parts
        parts = list(p.parts)
        if parts and parts[-1] == "__init__.py":
            # A package's module name is its directory path.
            parts = parts[:-1]
        elif parts:
            # Strip only the trailing ".py" suffix. str.replace(".py", "")
            # would also mangle interior occurrences (e.g. "a.pyx.py" -> "ax").
            name = parts[-1]
            if name.endswith(".py"):
                name = name[:-3]
            parts[-1] = name
        return ".".join(parts)
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""Selectively regenerate documentation for changed modules."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import List, Optional
|
|
5
|
+
|
|
6
|
+
from ..config import Code2DocsConfig
|
|
7
|
+
from .differ import ChangeInfo, Differ
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Updater:
    """Apply selective documentation updates based on detected changes.

    Given a list of ChangeInfo records from Differ, re-analyzes the project,
    always regenerates the README, selectively rewrites API/module docs for
    the changed modules, and finally saves a fresh state snapshot.
    """

    def __init__(self, config: Optional[Code2DocsConfig] = None):
        self.config = config or Code2DocsConfig()

    def apply(self, project_path: str, changes: List[ChangeInfo]) -> None:
        """Regenerate documentation for changed modules.

        Side effects: writes the README and doc files under the configured
        output directory, and overwrites the saved state file.
        """
        # Local imports avoid a circular dependency with the generators package.
        from ..analyzers.project_scanner import ProjectScanner
        from ..generators.readme_gen import ReadmeGenerator
        from ..generators.api_reference_gen import ApiReferenceGenerator
        from ..generators.module_docs_gen import ModuleDocsGenerator

        project = Path(project_path).resolve()

        # Re-analyze the whole project (generators need a full AnalysisResult,
        # not just the changed files).
        scanner = ProjectScanner(self.config)
        result = scanner.analyze(str(project))

        changed_modules = {c.module for c in changes}

        # Always regenerate README (it references all modules)
        readme_gen = ReadmeGenerator(self.config, result)
        readme_content = readme_gen.generate()
        readme_gen.write(str(project / self.config.readme_output), readme_content)

        # Regenerate API docs for changed modules
        if self.config.docs.api_reference:
            # Generation is cheap relative to analysis, so generate everything
            # and filter at write time.
            api_gen = ApiReferenceGenerator(self.config, result)
            all_files = api_gen.generate_all()

            # Filter to changed modules only
            docs_dir = project / self.config.output / "api"
            docs_dir.mkdir(parents=True, exist_ok=True)

            # Always regenerate index (it links every module page)
            index_content = all_files.get("index.md", "")
            if index_content:
                (docs_dir / "index.md").write_text(index_content, encoding="utf-8")

            for filename, content in all_files.items():
                if filename == "index.md":
                    continue
                # Check if this file corresponds to a changed module
                # (filenames encode module names with "." and "/" -> "_").
                for mod in changed_modules:
                    safe_mod = mod.replace(".", "_").replace("/", "_")
                    if safe_mod in filename:
                        (docs_dir / filename).write_text(content, encoding="utf-8")
                        break

        # Regenerate module docs for changed modules
        if self.config.docs.module_docs:
            mod_gen = ModuleDocsGenerator(self.config, result)
            all_files = mod_gen.generate_all()

            docs_dir = project / self.config.output / "modules"
            docs_dir.mkdir(parents=True, exist_ok=True)

            for filename, content in all_files.items():
                for mod in changed_modules:
                    safe_mod = mod.replace(".", "_").replace("/", "_")
                    if safe_mod in filename:
                        (docs_dir / filename).write_text(content, encoding="utf-8")
                        break

        # Save new state so the next diff run starts from this snapshot.
        differ = Differ(self.config)
        differ.save_state(str(project))
|