elspais 0.9.3__py3-none-any.whl → 0.11.0__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- elspais/cli.py +99 -1
- elspais/commands/hash_cmd.py +72 -26
- elspais/commands/reformat_cmd.py +458 -0
- elspais/commands/trace.py +157 -3
- elspais/commands/validate.py +44 -16
- elspais/core/models.py +2 -0
- elspais/core/parser.py +68 -24
- elspais/reformat/__init__.py +50 -0
- elspais/reformat/detector.py +119 -0
- elspais/reformat/hierarchy.py +246 -0
- elspais/reformat/line_breaks.py +220 -0
- elspais/reformat/prompts.py +123 -0
- elspais/reformat/transformer.py +264 -0
- elspais/sponsors/__init__.py +432 -0
- elspais/trace_view/__init__.py +54 -0
- elspais/trace_view/coverage.py +183 -0
- elspais/trace_view/generators/__init__.py +12 -0
- elspais/trace_view/generators/base.py +329 -0
- elspais/trace_view/generators/csv.py +122 -0
- elspais/trace_view/generators/markdown.py +175 -0
- elspais/trace_view/html/__init__.py +31 -0
- elspais/trace_view/html/generator.py +1006 -0
- elspais/trace_view/html/templates/base.html +283 -0
- elspais/trace_view/html/templates/components/code_viewer_modal.html +14 -0
- elspais/trace_view/html/templates/components/file_picker_modal.html +20 -0
- elspais/trace_view/html/templates/components/legend_modal.html +69 -0
- elspais/trace_view/html/templates/components/review_panel.html +118 -0
- elspais/trace_view/html/templates/partials/review/help/help-panel.json +244 -0
- elspais/trace_view/html/templates/partials/review/help/onboarding.json +77 -0
- elspais/trace_view/html/templates/partials/review/help/tooltips.json +237 -0
- elspais/trace_view/html/templates/partials/review/review-comments.js +928 -0
- elspais/trace_view/html/templates/partials/review/review-data.js +961 -0
- elspais/trace_view/html/templates/partials/review/review-help.js +679 -0
- elspais/trace_view/html/templates/partials/review/review-init.js +177 -0
- elspais/trace_view/html/templates/partials/review/review-line-numbers.js +429 -0
- elspais/trace_view/html/templates/partials/review/review-packages.js +1029 -0
- elspais/trace_view/html/templates/partials/review/review-position.js +540 -0
- elspais/trace_view/html/templates/partials/review/review-resize.js +115 -0
- elspais/trace_view/html/templates/partials/review/review-status.js +659 -0
- elspais/trace_view/html/templates/partials/review/review-sync.js +992 -0
- elspais/trace_view/html/templates/partials/review-styles.css +2238 -0
- elspais/trace_view/html/templates/partials/scripts.js +1741 -0
- elspais/trace_view/html/templates/partials/styles.css +1756 -0
- elspais/trace_view/models.py +353 -0
- elspais/trace_view/review/__init__.py +60 -0
- elspais/trace_view/review/branches.py +1149 -0
- elspais/trace_view/review/models.py +1205 -0
- elspais/trace_view/review/position.py +609 -0
- elspais/trace_view/review/server.py +1056 -0
- elspais/trace_view/review/status.py +470 -0
- elspais/trace_view/review/storage.py +1367 -0
- elspais/trace_view/scanning.py +213 -0
- elspais/trace_view/specs/README.md +84 -0
- elspais/trace_view/specs/tv-d00001-template-architecture.md +36 -0
- elspais/trace_view/specs/tv-d00002-css-extraction.md +37 -0
- elspais/trace_view/specs/tv-d00003-js-extraction.md +43 -0
- elspais/trace_view/specs/tv-d00004-build-embedding.md +40 -0
- elspais/trace_view/specs/tv-d00005-test-format.md +78 -0
- elspais/trace_view/specs/tv-d00010-review-data-models.md +33 -0
- elspais/trace_view/specs/tv-d00011-review-storage.md +33 -0
- elspais/trace_view/specs/tv-d00012-position-resolution.md +33 -0
- elspais/trace_view/specs/tv-d00013-git-branches.md +31 -0
- elspais/trace_view/specs/tv-d00014-review-api-server.md +31 -0
- elspais/trace_view/specs/tv-d00015-status-modifier.md +27 -0
- elspais/trace_view/specs/tv-d00016-js-integration.md +33 -0
- elspais/trace_view/specs/tv-p00001-html-generator.md +33 -0
- elspais/trace_view/specs/tv-p00002-review-system.md +29 -0
- {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/METADATA +33 -18
- elspais-0.11.0.dist-info/RECORD +101 -0
- elspais-0.9.3.dist-info/RECORD +0 -40
- {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/WHEEL +0 -0
- {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/entry_points.txt +0 -0
- {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/licenses/LICENSE +0 -0
elspais/trace_view/generators/base.py
@@ -0,0 +1,329 @@
# Implements: REQ-tv-p00001 (TraceViewGenerator)
"""
elspais.trace_view.generators.base - Base generator for trace-view.

Provides the main TraceViewGenerator class that orchestrates
requirement parsing, implementation scanning, and output generation.
"""

from pathlib import Path
from typing import Dict, List, Optional

from elspais.config.loader import find_config_file, load_config, get_spec_directories
from elspais.config.defaults import DEFAULT_CONFIG
from elspais.core.parser import RequirementParser
from elspais.core.patterns import PatternConfig
from elspais.core.git import get_git_changes, GitChangeInfo

from elspais.trace_view.models import TraceViewRequirement, GitChangeInfo as TVGitChangeInfo
from elspais.trace_view.scanning import scan_implementation_files
from elspais.trace_view.coverage import (
    calculate_coverage,
    generate_coverage_report,
    get_implementation_status,
)
from elspais.trace_view.generators.csv import generate_csv, generate_planning_csv
from elspais.trace_view.generators.markdown import generate_markdown


class TraceViewGenerator:
    """Generates traceability matrices.

    This is the main entry point for generating traceability reports.
    Supports multiple output formats: markdown, html, csv.

    Args:
        spec_dir: Path to the spec directory containing requirement files
        impl_dirs: List of directories to scan for implementation references
        sponsor: Sponsor name for sponsor-specific reports
        mode: Report mode ('core', 'sponsor', 'combined')
        repo_root: Repository root path for relative path calculation
        associated_repos: List of associated repo dicts for multi-repo scanning
        config: Optional pre-loaded configuration dict
    """

    # Version number - increment with each change
    VERSION = 17

    def __init__(
        self,
        spec_dir: Optional[Path] = None,
        impl_dirs: Optional[List[Path]] = None,
        sponsor: Optional[str] = None,
        mode: str = "core",
        repo_root: Optional[Path] = None,
        associated_repos: Optional[list] = None,
        config: Optional[dict] = None,
    ):
        self.spec_dir = spec_dir
        self.requirements: Dict[str, TraceViewRequirement] = {}
        self.impl_dirs = impl_dirs or []
        self.sponsor = sponsor
        self.mode = mode
        self.repo_root = repo_root or (spec_dir.parent if spec_dir else Path.cwd())
        self.associated_repos = associated_repos or []
        self._base_path = ""
        self._config = config
        self._git_info: Optional[TVGitChangeInfo] = None

    def generate(
        self,
        format: str = "markdown",
        output_file: Optional[Path] = None,
        embed_content: bool = False,
        edit_mode: bool = False,
        review_mode: bool = False,
        quiet: bool = False,
    ) -> str:
        """Generate traceability matrix in specified format.

        Args:
            format: Output format ('markdown', 'html', 'csv')
            output_file: Path to write output (default: traceability_matrix.{ext})
            embed_content: If True, embed full requirement content in HTML
            edit_mode: If True, include edit mode UI in HTML output
            review_mode: If True, include review mode UI in HTML output
            quiet: If True, suppress progress messages

        Returns:
            The generated content as a string
        """
        # Initialize git state
        self._init_git_state(quiet)

        # Parse requirements
        if not quiet:
            print(f"Scanning for requirements...")
        self._parse_requirements(quiet)

        if not self.requirements:
            if not quiet:
                print("Warning: No requirements found")
            return ""

        if not quiet:
            print(f"Found {len(self.requirements)} requirements")

        # Pre-detect cycles and mark affected requirements
        self._detect_and_mark_cycles(quiet)

        # Scan implementation files
        if self.impl_dirs:
            if not quiet:
                print("Scanning implementation files...")
            scan_implementation_files(
                self.requirements,
                self.impl_dirs,
                self.repo_root,
                self.mode,
                self.sponsor,
                quiet=quiet,
            )

        if not quiet:
            print(f"Generating {format.upper()} traceability matrix...")

        # Determine output path and extension
        if format == "html":
            ext = ".html"
        elif format == "csv":
            ext = ".csv"
        else:
            ext = ".md"

        if output_file is None:
            output_file = Path(f"traceability_matrix{ext}")

        # Calculate relative path for links
        self._calculate_base_path(output_file)

        # Generate content
        if format == "html":
            from elspais.trace_view.html import HTMLGenerator

            html_gen = HTMLGenerator(
                requirements=self.requirements,
                base_path=self._base_path,
                mode=self.mode,
                sponsor=self.sponsor,
                version=self.VERSION,
                repo_root=self.repo_root,
            )
            content = html_gen.generate(
                embed_content=embed_content, edit_mode=edit_mode, review_mode=review_mode
            )
        elif format == "csv":
            content = generate_csv(self.requirements)
        else:
            content = generate_markdown(self.requirements, self._base_path)

        # Write output file
        output_file.write_text(content)
        if not quiet:
            print(f"Traceability matrix written to: {output_file}")

        return content

    def _init_git_state(self, quiet: bool = False):
        """Initialize git state for requirement status detection."""
        try:
            git_changes = get_git_changes(self.repo_root)

            # Convert to trace_view GitChangeInfo
            self._git_info = TVGitChangeInfo(
                uncommitted_files=git_changes.uncommitted_files,
                untracked_files=git_changes.untracked_files,
                branch_changed_files=git_changes.branch_changed_files,
                committed_req_locations=git_changes.committed_req_locations,
            )

            # Report uncommitted changes
            if not quiet and git_changes.uncommitted_files:
                spec_uncommitted = [
                    f for f in git_changes.uncommitted_files if f.startswith("spec/")
                ]
                if spec_uncommitted:
                    print(f"Uncommitted spec files: {len(spec_uncommitted)}")

            # Report branch changes vs main
            if not quiet and git_changes.branch_changed_files:
                spec_branch = [
                    f for f in git_changes.branch_changed_files if f.startswith("spec/")
                ]
                if spec_branch:
                    print(f"Spec files changed vs main: {len(spec_branch)}")

        except Exception as e:
            # Git state is optional - continue without it
            if not quiet:
                print(f"Warning: Could not get git state: {e}")
            self._git_info = None

    def _parse_requirements(self, quiet: bool = False):
        """Parse all requirements using elspais parser directly."""
        # Load config if not provided
        if self._config is None:
            config_path = find_config_file(self.repo_root)
            if config_path and config_path.exists():
                self._config = load_config(config_path)
            else:
                self._config = DEFAULT_CONFIG

        # Get spec directories
        spec_dirs = get_spec_directories(self.spec_dir, self._config)
        if not spec_dirs:
            return

        # Parse requirements using elspais parser
        pattern_config = PatternConfig.from_dict(self._config.get("patterns", {}))
        spec_config = self._config.get("spec", {})
        no_reference_values = spec_config.get("no_reference_values")
        skip_files = spec_config.get("skip_files", [])

        parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
        parse_result = parser.parse_directories(spec_dirs, skip_files=skip_files)

        roadmap_count = 0
        conflict_count = 0
        cycle_count = 0

        for req_id, core_req in parse_result.items():
            # Wrap in TraceViewRequirement
            tv_req = TraceViewRequirement.from_core(core_req, git_info=self._git_info)

            if tv_req.is_roadmap:
                roadmap_count += 1
            if tv_req.is_conflict:
                conflict_count += 1
                if not quiet:
                    print(f" Warning: Conflict: {req_id} conflicts with {tv_req.conflict_with}")

            # Store by short ID (without REQ- prefix)
            self.requirements[tv_req.id] = tv_req

        if not quiet:
            if roadmap_count > 0:
                print(f" Found {roadmap_count} roadmap requirements")
            if conflict_count > 0:
                print(f" Found {conflict_count} conflicts")
            if cycle_count > 0:
                print(f" Found {cycle_count} requirements in dependency cycles")

    def _detect_and_mark_cycles(self, quiet: bool = False):
        """Detect and mark requirements that are part of dependency cycles."""
        # Simple cycle detection using DFS
        visited = set()
        rec_stack = set()
        cycle_members = set()

        def dfs(req_id: str, path: List[str]) -> bool:
            if req_id in rec_stack:
                # Found cycle - mark all members in the cycle path
                cycle_start = path.index(req_id)
                for member in path[cycle_start:]:
                    cycle_members.add(member)
                return True

            if req_id in visited:
                return False

            visited.add(req_id)
            rec_stack.add(req_id)

            req = self.requirements.get(req_id)
            if req:
                for parent_id in req.implements:
                    # Normalize parent_id
                    parent_id = parent_id.replace("REQ-", "")
                    if parent_id in self.requirements:
                        if dfs(parent_id, path + [req_id]):
                            cycle_members.add(req_id)

            rec_stack.remove(req_id)
            return False

        # Run DFS from each requirement
        for req_id in self.requirements:
            if req_id not in visited:
                dfs(req_id, [])

        # Clear implements for cycle members so they appear as orphaned
        cycle_count = 0
        for req_id in cycle_members:
            if req_id in self.requirements:
                req = self.requirements[req_id]
                if req.implements:
                    # Modify the underlying core requirement
                    req.core.implements = []
                    cycle_count += 1

        if not quiet and cycle_count > 0:
            print(f" Warning: {cycle_count} requirements marked as cyclic (shown as orphaned items)")

    def _calculate_base_path(self, output_file: Path):
        """Calculate relative path from output file location to repo root."""
        try:
            output_dir = output_file.resolve().parent
            repo_root = self.repo_root.resolve()

            try:
                rel_path = output_dir.relative_to(repo_root)
                depth = len(rel_path.parts)
                if depth == 0:
                    self._base_path = ""
                else:
                    self._base_path = "../" * depth
            except ValueError:
                self._base_path = f"file://{repo_root}/"
        except Exception:
            self._base_path = "../"

    def generate_planning_csv(self) -> str:
        """Generate planning CSV with actionable requirements."""
        get_status = lambda req_id: get_implementation_status(self.requirements, req_id)
        calc_coverage = lambda req_id: calculate_coverage(self.requirements, req_id)
        return generate_planning_csv(self.requirements, get_status, calc_coverage)

    def generate_coverage_report(self) -> str:
        """Generate coverage report showing implementation status."""
        return generate_coverage_report(self.requirements)
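For orientation, here is a minimal usage sketch of the generator added above. The `spec/` and `src/` directory names are illustrative assumptions, not part of the package diff.

```python
# Illustrative sketch only - the directory names are assumptions, not from the diff.
from pathlib import Path

from elspais.trace_view.generators.base import TraceViewGenerator

generator = TraceViewGenerator(
    spec_dir=Path("spec"),      # assumed location of requirement spec files
    impl_dirs=[Path("src")],    # assumed implementation directory to scan
    repo_root=Path("."),
)
# With no output_file given, this writes traceability_matrix.md and returns the content.
matrix = generator.generate(format="markdown", quiet=True)
```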
elspais/trace_view/generators/csv.py
@@ -0,0 +1,122 @@
"""
elspais.trace_view.generators.csv - CSV generation.

Provides functions to generate CSV traceability matrices and planning exports.
"""

import csv
from io import StringIO
from typing import Callable, Dict

from elspais.trace_view.models import TraceViewRequirement


def generate_csv(requirements: Dict[str, TraceViewRequirement]) -> str:
    """Generate CSV traceability matrix.

    Args:
        requirements: Dict mapping requirement ID to TraceViewRequirement

    Returns:
        CSV string with columns: Requirement ID, Title, Level, Status,
        Implements, Traced By, File, Line, Implementation Files
    """
    output = StringIO()
    writer = csv.writer(output)

    # Header
    writer.writerow(
        [
            "Requirement ID",
            "Title",
            "Level",
            "Status",
            "Implements",
            "Traced By",
            "File",
            "Line",
            "Implementation Files",
        ]
    )

    # Sort requirements by ID
    sorted_reqs = sorted(requirements.values(), key=lambda r: r.id)

    for req in sorted_reqs:
        # Compute children (traced by) dynamically
        children = [r.id for r in requirements.values() if req.id in r.implements]

        # Format implementation files as "file:line" strings
        impl_files_str = (
            ", ".join([f"{path}:{line}" for path, line in req.implementation_files])
            if req.implementation_files
            else "-"
        )

        writer.writerow(
            [
                req.id,
                req.title,
                req.level,
                req.status,
                ", ".join(req.implements) if req.implements else "-",
                ", ".join(sorted(children)) if children else "-",
                req.display_filename,
                req.line_number,
                impl_files_str,
            ]
        )

    return output.getvalue()


def generate_planning_csv(
    requirements: Dict[str, TraceViewRequirement],
    get_implementation_status: Callable[[str], str],
    calculate_coverage: Callable[[str], dict],
) -> str:
    """Generate CSV for sprint planning (actionable items only).

    Args:
        requirements: Dict mapping requirement ID to TraceViewRequirement
        get_implementation_status: Function that takes req_id and returns status string
        calculate_coverage: Function that takes req_id and returns coverage dict

    Returns:
        CSV with columns: REQ ID, Title, Level, Status, Impl Status, Coverage, Code Refs
        Includes only actionable items (Active or Draft status, not deprecated)
    """
    output = StringIO()
    writer = csv.writer(output)

    # Header
    writer.writerow(
        ["REQ ID", "Title", "Level", "Status", "Impl Status", "Coverage", "Code Refs"]
    )

    # Filter to actionable requirements (Active or Draft status)
    actionable_reqs = [
        req for req in requirements.values() if req.status in ["Active", "Draft"]
    ]

    # Sort by ID
    actionable_reqs.sort(key=lambda r: r.id)

    for req in actionable_reqs:
        impl_status = get_implementation_status(req.id)
        coverage = calculate_coverage(req.id)
        code_refs = len(req.implementation_files)

        writer.writerow(
            [
                req.id,
                req.title,
                req.level,
                req.status,
                impl_status,
                f"{coverage['traced']}/{coverage['children']}",
                code_refs,
            ]
        )

    return output.getvalue()
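As a reading aid: `generate_planning_csv` takes the two coverage helpers as callables bound to a shared requirements dict. The sketch below mirrors how `TraceViewGenerator.generate_planning_csv` in base.py (above) wires them together; the wrapper function name is just illustrative.

```python
# Sketch mirroring TraceViewGenerator.generate_planning_csv in base.py above.
from typing import Dict

from elspais.trace_view.coverage import calculate_coverage, get_implementation_status
from elspais.trace_view.generators.csv import generate_planning_csv
from elspais.trace_view.models import TraceViewRequirement


def planning_csv(requirements: Dict[str, TraceViewRequirement]) -> str:
    # Bind the shared requirements dict into the per-requirement callables.
    get_status = lambda req_id: get_implementation_status(requirements, req_id)
    calc_coverage = lambda req_id: calculate_coverage(requirements, req_id)
    return generate_planning_csv(requirements, get_status, calc_coverage)
```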
elspais/trace_view/generators/markdown.py
@@ -0,0 +1,175 @@
"""
elspais.trace_view.generators.markdown - Markdown generation.

Provides functions to generate markdown traceability matrices.
"""

import sys
from datetime import datetime
from typing import Dict, List, Optional

from elspais.trace_view.models import TraceViewRequirement
from elspais.trace_view.coverage import count_by_level, find_orphaned_requirements


def generate_legend_markdown() -> str:
    """Generate markdown legend section.

    Returns:
        Markdown string with legend explaining symbols
    """
    return """## Legend

**Requirement Status:**
- Active requirement
- Draft requirement
- Deprecated requirement

**Traceability:**
- Has implementation file(s)
- No implementation found

**Interactive (HTML only):**
- Expandable (has child requirements)
- Collapsed (click to expand)
"""


def generate_markdown(
    requirements: Dict[str, TraceViewRequirement],
    base_path: str = "",
) -> str:
    """Generate markdown traceability matrix.

    Args:
        requirements: Dict mapping requirement ID to TraceViewRequirement
        base_path: Base path for links (e.g., '../' for files in subdirectory)

    Returns:
        Complete markdown traceability matrix
    """
    lines = []
    lines.append("# Requirements Traceability Matrix")
    lines.append(f"\n**Generated**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    lines.append(f"**Total Requirements**: {len(requirements)}\n")

    # Summary by level (using active counts, excluding deprecated)
    by_level = count_by_level(requirements)
    lines.append("## Summary\n")
    lines.append(f"- **PRD Requirements**: {by_level['active']['PRD']}")
    lines.append(f"- **OPS Requirements**: {by_level['active']['OPS']}")
    lines.append(f"- **DEV Requirements**: {by_level['active']['DEV']}\n")

    # Add legend
    lines.append(generate_legend_markdown())

    # Full traceability tree
    lines.append("## Traceability Tree\n")

    # Start with top-level PRD requirements
    prd_reqs = [req for req in requirements.values() if req.level == "PRD"]
    prd_reqs.sort(key=lambda r: r.id)

    for prd_req in prd_reqs:
        lines.append(
            format_req_tree_md(
                prd_req, requirements, indent=0, ancestor_path=[], base_path=base_path
            )
        )

    # Orphaned ops/dev requirements
    orphaned = find_orphaned_requirements(requirements)
    if orphaned:
        lines.append("\n## Orphaned Requirements\n")
        lines.append("*(Requirements not linked from any parent)*\n")
        for req in orphaned:
            lines.append(
                f"- **REQ-{req.id}**: {req.title} ({req.level}) - {req.display_filename}"
            )

    return "\n".join(lines)


def format_req_tree_md(
    req: TraceViewRequirement,
    requirements: Dict[str, TraceViewRequirement],
    indent: int,
    ancestor_path: Optional[List[str]] = None,
    base_path: str = "",
) -> str:
    """Format requirement and its children as markdown tree.

    Args:
        req: The requirement to format
        requirements: Dict mapping requirement ID to TraceViewRequirement
        indent: Current indentation level
        ancestor_path: List of requirement IDs in the current traversal path (for cycle detection)
        base_path: Base path for links

    Returns:
        Formatted markdown string
    """
    if ancestor_path is None:
        ancestor_path = []

    # Cycle detection: check if this requirement is already in our traversal path
    if req.id in ancestor_path:
        cycle_path = ancestor_path + [req.id]
        cycle_str = " -> ".join([f"REQ-{rid}" for rid in cycle_path])
        print(f"Warning: CYCLE DETECTED: {cycle_str}", file=sys.stderr)
        return (
            " " * indent
            + f"- **CYCLE DETECTED**: REQ-{req.id} (path: {cycle_str})"
        )

    # Safety depth limit
    MAX_DEPTH = 50
    if indent > MAX_DEPTH:
        print(f"Warning: MAX DEPTH ({MAX_DEPTH}) exceeded at REQ-{req.id}", file=sys.stderr)
        return " " * indent + f"- **MAX DEPTH EXCEEDED**: REQ-{req.id}"

    lines = []
    prefix = " " * indent

    # Format current requirement
    status_indicator = {
        "Active": "[Active]",
        "Draft": "[Draft]",
        "Deprecated": "[Deprecated]",
    }
    indicator = status_indicator.get(req.status, "[?]")

    # Create link to source file with REQ anchor
    if req.external_spec_path:
        req_link = f"[REQ-{req.id}](file://{req.external_spec_path}#REQ-{req.id})"
    else:
        spec_subpath = "spec/roadmap" if req.is_roadmap else "spec"
        req_link = f"[REQ-{req.id}]({base_path}{spec_subpath}/{req.file_path.name}#REQ-{req.id})"

    lines.append(
        f"{prefix}- {indicator} **{req_link}**: {req.title}\n"
        f"{prefix} - Level: {req.level} | Status: {req.status}\n"
        f"{prefix} - File: {req.display_filename}:{req.line_number}"
    )

    # Format implementation files as nested list with clickable links
    if req.implementation_files:
        lines.append(f"{prefix} - **Implemented in**:")
        for file_path, line_num in req.implementation_files:
            # Create markdown link to file with line number anchor
            link = f"[{file_path}:{line_num}]({base_path}{file_path}#L{line_num})"
            lines.append(f"{prefix} - {link}")

    # Find and format children
    children = [r for r in requirements.values() if req.id in r.implements]
    children.sort(key=lambda r: r.id)

    if children:
        # Add current req to path before recursing into children
        current_path = ancestor_path + [req.id]
        for child in children:
            lines.append(
                format_req_tree_md(child, requirements, indent + 1, current_path, base_path)
            )

    return "\n".join(lines)
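A short sketch of driving the markdown generator directly, assuming a requirements dict produced elsewhere (for example by TraceViewGenerator above). The wrapper function, output path, and `base_path` value are illustrative assumptions.

```python
# Illustrative sketch - the wrapper name, output path, and base_path are assumptions.
from pathlib import Path
from typing import Dict

from elspais.trace_view.generators.markdown import generate_markdown
from elspais.trace_view.models import TraceViewRequirement


def write_matrix(requirements: Dict[str, TraceViewRequirement], out: Path) -> None:
    # base_path="../" assumes the matrix is written one directory below the repo root,
    # matching how TraceViewGenerator._calculate_base_path derives the prefix.
    out.write_text(generate_markdown(requirements, base_path="../"))
```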
elspais/trace_view/html/__init__.py
@@ -0,0 +1,31 @@
# Implements: REQ-int-d00002-B (HTML generation requires jinja2)
"""
elspais.trace_view.html - Interactive HTML generation.

Requires: pip install elspais[trace-view]
"""

def _check_jinja2():
    try:
        import jinja2  # noqa: F401
        return True
    except ImportError:
        return False


JINJA2_AVAILABLE = _check_jinja2()

if JINJA2_AVAILABLE:
    from elspais.trace_view.html.generator import HTMLGenerator
    __all__ = ["HTMLGenerator", "JINJA2_AVAILABLE"]
else:
    __all__ = ["JINJA2_AVAILABLE"]

    class HTMLGenerator:
        """Placeholder when jinja2 is not installed."""

        def __init__(self, *args, **kwargs):
            raise ImportError(
                "HTMLGenerator requires Jinja2. "
                "Install with: pip install elspais[trace-view]"
            )
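Since the real HTMLGenerator is only importable when the jinja2 extra is installed (otherwise the placeholder above raises ImportError on construction), callers can branch on the exported flag. A minimal sketch; the fallback choice here is an assumption, not prescribed by the package:

```python
# Minimal sketch: fall back to markdown output when the optional jinja2 extra is absent.
from elspais.trace_view.html import JINJA2_AVAILABLE

output_format = "html" if JINJA2_AVAILABLE else "markdown"
# e.g. TraceViewGenerator(...).generate(format=output_format)
```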