elspais 0.9.3__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. elspais/cli.py +99 -1
  2. elspais/commands/hash_cmd.py +72 -26
  3. elspais/commands/reformat_cmd.py +458 -0
  4. elspais/commands/trace.py +157 -3
  5. elspais/commands/validate.py +44 -16
  6. elspais/core/models.py +2 -0
  7. elspais/core/parser.py +68 -24
  8. elspais/reformat/__init__.py +50 -0
  9. elspais/reformat/detector.py +119 -0
  10. elspais/reformat/hierarchy.py +246 -0
  11. elspais/reformat/line_breaks.py +220 -0
  12. elspais/reformat/prompts.py +123 -0
  13. elspais/reformat/transformer.py +264 -0
  14. elspais/sponsors/__init__.py +432 -0
  15. elspais/trace_view/__init__.py +54 -0
  16. elspais/trace_view/coverage.py +183 -0
  17. elspais/trace_view/generators/__init__.py +12 -0
  18. elspais/trace_view/generators/base.py +329 -0
  19. elspais/trace_view/generators/csv.py +122 -0
  20. elspais/trace_view/generators/markdown.py +175 -0
  21. elspais/trace_view/html/__init__.py +31 -0
  22. elspais/trace_view/html/generator.py +1006 -0
  23. elspais/trace_view/html/templates/base.html +283 -0
  24. elspais/trace_view/html/templates/components/code_viewer_modal.html +14 -0
  25. elspais/trace_view/html/templates/components/file_picker_modal.html +20 -0
  26. elspais/trace_view/html/templates/components/legend_modal.html +69 -0
  27. elspais/trace_view/html/templates/components/review_panel.html +118 -0
  28. elspais/trace_view/html/templates/partials/review/help/help-panel.json +244 -0
  29. elspais/trace_view/html/templates/partials/review/help/onboarding.json +77 -0
  30. elspais/trace_view/html/templates/partials/review/help/tooltips.json +237 -0
  31. elspais/trace_view/html/templates/partials/review/review-comments.js +928 -0
  32. elspais/trace_view/html/templates/partials/review/review-data.js +961 -0
  33. elspais/trace_view/html/templates/partials/review/review-help.js +679 -0
  34. elspais/trace_view/html/templates/partials/review/review-init.js +177 -0
  35. elspais/trace_view/html/templates/partials/review/review-line-numbers.js +429 -0
  36. elspais/trace_view/html/templates/partials/review/review-packages.js +1029 -0
  37. elspais/trace_view/html/templates/partials/review/review-position.js +540 -0
  38. elspais/trace_view/html/templates/partials/review/review-resize.js +115 -0
  39. elspais/trace_view/html/templates/partials/review/review-status.js +659 -0
  40. elspais/trace_view/html/templates/partials/review/review-sync.js +992 -0
  41. elspais/trace_view/html/templates/partials/review-styles.css +2238 -0
  42. elspais/trace_view/html/templates/partials/scripts.js +1741 -0
  43. elspais/trace_view/html/templates/partials/styles.css +1756 -0
  44. elspais/trace_view/models.py +353 -0
  45. elspais/trace_view/review/__init__.py +60 -0
  46. elspais/trace_view/review/branches.py +1149 -0
  47. elspais/trace_view/review/models.py +1205 -0
  48. elspais/trace_view/review/position.py +609 -0
  49. elspais/trace_view/review/server.py +1056 -0
  50. elspais/trace_view/review/status.py +470 -0
  51. elspais/trace_view/review/storage.py +1367 -0
  52. elspais/trace_view/scanning.py +213 -0
  53. elspais/trace_view/specs/README.md +84 -0
  54. elspais/trace_view/specs/tv-d00001-template-architecture.md +36 -0
  55. elspais/trace_view/specs/tv-d00002-css-extraction.md +37 -0
  56. elspais/trace_view/specs/tv-d00003-js-extraction.md +43 -0
  57. elspais/trace_view/specs/tv-d00004-build-embedding.md +40 -0
  58. elspais/trace_view/specs/tv-d00005-test-format.md +78 -0
  59. elspais/trace_view/specs/tv-d00010-review-data-models.md +33 -0
  60. elspais/trace_view/specs/tv-d00011-review-storage.md +33 -0
  61. elspais/trace_view/specs/tv-d00012-position-resolution.md +33 -0
  62. elspais/trace_view/specs/tv-d00013-git-branches.md +31 -0
  63. elspais/trace_view/specs/tv-d00014-review-api-server.md +31 -0
  64. elspais/trace_view/specs/tv-d00015-status-modifier.md +27 -0
  65. elspais/trace_view/specs/tv-d00016-js-integration.md +33 -0
  66. elspais/trace_view/specs/tv-p00001-html-generator.md +33 -0
  67. elspais/trace_view/specs/tv-p00002-review-system.md +29 -0
  68. {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/METADATA +33 -18
  69. elspais-0.11.0.dist-info/RECORD +101 -0
  70. elspais-0.9.3.dist-info/RECORD +0 -40
  71. {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/WHEEL +0 -0
  72. {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/entry_points.txt +0 -0
  73. {elspais-0.9.3.dist-info → elspais-0.11.0.dist-info}/licenses/LICENSE +0 -0
@@ -17,6 +17,7 @@ from elspais.core.models import ParseWarning, Requirement
17
17
  from elspais.core.parser import RequirementParser
18
18
  from elspais.core.patterns import PatternConfig
19
19
  from elspais.core.rules import RuleEngine, RulesConfig, RuleViolation, Severity
20
+ from elspais.sponsors import get_sponsor_spec_directories
20
21
  from elspais.testing.config import TestingConfig
21
22
 
22
23
 
@@ -41,6 +42,19 @@ def run(args: argparse.Namespace) -> int:
41
42
  print("Error: No spec directories found", file=sys.stderr)
42
43
  return 1
43
44
 
45
+ # Add sponsor spec directories if mode is "combined" and include_associated is enabled
46
+ mode = getattr(args, 'mode', 'combined')
47
+ include_associated = config.get('traceability', {}).get('include_associated', True)
48
+
49
+ if mode == 'combined' and include_associated:
50
+ base_path = find_project_root(spec_dirs)
51
+ sponsor_dirs = get_sponsor_spec_directories(config, base_path)
52
+ if sponsor_dirs:
53
+ spec_dirs = list(spec_dirs) + sponsor_dirs
54
+ if not args.quiet:
55
+ for sponsor_dir in sponsor_dirs:
56
+ print(f"Including sponsor specs: {sponsor_dir}")
57
+
44
58
  if not args.quiet:
45
59
  if len(spec_dirs) == 1:
46
60
  print(f"Validating requirements in: {spec_dirs[0]}")
@@ -245,8 +259,9 @@ def validate_links(
245
259
 
246
260
  # Load core requirements if this is an associated repo
247
261
  core_requirements = {}
248
- if args.core_repo:
249
- core_requirements = load_core_requirements(args.core_repo, config)
262
+ core_path = args.core_repo or config.get("core", {}).get("path")
263
+ if core_path:
264
+ core_requirements = load_requirements_from_repo(Path(core_path), config)
250
265
 
251
266
  all_requirements = {**core_requirements, **requirements}
252
267
  all_ids = set(all_requirements.keys())
@@ -311,24 +326,32 @@ def convert_parse_warnings_to_violations(
311
326
  return violations
312
327
 
313
328
 
314
- def load_core_requirements(core_path: Path, config: Dict) -> Dict[str, Requirement]:
315
- """Load requirements from core repository."""
316
- if not core_path.exists():
329
+ def load_requirements_from_repo(repo_path: Path, config: Dict) -> Dict[str, Requirement]:
330
+ """Load requirements from any repository path.
331
+
332
+ Args:
333
+ repo_path: Path to the repository root
334
+ config: Configuration dict (used as fallback if repo has no config)
335
+
336
+ Returns:
337
+ Dict mapping requirement ID to Requirement object
338
+ """
339
+ if not repo_path.exists():
317
340
  return {}
318
341
 
319
- # Find core config
320
- core_config_path = core_path / ".elspais.toml"
321
- if core_config_path.exists():
322
- core_config = load_config(core_config_path)
342
+ # Find repo config
343
+ repo_config_path = repo_path / ".elspais.toml"
344
+ if repo_config_path.exists():
345
+ repo_config = load_config(repo_config_path)
323
346
  else:
324
- core_config = config # Use same config
347
+ repo_config = config # Use same config
325
348
 
326
- spec_dir = core_path / core_config.get("directories", {}).get("spec", "spec")
349
+ spec_dir = repo_path / repo_config.get("directories", {}).get("spec", "spec")
327
350
  if not spec_dir.exists():
328
351
  return {}
329
352
 
330
- pattern_config = PatternConfig.from_dict(core_config.get("patterns", {}))
331
- spec_config = core_config.get("spec", {})
353
+ pattern_config = PatternConfig.from_dict(repo_config.get("patterns", {}))
354
+ spec_config = repo_config.get("spec", {})
332
355
  no_reference_values = spec_config.get("no_reference_values")
333
356
  parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
334
357
  skip_files = spec_config.get("skip_files", [])
@@ -368,21 +391,26 @@ def format_requirements_json(
368
391
 
369
392
  # Check for specific violation types
370
393
  is_cycle = any("cycle" in v.rule_name.lower() for v in req_violations)
371
- is_conflict = any(
394
+
395
+ # Use the model's is_conflict flag directly, or check violations for older behavior
396
+ is_conflict = req.is_conflict or any(
372
397
  "conflict" in v.rule_name.lower() or "duplicate" in v.rule_name.lower()
373
398
  for v in req_violations
374
399
  )
375
- conflict_with = None
400
+ conflict_with = req.conflict_with if req.conflict_with else None
376
401
  cycle_path = None
377
402
 
403
+ # Also check violations for additional context
378
404
  for v in req_violations:
379
- if "duplicate" in v.rule_name.lower():
405
+ if "duplicate" in v.rule_name.lower() and not conflict_with:
380
406
  # Try to extract conflicting ID from message
381
407
  conflict_with = v.message
382
408
  if "cycle" in v.rule_name.lower():
383
409
  cycle_path = v.message
384
410
 
385
411
  # Build requirement data matching hht_diary format
412
+ # Use the original ID (strip __conflict suffix) for output key
413
+ output_key = req_id.replace("__conflict", "") if req.is_conflict else req_id
386
414
  output[req_id] = {
387
415
  "title": req.title,
388
416
  "status": req.status,
elspais/core/models.py CHANGED
@@ -118,6 +118,8 @@ class Requirement:
118
118
  line_number: Optional[int] = None
119
119
  tags: List[str] = field(default_factory=list)
120
120
  subdir: str = "" # Subdirectory within spec/, e.g., "roadmap", "archive", ""
121
+ is_conflict: bool = False # True if this is a conflicting duplicate entry
122
+ conflict_with: str = "" # ID of the original requirement this conflicts with
121
123
 
122
124
  @property
123
125
  def type_code(self) -> str:
elspais/core/parser.py CHANGED
@@ -155,12 +155,12 @@ class RequirementParser:
155
155
  if req:
156
156
  # Check for duplicate ID
157
157
  if req_id in requirements:
158
- warnings.append(ParseWarning(
159
- requirement_id=req_id,
160
- message=f"Duplicate ID ignored (first occurrence at line {requirements[req_id].line_number})",
161
- file_path=file_path,
162
- line_number=start_line,
163
- ))
158
+ # Keep both: original stays, duplicate gets __conflict suffix
159
+ conflict_key, conflict_req, warning = self._make_conflict_entry(
160
+ req, req_id, requirements[req_id], file_path, start_line
161
+ )
162
+ requirements[conflict_key] = conflict_req
163
+ warnings.append(warning)
164
164
  else:
165
165
  requirements[req_id] = req
166
166
  else:
@@ -221,12 +221,12 @@ class RequirementParser:
221
221
  # Merge requirements, checking for cross-file duplicates
222
222
  for req_id, req in result.requirements.items():
223
223
  if req_id in requirements:
224
- warnings.append(ParseWarning(
225
- requirement_id=req_id,
226
- message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
227
- file_path=file_path,
228
- line_number=req.line_number,
229
- ))
224
+ # Keep both: original stays, duplicate gets __conflict suffix
225
+ conflict_key, conflict_req, warning = self._make_conflict_entry(
226
+ req, req_id, requirements[req_id], file_path, req.line_number
227
+ )
228
+ requirements[conflict_key] = conflict_req
229
+ warnings.append(warning)
230
230
  else:
231
231
  requirements[req_id] = req
232
232
  warnings.extend(result.warnings)
@@ -278,12 +278,12 @@ class RequirementParser:
278
278
  # Merge requirements, checking for cross-directory duplicates
279
279
  for req_id, req in result.requirements.items():
280
280
  if req_id in requirements:
281
- warnings.append(ParseWarning(
282
- requirement_id=req_id,
283
- message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
284
- file_path=req.file_path,
285
- line_number=req.line_number,
286
- ))
281
+ # Keep both: original stays, duplicate gets __conflict suffix
282
+ conflict_key, conflict_req, warning = self._make_conflict_entry(
283
+ req, req_id, requirements[req_id], req.file_path, req.line_number
284
+ )
285
+ requirements[conflict_key] = conflict_req
286
+ warnings.append(warning)
287
287
  else:
288
288
  requirements[req_id] = req
289
289
  warnings.extend(result.warnings)
@@ -336,18 +336,62 @@ class RequirementParser:
336
336
  # Merge requirements, checking for cross-subdir duplicates
337
337
  for req_id, req in subdir_result.requirements.items():
338
338
  if req_id in requirements:
339
- warnings.append(ParseWarning(
340
- requirement_id=req_id,
341
- message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
342
- file_path=req.file_path,
343
- line_number=req.line_number,
344
- ))
339
+ # Keep both: original stays, duplicate gets __conflict suffix
340
+ conflict_key, conflict_req, warning = self._make_conflict_entry(
341
+ req, req_id, requirements[req_id], req.file_path, req.line_number
342
+ )
343
+ requirements[conflict_key] = conflict_req
344
+ warnings.append(warning)
345
345
  else:
346
346
  requirements[req_id] = req
347
347
  warnings.extend(subdir_result.warnings)
348
348
 
349
349
  return ParseResult(requirements=requirements, warnings=warnings)
350
350
 
351
    def _make_conflict_entry(
        self,
        duplicate_req: Requirement,
        original_id: str,
        original_req: Requirement,
        file_path: Optional[Path],
        line_number: Optional[int],
    ) -> tuple:
        """
        Create a conflict entry for a duplicate requirement.

        When a requirement ID already exists, the duplicate is kept (rather
        than dropped) under a distinct storage key so downstream reporting
        can surface the conflict. The duplicate is marked with:
        - Key suffix ``__conflict`` for storage
        - ``is_conflict=True`` flag
        - ``conflict_with`` set to the original ID
        - ``implements=[]`` (treated as orphaned)

        NOTE(review): ``duplicate_req`` is mutated in place, not copied — the
        object returned is the caller's own instance. Also, if the same ID is
        duplicated more than once, every duplicate produces the same
        ``<id>__conflict`` key, so later conflicts overwrite earlier ones in
        the callers' requirement dicts — confirm this is acceptable.

        Args:
            duplicate_req: The duplicate requirement that was found
            original_id: The ID that is duplicated
            original_req: The original requirement that was seen first
            file_path: File path used for the warning
            line_number: Line number used for the warning

        Returns:
            Tuple of (conflict_key, modified_requirement, ParseWarning)
        """
        conflict_key = f"{original_id}__conflict"

        # Flag the duplicate and detach it from the hierarchy (orphaned),
        # so it never counts as implementing anything.
        duplicate_req.is_conflict = True
        duplicate_req.conflict_with = original_id
        duplicate_req.implements = []  # Treat as orphaned

        warning = ParseWarning(
            requirement_id=original_id,
            message=f"Duplicate ID found (first occurrence in {original_req.file_path}:{original_req.line_number})",
            file_path=file_path,
            line_number=line_number,
        )

        return conflict_key, duplicate_req, warning
394
+
351
395
  def _parse_requirement_block(
352
396
  self,
353
397
  req_id: str,
@@ -0,0 +1,50 @@
1
+ # Implements: REQ-int-d00008 (Reformat Command)
2
+ """
3
+ elspais.reformat - Requirement format transformation.
4
+
5
+ Transforms legacy Acceptance Criteria format to Assertions format.
6
+ Also provides line break normalization.
7
+
8
+ IMPLEMENTS REQUIREMENTS:
9
+ REQ-int-d00008: Reformat Command
10
+ """
11
+
12
+ from elspais.reformat.detector import detect_format, needs_reformatting, FormatAnalysis
13
+ from elspais.reformat.transformer import (
14
+ reformat_requirement,
15
+ assemble_new_format,
16
+ validate_reformatted_content,
17
+ )
18
+ from elspais.reformat.line_breaks import (
19
+ normalize_line_breaks,
20
+ fix_requirement_line_breaks,
21
+ detect_line_break_issues,
22
+ )
23
+ from elspais.reformat.hierarchy import (
24
+ RequirementNode,
25
+ get_all_requirements,
26
+ build_hierarchy,
27
+ traverse_top_down,
28
+ normalize_req_id,
29
+ )
30
+
31
+ __all__ = [
32
+ # Detection
33
+ "detect_format",
34
+ "needs_reformatting",
35
+ "FormatAnalysis",
36
+ # Transformation
37
+ "reformat_requirement",
38
+ "assemble_new_format",
39
+ "validate_reformatted_content",
40
+ # Line breaks
41
+ "normalize_line_breaks",
42
+ "fix_requirement_line_breaks",
43
+ "detect_line_break_issues",
44
+ # Hierarchy
45
+ "RequirementNode",
46
+ "get_all_requirements",
47
+ "build_hierarchy",
48
+ "traverse_top_down",
49
+ "normalize_req_id",
50
+ ]
@@ -0,0 +1,119 @@
1
+ # Implements: REQ-int-d00008 (Reformat Command)
2
+ """
3
+ Format detection for requirements.
4
+
5
+ Detects whether a requirement is in old format (needs reformatting)
6
+ or new format (already reformatted).
7
+ """
8
+
9
+ import re
10
+ from dataclasses import dataclass
11
+
12
+
13
@dataclass
class FormatAnalysis:
    """Outcome of requirement-format detection."""
    is_new_format: bool
    has_assertions_section: bool
    has_labeled_assertions: bool
    has_acceptance_criteria: bool
    uses_shall_language: bool
    assertion_count: int
    confidence: float  # 0.0 to 1.0


def detect_format(body: str, rationale: str = "") -> FormatAnalysis:
    """
    Classify a requirement as old format or new format.

    A requirement is considered *new format* when it has a ``## Assertions``
    section containing letter-labeled assertions (A., B., ...) with SHALL
    language, and no ``**Acceptance Criteria**:`` section. Anything else is
    treated as *old format* (candidate for reformatting).

    Args:
        body: The requirement body text
        rationale: Optional rationale text

    Returns:
        FormatAnalysis describing which indicators were found and a
        confidence score for the classification.
    """
    text = f"{body}\n{rationale}".strip()

    # '## Assertions' heading on its own line.
    has_section = re.search(r'^##\s+Assertions\s*$', text, re.MULTILINE) is not None

    # Letter-labeled lines (A., B., ...) that contain SHALL somewhere.
    labels = re.findall(
        r'^[A-Z]\.\s+.*\bSHALL\b',
        text,
        re.MULTILINE | re.IGNORECASE,
    )
    label_count = len(labels)
    has_labels = label_count >= 1

    # Legacy 'Acceptance Criteria:' marker (bold or plain).
    has_criteria = re.search(
        r'\*?\*?Acceptance\s+Criteria\*?\*?\s*:',
        text,
        re.IGNORECASE,
    ) is not None

    # Any SHALL usage at all, anywhere in the text.
    uses_shall = len(re.findall(r'\bSHALL\b', text, re.IGNORECASE)) >= 1

    # New format requires the assertion structure and no legacy section.
    new_format = has_section and has_labels and not has_criteria

    # Weighted confidence score; inverted when classified as old format.
    score = 0.0
    for indicator, weight in (
        (has_section, 0.35),
        (has_labels, 0.35),
        (not has_criteria, 0.20),
        (uses_shall, 0.10),
    ):
        if indicator:
            score += weight
    if not new_format:
        score = 1.0 - score

    return FormatAnalysis(
        is_new_format=new_format,
        has_assertions_section=has_section,
        has_labeled_assertions=has_labels,
        has_acceptance_criteria=has_criteria,
        uses_shall_language=uses_shall,
        assertion_count=label_count,
        confidence=score,
    )
105
+
106
+
107
def needs_reformatting(body: str, rationale: str = "") -> bool:
    """
    Convenience predicate: does this requirement need reformatting?

    Args:
        body: The requirement body text
        rationale: Optional rationale text

    Returns:
        True when the requirement is still in the legacy (old) format.
    """
    return not detect_format(body, rationale).is_new_format
@@ -0,0 +1,246 @@
1
+ # Implements: REQ-int-d00008 (Reformat Command)
2
+ """
3
+ Hierarchy traversal logic for requirements.
4
+
5
+ Uses elspais core modules directly to parse requirements and build
6
+ a traversable hierarchy based on implements relationships.
7
+ """
8
+
9
+ import sys
10
+ from dataclasses import dataclass, field
11
+ from pathlib import Path
12
+ from typing import Callable, Dict, List, Optional, TYPE_CHECKING
13
+
14
+ if TYPE_CHECKING:
15
+ from elspais.core.models import Requirement
16
+ from elspais.core.patterns import PatternValidator
17
+
18
+
19
@dataclass
class RequirementNode:
    """A requirement plus the hierarchy metadata needed for traversal."""
    req_id: str
    title: str
    body: str
    rationale: str
    file_path: str
    line: int
    implements: List[str]  # Parent REQ IDs
    hash: str
    status: str
    level: str
    children: List[str] = field(default_factory=list)  # Child REQ IDs

    @classmethod
    def from_core(cls, req: "Requirement") -> "RequirementNode":
        """
        Build a RequirementNode from a core Requirement object.

        Optional/absent fields on the core object (rationale, file_path,
        line_number, hash) are coerced to empty-string / 0 defaults.

        Args:
            req: Core Requirement object from elspais.core.models

        Returns:
            RequirementNode with mapped fields and an empty children list.
        """
        path_text = str(req.file_path) if req.file_path else ""
        return cls(
            req_id=req.id,
            title=req.title,
            body=req.body,
            rationale=req.rationale or "",
            file_path=path_text,
            line=req.line_number or 0,
            implements=list(req.implements),
            hash=req.hash or "",
            status=req.status,
            level=req.level,
        )
58
+
59
+
60
def get_all_requirements(
    config_path: Optional[Path] = None,
    base_path: Optional[Path] = None,
    mode: str = "combined",
) -> Dict[str, RequirementNode]:
    """
    Collect requirements via the core parser and config loader.

    Args:
        config_path: Optional path to .elspais.toml config file
        base_path: Base path for resolving relative directories
        mode: Which repos to include:
            - "combined" (default): local + core/associated repo requirements
            - "core-only": only core/associated repo requirements
            - "local-only": only local requirements

    Returns:
        Dict mapping requirement ID (e.g., 'REQ-d00027') to RequirementNode.
        Empty dict (with a warning on stderr) when no config or no
        requirements can be found.
    """
    from elspais.config.loader import load_config, find_config_file, get_spec_directories
    from elspais.core.parser import RequirementParser
    from elspais.core.patterns import PatternConfig
    from elspais.commands.validate import load_requirements_from_repo

    # Resolve the config file location when not given explicitly.
    if config_path is None:
        config_path = find_config_file(base_path or Path.cwd())
    if config_path is None:
        print("Warning: No .elspais.toml found", file=sys.stderr)
        return {}

    try:
        config = load_config(config_path)
    except Exception as exc:
        print(f"Warning: Failed to load config: {exc}", file=sys.stderr)
        return {}

    nodes: Dict[str, RequirementNode] = {}

    # Local requirements (skipped in core-only mode).
    if mode in ("combined", "local-only"):
        parser = RequirementParser(PatternConfig.from_dict(config.get("patterns", {})))
        spec_dirs = get_spec_directories(None, config, base_path or config_path.parent)
        if spec_dirs:
            try:
                parsed = parser.parse_directories(spec_dirs)
                for rid, req in parsed.requirements.items():
                    nodes[rid] = RequirementNode.from_core(req)
            except Exception as exc:
                print(f"Warning: Failed to parse local requirements: {exc}", file=sys.stderr)

    # Core/associated repo requirements (skipped in local-only mode).
    if mode in ("combined", "core-only"):
        core_path = config.get("core", {}).get("path")
        if core_path:
            for rid, req in load_requirements_from_repo(Path(core_path), config).items():
                # Local requirements win on ID collisions.
                if rid not in nodes:
                    nodes[rid] = RequirementNode.from_core(req)

    if not nodes:
        print("Warning: No requirements found", file=sys.stderr)

    return nodes
131
+
132
+
133
def build_hierarchy(requirements: Dict[str, RequirementNode]) -> Dict[str, RequirementNode]:
    """
    Populate each node's ``children`` list by inverting ``implements`` links.

    Mutates ``requirements`` in place and returns it for convenience.
    Children are sorted so traversal order is deterministic.
    """
    for child_id, child in requirements.items():
        for raw_parent in child.implements:
            # implements entries may omit the 'REQ-' prefix; normalize first.
            parent_key = raw_parent if raw_parent.startswith('REQ-') else f"REQ-{raw_parent}"
            parent = requirements.get(parent_key)
            if parent is not None:
                parent.children.append(child_id)

    for node in requirements.values():
        node.children.sort()

    return requirements
152
+
153
+
154
def traverse_top_down(
    requirements: Dict[str, RequirementNode],
    start_req: str,
    max_depth: Optional[int] = None,
    callback: Optional[Callable[[RequirementNode, int], None]] = None
) -> List[str]:
    """
    Traverse hierarchy from start_req downward using BFS.

    Args:
        requirements: All requirements with children computed
        start_req: Starting REQ ID (e.g., 'REQ-p00044')
        max_depth: Maximum depth to traverse (None = unlimited)
        callback: Function to call for each REQ visited (node, depth)

    Returns:
        List of REQ IDs in traversal order. IDs that are queued but missing
        from ``requirements`` are warned about on stderr and skipped.
    """
    # deque gives O(1) popleft; list.pop(0) made this BFS O(n^2).
    from collections import deque

    visited: List[str] = []
    queue = deque([(start_req, 0)])  # (req_id, depth)
    seen = set()

    while queue:
        req_id, depth = queue.popleft()

        if req_id in seen:
            continue

        # Depth limit check (depth 0 is the start node)
        if max_depth is not None and depth > max_depth:
            continue

        seen.add(req_id)

        if req_id not in requirements:
            print(f"Warning: {req_id} not found in requirements", file=sys.stderr)
            continue

        visited.append(req_id)
        node = requirements[req_id]

        if callback:
            callback(node, depth)

        # Enqueue children; the seen-check on dequeue handles any duplicates
        # that slip in from multiple parents.
        for child_id in node.children:
            if child_id not in seen:
                queue.append((child_id, depth + 1))

    return visited
204
+
205
+
206
def normalize_req_id(req_id: str, validator: Optional["PatternValidator"] = None) -> str:
    """
    Normalize a requirement ID to its canonical form via PatternValidator.

    Args:
        req_id: Requirement ID (e.g., "d00027", "REQ-d00027", "REQ-CAL-p00001")
        validator: PatternValidator instance; when omitted, one is built from
            the project config (falling back to defaults if no config loads)

    Returns:
        The canonical ID reconstructed from parsed components, or the input
        unchanged when it cannot be parsed.
    """
    from elspais.config.loader import load_config, find_config_file
    from elspais.core.patterns import PatternValidator, PatternConfig

    # Build a validator from config when the caller did not supply one.
    if validator is None:
        try:
            cfg_file = find_config_file(Path.cwd())
            cfg = load_config(cfg_file) if cfg_file else {}
        except Exception:
            cfg = {}
        validator = PatternValidator(PatternConfig.from_dict(cfg.get("patterns", {})))

    # First attempt: parse the ID exactly as given.
    parsed = validator.parse(req_id)

    # Second attempt: prepend the configured prefix (e.g. "REQ-").
    if parsed is None and not req_id.upper().startswith(validator.config.prefix):
        parsed = validator.parse(f"{validator.config.prefix}-{req_id}")

    if not parsed:
        # Unparseable: hand back the input untouched.
        return req_id

    # Reassemble the canonical form from the parsed components.
    pieces = [parsed.prefix]
    if parsed.associated:
        pieces.append(parsed.associated)
    pieces.append(f"{parsed.type_code}{parsed.number}")
    return "-".join(pieces)