elspais 0.9.1__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. elspais/cli.py +123 -1
  2. elspais/commands/changed.py +160 -0
  3. elspais/commands/hash_cmd.py +72 -26
  4. elspais/commands/reformat_cmd.py +458 -0
  5. elspais/commands/trace.py +157 -3
  6. elspais/commands/validate.py +81 -18
  7. elspais/core/git.py +352 -0
  8. elspais/core/models.py +2 -0
  9. elspais/core/parser.py +68 -24
  10. elspais/reformat/__init__.py +50 -0
  11. elspais/reformat/detector.py +119 -0
  12. elspais/reformat/hierarchy.py +246 -0
  13. elspais/reformat/line_breaks.py +220 -0
  14. elspais/reformat/prompts.py +123 -0
  15. elspais/reformat/transformer.py +264 -0
  16. elspais/sponsors/__init__.py +432 -0
  17. elspais/trace_view/__init__.py +54 -0
  18. elspais/trace_view/coverage.py +183 -0
  19. elspais/trace_view/generators/__init__.py +12 -0
  20. elspais/trace_view/generators/base.py +329 -0
  21. elspais/trace_view/generators/csv.py +122 -0
  22. elspais/trace_view/generators/markdown.py +175 -0
  23. elspais/trace_view/html/__init__.py +31 -0
  24. elspais/trace_view/html/generator.py +1006 -0
  25. elspais/trace_view/html/templates/base.html +283 -0
  26. elspais/trace_view/html/templates/components/code_viewer_modal.html +14 -0
  27. elspais/trace_view/html/templates/components/file_picker_modal.html +20 -0
  28. elspais/trace_view/html/templates/components/legend_modal.html +69 -0
  29. elspais/trace_view/html/templates/components/review_panel.html +118 -0
  30. elspais/trace_view/html/templates/partials/review/help/help-panel.json +244 -0
  31. elspais/trace_view/html/templates/partials/review/help/onboarding.json +77 -0
  32. elspais/trace_view/html/templates/partials/review/help/tooltips.json +237 -0
  33. elspais/trace_view/html/templates/partials/review/review-comments.js +928 -0
  34. elspais/trace_view/html/templates/partials/review/review-data.js +961 -0
  35. elspais/trace_view/html/templates/partials/review/review-help.js +679 -0
  36. elspais/trace_view/html/templates/partials/review/review-init.js +177 -0
  37. elspais/trace_view/html/templates/partials/review/review-line-numbers.js +429 -0
  38. elspais/trace_view/html/templates/partials/review/review-packages.js +1029 -0
  39. elspais/trace_view/html/templates/partials/review/review-position.js +540 -0
  40. elspais/trace_view/html/templates/partials/review/review-resize.js +115 -0
  41. elspais/trace_view/html/templates/partials/review/review-status.js +659 -0
  42. elspais/trace_view/html/templates/partials/review/review-sync.js +992 -0
  43. elspais/trace_view/html/templates/partials/review-styles.css +2238 -0
  44. elspais/trace_view/html/templates/partials/scripts.js +1741 -0
  45. elspais/trace_view/html/templates/partials/styles.css +1756 -0
  46. elspais/trace_view/models.py +353 -0
  47. elspais/trace_view/review/__init__.py +60 -0
  48. elspais/trace_view/review/branches.py +1149 -0
  49. elspais/trace_view/review/models.py +1205 -0
  50. elspais/trace_view/review/position.py +609 -0
  51. elspais/trace_view/review/server.py +1056 -0
  52. elspais/trace_view/review/status.py +470 -0
  53. elspais/trace_view/review/storage.py +1367 -0
  54. elspais/trace_view/scanning.py +213 -0
  55. elspais/trace_view/specs/README.md +84 -0
  56. elspais/trace_view/specs/tv-d00001-template-architecture.md +36 -0
  57. elspais/trace_view/specs/tv-d00002-css-extraction.md +37 -0
  58. elspais/trace_view/specs/tv-d00003-js-extraction.md +43 -0
  59. elspais/trace_view/specs/tv-d00004-build-embedding.md +40 -0
  60. elspais/trace_view/specs/tv-d00005-test-format.md +78 -0
  61. elspais/trace_view/specs/tv-d00010-review-data-models.md +33 -0
  62. elspais/trace_view/specs/tv-d00011-review-storage.md +33 -0
  63. elspais/trace_view/specs/tv-d00012-position-resolution.md +33 -0
  64. elspais/trace_view/specs/tv-d00013-git-branches.md +31 -0
  65. elspais/trace_view/specs/tv-d00014-review-api-server.md +31 -0
  66. elspais/trace_view/specs/tv-d00015-status-modifier.md +27 -0
  67. elspais/trace_view/specs/tv-d00016-js-integration.md +33 -0
  68. elspais/trace_view/specs/tv-p00001-html-generator.md +33 -0
  69. elspais/trace_view/specs/tv-p00002-review-system.md +29 -0
  70. {elspais-0.9.1.dist-info → elspais-0.11.0.dist-info}/METADATA +78 -26
  71. elspais-0.11.0.dist-info/RECORD +101 -0
  72. elspais-0.9.1.dist-info/RECORD +0 -38
  73. {elspais-0.9.1.dist-info → elspais-0.11.0.dist-info}/WHEEL +0 -0
  74. {elspais-0.9.1.dist-info → elspais-0.11.0.dist-info}/entry_points.txt +0 -0
  75. {elspais-0.9.1.dist-info → elspais-0.11.0.dist-info}/licenses/LICENSE +0 -0
elspais/commands/validate.py CHANGED
@@ -13,10 +13,11 @@ from typing import Any, Dict, List, Optional
  from elspais.config.defaults import DEFAULT_CONFIG
  from elspais.config.loader import find_config_file, get_spec_directories, load_config
  from elspais.core.hasher import calculate_hash, verify_hash
- from elspais.core.models import Requirement
+ from elspais.core.models import ParseWarning, Requirement
  from elspais.core.parser import RequirementParser
  from elspais.core.patterns import PatternConfig
  from elspais.core.rules import RuleEngine, RulesConfig, RuleViolation, Severity
+ from elspais.sponsors import get_sponsor_spec_directories
  from elspais.testing.config import TestingConfig


@@ -41,6 +42,19 @@ def run(args: argparse.Namespace) -> int:
          print("Error: No spec directories found", file=sys.stderr)
          return 1

+     # Add sponsor spec directories if mode is "combined" and include_associated is enabled
+     mode = getattr(args, 'mode', 'combined')
+     include_associated = config.get('traceability', {}).get('include_associated', True)
+
+     if mode == 'combined' and include_associated:
+         base_path = find_project_root(spec_dirs)
+         sponsor_dirs = get_sponsor_spec_directories(config, base_path)
+         if sponsor_dirs:
+             spec_dirs = list(spec_dirs) + sponsor_dirs
+             if not args.quiet:
+                 for sponsor_dir in sponsor_dirs:
+                     print(f"Including sponsor specs: {sponsor_dir}")
+
      if not args.quiet:
          if len(spec_dirs) == 1:
              print(f"Validating requirements in: {spec_dirs[0]}")
@@ -55,7 +69,8 @@ def run(args: argparse.Namespace) -> int:
      skip_files = spec_config.get("skip_files", [])

      try:
-         requirements = parser.parse_directories(spec_dirs, skip_files=skip_files)
+         parse_result = parser.parse_directories(spec_dirs, skip_files=skip_files)
+         requirements = dict(parse_result)  # ParseResult supports dict-like access
      except Exception as e:
          print(f"Error parsing requirements: {e}", file=sys.stderr)
          return 1
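
The dict(parse_result) call implies ParseResult exposes a mapping interface. A sketch of one way that could work, assuming ParseResult wraps the requirements dict it is constructed with in parser.py (the real class may differ):

    from dataclasses import dataclass, field
    from typing import Dict, List

    @dataclass
    class ParseResult:
        requirements: Dict[str, object] = field(default_factory=dict)
        warnings: List[object] = field(default_factory=list)

        # keys() + __getitem__ is enough for dict() to treat this as a mapping
        def keys(self):
            return self.requirements.keys()

        def __getitem__(self, key):
            return self.requirements[key]

    result = ParseResult(requirements={"d00001": "req"})
    print(dict(result))  # {'d00001': 'req'}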
@@ -81,6 +96,10 @@ def run(args: argparse.Namespace) -> int:
      link_violations = validate_links(requirements, args, config)
      violations.extend(link_violations)

+     # Add parser warnings (duplicates, etc.) as violations
+     parse_violations = convert_parse_warnings_to_violations(parse_result.warnings)
+     violations.extend(parse_violations)
+
      # Filter skipped rules
      if args.skip_rule:
          violations = [
@@ -240,8 +259,9 @@ def validate_links(

      # Load core requirements if this is an associated repo
      core_requirements = {}
-     if args.core_repo:
-         core_requirements = load_core_requirements(args.core_repo, config)
+     core_path = args.core_repo or config.get("core", {}).get("path")
+     if core_path:
+         core_requirements = load_requirements_from_repo(Path(core_path), config)

      all_requirements = {**core_requirements, **requirements}
      all_ids = set(all_requirements.keys())
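
The --core-repo flag now falls back to a core.path config entry. A small sketch of the fallback, assuming the loaded config mirrors a [core] table in .elspais.toml (the path value is invented):

    config = {"core": {"path": "../core-repo"}}  # hypothetical [core] table
    args_core_repo = None  # --core-repo not passed on the CLI

    core_path = args_core_repo or config.get("core", {}).get("path")
    print(core_path)  # ../core-repo; falsy only if neither source sets it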
@@ -276,24 +296,62 @@ def validate_links(
      return violations


- def load_core_requirements(core_path: Path, config: Dict) -> Dict[str, Requirement]:
-     """Load requirements from core repository."""
-     if not core_path.exists():
+ def convert_parse_warnings_to_violations(
+     warnings: List[ParseWarning],
+ ) -> List[RuleViolation]:
+     """Convert parser warnings (like duplicates) to rule violations.
+
+     The parser detects duplicate REQ IDs and generates ParseWarning objects.
+     This function converts them to RuleViolation objects so they appear in
+     validation output.
+
+     Args:
+         warnings: List of ParseWarning objects from parser
+
+     Returns:
+         List of RuleViolation objects for duplicate IDs
+     """
+     violations = []
+     for warning in warnings:
+         if "duplicate" in warning.message.lower():
+             violations.append(
+                 RuleViolation(
+                     rule_name="id.duplicate",
+                     requirement_id=warning.requirement_id,
+                     message=warning.message,
+                     severity=Severity.ERROR,
+                     location=f"{warning.file_path}:{warning.line_number}",
+                 )
+             )
+     return violations
+
+
+ def load_requirements_from_repo(repo_path: Path, config: Dict) -> Dict[str, Requirement]:
+     """Load requirements from any repository path.
+
+     Args:
+         repo_path: Path to the repository root
+         config: Configuration dict (used as fallback if repo has no config)
+
+     Returns:
+         Dict mapping requirement ID to Requirement object
+     """
+     if not repo_path.exists():
          return {}

-     # Find core config
-     core_config_path = core_path / ".elspais.toml"
-     if core_config_path.exists():
-         core_config = load_config(core_config_path)
+     # Find repo config
+     repo_config_path = repo_path / ".elspais.toml"
+     if repo_config_path.exists():
+         repo_config = load_config(repo_config_path)
      else:
-         core_config = config  # Use same config
+         repo_config = config  # Use same config

-     spec_dir = core_path / core_config.get("directories", {}).get("spec", "spec")
+     spec_dir = repo_path / repo_config.get("directories", {}).get("spec", "spec")
      if not spec_dir.exists():
          return {}

-     pattern_config = PatternConfig.from_dict(core_config.get("patterns", {}))
-     spec_config = core_config.get("spec", {})
+     pattern_config = PatternConfig.from_dict(repo_config.get("patterns", {}))
+     spec_config = repo_config.get("spec", {})
      no_reference_values = spec_config.get("no_reference_values")
      parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
      skip_files = spec_config.get("skip_files", [])
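
A usage sketch for convert_parse_warnings_to_violations; the import paths and field names follow this diff, while the concrete values are invented:

    from elspais.core.models import ParseWarning
    from elspais.commands.validate import convert_parse_warnings_to_violations

    warning = ParseWarning(
        requirement_id="d00001",
        message="Duplicate ID found (first occurrence in spec/core.md:12)",
        file_path="spec/extra.md",
        line_number=40,
    )
    violations = convert_parse_warnings_to_violations([warning])
    # One RuleViolation: rule_name="id.duplicate", severity=Severity.ERROR,
    # location="spec/extra.md:40". Warnings whose message does not contain
    # "duplicate" are filtered out.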
@@ -333,21 +391,26 @@ def format_requirements_json(

          # Check for specific violation types
          is_cycle = any("cycle" in v.rule_name.lower() for v in req_violations)
-         is_conflict = any(
+
+         # Use the model's is_conflict flag directly, or check violations for older behavior
+         is_conflict = req.is_conflict or any(
              "conflict" in v.rule_name.lower() or "duplicate" in v.rule_name.lower()
              for v in req_violations
          )
-         conflict_with = None
+         conflict_with = req.conflict_with if req.conflict_with else None
          cycle_path = None

+         # Also check violations for additional context
          for v in req_violations:
-             if "duplicate" in v.rule_name.lower():
+             if "duplicate" in v.rule_name.lower() and not conflict_with:
                  # Try to extract conflicting ID from message
                  conflict_with = v.message
              if "cycle" in v.rule_name.lower():
                  cycle_path = v.message

          # Build requirement data matching hht_diary format
+         # Use the original ID (strip __conflict suffix) for output key
+         output_key = req_id.replace("__conflict", "") if req.is_conflict else req_id
          output[req_id] = {
              "title": req.title,
              "status": req.status,
elspais/core/git.py ADDED
@@ -0,0 +1,352 @@
+ """
+ Git state management for elspais.
+
+ Provides functions to query git status and detect changes to requirement files,
+ enabling detection of:
+ - Uncommitted changes to spec files
+ - New (untracked) requirement files
+ - Files changed vs main/master branch
+ - Moved requirements (comparing current location to committed state)
+ """
+
+ import re
+ import subprocess
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Dict, List, Optional, Set, Tuple
+
+
+ @dataclass
+ class GitChangeInfo:
+     """Information about git changes to requirement files."""
+
+     modified_files: Set[str] = field(default_factory=set)
+     """Files with uncommitted modifications (staged or unstaged)."""
+
+     untracked_files: Set[str] = field(default_factory=set)
+     """New files not yet tracked by git."""
+
+     branch_changed_files: Set[str] = field(default_factory=set)
+     """Files changed between current branch and main/master."""
+
+     committed_req_locations: Dict[str, str] = field(default_factory=dict)
+     """REQ ID -> file path mapping from committed state (HEAD)."""
+
+     @property
+     def all_changed_files(self) -> Set[str]:
+         """Get all files with any kind of change."""
+         return self.modified_files | self.untracked_files | self.branch_changed_files
+
+     @property
+     def uncommitted_files(self) -> Set[str]:
+         """Get all files with uncommitted changes (modified or untracked)."""
+         return self.modified_files | self.untracked_files
+
+
+ @dataclass
+ class MovedRequirement:
+     """Information about a requirement that was moved between files."""
+
+     req_id: str
+     """The requirement ID (e.g., 'd00001')."""
+
+     old_path: str
+     """Path in the committed state."""
+
+     new_path: str
+     """Path in the current working directory."""
+
+
+ def get_repo_root(start_path: Optional[Path] = None) -> Optional[Path]:
+     """Find the git repository root.
+
+     Args:
+         start_path: Path to start searching from (default: current directory)
+
+     Returns:
+         Path to repository root, or None if not in a git repository
+     """
+     try:
+         result = subprocess.run(
+             ["git", "rev-parse", "--show-toplevel"],
+             cwd=start_path or Path.cwd(),
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+         return Path(result.stdout.strip())
+     except (subprocess.CalledProcessError, FileNotFoundError):
+         return None
+
+
+ def get_modified_files(repo_root: Path) -> Tuple[Set[str], Set[str]]:
+     """Get sets of modified and untracked files according to git status.
+
+     Args:
+         repo_root: Path to repository root
+
+     Returns:
+         Tuple of (modified_files, untracked_files):
+         - modified_files: Tracked files with changes (M, A, R, etc.)
+         - untracked_files: New files not yet tracked (??)
+     """
+     try:
+         result = subprocess.run(
+             ["git", "status", "--porcelain", "--untracked-files=all"],
+             cwd=repo_root,
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+         modified_files: Set[str] = set()
+         untracked_files: Set[str] = set()
+
+         for line in result.stdout.split("\n"):
+             if line and len(line) >= 3:
+                 # Format: "XY filename" or "XY orig -> renamed"
+                 # XY = two-letter status (e.g., " M", "??", "A ", "R ")
+                 status_code = line[:2]
+                 file_path = line[3:].strip()
+
+                 # Handle renames: "orig -> new"
+                 if " -> " in file_path:
+                     file_path = file_path.split(" -> ")[1]
+
+                 if file_path:
+                     if status_code == "??":
+                         untracked_files.add(file_path)
+                     else:
+                         modified_files.add(file_path)
+
+         return modified_files, untracked_files
+     except (subprocess.CalledProcessError, FileNotFoundError):
+         return set(), set()
+
+
+ def get_changed_vs_branch(
+     repo_root: Path, base_branch: str = "main"
+ ) -> Set[str]:
+     """Get set of files changed between current branch and base branch.
+
+     Args:
+         repo_root: Path to repository root
+         base_branch: Name of base branch (default: 'main')
+
+     Returns:
+         Set of file paths changed vs base branch
+     """
+     # Try local branch first, then remote
+     for branch_ref in [base_branch, f"origin/{base_branch}"]:
+         try:
+             result = subprocess.run(
+                 ["git", "diff", "--name-only", f"{branch_ref}...HEAD"],
+                 cwd=repo_root,
+                 capture_output=True,
+                 text=True,
+                 check=True,
+             )
+             changed_files: Set[str] = set()
+             for line in result.stdout.split("\n"):
+                 if line.strip():
+                     changed_files.add(line.strip())
+             return changed_files
+         except subprocess.CalledProcessError:
+             continue
+         except FileNotFoundError:
+             return set()
+
+     return set()
+
+
+ def get_committed_req_locations(
+     repo_root: Path,
+     spec_dir: str = "spec",
+     exclude_files: Optional[List[str]] = None,
+ ) -> Dict[str, str]:
+     """Get REQ ID -> file path mapping from committed state (HEAD).
+
+     This allows detection of moved requirements by comparing current location
+     to where the REQ was in the last commit.
+
+     Args:
+         repo_root: Path to repository root
+         spec_dir: Spec directory relative to repo root
+         exclude_files: Files to exclude (default: INDEX.md, README.md)
+
+     Returns:
+         Dict mapping REQ ID (e.g., 'd00001') to relative file path
+     """
+     if exclude_files is None:
+         exclude_files = ["INDEX.md", "README.md", "requirements-format.md"]
+
+     req_locations: Dict[str, str] = {}
+     # Pattern matches REQ headers with optional associated prefix
+     req_pattern = re.compile(
+         r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
+     )
+
+     try:
+         # Get list of spec files in committed state
+         result = subprocess.run(
+             ["git", "ls-tree", "-r", "--name-only", "HEAD", f"{spec_dir}/"],
+             cwd=repo_root,
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+
+         for file_path in result.stdout.strip().split("\n"):
+             if not file_path.endswith(".md"):
+                 continue
+             if any(skip in file_path for skip in exclude_files):
+                 continue
+
+             # Get file content from committed state
+             try:
+                 content_result = subprocess.run(
+                     ["git", "show", f"HEAD:{file_path}"],
+                     cwd=repo_root,
+                     capture_output=True,
+                     text=True,
+                     check=True,
+                 )
+                 content = content_result.stdout
+
+                 # Find all REQ IDs in this file
+                 for match in req_pattern.finditer(content):
+                     req_id = match.group(1)
+                     req_locations[req_id] = file_path
+
+             except subprocess.CalledProcessError:
+                 # File might not exist in HEAD (new file)
+                 continue
+
+     except (subprocess.CalledProcessError, FileNotFoundError):
+         pass
+
+     return req_locations
+
+
+ def get_current_req_locations(
+     repo_root: Path,
+     spec_dir: str = "spec",
+     exclude_files: Optional[List[str]] = None,
+ ) -> Dict[str, str]:
+     """Get REQ ID -> file path mapping from current working directory.
+
+     Args:
+         repo_root: Path to repository root
+         spec_dir: Spec directory relative to repo root
+         exclude_files: Files to exclude (default: INDEX.md, README.md)
+
+     Returns:
+         Dict mapping REQ ID (e.g., 'd00001') to relative file path
+     """
+     if exclude_files is None:
+         exclude_files = ["INDEX.md", "README.md", "requirements-format.md"]
+
+     req_locations: Dict[str, str] = {}
+     req_pattern = re.compile(
+         r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
+     )
+
+     spec_path = repo_root / spec_dir
+     if not spec_path.exists():
+         return req_locations
+
+     for md_file in spec_path.rglob("*.md"):
+         if any(skip in md_file.name for skip in exclude_files):
+             continue
+
+         try:
+             content = md_file.read_text(encoding="utf-8")
+             rel_path = str(md_file.relative_to(repo_root))
+
+             for match in req_pattern.finditer(content):
+                 req_id = match.group(1)
+                 req_locations[req_id] = rel_path
+
+         except (IOError, UnicodeDecodeError):
+             continue
+
+     return req_locations
+
+
+ def detect_moved_requirements(
+     committed_locations: Dict[str, str],
+     current_locations: Dict[str, str],
+ ) -> List[MovedRequirement]:
+     """Detect requirements that have been moved between files.
+
+     Args:
+         committed_locations: REQ ID -> path mapping from committed state
+         current_locations: REQ ID -> path mapping from current state
+
+     Returns:
+         List of MovedRequirement objects for requirements whose location changed
+     """
+     moved = []
+     for req_id, old_path in committed_locations.items():
+         if req_id in current_locations:
+             new_path = current_locations[req_id]
+             if old_path != new_path:
+                 moved.append(
+                     MovedRequirement(
+                         req_id=req_id,
+                         old_path=old_path,
+                         new_path=new_path,
+                     )
+                 )
+     return moved
+
+
+ def get_git_changes(
+     repo_root: Optional[Path] = None,
+     spec_dir: str = "spec",
+     base_branch: str = "main",
+ ) -> GitChangeInfo:
+     """Get comprehensive git change information for requirement files.
+
+     This is the main entry point for git change detection. It gathers:
+     - Modified files (uncommitted changes to tracked files)
+     - Untracked files (new files not yet in git)
+     - Branch changed files (files changed vs main/master)
+     - Committed REQ locations (for move detection)
+
+     Args:
+         repo_root: Path to repository root (auto-detected if None)
+         spec_dir: Spec directory relative to repo root
+         base_branch: Base branch for comparison (default: 'main')
+
+     Returns:
+         GitChangeInfo with all change information
+     """
+     if repo_root is None:
+         repo_root = get_repo_root()
+         if repo_root is None:
+             return GitChangeInfo()
+
+     modified, untracked = get_modified_files(repo_root)
+     branch_changed = get_changed_vs_branch(repo_root, base_branch)
+     committed_locations = get_committed_req_locations(repo_root, spec_dir)
+
+     return GitChangeInfo(
+         modified_files=modified,
+         untracked_files=untracked,
+         branch_changed_files=branch_changed,
+         committed_req_locations=committed_locations,
+     )
+
+
+ def filter_spec_files(files: Set[str], spec_dir: str = "spec") -> Set[str]:
+     """Filter a set of files to only include spec directory files.
+
+     Args:
+         files: Set of file paths
+         spec_dir: Spec directory prefix
+
+     Returns:
+         Set of files that are in the spec directory
+     """
+     prefix = f"{spec_dir}/"
+     return {f for f in files if f.startswith(prefix) and f.endswith(".md")}
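
A sketch of how these entry points compose, using only functions defined in this module (output depends on the repository it runs in):

    from elspais.core.git import (
        detect_moved_requirements,
        filter_spec_files,
        get_current_req_locations,
        get_git_changes,
        get_repo_root,
    )

    # Gather all change info; repo root is auto-detected from the cwd
    changes = get_git_changes(spec_dir="spec", base_branch="main")
    spec_changes = filter_spec_files(changes.uncommitted_files, spec_dir="spec")
    print(f"Spec files with uncommitted changes: {sorted(spec_changes)}")

    # Compare committed REQ locations against the working tree to find moves
    repo_root = get_repo_root()
    if repo_root is not None:
        current = get_current_req_locations(repo_root, spec_dir="spec")
        moved = detect_moved_requirements(changes.committed_req_locations, current)
        for m in moved:
            print(f"REQ-{m.req_id} moved: {m.old_path} -> {m.new_path}")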
elspais/core/models.py CHANGED
@@ -118,6 +118,8 @@ class Requirement:
      line_number: Optional[int] = None
      tags: List[str] = field(default_factory=list)
      subdir: str = ""  # Subdirectory within spec/, e.g., "roadmap", "archive", ""
+     is_conflict: bool = False  # True if this is a conflicting duplicate entry
+     conflict_with: str = ""  # ID of the original requirement this conflicts with

      @property
      def type_code(self) -> str:
elspais/core/parser.py CHANGED
@@ -155,12 +155,12 @@ class RequirementParser:
              if req:
                  # Check for duplicate ID
                  if req_id in requirements:
-                     warnings.append(ParseWarning(
-                         requirement_id=req_id,
-                         message=f"Duplicate ID ignored (first occurrence at line {requirements[req_id].line_number})",
-                         file_path=file_path,
-                         line_number=start_line,
-                     ))
+                     # Keep both: original stays, duplicate gets __conflict suffix
+                     conflict_key, conflict_req, warning = self._make_conflict_entry(
+                         req, req_id, requirements[req_id], file_path, start_line
+                     )
+                     requirements[conflict_key] = conflict_req
+                     warnings.append(warning)
                  else:
                      requirements[req_id] = req
              else:
@@ -221,12 +221,12 @@ class RequirementParser:
          # Merge requirements, checking for cross-file duplicates
          for req_id, req in result.requirements.items():
              if req_id in requirements:
-                 warnings.append(ParseWarning(
-                     requirement_id=req_id,
-                     message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
-                     file_path=file_path,
-                     line_number=req.line_number,
-                 ))
+                 # Keep both: original stays, duplicate gets __conflict suffix
+                 conflict_key, conflict_req, warning = self._make_conflict_entry(
+                     req, req_id, requirements[req_id], file_path, req.line_number
+                 )
+                 requirements[conflict_key] = conflict_req
+                 warnings.append(warning)
              else:
                  requirements[req_id] = req
          warnings.extend(result.warnings)
@@ -278,12 +278,12 @@ class RequirementParser:
          # Merge requirements, checking for cross-directory duplicates
          for req_id, req in result.requirements.items():
              if req_id in requirements:
-                 warnings.append(ParseWarning(
-                     requirement_id=req_id,
-                     message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
-                     file_path=req.file_path,
-                     line_number=req.line_number,
-                 ))
+                 # Keep both: original stays, duplicate gets __conflict suffix
+                 conflict_key, conflict_req, warning = self._make_conflict_entry(
+                     req, req_id, requirements[req_id], req.file_path, req.line_number
+                 )
+                 requirements[conflict_key] = conflict_req
+                 warnings.append(warning)
              else:
                  requirements[req_id] = req
          warnings.extend(result.warnings)
@@ -336,18 +336,62 @@ class RequirementParser:
          # Merge requirements, checking for cross-subdir duplicates
          for req_id, req in subdir_result.requirements.items():
              if req_id in requirements:
-                 warnings.append(ParseWarning(
-                     requirement_id=req_id,
-                     message=f"Duplicate ID ignored (first occurrence in {requirements[req_id].file_path})",
-                     file_path=req.file_path,
-                     line_number=req.line_number,
-                 ))
+                 # Keep both: original stays, duplicate gets __conflict suffix
+                 conflict_key, conflict_req, warning = self._make_conflict_entry(
+                     req, req_id, requirements[req_id], req.file_path, req.line_number
+                 )
+                 requirements[conflict_key] = conflict_req
+                 warnings.append(warning)
              else:
                  requirements[req_id] = req
          warnings.extend(subdir_result.warnings)

      return ParseResult(requirements=requirements, warnings=warnings)

+     def _make_conflict_entry(
+         self,
+         duplicate_req: Requirement,
+         original_id: str,
+         original_req: Requirement,
+         file_path: Optional[Path],
+         line_number: Optional[int],
+     ) -> tuple:
+         """
+         Create a conflict entry for a duplicate requirement.
+
+         When a requirement ID already exists, this creates a modified version
+         of the duplicate with:
+         - Key suffix `__conflict` for storage
+         - `is_conflict=True` flag
+         - `conflict_with` set to the original ID
+         - `implements=[]` (treated as orphaned)
+
+         Args:
+             duplicate_req: The duplicate requirement that was found
+             original_id: The ID that is duplicated
+             original_req: The original requirement that was first
+             file_path: File path for the warning
+             line_number: Line number for the warning
+
+         Returns:
+             Tuple of (conflict_key, modified_requirement, ParseWarning)
+         """
+         conflict_key = f"{original_id}__conflict"
+
+         # Modify the duplicate requirement
+         duplicate_req.is_conflict = True
+         duplicate_req.conflict_with = original_id
+         duplicate_req.implements = []  # Treat as orphaned
+
+         warning = ParseWarning(
+             requirement_id=original_id,
+             message=f"Duplicate ID found (first occurrence in {original_req.file_path}:{original_req.line_number})",
+             file_path=file_path,
+             line_number=line_number,
+         )
+
+         return conflict_key, duplicate_req, warning
+
      def _parse_requirement_block(
          self,
          req_id: str,
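
The visible effect of _make_conflict_entry: a duplicated ID now yields a second entry under a __conflict key instead of being dropped. A sketch, assuming RequirementParser and PatternConfig are constructed as validate.py does and that a spec/ directory defines REQ-d00001 twice (layout invented):

    from elspais.core.parser import RequirementParser
    from elspais.core.patterns import PatternConfig

    parser = RequirementParser(PatternConfig.from_dict({}))
    result = parser.parse_directories(["spec"])  # two files both define REQ-d00001

    dup = result.requirements.get("d00001__conflict")
    if dup is not None:
        assert dup.is_conflict and dup.conflict_with == "d00001"
        assert dup.implements == []  # conflicts are treated as orphaned
        # result.warnings carries the matching "Duplicate ID found ..." entry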