elspais-0.11.1-py3-none-any.whl → elspais-0.11.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. elspais/__init__.py +1 -1
  2. elspais/cli.py +29 -10
  3. elspais/commands/analyze.py +5 -6
  4. elspais/commands/changed.py +2 -6
  5. elspais/commands/config_cmd.py +4 -4
  6. elspais/commands/edit.py +32 -36
  7. elspais/commands/hash_cmd.py +24 -18
  8. elspais/commands/index.py +8 -7
  9. elspais/commands/init.py +4 -4
  10. elspais/commands/reformat_cmd.py +32 -43
  11. elspais/commands/rules_cmd.py +6 -2
  12. elspais/commands/trace.py +23 -19
  13. elspais/commands/validate.py +8 -10
  14. elspais/config/defaults.py +7 -1
  15. elspais/core/content_rules.py +0 -1
  16. elspais/core/git.py +4 -10
  17. elspais/core/parser.py +55 -56
  18. elspais/core/patterns.py +2 -6
  19. elspais/core/rules.py +10 -15
  20. elspais/mcp/__init__.py +2 -0
  21. elspais/mcp/context.py +1 -0
  22. elspais/mcp/serializers.py +1 -1
  23. elspais/mcp/server.py +54 -39
  24. elspais/reformat/__init__.py +13 -13
  25. elspais/reformat/detector.py +9 -16
  26. elspais/reformat/hierarchy.py +8 -7
  27. elspais/reformat/line_breaks.py +36 -38
  28. elspais/reformat/prompts.py +22 -12
  29. elspais/reformat/transformer.py +43 -41
  30. elspais/sponsors/__init__.py +0 -2
  31. elspais/testing/__init__.py +1 -1
  32. elspais/testing/result_parser.py +25 -21
  33. elspais/trace_view/__init__.py +4 -3
  34. elspais/trace_view/coverage.py +5 -5
  35. elspais/trace_view/generators/__init__.py +1 -1
  36. elspais/trace_view/generators/base.py +17 -12
  37. elspais/trace_view/generators/csv.py +2 -6
  38. elspais/trace_view/generators/markdown.py +3 -8
  39. elspais/trace_view/html/__init__.py +4 -2
  40. elspais/trace_view/html/generator.py +423 -289
  41. elspais/trace_view/models.py +25 -0
  42. elspais/trace_view/review/__init__.py +21 -18
  43. elspais/trace_view/review/branches.py +114 -121
  44. elspais/trace_view/review/models.py +232 -237
  45. elspais/trace_view/review/position.py +53 -71
  46. elspais/trace_view/review/server.py +264 -288
  47. elspais/trace_view/review/status.py +43 -58
  48. elspais/trace_view/review/storage.py +48 -72
  49. {elspais-0.11.1.dist-info → elspais-0.11.2.dist-info}/METADATA +1 -1
  50. {elspais-0.11.1.dist-info → elspais-0.11.2.dist-info}/RECORD +53 -53
  51. {elspais-0.11.1.dist-info → elspais-0.11.2.dist-info}/WHEEL +0 -0
  52. {elspais-0.11.1.dist-info → elspais-0.11.2.dist-info}/entry_points.txt +0 -0
  53. {elspais-0.11.1.dist-info → elspais-0.11.2.dist-info}/licenses/LICENSE +0 -0
@@ -15,11 +15,11 @@ import argparse
  import shutil
  import sys
  from pathlib import Path
- from typing import List, Optional
+ from typing import Optional
 
- from elspais.config.loader import load_config, find_config_file, get_spec_directories
+ from elspais.config.loader import find_config_file, get_spec_directories, load_config
  from elspais.core.parser import RequirementParser
- from elspais.core.patterns import PatternValidator, PatternConfig
+ from elspais.core.patterns import PatternConfig, PatternValidator
  from elspais.core.rules import RuleEngine, RulesConfig
 
 
@@ -30,15 +30,14 @@ def run(args: argparse.Namespace) -> int:
  to the new Assertions format using Claude AI.
  """
  from elspais.reformat import (
- get_all_requirements,
+ assemble_new_format,
  build_hierarchy,
- traverse_top_down,
+ get_all_requirements,
+ normalize_line_breaks,
  normalize_req_id,
  reformat_requirement,
- assemble_new_format,
+ traverse_top_down,
  validate_reformatted_content,
- normalize_line_breaks,
- fix_requirement_line_breaks,
  )
 
  print("elspais reformat-with-claude")
@@ -55,10 +54,10 @@ def run(args: argparse.Namespace) -> int:
  backup = args.backup
  force = args.force
  fix_line_breaks = args.fix_line_breaks
- verbose = getattr(args, 'verbose', False)
- mode = getattr(args, 'mode', 'combined')
+ verbose = getattr(args, "verbose", False)
+ mode = getattr(args, "mode", "combined")
 
- print(f"Options:")
+ print("Options:")
  print(f" Start REQ: {start_req or 'All PRD requirements'}")
  print(f" Max depth: {max_depth or 'Unlimited'}")
  print(f" Mode: {mode}")
@@ -86,8 +85,7 @@ def run(args: argparse.Namespace) -> int:
  requirements = get_all_requirements(mode=mode)
  if not requirements:
  print("FAILED")
- print("Error: Could not load requirements. Run 'elspais validate' first.",
- file=sys.stderr)
+ print("Error: Could not load requirements. Run 'elspais validate' first.", file=sys.stderr)
  return 1
  print(f"found {len(requirements)} requirements")
 
@@ -108,13 +106,10 @@ def run(args: argparse.Namespace) -> int:
 
  print(f"Traversing from {start_req}...", flush=True)
  req_ids = traverse_top_down(requirements, start_req, max_depth)
- print(f"Traversal complete", flush=True)
+ print("Traversal complete", flush=True)
  else:
  # Process all PRD requirements first, then their descendants
- prd_reqs = [
- req_id for req_id, node in requirements.items()
- if node.level.upper() == 'PRD'
- ]
+ prd_reqs = [req_id for req_id, node in requirements.items() if node.level.upper() == "PRD"]
  prd_reqs.sort()
 
  print(f"Processing {len(prd_reqs)} PRD requirements and their descendants...")
@@ -167,8 +162,8 @@ def run(args: argparse.Namespace) -> int:
  continue
 
  # Validate the result
- rationale = result.get('rationale', '')
- assertions = result.get('assertions', [])
+ rationale = result.get("rationale", "")
+ assertions = result.get("assertions", [])
 
  is_valid, warnings = validate_reformatted_content(node, rationale, assertions)
 
@@ -177,7 +172,7 @@ def run(args: argparse.Namespace) -> int:
  print(f" WARNING: {warning}")
 
  if not is_valid:
- print(f" INVALID: Skipping due to validation errors")
+ print(" INVALID: Skipping due to validation errors")
  errors += 1
  continue
 
@@ -189,7 +184,7 @@ def run(args: argparse.Namespace) -> int:
  status=node.status,
  implements=node.implements,
  rationale=rationale,
- assertions=assertions
+ assertions=assertions,
  )
 
  # Optionally normalize line breaks
@@ -207,7 +202,7 @@ def run(args: argparse.Namespace) -> int:
  file_path = Path(node.file_path)
 
  if backup:
- backup_path = file_path.with_suffix(file_path.suffix + '.bak')
+ backup_path = file_path.with_suffix(file_path.suffix + ".bak")
  shutil.copy2(file_path, backup_path)
  print(f" Backup: {backup_path}")
 
@@ -226,7 +221,7 @@ def run(args: argparse.Namespace) -> int:
  print(f" Written: {file_path}")
  reformatted += 1
  else:
- print(f" ERROR: Could not locate requirement in file")
+ print(" ERROR: Could not locate requirement in file")
  errors += 1
 
  except Exception as e:
@@ -236,7 +231,7 @@ def run(args: argparse.Namespace) -> int:
  # Summary
  print()
  print("=" * 60)
- print(f"Summary:")
+ print("Summary:")
  print(f" Reformatted: {reformatted}")
  print(f" Skipped: {skipped}")
  print(f" Errors: {errors}")
@@ -247,10 +242,7 @@ def run(args: argparse.Namespace) -> int:
 
 
  def replace_requirement_content(
- file_content: str,
- req_id: str,
- title: str,
- new_content: str
+ file_content: str, req_id: str, title: str, new_content: str
  ) -> Optional[str]:
  """
  Replace a requirement's content in a file.
@@ -271,13 +263,13 @@ def replace_requirement_content(
 
  # Pattern to match the requirement header
  # # REQ-d00027: Title
- header_pattern = rf'^# {re.escape(req_id)}:\s*'
+ header_pattern = rf"^# {re.escape(req_id)}:\s*"
 
  # Pattern to match the footer
  # *End* *Title* | **Hash**: xxxxxxxx
- footer_pattern = rf'^\*End\*\s+\*{re.escape(title)}\*\s+\|\s+\*\*Hash\*\*:\s*[a-fA-F0-9]+'
+ footer_pattern = rf"^\*End\*\s+\*{re.escape(title)}\*\s+\|\s+\*\*Hash\*\*:\s*[a-fA-F0-9]+"
 
- lines = file_content.split('\n')
+ lines = file_content.split("\n")
  result_lines = []
  in_requirement = False
  found = False
@@ -292,7 +284,7 @@ def replace_requirement_content(
  in_requirement = True
  found = True
  # Insert new content (without trailing newline, we'll add it)
- new_lines = new_content.rstrip('\n').split('\n')
+ new_lines = new_content.rstrip("\n").split("\n")
  result_lines.extend(new_lines)
  i += 1
  continue
@@ -307,7 +299,7 @@ def replace_requirement_content(
  in_requirement = False
  i += 1
  # Skip any trailing blank lines after the footer
- while i < len(lines) and lines[i].strip() == '':
+ while i < len(lines) and lines[i].strip() == "":
  i += 1
  else:
  # Skip this line (part of old requirement)
@@ -316,15 +308,15 @@ def replace_requirement_content(
  if not found:
  return None
 
- return '\n'.join(result_lines)
+ return "\n".join(result_lines)
 
 
  def run_line_breaks_only(args: argparse.Namespace) -> int:
  """Run line break normalization only."""
  from elspais.reformat import (
+ detect_line_break_issues,
  get_all_requirements,
  normalize_line_breaks,
- detect_line_break_issues,
  )
 
  dry_run = args.dry_run
@@ -358,7 +350,7 @@ def run_line_breaks_only(args: argparse.Namespace) -> int:
  unchanged = 0
  errors = 0
 
- for file_path_str, req_ids in sorted(files_to_process.items()):
+ for file_path_str, _req_ids in sorted(files_to_process.items()):
  file_path = Path(file_path_str)
 
  try:
@@ -381,7 +373,7 @@ def run_line_breaks_only(args: argparse.Namespace) -> int:
  fixed_content = normalize_line_breaks(content)
 
  if backup:
- backup_path = file_path.with_suffix(file_path.suffix + '.bak')
+ backup_path = file_path.with_suffix(file_path.suffix + ".bak")
  shutil.copy2(file_path, backup_path)
 
  file_path.write_text(fixed_content)
@@ -393,7 +385,7 @@ def run_line_breaks_only(args: argparse.Namespace) -> int:
 
  print()
  print("=" * 60)
- print(f"Summary:")
+ print("Summary:")
  print(f" Fixed: {fixed}")
  print(f" Unchanged: {unchanged}")
  print(f" Errors: {errors}")
@@ -435,10 +427,7 @@ def get_requirements_needing_reformat(config: dict, base_path: Path) -> set:
  violations = engine.validate(requirements)
 
  # Filter to acceptance_criteria violations
- return {
- v.requirement_id for v in violations
- if v.rule_name == "format.acceptance_criteria"
- }
+ return {v.requirement_id for v in violations if v.rule_name == "format.acceptance_criteria"}
 
 
  def is_local_file(file_path: str, base_path: Path) -> bool:
@@ -10,7 +10,7 @@ from pathlib import Path
  from typing import Optional
 
  from elspais.config.loader import find_config_file, load_config
- from elspais.core.content_rules import load_content_rules, load_content_rule
+ from elspais.core.content_rules import load_content_rule, load_content_rules
 
 
  def run(args: argparse.Namespace) -> int:
@@ -56,7 +56,11 @@ def cmd_list(args: argparse.Namespace) -> int:
  print("Content Rules:")
  print("-" * 60)
  for rule in rules:
- rel_path = rule.file_path.relative_to(base_path) if base_path in rule.file_path.parents else rule.file_path
+ rel_path = (
+ rule.file_path.relative_to(base_path)
+ if base_path in rule.file_path.parents
+ else rule.file_path
+ )
  print(f" {rel_path}")
  print(f" Title: {rule.title}")
  print(f" Type: {rule.type}")
elspais/commands/trace.py CHANGED
@@ -24,11 +24,11 @@ def run(args: argparse.Namespace) -> int:
  """
  # Check if enhanced trace-view features are requested
  use_trace_view = (
- getattr(args, 'view', False) or
- getattr(args, 'embed_content', False) or
- getattr(args, 'edit_mode', False) or
- getattr(args, 'review_mode', False) or
- getattr(args, 'server', False)
+ getattr(args, "view", False)
+ or getattr(args, "embed_content", False)
+ or getattr(args, "edit_mode", False)
+ or getattr(args, "review_mode", False)
+ or getattr(args, "server", False)
  )
 
  if use_trace_view:
@@ -119,7 +119,7 @@ def run_trace_view(args: argparse.Namespace) -> int:
  except ImportError as e:
  print("Error: trace-view features require additional dependencies.", file=sys.stderr)
  print("Install with: pip install elspais[trace-view]", file=sys.stderr)
- if args.verbose if hasattr(args, 'verbose') else False:
+ if args.verbose if hasattr(args, "verbose") else False:
  print(f"Import error: {e}", file=sys.stderr)
  return 1
 
@@ -151,8 +151,8 @@ def run_trace_view(args: argparse.Namespace) -> int:
  generator = TraceViewGenerator(
  spec_dir=spec_dir,
  impl_dirs=impl_dirs,
- sponsor=getattr(args, 'sponsor', None),
- mode=getattr(args, 'mode', 'core'),
+ sponsor=getattr(args, "sponsor", None),
+ mode=getattr(args, "mode", "core"),
  repo_root=repo_root,
  config=config,
  )
@@ -174,13 +174,13 @@ def run_trace_view(args: argparse.Namespace) -> int:
  output_file = Path("traceability_matrix.md")
 
  # Generate
- quiet = getattr(args, 'quiet', False)
+ quiet = getattr(args, "quiet", False)
  generator.generate(
  format=output_format,
  output_file=output_file,
- embed_content=getattr(args, 'embed_content', False),
- edit_mode=getattr(args, 'edit_mode', False),
- review_mode=getattr(args, 'review_mode', False),
+ embed_content=getattr(args, "embed_content", False),
+ edit_mode=getattr(args, "edit_mode", False),
+ review_mode=getattr(args, "review_mode", False),
  quiet=quiet,
  )
 
@@ -194,7 +194,7 @@ def run_review_server(args: argparse.Namespace) -> int:
  elspais[trace-review] extra.
  """
  try:
- from elspais.trace_view.review import create_app, FLASK_AVAILABLE
+ from elspais.trace_view.review import FLASK_AVAILABLE, create_app
  except ImportError:
  print("Error: Review server requires additional dependencies.", file=sys.stderr)
  print("Install with: pip install elspais[trace-review]", file=sys.stderr)
@@ -212,9 +212,10 @@ def run_review_server(args: argparse.Namespace) -> int:
  else:
  repo_root = Path.cwd()
 
- port = getattr(args, 'port', 8080)
+ port = getattr(args, "port", 8080)
 
- print(f"""
+ print(
+ f"""
  ======================================
  elspais Review Server
  ======================================
@@ -223,11 +224,12 @@ Repository: {repo_root}
  Server: http://localhost:{port}
 
  Press Ctrl+C to stop
- """)
+ """
+ )
 
  app = create_app(repo_root, auto_sync=True)
  try:
- app.run(host='0.0.0.0', port=port, debug=False)
+ app.run(host="0.0.0.0", port=port, debug=False)
  except KeyboardInterrupt:
  print("\nServer stopped.")
 
@@ -323,7 +325,7 @@ def generate_html_matrix(requirements: Dict[str, Requirement]) -> str:
  status_class = f"status-{req.status.lower()}"
  subdir_attr = f'data-subdir="{req.subdir}"'
  html += (
- f' <tr {subdir_attr}><td>{req_id}</td><td>{req.title}</td>'
+ f" <tr {subdir_attr}><td>{req_id}</td><td>{req.title}</td>"
  f'<td>{impl_str}</td><td class="{status_class}">{req.status}</td></tr>\n'
  )
 
@@ -343,7 +345,9 @@ def generate_csv_matrix(requirements: Dict[str, Requirement]) -> str:
  for req_id, req in sorted(requirements.items()):
  impl_str = ";".join(req.implements) if req.implements else ""
  title = req.title.replace('"', '""')
- lines.append(f'"{req_id}","{title}","{req.level}","{req.status}","{impl_str}","{req.subdir}"')
+ lines.append(
+ f'"{req_id}","{title}","{req.level}","{req.status}","{impl_str}","{req.subdir}"'
+ )
 
  return "\n".join(lines)
 
@@ -43,10 +43,10 @@ def run(args: argparse.Namespace) -> int:
  return 1
 
  # Add sponsor spec directories if mode is "combined" and include_associated is enabled
- mode = getattr(args, 'mode', 'combined')
- include_associated = config.get('traceability', {}).get('include_associated', True)
+ mode = getattr(args, "mode", "combined")
+ include_associated = config.get("traceability", {}).get("include_associated", True)
 
- if mode == 'combined' and include_associated:
+ if mode == "combined" and include_associated:
  base_path = find_project_root(spec_dirs)
  sponsor_dirs = get_sponsor_spec_directories(config, base_path)
  if sponsor_dirs:
@@ -103,12 +103,11 @@ def run(args: argparse.Namespace) -> int:
  # Filter skipped rules
  if args.skip_rule:
  violations = [
- v for v in violations
- if not any(skip in v.rule_name for skip in args.skip_rule)
+ v for v in violations if not any(skip in v.rule_name for skip in args.skip_rule)
  ]
 
  # JSON output mode - output and exit
- if getattr(args, 'json', False):
+ if getattr(args, "json", False):
  # Test mapping (if enabled)
  test_data = None
  testing_config = TestingConfig.from_dict(config.get("testing", {}))
@@ -191,9 +190,9 @@ def should_scan_tests(args: argparse.Namespace, config: TestingConfig) -> bool:
  Returns:
  True if test scanning should run
  """
- if getattr(args, 'no_tests', False):
+ if getattr(args, "no_tests", False):
  return False
- if getattr(args, 'tests', False):
+ if getattr(args, "tests", False):
  return True
  return config.enabled
 
@@ -409,8 +408,7 @@ def format_requirements_json(
  cycle_path = v.message
 
  # Build requirement data matching hht_diary format
- # Use the original ID (strip __conflict suffix) for output key
- output_key = req_id.replace("__conflict", "") if req.is_conflict else req_id
+ # Note: req_id includes __conflict suffix for conflicts to avoid key collision
  output[req_id] = {
  "title": req.title,
  "status": req.status,
@@ -96,7 +96,13 @@ DEFAULT_CONFIG = {
  "labels_sequential": True,
  "labels_unique": True,
  "placeholder_values": [
- "obsolete", "removed", "deprecated", "N/A", "n/a", "-", "reserved"
+ "obsolete",
+ "removed",
+ "deprecated",
+ "N/A",
+ "n/a",
+ "-",
+ "reserved",
  ],
  },
  "traceability": {
@@ -5,7 +5,6 @@ Content rules are markdown files that provide semantic validation guidance
  for requirements authoring. They can include YAML frontmatter for metadata.
  """
 
- import re
  from pathlib import Path
  from typing import Any, Dict, List, Tuple
 
elspais/core/git.py CHANGED
@@ -123,9 +123,7 @@ def get_modified_files(repo_root: Path) -> Tuple[Set[str], Set[str]]:
  return set(), set()
 
 
- def get_changed_vs_branch(
- repo_root: Path, base_branch: str = "main"
- ) -> Set[str]:
+ def get_changed_vs_branch(repo_root: Path, base_branch: str = "main") -> Set[str]:
  """Get set of files changed between current branch and base branch.
 
  Args:
@@ -181,9 +179,7 @@ def get_committed_req_locations(
 
  req_locations: Dict[str, str] = {}
  # Pattern matches REQ headers with optional associated prefix
- req_pattern = re.compile(
- r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
- )
+ req_pattern = re.compile(r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE)
 
  try:
  # Get list of spec files in committed state
@@ -246,9 +242,7 @@ def get_current_req_locations(
  exclude_files = ["INDEX.md", "README.md", "requirements-format.md"]
 
  req_locations: Dict[str, str] = {}
- req_pattern = re.compile(
- r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
- )
+ req_pattern = re.compile(r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE)
 
  spec_path = repo_root / spec_dir
  if not spec_path.exists():
@@ -266,7 +260,7 @@ def get_current_req_locations(
  req_id = match.group(1)
  req_locations[req_id] = rel_path
 
- except (IOError, UnicodeDecodeError):
+ except (OSError, UnicodeDecodeError):
  continue
 
  return req_locations
elspais/core/parser.py CHANGED
@@ -20,46 +20,39 @@ class RequirementParser:
  # Regex patterns for parsing
  # Generic pattern to find potential requirement headers
  # Actual ID validation is done by PatternValidator
- HEADER_PATTERN = re.compile(
- r"^#*\s*(?P<id>[A-Z]+-[A-Za-z0-9-]+):\s*(?P<title>.+)$"
- )
+ HEADER_PATTERN = re.compile(r"^#*\s*(?P<id>[A-Z]+-[A-Za-z0-9-]+):\s*(?P<title>.+)$")
  LEVEL_STATUS_PATTERN = re.compile(
  r"\*\*Level\*\*:\s*(?P<level>\w+)"
  r"(?:\s*\|\s*\*\*Implements\*\*:\s*(?P<implements>[^|\n]+))?"
  r"(?:\s*\|\s*\*\*Status\*\*:\s*(?P<status>\w+))?"
  )
- ALT_STATUS_PATTERN = re.compile(
- r"\*\*Status\*\*:\s*(?P<status>\w+)"
- )
- IMPLEMENTS_PATTERN = re.compile(
- r"\*\*Implements\*\*:\s*(?P<implements>[^|\n]+)"
- )
+ ALT_STATUS_PATTERN = re.compile(r"\*\*Status\*\*:\s*(?P<status>\w+)")
+ IMPLEMENTS_PATTERN = re.compile(r"\*\*Implements\*\*:\s*(?P<implements>[^|\n]+)")
  END_MARKER_PATTERN = re.compile(
- r"^\*End\*\s+\*[^*]+\*\s*(?:\|\s*\*\*Hash\*\*:\s*(?P<hash>[a-zA-Z0-9]+))?",
- re.MULTILINE
- )
- RATIONALE_PATTERN = re.compile(
- r"\*\*Rationale\*\*:\s*(.+?)(?=\n\n|\n\*\*|\Z)", re.DOTALL
+ r"^\*End\*\s+\*[^*]+\*\s*(?:\|\s*\*\*Hash\*\*:\s*(?P<hash>[a-zA-Z0-9]+))?", re.MULTILINE
  )
+ RATIONALE_PATTERN = re.compile(r"\*\*Rationale\*\*:\s*(.+?)(?=\n\n|\n\*\*|\Z)", re.DOTALL)
  ACCEPTANCE_PATTERN = re.compile(
  r"\*\*Acceptance Criteria\*\*:\s*\n((?:\s*-\s*.+\n?)+)", re.MULTILINE
  )
  # Assertions section header (## Assertions or **Assertions**)
- ASSERTIONS_HEADER_PATTERN = re.compile(
- r"^##\s+Assertions\s*$", re.MULTILINE
- )
+ ASSERTIONS_HEADER_PATTERN = re.compile(r"^##\s+Assertions\s*$", re.MULTILINE)
  # Individual assertion line: "A. The system SHALL..." or "01. ..." etc.
  # Captures: label (any alphanumeric), text (rest of line, may continue)
- ASSERTION_LINE_PATTERN = re.compile(
- r"^\s*([A-Z0-9]+)\.\s+(.+)$", re.MULTILINE
- )
+ ASSERTION_LINE_PATTERN = re.compile(r"^\s*([A-Z0-9]+)\.\s+(.+)$", re.MULTILINE)
 
  # Default values that mean "no references" in Implements field
  DEFAULT_NO_REFERENCE_VALUES = ["-", "null", "none", "x", "X", "N/A", "n/a"]
 
  # Default placeholder values that indicate a removed/deprecated assertion
  DEFAULT_PLACEHOLDER_VALUES = [
- "obsolete", "removed", "deprecated", "N/A", "n/a", "-", "reserved"
+ "obsolete",
+ "removed",
+ "deprecated",
+ "N/A",
+ "n/a",
+ "-",
+ "reserved",
  ]
 
  def __init__(
@@ -272,9 +265,7 @@ class RequirementParser:
  else:
  dir_path = base_path / dir_entry
  if dir_path.exists() and dir_path.is_dir():
- result = self.parse_directory(
- dir_path, patterns=patterns, skip_files=skip_files
- )
+ result = self.parse_directory(dir_path, patterns=patterns, skip_files=skip_files)
  # Merge requirements, checking for cross-directory duplicates
  for req_id, req in result.requirements.items():
  if req_id in requirements:
@@ -385,7 +376,10 @@ class RequirementParser:
 
  warning = ParseWarning(
  requirement_id=original_id,
- message=f"Duplicate ID found (first occurrence in {original_req.file_path}:{original_req.line_number})",
+ message=(
+ f"Duplicate ID found "
+ f"(first occurrence in {original_req.file_path}:{original_req.line_number})"
+ ),
  file_path=file_path,
  line_number=line_number,
  )
@@ -444,12 +438,14 @@ class RequirementParser:
  implements = self._parse_implements(implements_str)
  for ref in implements:
  if not self.validator.is_valid(ref):
- block_warnings.append(ParseWarning(
- requirement_id=req_id,
- message=f"Invalid implements reference: {ref}",
- file_path=file_path,
- line_number=line_number,
- ))
+ block_warnings.append(
+ ParseWarning(
+ requirement_id=req_id,
+ message=f"Invalid implements reference: {ref}",
+ file_path=file_path,
+ line_number=line_number,
+ )
+ )
 
  # Extract body (text between header and acceptance/end)
  body = self._extract_body(text)
@@ -475,12 +471,14 @@ class RequirementParser:
  assertions = self._extract_assertions(text)
  for assertion in assertions:
  if not self._is_valid_assertion_label(assertion.label):
- block_warnings.append(ParseWarning(
- requirement_id=req_id,
- message=f"Invalid assertion label format: {assertion.label}",
- file_path=file_path,
- line_number=line_number,
- ))
+ block_warnings.append(
+ ParseWarning(
+ requirement_id=req_id,
+ message=f"Invalid assertion label format: {assertion.label}",
+ file_path=file_path,
+ line_number=line_number,
+ )
+ )
 
  # Extract hash from end marker
  hash_value = None
@@ -511,17 +509,17 @@ class RequirementParser:
  Default expectation is uppercase letters A-Z.
  """
  # Check against configured assertion label pattern if available
- assertion_config = getattr(self.pattern_config, 'assertions', None)
+ assertion_config = getattr(self.pattern_config, "assertions", None)
  if assertion_config:
- label_style = assertion_config.get('label_style', 'uppercase')
- if label_style == 'uppercase':
- return bool(re.match(r'^[A-Z]$', label))
- elif label_style == 'numeric':
- return bool(re.match(r'^\d+$', label))
- elif label_style == 'alphanumeric':
- return bool(re.match(r'^[A-Z0-9]+$', label))
+ label_style = assertion_config.get("label_style", "uppercase")
+ if label_style == "uppercase":
+ return bool(re.match(r"^[A-Z]$", label))
+ elif label_style == "numeric":
+ return bool(re.match(r"^\d+$", label))
+ elif label_style == "alphanumeric":
+ return bool(re.match(r"^[A-Z0-9]+$", label))
  # Default: uppercase single letter
- return bool(re.match(r'^[A-Z]$', label))
+ return bool(re.match(r"^[A-Z]$", label))
 
  def _parse_implements(self, implements_str: str) -> List[str]:
  """Parse comma-separated implements list.
@@ -608,9 +606,9 @@ class RequirementParser:
 
  # Find the end of the assertions section (next ## header, Rationale, or End marker)
  end_patterns = [
- r"^##\s+", # Next section header
- r"^\*End\*", # End marker
- r"^---\s*$", # Separator line
+ r"^##\s+",  # Next section header
+ r"^\*End\*",  # End marker
+ r"^---\s*$",  # Separator line
  ]
  end_pos = len(section_text)
  for pattern in end_patterns:
@@ -627,14 +625,15 @@ class RequirementParser:
 
  # Check if this is a placeholder
  is_placeholder = any(
- assertion_text.lower().startswith(pv.lower())
- for pv in self.placeholder_values
+ assertion_text.lower().startswith(pv.lower()) for pv in self.placeholder_values
  )
 
- assertions.append(Assertion(
- label=label,
- text=assertion_text,
- is_placeholder=is_placeholder,
- ))
+ assertions.append(
+ Assertion(
+ label=label,
+ text=assertion_text,
+ is_placeholder=is_placeholder,
+ )
+ )
 
  return assertions