elspais 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
elspais/cli.py CHANGED
@@ -10,7 +10,7 @@ from pathlib import Path
  from typing import List, Optional

  from elspais import __version__
- from elspais.commands import analyze, config_cmd, edit, hash_cmd, index, init, rules_cmd, trace, validate
+ from elspais.commands import analyze, changed, config_cmd, edit, hash_cmd, index, init, rules_cmd, trace, validate


  def create_parser() -> argparse.ArgumentParser:
@@ -179,6 +179,28 @@ Examples:
  help="Implementation coverage report",
  )

+ # changed command
+ changed_parser = subparsers.add_parser(
+ "changed",
+ help="Detect git changes to spec files",
+ )
+ changed_parser.add_argument(
+ "--base-branch",
+ default="main",
+ help="Base branch for comparison (default: main)",
+ metavar="BRANCH",
+ )
+ changed_parser.add_argument(
+ "-j", "--json",
+ action="store_true",
+ help="Output as JSON",
+ )
+ changed_parser.add_argument(
+ "-a", "--all",
+ action="store_true",
+ help="Include all changed files (not just spec)",
+ )
+
  # version command
  version_parser = subparsers.add_parser(
  "version",
@@ -455,6 +477,8 @@ def main(argv: Optional[List[str]] = None) -> int:
  return index.run(args)
  elif args.command == "analyze":
  return analyze.run(args)
+ elif args.command == "changed":
+ return changed.run(args)
  elif args.command == "version":
  return version_command(args)
  elif args.command == "init":
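The hunks above register a new `changed` subcommand and route it to `changed.run(args)`. A minimal sketch of exercising it programmatically, assuming only the `main(argv)` signature and the flags defined above (this is not code from the package):

```python
# Roughly equivalent to the CLI invocation `elspais changed --base-branch main --json`,
# run from a checkout that contains a spec/ directory.
from elspais.cli import main

exit_code = main(["changed", "--base-branch", "main", "--json"])
print(f"changed exited with {exit_code}")
```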
elspais/commands/changed.py ADDED
@@ -0,0 +1,160 @@
+ """
+ elspais.commands.changed - Git-based change detection for requirements.
+
+ Detects changes to requirement files using git:
+ - Uncommitted changes (modified or new files)
+ - Changes vs main/master branch
+ - Moved requirements (comparing current location to committed state)
+ """
+
+ import argparse
+ import json
+ import sys
+ from pathlib import Path
+ from typing import Dict, List, Optional
+
+ from elspais.config.defaults import DEFAULT_CONFIG
+ from elspais.config.loader import find_config_file, load_config
+ from elspais.core.git import (
+ GitChangeInfo,
+ MovedRequirement,
+ detect_moved_requirements,
+ filter_spec_files,
+ get_current_req_locations,
+ get_git_changes,
+ get_repo_root,
+ )
+
+
+ def load_configuration(args: argparse.Namespace) -> Optional[Dict]:
+ """Load configuration from file or use defaults."""
+ config_path = getattr(args, "config", None)
+ if config_path:
+ pass # Use provided path
+ else:
+ config_path = find_config_file(Path.cwd())
+
+ if config_path and config_path.exists():
+ try:
+ return load_config(config_path)
+ except Exception as e:
+ print(f"Error loading config: {e}", file=sys.stderr)
+ return None
+ else:
+ return DEFAULT_CONFIG
+
+
+ def run(args: argparse.Namespace) -> int:
+ """Run the changed command."""
+ # Get repository root
+ repo_root = get_repo_root()
+ if repo_root is None:
+ print("Error: Not in a git repository")
+ return 1
+
+ # Load config to get spec directory
+ config = load_configuration(args)
+ if config is None:
+ return 1
+
+ spec_dir = config.get("directories", {}).get("spec", "spec")
+ if isinstance(spec_dir, list):
+ spec_dir = spec_dir[0] if spec_dir else "spec"
+
+ base_branch = getattr(args, "base_branch", None) or "main"
+ json_output = getattr(args, "json", False)
+ show_all = getattr(args, "all", False)
+ quiet = getattr(args, "quiet", False)
+
+ # Get git change information
+ changes = get_git_changes(repo_root, spec_dir, base_branch)
+
+ # Filter to spec files only
+ spec_modified = filter_spec_files(changes.modified_files, spec_dir)
+ spec_untracked = filter_spec_files(changes.untracked_files, spec_dir)
+ spec_branch = filter_spec_files(changes.branch_changed_files, spec_dir)
+
+ # Detect moved requirements
+ current_locations = get_current_req_locations(repo_root, spec_dir)
+ moved = detect_moved_requirements(
+ changes.committed_req_locations, current_locations
+ )
+
+ # Build result
+ result = {
+ "repo_root": str(repo_root),
+ "spec_dir": spec_dir,
+ "base_branch": base_branch,
+ "uncommitted": {
+ "modified": sorted(spec_modified),
+ "untracked": sorted(spec_untracked),
+ "count": len(spec_modified) + len(spec_untracked),
+ },
+ "branch_changed": {
+ "files": sorted(spec_branch),
+ "count": len(spec_branch),
+ },
+ "moved_requirements": [
+ {
+ "req_id": m.req_id,
+ "old_path": m.old_path,
+ "new_path": m.new_path,
+ }
+ for m in moved
+ ],
+ }
+
+ # Include all files if --all flag is set
+ if show_all:
+ result["all_modified"] = sorted(changes.modified_files)
+ result["all_untracked"] = sorted(changes.untracked_files)
+ result["all_branch_changed"] = sorted(changes.branch_changed_files)
+
+ if json_output:
+ print(json.dumps(result, indent=2))
+ return 0
+
+ # Human-readable output
+ has_changes = False
+
+ if spec_modified or spec_untracked:
+ has_changes = True
+ if not quiet:
+ uncommitted_count = len(spec_modified) + len(spec_untracked)
+ print(f"Uncommitted spec changes: {uncommitted_count}")
+
+ if spec_modified:
+ print(f" Modified ({len(spec_modified)}):")
+ for f in sorted(spec_modified):
+ print(f" M {f}")
+
+ if spec_untracked:
+ print(f" New ({len(spec_untracked)}):")
+ for f in sorted(spec_untracked):
+ print(f" + {f}")
+ print()
+
+ if spec_branch:
+ has_changes = True
+ if not quiet:
+ print(f"Changed vs {base_branch}: {len(spec_branch)}")
+ for f in sorted(spec_branch):
+ print(f" {f}")
+ print()
+
+ if moved:
+ has_changes = True
+ if not quiet:
+ print(f"Moved requirements: {len(moved)}")
+ for m in moved:
+ print(f" REQ-{m.req_id}:")
+ print(f" from: {m.old_path}")
+ print(f" to: {m.new_path}")
+ print()
+
+ if not has_changes:
+ if not quiet:
+ print("No uncommitted changes to spec files")
+ return 0
+
+ return 0
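With `--json`, `run()` simply serializes the `result` dictionary above via `json.dumps(result, indent=2)`. A hand-written illustration of that shape (the paths and REQ ID are invented for the example):

```python
import json

example = {
    "repo_root": "/work/acme-device",  # hypothetical checkout
    "spec_dir": "spec",
    "base_branch": "main",
    "uncommitted": {"modified": ["spec/auth.md"], "untracked": [], "count": 1},
    "branch_changed": {"files": ["spec/auth.md"], "count": 1},
    "moved_requirements": [
        {"req_id": "d00001", "old_path": "spec/old.md", "new_path": "spec/auth.md"}
    ],
}
print(json.dumps(example, indent=2))
```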
elspais/commands/validate.py CHANGED
@@ -13,7 +13,7 @@ from typing import Any, Dict, List, Optional
  from elspais.config.defaults import DEFAULT_CONFIG
  from elspais.config.loader import find_config_file, get_spec_directories, load_config
  from elspais.core.hasher import calculate_hash, verify_hash
- from elspais.core.models import Requirement
+ from elspais.core.models import ParseWarning, Requirement
  from elspais.core.parser import RequirementParser
  from elspais.core.patterns import PatternConfig
  from elspais.core.rules import RuleEngine, RulesConfig, RuleViolation, Severity
@@ -55,7 +55,8 @@ def run(args: argparse.Namespace) -> int:
  skip_files = spec_config.get("skip_files", [])

  try:
- requirements = parser.parse_directories(spec_dirs, skip_files=skip_files)
+ parse_result = parser.parse_directories(spec_dirs, skip_files=skip_files)
+ requirements = dict(parse_result) # ParseResult supports dict-like access
  except Exception as e:
  print(f"Error parsing requirements: {e}", file=sys.stderr)
  return 1
@@ -81,6 +82,10 @@ def run(args: argparse.Namespace) -> int:
  link_violations = validate_links(requirements, args, config)
  violations.extend(link_violations)

+ # Add parser warnings (duplicates, etc.) as violations
+ parse_violations = convert_parse_warnings_to_violations(parse_result.warnings)
+ violations.extend(parse_violations)
+
  # Filter skipped rules
  if args.skip_rule:
  violations = [
@@ -276,6 +281,36 @@ def validate_links(
  return violations


+ def convert_parse_warnings_to_violations(
+ warnings: List[ParseWarning],
+ ) -> List[RuleViolation]:
+ """Convert parser warnings (like duplicates) to rule violations.
+
+ The parser detects duplicate REQ IDs and generates ParseWarning objects.
+ This function converts them to RuleViolation objects so they appear in
+ validation output.
+
+ Args:
+ warnings: List of ParseWarning objects from parser
+
+ Returns:
+ List of RuleViolation objects for duplicate IDs
+ """
+ violations = []
+ for warning in warnings:
+ if "duplicate" in warning.message.lower():
+ violations.append(
+ RuleViolation(
+ rule_name="id.duplicate",
+ requirement_id=warning.requirement_id,
+ message=warning.message,
+ severity=Severity.ERROR,
+ location=f"{warning.file_path}:{warning.line_number}",
+ )
+ )
+ return violations
+
+
  def load_core_requirements(core_path: Path, config: Dict) -> Dict[str, Requirement]:
  """Load requirements from core repository."""
  if not core_path.exists():
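The new helper reads only `message`, `requirement_id`, `file_path`, and `line_number` from each warning and keeps those whose message mentions "duplicate". A quick sketch of that filtering using stand-in objects, since the `ParseWarning` constructor is not shown in this diff:

```python
from types import SimpleNamespace

from elspais.commands.validate import convert_parse_warnings_to_violations

# Stand-ins exposing the attributes the helper reads; real callers pass
# ParseWarning objects from parse_result.warnings.
warnings = [
    SimpleNamespace(message="Duplicate REQ ID d00001", requirement_id="d00001",
                    file_path="spec/auth.md", line_number=12),
    SimpleNamespace(message="Missing hash footer", requirement_id="d00002",
                    file_path="spec/auth.md", line_number=40),
]

violations = convert_parse_warnings_to_violations(warnings)
print(len(violations))  # 1 -- only the duplicate warning becomes a RuleViolation
```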
elspais/core/git.py ADDED
@@ -0,0 +1,352 @@
+ """
+ Git state management for elspais.
+
+ Provides functions to query git status and detect changes to requirement files,
+ enabling detection of:
+ - Uncommitted changes to spec files
+ - New (untracked) requirement files
+ - Files changed vs main/master branch
+ - Moved requirements (comparing current location to committed state)
+ """
+
+ import re
+ import subprocess
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Dict, List, Optional, Set, Tuple
+
+
+ @dataclass
+ class GitChangeInfo:
+ """Information about git changes to requirement files."""
+
+ modified_files: Set[str] = field(default_factory=set)
+ """Files with uncommitted modifications (staged or unstaged)."""
+
+ untracked_files: Set[str] = field(default_factory=set)
+ """New files not yet tracked by git."""
+
+ branch_changed_files: Set[str] = field(default_factory=set)
+ """Files changed between current branch and main/master."""
+
+ committed_req_locations: Dict[str, str] = field(default_factory=dict)
+ """REQ ID -> file path mapping from committed state (HEAD)."""
+
+ @property
+ def all_changed_files(self) -> Set[str]:
+ """Get all files with any kind of change."""
+ return self.modified_files | self.untracked_files | self.branch_changed_files
+
+ @property
+ def uncommitted_files(self) -> Set[str]:
+ """Get all files with uncommitted changes (modified or untracked)."""
+ return self.modified_files | self.untracked_files
+
+
+ @dataclass
+ class MovedRequirement:
+ """Information about a requirement that was moved between files."""
+
+ req_id: str
+ """The requirement ID (e.g., 'd00001')."""
+
+ old_path: str
+ """Path in the committed state."""
+
+ new_path: str
+ """Path in the current working directory."""
+
+
+ def get_repo_root(start_path: Optional[Path] = None) -> Optional[Path]:
+ """Find the git repository root.
+
+ Args:
+ start_path: Path to start searching from (default: current directory)
+
+ Returns:
+ Path to repository root, or None if not in a git repository
+ """
+ try:
+ result = subprocess.run(
+ ["git", "rev-parse", "--show-toplevel"],
+ cwd=start_path or Path.cwd(),
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ return Path(result.stdout.strip())
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ return None
+
+
+ def get_modified_files(repo_root: Path) -> Tuple[Set[str], Set[str]]:
+ """Get sets of modified and untracked files according to git status.
+
+ Args:
+ repo_root: Path to repository root
+
+ Returns:
+ Tuple of (modified_files, untracked_files):
+ - modified_files: Tracked files with changes (M, A, R, etc.)
+ - untracked_files: New files not yet tracked (??)
+ """
+ try:
+ result = subprocess.run(
+ ["git", "status", "--porcelain", "--untracked-files=all"],
+ cwd=repo_root,
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ modified_files: Set[str] = set()
+ untracked_files: Set[str] = set()
+
+ for line in result.stdout.split("\n"):
+ if line and len(line) >= 3:
+ # Format: "XY filename" or "XY orig -> renamed"
+ # XY = two-letter status (e.g., " M", "??", "A ", "R ")
+ status_code = line[:2]
+ file_path = line[3:].strip()
+
+ # Handle renames: "orig -> new"
+ if " -> " in file_path:
+ file_path = file_path.split(" -> ")[1]
+
+ if file_path:
+ if status_code == "??":
+ untracked_files.add(file_path)
+ else:
+ modified_files.add(file_path)
+
+ return modified_files, untracked_files
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ return set(), set()
+
+
+ def get_changed_vs_branch(
+ repo_root: Path, base_branch: str = "main"
+ ) -> Set[str]:
+ """Get set of files changed between current branch and base branch.
+
+ Args:
+ repo_root: Path to repository root
+ base_branch: Name of base branch (default: 'main')
+
+ Returns:
+ Set of file paths changed vs base branch
+ """
+ # Try local branch first, then remote
+ for branch_ref in [base_branch, f"origin/{base_branch}"]:
+ try:
+ result = subprocess.run(
+ ["git", "diff", "--name-only", f"{branch_ref}...HEAD"],
+ cwd=repo_root,
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ changed_files: Set[str] = set()
+ for line in result.stdout.split("\n"):
+ if line.strip():
+ changed_files.add(line.strip())
+ return changed_files
+ except subprocess.CalledProcessError:
+ continue
+ except FileNotFoundError:
+ return set()
+
+ return set()
+
+
+ def get_committed_req_locations(
+ repo_root: Path,
+ spec_dir: str = "spec",
+ exclude_files: Optional[List[str]] = None,
+ ) -> Dict[str, str]:
+ """Get REQ ID -> file path mapping from committed state (HEAD).
+
+ This allows detection of moved requirements by comparing current location
+ to where the REQ was in the last commit.
+
+ Args:
+ repo_root: Path to repository root
+ spec_dir: Spec directory relative to repo root
+ exclude_files: Files to exclude (default: INDEX.md, README.md)
+
+ Returns:
+ Dict mapping REQ ID (e.g., 'd00001') to relative file path
+ """
+ if exclude_files is None:
+ exclude_files = ["INDEX.md", "README.md", "requirements-format.md"]
+
+ req_locations: Dict[str, str] = {}
+ # Pattern matches REQ headers with optional associated prefix
+ req_pattern = re.compile(
+ r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
+ )
+
+ try:
+ # Get list of spec files in committed state
+ result = subprocess.run(
+ ["git", "ls-tree", "-r", "--name-only", "HEAD", f"{spec_dir}/"],
+ cwd=repo_root,
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+
+ for file_path in result.stdout.strip().split("\n"):
+ if not file_path.endswith(".md"):
+ continue
+ if any(skip in file_path for skip in exclude_files):
+ continue
+
+ # Get file content from committed state
+ try:
+ content_result = subprocess.run(
+ ["git", "show", f"HEAD:{file_path}"],
+ cwd=repo_root,
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ content = content_result.stdout
+
+ # Find all REQ IDs in this file
+ for match in req_pattern.finditer(content):
+ req_id = match.group(1)
+ req_locations[req_id] = file_path
+
+ except subprocess.CalledProcessError:
+ # File might not exist in HEAD (new file)
+ continue
+
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ pass
+
+ return req_locations
+
+
+ def get_current_req_locations(
+ repo_root: Path,
+ spec_dir: str = "spec",
+ exclude_files: Optional[List[str]] = None,
+ ) -> Dict[str, str]:
+ """Get REQ ID -> file path mapping from current working directory.
+
+ Args:
+ repo_root: Path to repository root
+ spec_dir: Spec directory relative to repo root
+ exclude_files: Files to exclude (default: INDEX.md, README.md)
+
+ Returns:
+ Dict mapping REQ ID (e.g., 'd00001') to relative file path
+ """
+ if exclude_files is None:
+ exclude_files = ["INDEX.md", "README.md", "requirements-format.md"]
+
+ req_locations: Dict[str, str] = {}
+ req_pattern = re.compile(
+ r"^#{1,6}\s+REQ-(?:[A-Z]{2,4}-)?([pod]\d{5}):", re.MULTILINE
+ )
+
+ spec_path = repo_root / spec_dir
+ if not spec_path.exists():
+ return req_locations
+
+ for md_file in spec_path.rglob("*.md"):
+ if any(skip in md_file.name for skip in exclude_files):
+ continue
+
+ try:
+ content = md_file.read_text(encoding="utf-8")
+ rel_path = str(md_file.relative_to(repo_root))
+
+ for match in req_pattern.finditer(content):
+ req_id = match.group(1)
+ req_locations[req_id] = rel_path
+
+ except (IOError, UnicodeDecodeError):
+ continue
+
+ return req_locations
+
+
+ def detect_moved_requirements(
+ committed_locations: Dict[str, str],
+ current_locations: Dict[str, str],
+ ) -> List[MovedRequirement]:
+ """Detect requirements that have been moved between files.
+
+ Args:
+ committed_locations: REQ ID -> path mapping from committed state
+ current_locations: REQ ID -> path mapping from current state
+
+ Returns:
+ List of MovedRequirement objects for requirements whose location changed
+ """
+ moved = []
+ for req_id, old_path in committed_locations.items():
+ if req_id in current_locations:
+ new_path = current_locations[req_id]
+ if old_path != new_path:
+ moved.append(
+ MovedRequirement(
+ req_id=req_id,
+ old_path=old_path,
+ new_path=new_path,
+ )
+ )
+ return moved
+
+
+ def get_git_changes(
+ repo_root: Optional[Path] = None,
+ spec_dir: str = "spec",
+ base_branch: str = "main",
+ ) -> GitChangeInfo:
+ """Get comprehensive git change information for requirement files.
+
+ This is the main entry point for git change detection. It gathers:
+ - Modified files (uncommitted changes to tracked files)
+ - Untracked files (new files not yet in git)
+ - Branch changed files (files changed vs main/master)
+ - Committed REQ locations (for move detection)
+
+ Args:
+ repo_root: Path to repository root (auto-detected if None)
+ spec_dir: Spec directory relative to repo root
+ base_branch: Base branch for comparison (default: 'main')
+
+ Returns:
+ GitChangeInfo with all change information
+ """
+ if repo_root is None:
+ repo_root = get_repo_root()
+ if repo_root is None:
+ return GitChangeInfo()
+
+ modified, untracked = get_modified_files(repo_root)
+ branch_changed = get_changed_vs_branch(repo_root, base_branch)
+ committed_locations = get_committed_req_locations(repo_root, spec_dir)
+
+ return GitChangeInfo(
+ modified_files=modified,
+ untracked_files=untracked,
+ branch_changed_files=branch_changed,
+ committed_req_locations=committed_locations,
+ )
+
+
+ def filter_spec_files(files: Set[str], spec_dir: str = "spec") -> Set[str]:
+ """Filter a set of files to only include spec directory files.
+
+ Args:
+ files: Set of file paths
+ spec_dir: Spec directory prefix
+
+ Returns:
+ Set of files that are in the spec directory
+ """
+ prefix = f"{spec_dir}/"
+ return {f for f in files if f.startswith(prefix) and f.endswith(".md")}
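Most of this module shells out to git, but the move detection and spec filtering at the end are pure functions over plain data, so they are easy to sanity-check in isolation. A small sketch with made-up paths:

```python
from elspais.core.git import detect_moved_requirements, filter_spec_files

# d00001 moved between files, d00002 stayed put, d00003 was deleted entirely.
committed = {"d00001": "spec/old.md", "d00002": "spec/core.md", "d00003": "spec/gone.md"}
current = {"d00001": "spec/auth.md", "d00002": "spec/core.md"}

for m in detect_moved_requirements(committed, current):
    print(m.req_id, m.old_path, "->", m.new_path)  # d00001 spec/old.md -> spec/auth.md

# filter_spec_files keeps only Markdown files under the spec/ prefix.
files = {"spec/auth.md", "spec/notes.txt", "src/main.py", "README.md"}
print(filter_spec_files(files, "spec"))  # {'spec/auth.md'}
```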
elspais-0.9.1.dist-info/METADATA → elspais-0.9.3.dist-info/METADATA
@@ -1,15 +1,14 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: elspais
- Version: 0.9.1
+ Version: 0.9.3
  Summary: Requirements validation and traceability tools - L-Space connects all libraries
- Home-page: https://github.com/anspar/elspais
- Author: Anspar
- Author-email: dev@anspar.io
- License: MIT
+ Project-URL: Homepage, https://github.com/anspar/elspais
  Project-URL: Documentation, https://github.com/anspar/elspais#readme
  Project-URL: Repository, https://github.com/anspar/elspais
  Project-URL: Issues, https://github.com/anspar/elspais/issues
  Project-URL: Changelog, https://github.com/anspar/elspais/blob/main/CHANGELOG.md
+ Author-email: Anspar <dev@anspar.io>
+ License-File: LICENSE
  Keywords: documentation,requirements,specifications,traceability,validation
  Classifier: Development Status :: 4 - Beta
  Classifier: Environment :: Console
@@ -57,18 +56,50 @@ Description-Content-Type: text/markdown

  ## Installation

+ ### For End Users
+
  ```bash
+ # Standard installation
  pip install elspais
+
+ # Recommended for CLI tools: Isolated installation
+ pipx install elspais
  ```

- Or install from source:
+ ### For Development

  ```bash
  git clone https://github.com/anspar/elspais.git
  cd elspais
- pip install -e .
+ pip install -e ".[dev]"
+ ```
+
+ ### For Docker and CI/CD
+
+ For faster installation in containerized environments, consider [uv](https://github.com/astral-sh/uv):
+
+ ```dockerfile
+ # Example Dockerfile
+ FROM python:3.11-slim
+
+ # Copy uv binary
+ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
+
+ # Install elspais (10-100x faster than pip)
+ RUN uv pip install --system --no-cache elspais==0.9.1
  ```

+ ```yaml
+ # Example GitHub Actions
+ - name: Install uv
+ uses: astral-sh/setup-uv@v2
+
+ - name: Install elspais
+ run: uv pip install --system elspais==0.9.1
+ ```
+
+ **Note:** For regulated/medical software projects, always pin the exact version for reproducibility.
+

  ## Quick Start
  ### Initialize a Repository
@@ -185,22 +216,28 @@ See [docs/configuration.md](docs/configuration.md) for full reference.
  elspais expects requirements in Markdown format:

  ```markdown
- ### REQ-d00001: Requirement Title
+ # REQ-d00001: Requirement Title

- **Level**: Dev | **Implements**: p00001 | **Status**: Active
+ **Level**: Dev | **Status**: Active | **Implements**: REQ-p00001

- The system SHALL provide user authentication.
+ ## Assertions

- **Rationale**: Security requires identity verification.
+ A. The system SHALL provide user authentication via email/password.
+ B. Sessions SHALL expire after 30 minutes of inactivity.

- **Acceptance Criteria**:
- - Users can log in with email/password
- - Session expires after 30 minutes of inactivity
+ ## Rationale
+
+ Security requires identity verification.

  *End* *Requirement Title* | **Hash**: a1b2c3d4
  ---
  ```

+ Key format elements:
+ - **Assertions section**: Labeled A-Z, each using SHALL for normative statements
+ - **One-way traceability**: Children reference parents via `Implements:`
+ - **Hash footer**: SHA-256 hash for change detection
+

  ## ID Pattern Examples
  elspais supports multiple ID formats:
elspais-0.9.1.dist-info/RECORD → elspais-0.9.3.dist-info/RECORD
@@ -1,8 +1,9 @@
  elspais/__init__.py,sha256=grQHU0RmRQChHUEb_cH_OuGmxADj5fDSNNayhXEQco4,1014
  elspais/__main__.py,sha256=rCMaObqJeT_6dhyfND7S4dh_lv30j7Ww3Z7992YYwaE,130
- elspais/cli.py,sha256=yJ8Bs_LfDA9hFvlowYgKOQGx9EoM5-9M2bAphYhrSHc,14098
+ elspais/cli.py,sha256=krVJcjCVXAuxniRWFPIIjBd7znQSD6egrJIN2YWuHYQ,14770
  elspais/commands/__init__.py,sha256=jS7ry2ez7xri-fUlYUw9fGKJi5yTHtVN4PU8voHjgLI,155
  elspais/commands/analyze.py,sha256=5hE6YRL8AoAz2ukkR1rj6aiD2TNJi63UHYVJXmKoOUs,7135
+ elspais/commands/changed.py,sha256=H0v95I6eXqcYXfqIUHKb3rP_2jMiH52bkXMe0TXM6y8,4872
  elspais/commands/config_cmd.py,sha256=KnH7vISH4041rqQrFftgffYLIujmIllZ_NCl5hIvCnE,13943
  elspais/commands/edit.py,sha256=Ey42VCAsV2G1WzCZ3qy1IAoG88zeba3O0iZjiodJZZA,16054
  elspais/commands/hash_cmd.py,sha256=pPKvhS99Nb-AEO5JrLUJQNQbXkfCJ0vVQE_L8PZW5NQ,5686
@@ -10,12 +11,13 @@ elspais/commands/index.py,sha256=_jPGt_LnGI6gGJLonzOoM978YpTj14GWDCCemtSIah4,528
  elspais/commands/init.py,sha256=y2vxR5pvd0gmONJYjX3GtGbMfmJU11k_zRKWCm7O6Qo,3870
  elspais/commands/rules_cmd.py,sha256=b2d0l-rn-WTd6ULJWsjAebBTcKv_yndO7pVNc29QEoo,3415
  elspais/commands/trace.py,sha256=xXpXD_ZDsr2B0YA342V4RJpV0zeRrq_l-gxwvO8NKHo,7651
- elspais/commands/validate.py,sha256=JItxCp9LjfJ-jD11C08NcnF37JFcfNacIYJCP3SV-2M,13083
+ elspais/commands/validate.py,sha256=dIjDUU3zKfgnVB9rFggGr6_W9i1X6NVqRhfWoTkP7f0,14346
  elspais/config/__init__.py,sha256=NkQEonHWWiXXCDrfehOCrsEYSuhj75KYj8oBQ7FuS4c,292
  elspais/config/defaults.py,sha256=6GddebKwrDAiTp0tGzkNSEuGM_HeA846o8CQPYS4yz8,5177
  elspais/config/loader.py,sha256=MnG_j2W1pOSRTTUHAOvu5xcJUAzDFNPvRjloFXpM15w,13827
  elspais/core/__init__.py,sha256=OTQ1TOf7iLN4czmlkrV5tiaxpAat0VcrWpxYbfonAys,576
  elspais/core/content_rules.py,sha256=4UoA_SUJK5R2MG9NE0Z8sNzMVPq3beEJGKEtX_NAeTk,4515
+ elspais/core/git.py,sha256=oM0jQUuqdMYc95j2Q1xqEH8a3X33u0Xe57TsTDUrhH4,11213
  elspais/core/hasher.py,sha256=X3Ry_M6zgs2pZPZ3UFItlvM0mRW6aPS0SKPhCYeaFUE,4259
  elspais/core/models.py,sha256=G08Yg9_69SSrdfGCgZJmya6DESiCijS2Y228q32m0l8,9558
  elspais/core/parser.py,sha256=LlureFW9dbpf26VsSF8yP7IvHcZC4zaJ_YPWgqx9sdc,22140
@@ -31,8 +33,8 @@ elspais/testing/config.py,sha256=PQkrnuk_p8pObc-Nio3s3mIKLO4AfB-2eiwXruXTw0Y,162
  elspais/testing/mapper.py,sha256=EGund_KgeYbAnWRRcnuVs0xCzlf7EvJ-qiluSmvRIso,5649
  elspais/testing/result_parser.py,sha256=u-TI5oytCqxz769AyU7WF9mOOULSzZAArvBg6JXwG1Q,9280
  elspais/testing/scanner.py,sha256=PEn0qJCE2eX2SCru3Cc-v18mf81q7BNkPYE-MZ3m8CM,7037
- elspais-0.9.1.dist-info/METADATA,sha256=3AtcIz9g0uMkL_txPwM638E5NBPSWA-jujYGwNahIAU,9682
- elspais-0.9.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- elspais-0.9.1.dist-info/entry_points.txt,sha256=yWZZEfn2fBSKSzGoS-fMQ9YoTkyeu6-i7Oht6NsdKpk,45
- elspais-0.9.1.dist-info/licenses/LICENSE,sha256=x_dNMsy_askp2MmKXZFL2bKW_tDiJHcRTyAg0TY1RMI,1063
- elspais-0.9.1.dist-info/RECORD,,
+ elspais-0.9.3.dist-info/METADATA,sha256=Le09mH3JQg_AZEPTcI5fmmIGpawHRp6IrZOOfAvffno,10642
+ elspais-0.9.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ elspais-0.9.3.dist-info/entry_points.txt,sha256=yWZZEfn2fBSKSzGoS-fMQ9YoTkyeu6-i7Oht6NsdKpk,45
+ elspais-0.9.3.dist-info/licenses/LICENSE,sha256=x_dNMsy_askp2MmKXZFL2bKW_tDiJHcRTyAg0TY1RMI,1063
+ elspais-0.9.3.dist-info/RECORD,,