elspais 0.11.2__py3-none-any.whl → 0.43.5__py3-none-any.whl
This diff compares the contents of two package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- elspais/__init__.py +1 -10
- elspais/{sponsors/__init__.py → associates.py} +102 -56
- elspais/cli.py +366 -69
- elspais/commands/__init__.py +9 -3
- elspais/commands/analyze.py +118 -169
- elspais/commands/changed.py +12 -23
- elspais/commands/config_cmd.py +10 -13
- elspais/commands/edit.py +33 -13
- elspais/commands/example_cmd.py +319 -0
- elspais/commands/hash_cmd.py +161 -183
- elspais/commands/health.py +1177 -0
- elspais/commands/index.py +98 -115
- elspais/commands/init.py +99 -22
- elspais/commands/reformat_cmd.py +41 -433
- elspais/commands/rules_cmd.py +2 -2
- elspais/commands/trace.py +443 -324
- elspais/commands/validate.py +193 -411
- elspais/config/__init__.py +799 -5
- elspais/{core/content_rules.py → content_rules.py} +20 -2
- elspais/docs/cli/assertions.md +67 -0
- elspais/docs/cli/commands.md +304 -0
- elspais/docs/cli/config.md +262 -0
- elspais/docs/cli/format.md +66 -0
- elspais/docs/cli/git.md +45 -0
- elspais/docs/cli/health.md +190 -0
- elspais/docs/cli/hierarchy.md +60 -0
- elspais/docs/cli/ignore.md +72 -0
- elspais/docs/cli/mcp.md +245 -0
- elspais/docs/cli/quickstart.md +58 -0
- elspais/docs/cli/traceability.md +89 -0
- elspais/docs/cli/validation.md +96 -0
- elspais/graph/GraphNode.py +383 -0
- elspais/graph/__init__.py +40 -0
- elspais/graph/annotators.py +927 -0
- elspais/graph/builder.py +1886 -0
- elspais/graph/deserializer.py +248 -0
- elspais/graph/factory.py +284 -0
- elspais/graph/metrics.py +127 -0
- elspais/graph/mutations.py +161 -0
- elspais/graph/parsers/__init__.py +156 -0
- elspais/graph/parsers/code.py +213 -0
- elspais/graph/parsers/comments.py +112 -0
- elspais/graph/parsers/config_helpers.py +29 -0
- elspais/graph/parsers/heredocs.py +225 -0
- elspais/graph/parsers/journey.py +131 -0
- elspais/graph/parsers/remainder.py +79 -0
- elspais/graph/parsers/requirement.py +347 -0
- elspais/graph/parsers/results/__init__.py +6 -0
- elspais/graph/parsers/results/junit_xml.py +229 -0
- elspais/graph/parsers/results/pytest_json.py +313 -0
- elspais/graph/parsers/test.py +305 -0
- elspais/graph/relations.py +78 -0
- elspais/graph/serialize.py +216 -0
- elspais/html/__init__.py +8 -0
- elspais/html/generator.py +731 -0
- elspais/html/templates/trace_view.html.j2 +2151 -0
- elspais/mcp/__init__.py +45 -29
- elspais/mcp/__main__.py +5 -1
- elspais/mcp/file_mutations.py +138 -0
- elspais/mcp/server.py +1998 -244
- elspais/testing/__init__.py +3 -3
- elspais/testing/config.py +3 -0
- elspais/testing/mapper.py +1 -1
- elspais/testing/scanner.py +301 -12
- elspais/utilities/__init__.py +1 -0
- elspais/utilities/docs_loader.py +115 -0
- elspais/utilities/git.py +607 -0
- elspais/{core → utilities}/hasher.py +8 -22
- elspais/utilities/md_renderer.py +189 -0
- elspais/{core → utilities}/patterns.py +56 -51
- elspais/utilities/reference_config.py +626 -0
- elspais/validation/__init__.py +19 -0
- elspais/validation/format.py +264 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/METADATA +7 -4
- elspais-0.43.5.dist-info/RECORD +80 -0
- elspais/config/defaults.py +0 -179
- elspais/config/loader.py +0 -494
- elspais/core/__init__.py +0 -21
- elspais/core/git.py +0 -346
- elspais/core/models.py +0 -320
- elspais/core/parser.py +0 -639
- elspais/core/rules.py +0 -509
- elspais/mcp/context.py +0 -172
- elspais/mcp/serializers.py +0 -112
- elspais/reformat/__init__.py +0 -50
- elspais/reformat/detector.py +0 -112
- elspais/reformat/hierarchy.py +0 -247
- elspais/reformat/line_breaks.py +0 -218
- elspais/reformat/prompts.py +0 -133
- elspais/reformat/transformer.py +0 -266
- elspais/trace_view/__init__.py +0 -55
- elspais/trace_view/coverage.py +0 -183
- elspais/trace_view/generators/__init__.py +0 -12
- elspais/trace_view/generators/base.py +0 -334
- elspais/trace_view/generators/csv.py +0 -118
- elspais/trace_view/generators/markdown.py +0 -170
- elspais/trace_view/html/__init__.py +0 -33
- elspais/trace_view/html/generator.py +0 -1140
- elspais/trace_view/html/templates/base.html +0 -283
- elspais/trace_view/html/templates/components/code_viewer_modal.html +0 -14
- elspais/trace_view/html/templates/components/file_picker_modal.html +0 -20
- elspais/trace_view/html/templates/components/legend_modal.html +0 -69
- elspais/trace_view/html/templates/components/review_panel.html +0 -118
- elspais/trace_view/html/templates/partials/review/help/help-panel.json +0 -244
- elspais/trace_view/html/templates/partials/review/help/onboarding.json +0 -77
- elspais/trace_view/html/templates/partials/review/help/tooltips.json +0 -237
- elspais/trace_view/html/templates/partials/review/review-comments.js +0 -928
- elspais/trace_view/html/templates/partials/review/review-data.js +0 -961
- elspais/trace_view/html/templates/partials/review/review-help.js +0 -679
- elspais/trace_view/html/templates/partials/review/review-init.js +0 -177
- elspais/trace_view/html/templates/partials/review/review-line-numbers.js +0 -429
- elspais/trace_view/html/templates/partials/review/review-packages.js +0 -1029
- elspais/trace_view/html/templates/partials/review/review-position.js +0 -540
- elspais/trace_view/html/templates/partials/review/review-resize.js +0 -115
- elspais/trace_view/html/templates/partials/review/review-status.js +0 -659
- elspais/trace_view/html/templates/partials/review/review-sync.js +0 -992
- elspais/trace_view/html/templates/partials/review-styles.css +0 -2238
- elspais/trace_view/html/templates/partials/scripts.js +0 -1741
- elspais/trace_view/html/templates/partials/styles.css +0 -1756
- elspais/trace_view/models.py +0 -378
- elspais/trace_view/review/__init__.py +0 -63
- elspais/trace_view/review/branches.py +0 -1142
- elspais/trace_view/review/models.py +0 -1200
- elspais/trace_view/review/position.py +0 -591
- elspais/trace_view/review/server.py +0 -1032
- elspais/trace_view/review/status.py +0 -455
- elspais/trace_view/review/storage.py +0 -1343
- elspais/trace_view/scanning.py +0 -213
- elspais/trace_view/specs/README.md +0 -84
- elspais/trace_view/specs/tv-d00001-template-architecture.md +0 -36
- elspais/trace_view/specs/tv-d00002-css-extraction.md +0 -37
- elspais/trace_view/specs/tv-d00003-js-extraction.md +0 -43
- elspais/trace_view/specs/tv-d00004-build-embedding.md +0 -40
- elspais/trace_view/specs/tv-d00005-test-format.md +0 -78
- elspais/trace_view/specs/tv-d00010-review-data-models.md +0 -33
- elspais/trace_view/specs/tv-d00011-review-storage.md +0 -33
- elspais/trace_view/specs/tv-d00012-position-resolution.md +0 -33
- elspais/trace_view/specs/tv-d00013-git-branches.md +0 -31
- elspais/trace_view/specs/tv-d00014-review-api-server.md +0 -31
- elspais/trace_view/specs/tv-d00015-status-modifier.md +0 -27
- elspais/trace_view/specs/tv-d00016-js-integration.md +0 -33
- elspais/trace_view/specs/tv-p00001-html-generator.md +0 -33
- elspais/trace_view/specs/tv-p00002-review-system.md +0 -29
- elspais-0.11.2.dist-info/RECORD +0 -101
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/WHEEL +0 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/entry_points.txt +0 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/licenses/LICENSE +0 -0
elspais/core/rules.py
DELETED
@@ -1,509 +0,0 @@
"""
elspais.core.rules - Validation rule engine.

Provides configurable validation rules for requirement hierarchies,
format compliance, and traceability.
"""

from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Dict, List, Optional, Set

from elspais.core.models import Requirement
from elspais.core.patterns import PatternConfig, PatternValidator


class Severity(Enum):
    """Severity level for rule violations."""

    ERROR = "error"
    WARNING = "warning"
    INFO = "info"


@dataclass
class RuleViolation:
    """
    Represents a rule violation found during validation.

    Attributes:
        rule_name: Name of the violated rule (e.g., "hierarchy.circular")
        requirement_id: ID of the requirement with the violation
        message: Human-readable description of the violation
        severity: Severity level
        location: File:line location string
    """

    rule_name: str
    requirement_id: str
    message: str
    severity: Severity
    location: str = ""

    def __str__(self) -> str:
        prefix = {
            Severity.ERROR: "❌ ERROR",
            Severity.WARNING: "⚠️ WARNING",
            Severity.INFO: "ℹ️ INFO",
        }.get(self.severity, "?")
        return (
            f"{prefix} [{self.rule_name}] {self.requirement_id}\n"
            f" {self.message}\n {self.location}"
        )


@dataclass
class HierarchyConfig:
    """Configuration for hierarchy validation rules."""

    allowed_implements: List[str] = field(default_factory=list)
    allow_circular: bool = False
    allow_orphans: bool = False
    max_depth: int = 5
    cross_repo_implements: bool = True

    # Parsed allowed relationships: source_type -> set of allowed target types
    _allowed_map: Dict[str, Set[str]] = field(default_factory=dict, repr=False)

    def __post_init__(self) -> None:
        """Parse allowed_implements into a lookup map."""
        self._allowed_map = {}
        for rule in self.allowed_implements:
            # Parse "dev -> ops, prd"
            parts = rule.split("->")
            if len(parts) == 2:
                source = parts[0].strip().lower()
                targets = [t.strip().lower() for t in parts[1].split(",")]
                self._allowed_map[source] = set(targets)

    def can_implement(self, source_type: str, target_type: str) -> bool:
        """Check if source type can implement target type."""
        source = source_type.lower()
        target = target_type.lower()
        allowed = self._allowed_map.get(source, set())
        return target in allowed


@dataclass
class FormatConfig:
    """Configuration for format validation rules."""

    require_hash: bool = True
    require_rationale: bool = False
    require_status: bool = True
    allowed_statuses: List[str] = field(
        default_factory=lambda: ["Active", "Draft", "Deprecated", "Superseded"]
    )

    # Assertion format rules
    require_assertions: bool = True
    acceptance_criteria: str = "warn"  # "allow" | "warn" | "error"
    require_shall: bool = True
    labels_sequential: bool = True
    labels_unique: bool = True
    placeholder_values: List[str] = field(
        default_factory=lambda: ["obsolete", "removed", "deprecated", "N/A", "n/a", "-", "reserved"]
    )


@dataclass
class RulesConfig:
    """Complete configuration for all validation rules."""

    hierarchy: HierarchyConfig = field(default_factory=HierarchyConfig)
    format: FormatConfig = field(default_factory=FormatConfig)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "RulesConfig":
        """Create RulesConfig from configuration dictionary."""
        hierarchy_data = data.get("hierarchy", {})
        format_data = data.get("format", {})

        hierarchy = HierarchyConfig(
            allowed_implements=hierarchy_data.get(
                "allowed_implements", ["dev -> ops, prd", "ops -> prd", "prd -> prd"]
            ),
            allow_circular=hierarchy_data.get("allow_circular", False),
            allow_orphans=hierarchy_data.get("allow_orphans", False),
            max_depth=hierarchy_data.get("max_depth", 5),
            cross_repo_implements=hierarchy_data.get("cross_repo_implements", True),
        )

        format_config = FormatConfig(
            require_hash=format_data.get("require_hash", True),
            require_rationale=format_data.get("require_rationale", False),
            require_status=format_data.get("require_status", True),
            allowed_statuses=format_data.get(
                "allowed_statuses", ["Active", "Draft", "Deprecated", "Superseded"]
            ),
            # Assertion rules
            require_assertions=format_data.get("require_assertions", True),
            acceptance_criteria=format_data.get("acceptance_criteria", "warn"),
            require_shall=format_data.get("require_shall", True),
            labels_sequential=format_data.get("labels_sequential", True),
            labels_unique=format_data.get("labels_unique", True),
            placeholder_values=format_data.get(
                "placeholder_values",
                ["obsolete", "removed", "deprecated", "N/A", "n/a", "-", "reserved"],
            ),
        )

        return cls(hierarchy=hierarchy, format=format_config)


class RuleEngine:
    """
    Validates requirements against configured rules.
    """

    def __init__(
        self,
        config: RulesConfig,
        pattern_config: Optional[PatternConfig] = None,
    ):
        """
        Initialize rule engine.

        Args:
            config: Rules configuration
            pattern_config: Optional pattern configuration for assertion label validation
        """
        self.config = config
        self.pattern_config = pattern_config
        self.pattern_validator = PatternValidator(pattern_config) if pattern_config else None

    def validate(self, requirements: Dict[str, Requirement]) -> List[RuleViolation]:
        """
        Validate all requirements against configured rules.

        Args:
            requirements: Dictionary of requirement ID -> Requirement

        Returns:
            List of RuleViolation objects
        """
        violations = []

        # Run all validation rules
        violations.extend(self._check_hierarchy(requirements))
        violations.extend(self._check_format(requirements))
        violations.extend(self._check_circular(requirements))
        violations.extend(self._check_orphans(requirements))

        return violations

    def _check_hierarchy(self, requirements: Dict[str, Requirement]) -> List[RuleViolation]:
        """Check hierarchy rules (allowed implements)."""
        violations = []

        for req_id, req in requirements.items():
            source_type = self._get_type_from_level(req.level)

            for impl_id in req.implements:
                # Find the target requirement
                target_req = self._find_requirement(impl_id, requirements)
                if target_req is None:
                    # Target not found - this is a broken link, not hierarchy violation
                    continue

                target_type = self._get_type_from_level(target_req.level)

                # Check if this relationship is allowed
                if not self.config.hierarchy.can_implement(source_type, target_type):
                    msg = (
                        f"{source_type.upper()} cannot implement "
                        f"{target_type.upper()} ({impl_id})"
                    )
                    violations.append(
                        RuleViolation(
                            rule_name="hierarchy.implements",
                            requirement_id=req_id,
                            message=msg,
                            severity=Severity.ERROR,
                            location=req.location(),
                        )
                    )

        return violations

    def _check_circular(self, requirements: Dict[str, Requirement]) -> List[RuleViolation]:
        """Check for circular dependencies."""
        if self.config.hierarchy.allow_circular:
            return []

        violations: List[RuleViolation] = []
        visited: Set[str] = set()
        path: List[str] = []

        def dfs(req_id: str) -> Optional[List[str]]:
            """Depth-first search for cycles."""
            if req_id in path:
                # Found a cycle
                cycle_start = path.index(req_id)
                return path[cycle_start:] + [req_id]

            if req_id in visited:
                return None

            visited.add(req_id)
            path.append(req_id)

            req = requirements.get(req_id)
            if req:
                for impl_id in req.implements:
                    # Resolve to full ID if needed
                    full_id = self._resolve_id(impl_id, requirements)
                    if full_id and full_id in requirements:
                        cycle = dfs(full_id)
                        if cycle:
                            return cycle

            path.pop()
            return None

        # Check each requirement for cycles
        for req_id in requirements:
            visited.clear()
            path.clear()
            cycle = dfs(req_id)
            if cycle:
                cycle_str = " -> ".join(cycle)
                violations.append(
                    RuleViolation(
                        rule_name="hierarchy.circular",
                        requirement_id=req_id,
                        message=f"Circular dependency detected: {cycle_str}",
                        severity=Severity.ERROR,
                        location=requirements[req_id].location(),
                    )
                )
                break  # Report only first cycle found

        return violations

    def _check_orphans(self, requirements: Dict[str, Requirement]) -> List[RuleViolation]:
        """Check for orphaned requirements (DEV/OPS without implements)."""
        if self.config.hierarchy.allow_orphans:
            return []

        violations = []

        for req_id, req in requirements.items():
            # Skip root level (PRD)
            if req.level.upper() in ["PRD", "PRODUCT"]:
                continue

            # DEV/OPS should implement something
            if not req.implements:
                violations.append(
                    RuleViolation(
                        rule_name="hierarchy.orphan",
                        requirement_id=req_id,
                        message=f"{req.level} requirement has no Implements reference",
                        severity=Severity.WARNING,
                        location=req.location(),
                    )
                )

        return violations

    def _check_format(self, requirements: Dict[str, Requirement]) -> List[RuleViolation]:
        """Check format rules (hash, rationale, assertions, acceptance criteria)."""
        violations = []

        for req_id, req in requirements.items():
            # Check hash
            if self.config.format.require_hash and not req.hash:
                violations.append(
                    RuleViolation(
                        rule_name="format.require_hash",
                        requirement_id=req_id,
                        message="Missing hash footer",
                        severity=Severity.ERROR,
                        location=req.location(),
                    )
                )

            # Check rationale
            if self.config.format.require_rationale and not req.rationale:
                violations.append(
                    RuleViolation(
                        rule_name="format.require_rationale",
                        requirement_id=req_id,
                        message="Missing Rationale section",
                        severity=Severity.WARNING,
                        location=req.location(),
                    )
                )

            # Check assertions (new format)
            violations.extend(self._check_assertions(req_id, req))

            # Check acceptance criteria (legacy format)
            acceptance_mode = self.config.format.acceptance_criteria
            if req.acceptance_criteria:
                if acceptance_mode == "error":
                    violations.append(
                        RuleViolation(
                            rule_name="format.acceptance_criteria",
                            requirement_id=req_id,
                            message="Acceptance Criteria not allowed; use Assertions",
                            severity=Severity.ERROR,
                            location=req.location(),
                        )
                    )
                elif acceptance_mode == "warn":
                    violations.append(
                        RuleViolation(
                            rule_name="format.acceptance_criteria",
                            requirement_id=req_id,
                            message="Acceptance Criteria deprecated; use Assertions",
                            severity=Severity.WARNING,
                            location=req.location(),
                        )
                    )
                # "allow" mode: no violation

            # Check status
            if self.config.format.require_status:
                if req.status not in self.config.format.allowed_statuses:
                    allowed = self.config.format.allowed_statuses
                    violations.append(
                        RuleViolation(
                            rule_name="format.status_valid",
                            requirement_id=req_id,
                            message=f"Invalid status '{req.status}'. Allowed: {allowed}",
                            severity=Severity.ERROR,
                            location=req.location(),
                        )
                    )

        return violations

    def _check_assertions(self, req_id: str, req: Requirement) -> List[RuleViolation]:
        """Check assertion-specific validation rules."""
        violations = []

        # Check if assertions are required
        if self.config.format.require_assertions and not req.assertions:
            violations.append(
                RuleViolation(
                    rule_name="format.require_assertions",
                    requirement_id=req_id,
                    message="Missing Assertions section",
                    severity=Severity.ERROR,
                    location=req.location(),
                )
            )
            return violations  # No point checking other assertion rules

        if not req.assertions:
            return violations

        # Extract labels and check for duplicates
        labels = [a.label for a in req.assertions]

        # Check labels are unique
        if self.config.format.labels_unique:
            seen = set()
            for label in labels:
                if label in seen:
                    violations.append(
                        RuleViolation(
                            rule_name="format.labels_unique",
                            requirement_id=req_id,
                            message=f"Duplicate assertion label: {label}",
                            severity=Severity.ERROR,
                            location=req.location(),
                        )
                    )
                seen.add(label)

        # Check labels are sequential
        if self.config.format.labels_sequential and self.pattern_validator:
            expected_labels = []
            for i in range(len(labels)):
                expected_labels.append(self.pattern_validator.format_assertion_label(i))
            if labels != expected_labels:
                msg = f"Labels not sequential: {labels} (expected {expected_labels})"
                violations.append(
                    RuleViolation(
                        rule_name="format.labels_sequential",
                        requirement_id=req_id,
                        message=msg,
                        severity=Severity.ERROR,
                        location=req.location(),
                    )
                )

        # Check SHALL/SHALL NOT language (skip placeholders)
        if self.config.format.require_shall:
            for assertion in req.assertions:
                if assertion.is_placeholder:
                    continue
                if "SHALL" not in assertion.text.upper():
                    text_preview = assertion.text[:40]
                    msg = f"Assertion {assertion.label} missing SHALL: {text_preview}..."
                    violations.append(
                        RuleViolation(
                            rule_name="format.require_shall",
                            requirement_id=req_id,
                            message=msg,
                            severity=Severity.WARNING,
                            location=req.location(),
                        )
                    )

        # Validate assertion labels against configured pattern
        if self.pattern_validator:
            for assertion in req.assertions:
                if not self.pattern_validator.is_valid_assertion_label(assertion.label):
                    violations.append(
                        RuleViolation(
                            rule_name="format.assertion_label",
                            requirement_id=req_id,
                            message=f"Invalid assertion label format: {assertion.label}",
                            severity=Severity.ERROR,
                            location=req.location(),
                        )
                    )

        return violations

    def _get_type_from_level(self, level: str) -> str:
        """Map level name to type code."""
        level_map = {
            "PRD": "prd",
            "PRODUCT": "prd",
            "OPS": "ops",
            "OPERATIONS": "ops",
            "DEV": "dev",
            "DEVELOPMENT": "dev",
        }
        return level_map.get(level.upper(), level.lower())

    def _find_requirement(
        self, impl_id: str, requirements: Dict[str, Requirement]
    ) -> Optional[Requirement]:
        """Find a requirement by ID (handles partial IDs)."""
        # Try exact match first
        if impl_id in requirements:
            return requirements[impl_id]

        # Try to find by suffix (e.g., "p00001" matches "REQ-p00001")
        for req_id, req in requirements.items():
            if req_id.endswith(impl_id) or req_id.endswith(f"-{impl_id}"):
                return req

        return None

    def _resolve_id(self, impl_id: str, requirements: Dict[str, Requirement]) -> Optional[str]:
        """Resolve a partial ID to a full ID."""
        if impl_id in requirements:
            return impl_id

        for req_id in requirements:
            if req_id.endswith(impl_id) or req_id.endswith(f"-{impl_id}"):
                return req_id

        return None
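For context, this removed module was driven through RulesConfig.from_dict and RuleEngine.validate. Below is a minimal usage sketch of that 0.11.2-era flow, assuming specs have already been parsed into a Dict[str, Requirement] via the (also removed) elspais.core.parser module; the "spec" directory path and config values are illustrative, not taken from the package.

from pathlib import Path

from elspais.core.parser import RequirementParser   # removed in 0.43.5
from elspais.core.patterns import PatternConfig      # now elspais/utilities/patterns.py
from elspais.core.rules import RuleEngine, RulesConfig, Severity

# Parse specs into a Dict[str, Requirement], then validate against the rules.
pattern_config = PatternConfig.from_dict({})                    # defaults
parser = RequirementParser(pattern_config)
requirements = parser.parse_directory(Path("spec"))             # illustrative path

config = RulesConfig.from_dict({"hierarchy": {"allow_orphans": True}})
engine = RuleEngine(config, pattern_config=pattern_config)

violations = engine.validate(requirements)
for violation in violations:
    print(violation)                                            # "❌ ERROR [rule] REQ-id ..." lines

errors = [v for v in violations if v.severity is Severity.ERROR]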
elspais/mcp/context.py
DELETED
@@ -1,172 +0,0 @@
"""
elspais.mcp.context - Workspace context for MCP server.

Manages workspace state including configuration, requirements cache,
and content rules.
"""

import re
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional

from elspais.config.loader import find_config_file, get_spec_directories, load_config
from elspais.core.content_rules import load_content_rules
from elspais.core.models import ContentRule, Requirement
from elspais.core.parser import RequirementParser
from elspais.core.patterns import PatternConfig


@dataclass
class WorkspaceContext:
    """
    Manages workspace state for MCP server operations.

    Provides caching of parsed requirements and access to configuration,
    content rules, and other workspace resources.
    """

    working_dir: Path
    config: Dict[str, Any] = field(default_factory=dict)
    _requirements_cache: Optional[Dict[str, Requirement]] = field(default=None, repr=False)
    _parser: Optional[RequirementParser] = field(default=None, repr=False)

    @classmethod
    def from_directory(cls, directory: Path) -> "WorkspaceContext":
        """
        Initialize context from a working directory.

        Loads configuration from .elspais.toml if found.

        Args:
            directory: Working directory path

        Returns:
            Initialized WorkspaceContext
        """
        directory = directory.resolve()
        config_path = find_config_file(directory)

        if config_path:
            config = load_config(config_path)
        else:
            # Use defaults
            from elspais.config.defaults import DEFAULT_CONFIG

            config = DEFAULT_CONFIG.copy()

        return cls(working_dir=directory, config=config)

    def get_requirements(self, force_refresh: bool = False) -> Dict[str, Requirement]:
        """
        Get all parsed requirements, with caching.

        Args:
            force_refresh: If True, ignore cache and re-parse

        Returns:
            Dict mapping requirement IDs to Requirement objects
        """
        if self._requirements_cache is None or force_refresh:
            self._requirements_cache = self._parse_requirements()
        return self._requirements_cache

    def get_requirement(self, req_id: str) -> Optional[Requirement]:
        """
        Get a single requirement by ID.

        Args:
            req_id: Requirement ID (e.g., "REQ-p00001")

        Returns:
            Requirement if found, None otherwise
        """
        requirements = self.get_requirements()
        return requirements.get(req_id)

    def get_content_rules(self) -> List[ContentRule]:
        """
        Get all configured content rules.

        Returns:
            List of ContentRule objects
        """
        return load_content_rules(self.config, self.working_dir)

    def search_requirements(
        self,
        query: str,
        field: str = "all",
        regex: bool = False,
    ) -> List[Requirement]:
        """
        Search requirements by pattern.

        Args:
            query: Search query string
            field: Field to search - "all", "id", "title", "body", "assertions"
            regex: If True, treat query as regex pattern

        Returns:
            List of matching requirements
        """
        requirements = self.get_requirements()
        results = []

        if regex:
            pattern = re.compile(query, re.IGNORECASE)
        else:
            pattern = re.compile(re.escape(query), re.IGNORECASE)

        for req in requirements.values():
            if self._matches(req, pattern, field):
                results.append(req)

        return results

    def invalidate_cache(self) -> None:
        """Clear cached requirements (call after edits)."""
        self._requirements_cache = None

    def _parse_requirements(self) -> Dict[str, Requirement]:
        """Parse requirements from spec directories."""
        if self._parser is None:
            pattern_config = PatternConfig.from_dict(self.config.get("patterns", {}))
            self._parser = RequirementParser(pattern_config)

        spec_dirs = get_spec_directories(None, self.config, self.working_dir)
        skip_files = self.config.get("spec", {}).get("skip_files", [])

        all_requirements: Dict[str, Requirement] = {}

        for spec_dir in spec_dirs:
            if spec_dir.exists():
                requirements = self._parser.parse_directory(spec_dir, skip_files=skip_files)
                all_requirements.update(requirements)

        return all_requirements

    def _matches(self, req: Requirement, pattern: re.Pattern, field: str) -> bool:
        """Check if requirement matches search pattern."""
        if field == "id":
            return bool(pattern.search(req.id))
        elif field == "title":
            return bool(pattern.search(req.title))
        elif field == "body":
            return bool(pattern.search(req.body))
        elif field == "assertions":
            for assertion in req.assertions:
                if pattern.search(assertion.text):
                    return True
            return False
        else:  # "all"
            if pattern.search(req.id):
                return True
            if pattern.search(req.title):
                return True
            if pattern.search(req.body):
                return True
            for assertion in req.assertions:
                if pattern.search(assertion.text):
                    return True
            return False
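The removed WorkspaceContext wrapped spec parsing behind a cache for the MCP server. A minimal sketch of how the 0.11.2 class was used, assuming a workspace with an .elspais.toml in the current directory (the directory path and search query are illustrative):

from pathlib import Path

from elspais.mcp.context import WorkspaceContext   # removed in 0.43.5

ctx = WorkspaceContext.from_directory(Path("."))   # loads .elspais.toml, or falls back to defaults

# Cached parse of all configured spec directories
requirements = ctx.get_requirements()
req = ctx.get_requirement("REQ-p00001")

# Case-insensitive substring search; field can be "all", "id", "title", "body", or "assertions"
hits = ctx.search_requirements("SHALL", field="assertions")

# After editing spec files, drop the cache so the next call re-parses
ctx.invalidate_cache()
requirements = ctx.get_requirements(force_refresh=True)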