elspais 0.9.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elspais/__init__.py +36 -0
- elspais/__main__.py +8 -0
- elspais/cli.py +525 -0
- elspais/commands/__init__.py +12 -0
- elspais/commands/analyze.py +218 -0
- elspais/commands/config_cmd.py +501 -0
- elspais/commands/edit.py +522 -0
- elspais/commands/hash_cmd.py +174 -0
- elspais/commands/index.py +166 -0
- elspais/commands/init.py +177 -0
- elspais/commands/rules_cmd.py +120 -0
- elspais/commands/trace.py +208 -0
- elspais/commands/validate.py +388 -0
- elspais/config/__init__.py +13 -0
- elspais/config/defaults.py +173 -0
- elspais/config/loader.py +494 -0
- elspais/core/__init__.py +21 -0
- elspais/core/content_rules.py +170 -0
- elspais/core/hasher.py +143 -0
- elspais/core/models.py +318 -0
- elspais/core/parser.py +596 -0
- elspais/core/patterns.py +390 -0
- elspais/core/rules.py +514 -0
- elspais/mcp/__init__.py +42 -0
- elspais/mcp/__main__.py +6 -0
- elspais/mcp/context.py +171 -0
- elspais/mcp/serializers.py +112 -0
- elspais/mcp/server.py +339 -0
- elspais/testing/__init__.py +27 -0
- elspais/testing/config.py +48 -0
- elspais/testing/mapper.py +163 -0
- elspais/testing/result_parser.py +289 -0
- elspais/testing/scanner.py +206 -0
- elspais-0.9.1.dist-info/METADATA +393 -0
- elspais-0.9.1.dist-info/RECORD +38 -0
- elspais-0.9.1.dist-info/WHEEL +4 -0
- elspais-0.9.1.dist-info/entry_points.txt +2 -0
- elspais-0.9.1.dist-info/licenses/LICENSE +21 -0
elspais/config/loader.py
ADDED
|
@@ -0,0 +1,494 @@
|
|
|
1
|
+
"""
|
|
2
|
+
elspais.config.loader - Configuration loading and merging.
|
|
3
|
+
|
|
4
|
+
Handles loading .elspais.toml files and merging with defaults.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import re
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
from elspais.config.defaults import DEFAULT_CONFIG
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def load_config(config_path: Path) -> Dict[str, Any]:
    """
    Load configuration from a TOML file.

    The user's settings are layered over ``DEFAULT_CONFIG`` and then
    environment-variable overrides are applied on top of the merge.

    Args:
        config_path: Path to the .elspais.toml file

    Returns:
        Merged configuration dictionary
    """
    raw_text = config_path.read_text(encoding="utf-8")
    layered = merge_configs(DEFAULT_CONFIG, parse_toml(raw_text))
    return apply_env_overrides(layered)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def find_config_file(start_path: Path) -> Optional[Path]:
    """
    Locate the nearest .elspais.toml configuration file.

    Walks upward from ``start_path`` toward the filesystem root, stopping
    early once a directory containing ``.git`` has been examined (the
    repository root is the last directory searched).

    Args:
        start_path: Directory (or file, whose parent is used) to start from

    Returns:
        Path to the config file if found, None otherwise
    """
    directory = start_path.resolve()

    # A file argument means "search from its containing directory".
    if directory.is_file():
        directory = directory.parent

    while directory != directory.parent:
        candidate = directory / ".elspais.toml"
        if candidate.exists():
            return candidate
        # Do not search above the repository root.
        if (directory / ".git").exists():
            break
        directory = directory.parent

    return None
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def parse_toml(content: str) -> Dict[str, Any]:
    """
    Parse TOML content into a dictionary.

    Uses a simple hand-rolled parser for zero dependencies.  Supports
    section headers (including dotted sections like [patterns.types]),
    key/value pairs, multi-line arrays, and inline comments.  This is
    not a complete TOML implementation: arrays-of-tables ([[...]]) and
    string escape sequences are not handled.

    Args:
        content: TOML file content

    Returns:
        Parsed dictionary
    """
    result: Dict[str, Any] = {}
    current_section: List[str] = []
    lines = content.split("\n")
    i = 0

    while i < len(lines):
        line = lines[i].strip()

        # Skip empty lines and comments
        if not line or line.startswith("#"):
            i += 1
            continue

        # Section header ([[...]] array-of-tables is not supported)
        if line.startswith("[") and not line.startswith("[["):
            section = line.strip("[]").strip()
            # Handle nested sections like [patterns.types]
            current_section = section.split(".")
            _ensure_nested(result, current_section)
            i += 1
            continue

        # Key-value pair
        if "=" in line:
            key, value = line.split("=", 1)
            key = key.strip()
            value = value.strip()

            # Multi-line arrays: keep consuming lines until the bracket
            # nesting balances.  Counting brackets (rather than stopping
            # at the first line containing "]") makes nested multi-line
            # arrays work and avoids wrongly treating a single-line
            # array with a trailing comment as multi-line.
            if value.startswith("[") and not _brackets_balanced(value):
                value_lines = [value]
                i += 1
                while i < len(lines):
                    value_lines.append(lines[i].strip())
                    if _brackets_balanced(" ".join(value_lines)):
                        break
                    i += 1
                value = " ".join(value_lines)

            # Strip inline comments (but not from quoted strings)
            if not (value.startswith('"') or value.startswith("'")):
                # Find comment marker that's not inside brackets/braces
                comment_idx = _find_comment_start(value)
                if comment_idx >= 0:
                    value = value[:comment_idx].strip()

            # Parse the value and store it in the active section
            target = _get_nested(result, current_section)
            target[key] = _parse_value(value)

        i += 1

    return result


def _brackets_balanced(text: str) -> bool:
    """Return True when the '[' count is matched by ']' (quoting ignored)."""
    return text.count("[") <= text.count("]")
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _parse_value(value: str) -> Any:
    """Parse a single TOML value string into a Python object.

    Recognises booleans, integers, floats, quoted strings, arrays, and
    inline tables; anything unrecognised is returned as a bare string.
    """
    value = value.strip()
    lowered = value.lower()

    # Boolean literals
    if lowered == "true":
        return True
    if lowered == "false":
        return False

    # Numeric literals
    if re.match(r"^-?\d+$", value):
        return int(value)
    if re.match(r"^-?\d+\.\d+$", value):
        return float(value)

    # Quoted string: strip the surrounding quotes
    for quote in ('"', "'"):
        if value.startswith(quote) and value.endswith(quote):
            return value[1:-1]

    # Array: recurse on each comma-separated element
    if value.startswith("[") and value.endswith("]"):
        body = value[1:-1].strip()
        if not body:
            return []
        return [
            _parse_value(piece.strip())
            for piece in _split_array(body)
            if piece.strip()
        ]

    # Inline table: {k = v, ...}
    if value.startswith("{") and value.endswith("}"):
        body = value[1:-1].strip()
        table: Dict[str, Any] = {}
        if body:
            for pair in _split_array(body):
                if "=" in pair:
                    k, v = pair.split("=", 1)
                    table[k.strip()] = _parse_value(v.strip())
        return table

    # Fallback: unquoted bare string
    return value
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def _find_comment_start(value: str) -> int:
|
|
190
|
+
"""Find the start of an inline comment, respecting brackets."""
|
|
191
|
+
depth = 0
|
|
192
|
+
for i, char in enumerate(value):
|
|
193
|
+
if char in "[{":
|
|
194
|
+
depth += 1
|
|
195
|
+
elif char in "]}":
|
|
196
|
+
depth -= 1
|
|
197
|
+
elif char == "#" and depth == 0:
|
|
198
|
+
return i
|
|
199
|
+
return -1
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _split_array(s: str) -> List[str]:
|
|
203
|
+
"""Split array/table content, respecting nested structures and quoted strings."""
|
|
204
|
+
items = []
|
|
205
|
+
current = ""
|
|
206
|
+
depth = 0
|
|
207
|
+
in_string = False
|
|
208
|
+
string_char = None
|
|
209
|
+
|
|
210
|
+
for char in s:
|
|
211
|
+
if in_string:
|
|
212
|
+
current += char
|
|
213
|
+
if char == string_char:
|
|
214
|
+
in_string = False
|
|
215
|
+
string_char = None
|
|
216
|
+
elif char in "\"'":
|
|
217
|
+
in_string = True
|
|
218
|
+
string_char = char
|
|
219
|
+
current += char
|
|
220
|
+
elif char in "[{":
|
|
221
|
+
depth += 1
|
|
222
|
+
current += char
|
|
223
|
+
elif char in "]}":
|
|
224
|
+
depth -= 1
|
|
225
|
+
current += char
|
|
226
|
+
elif char == "," and depth == 0:
|
|
227
|
+
items.append(current.strip())
|
|
228
|
+
current = ""
|
|
229
|
+
else:
|
|
230
|
+
current += char
|
|
231
|
+
|
|
232
|
+
if current.strip():
|
|
233
|
+
items.append(current.strip())
|
|
234
|
+
|
|
235
|
+
return items
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def _ensure_nested(d: Dict, keys: List[str]) -> None:
|
|
239
|
+
"""Ensure nested dictionary structure exists."""
|
|
240
|
+
current = d
|
|
241
|
+
for key in keys:
|
|
242
|
+
if key not in current:
|
|
243
|
+
current[key] = {}
|
|
244
|
+
current = current[key]
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def _get_nested(d: Dict, keys: List[str]) -> Dict:
|
|
248
|
+
"""Get nested dictionary by key path."""
|
|
249
|
+
current = d
|
|
250
|
+
for key in keys:
|
|
251
|
+
current = current[key]
|
|
252
|
+
return current
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def merge_configs(defaults: Dict[str, Any], user: Dict[str, Any]) -> Dict[str, Any]:
    """
    Deep merge user configuration over defaults.

    Nested dicts are merged recursively.  For any other value type the
    user's value wins; a missing (None) user value falls back to the
    default, and vice versa.

    Args:
        defaults: Default configuration dictionary
        user: User configuration dictionary

    Returns:
        Merged configuration dictionary
    """
    merged: Dict[str, Any] = {}

    for key in set(defaults) | set(user):
        base = defaults.get(key)
        override = user.get(key)

        if isinstance(base, dict) and isinstance(override, dict):
            merged[key] = merge_configs(base, override)
        elif override is None:
            merged[key] = base
        else:
            merged[key] = override

    return merged
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def apply_env_overrides(config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Apply environment variable overrides to configuration (in place).

    Pattern: ELSPAIS_<SECTION>_<KEY> (e.g. ELSPAIS_DIRECTORIES_SPEC sets
    config["directories"]["spec"]).  The first underscore after the
    prefix separates section from key, so section names may not contain
    underscores (keys may).  Only existing dict-valued sections are
    touched.  "true"/"false" become booleans and digit-only values
    become ints; everything else stays a string.

    Args:
        config: Configuration dictionary (mutated and returned)

    Returns:
        Configuration with environment overrides applied
    """
    prefix = "ELSPAIS_"

    for env_name, raw in os.environ.items():
        if not env_name.startswith(prefix):
            continue

        # ELSPAIS_DIRECTORIES_SPEC -> section "directories", key "spec"
        pieces = env_name[len(prefix):].lower().split("_")
        if len(pieces) < 2:
            continue

        section = pieces[0]
        subkey = "_".join(pieces[1:])

        if section in config and isinstance(config[section], dict):
            config[section][subkey] = _coerce_env_value(raw)

    return config


def _coerce_env_value(raw: str) -> Any:
    """Convert an environment-variable string to bool/int when unambiguous."""
    lowered = raw.lower()
    if lowered == "true":
        return True
    if lowered == "false":
        return False
    if raw.isdigit():
        return int(raw)
    return raw
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def validate_config(config: Dict[str, Any]) -> List[str]:
    """
    Validate configuration for required fields and valid values.

    Checks that a [project] section exists, that project.type (when
    present) is 'core' or 'associated', and that associated projects
    define associated.prefix.

    Args:
        config: Configuration dictionary

    Returns:
        List of error messages (empty if valid)
    """
    errors: List[str] = []

    if "project" not in config:
        errors.append("Missing required section: [project]")
    elif "type" in config["project"]:
        project_type = config["project"]["type"]
        if project_type not in ("core", "associated"):
            errors.append(
                f"Invalid project type: {project_type}. Must be 'core' or 'associated'"
            )

    # Associated repositories must declare the prefix they publish under.
    if config.get("project", {}).get("type") == "associated":
        if not config.get("associated", {}).get("prefix"):
            errors.append("Associated repository requires associated.prefix to be set")

    return errors
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
def get_directories(
    config: Dict[str, Any],
    key: str,
    override: Optional[Path] = None,
    base_path: Optional[Path] = None,
    default: Optional[str] = None,
    require_exist: bool = True,
    recursive: bool = False,
    ignore: Optional[List[str]] = None,
) -> List[Path]:
    """Get directory paths from config, handling both strings and lists.

    Config can specify either a single directory or a list:
    - spec = "spec"
    - spec = ["spec", "spec/roadmap"]
    - code = ["apps", "packages", "server"]

    Args:
        config: Configuration dictionary
        key: Config key to look up under 'directories' (e.g. "spec", "code")
        override: Explicit directory path override (e.g. from CLI --spec-dir);
            when given it is returned as-is, bypassing config entirely
        base_path: Base path to resolve relative directories (defaults to cwd)
        default: Default value if key not in config (defaults to key name)
        require_exist: If True, keep only directories that currently exist
        recursive: If True, also include all subdirectories recursively
        ignore: Directory names to skip when recursing; defaults to
            directories.ignore from config

    Returns:
        List of directory paths (optionally filtered to existing ones)
    """
    if override:
        return [override]

    base = Path.cwd() if base_path is None else base_path
    fallback = key if default is None else default
    if ignore is None:
        ignore = config.get("directories", {}).get("ignore", [])

    configured = config.get("directories", {}).get(key, fallback)
    # A bare string means a single directory; otherwise treat as a list.
    entries = [configured] if isinstance(configured, str) else list(configured)

    collected: List[Path] = []
    for entry in entries:
        path = base / entry
        exists_as_dir = path.exists() and path.is_dir()

        if require_exist and not exists_as_dir:
            continue
        collected.append(path)

        if recursive and exists_as_dir:
            for sub in path.rglob("*"):
                # Skip any subtree whose path mentions an ignored name.
                if sub.is_dir() and not any(part in ignore for part in sub.parts):
                    collected.append(sub)

    return collected
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def get_code_directories(
    config: Dict[str, Any],
    base_path: Optional[Path] = None,
) -> List[Path]:
    """Get all code directories recursively, respecting ignore patterns.

    Thin wrapper over get_directories(key="code", recursive=True).

    Args:
        config: Configuration dictionary
        base_path: Base path to resolve relative directories (defaults to cwd)

    Returns:
        List of existing code directory paths (including all subdirectories)
    """
    return get_directories(
        config=config,
        key="code",
        base_path=base_path,
        recursive=True,
    )
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
def get_spec_directories(
    spec_dir_override: Optional[Path],
    config: Dict[str, Any],
    base_path: Optional[Path] = None,
) -> List[Path]:
    """Get the spec directories from override or config.

    Thin wrapper over get_directories(key="spec", default="spec").

    Args:
        spec_dir_override: Explicit spec directory (e.g. from CLI --spec-dir)
        config: Configuration dictionary
        base_path: Base path to resolve relative directories (defaults to cwd)

    Returns:
        List of existing spec directory paths
    """
    return get_directories(
        config=config,
        key="spec",
        override=spec_dir_override,
        base_path=base_path,
        default="spec",
    )
|
|
473
|
+
|
|
474
|
+
|
|
475
|
+
def get_content_rules(
    config: Dict[str, Any],
    base_path: Optional[Path] = None,
) -> List[Path]:
    """Get content rule file paths from configuration.

    Paths come from rules.content_rules and are resolved relative to
    base_path; the files are NOT checked for existence.

    Args:
        config: Configuration dictionary
        base_path: Base path to resolve relative paths (defaults to cwd)

    Returns:
        List of content rule file paths (may not exist)
    """
    root = Path.cwd() if base_path is None else base_path
    relative_paths = config.get("rules", {}).get("content_rules", [])
    return [root / rel for rel in relative_paths]
|
elspais/core/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""
|
|
2
|
+
elspais.core - Core data models, pattern matching, and rule validation
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from elspais.core.hasher import calculate_hash, verify_hash
|
|
6
|
+
from elspais.core.models import ParsedRequirement, Requirement, RequirementType
|
|
7
|
+
from elspais.core.patterns import PatternConfig, PatternValidator
|
|
8
|
+
from elspais.core.rules import RuleEngine, RuleViolation, Severity
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"Requirement",
|
|
12
|
+
"ParsedRequirement",
|
|
13
|
+
"RequirementType",
|
|
14
|
+
"PatternValidator",
|
|
15
|
+
"PatternConfig",
|
|
16
|
+
"RuleEngine",
|
|
17
|
+
"RuleViolation",
|
|
18
|
+
"Severity",
|
|
19
|
+
"calculate_hash",
|
|
20
|
+
"verify_hash",
|
|
21
|
+
]
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"""
|
|
2
|
+
elspais.core.content_rules - Content rule loading and parsing.
|
|
3
|
+
|
|
4
|
+
Content rules are markdown files that provide semantic validation guidance
|
|
5
|
+
for requirements authoring. They can include YAML frontmatter for metadata.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import re
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Dict, List, Tuple
|
|
11
|
+
|
|
12
|
+
from elspais.core.models import ContentRule
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def parse_frontmatter(text: str) -> Tuple[Dict[str, Any], str]:
    """
    Parse YAML frontmatter from markdown text.

    Frontmatter is enclosed between --- markers at the start of the
    file.  When there is no opening marker, or no closing marker, the
    text is returned untouched with empty metadata.

    Args:
        text: Full markdown text

    Returns:
        Tuple of (metadata dict, content without frontmatter)
    """
    if not text.startswith("---"):
        return {}, text

    lines = text.split("\n")

    # Locate the closing --- marker (search begins after line 0).
    close = next(
        (i for i, line in enumerate(lines[1:], start=1) if line.strip() == "---"),
        None,
    )
    if close is None:
        return {}, text

    metadata = _parse_simple_yaml(lines[1:close])
    body = "\n".join(lines[close + 1:]).lstrip("\n")
    return metadata, body
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _parse_simple_yaml(lines: List[str]) -> Dict[str, Any]:
|
|
54
|
+
"""
|
|
55
|
+
Parse simple YAML format (zero-dependency).
|
|
56
|
+
|
|
57
|
+
Supports:
|
|
58
|
+
- key: value
|
|
59
|
+
- key: [item1, item2]
|
|
60
|
+
- key:
|
|
61
|
+
- item1
|
|
62
|
+
- item2
|
|
63
|
+
"""
|
|
64
|
+
result: Dict[str, Any] = {}
|
|
65
|
+
current_key = None
|
|
66
|
+
current_list: List[str] = []
|
|
67
|
+
|
|
68
|
+
for line in lines:
|
|
69
|
+
stripped = line.strip()
|
|
70
|
+
|
|
71
|
+
# Skip empty lines and comments
|
|
72
|
+
if not stripped or stripped.startswith("#"):
|
|
73
|
+
continue
|
|
74
|
+
|
|
75
|
+
# Check for list item
|
|
76
|
+
if stripped.startswith("- "):
|
|
77
|
+
if current_key:
|
|
78
|
+
current_list.append(stripped[2:].strip())
|
|
79
|
+
continue
|
|
80
|
+
|
|
81
|
+
# Check for key: value
|
|
82
|
+
if ":" in stripped:
|
|
83
|
+
# Save previous list if any
|
|
84
|
+
if current_key and current_list:
|
|
85
|
+
result[current_key] = current_list
|
|
86
|
+
current_list = []
|
|
87
|
+
|
|
88
|
+
key, _, value = stripped.partition(":")
|
|
89
|
+
key = key.strip()
|
|
90
|
+
value = value.strip()
|
|
91
|
+
|
|
92
|
+
if value:
|
|
93
|
+
# Inline value
|
|
94
|
+
if value.startswith("[") and value.endswith("]"):
|
|
95
|
+
# Inline list
|
|
96
|
+
items = value[1:-1].split(",")
|
|
97
|
+
result[key] = [item.strip().strip("\"'") for item in items if item.strip()]
|
|
98
|
+
else:
|
|
99
|
+
# Simple value
|
|
100
|
+
result[key] = value.strip("\"'")
|
|
101
|
+
current_key = None
|
|
102
|
+
else:
|
|
103
|
+
# Start of a list
|
|
104
|
+
current_key = key
|
|
105
|
+
current_list = []
|
|
106
|
+
|
|
107
|
+
# Save final list if any
|
|
108
|
+
if current_key and current_list:
|
|
109
|
+
result[current_key] = current_list
|
|
110
|
+
|
|
111
|
+
return result
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def load_content_rule(file_path: Path) -> ContentRule:
    """
    Load a single content rule file.

    Reads the markdown file, splits off YAML frontmatter, and builds a
    ContentRule.  Frontmatter keys used: title (defaults to the file
    name), type (defaults to "guidance"), applies_to (defaults to []).

    Args:
        file_path: Path to the markdown file

    Returns:
        ContentRule object

    Raises:
        FileNotFoundError: If file doesn't exist
    """
    if not file_path.exists():
        raise FileNotFoundError(f"Content rule file not found: {file_path}")

    metadata, body = parse_frontmatter(file_path.read_text(encoding="utf-8"))

    return ContentRule(
        file_path=file_path,
        title=metadata.get("title", file_path.name),
        content=body,
        type=metadata.get("type", "guidance"),
        applies_to=metadata.get("applies_to", []),
    )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def load_content_rules(
    config: Dict[str, Any],
    base_path: Path,
) -> List[ContentRule]:
    """
    Load all content rules from configuration.

    Paths come from rules.content_rules and are resolved against
    base_path.  Missing files, and files that fail to load, are
    silently skipped (best-effort loading).

    Args:
        config: Configuration dictionary
        base_path: Base path for resolving relative paths

    Returns:
        List of ContentRule objects (missing files are skipped)
    """
    rule_paths = config.get("rules", {}).get("content_rules", [])

    loaded: List[ContentRule] = []
    for rel_path in rule_paths:
        candidate = base_path / rel_path
        if not candidate.exists():
            continue
        try:
            loaded.append(load_content_rule(candidate))
        except Exception:
            # Best-effort: a malformed rule file must not break the run.
            continue

    return loaded
|