elspais-0.9.1-py3-none-any.whl
This diff lists the contents of a publicly released package version as it appears in its public registry, and is provided for informational purposes only.
- elspais/__init__.py +36 -0
- elspais/__main__.py +8 -0
- elspais/cli.py +525 -0
- elspais/commands/__init__.py +12 -0
- elspais/commands/analyze.py +218 -0
- elspais/commands/config_cmd.py +501 -0
- elspais/commands/edit.py +522 -0
- elspais/commands/hash_cmd.py +174 -0
- elspais/commands/index.py +166 -0
- elspais/commands/init.py +177 -0
- elspais/commands/rules_cmd.py +120 -0
- elspais/commands/trace.py +208 -0
- elspais/commands/validate.py +388 -0
- elspais/config/__init__.py +13 -0
- elspais/config/defaults.py +173 -0
- elspais/config/loader.py +494 -0
- elspais/core/__init__.py +21 -0
- elspais/core/content_rules.py +170 -0
- elspais/core/hasher.py +143 -0
- elspais/core/models.py +318 -0
- elspais/core/parser.py +596 -0
- elspais/core/patterns.py +390 -0
- elspais/core/rules.py +514 -0
- elspais/mcp/__init__.py +42 -0
- elspais/mcp/__main__.py +6 -0
- elspais/mcp/context.py +171 -0
- elspais/mcp/serializers.py +112 -0
- elspais/mcp/server.py +339 -0
- elspais/testing/__init__.py +27 -0
- elspais/testing/config.py +48 -0
- elspais/testing/mapper.py +163 -0
- elspais/testing/result_parser.py +289 -0
- elspais/testing/scanner.py +206 -0
- elspais-0.9.1.dist-info/METADATA +393 -0
- elspais-0.9.1.dist-info/RECORD +38 -0
- elspais-0.9.1.dist-info/WHEEL +4 -0
- elspais-0.9.1.dist-info/entry_points.txt +2 -0
- elspais-0.9.1.dist-info/licenses/LICENSE +21 -0

elspais/commands/analyze.py
@@ -0,0 +1,218 @@
+"""
+elspais.commands.analyze - Analyze requirements command.
+"""
+
+import argparse
+import sys
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from elspais.config.defaults import DEFAULT_CONFIG
+from elspais.config.loader import find_config_file, get_spec_directories, load_config
+from elspais.core.models import Requirement
+from elspais.core.parser import RequirementParser
+from elspais.core.patterns import PatternConfig
+
+
+def run(args: argparse.Namespace) -> int:
+    """Run the analyze command."""
+    if not args.analyze_action:
+        print("Usage: elspais analyze {hierarchy|orphans|coverage}")
+        return 1
+
+    if args.analyze_action == "hierarchy":
+        return run_hierarchy(args)
+    elif args.analyze_action == "orphans":
+        return run_orphans(args)
+    elif args.analyze_action == "coverage":
+        return run_coverage(args)
+
+    return 1
+
+
+def run_hierarchy(args: argparse.Namespace) -> int:
+    """Show requirement hierarchy tree."""
+    requirements = load_requirements(args)
+    if not requirements:
+        return 1
+
+    print("Requirement Hierarchy")
+    print("=" * 60)
+
+    # Find root requirements (PRD with no implements)
+    roots = [
+        req for req in requirements.values()
+        if req.level.upper() in ["PRD", "PRODUCT"] and not req.implements
+    ]
+
+    if not roots:
+        # Fall back to all PRD requirements
+        roots = [
+            req for req in requirements.values()
+            if req.level.upper() in ["PRD", "PRODUCT"]
+        ]
+
+    printed = set()
+
+    def print_tree(req: Requirement, indent: int = 0) -> None:
+        if req.id in printed:
+            return
+        printed.add(req.id)
+
+        prefix = " " * indent
+        status_icon = "✓" if req.status == "Active" else "○"
+        print(f"{prefix}{status_icon} {req.id}: {req.title}")
+
+        # Find children (requirements that implement this one)
+        children = find_children(req.id, requirements)
+        for child in children:
+            print_tree(child, indent + 1)
+
+    for root in sorted(roots, key=lambda r: r.id):
+        print_tree(root)
+        print()
+
+    return 0
+
+
+def run_orphans(args: argparse.Namespace) -> int:
+    """Find orphaned requirements."""
+    requirements = load_requirements(args)
+    if not requirements:
+        return 1
+
+    orphans = []
+
+    for req in requirements.values():
+        # Skip PRD (they can be roots)
+        if req.level.upper() in ["PRD", "PRODUCT"]:
+            continue
+
+        # Check if this requirement implements anything
+        if not req.implements:
+            orphans.append(req)
+        else:
+            # Check if all implements references are valid
+            all_valid = True
+            for impl_id in req.implements:
+                if not find_requirement(impl_id, requirements):
+                    all_valid = False
+                    break
+            if not all_valid:
+                orphans.append(req)
+
+    if orphans:
+        print(f"Orphaned Requirements ({len(orphans)}):")
+        print("-" * 40)
+        for req in sorted(orphans, key=lambda r: r.id):
+            impl_str = ", ".join(req.implements) if req.implements else "(none)"
+            print(f" {req.id}: {req.title}")
+            print(f" Level: {req.level} | Implements: {impl_str}")
+            if req.file_path:
+                print(f" File: {req.file_path.name}:{req.line_number}")
+            print()
+    else:
+        print("✓ No orphaned requirements found")
+
+    return 0
+
+
+def run_coverage(args: argparse.Namespace) -> int:
+    """Show implementation coverage report."""
+    requirements = load_requirements(args)
+    if not requirements:
+        return 1
+
+    # Group by type
+    prd_count = sum(1 for r in requirements.values() if r.level.upper() in ["PRD", "PRODUCT"])
+    ops_count = sum(1 for r in requirements.values() if r.level.upper() in ["OPS", "OPERATIONS"])
+    dev_count = sum(1 for r in requirements.values() if r.level.upper() in ["DEV", "DEVELOPMENT"])
+
+    # Count PRD requirements that have implementations
+    implemented_prd = set()
+    for req in requirements.values():
+        for impl_id in req.implements:
+            # Resolve to full ID
+            target = find_requirement(impl_id, requirements)
+            if target and target.level.upper() in ["PRD", "PRODUCT"]:
+                implemented_prd.add(target.id)
+
+    print("Implementation Coverage Report")
+    print("=" * 60)
+    print()
+    print(f"Total Requirements: {len(requirements)}")
+    print(f" PRD: {prd_count}")
+    print(f" OPS: {ops_count}")
+    print(f" DEV: {dev_count}")
+    print()
+    print("PRD Implementation Coverage:")
+    print(f" Implemented: {len(implemented_prd)}/{prd_count}")
+    if prd_count > 0:
+        pct = (len(implemented_prd) / prd_count) * 100
+        print(f" Coverage: {pct:.1f}%")
+
+    # List unimplemented PRD
+    unimplemented = [
+        req for req in requirements.values()
+        if req.level.upper() in ["PRD", "PRODUCT"] and req.id not in implemented_prd
+    ]
+
+    if unimplemented:
+        print()
+        print(f"Unimplemented PRD ({len(unimplemented)}):")
+        for req in sorted(unimplemented, key=lambda r: r.id):
+            print(f" - {req.id}: {req.title}")
+
+    return 0
+
+
+def load_requirements(args: argparse.Namespace) -> Dict[str, Requirement]:
+    """Load requirements from spec directories."""
+    config_path = args.config or find_config_file(Path.cwd())
+    if config_path and config_path.exists():
+        config = load_config(config_path)
+    else:
+        config = DEFAULT_CONFIG
+
+    spec_dirs = get_spec_directories(args.spec_dir, config)
+    if not spec_dirs:
+        print("Error: No spec directories found", file=sys.stderr)
+        return {}
+
+    pattern_config = PatternConfig.from_dict(config.get("patterns", {}))
+    spec_config = config.get("spec", {})
+    no_reference_values = spec_config.get("no_reference_values")
+    skip_files = spec_config.get("skip_files", [])
+    parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
+
+    try:
+        return parser.parse_directories(spec_dirs, skip_files=skip_files)
+    except Exception as e:
+        print(f"Error parsing requirements: {e}", file=sys.stderr)
+        return {}
+
+
+def find_children(req_id: str, requirements: Dict[str, Requirement]) -> List[Requirement]:
+    """Find requirements that implement the given requirement."""
+    children = []
+    short_id = req_id.split("-")[-1] if "-" in req_id else req_id
+
+    for other_req in requirements.values():
+        for impl in other_req.implements:
+            if impl == req_id or impl == short_id or impl.endswith(short_id):
+                children.append(other_req)
+                break
+
+    return sorted(children, key=lambda r: r.id)
+
+
+def find_requirement(impl_id: str, requirements: Dict[str, Requirement]) -> Optional[Requirement]:
+    """Find a requirement by full or partial ID."""
+    if impl_id in requirements:
+        return requirements[impl_id]
+
+    for req_id, req in requirements.items():
+        if req_id.endswith(impl_id) or req_id.endswith(f"-{impl_id}"):
+            return req
+
+    return None
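
For orientation, the sketch below drives run() from elspais.commands.analyze directly instead of going through the elspais analyze CLI. It is illustrative only: the namespace attributes analyze_action, config, and spec_dir are simply the ones the module above reads, and the values chosen here (a coverage report with no explicit config or spec directory) are assumptions, not documented defaults.

# Hypothetical driver for the module above; not part of the package itself.
import argparse

from elspais.commands import analyze

args = argparse.Namespace(
    analyze_action="coverage",  # one of "hierarchy", "orphans", "coverage"
    config=None,                # fall back to find_config_file(Path.cwd())
    spec_dir=None,              # let get_spec_directories() consult the config
)
raise SystemExit(analyze.run(args))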

elspais/commands/config_cmd.py
@@ -0,0 +1,501 @@
+"""
+elspais.commands.config_cmd - Configuration management command.
+
+View and modify .elspais.toml configuration.
+"""
+
+import argparse
+import json
+import sys
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from elspais.config.loader import (
+    find_config_file,
+    load_config,
+    merge_configs,
+    parse_toml,
+)
+from elspais.config.defaults import DEFAULT_CONFIG
+
+
+def run(args: argparse.Namespace) -> int:
+    """
+    Run the config command.
+
+    Args:
+        args: Parsed command line arguments
+
+    Returns:
+        Exit code (0 for success)
+    """
+    action = getattr(args, "config_action", None)
+
+    if action == "show":
+        return cmd_show(args)
+    elif action == "get":
+        return cmd_get(args)
+    elif action == "set":
+        return cmd_set(args)
+    elif action == "unset":
+        return cmd_unset(args)
+    elif action == "add":
+        return cmd_add(args)
+    elif action == "remove":
+        return cmd_remove(args)
+    elif action == "path":
+        return cmd_path(args)
+    else:
+        # Default to show
+        return cmd_show(args)
+
+
+def cmd_show(args: argparse.Namespace) -> int:
+    """Show current configuration."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        print("No configuration file found. Run 'elspais init' to create one.")
+        return 1
+
+    config = load_config(config_path)
+    section = getattr(args, "section", None)
+
+    if section:
+        value = _get_by_path(config, section)
+        if value is None:
+            print(f"Section not found: {section}", file=sys.stderr)
+            return 1
+        _print_value(value, section)
+    else:
+        if getattr(args, "json", False):
+            print(json.dumps(config, indent=2))
+        else:
+            _print_config(config)
+
+    return 0
+
+
+def cmd_get(args: argparse.Namespace) -> int:
+    """Get a specific configuration value."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        print("No configuration file found.", file=sys.stderr)
+        return 1
+
+    config = load_config(config_path)
+    key = args.key
+
+    value = _get_by_path(config, key)
+    if value is None:
+        # Check if it's truly None vs not found
+        parts = key.split(".")
+        current = config
+        for part in parts[:-1]:
+            if part not in current:
+                print(f"Key not found: {key}", file=sys.stderr)
+                return 1
+            current = current[part]
+        if parts[-1] not in current:
+            print(f"Key not found: {key}", file=sys.stderr)
+            return 1
+
+    if getattr(args, "json", False):
+        print(json.dumps(value))
+    else:
+        _print_value(value)
+
+    return 0
+
+
+def cmd_set(args: argparse.Namespace) -> int:
+    """Set a configuration value."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        config_path = Path.cwd() / ".elspais.toml"
+        print(f"Creating new configuration file: {config_path}")
+
+    key = args.key
+    value = args.value
+
+    # Parse value with type inference
+    parsed_value = _parse_cli_value(value)
+
+    # Load existing user config (not merged with defaults)
+    user_config = _load_user_config(config_path)
+
+    # Set the value
+    _set_by_path(user_config, key, parsed_value)
+
+    # Write back
+    _write_config(config_path, user_config)
+
+    if not args.quiet:
+        print(f"Set {key} = {_format_value(parsed_value)}")
+
+    return 0
+
+
+def cmd_unset(args: argparse.Namespace) -> int:
+    """Remove a configuration key."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        print("No configuration file found.", file=sys.stderr)
+        return 1
+
+    key = args.key
+    user_config = _load_user_config(config_path)
+
+    if not _unset_by_path(user_config, key):
+        print(f"Key not found: {key}", file=sys.stderr)
+        return 1
+
+    _write_config(config_path, user_config)
+
+    if not args.quiet:
+        print(f"Unset {key}")
+
+    return 0
+
+
+def cmd_add(args: argparse.Namespace) -> int:
+    """Add a value to an array configuration."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        config_path = Path.cwd() / ".elspais.toml"
+
+    key = args.key
+    value = _parse_cli_value(args.value)
+
+    user_config = _load_user_config(config_path)
+
+    # Get or create the array
+    current = _get_by_path(user_config, key)
+
+    if current is None:
+        # Check defaults for existing array
+        default_val = _get_by_path(DEFAULT_CONFIG, key)
+        if isinstance(default_val, list):
+            current = list(default_val)  # Copy default
+        else:
+            current = []
+    elif not isinstance(current, list):
+        print(f"Error: {key} is not an array", file=sys.stderr)
+        return 1
+
+    if value in current:
+        print(f"Value already exists in {key}: {_format_value(value)}")
+        return 0
+
+    current.append(value)
+    _set_by_path(user_config, key, current)
+    _write_config(config_path, user_config)
+
+    if not args.quiet:
+        print(f"Added {_format_value(value)} to {key}")
+
+    return 0
+
+
+def cmd_remove(args: argparse.Namespace) -> int:
+    """Remove a value from an array configuration."""
+    config_path = _get_config_path(args)
+
+    if not config_path:
+        print("No configuration file found.", file=sys.stderr)
+        return 1
+
+    key = args.key
+    value = _parse_cli_value(args.value)
+
+    user_config = _load_user_config(config_path)
+    merged_config = load_config(config_path)
+
+    # Get current merged value to see what's there
+    current = _get_by_path(merged_config, key)
+
+    if current is None or not isinstance(current, list):
+        print(f"Error: {key} is not an array or doesn't exist", file=sys.stderr)
+        return 1
+
+    if value not in current:
+        print(f"Value not in {key}: {_format_value(value)}", file=sys.stderr)
+        return 1
+
+    # Get user config array or copy from defaults
+    user_array = _get_by_path(user_config, key)
+    if user_array is None:
+        user_array = list(current)  # Copy merged
+
+    user_array = [v for v in user_array if v != value]
+    _set_by_path(user_config, key, user_array)
+    _write_config(config_path, user_config)
+
+    if not args.quiet:
+        print(f"Removed {_format_value(value)} from {key}")
+
+    return 0
+
+
+def cmd_path(args: argparse.Namespace) -> int:
+    """Show the path to the configuration file."""
+    config_path = _get_config_path(args)
+
+    if config_path:
+        print(config_path)
+        return 0
+    else:
+        print("No configuration file found.", file=sys.stderr)
+        return 1
+
+
+# Helper functions
+
+def _get_config_path(args: argparse.Namespace) -> Optional[Path]:
+    """Get configuration file path from args or by discovery."""
+    if hasattr(args, "config") and args.config:
+        return args.config
+    return find_config_file(Path.cwd())
+
+
+def _load_user_config(config_path: Path) -> Dict[str, Any]:
+    """Load user configuration without merging with defaults."""
+    if config_path.exists():
+        return parse_toml(config_path.read_text(encoding="utf-8"))
+    return {}
+
+
+def _get_by_path(config: Dict[str, Any], path: str) -> Any:
+    """Get a value from config using dot-notation path."""
+    parts = path.split(".")
+    current = config
+    for part in parts:
+        if not isinstance(current, dict) or part not in current:
+            return None
+        current = current[part]
+    return current
+
+
+def _set_by_path(config: Dict[str, Any], path: str, value: Any) -> None:
+    """Set a value in config using dot-notation path."""
+    parts = path.split(".")
+    current = config
+    for part in parts[:-1]:
+        if part not in current:
+            current[part] = {}
+        current = current[part]
+    current[parts[-1]] = value
+
+
+def _unset_by_path(config: Dict[str, Any], path: str) -> bool:
+    """Remove a key from config. Returns True if found and removed."""
+    parts = path.split(".")
+    current = config
+    for part in parts[:-1]:
+        if part not in current:
+            return False
+        current = current[part]
+    if parts[-1] in current:
+        del current[parts[-1]]
+        return True
+    return False
+
+
+def _parse_cli_value(value: str) -> Any:
+    """Parse a CLI value string with type inference."""
+    # Boolean
+    if value.lower() == "true":
+        return True
+    if value.lower() == "false":
+        return False
+
+    # Integer
+    try:
+        return int(value)
+    except ValueError:
+        pass
+
+    # Float
+    try:
+        return float(value)
+    except ValueError:
+        pass
+
+    # JSON array or object
+    if value.startswith("[") or value.startswith("{"):
+        try:
+            return json.loads(value)
+        except json.JSONDecodeError:
+            pass
+
+    # String
+    return value
+
+
+def _format_value(value: Any) -> str:
+    """Format a value for display."""
+    if isinstance(value, str):
+        return f'"{value}"'
+    elif isinstance(value, bool):
+        return "true" if value else "false"
+    elif isinstance(value, (list, dict)):
+        return json.dumps(value)
+    else:
+        return str(value)
+
+
+def _print_value(value: Any, prefix: str = "") -> None:
+    """Print a configuration value."""
+    if isinstance(value, dict):
+        for k, v in value.items():
+            key = f"{prefix}.{k}" if prefix else k
+            _print_value(v, key)
+    elif isinstance(value, list):
+        if prefix:
+            print(f"{prefix} = {json.dumps(value)}")
+        else:
+            print(json.dumps(value))
+    elif isinstance(value, bool):
+        if prefix:
+            print(f"{prefix} = {'true' if value else 'false'}")
+        else:
+            print('true' if value else 'false')
+    elif isinstance(value, str):
+        if prefix:
+            print(f'{prefix} = "{value}"')
+        else:
+            print(value)
+    else:
+        if prefix:
+            print(f"{prefix} = {value}")
+        else:
+            print(value)
+
+
+def _print_config(config: Dict[str, Any], indent: int = 0) -> None:
+    """Pretty print configuration in TOML-like format."""
+    for key, value in config.items():
+        if isinstance(value, dict):
+            print(f"\n[{key}]")
+            _print_section(value, key)
+        else:
+            print(f"{key} = {_format_value(value)}")
+
+
+def _print_section(section: Dict[str, Any], path: str, indent: int = 0) -> None:
+    """Print a configuration section."""
+    simple_items = []
+    nested_items = []
+
+    for key, value in section.items():
+        if isinstance(value, dict):
+            nested_items.append((key, value))
+        else:
+            simple_items.append((key, value))
+
+    # Print simple items first
+    for key, value in simple_items:
+        print(f"{key} = {_format_value(value)}")
+
+    # Then nested sections
+    for key, value in nested_items:
+        new_path = f"{path}.{key}"
+        print(f"\n[{new_path}]")
+        _print_section(value, new_path)
+
+
+def _write_config(config_path: Path, config: Dict[str, Any]) -> None:
+    """Write configuration to TOML file."""
+    content = serialize_toml(config)
+    config_path.write_text(content, encoding="utf-8")
+
+
+def serialize_toml(config: Dict[str, Any], prefix: str = "") -> str:
+    """
+    Serialize a dictionary to TOML format.
+
+    Args:
+        config: Configuration dictionary
+        prefix: Section prefix for nested tables
+
+    Returns:
+        TOML formatted string
+    """
+    lines: List[str] = []
+
+    # Separate simple values and nested tables
+    simple_items: List[Tuple[str, Any]] = []
+    nested_items: List[Tuple[str, Dict]] = []
+
+    for key, value in config.items():
+        if isinstance(value, dict) and not _is_inline_table(value):
+            nested_items.append((key, value))
+        else:
+            simple_items.append((key, value))
+
+    # Write simple items
+    for key, value in simple_items:
+        lines.append(f"{key} = {_serialize_value(value)}")
+
+    # Write nested tables
+    for key, value in nested_items:
+        section_name = f"{prefix}.{key}" if prefix else key
+        lines.append("")
+        lines.append(f"[{section_name}]")
+        nested_content = serialize_toml(value, section_name)
+        # Remove leading newline from nested content
+        nested_content = nested_content.lstrip("\n")
+        if nested_content:
+            lines.append(nested_content)
+
+    result = "\n".join(lines)
+    # Clean up multiple consecutive empty lines
+    while "\n\n\n" in result:
+        result = result.replace("\n\n\n", "\n\n")
+    return result
+
+
+def _is_inline_table(value: Dict) -> bool:
+    """Check if a dict should be serialized as inline table."""
+    # Use inline for simple, small tables without nested dicts
+    if len(value) > 4:
+        return False
+    for v in value.values():
+        if isinstance(v, dict):
+            return False
+        if isinstance(v, list) and len(v) > 3:
+            return False
+    return True


+def _serialize_value(value: Any) -> str:
+    """Serialize a single value to TOML format."""
+    if value is None:
+        return '""'  # TOML doesn't have null, use empty string
+    elif isinstance(value, bool):
+        return "true" if value else "false"
+    elif isinstance(value, int):
+        return str(value)
+    elif isinstance(value, float):
+        return str(value)
+    elif isinstance(value, str):
+        # Escape quotes and backslashes
+        escaped = value.replace("\\", "\\\\").replace('"', '\\"')
+        return f'"{escaped}"'
+    elif isinstance(value, list):
+        items = [_serialize_value(v) for v in value]
+        # Check if it fits on one line
+        single_line = f"[{', '.join(items)}]"
+        if len(single_line) < 80:
+            return single_line
+        # Multi-line format
+        return "[\n " + ",\n ".join(items) + ",\n]"
+    elif isinstance(value, dict):
+        items = [f"{k} = {_serialize_value(v)}" for k, v in value.items()]
+        return "{ " + ", ".join(items) + " }"
+    else:
+        return str(value)
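
As a rough illustration of how the dot-path and TOML helpers above fit together, the sketch below composes them by hand; it imports private helpers that the cmd_* functions normally call for you and is not part of the package.

# Hypothetical round-trip through the helpers defined above; illustrative only.
from elspais.commands.config_cmd import _parse_cli_value, _set_by_path, serialize_toml

config = {}
_set_by_path(config, "spec.skip_files", _parse_cli_value('["README.md"]'))
_set_by_path(config, "spec.strict", _parse_cli_value("true"))

# With only two keys and no nested dicts, _is_inline_table() keeps the table
# inline, so this prints: spec = { skip_files = ["README.md"], strict = true }
print(serialize_toml(config))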