specfact-cli 0.4.0 (specfact_cli-0.4.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of specfact-cli might be problematic.
- specfact_cli/__init__.py +14 -0
- specfact_cli/agents/__init__.py +23 -0
- specfact_cli/agents/analyze_agent.py +392 -0
- specfact_cli/agents/base.py +95 -0
- specfact_cli/agents/plan_agent.py +202 -0
- specfact_cli/agents/registry.py +176 -0
- specfact_cli/agents/sync_agent.py +133 -0
- specfact_cli/analyzers/__init__.py +10 -0
- specfact_cli/analyzers/code_analyzer.py +775 -0
- specfact_cli/cli.py +397 -0
- specfact_cli/commands/__init__.py +7 -0
- specfact_cli/commands/enforce.py +87 -0
- specfact_cli/commands/import_cmd.py +355 -0
- specfact_cli/commands/init.py +119 -0
- specfact_cli/commands/plan.py +1090 -0
- specfact_cli/commands/repro.py +172 -0
- specfact_cli/commands/sync.py +408 -0
- specfact_cli/common/__init__.py +24 -0
- specfact_cli/common/logger_setup.py +673 -0
- specfact_cli/common/logging_utils.py +41 -0
- specfact_cli/common/text_utils.py +52 -0
- specfact_cli/common/utils.py +48 -0
- specfact_cli/comparators/__init__.py +10 -0
- specfact_cli/comparators/plan_comparator.py +391 -0
- specfact_cli/generators/__init__.py +13 -0
- specfact_cli/generators/plan_generator.py +105 -0
- specfact_cli/generators/protocol_generator.py +115 -0
- specfact_cli/generators/report_generator.py +200 -0
- specfact_cli/generators/workflow_generator.py +111 -0
- specfact_cli/importers/__init__.py +6 -0
- specfact_cli/importers/speckit_converter.py +773 -0
- specfact_cli/importers/speckit_scanner.py +704 -0
- specfact_cli/models/__init__.py +32 -0
- specfact_cli/models/deviation.py +105 -0
- specfact_cli/models/enforcement.py +150 -0
- specfact_cli/models/plan.py +97 -0
- specfact_cli/models/protocol.py +28 -0
- specfact_cli/modes/__init__.py +18 -0
- specfact_cli/modes/detector.py +126 -0
- specfact_cli/modes/router.py +153 -0
- specfact_cli/sync/__init__.py +11 -0
- specfact_cli/sync/repository_sync.py +279 -0
- specfact_cli/sync/speckit_sync.py +388 -0
- specfact_cli/utils/__init__.py +57 -0
- specfact_cli/utils/console.py +69 -0
- specfact_cli/utils/feature_keys.py +213 -0
- specfact_cli/utils/git.py +241 -0
- specfact_cli/utils/ide_setup.py +381 -0
- specfact_cli/utils/prompts.py +179 -0
- specfact_cli/utils/structure.py +496 -0
- specfact_cli/utils/yaml_utils.py +200 -0
- specfact_cli/validators/__init__.py +19 -0
- specfact_cli/validators/fsm.py +260 -0
- specfact_cli/validators/repro_checker.py +320 -0
- specfact_cli/validators/schema.py +200 -0
- specfact_cli-0.4.0.dist-info/METADATA +332 -0
- specfact_cli-0.4.0.dist-info/RECORD +60 -0
- specfact_cli-0.4.0.dist-info/WHEEL +4 -0
- specfact_cli-0.4.0.dist-info/entry_points.txt +2 -0
- specfact_cli-0.4.0.dist-info/licenses/LICENSE.md +55 -0
specfact_cli/utils/structure.py
@@ -0,0 +1,496 @@
"""SpecFact directory structure utilities."""

from __future__ import annotations

import re
from datetime import datetime
from pathlib import Path

from beartype import beartype
from icontract import ensure, require


class SpecFactStructure:
    """
    Manages the canonical .specfact/ directory structure.

    All SpecFact artifacts are stored under `.specfact/` for consistency
    and to support multiple plans in a single repository.
    """

    # Root directory
    ROOT = ".specfact"

    # Versioned directories (committed to git)
    PLANS = f"{ROOT}/plans"
    PROTOCOLS = f"{ROOT}/protocols"

    # Ephemeral directories (gitignored)
    REPORTS = f"{ROOT}/reports"
    REPORTS_BROWNFIELD = f"{ROOT}/reports/brownfield"
    REPORTS_COMPARISON = f"{ROOT}/reports/comparison"
    REPORTS_ENFORCEMENT = f"{ROOT}/reports/enforcement"
    GATES_RESULTS = f"{ROOT}/gates/results"
    CACHE = f"{ROOT}/cache"

    # Configuration files
    CONFIG = f"{ROOT}/config.yaml"
    GATES_CONFIG = f"{ROOT}/gates/config.yaml"
    ENFORCEMENT_CONFIG = f"{ROOT}/gates/config/enforcement.yaml"

    # Default plan names
    DEFAULT_PLAN = f"{ROOT}/plans/main.bundle.yaml"
    BROWNFIELD_PLAN = f"{ROOT}/plans/auto-derived.yaml"
    PLANS_CONFIG = f"{ROOT}/plans/config.yaml"

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @ensure(lambda result: result is None, "Must return None")
    def ensure_structure(cls, base_path: Path | None = None) -> None:
        """
        Ensure the .specfact directory structure exists.

        Args:
            base_path: Base directory (default: current directory)
                Must be repository root, not a subdirectory
        """
        if base_path is None:
            base_path = Path(".")
        else:
            # Normalize to absolute path and ensure we're not inside .specfact
            base_path = Path(base_path).resolve()
            # If base_path contains .specfact, find the repository root
            parts = base_path.parts
            if ".specfact" in parts:
                # Find the index of .specfact and go up to repository root
                specfact_idx = parts.index(".specfact")
                base_path = Path(*parts[:specfact_idx])

        # Create versioned directories
        (base_path / cls.PLANS).mkdir(parents=True, exist_ok=True)
        (base_path / cls.PROTOCOLS).mkdir(parents=True, exist_ok=True)
        (base_path / f"{cls.ROOT}/gates/config").mkdir(parents=True, exist_ok=True)

        # Create ephemeral directories
        (base_path / cls.REPORTS_BROWNFIELD).mkdir(parents=True, exist_ok=True)
        (base_path / cls.REPORTS_COMPARISON).mkdir(parents=True, exist_ok=True)
        (base_path / cls.REPORTS_ENFORCEMENT).mkdir(parents=True, exist_ok=True)
        (base_path / cls.GATES_RESULTS).mkdir(parents=True, exist_ok=True)
        (base_path / cls.CACHE).mkdir(parents=True, exist_ok=True)

    @classmethod
    @beartype
    @require(
        lambda report_type: isinstance(report_type, str) and report_type in ("brownfield", "comparison", "enforcement"),
        "Report type must be brownfield/comparison/enforcement",
    )
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @require(lambda extension: isinstance(extension, str) and len(extension) > 0, "Extension must be non-empty string")
    @ensure(lambda result: isinstance(result, Path), "Must return Path")
    def get_timestamped_report_path(
        cls, report_type: str, base_path: Path | None = None, extension: str = "md"
    ) -> Path:
        """
        Get a timestamped report path.

        Args:
            report_type: Type of report (brownfield, comparison, enforcement)
            base_path: Base directory (default: current directory)
            extension: File extension (default: md)

        Returns:
            Path to timestamped report file
        """
        if base_path is None:
            base_path = Path(".")

        # Use ISO format timestamp for consistency
        timestamp = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")

        if report_type == "brownfield":
            directory = base_path / cls.REPORTS_BROWNFIELD
        elif report_type == "comparison":
            directory = base_path / cls.REPORTS_COMPARISON
        elif report_type == "enforcement":
            directory = base_path / cls.REPORTS_ENFORCEMENT
        else:
            raise ValueError(f"Unknown report type: {report_type}")

        directory.mkdir(parents=True, exist_ok=True)
        return directory / f"report-{timestamp}.{extension}"

    @classmethod
    def get_brownfield_analysis_path(cls, base_path: Path | None = None) -> Path:
        """Get path for brownfield analysis report."""
        return cls.get_timestamped_report_path("brownfield", base_path, "md")

    @classmethod
    def get_brownfield_plan_path(cls, base_path: Path | None = None) -> Path:
        """Get path for auto-derived brownfield plan."""
        return cls.get_timestamped_report_path("brownfield", base_path, "yaml")

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @require(lambda format: isinstance(format, str) and format in ("md", "json", "yaml"), "Format must be md/json/yaml")
    @ensure(lambda result: isinstance(result, Path), "Must return Path")
    def get_comparison_report_path(cls, base_path: Path | None = None, format: str = "md") -> Path:
        """Get path for comparison report."""
        return cls.get_timestamped_report_path("comparison", base_path, format)

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @ensure(lambda result: isinstance(result, Path), "Must return Path")
    def get_default_plan_path(cls, base_path: Path | None = None) -> Path:
        """
        Get path to active plan bundle (from config or fallback to main.bundle.yaml).

        Args:
            base_path: Base directory (default: current directory)

        Returns:
            Path to active plan bundle (from config or default)
        """
        if base_path is None:
            base_path = Path(".")

        # Try to read active plan from config
        config_path = base_path / cls.PLANS_CONFIG
        if config_path.exists():
            try:
                import yaml

                with config_path.open() as f:
                    config = yaml.safe_load(f) or {}
                active_plan = config.get("active_plan")
                if active_plan:
                    plan_path = base_path / cls.PLANS / active_plan
                    if plan_path.exists():
                        return plan_path
            except Exception:
                # Fallback to default if config read fails
                pass

        # Fallback to default plan
        return base_path / cls.DEFAULT_PLAN

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @require(lambda plan_name: isinstance(plan_name, str) and len(plan_name) > 0, "Plan name must be non-empty string")
    @ensure(lambda result: result is None, "Must return None")
    def set_active_plan(cls, plan_name: str, base_path: Path | None = None) -> None:
        """
        Set the active plan in the plans config.

        Args:
            plan_name: Name of the plan file (e.g., "main.bundle.yaml", "specfact-cli.2025-11-04T23-35-00.bundle.yaml")
            base_path: Base directory (default: current directory)

        Examples:
            >>> SpecFactStructure.set_active_plan("specfact-cli.2025-11-04T23-35-00.bundle.yaml")
            >>> SpecFactStructure.get_default_plan_path()
            Path('.specfact/plans/specfact-cli.2025-11-04T23-35-00.bundle.yaml')
        """
        if base_path is None:
            base_path = Path(".")

        import yaml

        config_path = base_path / cls.PLANS_CONFIG
        plans_dir = base_path / cls.PLANS

        # Ensure plans directory exists
        plans_dir.mkdir(parents=True, exist_ok=True)

        # Read existing config or create new
        config = {}
        if config_path.exists():
            try:
                with config_path.open() as f:
                    config = yaml.safe_load(f) or {}
            except Exception:
                config = {}

        # Update active plan
        config["active_plan"] = plan_name

        # Write config
        with config_path.open("w") as f:
            yaml.dump(config, f, default_flow_style=False, sort_keys=False)

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @ensure(lambda result: isinstance(result, list), "Must return list")
    def list_plans(cls, base_path: Path | None = None) -> list[dict[str, str | int]]:
        """
        List all available plan bundles with metadata.

        Args:
            base_path: Base directory (default: current directory)

        Returns:
            List of plan dictionaries with 'name', 'path', 'features', 'stories', 'size', 'modified' keys

        Examples:
            >>> plans = SpecFactStructure.list_plans()
            >>> plans[0]['name']
            'specfact-cli.2025-11-04T23-35-00.bundle.yaml'
        """
        if base_path is None:
            base_path = Path(".")

        plans_dir = base_path / cls.PLANS
        if not plans_dir.exists():
            return []

        import yaml
        from datetime import datetime

        plans = []
        active_plan = None

        # Get active plan from config
        config_path = base_path / cls.PLANS_CONFIG
        if config_path.exists():
            try:
                with config_path.open() as f:
                    config = yaml.safe_load(f) or {}
                active_plan = config.get("active_plan")
            except Exception:
                pass

        # Find all plan bundles
        for plan_file in sorted(plans_dir.glob("*.bundle.yaml"), reverse=True):
            if plan_file.name == "config.yaml":
                continue

            plan_info: dict[str, str | int] = {
                "name": plan_file.name,
                "path": str(plan_file.relative_to(base_path)),
                "features": 0,
                "stories": 0,
                "size": plan_file.stat().st_size,
                "modified": datetime.fromtimestamp(plan_file.stat().st_mtime).isoformat(),
                "active": plan_file.name == active_plan,
            }

            # Try to load plan metadata
            try:
                with plan_file.open() as f:
                    plan_data = yaml.safe_load(f) or {}
                features = plan_data.get("features", [])
                plan_info["features"] = len(features)
                plan_info["stories"] = sum(len(f.get("stories", [])) for f in features)
                if plan_data.get("metadata"):
                    plan_info["stage"] = plan_data["metadata"].get("stage", "draft")
                else:
                    plan_info["stage"] = "draft"
            except Exception:
                plan_info["stage"] = "unknown"

            plans.append(plan_info)

        return plans

    @classmethod
    def get_enforcement_config_path(cls, base_path: Path | None = None) -> Path:
        """Get path to enforcement configuration file."""
        if base_path is None:
            base_path = Path(".")
        return base_path / cls.ENFORCEMENT_CONFIG

    @classmethod
    @beartype
    @require(lambda name: name is None or isinstance(name, str), "Name must be None or str")
    @ensure(lambda result: isinstance(result, str) and len(result) > 0, "Sanitized name must be non-empty")
    def sanitize_plan_name(cls, name: str | None) -> str:
        """
        Sanitize plan name for filesystem persistence.

        Converts to lowercase, removes spaces and special characters,
        keeping only alphanumeric, hyphens, and underscores.

        Args:
            name: Plan name to sanitize (e.g., "My Feature Plan", "api-client-v2")

        Returns:
            Sanitized name safe for filesystem (e.g., "my-feature-plan", "api-client-v2")

        Examples:
            >>> SpecFactStructure.sanitize_plan_name("My Feature Plan")
            'my-feature-plan'
            >>> SpecFactStructure.sanitize_plan_name("API Client v2.0")
            'api-client-v20'
            >>> SpecFactStructure.sanitize_plan_name("test_plan_123")
            'test_plan_123'
        """
        if not name:
            return "auto-derived"

        # Convert to lowercase
        sanitized = name.lower()

        # Replace spaces and dots with hyphens
        sanitized = re.sub(r"[.\s]+", "-", sanitized)

        # Remove all characters except alphanumeric, hyphens, and underscores
        sanitized = re.sub(r"[^a-z0-9_-]", "", sanitized)

        # Remove consecutive hyphens and underscores
        sanitized = re.sub(r"[-_]{2,}", "-", sanitized)

        # Remove leading/trailing hyphens and underscores
        sanitized = sanitized.strip("-_")

        # Ensure it's not empty
        if not sanitized:
            return "auto-derived"

        return sanitized

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @require(lambda name: name is None or isinstance(name, str), "Name must be None or str")
    @ensure(lambda result: isinstance(result, Path), "Must return Path")
    def get_timestamped_brownfield_report(cls, base_path: Path | None = None, name: str | None = None) -> Path:
        """
        Get timestamped path for brownfield analysis report (YAML bundle).

        Args:
            base_path: Base directory (default: current directory)
            name: Custom plan name (will be sanitized, default: "auto-derived")

        Returns:
            Path to plan bundle file (e.g., `.specfact/plans/my-feature-plan.2025-11-04T23-19-31.bundle.yaml`)

        Examples:
            >>> SpecFactStructure.get_timestamped_brownfield_report(name="API Client v2")
            Path('.specfact/plans/api-client-v2.2025-11-04T23-19-31.bundle.yaml')
        """
        if base_path is None:
            base_path = Path(".")
        else:
            # Normalize base_path to repository root (avoid recursive .specfact creation)
            base_path = Path(base_path).resolve()
            # If base_path contains .specfact, find the repository root
            parts = base_path.parts
            if ".specfact" in parts:
                # Find the index of .specfact and go up to repository root
                specfact_idx = parts.index(".specfact")
                base_path = Path(*parts[:specfact_idx])

        timestamp = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
        sanitized_name = cls.sanitize_plan_name(name)
        directory = base_path / cls.PLANS
        directory.mkdir(parents=True, exist_ok=True)
        return directory / f"{sanitized_name}.{timestamp}.bundle.yaml"

    @classmethod
    @beartype
    @require(lambda base_path: base_path is None or isinstance(base_path, Path), "Base path must be None or Path")
    @ensure(lambda result: result is None or isinstance(result, Path), "Must return None or Path")
    def get_latest_brownfield_report(cls, base_path: Path | None = None) -> Path | None:
        """
        Get the latest brownfield report from the plans directory.

        Args:
            base_path: Base directory (default: current directory)

        Returns:
            Path to latest brownfield report, or None if none exist
        """
        if base_path is None:
            base_path = Path(".")

        plans_dir = base_path / cls.PLANS
        if not plans_dir.exists():
            return None

        # Find all auto-derived reports
        reports = sorted(plans_dir.glob("auto-derived.*.bundle.yaml"), reverse=True)
        return reports[0] if reports else None

    @classmethod
    def create_gitignore(cls, base_path: Path | None = None) -> None:
        """
        Create .gitignore for .specfact directory.

        Args:
            base_path: Base directory (default: current directory)
        """
        if base_path is None:
            base_path = Path(".")

        gitignore_path = base_path / cls.ROOT / ".gitignore"
        gitignore_content = """# SpecFact ephemeral artifacts (not versioned)
reports/
gates/results/
cache/

# Keep these versioned
!plans/
!protocols/
!config.yaml
!gates/config.yaml
"""
        gitignore_path.write_text(gitignore_content)

    @classmethod
    def create_readme(cls, base_path: Path | None = None) -> None:
        """
        Create README for .specfact directory.

        Args:
            base_path: Base directory (default: current directory)
        """
        if base_path is None:
            base_path = Path(".")

        readme_path = base_path / cls.ROOT / "README.md"
        readme_content = """# SpecFact Directory

This directory contains SpecFact CLI artifacts for contract-driven development.

## Structure

- `plans/` - Plan bundles (versioned in git)
- `protocols/` - FSM protocol definitions (versioned)
- `reports/` - Analysis reports (gitignored)
- `gates/` - Enforcement configuration and results
- `cache/` - Tool caches (gitignored)

## Documentation

See `docs/directory-structure.md` for complete documentation.

## Getting Started

```bash
# Create a new plan
specfact plan init --interactive

# Analyze existing code
specfact import from-code --repo .

# Compare plans
specfact plan compare --manual .specfact/plans/main.bundle.yaml --auto .specfact/plans/auto-derived-<timestamp>.bundle.yaml
```
"""
        readme_path.write_text(readme_content)

    @classmethod
    def scaffold_project(cls, base_path: Path | None = None) -> None:
        """
        Create complete .specfact directory structure.

        Args:
            base_path: Base directory (default: current directory)
        """
        cls.ensure_structure(base_path)
        cls.create_gitignore(base_path)
        cls.create_readme(base_path)
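For orientation, here is a minimal usage sketch of the `SpecFactStructure` helpers above. It only calls methods that appear in this diff (`scaffold_project`, `get_timestamped_brownfield_report`, `set_active_plan`, `get_default_plan_path`, `list_plans`); the temporary-directory setup is illustrative, and it assumes the wheel plus its `beartype`, `icontract`, and PyYAML dependencies are installed:

```python
# Illustrative sketch only: exercises the SpecFactStructure helpers shown above.
from pathlib import Path
from tempfile import TemporaryDirectory

from specfact_cli.utils.structure import SpecFactStructure

with TemporaryDirectory() as tmp:
    repo = Path(tmp)

    # Create .specfact/ with plans/, protocols/, reports/, gates/, cache/,
    # plus the generated .gitignore and README.md.
    SpecFactStructure.scaffold_project(repo)

    # Reserve a timestamped plan bundle path; the name is sanitized first
    # ("API Client" -> "api-client").
    bundle = SpecFactStructure.get_timestamped_brownfield_report(base_path=repo, name="API Client")
    bundle.write_text("features: []\n")

    # Record it as the active plan, then confirm it resolves and is listed.
    SpecFactStructure.set_active_plan(bundle.name, base_path=repo)
    print(SpecFactStructure.get_default_plan_path(base_path=repo))
    print(SpecFactStructure.list_plans(base_path=repo))
```

Both `ensure_structure` (via `scaffold_project`) and `get_timestamped_brownfield_report` normalize a `base_path` that points inside `.specfact/` back to the repository root, so artifacts are never nested under a second `.specfact/` tree.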
specfact_cli/utils/yaml_utils.py
@@ -0,0 +1,200 @@
"""
YAML utilities.

This module provides helpers for YAML parsing and serialization.
"""

from __future__ import annotations

from pathlib import Path
from typing import Any

from beartype import beartype
from icontract import ensure, require
from ruamel.yaml import YAML


class YAMLUtils:
    """Helper class for YAML operations."""

    @beartype
    @require(lambda indent_mapping: indent_mapping > 0, "Indent mapping must be positive")
    @require(lambda indent_sequence: indent_sequence > 0, "Indent sequence must be positive")
    def __init__(self, preserve_quotes: bool = True, indent_mapping: int = 2, indent_sequence: int = 2) -> None:
        """
        Initialize YAML utilities.

        Args:
            preserve_quotes: Whether to preserve quotes in strings
            indent_mapping: Indentation for mappings (must be > 0)
            indent_sequence: Indentation for sequences (must be > 0)
        """
        self.yaml = YAML()
        self.yaml.preserve_quotes = preserve_quotes
        self.yaml.indent(mapping=indent_mapping, sequence=indent_sequence)
        self.yaml.default_flow_style = False

    @beartype
    @require(lambda file_path: isinstance(file_path, (Path, str)), "File path must be Path or str")
    @ensure(lambda result: result is not None, "Must return parsed content")
    def load(self, file_path: Path | str) -> Any:
        """
        Load YAML from file.

        Args:
            file_path: Path to YAML file (must exist)

        Returns:
            Parsed YAML content

        Raises:
            FileNotFoundError: If file doesn't exist
        """
        file_path = Path(file_path)

        if not file_path.exists():
            raise FileNotFoundError(f"YAML file not found: {file_path}")

        with open(file_path, encoding="utf-8") as f:
            return self.yaml.load(f)

    @beartype
    @require(lambda yaml_string: isinstance(yaml_string, str), "YAML string must be str")
    @ensure(lambda result: result is not None, "Must return parsed content")
    def load_string(self, yaml_string: str) -> Any:
        """
        Load YAML from string.

        Args:
            yaml_string: YAML content as string

        Returns:
            Parsed YAML content
        """
        return self.yaml.load(yaml_string)

    @beartype
    @require(lambda file_path: isinstance(file_path, (Path, str)), "File path must be Path or str")
    def dump(self, data: Any, file_path: Path | str) -> None:
        """
        Dump data to YAML file.

        Args:
            data: Data to serialize
            file_path: Output file path
        """
        file_path = Path(file_path)
        file_path.parent.mkdir(parents=True, exist_ok=True)

        with open(file_path, "w", encoding="utf-8") as f:
            self.yaml.dump(data, f)

    @beartype
    @ensure(lambda result: isinstance(result, str), "Must return string")
    def dump_string(self, data: Any) -> str:
        """
        Dump data to YAML string.

        Args:
            data: Data to serialize

        Returns:
            YAML string
        """
        from io import StringIO

        stream = StringIO()
        self.yaml.dump(data, stream)
        return stream.getvalue()

    @beartype
    @require(lambda base: isinstance(base, dict), "Base must be dictionary")
    @require(lambda overlay: isinstance(overlay, dict), "Overlay must be dictionary")
    @ensure(lambda result: isinstance(result, dict), "Must return dictionary")
    def merge_yaml(self, base: dict[str, Any], overlay: dict[str, Any]) -> dict[str, Any]:
        """
        Deep merge two YAML dictionaries.

        Args:
            base: Base dictionary
            overlay: Overlay dictionary (takes precedence)

        Returns:
            Merged dictionary
        """
        result = base.copy()

        for key, value in overlay.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                result[key] = self.merge_yaml(result[key], value)
            else:
                result[key] = value

        return result


# Convenience functions for quick operations


@beartype
@require(lambda file_path: isinstance(file_path, (Path, str)), "File path must be Path or str")
@ensure(lambda result: result is not None, "Must return parsed content")
def load_yaml(file_path: Path | str) -> Any:
    """
    Load YAML from file (convenience function).

    Args:
        file_path: Path to YAML file

    Returns:
        Parsed YAML content
    """
    utils = YAMLUtils()
    return utils.load(file_path)


@beartype
@require(lambda file_path: isinstance(file_path, (Path, str)), "File path must be Path or str")
def dump_yaml(data: Any, file_path: Path | str) -> None:
    """
    Dump data to YAML file (convenience function).

    Args:
        data: Data to serialize
        file_path: Output file path
    """
    utils = YAMLUtils()
    utils.dump(data, file_path)


@beartype
@ensure(lambda result: isinstance(result, str), "Must return string")
def yaml_to_string(data: Any) -> str:
    """
    Convert data to YAML string (convenience function).

    Args:
        data: Data to serialize

    Returns:
        YAML string
    """
    utils = YAMLUtils()
    return utils.dump_string(data)


@beartype
@require(lambda yaml_string: isinstance(yaml_string, str), "YAML string must be str")
@ensure(lambda result: result is not None, "Must return parsed content")
def string_to_yaml(yaml_string: str) -> Any:
    """
    Parse YAML string (convenience function).

    Args:
        yaml_string: YAML content as string

    Returns:
        Parsed YAML content
    """
    utils = YAMLUtils()
    return utils.load_string(yaml_string)
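Likewise, a short sketch of the `YAMLUtils` wrapper and its module-level convenience functions shown above (`load_string`, `merge_yaml`, `yaml_to_string`); the sample data and the printed output are illustrative, and `ruamel.yaml` plus `beartype`/`icontract` are assumed to be installed:

```python
# Illustrative sketch only: exercises YAMLUtils and the convenience helpers above.
from specfact_cli.utils.yaml_utils import YAMLUtils, yaml_to_string

utils = YAMLUtils()  # preserve_quotes=True, 2-space indentation by default

# Parse a small document, then overlay local overrides with a deep merge.
base = utils.load_string("plan:\n  name: main\n  stage: draft\n")
overlay = {"plan": {"stage": "approved"}, "active": True}
merged = utils.merge_yaml(dict(base), overlay)  # nested dicts merged key by key

print(yaml_to_string(merged))
# Expected block output, roughly:
#   plan:
#     name: main
#     stage: approved
#   active: true
```

Because each `YAMLUtils` instance holds one configured `ruamel.yaml.YAML()` object, the quote-preservation and indentation settings from `__init__` apply uniformly to `load`/`load_string` and `dump`/`dump_string`.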