foundry_mcp-0.3.3-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. foundry_mcp/__init__.py +7 -0
  2. foundry_mcp/cli/__init__.py +80 -0
  3. foundry_mcp/cli/__main__.py +9 -0
  4. foundry_mcp/cli/agent.py +96 -0
  5. foundry_mcp/cli/commands/__init__.py +37 -0
  6. foundry_mcp/cli/commands/cache.py +137 -0
  7. foundry_mcp/cli/commands/dashboard.py +148 -0
  8. foundry_mcp/cli/commands/dev.py +446 -0
  9. foundry_mcp/cli/commands/journal.py +377 -0
  10. foundry_mcp/cli/commands/lifecycle.py +274 -0
  11. foundry_mcp/cli/commands/modify.py +824 -0
  12. foundry_mcp/cli/commands/plan.py +633 -0
  13. foundry_mcp/cli/commands/pr.py +393 -0
  14. foundry_mcp/cli/commands/review.py +652 -0
  15. foundry_mcp/cli/commands/session.py +479 -0
  16. foundry_mcp/cli/commands/specs.py +856 -0
  17. foundry_mcp/cli/commands/tasks.py +807 -0
  18. foundry_mcp/cli/commands/testing.py +676 -0
  19. foundry_mcp/cli/commands/validate.py +982 -0
  20. foundry_mcp/cli/config.py +98 -0
  21. foundry_mcp/cli/context.py +259 -0
  22. foundry_mcp/cli/flags.py +266 -0
  23. foundry_mcp/cli/logging.py +212 -0
  24. foundry_mcp/cli/main.py +44 -0
  25. foundry_mcp/cli/output.py +122 -0
  26. foundry_mcp/cli/registry.py +110 -0
  27. foundry_mcp/cli/resilience.py +178 -0
  28. foundry_mcp/cli/transcript.py +217 -0
  29. foundry_mcp/config.py +850 -0
  30. foundry_mcp/core/__init__.py +144 -0
  31. foundry_mcp/core/ai_consultation.py +1636 -0
  32. foundry_mcp/core/cache.py +195 -0
  33. foundry_mcp/core/capabilities.py +446 -0
  34. foundry_mcp/core/concurrency.py +898 -0
  35. foundry_mcp/core/context.py +540 -0
  36. foundry_mcp/core/discovery.py +1603 -0
  37. foundry_mcp/core/error_collection.py +728 -0
  38. foundry_mcp/core/error_store.py +592 -0
  39. foundry_mcp/core/feature_flags.py +592 -0
  40. foundry_mcp/core/health.py +749 -0
  41. foundry_mcp/core/journal.py +694 -0
  42. foundry_mcp/core/lifecycle.py +412 -0
  43. foundry_mcp/core/llm_config.py +1350 -0
  44. foundry_mcp/core/llm_patterns.py +510 -0
  45. foundry_mcp/core/llm_provider.py +1569 -0
  46. foundry_mcp/core/logging_config.py +374 -0
  47. foundry_mcp/core/metrics_persistence.py +584 -0
  48. foundry_mcp/core/metrics_registry.py +327 -0
  49. foundry_mcp/core/metrics_store.py +641 -0
  50. foundry_mcp/core/modifications.py +224 -0
  51. foundry_mcp/core/naming.py +123 -0
  52. foundry_mcp/core/observability.py +1216 -0
  53. foundry_mcp/core/otel.py +452 -0
  54. foundry_mcp/core/otel_stubs.py +264 -0
  55. foundry_mcp/core/pagination.py +255 -0
  56. foundry_mcp/core/progress.py +317 -0
  57. foundry_mcp/core/prometheus.py +577 -0
  58. foundry_mcp/core/prompts/__init__.py +464 -0
  59. foundry_mcp/core/prompts/fidelity_review.py +546 -0
  60. foundry_mcp/core/prompts/markdown_plan_review.py +511 -0
  61. foundry_mcp/core/prompts/plan_review.py +623 -0
  62. foundry_mcp/core/providers/__init__.py +225 -0
  63. foundry_mcp/core/providers/base.py +476 -0
  64. foundry_mcp/core/providers/claude.py +460 -0
  65. foundry_mcp/core/providers/codex.py +619 -0
  66. foundry_mcp/core/providers/cursor_agent.py +642 -0
  67. foundry_mcp/core/providers/detectors.py +488 -0
  68. foundry_mcp/core/providers/gemini.py +405 -0
  69. foundry_mcp/core/providers/opencode.py +616 -0
  70. foundry_mcp/core/providers/opencode_wrapper.js +302 -0
  71. foundry_mcp/core/providers/package-lock.json +24 -0
  72. foundry_mcp/core/providers/package.json +25 -0
  73. foundry_mcp/core/providers/registry.py +607 -0
  74. foundry_mcp/core/providers/test_provider.py +171 -0
  75. foundry_mcp/core/providers/validation.py +729 -0
  76. foundry_mcp/core/rate_limit.py +427 -0
  77. foundry_mcp/core/resilience.py +600 -0
  78. foundry_mcp/core/responses.py +934 -0
  79. foundry_mcp/core/review.py +366 -0
  80. foundry_mcp/core/security.py +438 -0
  81. foundry_mcp/core/spec.py +1650 -0
  82. foundry_mcp/core/task.py +1289 -0
  83. foundry_mcp/core/testing.py +450 -0
  84. foundry_mcp/core/validation.py +2081 -0
  85. foundry_mcp/dashboard/__init__.py +32 -0
  86. foundry_mcp/dashboard/app.py +119 -0
  87. foundry_mcp/dashboard/components/__init__.py +17 -0
  88. foundry_mcp/dashboard/components/cards.py +88 -0
  89. foundry_mcp/dashboard/components/charts.py +234 -0
  90. foundry_mcp/dashboard/components/filters.py +136 -0
  91. foundry_mcp/dashboard/components/tables.py +195 -0
  92. foundry_mcp/dashboard/data/__init__.py +11 -0
  93. foundry_mcp/dashboard/data/stores.py +433 -0
  94. foundry_mcp/dashboard/launcher.py +289 -0
  95. foundry_mcp/dashboard/views/__init__.py +12 -0
  96. foundry_mcp/dashboard/views/errors.py +217 -0
  97. foundry_mcp/dashboard/views/metrics.py +174 -0
  98. foundry_mcp/dashboard/views/overview.py +160 -0
  99. foundry_mcp/dashboard/views/providers.py +83 -0
  100. foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
  101. foundry_mcp/dashboard/views/tool_usage.py +139 -0
  102. foundry_mcp/prompts/__init__.py +9 -0
  103. foundry_mcp/prompts/workflows.py +525 -0
  104. foundry_mcp/resources/__init__.py +9 -0
  105. foundry_mcp/resources/specs.py +591 -0
  106. foundry_mcp/schemas/__init__.py +38 -0
  107. foundry_mcp/schemas/sdd-spec-schema.json +386 -0
  108. foundry_mcp/server.py +164 -0
  109. foundry_mcp/tools/__init__.py +10 -0
  110. foundry_mcp/tools/unified/__init__.py +71 -0
  111. foundry_mcp/tools/unified/authoring.py +1487 -0
  112. foundry_mcp/tools/unified/context_helpers.py +98 -0
  113. foundry_mcp/tools/unified/documentation_helpers.py +198 -0
  114. foundry_mcp/tools/unified/environment.py +939 -0
  115. foundry_mcp/tools/unified/error.py +462 -0
  116. foundry_mcp/tools/unified/health.py +225 -0
  117. foundry_mcp/tools/unified/journal.py +841 -0
  118. foundry_mcp/tools/unified/lifecycle.py +632 -0
  119. foundry_mcp/tools/unified/metrics.py +777 -0
  120. foundry_mcp/tools/unified/plan.py +745 -0
  121. foundry_mcp/tools/unified/pr.py +294 -0
  122. foundry_mcp/tools/unified/provider.py +629 -0
  123. foundry_mcp/tools/unified/review.py +685 -0
  124. foundry_mcp/tools/unified/review_helpers.py +299 -0
  125. foundry_mcp/tools/unified/router.py +102 -0
  126. foundry_mcp/tools/unified/server.py +580 -0
  127. foundry_mcp/tools/unified/spec.py +808 -0
  128. foundry_mcp/tools/unified/task.py +2202 -0
  129. foundry_mcp/tools/unified/test.py +370 -0
  130. foundry_mcp/tools/unified/verification.py +520 -0
  131. foundry_mcp-0.3.3.dist-info/METADATA +337 -0
  132. foundry_mcp-0.3.3.dist-info/RECORD +135 -0
  133. foundry_mcp-0.3.3.dist-info/WHEEL +4 -0
  134. foundry_mcp-0.3.3.dist-info/entry_points.txt +3 -0
  135. foundry_mcp-0.3.3.dist-info/licenses/LICENSE +21 -0
foundry_mcp/core/spec.py (new file, 1,650 lines added):
"""
JSON spec file operations for SDD workflows.
Provides loading, saving, finding, and listing specs with atomic writes and backups.
"""

import json
import re
import shutil
import subprocess
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Dict, Any, List, Tuple

# Valid templates and categories for spec creation
TEMPLATES = ("simple", "medium", "complex", "security")
CATEGORIES = ("investigation", "implementation", "refactoring", "decision", "research")

# Valid verification types for verify nodes
# - test: Automated tests via mcp__foundry-mcp__test-run
# - fidelity: Implementation-vs-spec comparison via mcp__foundry-mcp__spec-review-fidelity
VERIFICATION_TYPES = ("test", "fidelity")


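# Editorial example, not part of the packaged module: a minimal sketch of how
# these module-level tuples gate input validation (create_spec() below applies
# the same membership checks before building a spec).
def _example_validate_inputs(template: str, category: str) -> None:
    if template not in TEMPLATES:
        raise ValueError(f"template must be one of {TEMPLATES}")
    if category not in CATEGORIES:
        raise ValueError(f"category must be one of {CATEGORIES}")

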
def find_git_root() -> Optional[Path]:
    """Find the root of the git repository."""
    try:
        result = subprocess.run(
            ["git", "rev-parse", "--show-toplevel"],
            capture_output=True,
            text=True,
            check=True,
        )
        return Path(result.stdout.strip())
    except (subprocess.CalledProcessError, FileNotFoundError):
        return None


def find_specs_directory(provided_path: Optional[str] = None) -> Optional[Path]:
    """
    Discover the specs directory.

    Args:
        provided_path: Optional explicit path to specs directory or file

    Returns:
        Absolute Path to specs directory (containing pending/active/completed/archived),
        or None if not found
    """

    def is_valid_specs_dir(p: Path) -> bool:
        """Check if a directory is a valid specs directory."""
        return (
            (p / "pending").is_dir()
            or (p / "active").is_dir()
            or (p / "completed").is_dir()
            or (p / "archived").is_dir()
        )

    if provided_path:
        path = Path(provided_path).resolve()

        if path.is_file():
            path = path.parent

        if not path.is_dir():
            return None

        if is_valid_specs_dir(path):
            return path

        specs_subdir = path / "specs"
        if specs_subdir.is_dir() and is_valid_specs_dir(specs_subdir):
            return specs_subdir

        for parent in list(path.parents)[:5]:
            if is_valid_specs_dir(parent):
                return parent
            parent_specs = parent / "specs"
            if parent_specs.is_dir() and is_valid_specs_dir(parent_specs):
                return parent_specs

        return None

    git_root = find_git_root()

    if git_root:
        search_paths = [
            Path.cwd() / "specs",
            git_root / "specs",
        ]
    else:
        search_paths = [
            Path.cwd() / "specs",
            Path.cwd().parent / "specs",
        ]

    for p in search_paths:
        if p.exists() and is_valid_specs_dir(p):
            return p.resolve()

    return None


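# Editorial example, not part of the packaged module: discovery can run fully
# automatically (cwd/specs, then <git-root>/specs) or start from an explicit
# directory or file path; the path below is hypothetical.
def _example_discover() -> Optional[Path]:
    auto = find_specs_directory()
    explicit = find_specs_directory("/path/to/project/specs")
    return auto or explicit

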
def find_spec_file(spec_id: str, specs_dir: Path) -> Optional[Path]:
    """
    Find the spec file for a given spec ID.

    Searches in pending/, active/, completed/, and archived/ subdirectories.

    Args:
        spec_id: Specification ID
        specs_dir: Path to specs directory

    Returns:
        Absolute path to the spec file, or None if not found
    """
    search_dirs = ["pending", "active", "completed", "archived"]

    for subdir in search_dirs:
        spec_file = specs_dir / subdir / f"{spec_id}.json"
        if spec_file.exists():
            return spec_file

    return None


def resolve_spec_file(
    spec_name_or_path: str, specs_dir: Optional[Path] = None
) -> Optional[Path]:
    """
    Resolve spec file from either a spec name or full path.

    Args:
        spec_name_or_path: Either a spec name or full path
        specs_dir: Optional specs directory for name-based lookups

    Returns:
        Resolved Path object if found, None otherwise
    """
    path = Path(spec_name_or_path)

    if path.is_absolute():
        spec_file = path.resolve()
        if spec_file.exists() and spec_file.suffix == ".json":
            return spec_file
        return None

    search_name = spec_name_or_path
    if spec_name_or_path.endswith(".json"):
        spec_file = path.resolve()
        if spec_file.exists() and spec_file.suffix == ".json":
            return spec_file
        search_name = path.stem

    if specs_dir is None:
        specs_dir = find_specs_directory()

    if not specs_dir:
        return None

    return find_spec_file(search_name, specs_dir)


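# Editorial example, not part of the packaged module: the three input forms
# resolve_spec_file() accepts; the IDs and paths are hypothetical.
def _example_resolve(specs_dir: Path) -> None:
    by_name = resolve_spec_file("my-feature-2025-01-15-001", specs_dir)
    by_filename = resolve_spec_file("my-feature-2025-01-15-001.json", specs_dir)
    by_abs_path = resolve_spec_file("/repo/specs/active/my-feature-2025-01-15-001.json")
    print(by_name, by_filename, by_abs_path)

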
def load_spec(
    spec_id: str, specs_dir: Optional[Path] = None
) -> Optional[Dict[str, Any]]:
    """
    Load the JSON spec file for a given spec ID or path.

    Args:
        spec_id: Specification ID or path to spec file
        specs_dir: Path to specs directory (optional, auto-detected if not provided)

    Returns:
        Spec data dictionary, or None if not found
    """
    spec_file = resolve_spec_file(spec_id, specs_dir)

    if not spec_file:
        return None

    try:
        with open(spec_file, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return None


def save_spec(
    spec_id: str,
    spec_data: Dict[str, Any],
    specs_dir: Optional[Path] = None,
    backup: bool = True,
    validate: bool = True,
) -> bool:
    """
    Save JSON spec file with atomic write and optional backup.

    Args:
        spec_id: Specification ID or path to spec file
        spec_data: Spec data to write
        specs_dir: Path to specs directory (optional, auto-detected if not provided)
        backup: Create backup before writing (default: True)
        validate: Validate JSON before writing (default: True)

    Returns:
        True if successful, False otherwise
    """
    spec_file = resolve_spec_file(spec_id, specs_dir)

    if not spec_file:
        return False

    if validate:
        if not _validate_spec_structure(spec_data):
            return False

    spec_data["last_updated"] = (
        datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    )

    if backup:
        backup_spec(spec_id, specs_dir)

    temp_file = spec_file.with_suffix(".tmp")
    try:
        with open(temp_file, "w") as f:
            json.dump(spec_data, f, indent=2)
        temp_file.replace(spec_file)
        return True
    except (IOError, OSError):
        if temp_file.exists():
            temp_file.unlink()
        return False


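# Editorial example, not part of the packaged module: a load/modify/save round
# trip. save_spec() validates the structure, stamps last_updated, snapshots a
# backup, then writes through a temp file and an atomic replace.
def _example_round_trip(spec_id: str, specs_dir: Path) -> bool:
    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return False
    spec_data["current_phase"] = "phase-2"  # hypothetical edit
    return save_spec(spec_id, spec_data, specs_dir, backup=True, validate=True)

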
def backup_spec(spec_id: str, specs_dir: Optional[Path] = None) -> Optional[Path]:
    """
    Create a backup copy of the JSON spec file in the .backups/ directory.

    Args:
        spec_id: Specification ID or path to spec file
        specs_dir: Path to specs directory (optional, auto-detected if not provided)

    Returns:
        Path to backup file if created, None otherwise
    """
    spec_file = resolve_spec_file(spec_id, specs_dir)

    if not spec_file:
        return None

    if specs_dir is None:
        specs_dir = find_specs_directory()

    if not specs_dir:
        return None

    backups_dir = specs_dir / ".backups"
    backups_dir.mkdir(parents=True, exist_ok=True)

    backup_file = backups_dir / f"{spec_id}.backup"

    try:
        shutil.copy2(spec_file, backup_file)
        return backup_file
    except (IOError, OSError):
        return None


def _validate_spec_structure(spec_data: Dict[str, Any]) -> bool:
    """
    Validate basic JSON spec file structure.

    Args:
        spec_data: Spec data dictionary

    Returns:
        True if valid, False otherwise
    """
    required_fields = ["spec_id", "hierarchy"]
    for field in required_fields:
        if field not in spec_data:
            return False

    hierarchy = spec_data.get("hierarchy", {})
    if not isinstance(hierarchy, dict):
        return False

    for node_id, node_data in hierarchy.items():
        if not isinstance(node_data, dict):
            return False
        if "type" not in node_data or "status" not in node_data:
            return False
        if node_data["status"] not in [
            "pending",
            "in_progress",
            "completed",
            "blocked",
        ]:
            return False

    return True


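# Editorial example, not part of the packaged module: the smallest dict that
# passes _validate_spec_structure(): a spec_id plus a hierarchy whose nodes all
# carry a type and one of the four recognized statuses.
def _example_minimal_valid_spec() -> bool:
    spec = {
        "spec_id": "demo-2025-01-15-001",
        "hierarchy": {
            "spec-root": {"type": "spec", "status": "pending"},
        },
    }
    return _validate_spec_structure(spec)  # True

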
def list_specs(
    specs_dir: Optional[Path] = None, status: Optional[str] = None
) -> List[Dict[str, Any]]:
    """
    List specification files with optional filtering.

    Args:
        specs_dir: Base specs directory (auto-detected if not provided)
        status: Filter by status folder (active, completed, archived, pending, or None for all)

    Returns:
        List of spec info dictionaries
    """
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if not specs_dir:
        return []

    if status and status != "all":
        status_dirs = [specs_dir / status]
    else:
        status_dirs = [
            specs_dir / "active",
            specs_dir / "completed",
            specs_dir / "archived",
            specs_dir / "pending",
        ]

    specs_info = []

    for status_dir in status_dirs:
        if not status_dir.exists():
            continue

        status_name = status_dir.name

        json_files = sorted(status_dir.glob("*.json"))

        for json_file in json_files:
            spec_data = load_spec(json_file.stem, specs_dir)
            if not spec_data:
                continue

            metadata = spec_data.get("metadata", {})
            hierarchy = spec_data.get("hierarchy", {})

            total_tasks = len(hierarchy)
            completed_tasks = sum(
                1 for task in hierarchy.values() if task.get("status") == "completed"
            )

            progress_pct = 0
            if total_tasks > 0:
                progress_pct = int((completed_tasks / total_tasks) * 100)

            info = {
                "spec_id": json_file.stem,
                "status": status_name,
                "title": metadata.get("title", spec_data.get("title", "Untitled")),
                "total_tasks": total_tasks,
                "completed_tasks": completed_tasks,
                "progress_percentage": progress_pct,
                "current_phase": metadata.get("current_phase"),
            }

            specs_info.append(info)

    # Sort: active first, then by completion % (highest first)
    specs_info.sort(
        key=lambda s: (
            0 if s.get("status") == "active" else 1,
            -s.get("progress_percentage", 0),
        )
    )

    return specs_info


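# Editorial example, not part of the packaged module: print a one-line summary
# per active spec, using the info dicts produced above.
def _example_print_active(specs_dir: Path) -> None:
    for info in list_specs(specs_dir, status="active"):
        print(
            f"{info['spec_id']}: {info['completed_tasks']}/{info['total_tasks']} "
            f"tasks ({info['progress_percentage']}%)"
        )

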
def get_node(spec_data: Dict[str, Any], node_id: str) -> Optional[Dict[str, Any]]:
    """
    Get a specific node from the hierarchy.

    Args:
        spec_data: JSON spec file data
        node_id: Node identifier

    Returns:
        Node data dictionary or None if not found
    """
    hierarchy = spec_data.get("hierarchy", {})
    return hierarchy.get(node_id)


def update_node(
    spec_data: Dict[str, Any], node_id: str, updates: Dict[str, Any]
) -> bool:
    """
    Update a node in the hierarchy.

    Special handling for metadata: existing metadata fields are preserved
    and merged with new metadata fields.

    Args:
        spec_data: JSON spec file data
        node_id: Node identifier
        updates: Dictionary of fields to update

    Returns:
        True if node exists and was updated, False otherwise
    """
    hierarchy = spec_data.get("hierarchy", {})

    if node_id not in hierarchy:
        return False

    node = hierarchy[node_id]

    if "metadata" in updates:
        existing_metadata = node.get("metadata", {})
        new_metadata = updates["metadata"]
        updates = updates.copy()
        updates["metadata"] = {**existing_metadata, **new_metadata}

    node.update(updates)
    return True


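# Editorial example, not part of the packaged module: update_node() merges
# metadata rather than replacing it, so unrelated keys survive later updates.
# The node ID and values are hypothetical.
def _example_metadata_merge(spec_data: Dict[str, Any]) -> None:
    update_node(spec_data, "task-1-1", {"metadata": {"owner": "alice"}})
    update_node(spec_data, "task-1-1", {"metadata": {"reviewed": True}})
    # task-1-1's metadata now contains both "owner" and "reviewed".

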
# =============================================================================
# Spec Creation Functions
# =============================================================================


def generate_spec_id(name: str) -> str:
    """
    Generate a spec ID from a human-readable name.

    Args:
        name: Human-readable spec name.

    Returns:
        URL-safe spec ID with date suffix (e.g., "my-feature-2025-01-15-001").
    """
    # Normalize: lowercase, replace spaces/special chars with hyphens
    slug = re.sub(r"[^a-z0-9]+", "-", name.lower()).strip("-")
    # Add date suffix
    date_suffix = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    # Add sequence number (001 for new specs)
    return f"{slug}-{date_suffix}-001"


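# Editorial example, not part of the packaged module: slugging is lossy and
# date-stamped. Hypothetically, on 2025-01-15:
#   generate_spec_id("Add OAuth2 Login!") == "add-oauth2-login-2025-01-15-001"
def _example_spec_id() -> str:
    return generate_spec_id("Add OAuth2 Login!")

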
def _add_phase_verification(
    hierarchy: Dict[str, Any], phase_num: int, phase_id: str
) -> None:
    """
    Add verify nodes (auto + fidelity) to a phase.

    Args:
        hierarchy: The hierarchy dict to modify.
        phase_num: Phase number (1, 2, 3, etc.).
        phase_id: Phase node ID (e.g., "phase-1").
    """
    verify_auto_id = f"verify-{phase_num}-1"
    verify_fidelity_id = f"verify-{phase_num}-2"

    # Run tests verification
    hierarchy[verify_auto_id] = {
        "type": "verify",
        "title": "Run tests",
        "status": "pending",
        "parent": phase_id,
        "children": [],
        "total_tasks": 1,
        "completed_tasks": 0,
        "metadata": {
            "verification_type": "run-tests",
            "mcp_tool": "mcp__foundry-mcp__test-run",
            "expected": "All tests pass",
        },
        "dependencies": {
            "blocks": [verify_fidelity_id],
            "blocked_by": [],
            "depends": [],
        },
    }

    # Fidelity verification (spec review)
    hierarchy[verify_fidelity_id] = {
        "type": "verify",
        "title": "Fidelity review",
        "status": "pending",
        "parent": phase_id,
        "children": [],
        "total_tasks": 1,
        "completed_tasks": 0,
        "metadata": {
            "verification_type": "fidelity",
            "mcp_tool": "mcp__foundry-mcp__spec-review-fidelity",
            "scope": "phase",
            "target": phase_id,
            "expected": "Implementation matches specification",
        },
        "dependencies": {
            "blocks": [],
            "blocked_by": [verify_auto_id],
            "depends": [],
        },
    }

    # Update phase children and task count
    hierarchy[phase_id]["children"].extend([verify_auto_id, verify_fidelity_id])
    hierarchy[phase_id]["total_tasks"] += 2


def _generate_phase_id(hierarchy: Dict[str, Any]) -> Tuple[str, int]:
    """Generate the next phase ID and numeric suffix."""
    pattern = re.compile(r"^phase-(\d+)$")
    max_id = 0
    for node_id in hierarchy.keys():
        match = pattern.match(node_id)
        if match:
            max_id = max(max_id, int(match.group(1)))
    next_id = max_id + 1
    return f"phase-{next_id}", next_id


def add_phase(
    spec_id: str,
    title: str,
    description: Optional[str] = None,
    purpose: Optional[str] = None,
    estimated_hours: Optional[float] = None,
    position: Optional[int] = None,
    link_previous: bool = True,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add a new phase under spec-root and scaffold verification tasks.

    Args:
        spec_id: Specification ID to mutate.
        title: Phase title.
        description: Optional phase description.
        purpose: Optional purpose/goal metadata string.
        estimated_hours: Optional estimated hours for the phase.
        position: Optional zero-based insertion index in spec-root children.
        link_previous: Whether to automatically block on the previous phase when appending.
        specs_dir: Specs directory override.

    Returns:
        Tuple of (result_dict, error_message).
    """
    if not spec_id or not spec_id.strip():
        return None, "Specification ID is required"

    if not title or not title.strip():
        return None, "Phase title is required"

    if estimated_hours is not None and estimated_hours < 0:
        return None, "estimated_hours must be non-negative"

    title = title.strip()

    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})
    spec_root = hierarchy.get("spec-root")

    if spec_root is None:
        return None, "Specification root node 'spec-root' not found"

    if spec_root.get("type") not in {"spec", "root"}:
        return None, "Specification root node has invalid type"

    children = spec_root.get("children", []) or []
    if not isinstance(children, list):
        children = []

    insert_index = len(children)
    if position is not None and position >= 0:
        insert_index = min(position, len(children))

    phase_id, phase_num = _generate_phase_id(hierarchy)

    metadata: Dict[str, Any] = {
        "purpose": (purpose.strip() if purpose else ""),
    }
    if description:
        metadata["description"] = description.strip()
    if estimated_hours is not None:
        metadata["estimated_hours"] = estimated_hours

    phase_node = {
        "type": "phase",
        "title": title,
        "status": "pending",
        "parent": "spec-root",
        "children": [],
        "total_tasks": 0,
        "completed_tasks": 0,
        "metadata": metadata,
        "dependencies": {
            "blocks": [],
            "blocked_by": [],
            "depends": [],
        },
    }

    hierarchy[phase_id] = phase_node

    if insert_index == len(children):
        children.append(phase_id)
    else:
        children.insert(insert_index, phase_id)
    spec_root["children"] = children

    linked_phase_id: Optional[str] = None
    if link_previous and insert_index > 0 and insert_index == len(children) - 1:
        candidate = children[insert_index - 1]
        previous = hierarchy.get(candidate)
        if previous and previous.get("type") == "phase":
            linked_phase_id = candidate
            prev_deps = previous.setdefault(
                "dependencies",
                {
                    "blocks": [],
                    "blocked_by": [],
                    "depends": [],
                },
            )
            blocks = prev_deps.setdefault("blocks", [])
            if phase_id not in blocks:
                blocks.append(phase_id)
            phase_node["dependencies"]["blocked_by"].append(candidate)

    _add_phase_verification(hierarchy, phase_num, phase_id)

    phase_task_total = phase_node.get("total_tasks", 0)
    total_tasks = spec_root.get("total_tasks", 0)
    spec_root["total_tasks"] = total_tasks + phase_task_total

    # Update spec-level estimated hours if provided
    if estimated_hours is not None:
        spec_metadata = spec_data.setdefault("metadata", {})
        current_hours = spec_metadata.get("estimated_hours")
        if isinstance(current_hours, (int, float)):
            spec_metadata["estimated_hours"] = current_hours + estimated_hours
        else:
            spec_metadata["estimated_hours"] = estimated_hours

    saved = save_spec(spec_id, spec_data, specs_dir)
    if not saved:
        return None, "Failed to save specification"

    verify_ids = [f"verify-{phase_num}-1", f"verify-{phase_num}-2"]

    return {
        "spec_id": spec_id,
        "phase_id": phase_id,
        "title": title,
        "position": insert_index,
        "linked_previous": linked_phase_id,
        "verify_tasks": verify_ids,
    }, None


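# Editorial example, not part of the packaged module: appending a phase blocks
# it behind the previous phase (unless link_previous=False) and scaffolds the
# two verify tasks; the spec ID below is hypothetical.
def _example_add_phase(specs_dir: Path) -> None:
    result, err = add_phase(
        "my-feature-2025-01-15-001",
        "Hardening",
        purpose="Close gaps found in review",
        estimated_hours=3,
        specs_dir=specs_dir,
    )
    if err:
        print(f"add_phase failed: {err}")
    else:
        print(result["phase_id"], result["verify_tasks"])

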
def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
    """
    Recursively collect all descendant node IDs for a given node.

    Args:
        hierarchy: The spec hierarchy dict
        node_id: Starting node ID

    Returns:
        List of all descendant node IDs (not including the starting node)
    """
    descendants: List[str] = []
    node = hierarchy.get(node_id)
    if not node:
        return descendants

    children = node.get("children", [])
    if not isinstance(children, list):
        return descendants

    for child_id in children:
        descendants.append(child_id)
        descendants.extend(_collect_descendants(hierarchy, child_id))

    return descendants


def _count_tasks_in_subtree(
    hierarchy: Dict[str, Any], node_ids: List[str]
) -> Tuple[int, int]:
    """
    Count total and completed tasks in a list of nodes.

    Args:
        hierarchy: The spec hierarchy dict
        node_ids: List of node IDs to count

    Returns:
        Tuple of (total_count, completed_count)
    """
    total = 0
    completed = 0

    for node_id in node_ids:
        node = hierarchy.get(node_id)
        if not node:
            continue
        node_type = node.get("type")
        if node_type in ("task", "subtask", "verify"):
            total += 1
            if node.get("status") == "completed":
                completed += 1

    return total, completed


def _remove_dependency_references(
    hierarchy: Dict[str, Any], removed_ids: List[str]
) -> None:
    """
    Remove references to deleted nodes from all dependency lists.

    Args:
        hierarchy: The spec hierarchy dict
        removed_ids: List of node IDs being removed
    """
    removed_set = set(removed_ids)

    for node_id, node in hierarchy.items():
        deps = node.get("dependencies")
        if not deps or not isinstance(deps, dict):
            continue

        for key in ("blocks", "blocked_by", "depends"):
            dep_list = deps.get(key)
            if isinstance(dep_list, list):
                deps[key] = [d for d in dep_list if d not in removed_set]


def remove_phase(
    spec_id: str,
    phase_id: str,
    force: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Remove a phase and all its children from a specification.

    Handles adjacent phase re-linking: if phase B is removed and A blocks B
    which blocks C, then A will be updated to block C directly.

    Args:
        spec_id: Specification ID containing the phase.
        phase_id: Phase ID to remove (e.g., "phase-1").
        force: If True, remove even if phase contains non-completed tasks.
            If False (default), refuse to remove phases with active work.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "phase_id": ..., "children_removed": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate inputs
    if not spec_id or not spec_id.strip():
        return None, "Specification ID is required"

    if not phase_id or not phase_id.strip():
        return None, "Phase ID is required"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate phase exists
    phase = hierarchy.get(phase_id)
    if phase is None:
        return None, f"Phase '{phase_id}' not found"

    # Validate node type is phase
    node_type = phase.get("type")
    if node_type != "phase":
        return None, f"Node '{phase_id}' is not a phase (type: {node_type})"

    # Collect all descendants
    descendants = _collect_descendants(hierarchy, phase_id)

    # Check for non-completed tasks if force is False
    if not force:
        # Count tasks in phase (excluding verify nodes for the active work check)
        all_nodes = [phase_id] + descendants
        has_active_work = False
        active_task_ids: List[str] = []

        for node_id in all_nodes:
            node = hierarchy.get(node_id)
            if not node:
                continue
            node_status = node.get("status")
            node_node_type = node.get("type")
            # Consider in_progress or pending tasks as active work
            if node_node_type in ("task", "subtask") and node_status in (
                "pending",
                "in_progress",
            ):
                has_active_work = True
                active_task_ids.append(node_id)

        if has_active_work:
            return (
                None,
                f"Phase '{phase_id}' has {len(active_task_ids)} non-completed task(s). "
                f"Use force=True to remove anyway. Active tasks: {', '.join(active_task_ids[:5])}"
                + ("..." if len(active_task_ids) > 5 else ""),
            )

    # Get spec-root and phase position info for re-linking
    spec_root = hierarchy.get("spec-root")
    if spec_root is None:
        return None, "Specification root node 'spec-root' not found"

    children = spec_root.get("children", [])
    if not isinstance(children, list):
        children = []

    # Find phase position
    try:
        phase_index = children.index(phase_id)
    except ValueError:
        return None, f"Phase '{phase_id}' not found in spec-root children"

    # Identify adjacent phases for re-linking
    prev_phase_id: Optional[str] = None
    next_phase_id: Optional[str] = None

    if phase_index > 0:
        candidate = children[phase_index - 1]
        if hierarchy.get(candidate, {}).get("type") == "phase":
            prev_phase_id = candidate

    if phase_index < len(children) - 1:
        candidate = children[phase_index + 1]
        if hierarchy.get(candidate, {}).get("type") == "phase":
            next_phase_id = candidate

    # Re-link adjacent phases: if prev blocks this phase and this phase blocks next,
    # then prev should now block next directly
    relinked_from: Optional[str] = None
    relinked_to: Optional[str] = None

    if prev_phase_id and next_phase_id:
        prev_phase = hierarchy.get(prev_phase_id)
        next_phase = hierarchy.get(next_phase_id)

        if prev_phase and next_phase:
            # Check if prev_phase blocks this phase
            prev_deps = prev_phase.get("dependencies", {})
            prev_blocks = prev_deps.get("blocks", [])

            # Check if this phase blocks next_phase
            phase_deps = phase.get("dependencies", {})
            phase_blocks = phase_deps.get("blocks", [])

            if phase_id in prev_blocks and next_phase_id in phase_blocks:
                # Re-link: prev should now block next
                if next_phase_id not in prev_blocks:
                    prev_blocks.append(next_phase_id)

                # Update next phase's blocked_by
                next_deps = next_phase.setdefault(
                    "dependencies",
                    {
                        "blocks": [],
                        "blocked_by": [],
                        "depends": [],
                    },
                )
                next_blocked_by = next_deps.setdefault("blocked_by", [])
                if prev_phase_id not in next_blocked_by:
                    next_blocked_by.append(prev_phase_id)

                relinked_from = prev_phase_id
                relinked_to = next_phase_id

    # Count tasks being removed
    nodes_to_remove = [phase_id] + descendants
    total_removed, completed_removed = _count_tasks_in_subtree(hierarchy, descendants)

    # Remove all nodes from hierarchy
    for node_id in nodes_to_remove:
        if node_id in hierarchy:
            del hierarchy[node_id]

    # Remove phase from spec-root children
    children.remove(phase_id)
    spec_root["children"] = children

    # Update spec-root task counts
    current_total = spec_root.get("total_tasks", 0)
    current_completed = spec_root.get("completed_tasks", 0)
    spec_root["total_tasks"] = max(0, current_total - total_removed)
    spec_root["completed_tasks"] = max(0, current_completed - completed_removed)

    # Clean up dependency references to removed nodes
    _remove_dependency_references(hierarchy, nodes_to_remove)

    # Save the spec
    saved = save_spec(spec_id, spec_data, specs_dir)
    if not saved:
        return None, "Failed to save specification"

    result: Dict[str, Any] = {
        "spec_id": spec_id,
        "phase_id": phase_id,
        "phase_title": phase.get("title", ""),
        "children_removed": len(descendants),
        "total_tasks_removed": total_removed,
        "completed_tasks_removed": completed_removed,
        "force": force,
    }

    if relinked_from and relinked_to:
        result["relinked"] = {
            "from": relinked_from,
            "to": relinked_to,
        }

    return result, None


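# Editorial example, not part of the packaged module: removal refuses to drop
# pending/in_progress tasks unless forced, and reports any re-linking of the
# neighboring phases; IDs are hypothetical.
def _example_remove_phase(specs_dir: Path) -> None:
    result, err = remove_phase(
        "my-feature-2025-01-15-001", "phase-2", force=False, specs_dir=specs_dir
    )
    if err:
        # e.g. "Phase 'phase-2' has 3 non-completed task(s). Use force=True ..."
        result, err = remove_phase(
            "my-feature-2025-01-15-001", "phase-2", force=True, specs_dir=specs_dir
        )
    if result and "relinked" in result:
        print(f"{result['relinked']['from']} now blocks {result['relinked']['to']}")

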
def get_template_structure(template: str, category: str) -> Dict[str, Any]:
    """
    Get the hierarchical structure for a spec template.

    All templates include per-phase verification (auto + fidelity) for each phase.

    Args:
        template: Template type (simple, medium, complex, security).
        category: Default task category.

    Returns:
        Hierarchy dict for the spec.
    """
    base_hierarchy = {
        "spec-root": {
            "type": "spec",
            "title": "",  # Filled in later
            "status": "pending",
            "parent": None,
            "children": ["phase-1"],
            "total_tasks": 0,
            "completed_tasks": 0,
            "metadata": {
                "purpose": "",
                "category": category,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": [],
                "depends": [],
            },
        },
        "phase-1": {
            "type": "phase",
            "title": "Planning & Discovery",
            "status": "pending",
            "parent": "spec-root",
            "children": ["task-1-1"],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "purpose": "Initial planning and requirements gathering",
                "estimated_hours": 2,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": [],
                "depends": [],
            },
        },
        "task-1-1": {
            "type": "task",
            "title": "Define requirements",
            "status": "pending",
            "parent": "phase-1",
            "children": [],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "details": "Document the requirements and acceptance criteria",
                "category": category,
                "estimated_hours": 1,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": [],
                "depends": [],
            },
        },
    }

    # Add verification to phase-1 (all templates)
    _add_phase_verification(base_hierarchy, 1, "phase-1")
    base_hierarchy["spec-root"]["total_tasks"] = 3  # task + 2 verify

    if template == "simple":
        return base_hierarchy

    # Medium/complex/security: add implementation phase
    if template in ("medium", "complex", "security"):
        base_hierarchy["spec-root"]["children"].append("phase-2")
        base_hierarchy["phase-1"]["dependencies"]["blocks"].append("phase-2")
        base_hierarchy["phase-2"] = {
            "type": "phase",
            "title": "Implementation",
            "status": "pending",
            "parent": "spec-root",
            "children": ["task-2-1"],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "purpose": "Core implementation work",
                "estimated_hours": 8,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": ["phase-1"],
                "depends": [],
            },
        }
        base_hierarchy["task-2-1"] = {
            "type": "task",
            "title": "Implement core functionality",
            "status": "pending",
            "parent": "phase-2",
            "children": [],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "details": "Implement the main features",
                "category": category,
                "estimated_hours": 4,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": [],
                "depends": [],
            },
        }
        # Add verification to phase-2
        _add_phase_verification(base_hierarchy, 2, "phase-2")
        base_hierarchy["spec-root"]["total_tasks"] = 6  # 2 tasks + 4 verify

    # Security: add security review phase
    if template == "security":
        base_hierarchy["spec-root"]["children"].append("phase-3")
        base_hierarchy["phase-2"]["dependencies"]["blocks"].append("phase-3")
        base_hierarchy["phase-3"] = {
            "type": "phase",
            "title": "Security Review",
            "status": "pending",
            "parent": "spec-root",
            "children": ["task-3-1"],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "purpose": "Security audit and hardening",
                "estimated_hours": 4,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": ["phase-2"],
                "depends": [],
            },
        }
        base_hierarchy["task-3-1"] = {
            "type": "task",
            "title": "Security audit",
            "status": "pending",
            "parent": "phase-3",
            "children": [],
            "total_tasks": 1,
            "completed_tasks": 0,
            "metadata": {
                "details": "Review for security vulnerabilities",
                "category": "investigation",
                "estimated_hours": 2,
            },
            "dependencies": {
                "blocks": [],
                "blocked_by": [],
                "depends": [],
            },
        }
        # Add verification to phase-3
        _add_phase_verification(base_hierarchy, 3, "phase-3")
        base_hierarchy["spec-root"]["total_tasks"] = 9  # 3 tasks + 6 verify

    return base_hierarchy


def create_spec(
    name: str,
    template: str = "medium",
    category: str = "implementation",
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Create a new specification file from a template.

    Args:
        name: Human-readable name for the specification.
        template: Template type (simple, medium, complex, security). Default: medium.
        category: Default task category. Default: implementation.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "spec_path": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate template
    if template not in TEMPLATES:
        return (
            None,
            f"Invalid template '{template}'. Must be one of: {', '.join(TEMPLATES)}",
        )

    # Validate category
    if category not in CATEGORIES:
        return (
            None,
            f"Invalid category '{category}'. Must be one of: {', '.join(CATEGORIES)}",
        )

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Ensure pending directory exists
    pending_dir = specs_dir / "pending"
    pending_dir.mkdir(parents=True, exist_ok=True)

    # Generate spec ID
    spec_id = generate_spec_id(name)

    # Check if spec already exists
    spec_path = pending_dir / f"{spec_id}.json"
    if spec_path.exists():
        return None, f"Specification already exists: {spec_id}"

    # Generate spec structure
    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    hierarchy = get_template_structure(template, category)

    # Fill in the title
    hierarchy["spec-root"]["title"] = name

    # Calculate estimated hours from hierarchy
    estimated_hours = sum(
        node.get("metadata", {}).get("estimated_hours", 0)
        for node in hierarchy.values()
        if isinstance(node, dict)
    )

    spec_data = {
        "spec_id": spec_id,
        "title": name,
        "generated": now,
        "last_updated": now,
        "metadata": {
            "description": "",
            "objectives": [],
            "complexity": "medium" if template in ("medium", "complex") else "low",
            "estimated_hours": estimated_hours,
            "assumptions": [],
            "status": "pending",
            "owner": "",
            "progress_percentage": 0,
            "current_phase": "phase-1",
            "category": category,
            "template": template,
        },
        "progress_percentage": 0,
        "status": "pending",
        "current_phase": "phase-1",
        "hierarchy": hierarchy,
        "journal": [],
    }

    # Write the spec file
    try:
        with open(spec_path, "w") as f:
            json.dump(spec_data, f, indent=2)
    except (IOError, OSError) as e:
        return None, f"Failed to write spec file: {e}"

    # Count tasks and phases
    task_count = sum(
        1
        for node in hierarchy.values()
        if isinstance(node, dict) and node.get("type") in ("task", "subtask", "verify")
    )
    phase_count = sum(
        1
        for node in hierarchy.values()
        if isinstance(node, dict) and node.get("type") == "phase"
    )

    return {
        "spec_id": spec_id,
        "spec_path": str(spec_path),
        "template": template,
        "category": category,
        "name": name,
        "structure": {
            "phases": phase_count,
            "tasks": task_count,
        },
    }, None


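# Editorial example, not part of the packaged module: creating a medium spec
# yields two phases and six leaf tasks (2 tasks + 4 verify nodes), written to
# specs/pending/<spec_id>.json.
def _example_create(specs_dir: Path) -> None:
    result, err = create_spec("Add OAuth2 Login", template="medium", specs_dir=specs_dir)
    if result:
        print(result["spec_id"], result["structure"])  # {'phases': 2, 'tasks': 6}

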
# Valid assumption types
ASSUMPTION_TYPES = ("constraint", "requirement")


def add_assumption(
    spec_id: str,
    text: str,
    assumption_type: str = "constraint",
    author: Optional[str] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add an assumption to a specification's assumptions array.

    The schema expects assumptions to be stored as strings. The assumption_type
    and author are included in the returned result for API compatibility but
    are not stored in the spec (the text itself should be descriptive).

    Args:
        spec_id: Specification ID to add assumption to.
        text: Assumption text/description.
        assumption_type: Type of assumption (constraint, requirement). For API compatibility.
        author: Optional author. For API compatibility.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "text": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate assumption_type (for API compatibility)
    if assumption_type not in ASSUMPTION_TYPES:
        return (
            None,
            f"Invalid assumption_type '{assumption_type}'. Must be one of: {', '.join(ASSUMPTION_TYPES)}",
        )

    # Validate text
    if not text or not text.strip():
        return None, "Assumption text is required"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    # Ensure metadata.assumptions exists
    if "metadata" not in spec_data:
        spec_data["metadata"] = {}
    if "assumptions" not in spec_data["metadata"]:
        spec_data["metadata"]["assumptions"] = []

    assumptions = spec_data["metadata"]["assumptions"]

    # Schema expects strings, so store text directly
    assumption_text = text.strip()

    # Check for duplicates
    if assumption_text in assumptions:
        return None, f"Assumption already exists: {assumption_text[:50]}..."

    # Add to assumptions array (as string per schema)
    assumptions.append(assumption_text)

    # Update last_updated
    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    spec_data["last_updated"] = now

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    # Return index as "ID" for API compatibility
    assumption_index = len(assumptions)

    return {
        "spec_id": spec_id,
        "assumption_id": f"a-{assumption_index}",
        "text": assumption_text,
        "type": assumption_type,
        "author": author,
        "index": assumption_index,
    }, None


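# Editorial example, not part of the packaged module: assumptions are stored
# as plain strings, so the returned "a-<n>" ID is just a 1-based index; the
# spec ID and text are hypothetical.
def _example_assumption(specs_dir: Path) -> None:
    result, err = add_assumption(
        "my-feature-2025-01-15-001",
        "Tokens are stored server-side only",
        assumption_type="constraint",
        specs_dir=specs_dir,
    )
    if result:
        print(result["assumption_id"], result["index"])

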
def add_revision(
    spec_id: str,
    version: str,
    changelog: str,
    author: Optional[str] = None,
    modified_by: Optional[str] = None,
    review_triggered_by: Optional[str] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add a revision entry to a specification's revision_history array.

    Args:
        spec_id: Specification ID to add revision to.
        version: Version number (e.g., "1.0", "1.1", "2.0").
        changelog: Description of changes made in this revision.
        author: Optional author who made the revision.
        modified_by: Optional tool or command that made the modification.
        review_triggered_by: Optional path to review report that triggered this revision.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "version": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate version
    if not version or not version.strip():
        return None, "Version is required"

    # Validate changelog
    if not changelog or not changelog.strip():
        return None, "Changelog is required"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    # Ensure metadata.revision_history exists
    if "metadata" not in spec_data:
        spec_data["metadata"] = {}
    if "revision_history" not in spec_data["metadata"]:
        spec_data["metadata"]["revision_history"] = []

    revision_history = spec_data["metadata"]["revision_history"]

    # Create revision entry per schema
    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    revision_entry = {
        "version": version.strip(),
        "date": now,
        "changelog": changelog.strip(),
    }

    # Add optional fields if provided
    if author:
        revision_entry["author"] = author.strip()
    if modified_by:
        revision_entry["modified_by"] = modified_by.strip()
    if review_triggered_by:
        revision_entry["review_triggered_by"] = review_triggered_by.strip()

    # Append to revision history
    revision_history.append(revision_entry)

    # Update last_updated
    spec_data["last_updated"] = now

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "spec_id": spec_id,
        "version": revision_entry["version"],
        "date": revision_entry["date"],
        "changelog": revision_entry["changelog"],
        "author": author,
        "modified_by": modified_by,
        "review_triggered_by": review_triggered_by,
        "revision_index": len(revision_history),
    }, None


def list_assumptions(
    spec_id: str,
    assumption_type: Optional[str] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    List assumptions from a specification.

    Args:
        spec_id: Specification ID to list assumptions from.
        assumption_type: Optional filter by type (constraint, requirement).
            Note: Since assumptions are stored as strings, this filter is
            provided for API compatibility but has no effect.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "assumptions": [...], ...}, None)
        On failure: (None, "error message")
    """
    # Validate assumption_type if provided
    if assumption_type and assumption_type not in ASSUMPTION_TYPES:
        return (
            None,
            f"Invalid assumption_type '{assumption_type}'. Must be one of: {', '.join(ASSUMPTION_TYPES)}",
        )

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    # Get assumptions from metadata
    assumptions = spec_data.get("metadata", {}).get("assumptions", [])

    # Build assumption list with indices
    assumption_list = []
    for i, assumption in enumerate(assumptions, 1):
        if isinstance(assumption, str):
            assumption_list.append(
                {
                    "id": f"a-{i}",
                    "text": assumption,
                    "index": i,
                }
            )

    return {
        "spec_id": spec_id,
        "assumptions": assumption_list,
        "total_count": len(assumption_list),
        "filter_type": assumption_type,
    }, None


# Valid frontmatter keys that can be updated
# Note: assumptions and revision_history have dedicated functions
FRONTMATTER_KEYS = (
    "title",
    "description",
    "objectives",
    "complexity",
    "estimated_hours",
    "owner",
    "status",
    "category",
    "progress_percentage",
    "current_phase",
)


def update_frontmatter(
    spec_id: str,
    key: str,
    value: Any,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Update a top-level metadata field in a specification.

    Updates fields in the spec's metadata block. For arrays like assumptions
    or revision_history, use the dedicated add_assumption() and add_revision()
    functions instead.

    Args:
        spec_id: Specification ID to update.
        key: Metadata key to update (e.g., "title", "status", "description").
        value: New value for the key.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"spec_id": ..., "key": ..., "value": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate key
    if not key or not key.strip():
        return None, "Key is required"

    key = key.strip()

    # Block array fields that have dedicated functions
    if key in ("assumptions", "revision_history"):
        return (
            None,
            f"Use dedicated function for '{key}' (add_assumption or add_revision)",
        )

    # Validate value is not None (but allow empty string, 0, False, etc.)
    if value is None:
        return None, "Value cannot be None"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return (
            None,
            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
        )

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    # Ensure metadata exists
    if "metadata" not in spec_data:
        spec_data["metadata"] = {}

    # Get previous value for result
    previous_value = spec_data["metadata"].get(key)

    # Process value based on type
    if isinstance(value, str):
        value = value.strip() if value else value

    # Update the metadata field
    spec_data["metadata"][key] = value

    # Also update top-level fields if they exist (for backward compatibility)
    # Some fields like title, status, progress_percentage exist at both levels
    if key in ("title", "status", "progress_percentage", "current_phase"):
        spec_data[key] = value

    # Update last_updated
    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    spec_data["last_updated"] = now

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "spec_id": spec_id,
        "key": key,
        "value": value,
        "previous_value": previous_value,
    }, None
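

# Editorial end-to-end sketch, not part of the packaged module: the typical
# authoring flow these helpers support, with hypothetical names throughout.
def _example_workflow() -> None:
    specs_dir = find_specs_directory()
    if specs_dir is None:
        return
    created, err = create_spec("Demo Feature", template="simple", specs_dir=specs_dir)
    if err or created is None:
        return
    spec_id = created["spec_id"]
    update_frontmatter(spec_id, "owner", "alice", specs_dir=specs_dir)
    add_assumption(spec_id, "Runs on Python 3.10+", specs_dir=specs_dir)
    add_revision(spec_id, "1.0", "Initial draft", specs_dir=specs_dir)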