foundry_mcp-0.8.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of foundry-mcp might be problematic.

Files changed (153)
  1. foundry_mcp/__init__.py +13 -0
  2. foundry_mcp/cli/__init__.py +67 -0
  3. foundry_mcp/cli/__main__.py +9 -0
  4. foundry_mcp/cli/agent.py +96 -0
  5. foundry_mcp/cli/commands/__init__.py +37 -0
  6. foundry_mcp/cli/commands/cache.py +137 -0
  7. foundry_mcp/cli/commands/dashboard.py +148 -0
  8. foundry_mcp/cli/commands/dev.py +446 -0
  9. foundry_mcp/cli/commands/journal.py +377 -0
  10. foundry_mcp/cli/commands/lifecycle.py +274 -0
  11. foundry_mcp/cli/commands/modify.py +824 -0
  12. foundry_mcp/cli/commands/plan.py +640 -0
  13. foundry_mcp/cli/commands/pr.py +393 -0
  14. foundry_mcp/cli/commands/review.py +667 -0
  15. foundry_mcp/cli/commands/session.py +472 -0
  16. foundry_mcp/cli/commands/specs.py +686 -0
  17. foundry_mcp/cli/commands/tasks.py +807 -0
  18. foundry_mcp/cli/commands/testing.py +676 -0
  19. foundry_mcp/cli/commands/validate.py +982 -0
  20. foundry_mcp/cli/config.py +98 -0
  21. foundry_mcp/cli/context.py +298 -0
  22. foundry_mcp/cli/logging.py +212 -0
  23. foundry_mcp/cli/main.py +44 -0
  24. foundry_mcp/cli/output.py +122 -0
  25. foundry_mcp/cli/registry.py +110 -0
  26. foundry_mcp/cli/resilience.py +178 -0
  27. foundry_mcp/cli/transcript.py +217 -0
  28. foundry_mcp/config.py +1454 -0
  29. foundry_mcp/core/__init__.py +144 -0
  30. foundry_mcp/core/ai_consultation.py +1773 -0
  31. foundry_mcp/core/batch_operations.py +1202 -0
  32. foundry_mcp/core/cache.py +195 -0
  33. foundry_mcp/core/capabilities.py +446 -0
  34. foundry_mcp/core/concurrency.py +898 -0
  35. foundry_mcp/core/context.py +540 -0
  36. foundry_mcp/core/discovery.py +1603 -0
  37. foundry_mcp/core/error_collection.py +728 -0
  38. foundry_mcp/core/error_store.py +592 -0
  39. foundry_mcp/core/health.py +749 -0
  40. foundry_mcp/core/intake.py +933 -0
  41. foundry_mcp/core/journal.py +700 -0
  42. foundry_mcp/core/lifecycle.py +412 -0
  43. foundry_mcp/core/llm_config.py +1376 -0
  44. foundry_mcp/core/llm_patterns.py +510 -0
  45. foundry_mcp/core/llm_provider.py +1569 -0
  46. foundry_mcp/core/logging_config.py +374 -0
  47. foundry_mcp/core/metrics_persistence.py +584 -0
  48. foundry_mcp/core/metrics_registry.py +327 -0
  49. foundry_mcp/core/metrics_store.py +641 -0
  50. foundry_mcp/core/modifications.py +224 -0
  51. foundry_mcp/core/naming.py +146 -0
  52. foundry_mcp/core/observability.py +1216 -0
  53. foundry_mcp/core/otel.py +452 -0
  54. foundry_mcp/core/otel_stubs.py +264 -0
  55. foundry_mcp/core/pagination.py +255 -0
  56. foundry_mcp/core/progress.py +387 -0
  57. foundry_mcp/core/prometheus.py +564 -0
  58. foundry_mcp/core/prompts/__init__.py +464 -0
  59. foundry_mcp/core/prompts/fidelity_review.py +691 -0
  60. foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
  61. foundry_mcp/core/prompts/plan_review.py +627 -0
  62. foundry_mcp/core/providers/__init__.py +237 -0
  63. foundry_mcp/core/providers/base.py +515 -0
  64. foundry_mcp/core/providers/claude.py +472 -0
  65. foundry_mcp/core/providers/codex.py +637 -0
  66. foundry_mcp/core/providers/cursor_agent.py +630 -0
  67. foundry_mcp/core/providers/detectors.py +515 -0
  68. foundry_mcp/core/providers/gemini.py +426 -0
  69. foundry_mcp/core/providers/opencode.py +718 -0
  70. foundry_mcp/core/providers/opencode_wrapper.js +308 -0
  71. foundry_mcp/core/providers/package-lock.json +24 -0
  72. foundry_mcp/core/providers/package.json +25 -0
  73. foundry_mcp/core/providers/registry.py +607 -0
  74. foundry_mcp/core/providers/test_provider.py +171 -0
  75. foundry_mcp/core/providers/validation.py +857 -0
  76. foundry_mcp/core/rate_limit.py +427 -0
  77. foundry_mcp/core/research/__init__.py +68 -0
  78. foundry_mcp/core/research/memory.py +528 -0
  79. foundry_mcp/core/research/models.py +1234 -0
  80. foundry_mcp/core/research/providers/__init__.py +40 -0
  81. foundry_mcp/core/research/providers/base.py +242 -0
  82. foundry_mcp/core/research/providers/google.py +507 -0
  83. foundry_mcp/core/research/providers/perplexity.py +442 -0
  84. foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
  85. foundry_mcp/core/research/providers/tavily.py +383 -0
  86. foundry_mcp/core/research/workflows/__init__.py +25 -0
  87. foundry_mcp/core/research/workflows/base.py +298 -0
  88. foundry_mcp/core/research/workflows/chat.py +271 -0
  89. foundry_mcp/core/research/workflows/consensus.py +539 -0
  90. foundry_mcp/core/research/workflows/deep_research.py +4142 -0
  91. foundry_mcp/core/research/workflows/ideate.py +682 -0
  92. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  93. foundry_mcp/core/resilience.py +600 -0
  94. foundry_mcp/core/responses.py +1624 -0
  95. foundry_mcp/core/review.py +366 -0
  96. foundry_mcp/core/security.py +438 -0
  97. foundry_mcp/core/spec.py +4119 -0
  98. foundry_mcp/core/task.py +2463 -0
  99. foundry_mcp/core/testing.py +839 -0
  100. foundry_mcp/core/validation.py +2357 -0
  101. foundry_mcp/dashboard/__init__.py +32 -0
  102. foundry_mcp/dashboard/app.py +119 -0
  103. foundry_mcp/dashboard/components/__init__.py +17 -0
  104. foundry_mcp/dashboard/components/cards.py +88 -0
  105. foundry_mcp/dashboard/components/charts.py +177 -0
  106. foundry_mcp/dashboard/components/filters.py +136 -0
  107. foundry_mcp/dashboard/components/tables.py +195 -0
  108. foundry_mcp/dashboard/data/__init__.py +11 -0
  109. foundry_mcp/dashboard/data/stores.py +433 -0
  110. foundry_mcp/dashboard/launcher.py +300 -0
  111. foundry_mcp/dashboard/views/__init__.py +12 -0
  112. foundry_mcp/dashboard/views/errors.py +217 -0
  113. foundry_mcp/dashboard/views/metrics.py +164 -0
  114. foundry_mcp/dashboard/views/overview.py +96 -0
  115. foundry_mcp/dashboard/views/providers.py +83 -0
  116. foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
  117. foundry_mcp/dashboard/views/tool_usage.py +139 -0
  118. foundry_mcp/prompts/__init__.py +9 -0
  119. foundry_mcp/prompts/workflows.py +525 -0
  120. foundry_mcp/resources/__init__.py +9 -0
  121. foundry_mcp/resources/specs.py +591 -0
  122. foundry_mcp/schemas/__init__.py +38 -0
  123. foundry_mcp/schemas/intake-schema.json +89 -0
  124. foundry_mcp/schemas/sdd-spec-schema.json +414 -0
  125. foundry_mcp/server.py +150 -0
  126. foundry_mcp/tools/__init__.py +10 -0
  127. foundry_mcp/tools/unified/__init__.py +92 -0
  128. foundry_mcp/tools/unified/authoring.py +3620 -0
  129. foundry_mcp/tools/unified/context_helpers.py +98 -0
  130. foundry_mcp/tools/unified/documentation_helpers.py +268 -0
  131. foundry_mcp/tools/unified/environment.py +1341 -0
  132. foundry_mcp/tools/unified/error.py +479 -0
  133. foundry_mcp/tools/unified/health.py +225 -0
  134. foundry_mcp/tools/unified/journal.py +841 -0
  135. foundry_mcp/tools/unified/lifecycle.py +640 -0
  136. foundry_mcp/tools/unified/metrics.py +777 -0
  137. foundry_mcp/tools/unified/plan.py +876 -0
  138. foundry_mcp/tools/unified/pr.py +294 -0
  139. foundry_mcp/tools/unified/provider.py +589 -0
  140. foundry_mcp/tools/unified/research.py +1283 -0
  141. foundry_mcp/tools/unified/review.py +1042 -0
  142. foundry_mcp/tools/unified/review_helpers.py +314 -0
  143. foundry_mcp/tools/unified/router.py +102 -0
  144. foundry_mcp/tools/unified/server.py +565 -0
  145. foundry_mcp/tools/unified/spec.py +1283 -0
  146. foundry_mcp/tools/unified/task.py +3846 -0
  147. foundry_mcp/tools/unified/test.py +431 -0
  148. foundry_mcp/tools/unified/verification.py +520 -0
  149. foundry_mcp-0.8.22.dist-info/METADATA +344 -0
  150. foundry_mcp-0.8.22.dist-info/RECORD +153 -0
  151. foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
  152. foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
  153. foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
foundry_mcp/core/spec.py
@@ -0,0 +1,4119 @@
+ """
+ JSON spec file operations for SDD workflows.
+ Provides loading, saving, finding, and listing specs with atomic writes and backups.
+ """
+
+ import json
+ import re
+ import shutil
+ import subprocess
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Optional, Dict, Any, List, Tuple, Union
+
+ # Valid templates and categories for spec creation
+ # Note: Only 'empty' template is supported. Use phase templates to add structure.
+ TEMPLATES = ("empty",)
+ TEMPLATE_DESCRIPTIONS = {
+     "empty": "Blank spec with no phases - use phase templates to add structure",
+ }
+ CATEGORIES = ("investigation", "implementation", "refactoring", "decision", "research")
+
+ # Valid verification types for verify nodes
+ # - run-tests: Automated tests via mcp__foundry-mcp__test-run
+ # - fidelity: Implementation-vs-spec comparison via mcp__foundry-mcp__spec-review-fidelity
+ # - manual: Manual verification steps
+ VERIFICATION_TYPES = ("run-tests", "fidelity", "manual")
+
+ # Valid phase templates for reusable phase structures
+ PHASE_TEMPLATES = ("planning", "implementation", "testing", "security", "documentation")
+
+
+ def _requires_rich_task_fields(spec_data: Dict[str, Any]) -> bool:
+     """Check if spec requires rich task fields based on explicit complexity metadata."""
+     metadata = spec_data.get("metadata", {})
+     if not isinstance(metadata, dict):
+         return False
+
+     # Only check explicit complexity metadata (template no longer indicates complexity)
+     complexity = metadata.get("complexity")
+     if isinstance(complexity, str) and complexity.strip().lower() in {
+         "medium",
+         "complex",
+         "high",
+     }:
+         return True
+
+     return False
+
+
+ def _normalize_acceptance_criteria(value: Any) -> Optional[List[str]]:
+     if value is None:
+         return None
+     if isinstance(value, str):
+         cleaned = value.strip()
+         return [cleaned] if cleaned else []
+     if isinstance(value, list):
+         cleaned_items = []
+         for item in value:
+             if isinstance(item, str):
+                 cleaned = item.strip()
+                 if cleaned:
+                     cleaned_items.append(cleaned)
+         return cleaned_items
+     return []
+
+
+ def find_git_root() -> Optional[Path]:
+     """Find the root of the git repository."""
+     try:
+         result = subprocess.run(
+             ["git", "rev-parse", "--show-toplevel"],
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+         return Path(result.stdout.strip())
+     except (subprocess.CalledProcessError, FileNotFoundError):
+         return None
+
+
+ def find_specs_directory(provided_path: Optional[str] = None) -> Optional[Path]:
+     """
+     Discover the specs directory.
+
+     Args:
+         provided_path: Optional explicit path to specs directory or file
+
+     Returns:
+         Absolute Path to specs directory (containing pending/active/completed/archived),
+         or None if not found
+     """
+
+     def is_valid_specs_dir(p: Path) -> bool:
+         """Check if a directory is a valid specs directory."""
+         return (
+             (p / "pending").is_dir()
+             or (p / "active").is_dir()
+             or (p / "completed").is_dir()
+             or (p / "archived").is_dir()
+         )
+
+     if provided_path:
+         path = Path(provided_path).resolve()
+
+         if path.is_file():
+             path = path.parent
+
+         if not path.is_dir():
+             return None
+
+         if is_valid_specs_dir(path):
+             return path
+
+         specs_subdir = path / "specs"
+         if specs_subdir.is_dir() and is_valid_specs_dir(specs_subdir):
+             return specs_subdir
+
+         for parent in list(path.parents)[:5]:
+             if is_valid_specs_dir(parent):
+                 return parent
+             parent_specs = parent / "specs"
+             if parent_specs.is_dir() and is_valid_specs_dir(parent_specs):
+                 return parent_specs
+
+         return None
+
+     git_root = find_git_root()
+
+     if git_root:
+         search_paths = [
+             Path.cwd() / "specs",
+             git_root / "specs",
+         ]
+     else:
+         search_paths = [
+             Path.cwd() / "specs",
+             Path.cwd().parent / "specs",
+         ]
+
+     for p in search_paths:
+         if p.exists() and is_valid_specs_dir(p):
+             return p.resolve()
+
+     return None
+
+
+ def find_spec_file(spec_id: str, specs_dir: Path) -> Optional[Path]:
+     """
+     Find the spec file for a given spec ID.
+
+     Searches in pending/, active/, completed/, and archived/ subdirectories.
+
+     Args:
+         spec_id: Specification ID
+         specs_dir: Path to specs directory
+
+     Returns:
+         Absolute path to the spec file, or None if not found
+     """
+     search_dirs = ["pending", "active", "completed", "archived"]
+
+     for subdir in search_dirs:
+         spec_file = specs_dir / subdir / f"{spec_id}.json"
+         if spec_file.exists():
+             return spec_file
+
+     return None
+
+
+ def resolve_spec_file(
+     spec_name_or_path: str, specs_dir: Optional[Path] = None
+ ) -> Optional[Path]:
+     """
+     Resolve spec file from either a spec name or full path.
+
+     Args:
+         spec_name_or_path: Either a spec name or full path
+         specs_dir: Optional specs directory for name-based lookups
+
+     Returns:
+         Resolved Path object if found, None otherwise
+     """
+     path = Path(spec_name_or_path)
+
+     if path.is_absolute():
+         spec_file = path.resolve()
+         if spec_file.exists() and spec_file.suffix == ".json":
+             return spec_file
+         return None
+
+     search_name = spec_name_or_path
+     if spec_name_or_path.endswith(".json"):
+         spec_file = path.resolve()
+         if spec_file.exists() and spec_file.suffix == ".json":
+             return spec_file
+         search_name = path.stem
+
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if not specs_dir:
+         return None
+
+     return find_spec_file(search_name, specs_dir)
+
+
+ def _migrate_spec_fields(spec_data: Dict[str, Any]) -> Dict[str, Any]:
+     """
+     Migrate spec from dual-field format to canonical format.
+
+     Moves status, progress_percentage, and current_phase from metadata
+     to top-level (their canonical location). This handles specs created
+     before the field deduplication.
+
+     Args:
+         spec_data: Spec data dictionary (modified in place)
+
+     Returns:
+         The modified spec_data
+     """
+     if not spec_data:
+         return spec_data
+
+     metadata = spec_data.get("metadata", {})
+     computed_fields = ("status", "progress_percentage", "current_phase")
+
+     for field in computed_fields:
+         # If field exists in metadata but not at top-level, migrate it
+         if field in metadata and field not in spec_data:
+             spec_data[field] = metadata[field]
+         # Remove from metadata (canonical location is top-level)
+         metadata.pop(field, None)
+
+     return spec_data
+
+
+ def load_spec(
+     spec_id: str, specs_dir: Optional[Path] = None
+ ) -> Optional[Dict[str, Any]]:
+     """
+     Load the JSON spec file for a given spec ID or path.
+
+     Args:
+         spec_id: Specification ID or path to spec file
+         specs_dir: Path to specs directory (optional, auto-detected if not provided)
+
+     Returns:
+         Spec data dictionary, or None if not found
+     """
+     spec_file = resolve_spec_file(spec_id, specs_dir)
+
+     if not spec_file:
+         return None
+
+     try:
+         with open(spec_file, "r") as f:
+             spec_data = json.load(f)
+         # Migrate old specs to canonical field locations
+         return _migrate_spec_fields(spec_data)
+     except (json.JSONDecodeError, IOError):
+         return None
+
+
+ def save_spec(
+     spec_id: str,
+     spec_data: Dict[str, Any],
+     specs_dir: Optional[Path] = None,
+     backup: bool = True,
+     validate: bool = True,
+ ) -> bool:
+     """
+     Save JSON spec file with atomic write and optional backup.
+
+     Args:
+         spec_id: Specification ID or path to spec file
+         spec_data: Spec data to write
+         specs_dir: Path to specs directory (optional, auto-detected if not provided)
+         backup: Create backup before writing (default: True)
+         validate: Validate JSON before writing (default: True)
+
+     Returns:
+         True if successful, False otherwise
+     """
+     spec_file = resolve_spec_file(spec_id, specs_dir)
+
+     if not spec_file:
+         return False
+
+     if validate:
+         if not _validate_spec_structure(spec_data):
+             return False
+
+     spec_data["last_updated"] = (
+         datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+     )
+
+     if backup:
+         backup_spec(spec_id, specs_dir)
+
+     temp_file = spec_file.with_suffix(".tmp")
+     try:
+         with open(temp_file, "w") as f:
+             json.dump(spec_data, f, indent=2)
+         temp_file.replace(spec_file)
+         return True
+     except (IOError, OSError):
+         if temp_file.exists():
+             temp_file.unlink()
+         return False
+
+
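load_spec and save_spec form the module's basic round trip. A minimal sketch of editing one task's status (spec and task IDs are hypothetical; assumes the helpers above are importable from foundry_mcp.core.spec):

    from foundry_mcp.core.spec import load_spec, save_spec

    spec_id = "my-feature-2025-01-15-001"  # hypothetical
    spec = load_spec(spec_id)
    if spec is not None and "task-1-1" in spec.get("hierarchy", {}):
        spec["hierarchy"]["task-1-1"]["status"] = "in_progress"
        # save_spec validates the structure, stamps last_updated,
        # takes a backup, then writes atomically via a .tmp replace
        save_spec(spec_id, spec)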
+ # Default retention policy for versioned backups
+ DEFAULT_MAX_BACKUPS = 10
+
+
+ def backup_spec(
+     spec_id: str,
+     specs_dir: Optional[Path] = None,
+     max_backups: int = DEFAULT_MAX_BACKUPS,
+ ) -> Optional[Path]:
+     """
+     Create a versioned backup of the JSON spec file.
+
+     Creates timestamped backups in .backups/{spec_id}/ directory with a
+     configurable retention policy. Also maintains a latest.json copy for
+     quick access to the most recent backup.
+
+     Directory structure:
+         .backups/
+         └── {spec_id}/
+             ├── 2025-12-26T18-20-13.456789.json  # Timestamped backups (μs precision)
+             ├── 2025-12-26T18-30-45.123456.json
+             └── latest.json                      # Copy of most recent
+
+     Args:
+         spec_id: Specification ID or path to spec file
+         specs_dir: Path to specs directory (optional, auto-detected if not provided)
+         max_backups: Maximum number of versioned backups to retain (default: 10).
+             Set to 0 for unlimited backups.
+
+     Returns:
+         Path to backup file if created, None otherwise
+     """
+     spec_file = resolve_spec_file(spec_id, specs_dir)
+
+     if not spec_file:
+         return None
+
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if not specs_dir:
+         return None
+
+     # Create versioned backup directory: .backups/{spec_id}/
+     spec_backups_dir = specs_dir / ".backups" / spec_id
+     spec_backups_dir.mkdir(parents=True, exist_ok=True)
+
+     # Generate timestamp filename (ISO format with safe characters)
+     # Include full microseconds to handle rapid successive saves
+     now = datetime.now(timezone.utc)
+     timestamp = now.strftime("%Y-%m-%dT%H-%M-%S")
+     micros = now.strftime("%f")  # Full 6-digit microseconds
+     backup_file = spec_backups_dir / f"{timestamp}.{micros}.json"
+
+     try:
+         # Create the timestamped backup
+         shutil.copy2(spec_file, backup_file)
+
+         # Update latest.json to point to the newest backup
+         latest_file = spec_backups_dir / "latest.json"
+         shutil.copy2(backup_file, latest_file)
+
+         # Apply retention policy
+         if max_backups > 0:
+             _apply_backup_retention(spec_backups_dir, max_backups)
+
+         return backup_file
+     except (IOError, OSError):
+         return None
+
+
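Backups are taken implicitly by save_spec, but can also be taken directly; a sketch with a wider retention window (spec ID hypothetical):

    from foundry_mcp.core.spec import backup_spec

    backup_path = backup_spec("my-feature-2025-01-15-001", max_backups=25)
    if backup_path is not None:
        print(f"backup written to {backup_path}")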
+ def _apply_backup_retention(backups_dir: Path, max_backups: int) -> int:
+     """
+     Apply retention policy by removing oldest backups exceeding the limit.
+
+     Args:
+         backups_dir: Path to the spec's backup directory
+         max_backups: Maximum number of backups to retain
+
+     Returns:
+         Number of backups deleted
+     """
+     # List all timestamped backup files (exclude latest.json)
+     backup_files = sorted(
+         [
+             f for f in backups_dir.glob("*.json")
+             if f.name != "latest.json" and f.is_file()
+         ],
+         key=lambda p: p.name,  # Sort by filename (timestamp order)
+     )
+
+     deleted_count = 0
+     while len(backup_files) > max_backups:
+         oldest = backup_files.pop(0)
+         try:
+             oldest.unlink()
+             deleted_count += 1
+         except (IOError, OSError):
+             pass  # Best effort deletion
+
+     return deleted_count
+
+
+ # Default pagination settings for backup listing
+ DEFAULT_BACKUP_PAGE_SIZE = 50
+ MAX_BACKUP_PAGE_SIZE = 100
+
+
+ def list_spec_backups(
+     spec_id: str,
+     specs_dir: Optional[Path] = None,
+     cursor: Optional[str] = None,
+     limit: Optional[int] = None,
+ ) -> Dict[str, Any]:
+     """
+     List backups for a spec with cursor-based pagination.
+
+     Lists timestamped backup files chronologically (newest first) from the
+     .backups/{spec_id}/ directory. Returns file metadata including timestamp,
+     path, and size. Designed for use with spec.history action.
+
+     Args:
+         spec_id: Specification ID to list backups for
+         specs_dir: Base specs directory (uses find_specs_directory if None)
+         cursor: Pagination cursor from previous call (base64-encoded JSON)
+         limit: Maximum backups per page (default: 50, max: 100)
+
+     Returns:
+         Dict with structure:
+             {
+                 "spec_id": str,
+                 "backups": [
+                     {
+                         "timestamp": str,        # ISO-ish format from filename
+                         "file_path": str,        # Absolute path to backup file
+                         "file_size_bytes": int   # File size
+                     },
+                     ...
+                 ],
+                 "count": int,
+                 "pagination": {
+                     "cursor": Optional[str],
+                     "has_more": bool,
+                     "page_size": int
+                 }
+             }
+
+         Returns empty backups list if spec or backup directory doesn't exist.
+     """
+     # Import pagination helpers
+     from foundry_mcp.core.pagination import (
+         CursorError,
+         decode_cursor,
+         encode_cursor,
+         normalize_page_size,
+     )
+
+     # Resolve specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     # Normalize page size
+     page_size = normalize_page_size(
+         limit, default=DEFAULT_BACKUP_PAGE_SIZE, maximum=MAX_BACKUP_PAGE_SIZE
+     )
+
+     result: Dict[str, Any] = {
+         "spec_id": spec_id,
+         "backups": [],
+         "count": 0,
+         "pagination": {
+             "cursor": None,
+             "has_more": False,
+             "page_size": page_size,
+         },
+     }
+
+     if not specs_dir:
+         return result
+
+     # Locate backup directory: .backups/{spec_id}/
+     backups_dir = specs_dir / ".backups" / spec_id
+     if not backups_dir.is_dir():
+         return result
+
+     # List all timestamped backup files (exclude latest.json)
+     backup_files = sorted(
+         [
+             f
+             for f in backups_dir.glob("*.json")
+             if f.name != "latest.json" and f.is_file()
+         ],
+         key=lambda p: p.name,
+         reverse=True,  # Newest first
+     )
+
+     if not backup_files:
+         return result
+
+     # Handle cursor-based pagination
+     start_after_timestamp: Optional[str] = None
+     if cursor:
+         try:
+             cursor_data = decode_cursor(cursor)
+             start_after_timestamp = cursor_data.get("last_id")
+         except CursorError:
+             # Invalid cursor - return from beginning
+             pass
+
+     # Find start position based on cursor
+     if start_after_timestamp:
+         start_index = 0
+         for idx, backup_file in enumerate(backup_files):
+             # Filename without extension is the timestamp
+             timestamp = backup_file.stem
+             if timestamp == start_after_timestamp:
+                 start_index = idx + 1
+                 break
+         backup_files = backup_files[start_index:]
+
+     # Fetch one extra to check for more pages
+     page_files = backup_files[: page_size + 1]
+     has_more = len(page_files) > page_size
+     if has_more:
+         page_files = page_files[:page_size]
+
+     # Build backup entries with metadata
+     backups = []
+     for backup_file in page_files:
+         try:
+             file_stat = backup_file.stat()
+             backups.append(
+                 {
+                     "timestamp": backup_file.stem,
+                     "file_path": str(backup_file.absolute()),
+                     "file_size_bytes": file_stat.st_size,
+                 }
+             )
+         except OSError:
+             # Skip files we can't stat
+             continue
+
+     # Generate next cursor if more pages exist
+     next_cursor = None
+     if has_more and backups:
+         next_cursor = encode_cursor({"last_id": backups[-1]["timestamp"]})
+
+     result["backups"] = backups
+     result["count"] = len(backups)
+     result["pagination"] = {
+         "cursor": next_cursor,
+         "has_more": has_more,
+         "page_size": page_size,
+     }
+
+     return result
+
+
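Walking the full backup history with the cursor protocol described above might look like this (spec ID hypothetical):

    from foundry_mcp.core.spec import list_spec_backups

    cursor = None
    while True:
        page = list_spec_backups("my-feature-2025-01-15-001", cursor=cursor)
        for entry in page["backups"]:
            print(entry["timestamp"], entry["file_size_bytes"])
        if not page["pagination"]["has_more"]:
            break
        cursor = page["pagination"]["cursor"]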
+ # Default settings for diff operations
+ DEFAULT_DIFF_MAX_RESULTS = 100
+
+
+ def _load_spec_source(
+     source: Union[str, Path, Dict[str, Any]],
+     specs_dir: Optional[Path] = None,
+ ) -> Optional[Dict[str, Any]]:
+     """
+     Load a spec from various source types.
+
+     Args:
+         source: Spec ID, file path, or already-loaded dict
+         specs_dir: Base specs directory for ID lookups
+
+     Returns:
+         Loaded spec dict, or None if not found/invalid
+     """
+     # Already a dict - return as-is
+     if isinstance(source, dict):
+         return source
+
+     # Path object or string path
+     source_path = Path(source) if isinstance(source, str) else source
+
+     # If it's an existing file path, load directly
+     if source_path.is_file():
+         try:
+             with open(source_path, "r") as f:
+                 return json.load(f)
+         except (IOError, json.JSONDecodeError):
+             return None
+
+     # Otherwise treat as spec_id and use resolve_spec_file
+     if isinstance(source, str):
+         return load_spec(source, specs_dir)
+
+     return None
+
+
+ def _diff_node(
+     old_node: Dict[str, Any],
+     new_node: Dict[str, Any],
+     node_id: str,
+ ) -> Optional[Dict[str, Any]]:
+     """
+     Compare two nodes and return field-level changes.
+
+     Args:
+         old_node: Original node data
+         new_node: Updated node data
+         node_id: Node identifier for the result
+
+     Returns:
+         Dict with node info and field_changes list, or None if no changes
+     """
+     # Fields to compare (excluding computed/transient fields)
+     compare_fields = [
+         "title", "status", "type", "parent", "children", "metadata", "dependencies"
+     ]
+
+     field_changes = []
+     for field in compare_fields:
+         old_val = old_node.get(field)
+         new_val = new_node.get(field)
+
+         if old_val != new_val:
+             field_changes.append({
+                 "field": field,
+                 "old": old_val,
+                 "new": new_val,
+             })
+
+     if not field_changes:
+         return None
+
+     return {
+         "node_id": node_id,
+         "type": new_node.get("type", old_node.get("type")),
+         "title": new_node.get("title", old_node.get("title")),
+         "field_changes": field_changes,
+     }
+
+
+ def diff_specs(
+     source: Union[str, Path, Dict[str, Any]],
+     target: Union[str, Path, Dict[str, Any]],
+     specs_dir: Optional[Path] = None,
+     max_results: Optional[int] = None,
+ ) -> Dict[str, Any]:
+     """
+     Compare two specs and categorize changes as added, removed, or modified.
+
+     Compares hierarchy nodes between source (base/older) and target (comparison/newer)
+     specs, identifying structural and content changes at the task level.
+
+     Args:
+         source: Base spec - spec_id, file path (including backup), or loaded dict
+         target: Comparison spec - spec_id, file path, or loaded dict
+         specs_dir: Base specs directory (auto-detected if None)
+         max_results: Maximum changes to return per category (default: 100)
+
+     Returns:
+         Dict with structure:
+             {
+                 "summary": {
+                     "added_count": int,
+                     "removed_count": int,
+                     "modified_count": int,
+                     "total_changes": int
+                 },
+                 "changes": {
+                     "added": [{"node_id": str, "type": str, "title": str}, ...],
+                     "removed": [{"node_id": str, "type": str, "title": str}, ...],
+                     "modified": [{
+                         "node_id": str,
+                         "type": str,
+                         "title": str,
+                         "field_changes": [{"field": str, "old": Any, "new": Any}, ...]
+                     }, ...]
+                 },
+                 "partial": bool,  # True if results truncated
+                 "source_spec_id": Optional[str],
+                 "target_spec_id": Optional[str]
+             }
+
+         Returns error structure if specs cannot be loaded:
+             {"error": str, "success": False}
+     """
+     # Resolve specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     # Load source spec
+     source_spec = _load_spec_source(source, specs_dir)
+     if source_spec is None:
+         return {
+             "error": f"Could not load source spec: {source}",
+             "success": False,
+         }
+
+     # Load target spec
+     target_spec = _load_spec_source(target, specs_dir)
+     if target_spec is None:
+         return {
+             "error": f"Could not load target spec: {target}",
+             "success": False,
+         }
+
+     # Get hierarchies
+     source_hierarchy = source_spec.get("hierarchy", {})
+     target_hierarchy = target_spec.get("hierarchy", {})
+
+     source_ids = set(source_hierarchy.keys())
+     target_ids = set(target_hierarchy.keys())
+
+     # Categorize changes
+     added_ids = target_ids - source_ids
+     removed_ids = source_ids - target_ids
+     common_ids = source_ids & target_ids
+
+     # Apply max_results limit
+     limit = max_results if max_results is not None else DEFAULT_DIFF_MAX_RESULTS
+     partial = False
+
+     # Build added list
+     added = []
+     for node_id in sorted(added_ids):
+         if len(added) >= limit:
+             partial = True
+             break
+         node = target_hierarchy[node_id]
+         added.append({
+             "node_id": node_id,
+             "type": node.get("type"),
+             "title": node.get("title"),
+         })
+
+     # Build removed list
+     removed = []
+     for node_id in sorted(removed_ids):
+         if len(removed) >= limit:
+             partial = True
+             break
+         node = source_hierarchy[node_id]
+         removed.append({
+             "node_id": node_id,
+             "type": node.get("type"),
+             "title": node.get("title"),
+         })
+
+     # Build modified list
+     modified = []
+     for node_id in sorted(common_ids):
+         if len(modified) >= limit:
+             partial = True
+             break
+         old_node = source_hierarchy[node_id]
+         new_node = target_hierarchy[node_id]
+         diff = _diff_node(old_node, new_node, node_id)
+         if diff:
+             modified.append(diff)
+
+     # Calculate counts. Added/removed totals always reflect the full sets;
+     # the modified total is only recomputed when results were not truncated.
+     total_added = len(added_ids)
+     total_removed = len(removed_ids)
+     if partial:
+         total_modified = len(modified)
+     else:
+         total_modified = sum(
+             1
+             for nid in common_ids
+             if _diff_node(source_hierarchy[nid], target_hierarchy[nid], nid)
+         )
+
+     return {
+         "summary": {
+             "added_count": total_added,
+             "removed_count": total_removed,
+             "modified_count": total_modified,
+             "total_changes": total_added + total_removed + total_modified,
+         },
+         "changes": {
+             "added": added,
+             "removed": removed,
+             "modified": modified,
+         },
+         "partial": partial,
+         "source_spec_id": source_spec.get("spec_id"),
+         "target_spec_id": target_spec.get("spec_id"),
+     }
+
+
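Since diff_specs accepts file paths as well as IDs, one natural use is comparing the working spec against its latest backup; a sketch (spec ID hypothetical):

    from foundry_mcp.core.spec import diff_specs, find_specs_directory

    specs_dir = find_specs_directory()
    if specs_dir is not None:
        latest = specs_dir / ".backups" / "my-feature-2025-01-15-001" / "latest.json"
        report = diff_specs(latest, "my-feature-2025-01-15-001", specs_dir=specs_dir)
        if "summary" in report:
            print(report["summary"])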
+ def rollback_spec(
+     spec_id: str,
+     timestamp: str,
+     specs_dir: Optional[Path] = None,
+     dry_run: bool = False,
+     create_backup: bool = True,
+ ) -> Dict[str, Any]:
+     """
+     Restore a spec from a specific backup timestamp.
+
+     Creates a safety backup of the current state before rollback (by default),
+     then replaces the spec file with the contents from the specified backup.
+
+     Args:
+         spec_id: Specification ID to rollback
+         timestamp: Backup timestamp to restore (e.g., "2025-12-26T18-20-13.456789")
+         specs_dir: Base specs directory (auto-detected if None)
+         dry_run: If True, validate and return what would happen without changes
+         create_backup: If True (default), create safety backup before rollback
+
+     Returns:
+         Dict with structure:
+             {
+                 "success": bool,
+                 "spec_id": str,
+                 "timestamp": str,
+                 "dry_run": bool,
+                 "backup_created": Optional[str],  # Safety backup path
+                 "restored_from": str,             # Source backup path
+                 "error": Optional[str]            # Error if failed
+             }
+     """
+     # Resolve specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     result: Dict[str, Any] = {
+         "success": False,
+         "spec_id": spec_id,
+         "timestamp": timestamp,
+         "dry_run": dry_run,
+         "backup_created": None,
+         "restored_from": None,
+         "error": None,
+     }
+
+     if not specs_dir:
+         result["error"] = "Could not find specs directory"
+         return result
+
+     # Find current spec file
+     spec_file = find_spec_file(spec_id, specs_dir)
+     if not spec_file:
+         result["error"] = f"Spec '{spec_id}' not found"
+         return result
+
+     # Locate backup directory
+     backups_dir = specs_dir / ".backups" / spec_id
+     if not backups_dir.is_dir():
+         result["error"] = f"No backups directory for spec '{spec_id}'"
+         return result
+
+     # Find the backup file matching the timestamp
+     backup_file = backups_dir / f"{timestamp}.json"
+     if not backup_file.is_file():
+         result["error"] = f"Backup not found for timestamp '{timestamp}'"
+         return result
+
+     result["restored_from"] = str(backup_file)
+
+     # Validate backup is valid JSON
+     try:
+         with open(backup_file, "r") as f:
+             backup_data = json.load(f)
+         if not isinstance(backup_data, dict):
+             result["error"] = "Backup file is not a valid spec (not a JSON object)"
+             return result
+     except json.JSONDecodeError as e:
+         result["error"] = f"Backup file is not valid JSON: {e}"
+         return result
+     except IOError as e:
+         result["error"] = f"Could not read backup file: {e}"
+         return result
+
+     # dry_run - return success without making changes
+     if dry_run:
+         result["success"] = True
+         if create_backup:
+             result["backup_created"] = "(would be created)"
+         return result
+
+     # Create safety backup of current state before rollback
+     if create_backup:
+         safety_backup = backup_spec(spec_id, specs_dir)
+         if safety_backup:
+             result["backup_created"] = str(safety_backup)
+
+     # Perform rollback - copy backup to spec location
+     try:
+         shutil.copy2(backup_file, spec_file)
+         result["success"] = True
+     except (IOError, OSError) as e:
+         result["error"] = f"Failed to restore backup: {e}"
+         return result
+
+     return result
+
+
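The dry_run flag makes rollback safe to script; a sketch (spec ID and timestamp hypothetical):

    from foundry_mcp.core.spec import rollback_spec

    spec_id = "my-feature-2025-01-15-001"
    ts = "2025-12-26T18-20-13.456789"  # a timestamp from list_spec_backups
    preview = rollback_spec(spec_id, ts, dry_run=True)
    if preview["success"]:
        result = rollback_spec(spec_id, ts)  # safety backup taken first by default
        print(result["backup_created"], result["restored_from"])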
+ def _validate_spec_structure(spec_data: Dict[str, Any]) -> bool:
+     """
+     Validate basic JSON spec file structure.
+
+     Args:
+         spec_data: Spec data dictionary
+
+     Returns:
+         True if valid, False otherwise
+     """
+     required_fields = ["spec_id", "hierarchy"]
+     for field in required_fields:
+         if field not in spec_data:
+             return False
+
+     hierarchy = spec_data.get("hierarchy", {})
+     if not isinstance(hierarchy, dict):
+         return False
+
+     for node_id, node_data in hierarchy.items():
+         if not isinstance(node_data, dict):
+             return False
+         if "type" not in node_data or "status" not in node_data:
+             return False
+         if node_data["status"] not in [
+             "pending",
+             "in_progress",
+             "completed",
+             "blocked",
+             "failed",
+         ]:
+             return False
+
+     return True
+
+
+ def list_specs(
+     specs_dir: Optional[Path] = None, status: Optional[str] = None
+ ) -> List[Dict[str, Any]]:
+     """
+     List specification files with optional filtering.
+
+     Args:
+         specs_dir: Base specs directory (auto-detected if not provided)
+         status: Filter by status folder (active, completed, archived, pending, or None for all)
+
+     Returns:
+         List of spec info dictionaries
+     """
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if not specs_dir:
+         return []
+
+     if status and status != "all":
+         status_dirs = [specs_dir / status]
+     else:
+         status_dirs = [
+             specs_dir / "active",
+             specs_dir / "completed",
+             specs_dir / "archived",
+             specs_dir / "pending",
+         ]
+
+     specs_info = []
+
+     for status_dir in status_dirs:
+         if not status_dir.exists():
+             continue
+
+         status_name = status_dir.name
+
+         json_files = sorted(status_dir.glob("*.json"))
+
+         for json_file in json_files:
+             spec_data = load_spec(json_file.stem, specs_dir)
+             if not spec_data:
+                 continue
+
+             metadata = spec_data.get("metadata", {})
+             hierarchy = spec_data.get("hierarchy", {})
+
+             total_tasks = len(hierarchy)
+             completed_tasks = sum(
+                 1 for task in hierarchy.values() if task.get("status") == "completed"
+             )
+
+             progress_pct = 0
+             if total_tasks > 0:
+                 progress_pct = int((completed_tasks / total_tasks) * 100)
+
+             info = {
+                 "spec_id": json_file.stem,
+                 "status": status_name,
+                 "title": metadata.get("title", spec_data.get("title", "Untitled")),
+                 "total_tasks": total_tasks,
+                 "completed_tasks": completed_tasks,
+                 "progress_percentage": progress_pct,
+                 "current_phase": metadata.get("current_phase"),
+             }
+
+             specs_info.append(info)
+
+     # Sort: active first, then by completion % (highest first)
+     specs_info.sort(
+         key=lambda s: (
+             0 if s.get("status") == "active" else 1,
+             -s.get("progress_percentage", 0),
+         )
+     )
+
+     return specs_info
+
+
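A quick progress report over active specs might look like this, using the info dictionaries documented above:

    from foundry_mcp.core.spec import list_specs

    for info in list_specs(status="active"):
        print(
            f"{info['spec_id']}: {info['progress_percentage']}% "
            f"({info['completed_tasks']}/{info['total_tasks']})"
        )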
+ def get_node(spec_data: Dict[str, Any], node_id: str) -> Optional[Dict[str, Any]]:
+     """
+     Get a specific node from the hierarchy.
+
+     Args:
+         spec_data: JSON spec file data
+         node_id: Node identifier
+
+     Returns:
+         Node data dictionary or None if not found
+     """
+     hierarchy = spec_data.get("hierarchy", {})
+     return hierarchy.get(node_id)
+
+
+ def update_node(
+     spec_data: Dict[str, Any], node_id: str, updates: Dict[str, Any]
+ ) -> bool:
+     """
+     Update a node in the hierarchy.
+
+     Special handling for metadata: existing metadata fields are preserved
+     and merged with new metadata fields.
+
+     Args:
+         spec_data: JSON spec file data
+         node_id: Node identifier
+         updates: Dictionary of fields to update
+
+     Returns:
+         True if node exists and was updated, False otherwise
+     """
+     hierarchy = spec_data.get("hierarchy", {})
+
+     if node_id not in hierarchy:
+         return False
+
+     node = hierarchy[node_id]
+
+     if "metadata" in updates:
+         existing_metadata = node.get("metadata", {})
+         new_metadata = updates["metadata"]
+         updates = updates.copy()
+         updates["metadata"] = {**existing_metadata, **new_metadata}
+
+     node.update(updates)
+     return True
+
+
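The metadata merge is the important subtlety here: a partial metadata dict augments the node rather than replacing its metadata. A sketch (IDs hypothetical; reviewed_by is an arbitrary example key):

    from foundry_mcp.core.spec import get_node, load_spec, save_spec, update_node

    spec = load_spec("my-feature-2025-01-15-001")
    if spec and get_node(spec, "task-1-1"):
        # Existing metadata keys on task-1-1 survive this update
        update_node(spec, "task-1-1", {"metadata": {"reviewed_by": "alice"}})
        save_spec("my-feature-2025-01-15-001", spec)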
+ # =============================================================================
+ # Spec Creation Functions
+ # =============================================================================
+
+
+ def generate_spec_id(name: str) -> str:
+     """
+     Generate a spec ID from a human-readable name.
+
+     Args:
+         name: Human-readable spec name.
+
+     Returns:
+         URL-safe spec ID with date suffix (e.g., "my-feature-2025-01-15-001").
+     """
+     # Normalize: lowercase, replace spaces/special chars with hyphens
+     slug = re.sub(r"[^a-z0-9]+", "-", name.lower()).strip("-")
+     # Add date suffix
+     date_suffix = datetime.now(timezone.utc).strftime("%Y-%m-%d")
+     # Add sequence number (001 for new specs)
+     return f"{slug}-{date_suffix}-001"
+
+
+ def _add_phase_verification(
+     hierarchy: Dict[str, Any], phase_num: int, phase_id: str
+ ) -> None:
+     """
+     Add verify nodes (auto + fidelity) to a phase.
+
+     Args:
+         hierarchy: The hierarchy dict to modify.
+         phase_num: Phase number (1, 2, 3, etc.).
+         phase_id: Phase node ID (e.g., "phase-1").
+     """
+     verify_auto_id = f"verify-{phase_num}-1"
+     verify_fidelity_id = f"verify-{phase_num}-2"
+
+     # Run tests verification
+     hierarchy[verify_auto_id] = {
+         "type": "verify",
+         "title": "Run tests",
+         "status": "pending",
+         "parent": phase_id,
+         "children": [],
+         "total_tasks": 1,
+         "completed_tasks": 0,
+         "metadata": {
+             "verification_type": "run-tests",
+             "mcp_tool": "mcp__foundry-mcp__test-run",
+             "expected": "All tests pass",
+         },
+         "dependencies": {
+             "blocks": [verify_fidelity_id],
+             "blocked_by": [],
+             "depends": [],
+         },
+     }
+
+     # Fidelity verification (spec review)
+     hierarchy[verify_fidelity_id] = {
+         "type": "verify",
+         "title": "Fidelity review",
+         "status": "pending",
+         "parent": phase_id,
+         "children": [],
+         "total_tasks": 1,
+         "completed_tasks": 0,
+         "metadata": {
+             "verification_type": "fidelity",
+             "mcp_tool": "mcp__foundry-mcp__spec-review-fidelity",
+             "scope": "phase",
+             "target": phase_id,
+             "expected": "Implementation matches specification",
+         },
+         "dependencies": {
+             "blocks": [],
+             "blocked_by": [verify_auto_id],
+             "depends": [],
+         },
+     }
+
+     # Update phase children and task count
+     hierarchy[phase_id]["children"].extend([verify_auto_id, verify_fidelity_id])
+     hierarchy[phase_id]["total_tasks"] += 2
+
+
+ def _generate_phase_id(hierarchy: Dict[str, Any]) -> Tuple[str, int]:
+     """Generate the next phase ID and numeric suffix."""
+     pattern = re.compile(r"^phase-(\d+)$")
+     max_id = 0
+     for node_id in hierarchy.keys():
+         match = pattern.match(node_id)
+         if match:
+             max_id = max(max_id, int(match.group(1)))
+     next_id = max_id + 1
+     return f"phase-{next_id}", next_id
+
+
+ def add_phase(
+     spec_id: str,
+     title: str,
+     description: Optional[str] = None,
+     purpose: Optional[str] = None,
+     estimated_hours: Optional[float] = None,
+     position: Optional[int] = None,
+     link_previous: bool = True,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Add a new phase under spec-root and scaffold verification tasks.
+
+     Args:
+         spec_id: Specification ID to mutate.
+         title: Phase title.
+         description: Optional phase description.
+         purpose: Optional purpose/goal metadata string.
+         estimated_hours: Optional estimated hours for the phase.
+         position: Optional zero-based insertion index in spec-root children.
+         link_previous: Whether to automatically block on the previous phase when appending.
+         specs_dir: Specs directory override.
+
+     Returns:
+         Tuple of (result_dict, error_message).
+     """
+     if not spec_id or not spec_id.strip():
+         return None, "Specification ID is required"
+
+     if not title or not title.strip():
+         return None, "Phase title is required"
+
+     if estimated_hours is not None and estimated_hours < 0:
+         return None, "estimated_hours must be non-negative"
+
+     title = title.strip()
+
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if specs_dir is None:
+         return (
+             None,
+             "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+         )
+
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if spec_path is None:
+         return None, f"Specification '{spec_id}' not found"
+
+     spec_data = load_spec(spec_id, specs_dir)
+     if spec_data is None:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     hierarchy = spec_data.get("hierarchy", {})
+     spec_root = hierarchy.get("spec-root")
+
+     if spec_root is None:
+         return None, "Specification root node 'spec-root' not found"
+
+     if spec_root.get("type") not in {"spec", "root"}:
+         return None, "Specification root node has invalid type"
+
+     children = spec_root.get("children", []) or []
+     if not isinstance(children, list):
+         children = []
+
+     insert_index = len(children)
+     if position is not None and position >= 0:
+         insert_index = min(position, len(children))
+
+     phase_id, phase_num = _generate_phase_id(hierarchy)
+
+     metadata: Dict[str, Any] = {
+         "purpose": (purpose.strip() if purpose else ""),
+     }
+     if description:
+         metadata["description"] = description.strip()
+     if estimated_hours is not None:
+         metadata["estimated_hours"] = estimated_hours
+
+     phase_node = {
+         "type": "phase",
+         "title": title,
+         "status": "pending",
+         "parent": "spec-root",
+         "children": [],
+         "total_tasks": 0,
+         "completed_tasks": 0,
+         "metadata": metadata,
+         "dependencies": {
+             "blocks": [],
+             "blocked_by": [],
+             "depends": [],
+         },
+     }
+
+     hierarchy[phase_id] = phase_node
+
+     if insert_index == len(children):
+         children.append(phase_id)
+     else:
+         children.insert(insert_index, phase_id)
+     spec_root["children"] = children
+
+     linked_phase_id: Optional[str] = None
+     if link_previous and insert_index > 0 and insert_index == len(children) - 1:
+         candidate = children[insert_index - 1]
+         previous = hierarchy.get(candidate)
+         if previous and previous.get("type") == "phase":
+             linked_phase_id = candidate
+             prev_deps = previous.setdefault(
+                 "dependencies",
+                 {
+                     "blocks": [],
+                     "blocked_by": [],
+                     "depends": [],
+                 },
+             )
+             blocks = prev_deps.setdefault("blocks", [])
+             if phase_id not in blocks:
+                 blocks.append(phase_id)
+             phase_node["dependencies"]["blocked_by"].append(candidate)
+
+     _add_phase_verification(hierarchy, phase_num, phase_id)
+
+     phase_task_total = phase_node.get("total_tasks", 0)
+     total_tasks = spec_root.get("total_tasks", 0)
+     spec_root["total_tasks"] = total_tasks + phase_task_total
+
+     # Update spec-level estimated hours if provided
+     if estimated_hours is not None:
+         spec_metadata = spec_data.setdefault("metadata", {})
+         current_hours = spec_metadata.get("estimated_hours")
+         if isinstance(current_hours, (int, float)):
+             spec_metadata["estimated_hours"] = current_hours + estimated_hours
+         else:
+             spec_metadata["estimated_hours"] = estimated_hours
+
+     saved = save_spec(spec_id, spec_data, specs_dir)
+     if not saved:
+         return None, "Failed to save specification"
+
+     verify_ids = [f"verify-{phase_num}-1", f"verify-{phase_num}-2"]
+
+     return {
+         "spec_id": spec_id,
+         "phase_id": phase_id,
+         "title": title,
+         "position": insert_index,
+         "linked_previous": linked_phase_id,
+         "verify_tasks": verify_ids,
+     }, None
+
+
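The (result, error) tuple convention keeps call sites simple; a sketch (spec ID hypothetical):

    from foundry_mcp.core.spec import add_phase

    result, err = add_phase(
        "my-feature-2025-01-15-001",
        "Implementation",
        purpose="Build the core feature",
        estimated_hours=6.0,
    )
    if err:
        print(f"add_phase failed: {err}")
    else:
        # e.g. phase-1 and its scaffolded ['verify-1-1', 'verify-1-2']
        print(result["phase_id"], result["verify_tasks"])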
+ def add_phase_bulk(
+     spec_id: str,
+     phase_title: str,
+     tasks: List[Dict[str, Any]],
+     phase_description: Optional[str] = None,
+     phase_purpose: Optional[str] = None,
+     phase_estimated_hours: Optional[float] = None,
+     metadata_defaults: Optional[Dict[str, Any]] = None,
+     position: Optional[int] = None,
+     link_previous: bool = True,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Add a new phase with pre-defined tasks in a single atomic operation.
+
+     Creates a phase and all specified tasks/verify nodes without auto-generating
+     verification scaffolding. This enables creating complete phase structures
+     in one operation.
+
+     Args:
+         spec_id: Specification ID to mutate.
+         phase_title: Phase title.
+         tasks: List of task definitions, each containing:
+             - type: "task" or "verify" (required)
+             - title: Task title (required)
+             - description: Optional description
+             - acceptance_criteria: Optional list of acceptance criteria
+             - task_category: Optional task category
+             - file_path: Optional associated file path
+             - estimated_hours: Optional time estimate
+             - verification_type: Optional verification type for verify tasks
+         phase_description: Optional phase description.
+         phase_purpose: Optional purpose/goal metadata string.
+         phase_estimated_hours: Optional estimated hours for the phase.
+         metadata_defaults: Optional defaults applied to tasks missing explicit values.
+             Supported keys: task_category, category, acceptance_criteria, estimated_hours
+         position: Optional zero-based insertion index in spec-root children.
+         link_previous: Whether to automatically block on the previous phase.
+         specs_dir: Specs directory override.
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"phase_id": ..., "tasks_created": [...], ...}, None)
+         On failure: (None, "error message")
+     """
+     # Validate required parameters
+     if not spec_id or not spec_id.strip():
+         return None, "Specification ID is required"
+
+     if not phase_title or not phase_title.strip():
+         return None, "Phase title is required"
+
+     if not tasks or not isinstance(tasks, list) or len(tasks) == 0:
+         return None, "At least one task definition is required"
+
+     if phase_estimated_hours is not None and phase_estimated_hours < 0:
+         return None, "phase_estimated_hours must be non-negative"
+
+     phase_title = phase_title.strip()
+     defaults = metadata_defaults or {}
+
+     # Validate metadata_defaults values
+     if defaults:
+         default_est_hours = defaults.get("estimated_hours")
+         if default_est_hours is not None:
+             if not isinstance(default_est_hours, (int, float)) or default_est_hours < 0:
+                 return None, "metadata_defaults.estimated_hours must be a non-negative number"
+         default_category = defaults.get("task_category")
+         if default_category is None:
+             default_category = defaults.get("category")
+         if default_category is not None and not isinstance(default_category, str):
+             return None, "metadata_defaults.task_category must be a string"
+         default_acceptance = defaults.get("acceptance_criteria")
+         if default_acceptance is not None and not isinstance(
+             default_acceptance, (list, str)
+         ):
+             return None, "metadata_defaults.acceptance_criteria must be a list of strings"
+         if isinstance(default_acceptance, list) and any(
+             not isinstance(item, str) for item in default_acceptance
+         ):
+             return None, "metadata_defaults.acceptance_criteria must be a list of strings"
+
+     # Validate each task definition
+     valid_task_types = {"task", "verify"}
+     for idx, task_def in enumerate(tasks):
+         if not isinstance(task_def, dict):
+             return None, f"Task at index {idx} must be a dictionary"
+
+         task_type = task_def.get("type")
+         if not task_type or task_type not in valid_task_types:
+             return None, f"Task at index {idx} must have type 'task' or 'verify'"
+
+         task_title = task_def.get("title")
+         if not task_title or not isinstance(task_title, str) or not task_title.strip():
+             return None, f"Task at index {idx} must have a non-empty title"
+
+         est_hours = task_def.get("estimated_hours")
+         if est_hours is not None:
+             if not isinstance(est_hours, (int, float)) or est_hours < 0:
+                 return None, f"Task at index {idx} has invalid estimated_hours"
+
+         task_category = task_def.get("task_category")
+         if task_category is not None and not isinstance(task_category, str):
+             return None, f"Task at index {idx} has invalid task_category"
+
+         legacy_category = task_def.get("category")
+         if legacy_category is not None and not isinstance(legacy_category, str):
+             return None, f"Task at index {idx} has invalid category"
+
+         acceptance_criteria = task_def.get("acceptance_criteria")
+         if acceptance_criteria is not None and not isinstance(
+             acceptance_criteria, (list, str)
+         ):
+             return None, f"Task at index {idx} has invalid acceptance_criteria"
+         if isinstance(acceptance_criteria, list) and any(
+             not isinstance(item, str) for item in acceptance_criteria
+         ):
+             return None, f"Task at index {idx} acceptance_criteria must be a list of strings"
+
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if specs_dir is None:
+         return (
+             None,
+             "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+         )
+
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if spec_path is None:
+         return None, f"Specification '{spec_id}' not found"
+
+     spec_data = load_spec(spec_id, specs_dir)
+     if spec_data is None:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     requires_rich_tasks = _requires_rich_task_fields(spec_data)
+
+     hierarchy = spec_data.get("hierarchy", {})
+     spec_root = hierarchy.get("spec-root")
+
+     if spec_root is None:
+         return None, "Specification root node 'spec-root' not found"
+
+     if spec_root.get("type") not in {"spec", "root"}:
+         return None, "Specification root node has invalid type"
+
+     children = spec_root.get("children", []) or []
+     if not isinstance(children, list):
+         children = []
+
+     insert_index = len(children)
+     if position is not None and position >= 0:
+         insert_index = min(position, len(children))
+
+     # Generate phase ID
+     phase_id, phase_num = _generate_phase_id(hierarchy)
+
+     # Build phase metadata
+     phase_metadata: Dict[str, Any] = {
+         "purpose": (phase_purpose.strip() if phase_purpose else ""),
+     }
+     if phase_description:
+         phase_metadata["description"] = phase_description.strip()
+     if phase_estimated_hours is not None:
+         phase_metadata["estimated_hours"] = phase_estimated_hours
+
+     # Create phase node (without children initially)
+     phase_node = {
+         "type": "phase",
+         "title": phase_title,
+         "status": "pending",
+         "parent": "spec-root",
+         "children": [],
+         "total_tasks": 0,
+         "completed_tasks": 0,
+         "metadata": phase_metadata,
+         "dependencies": {
+             "blocks": [],
+             "blocked_by": [],
+             "depends": [],
+         },
+     }
+
+     hierarchy[phase_id] = phase_node
+
+     # Insert phase into spec-root children
+     if insert_index == len(children):
+         children.append(phase_id)
+     else:
+         children.insert(insert_index, phase_id)
+     spec_root["children"] = children
+
+     # Link to previous phase if requested
+     linked_phase_id: Optional[str] = None
+     if link_previous and insert_index > 0 and insert_index == len(children) - 1:
+         candidate = children[insert_index - 1]
+         previous = hierarchy.get(candidate)
+         if previous and previous.get("type") == "phase":
+             linked_phase_id = candidate
+             prev_deps = previous.setdefault(
+                 "dependencies",
+                 {"blocks": [], "blocked_by": [], "depends": []},
+             )
+             blocks = prev_deps.setdefault("blocks", [])
+             if phase_id not in blocks:
+                 blocks.append(phase_id)
+             phase_node["dependencies"]["blocked_by"].append(candidate)
+
+     def _nonempty_string(value: Any) -> bool:
+         return isinstance(value, str) and bool(value.strip())
+
+     def _extract_description(task_def: Dict[str, Any]) -> Tuple[Optional[str], Any]:
+         description = task_def.get("description")
+         if _nonempty_string(description) and isinstance(description, str):
+             return "description", description.strip()
+         details = task_def.get("details")
+         if _nonempty_string(details) and isinstance(details, str):
+             return "details", details.strip()
+         if isinstance(details, list):
+             cleaned = [
+                 item.strip()
+                 for item in details
+                 if isinstance(item, str) and item.strip()
+             ]
+             if cleaned:
+                 return "details", cleaned
+         return None, None
+
+     # Create tasks under the phase
+     tasks_created: List[Dict[str, Any]] = []
+     task_counter = 0
+     verify_counter = 0
+
+     for task_def in tasks:
+         task_type = task_def["type"]
+         task_title = task_def["title"].strip()
+
+         # Generate task ID based on type
+         if task_type == "verify":
+             verify_counter += 1
+             task_id = f"verify-{phase_num}-{verify_counter}"
+         else:
+             task_counter += 1
+             task_id = f"task-{phase_num}-{task_counter}"
+
+         # Build task metadata with defaults cascade
+         task_metadata: Dict[str, Any] = {}
+
+         # Apply description/details
+         desc_field, desc_value = _extract_description(task_def)
+         if desc_field and desc_value is not None:
+             task_metadata[desc_field] = desc_value
+         elif requires_rich_tasks and task_type == "task":
+             return None, f"Task '{task_title}' missing description"
+
+         # Apply file_path
+         file_path = task_def.get("file_path")
+         if file_path and isinstance(file_path, str):
+             task_metadata["file_path"] = file_path.strip()
+
+         # Apply estimated_hours (task-level overrides defaults)
+         est_hours = task_def.get("estimated_hours")
+         if est_hours is not None:
+             task_metadata["estimated_hours"] = float(est_hours)
+         elif defaults.get("estimated_hours") is not None:
+             task_metadata["estimated_hours"] = float(defaults["estimated_hours"])
+
+         normalized_category = None
+         if task_type == "task":
+             # Apply acceptance_criteria
+             raw_acceptance = task_def.get("acceptance_criteria")
+             if raw_acceptance is None:
+                 raw_acceptance = defaults.get("acceptance_criteria")
+             acceptance_criteria = _normalize_acceptance_criteria(raw_acceptance)
+             if acceptance_criteria is not None:
+                 task_metadata["acceptance_criteria"] = acceptance_criteria
+             if requires_rich_tasks:
+                 if raw_acceptance is None:
+                     return None, f"Task '{task_title}' missing acceptance_criteria"
+                 if not acceptance_criteria:
+                     return (
+                         None,
+                         f"Task '{task_title}' acceptance_criteria must include at least one entry",
+                     )
+
+             # Apply task_category from defaults if not specified
+             category = task_def.get("task_category") or task_def.get("category")
+             if category is None:
+                 category = defaults.get("task_category") or defaults.get("category")
+             if category and isinstance(category, str):
+                 normalized_category = category.strip().lower()
+                 if normalized_category not in CATEGORIES:
+                     return (
+                         None,
+                         f"Task '{task_title}' has invalid task_category '{category}'",
+                     )
+                 task_metadata["task_category"] = normalized_category
+             if requires_rich_tasks and normalized_category is None:
+                 return None, f"Task '{task_title}' missing task_category"
+
+             if normalized_category in {"implementation", "refactoring"}:
+                 if not _nonempty_string(task_metadata.get("file_path")):
1626
+ return (
1627
+ None,
1628
+ f"Task '{task_title}' missing file_path for category '{normalized_category}'",
1629
+ )
1630
+
1631
+ # Apply verification_type for verify tasks
1632
+ if task_type == "verify":
1633
+ verify_type = task_def.get("verification_type")
1634
+ if verify_type and verify_type in VERIFICATION_TYPES:
1635
+ task_metadata["verification_type"] = verify_type
1636
+
1637
+ # Create task node
1638
+ task_node = {
1639
+ "type": task_type,
1640
+ "title": task_title,
1641
+ "status": "pending",
1642
+ "parent": phase_id,
1643
+ "children": [],
1644
+ "total_tasks": 1,
1645
+ "completed_tasks": 0,
1646
+ "metadata": task_metadata,
1647
+ "dependencies": {
1648
+ "blocks": [],
1649
+ "blocked_by": [],
1650
+ "depends": [],
1651
+ },
1652
+ }
1653
+
1654
+ hierarchy[task_id] = task_node
1655
+ phase_node["children"].append(task_id)
1656
+ phase_node["total_tasks"] += 1
1657
+
1658
+ tasks_created.append({
1659
+ "task_id": task_id,
1660
+ "title": task_title,
1661
+ "type": task_type,
1662
+ })
1663
+
1664
+ # Update spec-root total_tasks
1665
+ total_tasks = spec_root.get("total_tasks", 0)
1666
+ spec_root["total_tasks"] = total_tasks + phase_node["total_tasks"]
1667
+
1668
+ # Update spec-level estimated hours if provided
1669
+ if phase_estimated_hours is not None:
1670
+ spec_metadata = spec_data.setdefault("metadata", {})
1671
+ current_hours = spec_metadata.get("estimated_hours")
1672
+ if isinstance(current_hours, (int, float)):
1673
+ spec_metadata["estimated_hours"] = current_hours + phase_estimated_hours
1674
+ else:
1675
+ spec_metadata["estimated_hours"] = phase_estimated_hours
1676
+
1677
+ # Save spec atomically
1678
+ saved = save_spec(spec_id, spec_data, specs_dir)
1679
+ if not saved:
1680
+ return None, "Failed to save specification"
1681
+
1682
+ return {
1683
+ "spec_id": spec_id,
1684
+ "phase_id": phase_id,
1685
+ "title": phase_title,
1686
+ "position": insert_index,
1687
+ "linked_previous": linked_phase_id,
1688
+ "tasks_created": tasks_created,
1689
+ "total_tasks": len(tasks_created),
1690
+ }, None
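
On the happy path the function returns a summary dict and a None error. A minimal usage sketch (the spec ID and task definitions are hypothetical; the keyword names mirror the call apply_phase_template makes further down):

    result, error = add_phase_bulk(
        spec_id="2025-06-01-example-spec",  # hypothetical spec ID
        phase_title="Implementation",
        tasks=[
            {
                "type": "task",
                "title": "Implement parser",
                "description": "Build the core parser",
                "task_category": "investigation",
                "acceptance_criteria": ["Parser handles valid input"],
                "estimated_hours": 2,
            },
            {"type": "verify", "title": "Run tests", "verification_type": "run-tests"},
        ],
        phase_purpose="Build the primary functionality",
        link_previous=True,
    )
    if error:
        raise RuntimeError(error)
    print(result["phase_id"], result["total_tasks"])  # e.g. "phase-1", 2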
+
+
+def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
+    """
+    Recursively collect all descendant node IDs for a given node.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_id: Starting node ID
+
+    Returns:
+        List of all descendant node IDs (not including the starting node)
+    """
+    descendants: List[str] = []
+    node = hierarchy.get(node_id)
+    if not node:
+        return descendants
+
+    children = node.get("children", [])
+    if not isinstance(children, list):
+        return descendants
+
+    for child_id in children:
+        descendants.append(child_id)
+        descendants.extend(_collect_descendants(hierarchy, child_id))
+
+    return descendants
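
As a quick illustration, the traversal is depth-first and excludes the starting node; a sketch over a hand-built toy hierarchy (node IDs are made up):

    hierarchy = {
        "phase-1": {"children": ["task-1-1", "task-1-2"]},
        "task-1-1": {"children": ["task-1-1-a"]},
        "task-1-1-a": {"children": []},
        "task-1-2": {"children": []},
    }
    assert _collect_descendants(hierarchy, "phase-1") == [
        "task-1-1", "task-1-1-a", "task-1-2"
    ]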
+
+
+def _count_tasks_in_subtree(
+    hierarchy: Dict[str, Any], node_ids: List[str]
+) -> Tuple[int, int]:
+    """
+    Count total and completed tasks in a list of nodes.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_ids: List of node IDs to count
+
+    Returns:
+        Tuple of (total_count, completed_count)
+    """
+    total = 0
+    completed = 0
+
+    for node_id in node_ids:
+        node = hierarchy.get(node_id)
+        if not node:
+            continue
+        node_type = node.get("type")
+        if node_type in ("task", "subtask", "verify"):
+            total += 1
+            if node.get("status") == "completed":
+                completed += 1
+
+    return total, completed
+
+
+def _remove_dependency_references(
+    hierarchy: Dict[str, Any], removed_ids: List[str]
+) -> None:
+    """
+    Remove references to deleted nodes from all dependency lists.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        removed_ids: List of node IDs being removed
+    """
+    removed_set = set(removed_ids)
+
+    for node in hierarchy.values():
+        deps = node.get("dependencies")
+        if not deps or not isinstance(deps, dict):
+            continue
+
+        for key in ("blocks", "blocked_by", "depends"):
+            dep_list = deps.get(key)
+            if isinstance(dep_list, list):
+                deps[key] = [d for d in dep_list if d not in removed_set]
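
A small sketch of the cleanup behavior (hypothetical node IDs): references to removed nodes vanish from every dependency list while other entries are kept:

    hierarchy = {
        "phase-2": {
            "dependencies": {"blocks": [], "blocked_by": ["phase-1"], "depends": []}
        },
        "phase-3": {
            "dependencies": {"blocks": [], "blocked_by": ["phase-1", "phase-2"], "depends": []}
        },
    }
    _remove_dependency_references(hierarchy, ["phase-1"])
    assert hierarchy["phase-2"]["dependencies"]["blocked_by"] == []
    assert hierarchy["phase-3"]["dependencies"]["blocked_by"] == ["phase-2"]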
+
+
+def remove_phase(
+    spec_id: str,
+    phase_id: str,
+    force: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Remove a phase and all its children from a specification.
+
+    Handles adjacent phase re-linking: if phase B is removed and A blocks B
+    which blocks C, then A will be updated to block C directly.
+
+    Args:
+        spec_id: Specification ID containing the phase.
+        phase_id: Phase ID to remove (e.g., "phase-1").
+        force: If True, remove even if phase contains non-completed tasks.
+            If False (default), refuse to remove phases with active work.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"spec_id": ..., "phase_id": ..., "children_removed": ..., ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate inputs
+    if not spec_id or not spec_id.strip():
+        return None, "Specification ID is required"
+
+    if not phase_id or not phase_id.strip():
+        return None, "Phase ID is required"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return (
+            None,
+            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+        )
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate phase exists
+    phase = hierarchy.get(phase_id)
+    if phase is None:
+        return None, f"Phase '{phase_id}' not found"
+
+    # Validate node type is phase
+    node_type = phase.get("type")
+    if node_type != "phase":
+        return None, f"Node '{phase_id}' is not a phase (type: {node_type})"
+
+    # Collect all descendants
+    descendants = _collect_descendants(hierarchy, phase_id)
+
+    # Check for non-completed tasks if force is False
+    if not force:
+        # Count tasks in phase (excluding verify nodes for the active work check)
+        all_nodes = [phase_id] + descendants
+        has_active_work = False
+        active_task_ids: List[str] = []
+
+        for node_id in all_nodes:
+            node = hierarchy.get(node_id)
+            if not node:
+                continue
+            node_status = node.get("status")
+            child_type = node.get("type")
+            # Consider in_progress or pending tasks as active work
+            if child_type in ("task", "subtask") and node_status in (
+                "pending",
+                "in_progress",
+            ):
+                has_active_work = True
+                active_task_ids.append(node_id)
+
+        if has_active_work:
+            return (
+                None,
+                f"Phase '{phase_id}' has {len(active_task_ids)} non-completed task(s). "
+                f"Use force=True to remove anyway. Active tasks: {', '.join(active_task_ids[:5])}"
+                + ("..." if len(active_task_ids) > 5 else ""),
+            )
+
+    # Get spec-root and phase position info for re-linking
+    spec_root = hierarchy.get("spec-root")
+    if spec_root is None:
+        return None, "Specification root node 'spec-root' not found"
+
+    children = spec_root.get("children", [])
+    if not isinstance(children, list):
+        children = []
+
+    # Find phase position
+    try:
+        phase_index = children.index(phase_id)
+    except ValueError:
+        return None, f"Phase '{phase_id}' not found in spec-root children"
+
+    # Identify adjacent phases for re-linking
+    prev_phase_id: Optional[str] = None
+    next_phase_id: Optional[str] = None
+
+    if phase_index > 0:
+        candidate = children[phase_index - 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            prev_phase_id = candidate
+
+    if phase_index < len(children) - 1:
+        candidate = children[phase_index + 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            next_phase_id = candidate
+
+    # Re-link adjacent phases: if prev blocks this phase and this phase blocks next,
+    # then prev should now block next directly
+    relinked_from: Optional[str] = None
+    relinked_to: Optional[str] = None
+
+    if prev_phase_id and next_phase_id:
+        prev_phase = hierarchy.get(prev_phase_id)
+        next_phase = hierarchy.get(next_phase_id)
+
+        if prev_phase and next_phase:
+            # Check if prev_phase blocks this phase
+            prev_deps = prev_phase.get("dependencies", {})
+            prev_blocks = prev_deps.get("blocks", [])
+
+            # Check if this phase blocks next_phase
+            phase_deps = phase.get("dependencies", {})
+            phase_blocks = phase_deps.get("blocks", [])
+
+            if phase_id in prev_blocks and next_phase_id in phase_blocks:
+                # Re-link: prev should now block next
+                if next_phase_id not in prev_blocks:
+                    prev_blocks.append(next_phase_id)
+
+                # Update next phase's blocked_by
+                next_deps = next_phase.setdefault(
+                    "dependencies",
+                    {
+                        "blocks": [],
+                        "blocked_by": [],
+                        "depends": [],
+                    },
+                )
+                next_blocked_by = next_deps.setdefault("blocked_by", [])
+                if prev_phase_id not in next_blocked_by:
+                    next_blocked_by.append(prev_phase_id)
+
+                relinked_from = prev_phase_id
+                relinked_to = next_phase_id
+
+    # Count tasks being removed
+    nodes_to_remove = [phase_id] + descendants
+    total_removed, completed_removed = _count_tasks_in_subtree(hierarchy, descendants)
+
+    # Remove all nodes from hierarchy
+    for node_id in nodes_to_remove:
+        if node_id in hierarchy:
+            del hierarchy[node_id]
+
+    # Remove phase from spec-root children
+    children.remove(phase_id)
+    spec_root["children"] = children
+
+    # Update spec-root task counts
+    current_total = spec_root.get("total_tasks", 0)
+    current_completed = spec_root.get("completed_tasks", 0)
+    spec_root["total_tasks"] = max(0, current_total - total_removed)
+    spec_root["completed_tasks"] = max(0, current_completed - completed_removed)
+
+    # Clean up dependency references to removed nodes
+    _remove_dependency_references(hierarchy, nodes_to_remove)
+
+    # Save the spec
+    saved = save_spec(spec_id, spec_data, specs_dir)
+    if not saved:
+        return None, "Failed to save specification"
+
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "phase_id": phase_id,
+        "phase_title": phase.get("title", ""),
+        "children_removed": len(descendants),
+        "total_tasks_removed": total_removed,
+        "completed_tasks_removed": completed_removed,
+        "force": force,
+    }
+
+    if relinked_from and relinked_to:
+        result["relinked"] = {
+            "from": relinked_from,
+            "to": relinked_to,
+        }
+
+    return result, None
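
A usage sketch with a hypothetical spec ID; the guard message mentions force=True, which a caller can key off before retrying destructively:

    result, error = remove_phase("2025-06-01-example-spec", "phase-2")
    if error and "force=True" in error:
        # Phase still has pending work; remove it anyway after review.
        result, error = remove_phase("2025-06-01-example-spec", "phase-2", force=True)
    if result and "relinked" in result:
        # e.g. {"from": "phase-1", "to": "phase-3"}
        print("chain repaired:", result["relinked"])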
1978
+
1979
+
1980
+ def move_phase(
1981
+ spec_id: str,
1982
+ phase_id: str,
1983
+ position: int,
1984
+ link_previous: bool = True,
1985
+ dry_run: bool = False,
1986
+ specs_dir: Optional[Path] = None,
1987
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
1988
+ """
1989
+ Move a phase to a new position within spec-root's children.
1990
+
1991
+ Supports reordering phases and optionally re-linking phase dependencies
1992
+ according to the link_previous pattern (each phase blocked by its predecessor).
1993
+
1994
+ Args:
1995
+ spec_id: Specification ID containing the phase.
1996
+ phase_id: Phase ID to move (e.g., "phase-2").
1997
+ position: Target position (1-based index) in spec-root children.
1998
+ link_previous: If True, update dependencies to maintain the sequential
1999
+ blocking pattern. If False, preserve existing dependencies.
2000
+ dry_run: If True, validate and return preview without saving changes.
2001
+ specs_dir: Path to specs directory (auto-detected if not provided).
2002
+
2003
+ Returns:
2004
+ Tuple of (result_dict, error_message).
2005
+ On success: ({"spec_id": ..., "phase_id": ..., "old_position": ..., "new_position": ..., ...}, None)
2006
+ On failure: (None, "error message")
2007
+ """
2008
+ # Validate inputs
2009
+ if not spec_id or not spec_id.strip():
2010
+ return None, "Specification ID is required"
2011
+
2012
+ if not phase_id or not phase_id.strip():
2013
+ return None, "Phase ID is required"
2014
+
2015
+ if not isinstance(position, int) or position < 1:
2016
+ return None, "Position must be a positive integer (1-based)"
2017
+
2018
+ # Find specs directory
2019
+ if specs_dir is None:
2020
+ specs_dir = find_specs_directory()
2021
+
2022
+ if specs_dir is None:
2023
+ return (
2024
+ None,
2025
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
2026
+ )
2027
+
2028
+ # Find and load the spec
2029
+ spec_path = find_spec_file(spec_id, specs_dir)
2030
+ if spec_path is None:
2031
+ return None, f"Specification '{spec_id}' not found"
2032
+
2033
+ spec_data = load_spec(spec_id, specs_dir)
2034
+ if spec_data is None:
2035
+ return None, f"Failed to load specification '{spec_id}'"
2036
+
2037
+ hierarchy = spec_data.get("hierarchy", {})
2038
+
2039
+ # Validate phase exists
2040
+ phase = hierarchy.get(phase_id)
2041
+ if phase is None:
2042
+ return None, f"Phase '{phase_id}' not found"
2043
+
2044
+ # Validate node type is phase
2045
+ node_type = phase.get("type")
2046
+ if node_type != "phase":
2047
+ return None, f"Node '{phase_id}' is not a phase (type: {node_type})"
2048
+
2049
+ # Get spec-root
2050
+ spec_root = hierarchy.get("spec-root")
2051
+ if spec_root is None:
2052
+ return None, "Specification root node 'spec-root' not found"
2053
+
2054
+ children = spec_root.get("children", [])
2055
+ if not isinstance(children, list):
2056
+ children = []
2057
+
2058
+ # Find current position
2059
+ try:
2060
+ old_index = children.index(phase_id)
2061
+ except ValueError:
2062
+ return None, f"Phase '{phase_id}' not found in spec-root children"
2063
+
2064
+ # Convert to 0-based index for internal use
2065
+ new_index = position - 1
2066
+
2067
+ # Validate position is within bounds
2068
+ if new_index < 0 or new_index >= len(children):
2069
+ return None, f"Invalid position {position}. Must be 1-{len(children)}"
2070
+
2071
+ # No change needed if same position
2072
+ if old_index == new_index:
2073
+ return {
2074
+ "spec_id": spec_id,
2075
+ "phase_id": phase_id,
2076
+ "phase_title": phase.get("title", ""),
2077
+ "old_position": old_index + 1,
2078
+ "new_position": new_index + 1,
2079
+ "moved": False,
2080
+ "dry_run": dry_run,
2081
+ "message": "Phase is already at the specified position",
2082
+ }, None
2083
+
+    # Identify old neighbors for dependency cleanup
+    old_prev_id: Optional[str] = None
+    old_next_id: Optional[str] = None
+
+    if old_index > 0:
+        candidate = children[old_index - 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            old_prev_id = candidate
+
+    if old_index < len(children) - 1:
+        candidate = children[old_index + 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            old_next_id = candidate
+
+    # Perform the move in children list. Inserting at new_index after the
+    # removal yields the requested final position whether the phase moves
+    # forward or backward, so no further index adjustment is needed.
+    children.remove(phase_id)
+    insert_index = new_index
+    if insert_index >= len(children):
+        children.append(phase_id)
+    else:
+        children.insert(insert_index, phase_id)
+
+    # Identify new neighbors
+    actual_new_index = children.index(phase_id)
+    new_prev_id: Optional[str] = None
+    new_next_id: Optional[str] = None
+
+    if actual_new_index > 0:
+        candidate = children[actual_new_index - 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            new_prev_id = candidate
+
+    if actual_new_index < len(children) - 1:
+        candidate = children[actual_new_index + 1]
+        if hierarchy.get(candidate, {}).get("type") == "phase":
+            new_next_id = candidate
+
+    # Track dependency changes
+    dependencies_updated: List[Dict[str, Any]] = []
+
+    if link_previous:
+        # Remove old dependency links
+        phase_deps = phase.setdefault(
+            "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
+        )
+
+        # 1. Remove this phase from old_prev's blocks list
+        if old_prev_id:
+            old_prev = hierarchy.get(old_prev_id)
+            if old_prev:
+                old_prev_deps = old_prev.get("dependencies", {})
+                old_prev_blocks = old_prev_deps.get("blocks", [])
+                if phase_id in old_prev_blocks:
+                    old_prev_blocks.remove(phase_id)
+                    dependencies_updated.append({
+                        "action": "removed",
+                        "from": old_prev_id,
+                        "relationship": "blocks",
+                        "target": phase_id,
+                    })
+
+        # 2. Remove old_prev from this phase's blocked_by
+        phase_blocked_by = phase_deps.setdefault("blocked_by", [])
+        if old_prev_id and old_prev_id in phase_blocked_by:
+            phase_blocked_by.remove(old_prev_id)
+            dependencies_updated.append({
+                "action": "removed",
+                "from": phase_id,
+                "relationship": "blocked_by",
+                "target": old_prev_id,
+            })
+
+        # 3. Remove this phase from old_next's blocked_by
+        if old_next_id:
+            old_next = hierarchy.get(old_next_id)
+            if old_next:
+                old_next_deps = old_next.get("dependencies", {})
+                old_next_blocked_by = old_next_deps.get("blocked_by", [])
+                if phase_id in old_next_blocked_by:
+                    old_next_blocked_by.remove(phase_id)
+                    dependencies_updated.append({
+                        "action": "removed",
+                        "from": old_next_id,
+                        "relationship": "blocked_by",
+                        "target": phase_id,
+                    })
+
+        # 4. Remove old_next from this phase's blocks
+        phase_blocks = phase_deps.setdefault("blocks", [])
+        if old_next_id and old_next_id in phase_blocks:
+            phase_blocks.remove(old_next_id)
+            dependencies_updated.append({
+                "action": "removed",
+                "from": phase_id,
+                "relationship": "blocks",
+                "target": old_next_id,
+            })
+
+        # 5. Link old neighbors to each other (if they were adjacent via this phase)
+        if old_prev_id and old_next_id:
+            old_prev = hierarchy.get(old_prev_id)
+            old_next = hierarchy.get(old_next_id)
+            if old_prev and old_next:
+                old_prev_deps = old_prev.setdefault(
+                    "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
+                )
+                old_prev_blocks = old_prev_deps.setdefault("blocks", [])
+                if old_next_id not in old_prev_blocks:
+                    old_prev_blocks.append(old_next_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": old_prev_id,
+                        "relationship": "blocks",
+                        "target": old_next_id,
+                    })
+
+                old_next_deps = old_next.setdefault(
+                    "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
+                )
+                old_next_blocked_by = old_next_deps.setdefault("blocked_by", [])
+                if old_prev_id not in old_next_blocked_by:
+                    old_next_blocked_by.append(old_prev_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": old_next_id,
+                        "relationship": "blocked_by",
+                        "target": old_prev_id,
+                    })
+
+        # Add new dependency links
+        # 6. New prev blocks this phase
+        if new_prev_id:
+            new_prev = hierarchy.get(new_prev_id)
+            if new_prev:
+                new_prev_deps = new_prev.setdefault(
+                    "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
+                )
+                new_prev_blocks = new_prev_deps.setdefault("blocks", [])
+                if phase_id not in new_prev_blocks:
+                    new_prev_blocks.append(phase_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": new_prev_id,
+                        "relationship": "blocks",
+                        "target": phase_id,
+                    })
+
+                # This phase is blocked by new prev
+                if new_prev_id not in phase_blocked_by:
+                    phase_blocked_by.append(new_prev_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": phase_id,
+                        "relationship": "blocked_by",
+                        "target": new_prev_id,
+                    })
+
+        # 7. This phase blocks new next
+        if new_next_id:
+            new_next = hierarchy.get(new_next_id)
+            if new_next:
+                if new_next_id not in phase_blocks:
+                    phase_blocks.append(new_next_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": phase_id,
+                        "relationship": "blocks",
+                        "target": new_next_id,
+                    })
+
+                new_next_deps = new_next.setdefault(
+                    "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
+                )
+                new_next_blocked_by = new_next_deps.setdefault("blocked_by", [])
+                if phase_id not in new_next_blocked_by:
+                    new_next_blocked_by.append(phase_id)
+                    dependencies_updated.append({
+                        "action": "added",
+                        "from": new_next_id,
+                        "relationship": "blocked_by",
+                        "target": phase_id,
+                    })
+
+        # Remove old link from new prev to new next (now goes through this phase)
+        if new_prev_id:
+            new_prev = hierarchy.get(new_prev_id)
+            if new_prev:
+                new_prev_deps = new_prev.get("dependencies", {})
+                new_prev_blocks = new_prev_deps.get("blocks", [])
+                if new_next_id in new_prev_blocks:
+                    new_prev_blocks.remove(new_next_id)
+                    dependencies_updated.append({
+                        "action": "removed",
+                        "from": new_prev_id,
+                        "relationship": "blocks",
+                        "target": new_next_id,
+                    })
+
+                # Guard on new_next_id: new_next_blocked_by is only bound when
+                # a new next phase exists (step 7 above).
+                if new_next_id and new_prev_id in new_next_blocked_by:
+                    new_next_blocked_by.remove(new_prev_id)
+                    dependencies_updated.append({
+                        "action": "removed",
+                        "from": new_next_id,
+                        "relationship": "blocked_by",
+                        "target": new_prev_id,
+                    })
+
+    # Update spec-root children
+    spec_root["children"] = children
+
+    # Build result
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "phase_id": phase_id,
+        "phase_title": phase.get("title", ""),
+        "old_position": old_index + 1,
+        "new_position": actual_new_index + 1,
+        "moved": True,
+        "link_previous": link_previous,
+        "dry_run": dry_run,
+    }
+
+    if dependencies_updated:
+        result["dependencies_updated"] = dependencies_updated
+
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+        return result, None
+
+    # Save the spec
+    saved = save_spec(spec_id, spec_data, specs_dir)
+    if not saved:
+        return None, "Failed to save specification"
+
+    return result, None
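
Since dry_run returns the full change preview without saving, a reasonable pattern is to inspect the planned dependency rewires before committing; the spec ID below is hypothetical:

    preview, error = move_phase(
        "2025-06-01-example-spec",  # hypothetical spec ID
        "phase-3",
        position=1,
        dry_run=True,
    )
    if preview:
        for change in preview.get("dependencies_updated", []):
            print(change["action"], change["from"],
                  change["relationship"], change["target"])
    # Re-run with dry_run=False to persist the reorder.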
2320
+
2321
+
2322
+ def update_phase_metadata(
2323
+ spec_id: str,
2324
+ phase_id: str,
2325
+ *,
2326
+ estimated_hours: Optional[float] = None,
2327
+ description: Optional[str] = None,
2328
+ purpose: Optional[str] = None,
2329
+ dry_run: bool = False,
2330
+ specs_dir: Optional[Path] = None,
2331
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
2332
+ """
2333
+ Update metadata fields of a phase in a specification.
2334
+
2335
+ Allows updating phase-level metadata such as estimated_hours, description,
2336
+ and purpose. Tracks previous values for audit purposes.
2337
+
2338
+ Args:
2339
+ spec_id: Specification ID containing the phase.
2340
+ phase_id: Phase ID to update (e.g., "phase-1").
2341
+ estimated_hours: New estimated hours value (must be >= 0 if provided).
2342
+ description: New description text for the phase.
2343
+ purpose: New purpose text for the phase.
2344
+ dry_run: If True, validate and return preview without saving changes.
2345
+ specs_dir: Path to specs directory (auto-detected if not provided).
2346
+
2347
+ Returns:
2348
+ Tuple of (result_dict, error_message).
2349
+ On success: ({"spec_id": ..., "phase_id": ..., "updates": [...], ...}, None)
2350
+ On failure: (None, "error message")
2351
+ """
2352
+ # Validate spec_id
2353
+ if not spec_id or not spec_id.strip():
2354
+ return None, "Specification ID is required"
2355
+
2356
+ # Validate phase_id
2357
+ if not phase_id or not phase_id.strip():
2358
+ return None, "Phase ID is required"
2359
+
2360
+ # Validate estimated_hours if provided
2361
+ if estimated_hours is not None:
2362
+ if not isinstance(estimated_hours, (int, float)):
2363
+ return None, "estimated_hours must be a number"
2364
+ if estimated_hours < 0:
2365
+ return None, "estimated_hours must be >= 0"
2366
+
2367
+ # Check that at least one field is being updated
2368
+ has_update = any(
2369
+ v is not None for v in [estimated_hours, description, purpose]
2370
+ )
2371
+ if not has_update:
2372
+ return None, "At least one field (estimated_hours, description, purpose) must be provided"
2373
+
2374
+ # Find specs directory
2375
+ if specs_dir is None:
2376
+ specs_dir = find_specs_directory()
2377
+
2378
+ if specs_dir is None:
2379
+ return (
2380
+ None,
2381
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
2382
+ )
2383
+
2384
+ # Find and load the spec
2385
+ spec_path = find_spec_file(spec_id, specs_dir)
2386
+ if spec_path is None:
2387
+ return None, f"Specification '{spec_id}' not found"
2388
+
2389
+ spec_data = load_spec(spec_id, specs_dir)
2390
+ if spec_data is None:
2391
+ return None, f"Failed to load specification '{spec_id}'"
2392
+
2393
+ hierarchy = spec_data.get("hierarchy", {})
2394
+
2395
+ # Validate phase exists
2396
+ phase = hierarchy.get(phase_id)
2397
+ if phase is None:
2398
+ return None, f"Phase '{phase_id}' not found"
2399
+
2400
+ # Validate node type is phase
2401
+ node_type = phase.get("type")
2402
+ if node_type != "phase":
2403
+ return None, f"Node '{phase_id}' is not a phase (type: {node_type})"
2404
+
2405
+ # Ensure metadata exists on phase
2406
+ if "metadata" not in phase:
2407
+ phase["metadata"] = {}
2408
+
2409
+ phase_metadata = phase["metadata"]
2410
+
2411
+ # Track updates with previous values
2412
+ updates: List[Dict[str, Any]] = []
2413
+
2414
+ if estimated_hours is not None:
2415
+ previous = phase_metadata.get("estimated_hours")
2416
+ phase_metadata["estimated_hours"] = estimated_hours
2417
+ updates.append({
2418
+ "field": "estimated_hours",
2419
+ "previous_value": previous,
2420
+ "new_value": estimated_hours,
2421
+ })
2422
+
2423
+ if description is not None:
2424
+ description = description.strip() if description else description
2425
+ previous = phase_metadata.get("description")
2426
+ phase_metadata["description"] = description
2427
+ updates.append({
2428
+ "field": "description",
2429
+ "previous_value": previous,
2430
+ "new_value": description,
2431
+ })
2432
+
2433
+ if purpose is not None:
2434
+ purpose = purpose.strip() if purpose else purpose
2435
+ previous = phase_metadata.get("purpose")
2436
+ phase_metadata["purpose"] = purpose
2437
+ updates.append({
2438
+ "field": "purpose",
2439
+ "previous_value": previous,
2440
+ "new_value": purpose,
2441
+ })
2442
+
2443
+ # Build result
2444
+ result: Dict[str, Any] = {
2445
+ "spec_id": spec_id,
2446
+ "phase_id": phase_id,
2447
+ "phase_title": phase.get("title", ""),
2448
+ "updates": updates,
2449
+ "dry_run": dry_run,
2450
+ }
2451
+
2452
+ if dry_run:
2453
+ result["message"] = "Dry run - changes not saved"
2454
+ return result, None
2455
+
2456
+ # Save the spec
2457
+ saved = save_spec(spec_id, spec_data, specs_dir)
2458
+ if not saved:
2459
+ return None, "Failed to save specification"
2460
+
2461
+ return result, None
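
A short usage sketch (spec ID and values are hypothetical) showing the audit trail of previous/new values:

    result, error = update_phase_metadata(
        "2025-06-01-example-spec",  # hypothetical spec ID
        "phase-1",
        estimated_hours=6.5,
        purpose="Tighten scope after review",
    )
    if result:
        for update in result["updates"]:
            print(update["field"], update["previous_value"], "->", update["new_value"])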
+
+
+def recalculate_estimated_hours(
+    spec_id: str,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Recalculate estimated_hours by aggregating from tasks up through the hierarchy.
+
+    Performs full hierarchy rollup:
+    1. For each phase: sums estimated_hours from all task/subtask/verify descendants
+    2. Updates each phase's metadata.estimated_hours with the calculated sum
+    3. Sums all phase estimates to get the spec total
+    4. Updates spec metadata.estimated_hours with the calculated sum
+
+    Args:
+        spec_id: Specification ID to recalculate.
+        dry_run: If True, return report without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"spec_id": ..., "phases": [...], "spec_level": {...}, ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate spec_id
+    if not spec_id or not spec_id.strip():
+        return None, "Specification ID is required"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "Could not find specs directory"
+
+    # Load spec
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    hierarchy = spec_data.get("hierarchy", {})
+    spec_root = hierarchy.get("spec-root")
+    if not spec_root:
+        return None, "Invalid spec: missing spec-root"
+
+    # Get phase children from spec-root
+    phase_ids = spec_root.get("children", [])
+
+    # Track results for each phase
+    phase_results: List[Dict[str, Any]] = []
+    spec_total_calculated = 0.0
+
+    for phase_id in phase_ids:
+        phase = hierarchy.get(phase_id)
+        if not phase or phase.get("type") != "phase":
+            continue
+
+        phase_metadata = phase.get("metadata", {})
+        previous_hours = phase_metadata.get("estimated_hours")
+
+        # Collect all descendants of this phase
+        descendants = _collect_descendants(hierarchy, phase_id)
+
+        # Sum estimated_hours from task/subtask/verify nodes
+        task_count = 0
+        calculated_hours = 0.0
+
+        for desc_id in descendants:
+            desc_node = hierarchy.get(desc_id)
+            if not desc_node:
+                continue
+
+            desc_type = desc_node.get("type")
+            if desc_type in ("task", "subtask", "verify"):
+                task_count += 1
+                desc_metadata = desc_node.get("metadata", {})
+                est = desc_metadata.get("estimated_hours")
+                if isinstance(est, (int, float)) and est >= 0:
+                    calculated_hours += float(est)
+
+        # Calculate delta
+        prev_value = float(previous_hours) if isinstance(previous_hours, (int, float)) else 0.0
+        delta = calculated_hours - prev_value
+
+        phase_results.append({
+            "phase_id": phase_id,
+            "title": phase.get("title", ""),
+            "previous": previous_hours,
+            "calculated": calculated_hours,
+            "delta": delta,
+            "task_count": task_count,
+        })
+
+        # Update phase metadata (will be saved if not dry_run)
+        if "metadata" not in phase:
+            phase["metadata"] = {}
+        phase["metadata"]["estimated_hours"] = calculated_hours
+
+        # Add to spec total
+        spec_total_calculated += calculated_hours
+
+    # Get spec-level previous value
+    spec_metadata = spec_data.get("metadata", {})
+    spec_previous = spec_metadata.get("estimated_hours")
+    spec_prev_value = float(spec_previous) if isinstance(spec_previous, (int, float)) else 0.0
+    spec_delta = spec_total_calculated - spec_prev_value
+
+    # Update spec metadata
+    if "metadata" not in spec_data:
+        spec_data["metadata"] = {}
+    spec_data["metadata"]["estimated_hours"] = spec_total_calculated
+
+    # Build result
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "dry_run": dry_run,
+        "spec_level": {
+            "previous": spec_previous,
+            "calculated": spec_total_calculated,
+            "delta": spec_delta,
+        },
+        "phases": phase_results,
+        "summary": {
+            "total_phases": len(phase_results),
+            "phases_changed": sum(1 for p in phase_results if p["delta"] != 0),
+            "spec_changed": spec_delta != 0,
+        },
+    }
+
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+        return result, None
+
+    # Save spec
+    saved = save_spec(spec_id, spec_data, specs_dir)
+    if not saved:
+        return None, "Failed to save specification"
+
+    return result, None
+
+
+def recalculate_actual_hours(
+    spec_id: str,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Recalculate actual_hours by aggregating from tasks up through the hierarchy.
+
+    Performs full hierarchy rollup:
+    1. For each phase: sums actual_hours from all task/subtask/verify descendants
+    2. Updates each phase's metadata.actual_hours with the calculated sum
+    3. Sums all phase actuals to get the spec total
+    4. Updates spec metadata.actual_hours with the calculated sum
+
+    Args:
+        spec_id: Specification ID to recalculate.
+        dry_run: If True, return report without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"spec_id": ..., "phases": [...], "spec_level": {...}, ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate spec_id
+    if not spec_id or not spec_id.strip():
+        return None, "Specification ID is required"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "Could not find specs directory"
+
+    # Load spec
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    hierarchy = spec_data.get("hierarchy", {})
+    spec_root = hierarchy.get("spec-root")
+    if not spec_root:
+        return None, "Invalid spec: missing spec-root"
+
+    # Get phase children from spec-root
+    phase_ids = spec_root.get("children", [])
+
+    # Track results for each phase
+    phase_results: List[Dict[str, Any]] = []
+    spec_total_calculated = 0.0
+
+    for phase_id in phase_ids:
+        phase = hierarchy.get(phase_id)
+        if not phase or phase.get("type") != "phase":
+            continue
+
+        phase_metadata = phase.get("metadata", {})
+        previous_hours = phase_metadata.get("actual_hours")
+
+        # Collect all descendants of this phase
+        descendants = _collect_descendants(hierarchy, phase_id)
+
+        # Sum actual_hours from task/subtask/verify nodes
+        task_count = 0
+        calculated_hours = 0.0
+
+        for desc_id in descendants:
+            desc_node = hierarchy.get(desc_id)
+            if not desc_node:
+                continue
+
+            desc_type = desc_node.get("type")
+            if desc_type in ("task", "subtask", "verify"):
+                task_count += 1
+                desc_metadata = desc_node.get("metadata", {})
+                act = desc_metadata.get("actual_hours")
+                if isinstance(act, (int, float)) and act >= 0:
+                    calculated_hours += float(act)
+
+        # Calculate delta
+        prev_value = float(previous_hours) if isinstance(previous_hours, (int, float)) else 0.0
+        delta = calculated_hours - prev_value
+
+        phase_results.append({
+            "phase_id": phase_id,
+            "title": phase.get("title", ""),
+            "previous": previous_hours,
+            "calculated": calculated_hours,
+            "delta": delta,
+            "task_count": task_count,
+        })
+
+        # Update phase metadata (will be saved if not dry_run)
+        if "metadata" not in phase:
+            phase["metadata"] = {}
+        phase["metadata"]["actual_hours"] = calculated_hours
+
+        # Add to spec total
+        spec_total_calculated += calculated_hours
+
+    # Get spec-level previous value
+    spec_metadata = spec_data.get("metadata", {})
+    spec_previous = spec_metadata.get("actual_hours")
+    spec_prev_value = float(spec_previous) if isinstance(spec_previous, (int, float)) else 0.0
+    spec_delta = spec_total_calculated - spec_prev_value
+
+    # Update spec metadata
+    if "metadata" not in spec_data:
+        spec_data["metadata"] = {}
+    spec_data["metadata"]["actual_hours"] = spec_total_calculated
+
+    # Build result
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "dry_run": dry_run,
+        "spec_level": {
+            "previous": spec_previous,
+            "calculated": spec_total_calculated,
+            "delta": spec_delta,
+        },
+        "phases": phase_results,
+        "summary": {
+            "total_phases": len(phase_results),
+            "phases_changed": sum(1 for p in phase_results if p["delta"] != 0),
+            "spec_changed": spec_delta != 0,
+        },
+    }
+
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+        return result, None
+
+    # Save spec
+    saved = save_spec(spec_id, spec_data, specs_dir)
+    if not saved:
+        return None, "Failed to save specification"
+
+    return result, None
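
recalculate_estimated_hours and recalculate_actual_hours differ only in which metadata key they roll up. A hypothetical consolidation, not part of the package, would parameterize the field name; the per-phase summation both loops perform reduces to:

    def _sum_hours(hierarchy, phase_id, field):
        """Hypothetical helper: sum a metadata hours field ("estimated_hours"
        or "actual_hours") over a phase's task/subtask/verify descendants,
        mirroring the loops above."""
        total = 0.0
        for desc_id in _collect_descendants(hierarchy, phase_id):
            node = hierarchy.get(desc_id) or {}
            if node.get("type") in ("task", "subtask", "verify"):
                value = node.get("metadata", {}).get(field)
                if isinstance(value, (int, float)) and value >= 0:
                    total += float(value)
        return total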
+
+
+def get_template_structure(template: str, category: str) -> Dict[str, Any]:
+    """
+    Get the hierarchical structure for a spec template.
+
+    Only the 'empty' template is supported. Use phase templates to add structure.
+
+    Args:
+        template: Template type (only 'empty' is valid).
+        category: Default task category.
+
+    Returns:
+        Hierarchy dict for the spec.
+
+    Raises:
+        ValueError: If template is not 'empty'.
+    """
+    if template != "empty":
+        raise ValueError(
+            f"Invalid template '{template}'. Only 'empty' template is supported. "
+            f"Use phase templates (phase-add-bulk or phase-template apply) to add structure."
+        )
+
+    return {
+        "spec-root": {
+            "type": "spec",
+            "title": "",  # Filled in later
+            "status": "pending",
+            "parent": None,
+            "children": [],
+            "total_tasks": 0,
+            "completed_tasks": 0,
+            "metadata": {
+                "purpose": "",
+                "category": category,
+            },
+            "dependencies": {
+                "blocks": [],
+                "blocked_by": [],
+                "depends": [],
+            },
+        },
+    }
+
+
+def get_phase_template_structure(
+    template: str, category: str = "implementation"
+) -> Dict[str, Any]:
+    """
+    Get the structure definition for a phase template.
+
+    Phase templates define reusable phase structures with pre-configured tasks.
+    Each template includes automatic verification scaffolding (run-tests + fidelity).
+
+    Args:
+        template: Phase template type (planning, implementation, testing, security, documentation).
+        category: Default task category for tasks in this phase.
+
+    Returns:
+        Dict with phase structure including:
+        - title: Phase title
+        - description: Phase description
+        - purpose: Phase purpose for metadata
+        - estimated_hours: Total estimated hours
+        - tasks: List of task definitions (title, description, category, estimated_hours)
+        - includes_verification: Always True (verification auto-added)
+    """
+    templates: Dict[str, Dict[str, Any]] = {
+        "planning": {
+            "title": "Planning & Discovery",
+            "description": "Requirements gathering, analysis, and initial planning",
+            "purpose": "Define scope, requirements, and acceptance criteria",
+            "estimated_hours": 4,
+            "tasks": [
+                {
+                    "title": "Define requirements",
+                    "description": "Document functional and non-functional requirements",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Requirements are documented and reviewed",
+                    ],
+                    "estimated_hours": 2,
+                },
+                {
+                    "title": "Design solution approach",
+                    "description": "Outline the technical approach and architecture decisions",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Solution approach and key decisions are documented",
+                    ],
+                    "estimated_hours": 2,
+                },
+            ],
+        },
+        "implementation": {
+            "title": "Implementation",
+            "description": "Core development and feature implementation",
+            "purpose": "Build the primary functionality",
+            "estimated_hours": 8,
+            "tasks": [
+                {
+                    "title": "Implement core functionality",
+                    "description": "Build the main features and business logic",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Core functionality is implemented and verified",
+                    ],
+                    "estimated_hours": 6,
+                },
+                {
+                    "title": "Add error handling",
+                    "description": "Implement error handling and edge cases",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Error handling covers expected edge cases",
+                    ],
+                    "estimated_hours": 2,
+                },
+            ],
+        },
+        "testing": {
+            "title": "Testing & Validation",
+            "description": "Comprehensive testing and quality assurance",
+            "purpose": "Ensure code quality and correctness",
+            "estimated_hours": 6,
+            "tasks": [
+                {
+                    "title": "Write unit tests",
+                    "description": "Create unit tests for individual components",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Unit tests cover primary logic paths",
+                    ],
+                    "estimated_hours": 3,
+                },
+                {
+                    "title": "Write integration tests",
+                    "description": "Create integration tests for component interactions",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Integration tests cover critical workflows",
+                    ],
+                    "estimated_hours": 3,
+                },
+            ],
+        },
+        "security": {
+            "title": "Security Review",
+            "description": "Security audit, vulnerability assessment, and hardening",
+            "purpose": "Identify and remediate security vulnerabilities",
+            "estimated_hours": 6,
+            "tasks": [
+                {
+                    "title": "Security audit",
+                    "description": "Review code for security vulnerabilities (OWASP Top 10)",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Security findings are documented with severity",
+                    ],
+                    "estimated_hours": 3,
+                },
+                {
+                    "title": "Security remediation",
+                    "description": "Fix identified vulnerabilities and harden implementation",
+                    "task_category": "investigation",
+                    "acceptance_criteria": [
+                        "Security findings are addressed or tracked",
+                    ],
+                    "estimated_hours": 3,
+                },
+            ],
+        },
+        "documentation": {
+            "title": "Documentation",
+            "description": "Technical documentation and knowledge capture",
+            "purpose": "Document the implementation for maintainability",
+            "estimated_hours": 4,
+            "tasks": [
+                {
+                    "title": "Write API documentation",
+                    "description": "Document public APIs, parameters, and return values",
+                    "task_category": "research",
+                    "acceptance_criteria": [
+                        "API documentation is updated with current behavior",
+                    ],
+                    "estimated_hours": 2,
+                },
+                {
+                    "title": "Write user guide",
+                    "description": "Create usage examples and integration guide",
+                    "task_category": "research",
+                    "acceptance_criteria": [
+                        "User guide includes usage examples",
+                    ],
+                    "estimated_hours": 2,
+                },
+            ],
+        },
+    }
+
+    if template not in templates:
+        raise ValueError(
+            f"Invalid phase template '{template}'. Must be one of: {', '.join(PHASE_TEMPLATES)}"
+        )
+
+    result = templates[template].copy()
+    result["includes_verification"] = True
+    result["template_name"] = template
+    return result
+
+
+def apply_phase_template(
+    spec_id: str,
+    template: str,
+    specs_dir: Optional[Path] = None,
+    category: str = "implementation",
+    position: Optional[int] = None,
+    link_previous: bool = True,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Apply a phase template to an existing spec.
+
+    Creates a new phase with pre-configured tasks based on the template.
+    Automatically includes verification scaffolding (run-tests + fidelity).
+
+    Args:
+        spec_id: ID of the spec to add the phase to.
+        template: Phase template name (planning, implementation, testing, security, documentation).
+        specs_dir: Path to specs directory (auto-detected if not provided).
+        category: Default task category for tasks (can be overridden by template).
+        position: Position to insert phase (None = append at end).
+        link_previous: Whether to link this phase to the previous one with dependencies.
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"phase_id": ..., "tasks_created": [...], ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate template
+    if template not in PHASE_TEMPLATES:
+        return (
+            None,
+            f"Invalid phase template '{template}'. Must be one of: {', '.join(PHASE_TEMPLATES)}",
+        )
+
+    # Get template structure
+    template_struct = get_phase_template_structure(template, category)
+
+    # Build tasks list for add_phase_bulk
+    tasks = []
+    for task_def in template_struct["tasks"]:
+        tasks.append({
+            "type": "task",
+            "title": task_def["title"],
+            "description": task_def.get("description", ""),
+            "task_category": task_def.get("task_category", task_def.get("category", category)),
+            "acceptance_criteria": task_def.get("acceptance_criteria"),
+            "estimated_hours": task_def.get("estimated_hours", 1),
+        })
+
+    # Append verification scaffolding (run-tests + fidelity-review)
+    tasks.append({
+        "type": "verify",
+        "title": "Run tests",
+        "verification_type": "run-tests",
+    })
+    tasks.append({
+        "type": "verify",
+        "title": "Fidelity review",
+        "verification_type": "fidelity",
+    })
+
+    # Use add_phase_bulk to create the phase atomically
+    result, error = add_phase_bulk(
+        spec_id=spec_id,
+        phase_title=template_struct["title"],
+        tasks=tasks,
+        specs_dir=specs_dir,
+        phase_description=template_struct.get("description"),
+        phase_purpose=template_struct.get("purpose"),
+        phase_estimated_hours=template_struct.get("estimated_hours"),
+        position=position,
+        link_previous=link_previous,
+    )
+
+    if error:
+        return None, error
+
+    # Enhance result with template info
+    if result:
+        result["template_applied"] = template
+        result["template_title"] = template_struct["title"]
+
+    return result, None
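
A usage sketch with a hypothetical spec ID; with the "testing" template the created phase carries its two template tasks plus the auto-appended verify pair:

    result, error = apply_phase_template(
        spec_id="2025-06-01-example-spec",  # hypothetical spec ID
        template="testing",
    )
    if result:
        assert result["total_tasks"] == 4  # 2 template tasks + 2 verify tasks
        print(result["template_applied"], result["phase_id"])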
+
+
+def generate_spec_data(
+    name: str,
+    template: str = "empty",
+    category: str = "implementation",
+    mission: Optional[str] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Generate spec data structure without writing to disk.
+
+    Used for preflight validation (dry_run) and by create_spec.
+
+    Args:
+        name: Human-readable name for the specification.
+        template: Template type (only 'empty' is valid).
+        category: Default task category.
+        mission: Optional mission statement for the spec.
+
+    Returns:
+        Tuple of (spec_data, error_message).
+        On success: (dict, None)
+        On failure: (None, "error message")
+    """
+    # Validate template - only 'empty' is supported
+    if template not in TEMPLATES:
+        return (
+            None,
+            f"Invalid template '{template}'. Only 'empty' template is supported. "
+            f"Use phase templates to add structure.",
+        )
+
+    # Validate category
+    if category not in CATEGORIES:
+        return (
+            None,
+            f"Invalid category '{category}'. Must be one of: {', '.join(CATEGORIES)}",
+        )
+
+    # Generate spec ID
+    spec_id = generate_spec_id(name)
+
+    # Generate spec structure
+    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+    hierarchy = get_template_structure(template, category)
+
+    # Fill in the title
+    hierarchy["spec-root"]["title"] = name
+
+    # Calculate estimated hours from hierarchy
+    estimated_hours = sum(
+        node.get("metadata", {}).get("estimated_hours", 0)
+        for node in hierarchy.values()
+        if isinstance(node, dict)
+    )
+
+    spec_data = {
+        "spec_id": spec_id,
+        "title": name,
+        "generated": now,
+        "last_updated": now,
+        "metadata": {
+            "description": "",
+            "mission": mission.strip() if isinstance(mission, str) else "",
+            "objectives": [],
+            "complexity": "low",  # Complexity set via explicit metadata, not template
+            "estimated_hours": estimated_hours,
+            "assumptions": [],
+            "owner": "",
+            "category": category,
+            "template": template,
+        },
+        "progress_percentage": 0,
+        "status": "pending",
+        "current_phase": None,  # Empty template has no phases
+        "hierarchy": hierarchy,
+        "journal": [],
+    }
+
+    return spec_data, None
3119
+
3120
+
3121
+ def create_spec(
3122
+ name: str,
3123
+ template: str = "empty",
3124
+ category: str = "implementation",
3125
+ mission: Optional[str] = None,
3126
+ specs_dir: Optional[Path] = None,
3127
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
3128
+ """
3129
+ Create a new specification file from a template.
3130
+
3131
+ Args:
3132
+ name: Human-readable name for the specification.
3133
+ template: Template type (only 'empty' is valid). Use phase templates to add structure.
3134
+ category: Default task category. Default: implementation.
3135
+ mission: Optional mission statement for the spec.
3136
+ specs_dir: Path to specs directory (auto-detected if not provided).
3137
+
3138
+ Returns:
3139
+ Tuple of (result_dict, error_message).
3140
+ On success: ({"spec_id": ..., "spec_path": ..., ...}, None)
3141
+ On failure: (None, "error message")
3142
+ """
3143
+ # Generate spec data (handles validation)
3144
+ spec_data, error = generate_spec_data(
3145
+ name=name,
3146
+ template=template,
3147
+ category=category,
3148
+ mission=mission,
3149
+ )
3150
+ if error or spec_data is None:
3151
+ return None, error or "Failed to generate spec data"
3152
+
3153
+ # Find specs directory
3154
+ if specs_dir is None:
3155
+ specs_dir = find_specs_directory()
3156
+
3157
+ if specs_dir is None:
3158
+ return (
3159
+ None,
3160
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
3161
+ )
3162
+
3163
+ # Ensure pending directory exists
3164
+ pending_dir = specs_dir / "pending"
3165
+ pending_dir.mkdir(parents=True, exist_ok=True)
3166
+
3167
+ # Check if spec already exists
3168
+ spec_id = spec_data["spec_id"]
3169
+ spec_path = pending_dir / f"{spec_id}.json"
3170
+ if spec_path.exists():
3171
+ return None, f"Specification already exists: {spec_id}"
3172
+
3173
+ # Write the spec file
3174
+ try:
3175
+ with open(spec_path, "w") as f:
3176
+ json.dump(spec_data, f, indent=2)
3177
+ except (IOError, OSError) as e:
3178
+ return None, f"Failed to write spec file: {e}"
3179
+
3180
+ # Count tasks and phases
3181
+ hierarchy = spec_data["hierarchy"]
3182
+ task_count = sum(
3183
+ 1
3184
+ for node in hierarchy.values()
3185
+ if isinstance(node, dict) and node.get("type") in ("task", "subtask", "verify")
3186
+ )
3187
+ phase_count = sum(
3188
+ 1
3189
+ for node in hierarchy.values()
3190
+ if isinstance(node, dict) and node.get("type") == "phase"
3191
+ )
3192
+
3193
+ return {
3194
+ "spec_id": spec_id,
3195
+ "spec_path": str(spec_path),
3196
+ "template": template,
3197
+ "category": category,
3198
+ "name": name,
3199
+ "structure": {
3200
+ "phases": phase_count,
3201
+ "tasks": task_count,
3202
+ },
3203
+ }, None
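
A usage sketch (the name, mission, and directory are hypothetical); the empty template yields a spec with no phases or tasks until phase templates are applied:

    from pathlib import Path

    spec, error = create_spec(
        name="Payment retries",                 # hypothetical
        mission="Make webhook retries idempotent",
        specs_dir=Path("/tmp/specs"),           # hypothetical location
    )
    if spec:
        print(spec["spec_id"], spec["spec_path"])  # written under specs/pending/
        assert spec["structure"] == {"phases": 0, "tasks": 0}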
+
+
+def add_assumption(
+    spec_id: str,
+    text: str,
+    assumption_type: Optional[str] = None,
+    author: Optional[str] = None,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Add an assumption to a specification's assumptions array.
+
+    The schema expects assumptions to be stored as strings. The assumption_type
+    and author are included in the returned result for API compatibility but
+    are not stored in the spec (the text itself should be descriptive).
+
+    Args:
+        spec_id: Specification ID to add assumption to.
+        text: Assumption text/description.
+        assumption_type: Optional type/category (any string accepted, e.g. "constraint",
+            "architectural", "security"). For API compatibility only.
+        author: Optional author. For API compatibility.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"spec_id": ..., "text": ..., ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate text
+    if not text or not text.strip():
+        return None, "Assumption text is required"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return (
+            None,
+            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+        )
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    # Ensure metadata.assumptions exists
+    if "metadata" not in spec_data:
+        spec_data["metadata"] = {}
+    if "assumptions" not in spec_data["metadata"]:
+        spec_data["metadata"]["assumptions"] = []
+
+    assumptions = spec_data["metadata"]["assumptions"]
+
+    # Schema expects strings, so store text directly
+    assumption_text = text.strip()
+
+    # Check for duplicates
+    if assumption_text in assumptions:
+        return None, f"Assumption already exists: {assumption_text[:50]}..."
+
+    # Add to assumptions array (as string per schema)
+    assumptions.append(assumption_text)
+
+    # Update last_updated
+    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+    spec_data["last_updated"] = now
+
+    # Save the spec
+    success = save_spec(spec_id, spec_data, specs_dir)
+    if not success:
+        return None, "Failed to save specification"
+
+    # Return index as "ID" for API compatibility
+    assumption_index = len(assumptions)
+
+    return {
+        "spec_id": spec_id,
+        "assumption_id": f"a-{assumption_index}",
+        "text": assumption_text,
+        "type": assumption_type,
+        "author": author,
+        "index": assumption_index,
+    }, None
3295
+
3296
+
3297
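Since only the string is persisted, type and author exist solely in the return value; a sketch (spec_id illustrative):

    result, err = add_assumption(
        "user-auth-2024-06-01",
        "Sessions expire after 24 hours",
        assumption_type="constraint",   # echoed back, not stored in the spec
    )
    # On the first add, result["assumption_id"] == "a-1" - the ID is just a 1-based index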
+ def add_revision(
+     spec_id: str,
+     version: str,
+     changelog: str,
+     author: Optional[str] = None,
+     modified_by: Optional[str] = None,
+     review_triggered_by: Optional[str] = None,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Add a revision entry to a specification's revision_history array.
+
+     Args:
+         spec_id: Specification ID to add revision to.
+         version: Version number (e.g., "1.0", "1.1", "2.0").
+         changelog: Description of changes made in this revision.
+         author: Optional author who made the revision.
+         modified_by: Optional tool or command that made the modification.
+         review_triggered_by: Optional path to review report that triggered this revision.
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "version": ..., ...}, None)
+         On failure: (None, "error message")
+     """
+     # Validate version
+     if not version or not version.strip():
+         return None, "Version is required"
+
+     # Validate changelog
+     if not changelog or not changelog.strip():
+         return None, "Changelog is required"
+
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if specs_dir is None:
+         return (
+             None,
+             "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+         )
+
+     # Find and load the spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if spec_path is None:
+         return None, f"Specification '{spec_id}' not found"
+
+     spec_data = load_spec(spec_id, specs_dir)
+     if spec_data is None:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     # Ensure metadata.revision_history exists
+     if "metadata" not in spec_data:
+         spec_data["metadata"] = {}
+     if "revision_history" not in spec_data["metadata"]:
+         spec_data["metadata"]["revision_history"] = []
+
+     revision_history = spec_data["metadata"]["revision_history"]
+
+     # Create revision entry per schema
+     now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+     revision_entry = {
+         "version": version.strip(),
+         "date": now,
+         "changelog": changelog.strip(),
+     }
+
+     # Add optional fields if provided
+     if author:
+         revision_entry["author"] = author.strip()
+     if modified_by:
+         revision_entry["modified_by"] = modified_by.strip()
+     if review_triggered_by:
+         revision_entry["review_triggered_by"] = review_triggered_by.strip()
+
+     # Append to revision history
+     revision_history.append(revision_entry)
+
+     # Update last_updated
+     spec_data["last_updated"] = now
+
+     # Save the spec
+     success = save_spec(spec_id, spec_data, specs_dir)
+     if not success:
+         return None, "Failed to save specification"
+
+     return {
+         "spec_id": spec_id,
+         "version": revision_entry["version"],
+         "date": revision_entry["date"],
+         "changelog": revision_entry["changelog"],
+         "author": author,
+         "modified_by": modified_by,
+         "review_triggered_by": review_triggered_by,
+         "revision_index": len(revision_history),
+     }, None
+
+
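Recording a revision might look like this (values illustrative; the modified_by string is an assumed example of a tool name):

    result, err = add_revision(
        "user-auth-2024-06-01",
        version="1.1",
        changelog="Split the auth phase into login and session tasks",
        modified_by="sdd-modify",   # optional provenance field
    )
    # result["revision_index"] is the new length of metadata.revision_history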
+ def list_assumptions(
+     spec_id: str,
+     assumption_type: Optional[str] = None,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     List assumptions from a specification.
+
+     Args:
+         spec_id: Specification ID to list assumptions from.
+         assumption_type: Optional filter parameter (any string accepted).
+             Note: Since assumptions are stored as strings, this filter is
+             provided for API compatibility but has no effect.
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "assumptions": [...], ...}, None)
+         On failure: (None, "error message")
+     """
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if specs_dir is None:
+         return (
+             None,
+             "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+         )
+
+     # Find and load the spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if spec_path is None:
+         return None, f"Specification '{spec_id}' not found"
+
+     spec_data = load_spec(spec_id, specs_dir)
+     if spec_data is None:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     # Get assumptions from metadata
+     assumptions = spec_data.get("metadata", {}).get("assumptions", [])
+
+     # Build assumption list with indices
+     assumption_list = []
+     for i, assumption in enumerate(assumptions, 1):
+         if isinstance(assumption, str):
+             assumption_list.append(
+                 {
+                     "id": f"a-{i}",
+                     "text": assumption,
+                     "index": i,
+                 }
+             )
+
+     return {
+         "spec_id": spec_id,
+         "assumptions": assumption_list,
+         "total_count": len(assumption_list),
+         "filter_type": assumption_type,
+     }, None
+
+
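Reading assumptions back is symmetric; the assumption_type argument is accepted but, as documented above, does not filter string-stored assumptions (spec_id illustrative):

    result, err = list_assumptions("user-auth-2024-06-01")
    for a in result["assumptions"]:
        print(a["id"], a["text"])   # IDs are synthesized as a-1, a-2, ...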
+ # Valid frontmatter keys that can be updated
+ # Note: assumptions and revision_history have dedicated functions
+ FRONTMATTER_KEYS = (
+     "title",
+     "description",
+     "mission",
+     "objectives",
+     "complexity",
+     "estimated_hours",
+     "owner",
+     "status",
+     "category",
+     "progress_percentage",
+     "current_phase",
+ )
+
+
+ def update_frontmatter(
+     spec_id: str,
+     key: str,
+     value: Any,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Update a frontmatter field in a specification.
+
+     Updates fields in the spec's metadata block, routing computed fields
+     (status, progress_percentage, current_phase) to the top level. For arrays
+     like assumptions or revision_history, use the dedicated add_assumption()
+     and add_revision() functions instead.
+
+     Args:
+         spec_id: Specification ID to update.
+         key: Metadata key to update (e.g., "title", "status", "description").
+         value: New value for the key.
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "key": ..., "value": ..., ...}, None)
+         On failure: (None, "error message")
+     """
+     # Validate key
+     if not key or not key.strip():
+         return None, "Key is required"
+
+     key = key.strip()
+
+     # Block array fields that have dedicated functions
+     if key in ("assumptions", "revision_history"):
+         return (
+             None,
+             f"Use dedicated function for '{key}' (add_assumption or add_revision)",
+         )
+
+     # Validate value is not None (but allow empty string, 0, False, etc.)
+     if value is None:
+         return None, "Value cannot be None"
+
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+
+     if specs_dir is None:
+         return (
+             None,
+             "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+         )
+
+     # Find and load the spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if spec_path is None:
+         return None, f"Specification '{spec_id}' not found"
+
+     spec_data = load_spec(spec_id, specs_dir)
+     if spec_data is None:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     # Ensure metadata exists
+     if "metadata" not in spec_data:
+         spec_data["metadata"] = {}
+
+     # Get previous value for result (check appropriate location)
+     if key in ("status", "progress_percentage", "current_phase"):
+         previous_value = spec_data.get(key)
+     else:
+         previous_value = spec_data["metadata"].get(key)
+
+     # Process value based on type
+     if isinstance(value, str):
+         value = value.strip() if value else value
+
+     # Computed fields (status, progress_percentage, current_phase) are now
+     # stored only at top-level. Title is kept in metadata for descriptive purposes.
+     if key in ("status", "progress_percentage", "current_phase"):
+         # Update top-level only (canonical location for computed fields)
+         spec_data[key] = value
+     else:
+         # Regular metadata field
+         spec_data["metadata"][key] = value
+         # Also sync title to top-level if updating it
+         if key == "title":
+             spec_data[key] = value
+
+     # Update last_updated
+     now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+     spec_data["last_updated"] = now
+
+     # Save the spec
+     success = save_spec(spec_id, spec_data, specs_dir)
+     if not success:
+         return None, "Failed to save specification"
+
+     return {
+         "spec_id": spec_id,
+         "key": key,
+         "value": value,
+         "previous_value": previous_value,
+     }, None
+
+
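The key routing means computed fields land at the top level while descriptive fields stay under metadata, with title mirrored to both; a sketch (spec_id illustrative):

    update_frontmatter("user-auth-2024-06-01", "status", "in_progress")
    # -> spec_data["status"] = "in_progress"        (top level only)

    update_frontmatter("user-auth-2024-06-01", "owner", "alice")
    # -> spec_data["metadata"]["owner"] = "alice"   (metadata only)

    update_frontmatter("user-auth-2024-06-01", "title", "User Auth v2")
    # -> written to metadata and mirrored to the top-level title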
+ # Safety constraints for find/replace operations
+ _FR_MAX_PATTERN_LENGTH = 256
+ _FR_DEFAULT_MAX_REPLACEMENTS = 1000
+ _FR_VALID_SCOPES = {"all", "titles", "descriptions"}
+ _FR_MAX_SAMPLE_DIFFS = 10
+
+
+ def find_replace_in_spec(
+     spec_id: str,
+     find: str,
+     replace: str,
+     *,
+     scope: str = "all",
+     use_regex: bool = False,
+     case_sensitive: bool = True,
+     dry_run: bool = False,
+     max_replacements: int = _FR_DEFAULT_MAX_REPLACEMENTS,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Find and replace text across spec hierarchy nodes.
+
+     Performs literal or regex find/replace across titles and/or descriptions
+     in a specification's hierarchy nodes.
+
+     Args:
+         spec_id: Specification ID to modify.
+         find: Text or regex pattern to find.
+         replace: Replacement text (supports backreferences if use_regex=True).
+         scope: Where to search - "all", "titles", or "descriptions".
+         use_regex: If True, treat `find` as a regex pattern.
+         case_sensitive: If False, perform case-insensitive matching.
+         dry_run: If True, preview changes without modifying the spec.
+         max_replacements: Maximum number of replacements (safety limit).
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "total_replacements": ..., ...}, None)
+         On failure: (None, "error message")
+     """
+     # Validate find pattern
+     if not find or not isinstance(find, str):
+         return None, "find must be a non-empty string"
+     # Don't strip the pattern - use exactly what the user provides (whitespace
+     # may be intentional) - but reject patterns that are whitespace only
+     if not find.strip():
+         return None, "find must be a non-empty string"
+     if len(find) > _FR_MAX_PATTERN_LENGTH:
+         return None, f"find pattern exceeds maximum length of {_FR_MAX_PATTERN_LENGTH} characters"
+
+     # Validate replace
+     if replace is None:
+         return None, "replace must be provided (use empty string to delete matches)"
+     if not isinstance(replace, str):
+         return None, "replace must be a string"
+
+     # Validate scope
+     if scope not in _FR_VALID_SCOPES:
+         return None, f"scope must be one of: {sorted(_FR_VALID_SCOPES)}"
+
+     # Validate max_replacements
+     if not isinstance(max_replacements, int) or max_replacements <= 0:
+         return None, "max_replacements must be a positive integer"
+
+     # Compile regex if needed
+     compiled_pattern = None
+     if use_regex:
+         try:
+             flags = 0 if case_sensitive else re.IGNORECASE
+             compiled_pattern = re.compile(find, flags)
+         except re.error as e:
+             return None, f"Invalid regex pattern: {e}"
+     else:
+         # For case-insensitive literal search, fall back to an escaped regex
+         if not case_sensitive:
+             compiled_pattern = re.compile(re.escape(find), re.IGNORECASE)
+
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+     if specs_dir is None:
+         return None, "No specs directory found"
+
+     # Load spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if not spec_path:
+         return None, f"Specification '{spec_id}' not found"
+     spec_data = load_spec(spec_id, specs_dir)
+     if not spec_data:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     hierarchy = spec_data.get("hierarchy", {})
+     if not hierarchy:
+         return {
+             "spec_id": spec_id,
+             "total_replacements": 0,
+             "nodes_affected": 0,
+             "changes": [],
+             "dry_run": dry_run,
+             "message": "No hierarchy nodes to process",
+         }, None
+
+     # Track changes
+     changes: List[Dict[str, Any]] = []
+     total_replacements = 0
+     nodes_affected = set()
+     warnings: List[str] = []
+     limit_reached = False
+
+     # Helper to perform replacement
+     def do_replace(text: str) -> Tuple[str, int]:
+         if compiled_pattern:
+             new_text, count = compiled_pattern.subn(replace, text)
+             return new_text, count
+         else:
+             # Case-sensitive literal replace
+             count = text.count(find)
+             new_text = text.replace(find, replace)
+             return new_text, count
+
+     # Process hierarchy nodes
+     for node_id, node_data in hierarchy.items():
+         if node_id == "spec-root":
+             continue
+         if limit_reached:
+             break
+
+         # Process title if in scope
+         if scope in ("all", "titles"):
+             title = node_data.get("title", "")
+             if title and isinstance(title, str):
+                 new_title, count = do_replace(title)
+                 if count > 0:
+                     if total_replacements + count > max_replacements:
+                         # Partial replacement of a field is not supported; skip it
+                         warnings.append(
+                             f"max_replacements limit ({max_replacements}) reached"
+                         )
+                         limit_reached = True
+                     else:
+                         total_replacements += count
+                         nodes_affected.add(node_id)
+                         changes.append({
+                             "node_id": node_id,
+                             "field": "title",
+                             "old": title,
+                             "new": new_title,
+                             "replacement_count": count,
+                         })
+                         if not dry_run:
+                             node_data["title"] = new_title
+
+         # Process description if in scope
+         if scope in ("all", "descriptions") and not limit_reached:
+             metadata = node_data.get("metadata", {})
+             if isinstance(metadata, dict):
+                 description = metadata.get("description", "")
+                 if description and isinstance(description, str):
+                     new_description, count = do_replace(description)
+                     if count > 0:
+                         if total_replacements + count > max_replacements:
+                             warnings.append(
+                                 f"max_replacements limit ({max_replacements}) reached"
+                             )
+                             limit_reached = True
+                         else:
+                             total_replacements += count
+                             nodes_affected.add(node_id)
+                             changes.append({
+                                 "node_id": node_id,
+                                 "field": "description",
+                                 "old": description,
+                                 "new": new_description,
+                                 "replacement_count": count,
+                             })
+                             if not dry_run:
+                                 metadata["description"] = new_description
+
+     # Save if not dry_run and there were changes
+     if not dry_run and total_replacements > 0:
+         if not save_spec(spec_id, spec_data, specs_dir):
+             return None, "Failed to save specification after replacements"
+
+     # Build result
+     result: Dict[str, Any] = {
+         "spec_id": spec_id,
+         "total_replacements": total_replacements,
+         "nodes_affected": len(nodes_affected),
+         "dry_run": dry_run,
+         "scope": scope,
+         "find": find,
+         "replace": replace,
+         "use_regex": use_regex,
+         "case_sensitive": case_sensitive,
+     }
+
+     # Include sample diffs (limited)
+     if changes:
+         result["changes"] = changes[:_FR_MAX_SAMPLE_DIFFS]
+         if len(changes) > _FR_MAX_SAMPLE_DIFFS:
+             result["changes_truncated"] = True
+             result["total_changes"] = len(changes)
+
+     if warnings:
+         result["warnings"] = warnings
+
+     if total_replacements == 0:
+         result["message"] = "No matches found"
+
+     return result, None
+
+
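Because dry_run returns the same change records without writing, a preview-then-apply flow is straightforward (pattern and spec_id illustrative):

    preview, err = find_replace_in_spec(
        "user-auth-2024-06-01",
        find="auth service",
        replace="identity service",
        scope="titles",
        case_sensitive=False,
        dry_run=True,
    )
    if not err and preview["total_replacements"] > 0:
        result, err = find_replace_in_spec(
            "user-auth-2024-06-01",
            find="auth service",
            replace="identity service",
            scope="titles",
            case_sensitive=False,
        )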
+ # Completeness check constants
+ _CC_WEIGHT_TITLES = 0.20
+ _CC_WEIGHT_DESCRIPTIONS = 0.30
+ _CC_WEIGHT_FILE_PATHS = 0.25
+ _CC_WEIGHT_ESTIMATES = 0.25
+
+
+ def check_spec_completeness(
+     spec_id: str,
+     *,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Check spec completeness and calculate a score (0-100).
+
+     Evaluates spec quality by checking for:
+     - Empty titles
+     - Missing task descriptions
+     - Missing file_path for implementation/refactoring tasks
+     - Missing estimated_hours
+
+     Args:
+         spec_id: Specification ID to check.
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "completeness_score": ..., ...}, None)
+         On failure: (None, "error message")
+     """
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+     if specs_dir is None:
+         return None, "No specs directory found"
+
+     # Load spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if not spec_path:
+         return None, f"Specification '{spec_id}' not found"
+     spec_data = load_spec(spec_id, specs_dir)
+     if not spec_data:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     hierarchy = spec_data.get("hierarchy", {})
+     if not hierarchy:
+         return {
+             "spec_id": spec_id,
+             "completeness_score": 100,
+             "categories": {},
+             "issues": [],
+             "message": "No hierarchy nodes to check",
+         }, None
+
+     # Helper functions
+     def _nonempty_string(value: Any) -> bool:
+         return isinstance(value, str) and bool(value.strip())
+
+     def _has_description(metadata: Dict[str, Any]) -> bool:
+         if _nonempty_string(metadata.get("description")):
+             return True
+         details = metadata.get("details")
+         if _nonempty_string(details):
+             return True
+         if isinstance(details, list):
+             return any(_nonempty_string(item) for item in details)
+         return False
+
+     # Tracking
+     issues: List[Dict[str, Any]] = []
+     categories: Dict[str, Dict[str, Any]] = {
+         "titles": {"complete": 0, "total": 0, "score": 0.0},
+         "descriptions": {"complete": 0, "total": 0, "score": 0.0},
+         "file_paths": {"complete": 0, "total": 0, "score": 0.0},
+         "estimates": {"complete": 0, "total": 0, "score": 0.0},
+     }
+
+     # Check each node
+     for node_id, node in hierarchy.items():
+         if node_id == "spec-root":
+             continue
+         if not isinstance(node, dict):
+             continue
+
+         node_type = node.get("type", "")
+         title = node.get("title", "")
+         metadata = node.get("metadata", {})
+         if not isinstance(metadata, dict):
+             metadata = {}
+
+         # Check title (all nodes)
+         categories["titles"]["total"] += 1
+         if _nonempty_string(title):
+             categories["titles"]["complete"] += 1
+         else:
+             issues.append({
+                 "node_id": node_id,
+                 "category": "titles",
+                 "message": "Empty or missing title",
+             })
+
+         # Check description (task and verify nodes only)
+         if node_type in ("task", "verify"):
+             categories["descriptions"]["total"] += 1
+             if _has_description(metadata):
+                 categories["descriptions"]["complete"] += 1
+             else:
+                 issues.append({
+                     "node_id": node_id,
+                     "category": "descriptions",
+                     "message": "Missing description",
+                 })
+
+         # Check file_path (implementation/refactoring tasks only)
+         task_category = metadata.get("task_category", "")
+         if task_category in ("implementation", "refactoring"):
+             categories["file_paths"]["total"] += 1
+             if _nonempty_string(metadata.get("file_path")):
+                 categories["file_paths"]["complete"] += 1
+             else:
+                 issues.append({
+                     "node_id": node_id,
+                     "category": "file_paths",
+                     "message": "Missing file_path for implementation task",
+                 })
+
+         # Check estimated_hours (tasks only)
+         if node_type == "task":
+             categories["estimates"]["total"] += 1
+             est = metadata.get("estimated_hours")
+             if isinstance(est, (int, float)) and est > 0:
+                 categories["estimates"]["complete"] += 1
+             else:
+                 issues.append({
+                     "node_id": node_id,
+                     "category": "estimates",
+                     "message": "Missing or invalid estimated_hours",
+                 })
+
+     # Calculate category scores
+     for cat_data in categories.values():
+         if cat_data["total"] > 0:
+             cat_data["score"] = round(cat_data["complete"] / cat_data["total"], 2)
+         else:
+             cat_data["score"] = 1.0  # No items to check = complete
+
+     # Calculate weighted completeness score
+     weighted_score = 0.0
+     total_weight = 0.0
+
+     if categories["titles"]["total"] > 0:
+         weighted_score += categories["titles"]["score"] * _CC_WEIGHT_TITLES
+         total_weight += _CC_WEIGHT_TITLES
+
+     if categories["descriptions"]["total"] > 0:
+         weighted_score += categories["descriptions"]["score"] * _CC_WEIGHT_DESCRIPTIONS
+         total_weight += _CC_WEIGHT_DESCRIPTIONS
+
+     if categories["file_paths"]["total"] > 0:
+         weighted_score += categories["file_paths"]["score"] * _CC_WEIGHT_FILE_PATHS
+         total_weight += _CC_WEIGHT_FILE_PATHS
+
+     if categories["estimates"]["total"] > 0:
+         weighted_score += categories["estimates"]["score"] * _CC_WEIGHT_ESTIMATES
+         total_weight += _CC_WEIGHT_ESTIMATES
+
+     # Normalize score
+     if total_weight > 0:
+         completeness_score = int(round((weighted_score / total_weight) * 100))
+     else:
+         completeness_score = 100  # Nothing to check
+
+     return {
+         "spec_id": spec_id,
+         "completeness_score": completeness_score,
+         "categories": categories,
+         "issues": issues,
+         "issue_count": len(issues),
+     }, None
+
+
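To make the weighting concrete: a spec with 10/10 titles, 6/8 descriptions, 2/4 file paths, and 0/8 estimates has all four categories populated, so total_weight = 1.0 and the score is 0.20*1.00 + 0.30*0.75 + 0.25*0.50 + 0.25*0.00 = 0.55, reported as 55. A category with no applicable nodes contributes to neither weighted_score nor total_weight, so it neither helps nor hurts the score.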
+ # Duplicate detection constants
+ _DD_DEFAULT_THRESHOLD = 0.8
+ _DD_MAX_PAIRS = 100
+ _DD_VALID_SCOPES = {"titles", "descriptions", "both"}
+
+
+ def detect_duplicate_tasks(
+     spec_id: str,
+     *,
+     scope: str = "titles",
+     threshold: float = _DD_DEFAULT_THRESHOLD,
+     max_pairs: int = _DD_MAX_PAIRS,
+     specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+     """
+     Detect duplicate or near-duplicate tasks in a spec.
+
+     Uses text similarity to find tasks with similar titles or descriptions.
+
+     Args:
+         spec_id: Specification ID to check.
+         scope: What to compare - "titles", "descriptions", or "both".
+         threshold: Similarity threshold (0.0-1.0). Default 0.8.
+         max_pairs: Maximum duplicate pairs to return. Default 100.
+         specs_dir: Path to specs directory (auto-detected if not provided).
+
+     Returns:
+         Tuple of (result_dict, error_message).
+         On success: ({"spec_id": ..., "duplicates": [...], ...}, None)
+         On failure: (None, "error message")
+     """
+     from difflib import SequenceMatcher
+
+     # Validate scope
+     if scope not in _DD_VALID_SCOPES:
+         return None, f"scope must be one of: {sorted(_DD_VALID_SCOPES)}"
+
+     # Validate threshold
+     if not isinstance(threshold, (int, float)) or not 0.0 <= threshold <= 1.0:
+         return None, "threshold must be a number between 0.0 and 1.0"
+
+     # Validate max_pairs
+     if not isinstance(max_pairs, int) or max_pairs <= 0:
+         return None, "max_pairs must be a positive integer"
+
+     # Find specs directory
+     if specs_dir is None:
+         specs_dir = find_specs_directory()
+     if specs_dir is None:
+         return None, "No specs directory found"
+
+     # Load spec
+     spec_path = find_spec_file(spec_id, specs_dir)
+     if not spec_path:
+         return None, f"Specification '{spec_id}' not found"
+     spec_data = load_spec(spec_id, specs_dir)
+     if not spec_data:
+         return None, f"Failed to load specification '{spec_id}'"
+
+     hierarchy = spec_data.get("hierarchy", {})
+     if not hierarchy:
+         return {
+             "spec_id": spec_id,
+             "duplicates": [],
+             "duplicate_count": 0,
+             "scope": scope,
+             "threshold": threshold,
+             "message": "No hierarchy nodes to check",
+         }, None
+
+     # Collect task/verify nodes with their text
+     nodes: List[Dict[str, Any]] = []
+     for node_id, node in hierarchy.items():
+         if node_id == "spec-root":
+             continue
+         if not isinstance(node, dict):
+             continue
+         node_type = node.get("type", "")
+         if node_type not in ("task", "verify"):
+             continue
+
+         title = node.get("title", "") or ""
+         metadata = node.get("metadata", {})
+         if not isinstance(metadata, dict):
+             metadata = {}
+         description = metadata.get("description", "") or ""
+
+         nodes.append({
+             "id": node_id,
+             "title": title.strip().lower(),
+             "description": description.strip().lower(),
+         })
+
+     # Compare pairs
+     duplicates: List[Dict[str, Any]] = []
+     truncated = False
+     total_compared = 0
+
+     def similarity(a: str, b: str) -> float:
+         if not a or not b:
+             return 0.0
+         return SequenceMatcher(None, a, b).ratio()
+
+     for i, node_a in enumerate(nodes):
+         if len(duplicates) >= max_pairs:
+             truncated = True
+             break
+         for node_b in nodes[i + 1:]:
+             if len(duplicates) >= max_pairs:
+                 truncated = True
+                 break
+             # Count the pair only once we actually compare it
+             total_compared += 1
+
+             # Calculate similarity based on scope
+             if scope == "titles":
+                 sim = similarity(node_a["title"], node_b["title"])
+             elif scope == "descriptions":
+                 sim = similarity(node_a["description"], node_b["description"])
+             else:  # both
+                 title_sim = similarity(node_a["title"], node_b["title"])
+                 desc_sim = similarity(node_a["description"], node_b["description"])
+                 sim = max(title_sim, desc_sim)
+
+             if sim >= threshold:
+                 duplicates.append({
+                     "node_a": node_a["id"],
+                     "node_b": node_b["id"],
+                     "similarity": round(sim, 2),
+                     "scope": scope,
+                 })
+
+     result: Dict[str, Any] = {
+         "spec_id": spec_id,
+         "duplicates": duplicates,
+         "duplicate_count": len(duplicates),
+         "scope": scope,
+         "threshold": threshold,
+         "nodes_checked": len(nodes),
+         "pairs_compared": total_compared,
+     }
+
+     if truncated:
+         result["truncated"] = True
+         result["warnings"] = [f"Results limited to {max_pairs} pairs"]
+
+     return result, None
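Typical use (spec_id illustrative); the comparison is O(n^2) over task/verify nodes, with max_pairs bounding only the returned pairs:

    result, err = detect_duplicate_tasks(
        "user-auth-2024-06-01",
        scope="both",
        threshold=0.85,
    )
    if not err:
        for pair in result["duplicates"]:
            print(pair["node_a"], "~", pair["node_b"], pair["similarity"])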