foundry-mcp 0.3.3__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. foundry_mcp/__init__.py +7 -1
  2. foundry_mcp/cli/commands/plan.py +10 -3
  3. foundry_mcp/cli/commands/review.py +19 -4
  4. foundry_mcp/cli/commands/specs.py +38 -208
  5. foundry_mcp/cli/output.py +3 -3
  6. foundry_mcp/config.py +235 -5
  7. foundry_mcp/core/ai_consultation.py +146 -9
  8. foundry_mcp/core/discovery.py +6 -6
  9. foundry_mcp/core/error_store.py +2 -2
  10. foundry_mcp/core/intake.py +933 -0
  11. foundry_mcp/core/llm_config.py +20 -2
  12. foundry_mcp/core/metrics_store.py +2 -2
  13. foundry_mcp/core/progress.py +70 -0
  14. foundry_mcp/core/prompts/fidelity_review.py +149 -4
  15. foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
  16. foundry_mcp/core/prompts/plan_review.py +5 -1
  17. foundry_mcp/core/providers/claude.py +6 -47
  18. foundry_mcp/core/providers/codex.py +6 -57
  19. foundry_mcp/core/providers/cursor_agent.py +3 -44
  20. foundry_mcp/core/providers/gemini.py +6 -57
  21. foundry_mcp/core/providers/opencode.py +35 -5
  22. foundry_mcp/core/research/__init__.py +68 -0
  23. foundry_mcp/core/research/memory.py +425 -0
  24. foundry_mcp/core/research/models.py +437 -0
  25. foundry_mcp/core/research/workflows/__init__.py +22 -0
  26. foundry_mcp/core/research/workflows/base.py +204 -0
  27. foundry_mcp/core/research/workflows/chat.py +271 -0
  28. foundry_mcp/core/research/workflows/consensus.py +396 -0
  29. foundry_mcp/core/research/workflows/ideate.py +682 -0
  30. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  31. foundry_mcp/core/responses.py +450 -0
  32. foundry_mcp/core/spec.py +2438 -236
  33. foundry_mcp/core/task.py +1064 -19
  34. foundry_mcp/core/testing.py +512 -123
  35. foundry_mcp/core/validation.py +313 -42
  36. foundry_mcp/dashboard/components/charts.py +0 -57
  37. foundry_mcp/dashboard/launcher.py +11 -0
  38. foundry_mcp/dashboard/views/metrics.py +25 -35
  39. foundry_mcp/dashboard/views/overview.py +1 -65
  40. foundry_mcp/resources/specs.py +25 -25
  41. foundry_mcp/schemas/intake-schema.json +89 -0
  42. foundry_mcp/schemas/sdd-spec-schema.json +33 -5
  43. foundry_mcp/server.py +38 -0
  44. foundry_mcp/tools/unified/__init__.py +4 -2
  45. foundry_mcp/tools/unified/authoring.py +2423 -267
  46. foundry_mcp/tools/unified/documentation_helpers.py +69 -6
  47. foundry_mcp/tools/unified/environment.py +235 -6
  48. foundry_mcp/tools/unified/error.py +18 -1
  49. foundry_mcp/tools/unified/lifecycle.py +8 -0
  50. foundry_mcp/tools/unified/plan.py +113 -1
  51. foundry_mcp/tools/unified/research.py +658 -0
  52. foundry_mcp/tools/unified/review.py +370 -16
  53. foundry_mcp/tools/unified/spec.py +367 -0
  54. foundry_mcp/tools/unified/task.py +1163 -48
  55. foundry_mcp/tools/unified/test.py +69 -8
  56. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/METADATA +7 -1
  57. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/RECORD +60 -48
  58. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/WHEEL +0 -0
  59. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/entry_points.txt +0 -0
  60. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/licenses/LICENSE +0 -0
foundry_mcp/core/spec.py CHANGED
@@ -9,16 +9,59 @@ import shutil
 import subprocess
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Optional, Dict, Any, List, Tuple
+from typing import Optional, Dict, Any, List, Tuple, Union
 
 # Valid templates and categories for spec creation
-TEMPLATES = ("simple", "medium", "complex", "security")
+# Note: Only 'empty' template is supported. Use phase templates to add structure.
+TEMPLATES = ("empty",)
+TEMPLATE_DESCRIPTIONS = {
+    "empty": "Blank spec with no phases - use phase templates to add structure",
+}
 CATEGORIES = ("investigation", "implementation", "refactoring", "decision", "research")
 
 # Valid verification types for verify nodes
-# - test: Automated tests via mcp__foundry-mcp__test-run
+# - run-tests: Automated tests via mcp__foundry-mcp__test-run
 # - fidelity: Implementation-vs-spec comparison via mcp__foundry-mcp__spec-review-fidelity
-VERIFICATION_TYPES = ("test", "fidelity")
+# - manual: Manual verification steps
+VERIFICATION_TYPES = ("run-tests", "fidelity", "manual")
+
+# Valid phase templates for reusable phase structures
+PHASE_TEMPLATES = ("planning", "implementation", "testing", "security", "documentation")
+
+
+def _requires_rich_task_fields(spec_data: Dict[str, Any]) -> bool:
+    """Check if spec requires rich task fields based on explicit complexity metadata."""
+    metadata = spec_data.get("metadata", {})
+    if not isinstance(metadata, dict):
+        return False
+
+    # Only check explicit complexity metadata (template no longer indicates complexity)
+    complexity = metadata.get("complexity")
+    if isinstance(complexity, str) and complexity.strip().lower() in {
+        "medium",
+        "complex",
+        "high",
+    }:
+        return True
+
+    return False
+
+
+def _normalize_acceptance_criteria(value: Any) -> Optional[List[str]]:
+    if value is None:
+        return None
+    if isinstance(value, str):
+        cleaned = value.strip()
+        return [cleaned] if cleaned else []
+    if isinstance(value, list):
+        cleaned_items = []
+        for item in value:
+            if isinstance(item, str):
+                cleaned = item.strip()
+                if cleaned:
+                    cleaned_items.append(cleaned)
+        return cleaned_items
+    return []
 
 
 def find_git_root() -> Optional[Path]:
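
The new `_normalize_acceptance_criteria` helper coalesces a string or list into a clean list of criteria, returning `None` only when the input is `None` so callers can tell "not provided" apart from "provided but empty". A minimal sketch of the behavior implied by the code above (the helper is private, so the import is for illustration only):

```python
from foundry_mcp.core.spec import _normalize_acceptance_criteria

# A string is wrapped in a single-item list; a blank string collapses to [].
assert _normalize_acceptance_criteria("  passes CI  ") == ["passes CI"]
assert _normalize_acceptance_criteria("   ") == []

# A list is filtered down to stripped, non-empty strings.
assert _normalize_acceptance_criteria(["a", "", 42, " b "]) == ["a", "b"]

# None passes through, distinguishing "absent" from "empty".
assert _normalize_acceptance_criteria(None) is None
```
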
@@ -161,6 +204,36 @@ def resolve_spec_file(
     return find_spec_file(search_name, specs_dir)
 
 
+def _migrate_spec_fields(spec_data: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Migrate spec from dual-field format to canonical format.
+
+    Moves status, progress_percentage, and current_phase from metadata
+    to top-level (their canonical location). This handles specs created
+    before the field deduplication.
+
+    Args:
+        spec_data: Spec data dictionary (modified in place)
+
+    Returns:
+        The modified spec_data
+    """
+    if not spec_data:
+        return spec_data
+
+    metadata = spec_data.get("metadata", {})
+    computed_fields = ("status", "progress_percentage", "current_phase")
+
+    for field in computed_fields:
+        # If field exists in metadata but not at top-level, migrate it
+        if field in metadata and field not in spec_data:
+            spec_data[field] = metadata[field]
+        # Remove from metadata (canonical location is top-level)
+        metadata.pop(field, None)
+
+    return spec_data
+
+
 def load_spec(
     spec_id: str, specs_dir: Optional[Path] = None
 ) -> Optional[Dict[str, Any]]:
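
`_migrate_spec_fields` is what lets older dual-field specs load cleanly: computed fields are hoisted out of `metadata` to the top level (and dropped from `metadata` either way). A small illustration of the in-place migration, using a hypothetical spec dict:

```python
spec = {
    "metadata": {"status": "in_progress", "progress_percentage": 40, "owner": "me"},
}
_migrate_spec_fields(spec)

# Computed fields now live at the top level; unrelated metadata is untouched.
assert spec["status"] == "in_progress"
assert spec["progress_percentage"] == 40
assert spec["metadata"] == {"owner": "me"}
```
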
@@ -181,7 +254,9 @@ def load_spec(
 
     try:
         with open(spec_file, "r") as f:
-            return json.load(f)
+            spec_data = json.load(f)
+            # Migrate old specs to canonical field locations
+            return _migrate_spec_fields(spec_data)
     except (json.JSONDecodeError, IOError):
         return None
 
@@ -234,13 +309,34 @@ def save_spec(
         return False
 
 
-def backup_spec(spec_id: str, specs_dir: Optional[Path] = None) -> Optional[Path]:
+# Default retention policy for versioned backups
+DEFAULT_MAX_BACKUPS = 10
+
+
+def backup_spec(
+    spec_id: str,
+    specs_dir: Optional[Path] = None,
+    max_backups: int = DEFAULT_MAX_BACKUPS,
+) -> Optional[Path]:
     """
-    Create a backup copy of the JSON spec file in the .backups/ directory.
+    Create a versioned backup of the JSON spec file.
+
+    Creates timestamped backups in .backups/{spec_id}/ directory with a
+    configurable retention policy. Also maintains a latest.json copy for
+    quick access to the most recent backup.
+
+    Directory structure:
+        .backups/
+        └── {spec_id}/
+            ├── 2025-12-26T18-20-13.456789.json   # Timestamped backups (μs precision)
+            ├── 2025-12-26T18-30-45.123456.json
+            └── latest.json                       # Copy of most recent
 
     Args:
         spec_id: Specification ID or path to spec file
         specs_dir: Path to specs directory (optional, auto-detected if not provided)
+        max_backups: Maximum number of versioned backups to retain (default: 10).
+            Set to 0 for unlimited backups.
 
     Returns:
         Path to backup file if created, None otherwise
@@ -256,18 +352,556 @@ def backup_spec(spec_id: str, specs_dir: Optional[Path] = None) -> Optional[Path
     if not specs_dir:
         return None
 
-    backups_dir = specs_dir / ".backups"
-    backups_dir.mkdir(parents=True, exist_ok=True)
+    # Create versioned backup directory: .backups/{spec_id}/
+    spec_backups_dir = specs_dir / ".backups" / spec_id
+    spec_backups_dir.mkdir(parents=True, exist_ok=True)
 
-    backup_file = backups_dir / f"{spec_id}.backup"
+    # Generate timestamp filename (ISO format with safe characters)
+    # Include full microseconds to handle rapid successive saves
+    now = datetime.now(timezone.utc)
+    timestamp = now.strftime("%Y-%m-%dT%H-%M-%S")
+    micros = now.strftime("%f")  # Full 6-digit microseconds
+    backup_file = spec_backups_dir / f"{timestamp}.{micros}.json"
 
     try:
+        # Create the timestamped backup
         shutil.copy2(spec_file, backup_file)
+
+        # Update latest.json to point to the newest backup
+        latest_file = spec_backups_dir / "latest.json"
+        shutil.copy2(backup_file, latest_file)
+
+        # Apply retention policy
+        if max_backups > 0:
+            _apply_backup_retention(spec_backups_dir, max_backups)
+
         return backup_file
     except (IOError, OSError):
         return None
 
 
+def _apply_backup_retention(backups_dir: Path, max_backups: int) -> int:
+    """
+    Apply retention policy by removing oldest backups exceeding the limit.
+
+    Args:
+        backups_dir: Path to the spec's backup directory
+        max_backups: Maximum number of backups to retain
+
+    Returns:
+        Number of backups deleted
+    """
+    # List all timestamped backup files (exclude latest.json)
+    backup_files = sorted(
+        [
+            f for f in backups_dir.glob("*.json")
+            if f.name != "latest.json" and f.is_file()
+        ],
+        key=lambda p: p.name,  # Sort by filename (timestamp order)
+    )
+
+    deleted_count = 0
+    while len(backup_files) > max_backups:
+        oldest = backup_files.pop(0)
+        try:
+            oldest.unlink()
+            deleted_count += 1
+        except (IOError, OSError):
+            pass  # Best effort deletion
+
+    return deleted_count
+
+
+# Default pagination settings for backup listing
+DEFAULT_BACKUP_PAGE_SIZE = 50
+MAX_BACKUP_PAGE_SIZE = 100
+
+
+def list_spec_backups(
+    spec_id: str,
+    specs_dir: Optional[Path] = None,
+    cursor: Optional[str] = None,
+    limit: Optional[int] = None,
+) -> Dict[str, Any]:
+    """
+    List backups for a spec with cursor-based pagination.
+
+    Lists timestamped backup files chronologically (newest first) from the
+    .backups/{spec_id}/ directory. Returns file metadata including timestamp,
+    path, and size. Designed for use with spec.history action.
+
+    Args:
+        spec_id: Specification ID to list backups for
+        specs_dir: Base specs directory (uses find_specs_directory if None)
+        cursor: Pagination cursor from previous call (base64-encoded JSON)
+        limit: Maximum backups per page (default: 50, max: 100)
+
+    Returns:
+        Dict with structure:
+        {
+            "spec_id": str,
+            "backups": [
+                {
+                    "timestamp": str,        # ISO-ish format from filename
+                    "file_path": str,        # Absolute path to backup file
+                    "file_size_bytes": int   # File size
+                },
+                ...
+            ],
+            "count": int,
+            "pagination": {
+                "cursor": Optional[str],
+                "has_more": bool,
+                "page_size": int
+            }
+        }
+
+    Returns empty backups list if spec or backup directory doesn't exist.
+    """
+    # Import pagination helpers
+    from foundry_mcp.core.pagination import (
+        CursorError,
+        decode_cursor,
+        encode_cursor,
+        normalize_page_size,
+    )
+
+    # Resolve specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    # Normalize page size
+    page_size = normalize_page_size(
+        limit, default=DEFAULT_BACKUP_PAGE_SIZE, maximum=MAX_BACKUP_PAGE_SIZE
+    )
+
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "backups": [],
+        "count": 0,
+        "pagination": {
+            "cursor": None,
+            "has_more": False,
+            "page_size": page_size,
+        },
+    }
+
+    if not specs_dir:
+        return result
+
+    # Locate backup directory: .backups/{spec_id}/
+    backups_dir = specs_dir / ".backups" / spec_id
+    if not backups_dir.is_dir():
+        return result
+
+    # List all timestamped backup files (exclude latest.json)
+    backup_files = sorted(
+        [
+            f
+            for f in backups_dir.glob("*.json")
+            if f.name != "latest.json" and f.is_file()
+        ],
+        key=lambda p: p.name,
+        reverse=True,  # Newest first
+    )
+
+    if not backup_files:
+        return result
+
+    # Handle cursor-based pagination
+    start_after_timestamp: Optional[str] = None
+    if cursor:
+        try:
+            cursor_data = decode_cursor(cursor)
+            start_after_timestamp = cursor_data.get("last_id")
+        except CursorError:
+            # Invalid cursor - return from beginning
+            pass
+
+    # Find start position based on cursor
+    if start_after_timestamp:
+        start_index = 0
+        for idx, backup_file in enumerate(backup_files):
+            # Filename without extension is the timestamp
+            timestamp = backup_file.stem
+            if timestamp == start_after_timestamp:
+                start_index = idx + 1
+                break
+        backup_files = backup_files[start_index:]
+
+    # Fetch one extra to check for more pages
+    page_files = backup_files[: page_size + 1]
+    has_more = len(page_files) > page_size
+    if has_more:
+        page_files = page_files[:page_size]
+
+    # Build backup entries with metadata
+    backups = []
+    for backup_file in page_files:
+        try:
+            file_stat = backup_file.stat()
+            backups.append(
+                {
+                    "timestamp": backup_file.stem,
+                    "file_path": str(backup_file.absolute()),
+                    "file_size_bytes": file_stat.st_size,
+                }
+            )
+        except OSError:
+            # Skip files we can't stat
+            continue
+
+    # Generate next cursor if more pages exist
+    next_cursor = None
+    if has_more and backups:
+        next_cursor = encode_cursor({"last_id": backups[-1]["timestamp"]})
+
+    result["backups"] = backups
+    result["count"] = len(backups)
+    result["pagination"] = {
+        "cursor": next_cursor,
+        "has_more": has_more,
+        "page_size": page_size,
+    }
+
+    return result
+
+
+# Default settings for diff operations
+DEFAULT_DIFF_MAX_RESULTS = 100
+
+
+def _load_spec_source(
+    source: Union[str, Path, Dict[str, Any]],
+    specs_dir: Optional[Path] = None,
+) -> Optional[Dict[str, Any]]:
+    """
+    Load a spec from various source types.
+
+    Args:
+        source: Spec ID, file path, or already-loaded dict
+        specs_dir: Base specs directory for ID lookups
+
+    Returns:
+        Loaded spec dict, or None if not found/invalid
+    """
+    # Already a dict - return as-is
+    if isinstance(source, dict):
+        return source
+
+    # Path object or string path
+    source_path = Path(source) if isinstance(source, str) else source
+
+    # If it's an existing file path, load directly
+    if source_path.is_file():
+        try:
+            with open(source_path, "r") as f:
+                return json.load(f)
+        except (IOError, json.JSONDecodeError):
+            return None
+
+    # Otherwise treat as spec_id and use resolve_spec_file
+    if isinstance(source, str):
+        return load_spec(source, specs_dir)
+
+    return None
+
+
+def _diff_node(
+    old_node: Dict[str, Any],
+    new_node: Dict[str, Any],
+    node_id: str,
+) -> Optional[Dict[str, Any]]:
+    """
+    Compare two nodes and return field-level changes.
+
+    Args:
+        old_node: Original node data
+        new_node: Updated node data
+        node_id: Node identifier for the result
+
+    Returns:
+        Dict with node info and field_changes list, or None if no changes
+    """
+    # Fields to compare (excluding computed/transient fields)
+    compare_fields = ["title", "status", "type", "parent", "children", "metadata", "dependencies"]
+
+    field_changes = []
+    for field in compare_fields:
+        old_val = old_node.get(field)
+        new_val = new_node.get(field)
+
+        if old_val != new_val:
+            field_changes.append({
+                "field": field,
+                "old": old_val,
+                "new": new_val,
+            })
+
+    if not field_changes:
+        return None
+
+    return {
+        "node_id": node_id,
+        "type": new_node.get("type", old_node.get("type")),
+        "title": new_node.get("title", old_node.get("title")),
+        "field_changes": field_changes,
+    }
+
+
+def diff_specs(
+    source: Union[str, Path, Dict[str, Any]],
+    target: Union[str, Path, Dict[str, Any]],
+    specs_dir: Optional[Path] = None,
+    max_results: Optional[int] = None,
+) -> Dict[str, Any]:
+    """
+    Compare two specs and categorize changes as added, removed, or modified.
+
+    Compares hierarchy nodes between source (base/older) and target (comparison/newer)
+    specs, identifying structural and content changes at the task level.
+
+    Args:
+        source: Base spec - spec_id, file path (including backup), or loaded dict
+        target: Comparison spec - spec_id, file path, or loaded dict
+        specs_dir: Base specs directory (auto-detected if None)
+        max_results: Maximum changes to return per category (default: 100)
+
+    Returns:
+        Dict with structure:
+        {
+            "summary": {
+                "added_count": int,
+                "removed_count": int,
+                "modified_count": int,
+                "total_changes": int
+            },
+            "changes": {
+                "added": [{"node_id": str, "type": str, "title": str}, ...],
+                "removed": [{"node_id": str, "type": str, "title": str}, ...],
+                "modified": [{
+                    "node_id": str,
+                    "type": str,
+                    "title": str,
+                    "field_changes": [{"field": str, "old": Any, "new": Any}, ...]
+                }, ...]
+            },
+            "partial": bool,  # True if results truncated
+            "source_spec_id": Optional[str],
+            "target_spec_id": Optional[str]
+        }
+
+    Returns error structure if specs cannot be loaded:
+        {"error": str, "success": False}
+    """
+    # Resolve specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    # Load source spec
+    source_spec = _load_spec_source(source, specs_dir)
+    if source_spec is None:
+        return {
+            "error": f"Could not load source spec: {source}",
+            "success": False,
+        }
+
+    # Load target spec
+    target_spec = _load_spec_source(target, specs_dir)
+    if target_spec is None:
+        return {
+            "error": f"Could not load target spec: {target}",
+            "success": False,
+        }
+
+    # Get hierarchies
+    source_hierarchy = source_spec.get("hierarchy", {})
+    target_hierarchy = target_spec.get("hierarchy", {})
+
+    source_ids = set(source_hierarchy.keys())
+    target_ids = set(target_hierarchy.keys())
+
+    # Categorize changes
+    added_ids = target_ids - source_ids
+    removed_ids = source_ids - target_ids
+    common_ids = source_ids & target_ids
+
+    # Apply max_results limit
+    limit = max_results if max_results is not None else DEFAULT_DIFF_MAX_RESULTS
+    partial = False
+
+    # Build added list
+    added = []
+    for node_id in sorted(added_ids):
+        if len(added) >= limit:
+            partial = True
+            break
+        node = target_hierarchy[node_id]
+        added.append({
+            "node_id": node_id,
+            "type": node.get("type"),
+            "title": node.get("title"),
+        })
+
+    # Build removed list
+    removed = []
+    for node_id in sorted(removed_ids):
+        if len(removed) >= limit:
+            partial = True
+            break
+        node = source_hierarchy[node_id]
+        removed.append({
+            "node_id": node_id,
+            "type": node.get("type"),
+            "title": node.get("title"),
+        })
+
+    # Build modified list
+    modified = []
+    for node_id in sorted(common_ids):
+        if len(modified) >= limit:
+            partial = True
+            break
+        old_node = source_hierarchy[node_id]
+        new_node = target_hierarchy[node_id]
+        diff = _diff_node(old_node, new_node, node_id)
+        if diff:
+            modified.append(diff)
+
+    # Calculate actual counts (may exceed displayed if partial)
+    total_added = len(added_ids)
+    total_removed = len(removed_ids)
+    total_modified = sum(
+        1 for nid in common_ids
+        if _diff_node(source_hierarchy[nid], target_hierarchy[nid], nid)
+    ) if not partial else len(modified)  # Only count all if not already partial
+
+    return {
+        "summary": {
+            "added_count": total_added,
+            "removed_count": total_removed,
+            "modified_count": total_modified if not partial else len(modified),
+            "total_changes": total_added + total_removed + (total_modified if not partial else len(modified)),
+        },
+        "changes": {
+            "added": added,
+            "removed": removed,
+            "modified": modified,
+        },
+        "partial": partial,
+        "source_spec_id": source_spec.get("spec_id"),
+        "target_spec_id": target_spec.get("spec_id"),
+    }
+
+
+def rollback_spec(
+    spec_id: str,
+    timestamp: str,
+    specs_dir: Optional[Path] = None,
+    dry_run: bool = False,
+    create_backup: bool = True,
+) -> Dict[str, Any]:
+    """
+    Restore a spec from a specific backup timestamp.
+
+    Creates a safety backup of the current state before rollback (by default),
+    then replaces the spec file with the contents from the specified backup.
+
+    Args:
+        spec_id: Specification ID to rollback
+        timestamp: Backup timestamp to restore (e.g., "2025-12-26T18-20-13.456789")
+        specs_dir: Base specs directory (auto-detected if None)
+        dry_run: If True, validate and return what would happen without changes
+        create_backup: If True (default), create safety backup before rollback
+
+    Returns:
+        Dict with structure:
+        {
+            "success": bool,
+            "spec_id": str,
+            "timestamp": str,
+            "dry_run": bool,
+            "backup_created": Optional[str],  # Safety backup path
+            "restored_from": str,             # Source backup path
+            "error": Optional[str]            # Error if failed
+        }
+    """
+    # Resolve specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    result: Dict[str, Any] = {
+        "success": False,
+        "spec_id": spec_id,
+        "timestamp": timestamp,
+        "dry_run": dry_run,
+        "backup_created": None,
+        "restored_from": None,
+        "error": None,
+    }
+
+    if not specs_dir:
+        result["error"] = "Could not find specs directory"
+        return result
+
+    # Find current spec file
+    spec_file = find_spec_file(spec_id, specs_dir)
+    if not spec_file:
+        result["error"] = f"Spec '{spec_id}' not found"
+        return result
+
+    # Locate backup directory
+    backups_dir = specs_dir / ".backups" / spec_id
+    if not backups_dir.is_dir():
+        result["error"] = f"No backups directory for spec '{spec_id}'"
+        return result
+
+    # Find the backup file matching the timestamp
+    backup_file = backups_dir / f"{timestamp}.json"
+    if not backup_file.is_file():
+        result["error"] = f"Backup not found for timestamp '{timestamp}'"
+        return result
+
+    result["restored_from"] = str(backup_file)
+
+    # Validate backup is valid JSON
+    try:
+        with open(backup_file, "r") as f:
+            backup_data = json.load(f)
+            if not isinstance(backup_data, dict):
+                result["error"] = "Backup file is not a valid spec (not a JSON object)"
+                return result
+    except json.JSONDecodeError as e:
+        result["error"] = f"Backup file is not valid JSON: {e}"
+        return result
+    except IOError as e:
+        result["error"] = f"Could not read backup file: {e}"
+        return result
+
+    # dry_run - return success without making changes
+    if dry_run:
+        result["success"] = True
+        if create_backup:
+            result["backup_created"] = "(would be created)"
+        return result
+
+    # Create safety backup of current state before rollback
+    if create_backup:
+        safety_backup = backup_spec(spec_id, specs_dir)
+        if safety_backup:
+            result["backup_created"] = str(safety_backup)
+
+    # Perform rollback - copy backup to spec location
+    try:
+        shutil.copy2(backup_file, spec_file)
+        result["success"] = True
+    except (IOError, OSError) as e:
+        result["error"] = f"Failed to restore backup: {e}"
+        return result
+
+    return result
+
+
+
 def _validate_spec_structure(spec_data: Dict[str, Any]) -> bool:
     """
     Validate basic JSON spec file structure.
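
Taken together, these additions form a complete backup workflow: `backup_spec` writes timestamped snapshots, `list_spec_backups` pages through them, `diff_specs` compares any two states, and `rollback_spec` restores one. A hedged end-to-end sketch (the spec ID `my-spec` is hypothetical, and results depend on your specs directory):

```python
from foundry_mcp.core.spec import (
    backup_spec,
    diff_specs,
    list_spec_backups,
    rollback_spec,
)

# Snapshot the current state; retention keeps the ten newest by default.
backup_path = backup_spec("my-spec", max_backups=10)

# Page through the history, newest first.
page = list_spec_backups("my-spec", limit=20)
for entry in page["backups"]:
    print(entry["timestamp"], entry["file_size_bytes"])

if page["backups"]:
    # Compare the most recent backup against the live spec.
    report = diff_specs(page["backups"][0]["file_path"], "my-spec")
    print(report["summary"])

    # Preview a restore without touching the file, then apply it.
    ts = page["backups"][0]["timestamp"]
    preview = rollback_spec("my-spec", ts, dry_run=True)
    if preview["success"]:
        rollback_spec("my-spec", ts)
```
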
@@ -684,73 +1318,444 @@ def add_phase(
     }, None
 
 
-def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
+def add_phase_bulk(
+    spec_id: str,
+    phase_title: str,
+    tasks: List[Dict[str, Any]],
+    phase_description: Optional[str] = None,
+    phase_purpose: Optional[str] = None,
+    phase_estimated_hours: Optional[float] = None,
+    metadata_defaults: Optional[Dict[str, Any]] = None,
+    position: Optional[int] = None,
+    link_previous: bool = True,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
     """
-    Recursively collect all descendant node IDs for a given node.
+    Add a new phase with pre-defined tasks in a single atomic operation.
+
+    Creates a phase and all specified tasks/verify nodes without auto-generating
+    verification scaffolding. This enables creating complete phase structures
+    in one operation.
 
     Args:
-        hierarchy: The spec hierarchy dict
-        node_id: Starting node ID
+        spec_id: Specification ID to mutate.
+        phase_title: Phase title.
+        tasks: List of task definitions, each containing:
+            - type: "task" or "verify" (required)
+            - title: Task title (required)
+            - description: Optional description
+            - acceptance_criteria: Optional list of acceptance criteria
+            - task_category: Optional task category
+            - file_path: Optional associated file path
+            - estimated_hours: Optional time estimate
+            - verification_type: Optional verification type for verify tasks
+        phase_description: Optional phase description.
+        phase_purpose: Optional purpose/goal metadata string.
+        phase_estimated_hours: Optional estimated hours for the phase.
+        metadata_defaults: Optional defaults applied to tasks missing explicit values.
+            Supported keys: task_category, category, acceptance_criteria, estimated_hours
+        position: Optional zero-based insertion index in spec-root children.
+        link_previous: Whether to automatically block on the previous phase.
+        specs_dir: Specs directory override.
 
     Returns:
-        List of all descendant node IDs (not including the starting node)
+        Tuple of (result_dict, error_message).
+        On success: ({"phase_id": ..., "tasks_created": [...], ...}, None)
+        On failure: (None, "error message")
     """
-    descendants: List[str] = []
-    node = hierarchy.get(node_id)
-    if not node:
-        return descendants
+    # Validate required parameters
+    if not spec_id or not spec_id.strip():
+        return None, "Specification ID is required"
 
-    children = node.get("children", [])
-    if not isinstance(children, list):
-        return descendants
+    if not phase_title or not phase_title.strip():
+        return None, "Phase title is required"
 
-    for child_id in children:
-        descendants.append(child_id)
-        descendants.extend(_collect_descendants(hierarchy, child_id))
+    if not tasks or not isinstance(tasks, list) or len(tasks) == 0:
+        return None, "At least one task definition is required"
+
+    if phase_estimated_hours is not None and phase_estimated_hours < 0:
+        return None, "phase_estimated_hours must be non-negative"
+
+    phase_title = phase_title.strip()
+    defaults = metadata_defaults or {}
+
+    # Validate metadata_defaults values
+    if defaults:
+        default_est_hours = defaults.get("estimated_hours")
+        if default_est_hours is not None:
+            if not isinstance(default_est_hours, (int, float)) or default_est_hours < 0:
+                return None, "metadata_defaults.estimated_hours must be a non-negative number"
+        default_category = defaults.get("task_category")
+        if default_category is None:
+            default_category = defaults.get("category")
+        if default_category is not None and not isinstance(default_category, str):
+            return None, "metadata_defaults.task_category must be a string"
+        default_acceptance = defaults.get("acceptance_criteria")
+        if default_acceptance is not None and not isinstance(
+            default_acceptance, (list, str)
+        ):
+            return None, "metadata_defaults.acceptance_criteria must be a list of strings"
+        if isinstance(default_acceptance, list) and any(
+            not isinstance(item, str) for item in default_acceptance
+        ):
+            return None, "metadata_defaults.acceptance_criteria must be a list of strings"
+
+    # Validate each task definition
+    valid_task_types = {"task", "verify"}
+    for idx, task_def in enumerate(tasks):
+        if not isinstance(task_def, dict):
+            return None, f"Task at index {idx} must be a dictionary"
+
+        task_type = task_def.get("type")
+        if not task_type or task_type not in valid_task_types:
+            return None, f"Task at index {idx} must have type 'task' or 'verify'"
+
+        task_title = task_def.get("title")
+        if not task_title or not isinstance(task_title, str) or not task_title.strip():
+            return None, f"Task at index {idx} must have a non-empty title"
+
+        est_hours = task_def.get("estimated_hours")
+        if est_hours is not None:
+            if not isinstance(est_hours, (int, float)) or est_hours < 0:
+                return None, f"Task at index {idx} has invalid estimated_hours"
+
+        task_category = task_def.get("task_category")
+        if task_category is not None and not isinstance(task_category, str):
+            return None, f"Task at index {idx} has invalid task_category"
+
+        legacy_category = task_def.get("category")
+        if legacy_category is not None and not isinstance(legacy_category, str):
+            return None, f"Task at index {idx} has invalid category"
+
+        acceptance_criteria = task_def.get("acceptance_criteria")
+        if acceptance_criteria is not None and not isinstance(
+            acceptance_criteria, (list, str)
+        ):
+            return None, f"Task at index {idx} has invalid acceptance_criteria"
+        if isinstance(acceptance_criteria, list) and any(
+            not isinstance(item, str) for item in acceptance_criteria
+        ):
+            return None, f"Task at index {idx} acceptance_criteria must be a list of strings"
 
-    return descendants
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
 
+    if specs_dir is None:
+        return (
+            None,
+            "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+        )
 
-def _count_tasks_in_subtree(
-    hierarchy: Dict[str, Any], node_ids: List[str]
-) -> Tuple[int, int]:
-    """
-    Count total and completed tasks in a list of nodes.
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
 
-    Args:
-        hierarchy: The spec hierarchy dict
-        node_ids: List of node IDs to count
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
 
-    Returns:
-        Tuple of (total_count, completed_count)
-    """
-    total = 0
-    completed = 0
+    requires_rich_tasks = _requires_rich_task_fields(spec_data)
 
-    for node_id in node_ids:
-        node = hierarchy.get(node_id)
-        if not node:
-            continue
-        node_type = node.get("type")
-        if node_type in ("task", "subtask", "verify"):
-            total += 1
-            if node.get("status") == "completed":
-                completed += 1
+    hierarchy = spec_data.get("hierarchy", {})
+    spec_root = hierarchy.get("spec-root")
 
-    return total, completed
+    if spec_root is None:
+        return None, "Specification root node 'spec-root' not found"
 
+    if spec_root.get("type") not in {"spec", "root"}:
+        return None, "Specification root node has invalid type"
 
-def _remove_dependency_references(
-    hierarchy: Dict[str, Any], removed_ids: List[str]
-) -> None:
-    """
-    Remove references to deleted nodes from all dependency lists.
+    children = spec_root.get("children", []) or []
+    if not isinstance(children, list):
+        children = []
 
-    Args:
-        hierarchy: The spec hierarchy dict
-        removed_ids: List of node IDs being removed
-    """
-    removed_set = set(removed_ids)
+    insert_index = len(children)
+    if position is not None and position >= 0:
+        insert_index = min(position, len(children))
+
+    # Generate phase ID
+    phase_id, phase_num = _generate_phase_id(hierarchy)
+
+    # Build phase metadata
+    phase_metadata: Dict[str, Any] = {
+        "purpose": (phase_purpose.strip() if phase_purpose else ""),
+    }
+    if phase_description:
+        phase_metadata["description"] = phase_description.strip()
+    if phase_estimated_hours is not None:
+        phase_metadata["estimated_hours"] = phase_estimated_hours
+
+    # Create phase node (without children initially)
+    phase_node = {
+        "type": "phase",
+        "title": phase_title,
+        "status": "pending",
+        "parent": "spec-root",
+        "children": [],
+        "total_tasks": 0,
+        "completed_tasks": 0,
+        "metadata": phase_metadata,
+        "dependencies": {
+            "blocks": [],
+            "blocked_by": [],
+            "depends": [],
+        },
+    }
+
+    hierarchy[phase_id] = phase_node
+
+    # Insert phase into spec-root children
+    if insert_index == len(children):
+        children.append(phase_id)
+    else:
+        children.insert(insert_index, phase_id)
+    spec_root["children"] = children
+
+    # Link to previous phase if requested
+    linked_phase_id: Optional[str] = None
+    if link_previous and insert_index > 0 and insert_index == len(children) - 1:
+        candidate = children[insert_index - 1]
+        previous = hierarchy.get(candidate)
+        if previous and previous.get("type") == "phase":
+            linked_phase_id = candidate
+            prev_deps = previous.setdefault(
+                "dependencies",
+                {"blocks": [], "blocked_by": [], "depends": []},
+            )
+            blocks = prev_deps.setdefault("blocks", [])
+            if phase_id not in blocks:
+                blocks.append(phase_id)
+            phase_node["dependencies"]["blocked_by"].append(candidate)
+
+    def _nonempty_string(value: Any) -> bool:
+        return isinstance(value, str) and bool(value.strip())
+
+    def _extract_description(task_def: Dict[str, Any]) -> tuple[Optional[str], Any]:
+        description = task_def.get("description")
+        if _nonempty_string(description) and isinstance(description, str):
+            return "description", description.strip()
+        details = task_def.get("details")
+        if _nonempty_string(details) and isinstance(details, str):
+            return "details", details.strip()
+        if isinstance(details, list):
+            cleaned = [
+                item.strip()
+                for item in details
+                if isinstance(item, str) and item.strip()
+            ]
+            if cleaned:
+                return "details", cleaned
+        return None, None
+
+    # Create tasks under the phase
+    tasks_created: List[Dict[str, Any]] = []
+    task_counter = 0
+    verify_counter = 0
+
+    for task_def in tasks:
+        task_type = task_def["type"]
+        task_title = task_def["title"].strip()
+
+        # Generate task ID based on type
+        if task_type == "verify":
+            verify_counter += 1
+            task_id = f"verify-{phase_num}-{verify_counter}"
+        else:
+            task_counter += 1
+            task_id = f"task-{phase_num}-{task_counter}"
+
+        # Build task metadata with defaults cascade
+        task_metadata: Dict[str, Any] = {}
+
+        # Apply description/details
+        desc_field, desc_value = _extract_description(task_def)
+        if desc_field and desc_value is not None:
+            task_metadata[desc_field] = desc_value
+        elif requires_rich_tasks and task_type == "task":
+            return None, f"Task '{task_title}' missing description"
+
+        # Apply file_path
+        file_path = task_def.get("file_path")
+        if file_path and isinstance(file_path, str):
+            task_metadata["file_path"] = file_path.strip()
+
+        # Apply estimated_hours (task-level overrides defaults)
+        est_hours = task_def.get("estimated_hours")
+        if est_hours is not None:
+            task_metadata["estimated_hours"] = float(est_hours)
+        elif defaults.get("estimated_hours") is not None:
+            task_metadata["estimated_hours"] = float(defaults["estimated_hours"])
+
+        normalized_category = None
+        if task_type == "task":
+            # Apply acceptance_criteria
+            raw_acceptance = task_def.get("acceptance_criteria")
+            if raw_acceptance is None:
+                raw_acceptance = defaults.get("acceptance_criteria")
+            acceptance_criteria = _normalize_acceptance_criteria(raw_acceptance)
+            if acceptance_criteria is not None:
+                task_metadata["acceptance_criteria"] = acceptance_criteria
+            if requires_rich_tasks:
+                if raw_acceptance is None:
+                    return None, f"Task '{task_title}' missing acceptance_criteria"
+                if not acceptance_criteria:
+                    return (
+                        None,
+                        f"Task '{task_title}' acceptance_criteria must include at least one entry",
+                    )
+
+            # Apply task_category from defaults if not specified
+            category = task_def.get("task_category") or task_def.get("category")
+            if category is None:
+                category = defaults.get("task_category") or defaults.get("category")
+            if category and isinstance(category, str):
+                normalized_category = category.strip().lower()
+                if normalized_category not in CATEGORIES:
+                    return (
+                        None,
+                        f"Task '{task_title}' has invalid task_category '{category}'",
+                    )
+                task_metadata["task_category"] = normalized_category
+            if requires_rich_tasks and normalized_category is None:
+                return None, f"Task '{task_title}' missing task_category"
+
+            if normalized_category in {"implementation", "refactoring"}:
+                if not _nonempty_string(task_metadata.get("file_path")):
+                    return (
+                        None,
+                        f"Task '{task_title}' missing file_path for category '{normalized_category}'",
+                    )
+
+        # Apply verification_type for verify tasks
+        if task_type == "verify":
+            verify_type = task_def.get("verification_type")
+            if verify_type and verify_type in VERIFICATION_TYPES:
+                task_metadata["verification_type"] = verify_type
+
+        # Create task node
+        task_node = {
+            "type": task_type,
+            "title": task_title,
+            "status": "pending",
+            "parent": phase_id,
+            "children": [],
+            "total_tasks": 1,
+            "completed_tasks": 0,
+            "metadata": task_metadata,
+            "dependencies": {
+                "blocks": [],
+                "blocked_by": [],
+                "depends": [],
+            },
+        }
+
+        hierarchy[task_id] = task_node
+        phase_node["children"].append(task_id)
+        phase_node["total_tasks"] += 1
+
+        tasks_created.append({
+            "task_id": task_id,
+            "title": task_title,
+            "type": task_type,
+        })
+
+    # Update spec-root total_tasks
+    total_tasks = spec_root.get("total_tasks", 0)
+    spec_root["total_tasks"] = total_tasks + phase_node["total_tasks"]
+
+    # Update spec-level estimated hours if provided
+    if phase_estimated_hours is not None:
+        spec_metadata = spec_data.setdefault("metadata", {})
+        current_hours = spec_metadata.get("estimated_hours")
+        if isinstance(current_hours, (int, float)):
+            spec_metadata["estimated_hours"] = current_hours + phase_estimated_hours
+        else:
+            spec_metadata["estimated_hours"] = phase_estimated_hours
+
+    # Save spec atomically
+    saved = save_spec(spec_id, spec_data, specs_dir)
+    if not saved:
+        return None, "Failed to save specification"
+
+    return {
+        "spec_id": spec_id,
+        "phase_id": phase_id,
+        "title": phase_title,
+        "position": insert_index,
+        "linked_previous": linked_phase_id,
+        "tasks_created": tasks_created,
+        "total_tasks": len(tasks_created),
+    }, None
+
+
+def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
+    """
+    Recursively collect all descendant node IDs for a given node.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_id: Starting node ID
+
+    Returns:
+        List of all descendant node IDs (not including the starting node)
+    """
+    descendants: List[str] = []
+    node = hierarchy.get(node_id)
+    if not node:
+        return descendants
+
+    children = node.get("children", [])
+    if not isinstance(children, list):
+        return descendants
+
+    for child_id in children:
+        descendants.append(child_id)
+        descendants.extend(_collect_descendants(hierarchy, child_id))
+
+    return descendants
+
+
+def _count_tasks_in_subtree(
+    hierarchy: Dict[str, Any], node_ids: List[str]
+) -> Tuple[int, int]:
+    """
+    Count total and completed tasks in a list of nodes.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_ids: List of node IDs to count
+
+    Returns:
+        Tuple of (total_count, completed_count)
+    """
+    total = 0
+    completed = 0
+
+    for node_id in node_ids:
+        node = hierarchy.get(node_id)
+        if not node:
+            continue
+        node_type = node.get("type")
+        if node_type in ("task", "subtask", "verify"):
+            total += 1
+            if node.get("status") == "completed":
+                completed += 1
+
+    return total, completed
+
+
+def _remove_dependency_references(
+    hierarchy: Dict[str, Any], removed_ids: List[str]
+) -> None:
+    """
+    Remove references to deleted nodes from all dependency lists.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        removed_ids: List of node IDs being removed
+    """
+    removed_set = set(removed_ids)
 
     for node_id, node in hierarchy.items():
         deps = node.get("dependencies")
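
`add_phase_bulk` builds a phase and its tasks in one save rather than one call per task. A usage sketch under assumed inputs (the spec ID and file path are hypothetical; note that `task_category` must come from `CATEGORIES`, and `implementation`/`refactoring` tasks must carry a `file_path`):

```python
result, err = add_phase_bulk(
    spec_id="my-spec",  # hypothetical spec ID
    phase_title="Hardening",
    phase_estimated_hours=6.0,
    metadata_defaults={"task_category": "implementation"},
    tasks=[
        {
            "type": "task",
            "title": "Add input validation",
            "description": "Reject malformed payloads at the API boundary",
            "acceptance_criteria": ["invalid payloads return 400"],
            "file_path": "src/api/validation.py",  # hypothetical path
        },
        {
            "type": "verify",
            "title": "Run the test suite",
            "verification_type": "run-tests",
        },
    ],
)
if err:
    raise RuntimeError(err)
print(result["phase_id"], [t["task_id"] for t in result["tasks_created"]])
```
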
@@ -971,26 +1976,519 @@ def remove_phase(
971
1976
  return result, None
972
1977
 
973
1978
 
1979
+ def move_phase(
1980
+ spec_id: str,
1981
+ phase_id: str,
1982
+ position: int,
1983
+ link_previous: bool = True,
1984
+ dry_run: bool = False,
1985
+ specs_dir: Optional[Path] = None,
1986
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
1987
+ """
1988
+ Move a phase to a new position within spec-root's children.
1989
+
1990
+ Supports reordering phases and optionally re-linking phase dependencies
1991
+ according to the link_previous pattern (each phase blocked by its predecessor).
1992
+
1993
+ Args:
1994
+ spec_id: Specification ID containing the phase.
1995
+ phase_id: Phase ID to move (e.g., "phase-2").
1996
+ position: Target position (1-based index) in spec-root children.
1997
+ link_previous: If True, update dependencies to maintain the sequential
1998
+ blocking pattern. If False, preserve existing dependencies.
1999
+ dry_run: If True, validate and return preview without saving changes.
2000
+ specs_dir: Path to specs directory (auto-detected if not provided).
2001
+
2002
+ Returns:
2003
+ Tuple of (result_dict, error_message).
2004
+ On success: ({"spec_id": ..., "phase_id": ..., "old_position": ..., "new_position": ..., ...}, None)
2005
+ On failure: (None, "error message")
2006
+ """
2007
+ # Validate inputs
2008
+ if not spec_id or not spec_id.strip():
2009
+ return None, "Specification ID is required"
2010
+
2011
+ if not phase_id or not phase_id.strip():
2012
+ return None, "Phase ID is required"
2013
+
2014
+ if not isinstance(position, int) or position < 1:
2015
+ return None, "Position must be a positive integer (1-based)"
2016
+
2017
+ # Find specs directory
2018
+ if specs_dir is None:
2019
+ specs_dir = find_specs_directory()
2020
+
2021
+ if specs_dir is None:
2022
+ return (
2023
+ None,
2024
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
2025
+ )
2026
+
2027
+ # Find and load the spec
2028
+ spec_path = find_spec_file(spec_id, specs_dir)
2029
+ if spec_path is None:
2030
+ return None, f"Specification '{spec_id}' not found"
2031
+
2032
+ spec_data = load_spec(spec_id, specs_dir)
2033
+ if spec_data is None:
2034
+ return None, f"Failed to load specification '{spec_id}'"
2035
+
2036
+ hierarchy = spec_data.get("hierarchy", {})
2037
+
2038
+ # Validate phase exists
2039
+ phase = hierarchy.get(phase_id)
2040
+ if phase is None:
2041
+ return None, f"Phase '{phase_id}' not found"
2042
+
2043
+ # Validate node type is phase
2044
+ node_type = phase.get("type")
2045
+ if node_type != "phase":
2046
+ return None, f"Node '{phase_id}' is not a phase (type: {node_type})"
2047
+
2048
+ # Get spec-root
2049
+ spec_root = hierarchy.get("spec-root")
2050
+ if spec_root is None:
2051
+ return None, "Specification root node 'spec-root' not found"
2052
+
2053
+ children = spec_root.get("children", [])
2054
+ if not isinstance(children, list):
2055
+ children = []
2056
+
2057
+ # Find current position
2058
+ try:
2059
+ old_index = children.index(phase_id)
2060
+ except ValueError:
2061
+ return None, f"Phase '{phase_id}' not found in spec-root children"
2062
+
2063
+ # Convert to 0-based index for internal use
2064
+ new_index = position - 1
2065
+
2066
+ # Validate position is within bounds
2067
+ if new_index < 0 or new_index >= len(children):
2068
+ return None, f"Invalid position {position}. Must be 1-{len(children)}"
2069
+
2070
+ # No change needed if same position
2071
+ if old_index == new_index:
2072
+ return {
2073
+ "spec_id": spec_id,
2074
+ "phase_id": phase_id,
2075
+ "phase_title": phase.get("title", ""),
2076
+ "old_position": old_index + 1,
2077
+ "new_position": new_index + 1,
2078
+ "moved": False,
2079
+ "dry_run": dry_run,
2080
+ "message": "Phase is already at the specified position",
2081
+ }, None
2082
+
2083
+ # Identify old neighbors for dependency cleanup
2084
+ old_prev_id: Optional[str] = None
2085
+ old_next_id: Optional[str] = None
2086
+
2087
+ if old_index > 0:
2088
+ candidate = children[old_index - 1]
2089
+ if hierarchy.get(candidate, {}).get("type") == "phase":
2090
+ old_prev_id = candidate
2091
+
2092
+ if old_index < len(children) - 1:
2093
+ candidate = children[old_index + 1]
2094
+ if hierarchy.get(candidate, {}).get("type") == "phase":
2095
+ old_next_id = candidate
2096
+
2097
+ # Perform the move in children list
2098
+ children.remove(phase_id)
2099
+ # After removal, adjust target index if moving forward
2100
+ insert_index = new_index if new_index <= old_index else new_index
2101
+ if insert_index >= len(children):
2102
+ children.append(phase_id)
2103
+ else:
2104
+ children.insert(insert_index, phase_id)
2105
+
2106
+ # Identify new neighbors
2107
+ actual_new_index = children.index(phase_id)
2108
+ new_prev_id: Optional[str] = None
2109
+ new_next_id: Optional[str] = None
2110
+
2111
+ if actual_new_index > 0:
2112
+ candidate = children[actual_new_index - 1]
2113
+ if hierarchy.get(candidate, {}).get("type") == "phase":
2114
+ new_prev_id = candidate
2115
+
2116
+ if actual_new_index < len(children) - 1:
2117
+ candidate = children[actual_new_index + 1]
2118
+ if hierarchy.get(candidate, {}).get("type") == "phase":
2119
+ new_next_id = candidate
2120
+
2121
+ # Track dependency changes
2122
+ dependencies_updated: List[Dict[str, Any]] = []
2123
+
2124
+ if link_previous:
2125
+ # Remove old dependency links
2126
+ phase_deps = phase.setdefault(
2127
+ "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
2128
+ )
2129
+
2130
+ # 1. Remove this phase from old_prev's blocks list
2131
+ if old_prev_id:
2132
+ old_prev = hierarchy.get(old_prev_id)
2133
+ if old_prev:
2134
+ old_prev_deps = old_prev.get("dependencies", {})
2135
+ old_prev_blocks = old_prev_deps.get("blocks", [])
2136
+ if phase_id in old_prev_blocks:
2137
+ old_prev_blocks.remove(phase_id)
2138
+ dependencies_updated.append({
2139
+ "action": "removed",
2140
+ "from": old_prev_id,
2141
+ "relationship": "blocks",
2142
+ "target": phase_id,
2143
+ })
2144
+
2145
+ # 2. Remove old_prev from this phase's blocked_by
2146
+ phase_blocked_by = phase_deps.setdefault("blocked_by", [])
2147
+ if old_prev_id and old_prev_id in phase_blocked_by:
2148
+ phase_blocked_by.remove(old_prev_id)
2149
+ dependencies_updated.append({
2150
+ "action": "removed",
2151
+ "from": phase_id,
2152
+ "relationship": "blocked_by",
2153
+ "target": old_prev_id,
2154
+ })
2155
+
2156
+ # 3. Remove this phase from old_next's blocked_by
2157
+ if old_next_id:
2158
+ old_next = hierarchy.get(old_next_id)
2159
+ if old_next:
2160
+ old_next_deps = old_next.get("dependencies", {})
2161
+ old_next_blocked_by = old_next_deps.get("blocked_by", [])
2162
+ if phase_id in old_next_blocked_by:
2163
+ old_next_blocked_by.remove(phase_id)
2164
+ dependencies_updated.append({
2165
+ "action": "removed",
2166
+ "from": old_next_id,
2167
+ "relationship": "blocked_by",
2168
+ "target": phase_id,
2169
+ })
2170
+
2171
+ # 4. Remove old_next from this phase's blocks
2172
+ phase_blocks = phase_deps.setdefault("blocks", [])
2173
+ if old_next_id and old_next_id in phase_blocks:
2174
+ phase_blocks.remove(old_next_id)
2175
+ dependencies_updated.append({
2176
+ "action": "removed",
2177
+ "from": phase_id,
2178
+ "relationship": "blocks",
2179
+ "target": old_next_id,
2180
+ })
2181
+
2182
+ # 5. Link old neighbors to each other (if they were adjacent via this phase)
2183
+ if old_prev_id and old_next_id:
2184
+ old_prev = hierarchy.get(old_prev_id)
2185
+ old_next = hierarchy.get(old_next_id)
2186
+ if old_prev and old_next:
2187
+ old_prev_deps = old_prev.setdefault(
2188
+ "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
2189
+ )
2190
+ old_prev_blocks = old_prev_deps.setdefault("blocks", [])
2191
+ if old_next_id not in old_prev_blocks:
2192
+ old_prev_blocks.append(old_next_id)
2193
+ dependencies_updated.append({
2194
+ "action": "added",
2195
+ "from": old_prev_id,
2196
+ "relationship": "blocks",
2197
+ "target": old_next_id,
2198
+ })
2199
+
2200
+ old_next_deps = old_next.setdefault(
2201
+ "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
2202
+ )
2203
+ old_next_blocked_by = old_next_deps.setdefault("blocked_by", [])
2204
+ if old_prev_id not in old_next_blocked_by:
2205
+ old_next_blocked_by.append(old_prev_id)
2206
+ dependencies_updated.append({
2207
+ "action": "added",
2208
+ "from": old_next_id,
2209
+ "relationship": "blocked_by",
2210
+ "target": old_prev_id,
2211
+ })
2212
+
2213
+ # Add new dependency links
2214
+ # 6. New prev blocks this phase
2215
+ if new_prev_id:
2216
+ new_prev = hierarchy.get(new_prev_id)
2217
+ if new_prev:
2218
+ new_prev_deps = new_prev.setdefault(
2219
+ "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
2220
+ )
2221
+ new_prev_blocks = new_prev_deps.setdefault("blocks", [])
2222
+ if phase_id not in new_prev_blocks:
2223
+ new_prev_blocks.append(phase_id)
2224
+ dependencies_updated.append({
2225
+ "action": "added",
2226
+ "from": new_prev_id,
2227
+ "relationship": "blocks",
2228
+ "target": phase_id,
2229
+ })
2230
+
2231
+ # This phase is blocked by new prev
2232
+ if new_prev_id not in phase_blocked_by:
2233
+ phase_blocked_by.append(new_prev_id)
2234
+ dependencies_updated.append({
2235
+ "action": "added",
2236
+ "from": phase_id,
2237
+ "relationship": "blocked_by",
2238
+ "target": new_prev_id,
2239
+ })
2240
+
2241
+ # 7. This phase blocks new next
2242
+ if new_next_id:
2243
+ new_next = hierarchy.get(new_next_id)
2244
+ if new_next:
2245
+ if new_next_id not in phase_blocks:
2246
+ phase_blocks.append(new_next_id)
2247
+ dependencies_updated.append({
2248
+ "action": "added",
2249
+ "from": phase_id,
2250
+ "relationship": "blocks",
2251
+ "target": new_next_id,
2252
+ })
2253
+
2254
+ new_next_deps = new_next.setdefault(
2255
+ "dependencies", {"blocks": [], "blocked_by": [], "depends": []}
2256
+ )
2257
+ new_next_blocked_by = new_next_deps.setdefault("blocked_by", [])
2258
+ if phase_id not in new_next_blocked_by:
2259
+ new_next_blocked_by.append(phase_id)
2260
+ dependencies_updated.append({
2261
+ "action": "added",
2262
+ "from": new_next_id,
2263
+ "relationship": "blocked_by",
2264
+ "target": phase_id,
2265
+ })
2266
+
2267
+ # Remove old link from new prev to new next (now goes through this phase)
2268
+ if new_prev_id:
2269
+ new_prev = hierarchy.get(new_prev_id)
2270
+ if new_prev:
2271
+ new_prev_deps = new_prev.get("dependencies", {})
2272
+ new_prev_blocks = new_prev_deps.get("blocks", [])
2273
+ if new_next_id in new_prev_blocks:
2274
+ new_prev_blocks.remove(new_next_id)
2275
+ dependencies_updated.append({
2276
+ "action": "removed",
2277
+ "from": new_prev_id,
2278
+ "relationship": "blocks",
2279
+ "target": new_next_id,
2280
+ })
2281
+
2282
+ if new_prev_id in new_next_blocked_by:
2283
+ new_next_blocked_by.remove(new_prev_id)
2284
+ dependencies_updated.append({
2285
+ "action": "removed",
2286
+ "from": new_next_id,
2287
+ "relationship": "blocked_by",
2288
+ "target": new_prev_id,
2289
+ })
2290
+
2291
+ # Update spec-root children
2292
+ spec_root["children"] = children
2293
+
2294
+ # Build result
2295
+ result: Dict[str, Any] = {
2296
+ "spec_id": spec_id,
2297
+ "phase_id": phase_id,
2298
+ "phase_title": phase.get("title", ""),
2299
+ "old_position": old_index + 1,
2300
+ "new_position": actual_new_index + 1,
2301
+ "moved": True,
2302
+ "link_previous": link_previous,
2303
+ "dry_run": dry_run,
2304
+ }
2305
+
2306
+ if dependencies_updated:
2307
+ result["dependencies_updated"] = dependencies_updated
2308
+
2309
+ if dry_run:
2310
+ result["message"] = "Dry run - changes not saved"
2311
+ return result, None
2312
+
2313
+ # Save the spec
2314
+ saved = save_spec(spec_id, spec_data, specs_dir)
2315
+ if not saved:
2316
+ return None, "Failed to save specification"
2317
+
2318
+ return result, None
2319
+
2320
+
2321
+ def update_phase_metadata(
+ spec_id: str,
+ phase_id: str,
+ *,
+ estimated_hours: Optional[float] = None,
+ description: Optional[str] = None,
+ purpose: Optional[str] = None,
+ dry_run: bool = False,
+ specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Update metadata fields of a phase in a specification.
+
+ Allows updating phase-level metadata such as estimated_hours, description,
+ and purpose. Tracks previous values for audit purposes.
+
+ Args:
+ spec_id: Specification ID containing the phase.
+ phase_id: Phase ID to update (e.g., "phase-1").
+ estimated_hours: New estimated hours value (must be >= 0 if provided).
+ description: New description text for the phase.
+ purpose: New purpose text for the phase.
+ dry_run: If True, validate and return preview without saving changes.
+ specs_dir: Path to specs directory (auto-detected if not provided).
+
+ Returns:
+ Tuple of (result_dict, error_message).
+ On success: ({"spec_id": ..., "phase_id": ..., "updates": [...], ...}, None)
+ On failure: (None, "error message")
+ """
+ # Validate spec_id
+ if not spec_id or not spec_id.strip():
+ return None, "Specification ID is required"
+
+ # Validate phase_id
+ if not phase_id or not phase_id.strip():
+ return None, "Phase ID is required"
+
+ # Validate estimated_hours if provided
+ if estimated_hours is not None:
+ if not isinstance(estimated_hours, (int, float)):
+ return None, "estimated_hours must be a number"
+ if estimated_hours < 0:
+ return None, "estimated_hours must be >= 0"
+
+ # Check that at least one field is being updated
+ has_update = any(
+ v is not None for v in [estimated_hours, description, purpose]
+ )
+ if not has_update:
+ return None, "At least one field (estimated_hours, description, purpose) must be provided"
+
+ # Find specs directory
+ if specs_dir is None:
+ specs_dir = find_specs_directory()
+
+ if specs_dir is None:
+ return (
+ None,
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+ )
+
+ # Find and load the spec
+ spec_path = find_spec_file(spec_id, specs_dir)
+ if spec_path is None:
+ return None, f"Specification '{spec_id}' not found"
+
+ spec_data = load_spec(spec_id, specs_dir)
+ if spec_data is None:
+ return None, f"Failed to load specification '{spec_id}'"
+
+ hierarchy = spec_data.get("hierarchy", {})
+
+ # Validate phase exists
+ phase = hierarchy.get(phase_id)
+ if phase is None:
+ return None, f"Phase '{phase_id}' not found"
+
+ # Validate node type is phase
+ node_type = phase.get("type")
+ if node_type != "phase":
+ return None, f"Node '{phase_id}' is not a phase (type: {node_type})"
+
+ # Ensure metadata exists on phase
+ if "metadata" not in phase:
+ phase["metadata"] = {}
+
+ phase_metadata = phase["metadata"]
+
+ # Track updates with previous values
+ updates: List[Dict[str, Any]] = []
+
+ if estimated_hours is not None:
+ previous = phase_metadata.get("estimated_hours")
+ phase_metadata["estimated_hours"] = estimated_hours
+ updates.append({
+ "field": "estimated_hours",
+ "previous_value": previous,
+ "new_value": estimated_hours,
+ })
+
+ if description is not None:
+ description = description.strip() if description else description
+ previous = phase_metadata.get("description")
+ phase_metadata["description"] = description
+ updates.append({
+ "field": "description",
+ "previous_value": previous,
+ "new_value": description,
+ })
+
+ if purpose is not None:
+ purpose = purpose.strip() if purpose else purpose
+ previous = phase_metadata.get("purpose")
+ phase_metadata["purpose"] = purpose
+ updates.append({
+ "field": "purpose",
+ "previous_value": previous,
+ "new_value": purpose,
+ })
+
+ # Build result
+ result: Dict[str, Any] = {
+ "spec_id": spec_id,
+ "phase_id": phase_id,
+ "phase_title": phase.get("title", ""),
+ "updates": updates,
+ "dry_run": dry_run,
+ }
+
+ if dry_run:
+ result["message"] = "Dry run - changes not saved"
+ return result, None
+
+ # Save the spec
+ saved = save_spec(spec_id, spec_data, specs_dir)
+ if not saved:
+ return None, "Failed to save specification"
+
+ return result, None
+
+
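
A dry-run usage sketch built directly from the signature above:

    result, error = update_phase_metadata(
        "my-spec", "phase-1",
        estimated_hours=6.5,
        description="Expanded scope to cover data migration",
        dry_run=True,  # preview only; nothing is written
    )
    if error is None:
        for update in result["updates"]:
            print(update["field"], update["previous_value"], "->", update["new_value"])
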
  def get_template_structure(template: str, category: str) -> Dict[str, Any]:
  """
  Get the hierarchical structure for a spec template.

- All templates include per-phase verification (auto + fidelity) for each phase.
+ Only the 'empty' template is supported. Use phase templates to add structure.

  Args:
- template: Template type (simple, medium, complex, security).
+ template: Template type (only 'empty' is valid).
  category: Default task category.

  Returns:
  Hierarchy dict for the spec.
+
+ Raises:
+ ValueError: If template is not 'empty'.
  """
- base_hierarchy = {
+ if template != "empty":
+ raise ValueError(
+ f"Invalid template '{template}'. Only 'empty' template is supported. "
+ f"Use phase templates (phase-add-bulk or phase-template apply) to add structure."
+ )
+
+ return {
  "spec-root": {
  "type": "spec",
  "title": "", # Filled in later
  "status": "pending",
  "parent": None,
- "children": ["phase-1"],
+ "children": [],
  "total_tasks": 0,
  "completed_tasks": 0,
  "metadata": {
@@ -1003,201 +2501,300 @@ def get_template_structure(template: str, category: str) -> Dict[str, Any]:
  "depends": [],
  },
  },
- "phase-1": {
- "type": "phase",
- "title": "Planning & Discovery",
- "status": "pending",
- "parent": "spec-root",
- "children": ["task-1-1"],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "purpose": "Initial planning and requirements gathering",
- "estimated_hours": 2,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": [],
- "depends": [],
- },
- },
- "task-1-1": {
- "type": "task",
- "title": "Define requirements",
- "status": "pending",
- "parent": "phase-1",
- "children": [],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "details": "Document the requirements and acceptance criteria",
- "category": category,
- "estimated_hours": 1,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": [],
- "depends": [],
- },
- },
  }
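
With the template gate above, only the empty skeleton is reachable; for example:

    hierarchy = get_template_structure("empty", "implementation")
    assert hierarchy["spec-root"]["children"] == []
    get_template_structure("medium", "implementation")  # raises ValueError
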
 
- # Add verification to phase-1 (all templates)
- _add_phase_verification(base_hierarchy, 1, "phase-1")
- base_hierarchy["spec-root"]["total_tasks"] = 3 # task + 2 verify

- if template == "simple":
- return base_hierarchy
+ def get_phase_template_structure(
+ template: str, category: str = "implementation"
+ ) -> Dict[str, Any]:
+ """
+ Get the structure definition for a phase template.
+
+ Phase templates define reusable phase structures with pre-configured tasks.
+ Each template includes automatic verification scaffolding (run-tests + fidelity).
+
+ Args:
+ template: Phase template type (planning, implementation, testing, security, documentation).
+ category: Default task category for tasks in this phase.

- # Medium/complex/security: add implementation phase
- if template in ("medium", "complex", "security"):
- base_hierarchy["spec-root"]["children"].append("phase-2")
- base_hierarchy["phase-1"]["dependencies"]["blocks"].append("phase-2")
- base_hierarchy["phase-2"] = {
- "type": "phase",
+ Returns:
+ Dict with phase structure including:
+ - title: Phase title
+ - description: Phase description
+ - purpose: Phase purpose for metadata
+ - estimated_hours: Total estimated hours
+ - tasks: List of task definitions (title, description, category, estimated_hours)
+ - includes_verification: Always True (verification auto-added)
+ """
+ templates: Dict[str, Dict[str, Any]] = {
+ "planning": {
+ "title": "Planning & Discovery",
+ "description": "Requirements gathering, analysis, and initial planning",
+ "purpose": "Define scope, requirements, and acceptance criteria",
+ "estimated_hours": 4,
+ "tasks": [
+ {
+ "title": "Define requirements",
+ "description": "Document functional and non-functional requirements",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Requirements are documented and reviewed",
+ ],
+ "estimated_hours": 2,
+ },
+ {
+ "title": "Design solution approach",
+ "description": "Outline the technical approach and architecture decisions",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Solution approach and key decisions are documented",
+ ],
+ "estimated_hours": 2,
+ },
+ ],
+ },
+ "implementation": {
  "title": "Implementation",
- "status": "pending",
- "parent": "spec-root",
- "children": ["task-2-1"],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "purpose": "Core implementation work",
- "estimated_hours": 8,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": ["phase-1"],
- "depends": [],
- },
- }
- base_hierarchy["task-2-1"] = {
- "type": "task",
- "title": "Implement core functionality",
- "status": "pending",
- "parent": "phase-2",
- "children": [],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "details": "Implement the main features",
- "category": category,
- "estimated_hours": 4,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": [],
- "depends": [],
- },
- }
- # Add verification to phase-2
- _add_phase_verification(base_hierarchy, 2, "phase-2")
- base_hierarchy["spec-root"]["total_tasks"] = 6 # 2 tasks + 4 verify
-
- # Security: add security review phase
- if template == "security":
- base_hierarchy["spec-root"]["children"].append("phase-3")
- base_hierarchy["phase-2"]["dependencies"]["blocks"].append("phase-3")
- base_hierarchy["phase-3"] = {
- "type": "phase",
- "title": "Security Review",
- "status": "pending",
- "parent": "spec-root",
- "children": ["task-3-1"],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "purpose": "Security audit and hardening",
- "estimated_hours": 4,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": ["phase-2"],
- "depends": [],
- },
- }
- base_hierarchy["task-3-1"] = {
- "type": "task",
- "title": "Security audit",
- "status": "pending",
- "parent": "phase-3",
- "children": [],
- "total_tasks": 1,
- "completed_tasks": 0,
- "metadata": {
- "details": "Review for security vulnerabilities",
- "category": "investigation",
- "estimated_hours": 2,
- },
- "dependencies": {
- "blocks": [],
- "blocked_by": [],
- "depends": [],
- },
- }
- # Add verification to phase-3
- _add_phase_verification(base_hierarchy, 3, "phase-3")
- base_hierarchy["spec-root"]["total_tasks"] = 9 # 3 tasks + 6 verify
+ "description": "Core development and feature implementation",
+ "purpose": "Build the primary functionality",
+ "estimated_hours": 8,
+ "tasks": [
+ {
+ "title": "Implement core functionality",
+ "description": "Build the main features and business logic",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Core functionality is implemented and verified",
+ ],
+ "estimated_hours": 6,
+ },
+ {
+ "title": "Add error handling",
+ "description": "Implement error handling and edge cases",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Error handling covers expected edge cases",
+ ],
+ "estimated_hours": 2,
+ },
+ ],
+ },
+ "testing": {
+ "title": "Testing & Validation",
+ "description": "Comprehensive testing and quality assurance",
+ "purpose": "Ensure code quality and correctness",
+ "estimated_hours": 6,
+ "tasks": [
+ {
+ "title": "Write unit tests",
+ "description": "Create unit tests for individual components",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Unit tests cover primary logic paths",
+ ],
+ "estimated_hours": 3,
+ },
+ {
+ "title": "Write integration tests",
+ "description": "Create integration tests for component interactions",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Integration tests cover critical workflows",
+ ],
+ "estimated_hours": 3,
+ },
+ ],
+ },
+ "security": {
+ "title": "Security Review",
+ "description": "Security audit, vulnerability assessment, and hardening",
+ "purpose": "Identify and remediate security vulnerabilities",
+ "estimated_hours": 6,
+ "tasks": [
+ {
+ "title": "Security audit",
+ "description": "Review code for security vulnerabilities (OWASP Top 10)",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Security findings are documented with severity",
+ ],
+ "estimated_hours": 3,
+ },
+ {
+ "title": "Security remediation",
+ "description": "Fix identified vulnerabilities and harden implementation",
+ "task_category": "investigation",
+ "acceptance_criteria": [
+ "Security findings are addressed or tracked",
+ ],
+ "estimated_hours": 3,
+ },
+ ],
+ },
+ "documentation": {
+ "title": "Documentation",
+ "description": "Technical documentation and knowledge capture",
+ "purpose": "Document the implementation for maintainability",
+ "estimated_hours": 4,
+ "tasks": [
+ {
+ "title": "Write API documentation",
+ "description": "Document public APIs, parameters, and return values",
+ "task_category": "research",
+ "acceptance_criteria": [
+ "API documentation is updated with current behavior",
+ ],
+ "estimated_hours": 2,
+ },
+ {
+ "title": "Write user guide",
+ "description": "Create usage examples and integration guide",
+ "task_category": "research",
+ "acceptance_criteria": [
+ "User guide includes usage examples",
+ ],
+ "estimated_hours": 2,
+ },
+ ],
+ },
+ }

- return base_hierarchy
+ if template not in templates:
+ raise ValueError(
+ f"Invalid phase template '{template}'. Must be one of: {', '.join(PHASE_TEMPLATES)}"
+ )

+ result = templates[template].copy()
+ result["includes_verification"] = True
+ result["template_name"] = template
+ return result
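
For instance, pulling one of the definitions above:

    struct = get_phase_template_structure("testing")
    print(struct["title"])                  # "Testing & Validation"
    print(len(struct["tasks"]))             # 2 pre-configured tasks
    print(struct["includes_verification"])  # True; verify tasks are added on apply
    print(struct["template_name"])          # "testing"
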
 
- def create_spec(
- name: str,
- template: str = "medium",
- category: str = "implementation",
+
+ def apply_phase_template(
+ spec_id: str,
+ template: str,
  specs_dir: Optional[Path] = None,
+ category: str = "implementation",
+ position: Optional[int] = None,
+ link_previous: bool = True,
  ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
  """
- Create a new specification file from a template.
+ Apply a phase template to an existing spec.
+
+ Creates a new phase with pre-configured tasks based on the template.
+ Automatically includes verification scaffolding (run-tests + fidelity).

  Args:
- name: Human-readable name for the specification.
- template: Template type (simple, medium, complex, security). Default: medium.
- category: Default task category. Default: implementation.
+ spec_id: ID of the spec to add the phase to.
+ template: Phase template name (planning, implementation, testing, security, documentation).
  specs_dir: Path to specs directory (auto-detected if not provided).
+ category: Default task category for tasks (can be overridden by template).
+ position: Position to insert phase (None = append at end).
+ link_previous: Whether to link this phase to the previous one with dependencies.

  Returns:
  Tuple of (result_dict, error_message).
- On success: ({"spec_id": ..., "spec_path": ..., ...}, None)
+ On success: ({"phase_id": ..., "tasks_created": [...], ...}, None)
  On failure: (None, "error message")
  """
  # Validate template
- if template not in TEMPLATES:
+ if template not in PHASE_TEMPLATES:
  return (
  None,
- f"Invalid template '{template}'. Must be one of: {', '.join(TEMPLATES)}",
+ f"Invalid phase template '{template}'. Must be one of: {', '.join(PHASE_TEMPLATES)}",
  )

- # Validate category
- if category not in CATEGORIES:
+ # Get template structure
+ template_struct = get_phase_template_structure(template, category)
+
+ # Build tasks list for add_phase_bulk
+ tasks = []
+ for task_def in template_struct["tasks"]:
+ tasks.append({
+ "type": "task",
+ "title": task_def["title"],
+ "description": task_def.get("description", ""),
+ "task_category": task_def.get("task_category", task_def.get("category", category)),
+ "acceptance_criteria": task_def.get("acceptance_criteria"),
+ "estimated_hours": task_def.get("estimated_hours", 1),
+ })
+
+ # Append verification scaffolding (run-tests + fidelity-review)
+ tasks.append({
+ "type": "verify",
+ "title": "Run tests",
+ "verification_type": "run-tests",
+ })
+ tasks.append({
+ "type": "verify",
+ "title": "Fidelity review",
+ "verification_type": "fidelity",
+ })
+
+ # Use add_phase_bulk to create the phase atomically
+ result, error = add_phase_bulk(
+ spec_id=spec_id,
+ phase_title=template_struct["title"],
+ tasks=tasks,
+ specs_dir=specs_dir,
+ phase_description=template_struct.get("description"),
+ phase_purpose=template_struct.get("purpose"),
+ phase_estimated_hours=template_struct.get("estimated_hours"),
+ position=position,
+ link_previous=link_previous,
+ )
+
+ if error:
+ return None, error
+
+ # Enhance result with template info
+ if result:
+ result["template_applied"] = template
+ result["template_title"] = template_struct["title"]
+
+ return result, None
+
+
+ def generate_spec_data(
+ name: str,
+ template: str = "empty",
+ category: str = "implementation",
+ mission: Optional[str] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Generate spec data structure without writing to disk.
+
+ Used for preflight validation (dry_run) and by create_spec.
+
+ Args:
+ name: Human-readable name for the specification.
+ template: Template type (only 'empty' is valid).
+ category: Default task category.
+ mission: Optional mission statement for the spec.
+
+ Returns:
+ Tuple of (spec_data, error_message).
+ On success: (dict, None)
+ On failure: (None, "error message")
+ """
+ # Validate template - only 'empty' is supported
+ if template not in TEMPLATES:
  return (
  None,
- f"Invalid category '{category}'. Must be one of: {', '.join(CATEGORIES)}",
+ f"Invalid template '{template}'. Only 'empty' template is supported. "
+ f"Use phase templates to add structure.",
  )

- # Find specs directory
- if specs_dir is None:
- specs_dir = find_specs_directory()
-
- if specs_dir is None:
+ # Validate category
+ if category not in CATEGORIES:
  return (
  None,
- "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+ f"Invalid category '{category}'. Must be one of: {', '.join(CATEGORIES)}",
  )

- # Ensure pending directory exists
- pending_dir = specs_dir / "pending"
- pending_dir.mkdir(parents=True, exist_ok=True)
-
  # Generate spec ID
  spec_id = generate_spec_id(name)

- # Check if spec already exists
- spec_path = pending_dir / f"{spec_id}.json"
- if spec_path.exists():
- return None, f"Specification already exists: {spec_id}"
-
  # Generate spec structure
  now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
  hierarchy = get_template_structure(template, category)
@@ -1219,24 +2816,77 @@ def create_spec(
  "last_updated": now,
  "metadata": {
  "description": "",
+ "mission": mission.strip() if isinstance(mission, str) else "",
  "objectives": [],
- "complexity": "medium" if template in ("medium", "complex") else "low",
+ "complexity": "low", # Complexity set via explicit metadata, not template
  "estimated_hours": estimated_hours,
  "assumptions": [],
- "status": "pending",
  "owner": "",
- "progress_percentage": 0,
- "current_phase": "phase-1",
  "category": category,
  "template": template,
  },
  "progress_percentage": 0,
  "status": "pending",
- "current_phase": "phase-1",
+ "current_phase": None, # Empty template has no phases
  "hierarchy": hierarchy,
  "journal": [],
  }

+ return spec_data, None
+
+
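
Because nothing touches disk, this doubles as a cheap preflight check:

    spec_data, err = generate_spec_data("My Feature", template="empty")
    if err:
        print("would fail:", err)  # e.g. invalid template or category
    else:
        print(spec_data["spec_id"], spec_data["status"])  # derived ID, "pending"
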
+ def create_spec(
+ name: str,
+ template: str = "empty",
+ category: str = "implementation",
+ mission: Optional[str] = None,
+ specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Create a new specification file from a template.
+
+ Args:
+ name: Human-readable name for the specification.
+ template: Template type (only 'empty' is valid). Use phase templates to add structure.
+ category: Default task category. Default: implementation.
+ mission: Optional mission statement for the spec.
+ specs_dir: Path to specs directory (auto-detected if not provided).
+
+ Returns:
+ Tuple of (result_dict, error_message).
+ On success: ({"spec_id": ..., "spec_path": ..., ...}, None)
+ On failure: (None, "error message")
+ """
+ # Generate spec data (handles validation)
+ spec_data, error = generate_spec_data(
+ name=name,
+ template=template,
+ category=category,
+ mission=mission,
+ )
+ if error or spec_data is None:
+ return None, error or "Failed to generate spec data"
+
+ # Find specs directory
+ if specs_dir is None:
+ specs_dir = find_specs_directory()
+
+ if specs_dir is None:
+ return (
+ None,
+ "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.",
+ )
+
+ # Ensure pending directory exists
+ pending_dir = specs_dir / "pending"
+ pending_dir.mkdir(parents=True, exist_ok=True)
+
+ # Check if spec already exists
+ spec_id = spec_data["spec_id"]
+ spec_path = pending_dir / f"{spec_id}.json"
+ if spec_path.exists():
+ return None, f"Specification already exists: {spec_id}"
+
  # Write the spec file
  try:
  with open(spec_path, "w") as f:
@@ -1245,6 +2895,7 @@ def create_spec(
  return None, f"Failed to write spec file: {e}"

  # Count tasks and phases
+ hierarchy = spec_data["hierarchy"]
  task_count = sum(
  1
  for node in hierarchy.values()
@@ -1543,6 +3194,7 @@ def list_assumptions(
  FRONTMATTER_KEYS = (
  "title",
  "description",
+ "mission",
  "objectives",
  "complexity",
  "estimated_hours",
@@ -1618,20 +3270,27 @@ def update_frontmatter(
  if "metadata" not in spec_data:
  spec_data["metadata"] = {}

- # Get previous value for result
- previous_value = spec_data["metadata"].get(key)
+ # Get previous value for result (check appropriate location)
+ if key in ("status", "progress_percentage", "current_phase"):
+ previous_value = spec_data.get(key)
+ else:
+ previous_value = spec_data["metadata"].get(key)

  # Process value based on type
  if isinstance(value, str):
  value = value.strip() if value else value

- # Update the metadata field
- spec_data["metadata"][key] = value
-
- # Also update top-level fields if they exist (for backward compatibility)
- # Some fields like title, status, progress_percentage exist at both levels
- if key in ("title", "status", "progress_percentage", "current_phase"):
+ # Computed fields (status, progress_percentage, current_phase) are now
+ # stored only at top-level. Title is kept in metadata for descriptive purposes.
+ if key in ("status", "progress_percentage", "current_phase"):
+ # Update top-level only (canonical location for computed fields)
  spec_data[key] = value
+ else:
+ # Regular metadata field
+ spec_data["metadata"][key] = value
+ # Also sync title to top-level if updating it
+ if key == "title":
+ spec_data[key] = value

  # Update last_updated
  now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
@@ -1648,3 +3307,546 @@ def update_frontmatter(
  "value": value,
  "previous_value": previous_value,
  }, None
+
+
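
Given that routing, a hedged illustration (update_frontmatter's full signature sits outside this hunk, so the positional call shape is assumed):

    # Computed field: written only to the top level.
    update_frontmatter("my-spec", "status", "in_progress")
    # Regular metadata field; "title" is additionally synced to the top level.
    update_frontmatter("my-spec", "title", "Payment Refactor v2")
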
+ # Safety constraints for find/replace operations
+ _FR_MAX_PATTERN_LENGTH = 256
+ _FR_DEFAULT_MAX_REPLACEMENTS = 1000
+ _FR_VALID_SCOPES = {"all", "titles", "descriptions"}
+ _FR_MAX_SAMPLE_DIFFS = 10
+
+
+ def find_replace_in_spec(
+ spec_id: str,
+ find: str,
+ replace: str,
+ *,
+ scope: str = "all",
+ use_regex: bool = False,
+ case_sensitive: bool = True,
+ dry_run: bool = False,
+ max_replacements: int = _FR_DEFAULT_MAX_REPLACEMENTS,
+ specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Find and replace text across spec hierarchy nodes.
+
+ Performs literal or regex find/replace across titles and/or descriptions
+ in a specification's hierarchy nodes.
+
+ Args:
+ spec_id: Specification ID to modify.
+ find: Text or regex pattern to find.
+ replace: Replacement text (supports backreferences if use_regex=True).
+ scope: Where to search - "all", "titles", or "descriptions".
+ use_regex: If True, treat `find` as a regex pattern.
+ case_sensitive: If False, perform case-insensitive matching.
+ dry_run: If True, preview changes without modifying the spec.
+ max_replacements: Maximum number of replacements (safety limit).
+ specs_dir: Path to specs directory (auto-detected if not provided).
+
+ Returns:
+ Tuple of (result_dict, error_message).
+ On success: ({"spec_id": ..., "total_replacements": ..., ...}, None)
+ On failure: (None, "error message")
+ """
+ # Validate find pattern
+ if not find or not isinstance(find, str):
+ return None, "find must be a non-empty string"
+ # Don't strip the pattern: use exactly what the user provides (leading/trailing whitespace may be intentional)
+ if not find.strip():
+ return None, "find must be a non-empty string"
+ if len(find) > _FR_MAX_PATTERN_LENGTH:
+ return None, f"find pattern exceeds maximum length of {_FR_MAX_PATTERN_LENGTH} characters"
+
+ # Validate replace
+ if replace is None:
+ return None, "replace must be provided (use empty string to delete matches)"
+ if not isinstance(replace, str):
+ return None, "replace must be a string"
+
+ # Validate scope
+ if scope not in _FR_VALID_SCOPES:
+ return None, f"scope must be one of: {sorted(_FR_VALID_SCOPES)}"
+
+ # Validate max_replacements
+ if not isinstance(max_replacements, int) or max_replacements <= 0:
+ return None, "max_replacements must be a positive integer"
+
+ # Compile regex if needed
+ compiled_pattern = None
+ if use_regex:
+ try:
+ flags = 0 if case_sensitive else re.IGNORECASE
+ compiled_pattern = re.compile(find, flags)
+ except re.error as e:
+ return None, f"Invalid regex pattern: {e}"
+ else:
+ # For literal search, prepare flags
+ if not case_sensitive:
+ # Create case-insensitive literal pattern
+ compiled_pattern = re.compile(re.escape(find), re.IGNORECASE)
+
+ # Find specs directory
+ if specs_dir is None:
+ specs_dir = find_specs_directory()
+ if specs_dir is None:
+ return None, "No specs directory found"
+
+ # Load spec
+ spec_path = find_spec_file(spec_id, specs_dir)
+ if not spec_path:
+ return None, f"Specification '{spec_id}' not found"
+ spec_data = load_spec(spec_id, specs_dir)
+ if not spec_data:
+ return None, f"Failed to load specification '{spec_id}'"
+
+ hierarchy = spec_data.get("hierarchy", {})
+ if not hierarchy:
+ return {
+ "spec_id": spec_id,
+ "total_replacements": 0,
+ "nodes_affected": 0,
+ "changes": [],
+ "dry_run": dry_run,
+ "message": "No hierarchy nodes to process",
+ }, None
+
+ # Track changes
+ changes: List[Dict[str, Any]] = []
+ total_replacements = 0
+ nodes_affected = set()
+ warnings: List[str] = []
+ limit_reached = False
+
+ # Helper to perform replacement
+ def do_replace(text: str) -> Tuple[str, int]:
+ if compiled_pattern:
+ new_text, count = compiled_pattern.subn(replace, text)
+ return new_text, count
+ else:
+ # Case-sensitive literal replace
+ count = text.count(find)
+ new_text = text.replace(find, replace)
+ return new_text, count
+
+ # Process hierarchy nodes
+ for node_id, node_data in hierarchy.items():
+ if node_id == "spec-root":
+ continue
+ if limit_reached:
+ break
+
+ # Process title if in scope
+ if scope in ("all", "titles"):
+ title = node_data.get("title", "")
+ if title and isinstance(title, str):
+ new_title, count = do_replace(title)
+ if count > 0:
+ if total_replacements + count > max_replacements:
+ # Partial replacement is not supported, so skip this field entirely
+ warnings.append(
+ f"max_replacements limit ({max_replacements}) reached"
+ )
+ limit_reached = True
+ else:
+ total_replacements += count
+ nodes_affected.add(node_id)
+ changes.append({
+ "node_id": node_id,
+ "field": "title",
+ "old": title,
+ "new": new_title,
+ "replacement_count": count,
+ })
+ if not dry_run:
+ node_data["title"] = new_title
+
+ # Process description if in scope
+ if scope in ("all", "descriptions") and not limit_reached:
+ metadata = node_data.get("metadata", {})
+ if isinstance(metadata, dict):
+ description = metadata.get("description", "")
+ if description and isinstance(description, str):
+ new_description, count = do_replace(description)
+ if count > 0:
+ if total_replacements + count > max_replacements:
+ warnings.append(
+ f"max_replacements limit ({max_replacements}) reached"
+ )
+ limit_reached = True
+ else:
+ total_replacements += count
+ nodes_affected.add(node_id)
+ changes.append({
+ "node_id": node_id,
+ "field": "description",
+ "old": description,
+ "new": new_description,
+ "replacement_count": count,
+ })
+ if not dry_run:
+ metadata["description"] = new_description
+
+ # Save if not dry_run and there were changes
+ if not dry_run and total_replacements > 0:
+ if not save_spec(spec_id, spec_data, specs_dir):
+ return None, "Failed to save specification after replacements"
+
+ # Build result
+ result: Dict[str, Any] = {
+ "spec_id": spec_id,
+ "total_replacements": total_replacements,
+ "nodes_affected": len(nodes_affected),
+ "dry_run": dry_run,
+ "scope": scope,
+ "find": find,
+ "replace": replace,
+ "use_regex": use_regex,
+ "case_sensitive": case_sensitive,
+ }
+
+ # Include sample diffs (limited)
+ if changes:
+ result["changes"] = changes[:_FR_MAX_SAMPLE_DIFFS]
+ if len(changes) > _FR_MAX_SAMPLE_DIFFS:
+ result["changes_truncated"] = True
+ result["total_changes"] = len(changes)
+
+ if warnings:
+ result["warnings"] = warnings
+
+ if total_replacements == 0:
+ result["message"] = "No matches found"
+
+ return result, None
+
+
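
A dry-run rename across titles, per the signature above:

    preview, err = find_replace_in_spec(
        "my-spec", find="OAuth", replace="OIDC",
        scope="titles", case_sensitive=False, dry_run=True,
    )
    if err is None and preview["total_replacements"]:
        for change in preview["changes"]:  # capped at _FR_MAX_SAMPLE_DIFFS entries
            print(change["node_id"], ":", change["old"], "->", change["new"])
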
+ # Completeness check constants
+ _CC_WEIGHT_TITLES = 0.20
+ _CC_WEIGHT_DESCRIPTIONS = 0.30
+ _CC_WEIGHT_FILE_PATHS = 0.25
+ _CC_WEIGHT_ESTIMATES = 0.25
+
+
+ def check_spec_completeness(
+ spec_id: str,
+ *,
+ specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Check spec completeness and calculate a score (0-100).
+
+ Evaluates spec quality by checking for:
+ - Empty titles
+ - Missing task descriptions
+ - Missing file_path for implementation/refactoring tasks
+ - Missing estimated_hours
+
+ Args:
+ spec_id: Specification ID to check.
+ specs_dir: Path to specs directory (auto-detected if not provided).
+
+ Returns:
+ Tuple of (result_dict, error_message).
+ On success: ({"spec_id": ..., "completeness_score": ..., ...}, None)
+ On failure: (None, "error message")
+ """
+ # Find specs directory
+ if specs_dir is None:
+ specs_dir = find_specs_directory()
+ if specs_dir is None:
+ return None, "No specs directory found"
+
+ # Load spec
+ spec_path = find_spec_file(spec_id, specs_dir)
+ if not spec_path:
+ return None, f"Specification '{spec_id}' not found"
+ spec_data = load_spec(spec_id, specs_dir)
+ if not spec_data:
+ return None, f"Failed to load specification '{spec_id}'"
+
+ hierarchy = spec_data.get("hierarchy", {})
+ if not hierarchy:
+ return {
+ "spec_id": spec_id,
+ "completeness_score": 100,
+ "categories": {},
+ "issues": [],
+ "message": "No hierarchy nodes to check",
+ }, None
+
+ # Helper functions
+ def _nonempty_string(value: Any) -> bool:
+ return isinstance(value, str) and bool(value.strip())
+
+ def _has_description(metadata: Dict[str, Any]) -> bool:
+ if _nonempty_string(metadata.get("description")):
+ return True
+ details = metadata.get("details")
+ if _nonempty_string(details):
+ return True
+ if isinstance(details, list):
+ return any(_nonempty_string(item) for item in details)
+ return False
+
+ # Tracking
+ issues: List[Dict[str, Any]] = []
+ categories: Dict[str, Dict[str, Any]] = {
+ "titles": {"complete": 0, "total": 0, "score": 0.0},
+ "descriptions": {"complete": 0, "total": 0, "score": 0.0},
+ "file_paths": {"complete": 0, "total": 0, "score": 0.0},
+ "estimates": {"complete": 0, "total": 0, "score": 0.0},
+ }
+
+ # Check each node
+ for node_id, node in hierarchy.items():
+ if node_id == "spec-root":
+ continue
+ if not isinstance(node, dict):
+ continue
+
+ node_type = node.get("type", "")
+ title = node.get("title", "")
+ metadata = node.get("metadata", {})
+ if not isinstance(metadata, dict):
+ metadata = {}
+
+ # Check title (all nodes)
+ categories["titles"]["total"] += 1
+ if _nonempty_string(title):
+ categories["titles"]["complete"] += 1
+ else:
+ issues.append({
+ "node_id": node_id,
+ "category": "titles",
+ "message": "Empty or missing title",
+ })
+
+ # Check description (tasks and verify nodes only)
+ if node_type in ("task", "verify"):
+ categories["descriptions"]["total"] += 1
+ if _has_description(metadata):
+ categories["descriptions"]["complete"] += 1
+ else:
+ issues.append({
+ "node_id": node_id,
+ "category": "descriptions",
+ "message": "Missing description",
+ })
+
+ # Check file_path (implementation/refactoring tasks only)
+ task_category = metadata.get("task_category", "")
+ if task_category in ("implementation", "refactoring"):
+ categories["file_paths"]["total"] += 1
+ if _nonempty_string(metadata.get("file_path")):
+ categories["file_paths"]["complete"] += 1
+ else:
+ issues.append({
+ "node_id": node_id,
+ "category": "file_paths",
+ "message": "Missing file_path for implementation task",
+ })
+
+ # Check estimated_hours (tasks only)
+ if node_type == "task":
+ categories["estimates"]["total"] += 1
+ est = metadata.get("estimated_hours")
+ if isinstance(est, (int, float)) and est > 0:
+ categories["estimates"]["complete"] += 1
+ else:
+ issues.append({
+ "node_id": node_id,
+ "category": "estimates",
+ "message": "Missing or invalid estimated_hours",
+ })
+
+ # Calculate category scores
+ for cat_data in categories.values():
+ if cat_data["total"] > 0:
+ cat_data["score"] = round(cat_data["complete"] / cat_data["total"], 2)
+ else:
+ cat_data["score"] = 1.0 # No items to check = complete
+
+ # Calculate weighted completeness score
+ weighted_score = 0.0
+ total_weight = 0.0
+
+ if categories["titles"]["total"] > 0:
+ weighted_score += categories["titles"]["score"] * _CC_WEIGHT_TITLES
+ total_weight += _CC_WEIGHT_TITLES
+
+ if categories["descriptions"]["total"] > 0:
+ weighted_score += categories["descriptions"]["score"] * _CC_WEIGHT_DESCRIPTIONS
+ total_weight += _CC_WEIGHT_DESCRIPTIONS
+
+ if categories["file_paths"]["total"] > 0:
+ weighted_score += categories["file_paths"]["score"] * _CC_WEIGHT_FILE_PATHS
+ total_weight += _CC_WEIGHT_FILE_PATHS
+
+ if categories["estimates"]["total"] > 0:
+ weighted_score += categories["estimates"]["score"] * _CC_WEIGHT_ESTIMATES
+ total_weight += _CC_WEIGHT_ESTIMATES
+
+ # Normalize score
+ if total_weight > 0:
+ completeness_score = int(round((weighted_score / total_weight) * 100))
+ else:
+ completeness_score = 100 # Nothing to check
+
+ return {
+ "spec_id": spec_id,
+ "completeness_score": completeness_score,
+ "categories": categories,
+ "issues": issues,
+ "issue_count": len(issues),
+ }, None
+
+
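
To make the weighting concrete: if titles score 1.0, descriptions 0.5, estimates 0.8, and the spec has no implementation/refactoring tasks (so the file_paths weight drops out), then:

    # (1.0*0.20 + 0.5*0.30 + 0.8*0.25) / (0.20 + 0.30 + 0.25)
    # = (0.20 + 0.15 + 0.20) / 0.75 = 0.7333... -> completeness_score == 73
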
+ # Duplicate detection constants
+ _DD_DEFAULT_THRESHOLD = 0.8
+ _DD_MAX_PAIRS = 100
+ _DD_VALID_SCOPES = {"titles", "descriptions", "both"}
+
+
+ def detect_duplicate_tasks(
+ spec_id: str,
+ *,
+ scope: str = "titles",
+ threshold: float = _DD_DEFAULT_THRESHOLD,
+ max_pairs: int = _DD_MAX_PAIRS,
+ specs_dir: Optional[Path] = None,
+ ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+ """
+ Detect duplicate or near-duplicate tasks in a spec.
+
+ Uses text similarity to find tasks with similar titles or descriptions.
+
+ Args:
+ spec_id: Specification ID to check.
+ scope: What to compare - "titles", "descriptions", or "both".
+ threshold: Similarity threshold (0.0-1.0). Default 0.8.
+ max_pairs: Maximum duplicate pairs to return. Default 100.
+ specs_dir: Path to specs directory (auto-detected if not provided).
+
+ Returns:
+ Tuple of (result_dict, error_message).
+ On success: ({"spec_id": ..., "duplicates": [...], ...}, None)
+ On failure: (None, "error message")
+ """
+ from difflib import SequenceMatcher
+
+ # Validate scope
+ if scope not in _DD_VALID_SCOPES:
+ return None, f"scope must be one of: {sorted(_DD_VALID_SCOPES)}"
+
+ # Validate threshold
+ if not isinstance(threshold, (int, float)) or not 0.0 <= threshold <= 1.0:
+ return None, "threshold must be a number between 0.0 and 1.0"
+
+ # Validate max_pairs
+ if not isinstance(max_pairs, int) or max_pairs <= 0:
+ return None, "max_pairs must be a positive integer"
+
+ # Find specs directory
+ if specs_dir is None:
+ specs_dir = find_specs_directory()
+ if specs_dir is None:
+ return None, "No specs directory found"
+
+ # Load spec
+ spec_path = find_spec_file(spec_id, specs_dir)
+ if not spec_path:
+ return None, f"Specification '{spec_id}' not found"
+ spec_data = load_spec(spec_id, specs_dir)
+ if not spec_data:
+ return None, f"Failed to load specification '{spec_id}'"
+
+ hierarchy = spec_data.get("hierarchy", {})
+ if not hierarchy:
+ return {
+ "spec_id": spec_id,
+ "duplicates": [],
+ "duplicate_count": 0,
+ "scope": scope,
+ "threshold": threshold,
+ "message": "No hierarchy nodes to check",
+ }, None
+
+ # Collect tasks/verify nodes with their text
+ nodes: List[Dict[str, Any]] = []
+ for node_id, node in hierarchy.items():
+ if node_id == "spec-root":
+ continue
+ if not isinstance(node, dict):
+ continue
+ node_type = node.get("type", "")
+ if node_type not in ("task", "verify"):
+ continue
+
+ title = node.get("title", "") or ""
+ metadata = node.get("metadata", {})
+ if not isinstance(metadata, dict):
+ metadata = {}
+ description = metadata.get("description", "") or ""
+
+ nodes.append({
+ "id": node_id,
+ "title": title.strip().lower(),
+ "description": description.strip().lower(),
+ })
+
+ # Compare pairs
+ duplicates: List[Dict[str, Any]] = []
+ truncated = False
+ total_compared = 0
+
+ def similarity(a: str, b: str) -> float:
+ if not a or not b:
+ return 0.0
+ return SequenceMatcher(None, a, b).ratio()
+
+ for i, node_a in enumerate(nodes):
+ if len(duplicates) >= max_pairs:
+ truncated = True
+ break
+ for node_b in nodes[i + 1:]:
+ if len(duplicates) >= max_pairs:
+ truncated = True
+ break
+ total_compared += 1
+
+ # Calculate similarity based on scope
+ if scope == "titles":
+ sim = similarity(node_a["title"], node_b["title"])
+ elif scope == "descriptions":
+ sim = similarity(node_a["description"], node_b["description"])
+ else: # both
+ title_sim = similarity(node_a["title"], node_b["title"])
+ desc_sim = similarity(node_a["description"], node_b["description"])
+ sim = max(title_sim, desc_sim)
+
+ if sim >= threshold:
+ duplicates.append({
+ "node_a": node_a["id"],
+ "node_b": node_b["id"],
+ "similarity": round(sim, 2),
+ "scope": scope,
+ })
+
+ result: Dict[str, Any] = {
+ "spec_id": spec_id,
+ "duplicates": duplicates,
+ "duplicate_count": len(duplicates),
+ "scope": scope,
+ "threshold": threshold,
+ "nodes_checked": len(nodes),
+ "pairs_compared": total_compared,
+ }
+
+ if truncated:
+ result["truncated"] = True
+ result["warnings"] = [f"Results limited to {max_pairs} pairs"]
+
+ return result, None
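
A typical scan with a tighter threshold than the 0.8 default:

    dupes, err = detect_duplicate_tasks("my-spec", scope="both", threshold=0.9)
    if err is None:
        for pair in dupes["duplicates"]:
            print(pair["node_a"], "~", pair["node_b"], pair["similarity"])
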