foundry-mcp 0.3.3__py3-none-any.whl → 0.8.10__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the versions as they appear in their public registry.
Files changed (85)
  1. foundry_mcp/__init__.py +7 -1
  2. foundry_mcp/cli/__init__.py +0 -13
  3. foundry_mcp/cli/commands/plan.py +10 -3
  4. foundry_mcp/cli/commands/review.py +19 -4
  5. foundry_mcp/cli/commands/session.py +1 -8
  6. foundry_mcp/cli/commands/specs.py +38 -208
  7. foundry_mcp/cli/context.py +39 -0
  8. foundry_mcp/cli/output.py +3 -3
  9. foundry_mcp/config.py +615 -11
  10. foundry_mcp/core/ai_consultation.py +146 -9
  11. foundry_mcp/core/batch_operations.py +1196 -0
  12. foundry_mcp/core/discovery.py +7 -7
  13. foundry_mcp/core/error_store.py +2 -2
  14. foundry_mcp/core/intake.py +933 -0
  15. foundry_mcp/core/llm_config.py +28 -2
  16. foundry_mcp/core/metrics_store.py +2 -2
  17. foundry_mcp/core/naming.py +25 -2
  18. foundry_mcp/core/progress.py +70 -0
  19. foundry_mcp/core/prometheus.py +0 -13
  20. foundry_mcp/core/prompts/fidelity_review.py +149 -4
  21. foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
  22. foundry_mcp/core/prompts/plan_review.py +5 -1
  23. foundry_mcp/core/providers/__init__.py +12 -0
  24. foundry_mcp/core/providers/base.py +39 -0
  25. foundry_mcp/core/providers/claude.py +51 -48
  26. foundry_mcp/core/providers/codex.py +70 -60
  27. foundry_mcp/core/providers/cursor_agent.py +25 -47
  28. foundry_mcp/core/providers/detectors.py +34 -7
  29. foundry_mcp/core/providers/gemini.py +69 -58
  30. foundry_mcp/core/providers/opencode.py +101 -47
  31. foundry_mcp/core/providers/package-lock.json +4 -4
  32. foundry_mcp/core/providers/package.json +1 -1
  33. foundry_mcp/core/providers/validation.py +128 -0
  34. foundry_mcp/core/research/__init__.py +68 -0
  35. foundry_mcp/core/research/memory.py +528 -0
  36. foundry_mcp/core/research/models.py +1220 -0
  37. foundry_mcp/core/research/providers/__init__.py +40 -0
  38. foundry_mcp/core/research/providers/base.py +242 -0
  39. foundry_mcp/core/research/providers/google.py +507 -0
  40. foundry_mcp/core/research/providers/perplexity.py +442 -0
  41. foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
  42. foundry_mcp/core/research/providers/tavily.py +383 -0
  43. foundry_mcp/core/research/workflows/__init__.py +25 -0
  44. foundry_mcp/core/research/workflows/base.py +298 -0
  45. foundry_mcp/core/research/workflows/chat.py +271 -0
  46. foundry_mcp/core/research/workflows/consensus.py +539 -0
  47. foundry_mcp/core/research/workflows/deep_research.py +4020 -0
  48. foundry_mcp/core/research/workflows/ideate.py +682 -0
  49. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  50. foundry_mcp/core/responses.py +690 -0
  51. foundry_mcp/core/spec.py +2439 -236
  52. foundry_mcp/core/task.py +1205 -31
  53. foundry_mcp/core/testing.py +512 -123
  54. foundry_mcp/core/validation.py +319 -43
  55. foundry_mcp/dashboard/components/charts.py +0 -57
  56. foundry_mcp/dashboard/launcher.py +11 -0
  57. foundry_mcp/dashboard/views/metrics.py +25 -35
  58. foundry_mcp/dashboard/views/overview.py +1 -65
  59. foundry_mcp/resources/specs.py +25 -25
  60. foundry_mcp/schemas/intake-schema.json +89 -0
  61. foundry_mcp/schemas/sdd-spec-schema.json +33 -5
  62. foundry_mcp/server.py +0 -14
  63. foundry_mcp/tools/unified/__init__.py +39 -18
  64. foundry_mcp/tools/unified/authoring.py +2371 -248
  65. foundry_mcp/tools/unified/documentation_helpers.py +69 -6
  66. foundry_mcp/tools/unified/environment.py +434 -32
  67. foundry_mcp/tools/unified/error.py +18 -1
  68. foundry_mcp/tools/unified/lifecycle.py +8 -0
  69. foundry_mcp/tools/unified/plan.py +133 -2
  70. foundry_mcp/tools/unified/provider.py +0 -40
  71. foundry_mcp/tools/unified/research.py +1283 -0
  72. foundry_mcp/tools/unified/review.py +374 -17
  73. foundry_mcp/tools/unified/review_helpers.py +16 -1
  74. foundry_mcp/tools/unified/server.py +9 -24
  75. foundry_mcp/tools/unified/spec.py +367 -0
  76. foundry_mcp/tools/unified/task.py +1664 -30
  77. foundry_mcp/tools/unified/test.py +69 -8
  78. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/METADATA +8 -1
  79. foundry_mcp-0.8.10.dist-info/RECORD +153 -0
  80. foundry_mcp/cli/flags.py +0 -266
  81. foundry_mcp/core/feature_flags.py +0 -592
  82. foundry_mcp-0.3.3.dist-info/RECORD +0 -135
  83. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/WHEEL +0 -0
  84. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/entry_points.txt +0 -0
  85. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/licenses/LICENSE +0 -0
foundry_mcp/core/task.py CHANGED
@@ -8,11 +8,18 @@ from dataclasses import asdict
 from pathlib import Path
 from typing import Optional, Dict, Any, Tuple, List
 
-from foundry_mcp.core.spec import load_spec, save_spec, find_spec_file, find_specs_directory, get_node
+from foundry_mcp.core.spec import (
+    CATEGORIES,
+    load_spec,
+    save_spec,
+    find_spec_file,
+    find_specs_directory,
+    get_node,
+)
 from foundry_mcp.core.responses import success_response, error_response
 
 # Valid task types for add_task
-TASK_TYPES = ("task", "subtask", "verify")
+TASK_TYPES = ("task", "subtask", "verify", "research")
 
 
 def is_unblocked(spec_data: Dict[str, Any], task_id: str, task_data: Dict[str, Any]) -> bool:
@@ -24,6 +31,11 @@ def is_unblocked(spec_data: Dict[str, Any], task_id: str, task_data: Dict[str, Any]) -> bool:
     1. Any of its direct task dependencies are not completed, OR
     2. Its parent phase is blocked by an incomplete phase
 
+    Research nodes have special blocking behavior based on blocking_mode:
+    - "none": Research doesn't block dependents
+    - "soft": Research is informational, doesn't block (default)
+    - "hard": Research must complete before dependents can start
+
     Args:
         spec_data: JSON spec file data
        task_id: Task identifier
@@ -38,7 +50,18 @@ def is_unblocked(spec_data: Dict[str, Any], task_id: str, task_data: Dict[str, Any]) -> bool:
     blocked_by = task_data.get("dependencies", {}).get("blocked_by", [])
     for blocker_id in blocked_by:
         blocker = hierarchy.get(blocker_id)
-        if not blocker or blocker.get("status") != "completed":
+        if not blocker:
+            continue
+
+        # Special handling for research nodes based on blocking_mode
+        if blocker.get("type") == "research":
+            blocking_mode = blocker.get("metadata", {}).get("blocking_mode", "soft")
+            if blocking_mode in ("none", "soft"):
+                # Research with "none" or "soft" blocking mode doesn't block
+                continue
+            # "hard" mode falls through to standard completion check
+
+        if blocker.get("status") != "completed":
             return False
 
     # Check phase-level dependencies
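
A minimal sketch of how the three modes play out (illustrative, not from the package; assumes foundry-mcp 0.8.10 is importable and that the phase-level checks later in the function pass for parentless nodes):

# Hypothetical mini-hierarchy: a pending task blocked by an incomplete research node.
from foundry_mcp.core.task import is_unblocked

def make_spec(blocking_mode):
    return {
        "hierarchy": {
            "research-1-1": {
                "type": "research",
                "status": "in_progress",  # not yet completed
                "metadata": {"blocking_mode": blocking_mode},
            },
            "task-1-2": {
                "type": "task",
                "status": "pending",
                "dependencies": {"blocked_by": ["research-1-1"]},
            },
        }
    }

for mode in ("none", "soft", "hard"):
    spec = make_spec(mode)
    print(mode, is_unblocked(spec, "task-1-2", spec["hierarchy"]["task-1-2"]))
# Expected: "none" and "soft" print True; "hard" prints False until the
# research node's status becomes "completed".
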
@@ -526,6 +549,82 @@ def get_task_journal_summary(
     }
 
 
+def _compute_auto_mode_hints(
+    spec_data: Dict[str, Any],
+    task_id: str,
+    task_data: Dict[str, Any],
+) -> Dict[str, Any]:
+    """
+    Compute hints for autonomous mode execution.
+
+    These hints help an autonomous agent decide whether to proceed
+    without user input or pause for confirmation.
+
+    Args:
+        spec_data: Loaded spec data
+        task_id: Current task ID
+        task_data: Task node data
+
+    Returns:
+        Dictionary with autonomous mode hints:
+        - estimated_complexity: "low", "medium", or "high"
+        - has_sibling_verify: bool (phase has verify tasks)
+        - may_require_user_input: bool (task category suggests user input needed)
+    """
+    hierarchy = spec_data.get("hierarchy", {})
+    metadata = task_data.get("metadata", {}) or {}
+
+    # Compute estimated_complexity
+    complexity = metadata.get("complexity", "").lower()
+    estimated_hours = metadata.get("estimated_hours")
+
+    if complexity in ("complex", "high"):
+        estimated_complexity = "high"
+    elif complexity in ("medium", "moderate"):
+        estimated_complexity = "medium"
+    elif complexity in ("simple", "low"):
+        estimated_complexity = "low"
+    elif estimated_hours is not None:
+        # Derive from hours if explicit complexity not set
+        if estimated_hours > 2:
+            estimated_complexity = "high"
+        elif estimated_hours > 0.5:
+            estimated_complexity = "medium"
+        else:
+            estimated_complexity = "low"
+    else:
+        # Default to medium if no hints
+        estimated_complexity = "medium"
+
+    # Check has_sibling_verify - look for verify tasks in same phase
+    parent_id = task_data.get("parent")
+    has_sibling_verify = False
+    if parent_id:
+        parent = hierarchy.get(parent_id, {})
+        children = parent.get("children", [])
+        for sibling_id in children:
+            if sibling_id != task_id:
+                sibling = hierarchy.get(sibling_id, {})
+                if sibling.get("type") == "verify":
+                    has_sibling_verify = True
+                    break
+
+    # Check may_require_user_input based on task_category
+    task_category = metadata.get("task_category", "").lower()
+    may_require_user_input = task_category in (
+        "decision",
+        "investigation",
+        "planning",
+        "design",
+    )
+
+    return {
+        "estimated_complexity": estimated_complexity,
+        "has_sibling_verify": has_sibling_verify,
+        "may_require_user_input": may_require_user_input,
+    }
+
+
 def prepare_task(
     spec_id: str,
     specs_dir: Path,
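
To make the hint derivation concrete, a hedged sketch (the helper is private; the hierarchy below is invented to exercise each branch):

from foundry_mcp.core.task import _compute_auto_mode_hints

spec = {
    "hierarchy": {
        "phase-1": {"type": "phase", "children": ["task-1-1", "verify-1-1"]},
        "task-1-1": {
            "type": "task",
            "parent": "phase-1",
            # no explicit "complexity", so estimated_hours drives the estimate
            "metadata": {"estimated_hours": 3.0, "task_category": "decision"},
        },
        "verify-1-1": {"type": "verify", "parent": "phase-1"},
    }
}
hints = _compute_auto_mode_hints(spec, "task-1-1", spec["hierarchy"]["task-1-1"])
# 3.0h > 2 -> "high"; a verify sibling exists; "decision" suggests user input:
# {"estimated_complexity": "high", "has_sibling_verify": True,
#  "may_require_user_input": True}
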
@@ -592,12 +691,16 @@ def prepare_task(
         "task_journal": get_task_journal_summary(spec_data, task_id),
     }
 
+    # Compute autonomous mode hints
+    auto_mode_hints = _compute_auto_mode_hints(spec_data, task_id, task_data)
+
     return asdict(success_response(
         task_id=task_id,
         task_data=task_data,
         dependencies=deps,
         spec_complete=False,
-        context=context
+        context=context,
+        auto_mode_hints=auto_mode_hints,
    ))
 
 
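
How a caller might consume the new field (sketch only; the exact envelope produced by success_response/asdict is not shown in this diff, so the lookup below is an assumption):

from pathlib import Path
from foundry_mcp.core.task import prepare_task

payload = prepare_task("example-spec", Path("./specs"))  # hypothetical spec ID
# Assumption: the keyword fields surface in the returned mapping, possibly
# under a "data" envelope depending on the response dataclass shape.
hints = (payload.get("data") or payload).get("auto_mode_hints", {})
if hints.get("estimated_complexity") == "high" or hints.get("may_require_user_input"):
    print("autonomous agent should pause for confirmation")
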
@@ -612,28 +715,35 @@ def _generate_task_id(parent_id: str, existing_children: List[str], task_type: str
     For verify IDs:
     - Same pattern but with "verify-" prefix
 
+    For research IDs:
+    - Same pattern but with "research-" prefix
+
     Args:
         parent_id: Parent node ID
         existing_children: List of existing child IDs
-        task_type: Type of task (task, subtask, verify)
+        task_type: Type of task (task, subtask, verify, research)
 
     Returns:
         New task ID string
     """
-    prefix = "verify" if task_type == "verify" else "task"
+    # Map task_type to ID prefix
+    prefix_map = {"verify": "verify", "research": "research"}
+    prefix = prefix_map.get(task_type, "task")
 
     # Extract numeric parts from parent
     if parent_id.startswith("phase-"):
         # Parent is phase-N, new task is task-N-1, task-N-2, etc.
         phase_num = parent_id.replace("phase-", "")
         base = f"{prefix}-{phase_num}"
-    elif parent_id.startswith("task-") or parent_id.startswith("verify-"):
-        # Parent is task-N-M or verify-N-M, new task appends next number
-        # Remove the prefix (task- or verify-) to get the numeric path
+    elif parent_id.startswith(("task-", "verify-", "research-")):
+        # Parent is task-N-M, verify-N-M, or research-N-M; new task appends next number
+        # Remove the prefix to get the numeric path
         if parent_id.startswith("task-"):
             base = f"{prefix}-{parent_id[5:]}"  # len("task-") = 5
-        else:
+        elif parent_id.startswith("verify-"):
             base = f"{prefix}-{parent_id[7:]}"  # len("verify-") = 7
+        else:  # research-
+            base = f"{prefix}-{parent_id[9:]}"  # len("research-") = 9
     else:
         # Unknown parent type, generate based on existing children count
         base = f"{prefix}-1"
@@ -687,12 +797,17 @@
     task_type: str = "task",
     estimated_hours: Optional[float] = None,
     position: Optional[int] = None,
+    file_path: Optional[str] = None,
     specs_dir: Optional[Path] = None,
+    # Research-specific parameters
+    research_type: Optional[str] = None,
+    blocking_mode: Optional[str] = None,
+    query: Optional[str] = None,
 ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
     """
     Add a new task to a specification's hierarchy.
 
-    Creates a new task, subtask, or verify node under the specified parent.
+    Creates a new task, subtask, verify, or research node under the specified parent.
     Automatically generates the task ID and updates ancestor task counts.
 
     Args:
@@ -700,10 +815,14 @@
         parent_id: Parent node ID (phase or task).
         title: Task title.
         description: Optional task description.
-        task_type: Type of task (task, subtask, verify). Default: task.
+        task_type: Type of task (task, subtask, verify, research). Default: task.
         estimated_hours: Optional estimated hours.
         position: Optional position in parent's children list (0-based).
+        file_path: Optional file path associated with this task.
         specs_dir: Path to specs directory (auto-detected if not provided).
+        research_type: For research nodes - workflow type (chat, consensus, etc).
+        blocking_mode: For research nodes - blocking behavior (none, soft, hard).
+        query: For research nodes - the research question/topic.
 
     Returns:
         Tuple of (result_dict, error_message).
@@ -714,6 +833,15 @@
     if task_type not in TASK_TYPES:
         return None, f"Invalid task_type '{task_type}'. Must be one of: {', '.join(TASK_TYPES)}"
 
+    # Validate research-specific parameters
+    if task_type == "research":
+        from foundry_mcp.core.validation import VALID_RESEARCH_TYPES, RESEARCH_BLOCKING_MODES
+
+        if research_type and research_type not in VALID_RESEARCH_TYPES:
+            return None, f"Invalid research_type '{research_type}'. Must be one of: {', '.join(sorted(VALID_RESEARCH_TYPES))}"
+        if blocking_mode and blocking_mode not in RESEARCH_BLOCKING_MODES:
+            return None, f"Invalid blocking_mode '{blocking_mode}'. Must be one of: {', '.join(sorted(RESEARCH_BLOCKING_MODES))}"
+
     # Validate title
     if not title or not title.strip():
         return None, "Title is required"
@@ -762,6 +890,17 @@
         metadata["description"] = description.strip()
     if estimated_hours is not None:
         metadata["estimated_hours"] = estimated_hours
+    if file_path:
+        metadata["file_path"] = file_path.strip()
+
+    # Add research-specific metadata
+    if task_type == "research":
+        metadata["research_type"] = research_type or "consensus"  # Default to consensus
+        metadata["blocking_mode"] = blocking_mode or "soft"  # Default to soft blocking
+        if query:
+            metadata["query"] = query.strip()
+        metadata["research_history"] = []  # Empty history initially
+        metadata["findings"] = {}  # Empty findings initially
 
     # Create the task node
     task_node = {
@@ -804,6 +943,7 @@
         "title": title,
         "type": task_type,
         "position": position if position is not None else len(existing_children) - 1,
+        "file_path": file_path.strip() if file_path else None,
     }, None
 
 
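
Putting the new parameters together, a research node might be created like this (sketch; the spec ID and path are hypothetical, and omitted research_type/blocking_mode fall back to "consensus"/"soft" as shown above):

from pathlib import Path
from foundry_mcp.core.task import add_task

result, err = add_task(
    spec_id="example-spec",              # hypothetical
    parent_id="phase-1",
    title="Evaluate payment providers",
    task_type="research",
    research_type="consensus",
    blocking_mode="hard",                # dependents must wait for completion
    query="Which provider fits our latency budget?",
    specs_dir=Path("./specs"),
)
print(err or result)
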
@@ -969,7 +1109,8 @@
     # Validate task type (can only remove task, subtask, verify)
     task_type = task.get("type")
     if task_type not in ("task", "subtask", "verify"):
-        return None, f"Cannot remove node type '{task_type}'. Only task, subtask, or verify nodes can be removed."
+        hint = " Use `authoring action=\"phase-remove\"` instead." if task_type == "phase" else ""
+        return None, f"Cannot remove node type '{task_type}'. Only task, subtask, or verify nodes can be removed.{hint}"
 
     # Check for children
     children = task.get("children", [])
@@ -1139,56 +1280,453 @@ def update_estimate(
 
 
 # Valid verification types for update_task_metadata
-VERIFICATION_TYPES = ("auto", "manual", "none")
+VERIFICATION_TYPES = ("run-tests", "fidelity", "manual")
 
 # Valid task categories
-TASK_CATEGORIES = ("implementation", "testing", "documentation", "investigation", "refactoring", "design")
+TASK_CATEGORIES = CATEGORIES
+
+# Valid dependency types for manage_task_dependency
+DEPENDENCY_TYPES = ("blocks", "blocked_by", "depends")
+
+# Valid requirement types for update_task_requirements
+REQUIREMENT_TYPES = ("acceptance", "technical", "constraint")
+
+# Maximum number of requirements per task (to prevent unbounded growth)
+MAX_REQUIREMENTS_PER_TASK = 50
+
+
+def _would_create_circular_dependency(
+    hierarchy: Dict[str, Any],
+    source_id: str,
+    target_id: str,
+    dep_type: str,
+) -> bool:
+    """
+    Check if adding a dependency would create a circular reference.
+
+    For blocking dependencies:
+    - Adding A blocks B means B is blocked_by A
+    - Circular if B already blocks A (directly or transitively)
+
+    Uses breadth-first search to detect cycles in the dependency graph.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        source_id: Source task ID
+        target_id: Target task ID
+        dep_type: Type of dependency being added
+
+    Returns:
+        True if adding this dependency would create a cycle
+    """
+    if source_id == target_id:
+        return True
+
+    # For "blocks": source blocks target, so target cannot already block source
+    # For "blocked_by": source is blocked_by target, so source cannot already block target
+    # For "depends": soft dependency, check for cycles in depends chain
+
+    if dep_type == "blocks":
+        # If source blocks target, check if target already blocks source (transitively)
+        # i.e., walk from target's "blocks" chain to see if we reach source
+        return _can_reach_via_dependency(hierarchy, target_id, source_id, "blocks")
+    elif dep_type == "blocked_by":
+        # If source is blocked_by target, check if source already blocks target (transitively)
+        return _can_reach_via_dependency(hierarchy, source_id, target_id, "blocks")
+    elif dep_type == "depends":
+        # Check for cycles in depends chain
+        return _can_reach_via_dependency(hierarchy, target_id, source_id, "depends")
+
+    return False
+
+
+def _can_reach_via_dependency(
+    hierarchy: Dict[str, Any],
+    start_id: str,
+    target_id: str,
+    dep_key: str,
+) -> bool:
+    """
+    Check if target_id can be reached from start_id via dependency chains.
+
+    Uses BFS to traverse the dependency graph.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        start_id: Starting node ID
+        target_id: Target node ID to find
+        dep_key: Which dependency list to follow ("blocks", "blocked_by", "depends")
+
+    Returns:
+        True if target_id is reachable from start_id
+    """
+    visited = set()
+    queue = [start_id]
+
+    while queue:
+        current_id = queue.pop(0)
+        if current_id in visited:
+            continue
+        visited.add(current_id)
+
+        if current_id == target_id:
+            return True
+
+        node = hierarchy.get(current_id)
+        if not node:
+            continue
+
+        deps = node.get("dependencies", {})
+        next_ids = deps.get(dep_key, [])
+        if isinstance(next_ids, list):
+            for next_id in next_ids:
+                if next_id not in visited:
+                    queue.append(next_id)
+
+    return False
+
+
+def manage_task_dependency(
+    spec_id: str,
+    source_task_id: str,
+    target_task_id: str,
+    dependency_type: str,
+    action: str = "add",
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Add or remove a dependency relationship between two tasks.
+
+    Manages blocks, blocked_by, and depends relationships between tasks.
+    Updates both source and target tasks atomically.
+
+    Dependency types:
+    - blocks: Source task blocks target (target cannot start until source completes)
+    - blocked_by: Source task is blocked by target (source cannot start until target completes)
+    - depends: Soft dependency (informational, doesn't block)
+
+    When adding:
+    - blocks: Adds target to source.blocks AND source to target.blocked_by
+    - blocked_by: Adds target to source.blocked_by AND source to target.blocks
+    - depends: Only adds target to source.depends (soft, no reciprocal)
+
+    Args:
+        spec_id: Specification ID containing the tasks.
+        source_task_id: Source task ID.
+        target_task_id: Target task ID.
+        dependency_type: Type of dependency (blocks, blocked_by, depends).
+        action: Action to perform (add or remove). Default: add.
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"source_task": ..., "target_task": ..., "dependency_type": ..., ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate action
+    if action not in ("add", "remove"):
+        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"
+
+    # Validate dependency_type
+    if dependency_type not in DEPENDENCY_TYPES:
+        return None, f"Invalid dependency_type '{dependency_type}'. Must be one of: {', '.join(DEPENDENCY_TYPES)}"
+
+    # Prevent self-reference
+    if source_task_id == target_task_id:
+        return None, f"Cannot add dependency: task '{source_task_id}' cannot depend on itself"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate source task exists
+    source_task = hierarchy.get(source_task_id)
+    if source_task is None:
+        return None, f"Source task '{source_task_id}' not found"
+
+    # Validate source task type
+    source_type = source_task.get("type")
+    if source_type not in ("task", "subtask", "verify", "phase"):
+        return None, f"Cannot manage dependencies for node type '{source_type}'"
+
+    # Validate target task exists
+    target_task = hierarchy.get(target_task_id)
+    if target_task is None:
+        return None, f"Target task '{target_task_id}' not found"
+
+    # Validate target task type
+    target_type = target_task.get("type")
+    if target_type not in ("task", "subtask", "verify", "phase"):
+        return None, f"Cannot add dependency to node type '{target_type}'"
+
+    # Get or create dependencies for both tasks
+    source_deps = source_task.get("dependencies")
+    if source_deps is None:
+        source_deps = {"blocks": [], "blocked_by": [], "depends": []}
+        source_task["dependencies"] = source_deps
+
+    target_deps = target_task.get("dependencies")
+    if target_deps is None:
+        target_deps = {"blocks": [], "blocked_by": [], "depends": []}
+        target_task["dependencies"] = target_deps
+
+    # Ensure lists exist
+    for key in DEPENDENCY_TYPES:
+        if not isinstance(source_deps.get(key), list):
+            source_deps[key] = []
+        if not isinstance(target_deps.get(key), list):
+            target_deps[key] = []
+
+    # Determine the reciprocal relationship
+    reciprocal_type = None
+    if dependency_type == "blocks":
+        reciprocal_type = "blocked_by"
+    elif dependency_type == "blocked_by":
+        reciprocal_type = "blocks"
+    # depends has no reciprocal
+
+    if action == "add":
+        # Check for circular dependencies
+        if _would_create_circular_dependency(hierarchy, source_task_id, target_task_id, dependency_type):
+            return None, f"Cannot add dependency: would create circular reference between '{source_task_id}' and '{target_task_id}'"
+
+        # Check if dependency already exists
+        if target_task_id in source_deps[dependency_type]:
+            return None, f"Dependency already exists: {source_task_id} {dependency_type} {target_task_id}"
+
+        # Add the dependency
+        source_deps[dependency_type].append(target_task_id)
+
+        # Add reciprocal if applicable (blocks <-> blocked_by)
+        if reciprocal_type:
+            if source_task_id not in target_deps[reciprocal_type]:
+                target_deps[reciprocal_type].append(source_task_id)
+
+    elif action == "remove":
+        # Check if dependency exists
+        if target_task_id not in source_deps[dependency_type]:
+            return None, f"Dependency does not exist: {source_task_id} {dependency_type} {target_task_id}"
+
+        # Remove the dependency
+        source_deps[dependency_type].remove(target_task_id)
+
+        # Remove reciprocal if applicable
+        if reciprocal_type and source_task_id in target_deps[reciprocal_type]:
+            target_deps[reciprocal_type].remove(source_task_id)
+
+    # Build result
+    result = {
+        "spec_id": spec_id,
+        "source_task": source_task_id,
+        "target_task": target_task_id,
+        "dependency_type": dependency_type,
+        "action": action,
+        "dry_run": dry_run,
+        "source_dependencies": {
+            "blocks": source_deps["blocks"],
+            "blocked_by": source_deps["blocked_by"],
+            "depends": source_deps["depends"],
+        },
+        "target_dependencies": {
+            "blocks": target_deps["blocks"],
+            "blocked_by": target_deps["blocked_by"],
+            "depends": target_deps["depends"],
+        },
+    }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+def _is_descendant(hierarchy: Dict[str, Any], ancestor_id: str, potential_descendant_id: str) -> bool:
+    """
+    Check if a node is a descendant of another node.
+
+    Used to prevent circular references when moving tasks.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        ancestor_id: The potential ancestor node ID
+        potential_descendant_id: The node to check if it's a descendant
+
+    Returns:
+        True if potential_descendant_id is a descendant of ancestor_id
+    """
+    if ancestor_id == potential_descendant_id:
+        return True
+
+    descendants = _collect_descendants(hierarchy, ancestor_id)
+    return potential_descendant_id in descendants
+
+
+def _get_phase_for_node(hierarchy: Dict[str, Any], node_id: str) -> Optional[str]:
+    """
+    Walk up the hierarchy to find the phase containing a node.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_id: The node to find the phase for
+
+    Returns:
+        Phase ID if found, None otherwise
+    """
+    current_id = node_id
+    visited = set()
+
+    while current_id:
+        if current_id in visited:
+            break
+        visited.add(current_id)
+
+        node = hierarchy.get(current_id)
+        if not node:
+            break
+
+        if node.get("type") == "phase":
+            return current_id
+
+        current_id = node.get("parent")
+
+    return None
+
+
+def _check_cross_phase_dependencies(
+    hierarchy: Dict[str, Any],
+    task_id: str,
+    old_phase_id: Optional[str],
+    new_phase_id: Optional[str],
+) -> List[str]:
+    """
+    Check for potential dependency issues when moving across phases.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        task_id: The task being moved
+        old_phase_id: The original phase ID
+        new_phase_id: The target phase ID
+
+    Returns:
+        List of warning messages about potential dependency issues
+    """
+    warnings = []
+
+    if old_phase_id == new_phase_id:
+        return warnings
+
+    task = hierarchy.get(task_id)
+    if not task:
+        return warnings
+
+    deps = task.get("dependencies", {})
+
+    # Check blocked_by dependencies
+    blocked_by = deps.get("blocked_by", [])
+    for dep_id in blocked_by:
+        dep_phase = _get_phase_for_node(hierarchy, dep_id)
+        if dep_phase and dep_phase != new_phase_id:
+            dep_node = hierarchy.get(dep_id, {})
+            warnings.append(
+                f"Task '{task_id}' is blocked by '{dep_id}' ({dep_node.get('title', '')}) "
+                f"which is in a different phase ('{dep_phase}')"
+            )
+
+    # Check blocks dependencies
+    blocks = deps.get("blocks", [])
+    for dep_id in blocks:
+        dep_phase = _get_phase_for_node(hierarchy, dep_id)
+        if dep_phase and dep_phase != new_phase_id:
+            dep_node = hierarchy.get(dep_id, {})
+            warnings.append(
+                f"Task '{task_id}' blocks '{dep_id}' ({dep_node.get('title', '')}) "
+                f"which is in a different phase ('{dep_phase}')"
+            )
+
+    return warnings
 
 
 def update_task_metadata(
     spec_id: str,
     task_id: str,
+    title: Optional[str] = None,
     file_path: Optional[str] = None,
     description: Optional[str] = None,
+    acceptance_criteria: Optional[List[str]] = None,
     task_category: Optional[str] = None,
     actual_hours: Optional[float] = None,
     status_note: Optional[str] = None,
     verification_type: Optional[str] = None,
     command: Optional[str] = None,
     custom_metadata: Optional[Dict[str, Any]] = None,
+    dry_run: bool = False,
     specs_dir: Optional[Path] = None,
 ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
     """
     Update arbitrary metadata fields on a task.
 
-    Updates various metadata fields on a task including file path, description,
+    Updates various metadata fields on a task including title, file path, description,
     category, hours, notes, verification type, and custom fields.
     At least one field must be provided.
 
     Args:
         spec_id: Specification ID containing the task.
         task_id: Task ID to update.
+        title: Optional new title for the task (cannot be empty/whitespace-only).
         file_path: Optional file path associated with the task.
         description: Optional task description.
-        task_category: Optional task category (implementation, testing, etc.).
+        acceptance_criteria: Optional acceptance criteria list.
+        task_category: Optional task category (implementation, refactoring, investigation, decision, research).
         actual_hours: Optional actual hours spent on task (must be >= 0).
         status_note: Optional status note or completion note.
-        verification_type: Optional verification type (auto, manual, none).
+        verification_type: Optional verification type (run-tests, fidelity, manual).
         command: Optional command executed for the task.
         custom_metadata: Optional dict of custom metadata fields to merge.
+        dry_run: If True, validate and return preview without saving changes.
         specs_dir: Path to specs directory (auto-detected if not provided).
 
     Returns:
         Tuple of (result_dict, error_message).
-        On success: ({"task_id": ..., "fields_updated": [...], ...}, None)
+        On success: ({"task_id": ..., "fields_updated": [...], "previous_values": {...}, ...}, None)
         On failure: (None, "error message")
     """
-    # Collect all provided fields
+    # Validate title if provided (cannot be empty/whitespace-only)
+    title_update: Optional[str] = None
+    if title is not None:
+        title_stripped = title.strip()
+        if not title_stripped:
+            return None, "Title cannot be empty or whitespace-only"
+        title_update = title_stripped
+
+    # Collect all provided metadata fields
     updates: Dict[str, Any] = {}
     if file_path is not None:
         updates["file_path"] = file_path.strip() if file_path else None
     if description is not None:
         updates["description"] = description.strip() if description else None
+    if acceptance_criteria is not None:
+        updates["acceptance_criteria"] = acceptance_criteria
     if task_category is not None:
         updates["task_category"] = task_category
     if actual_hours is not None:
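
The cycle guard above is a plain BFS over the blocks lists; a sketch of the transitive check it performs (private helpers, shown here for illustration only):

from foundry_mcp.core.task import _can_reach_via_dependency

hierarchy = {
    "task-1-1": {"dependencies": {"blocks": ["task-1-2"]}},
    "task-1-2": {"dependencies": {"blocks": ["task-1-3"]}},
    "task-1-3": {"dependencies": {"blocks": []}},
}
# task-1-1 already blocks task-1-3 transitively ...
print(_can_reach_via_dependency(hierarchy, "task-1-1", "task-1-3", "blocks"))  # True
# ... so manage_task_dependency(..., source_task_id="task-1-3",
# target_task_id="task-1-1", dependency_type="blocks") is rejected as circular.
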
@@ -1200,9 +1738,9 @@ def update_task_metadata(
     if command is not None:
         updates["command"] = command.strip() if command else None
 
-    # Validate at least one field is provided
-    if not updates and not custom_metadata:
-        return None, "At least one metadata field must be provided"
+    # Validate at least one field is provided (title or metadata fields)
+    if title_update is None and not updates and not custom_metadata:
+        return None, "At least one field must be provided (title or metadata fields)"
 
     # Validate actual_hours
     if actual_hours is not None:
@@ -1211,6 +1749,16 @@
         if actual_hours < 0:
             return None, "actual_hours must be >= 0"
 
+    if acceptance_criteria is not None:
+        if not isinstance(acceptance_criteria, list):
+            return None, "acceptance_criteria must be a list of strings"
+        cleaned_criteria = []
+        for item in acceptance_criteria:
+            if not isinstance(item, str) or not item.strip():
+                return None, "acceptance_criteria must be a list of non-empty strings"
+            cleaned_criteria.append(item.strip())
+        updates["acceptance_criteria"] = cleaned_criteria
+
     # Validate task_category
     if task_category is not None:
         task_category_lower = task_category.lower().strip()
@@ -1259,12 +1807,20 @@
         metadata = {}
         task["metadata"] = metadata
 
-    # Track which fields were updated
+    # Track which fields were updated and their previous values
     fields_updated = []
+    previous_values: Dict[str, Any] = {}
 
-    # Apply updates
+    # Apply title update (core field on task, not metadata)
+    if title_update is not None:
+        previous_values["title"] = task.get("title")
+        task["title"] = title_update
+        fields_updated.append("title")
+
+    # Apply metadata updates
     for key, value in updates.items():
         if value is not None or key in metadata:
+            previous_values[key] = metadata.get(key)
             metadata[key] = value
             fields_updated.append(key)
 
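
A dry-run preview exercising the new title and previous_values plumbing (sketch; the spec ID is hypothetical):

from foundry_mcp.core.task import update_task_metadata

result, err = update_task_metadata(
    spec_id="example-spec",            # hypothetical
    task_id="task-1-1",
    title="Select payment provider",
    task_category="decision",
    dry_run=True,
)
if result:
    print(result["fields_updated"])    # ["title", "task_category"]
    print(result["previous_values"])   # old title and category, for undo/audit
    print(result["message"])           # "Dry run - changes not saved"
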
@@ -1273,17 +1829,635 @@
         for key, value in custom_metadata.items():
             # Don't allow overwriting core fields via custom_metadata
             if key not in ("type", "title", "status", "parent", "children", "dependencies"):
+                if key not in previous_values:
+                    previous_values[key] = metadata.get(key)
                 metadata[key] = value
                 if key not in fields_updated:
                     fields_updated.append(key)
 
+    # Build result
+    result = {
+        "spec_id": spec_id,
+        "task_id": task_id,
+        "fields_updated": fields_updated,
+        "previous_values": previous_values,
+        "dry_run": dry_run,
+    }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+def move_task(
+    spec_id: str,
+    task_id: str,
+    new_parent: Optional[str] = None,
+    position: Optional[int] = None,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str], List[str]]:
+    """
+    Move a task to a new position within its parent or to a different parent.
+
+    Supports two modes:
+    1. Reorder within parent: only specify position (new_parent=None)
+    2. Reparent to different phase/task: specify new_parent, optionally position
+
+    Updates task counts on affected parents. Prevents circular references.
+    Emits warnings for cross-phase moves that might affect dependencies.
+
+    Args:
+        spec_id: Specification ID containing the task.
+        task_id: Task ID to move.
+        new_parent: Optional new parent ID (phase or task). If None, reorders
+            within current parent.
+        position: Optional position in parent's children list (1-based).
+            If None, appends to end.
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message, warnings_list).
+        On success: ({"task_id": ..., "old_parent": ..., "new_parent": ..., ...}, None, [warnings])
+        On failure: (None, "error message", [])
+    """
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.", []
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found", []
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'", []
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate task exists
+    task = hierarchy.get(task_id)
+    if task is None:
+        return None, f"Task '{task_id}' not found", []
+
+    # Validate task type (can only move task, subtask, verify)
+    task_type = task.get("type")
+    if task_type not in ("task", "subtask", "verify"):
+        return None, f"Cannot move node type '{task_type}'. Only task, subtask, or verify nodes can be moved.", []
+
+    old_parent_id = task.get("parent")
+    if not old_parent_id:
+        return None, f"Task '{task_id}' has no parent and cannot be moved", []
+
+    old_parent = hierarchy.get(old_parent_id)
+    if not old_parent:
+        return None, f"Task's current parent '{old_parent_id}' not found", []
+
+    # Determine effective new parent
+    effective_new_parent_id = new_parent if new_parent else old_parent_id
+    is_reparenting = effective_new_parent_id != old_parent_id
+
+    # Validate new parent exists
+    new_parent_node = hierarchy.get(effective_new_parent_id)
+    if new_parent_node is None:
+        return None, f"Target parent '{effective_new_parent_id}' not found", []
+
+    # Validate new parent type (can add tasks to phases, groups, or tasks)
+    new_parent_type = new_parent_node.get("type")
+    if new_parent_type not in ("phase", "group", "task"):
+        return None, f"Cannot move to node type '{new_parent_type}'. Target must be a phase, group, or task.", []
+
+    # Prevent self-reference
+    if task_id == effective_new_parent_id:
+        return None, f"Task '{task_id}' cannot be moved to itself", []
+
+    # Prevent circular reference (can't move a task to one of its descendants)
+    if _is_descendant(hierarchy, task_id, effective_new_parent_id):
+        return None, f"Cannot move '{task_id}' to '{effective_new_parent_id}': would create circular reference", []
+
+    # Get current children lists
+    old_children = old_parent.get("children", [])
+    if not isinstance(old_children, list):
+        old_children = []
+
+    new_children = new_parent_node.get("children", []) if is_reparenting else old_children.copy()
+    if not isinstance(new_children, list):
+        new_children = []
+
+    # Validate position
+    # Remove task from old position first to calculate valid range
+    old_position = None
+    if task_id in old_children:
+        old_position = old_children.index(task_id)
+
+    # For position validation, consider the list after removal
+    max_position = len(new_children) if is_reparenting else len(new_children) - 1
+    if position is not None:
+        # Convert to 0-based for internal use (user provides 1-based)
+        position_0based = position - 1
+        if position_0based < 0 or position_0based > max_position:
+            return None, f"Invalid position {position}. Must be 1-{max_position + 1}", []
+    else:
+        # Default: append to end
+        position_0based = max_position
+
+    # Check for cross-phase dependency warnings
+    warnings: List[str] = []
+    if is_reparenting:
+        old_phase = _get_phase_for_node(hierarchy, task_id)
+        new_phase = _get_phase_for_node(hierarchy, effective_new_parent_id)
+        if new_phase != old_phase:
+            warnings = _check_cross_phase_dependencies(hierarchy, task_id, old_phase, new_phase)
+
+    # Calculate task counts for the subtree being moved (including the task itself)
+    descendants = _collect_descendants(hierarchy, task_id)
+    all_moved_nodes = [task_id] + descendants
+    total_moved, completed_moved = _count_tasks_in_subtree(hierarchy, all_moved_nodes)
+
+    # Build result for dry run or actual move
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "task_id": task_id,
+        "old_parent": old_parent_id,
+        "new_parent": effective_new_parent_id,
+        "old_position": old_position + 1 if old_position is not None else None,  # 1-based for output
+        "new_position": position_0based + 1,  # 1-based for output
+        "is_reparenting": is_reparenting,
+        "tasks_in_subtree": total_moved,
+        "dry_run": dry_run,
+    }
+
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+        if warnings:
+            result["dependency_warnings"] = warnings
+        return result, None, warnings
+
+    # Perform the move
+
+    # 1. Remove from old parent's children list
+    if task_id in old_children:
+        old_children.remove(task_id)
+        old_parent["children"] = old_children
+
+    # 2. Add to new parent's children list at specified position
+    if is_reparenting:
+        # Fresh list from new parent
+        new_children = new_parent_node.get("children", [])
+        if not isinstance(new_children, list):
+            new_children = []
+    else:
+        # Same parent, already removed
+        new_children = old_children
+
+    # Insert at position
+    if position_0based >= len(new_children):
+        new_children.append(task_id)
+    else:
+        new_children.insert(position_0based, task_id)
+
+    if is_reparenting:
+        new_parent_node["children"] = new_children
+    else:
+        old_parent["children"] = new_children
+
+    # 3. Update task's parent reference
+    if is_reparenting:
+        task["parent"] = effective_new_parent_id
+
+    # 4. Update ancestor task counts
+    # Decrement old parent's ancestors
+    _decrement_ancestor_counts(hierarchy, old_parent_id, total_moved, completed_moved)
+    # Increment new parent's ancestors
+    _update_ancestor_counts(hierarchy, effective_new_parent_id, delta=total_moved)
+    # Update completed counts for new ancestors
+    if completed_moved > 0:
+        current_id = effective_new_parent_id
+        visited = set()
+        while current_id:
+            if current_id in visited:
+                break
+            visited.add(current_id)
+            node = hierarchy.get(current_id)
+            if not node:
+                break
+            current_completed = node.get("completed_tasks", 0)
+            node["completed_tasks"] = current_completed + completed_moved
+            current_id = node.get("parent")
+
     # Save the spec
     success = save_spec(spec_id, spec_data, specs_dir)
     if not success:
-        return None, "Failed to save specification"
+        return None, "Failed to save specification", []
 
-    return {
-        "spec_id": spec_id,
-        "task_id": task_id,
-        "fields_updated": fields_updated,
-    }, None
+    if warnings:
+        result["dependency_warnings"] = warnings
+
+    return result, None, warnings
+
+
+def _generate_requirement_id(existing_requirements: List[Dict[str, Any]]) -> str:
+    """
+    Generate a unique requirement ID based on existing requirements.
+
+    Args:
+        existing_requirements: List of existing requirement dictionaries
+
+    Returns:
+        New requirement ID string (e.g., "req-1", "req-2")
+    """
+    if not existing_requirements:
+        return "req-1"
+
+    max_index = 0
+    pattern = re.compile(r"^req-(\d+)$")
+
+    for req in existing_requirements:
+        req_id = req.get("id", "")
+        match = pattern.match(req_id)
+        if match:
+            index = int(match.group(1))
+            max_index = max(max_index, index)
+
+    return f"req-{max_index + 1}"
+
+
+def update_task_requirements(
+    spec_id: str,
+    task_id: str,
+    action: str = "add",
+    requirement_type: Optional[str] = None,
+    text: Optional[str] = None,
+    requirement_id: Optional[str] = None,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Add or remove a structured requirement from a task's metadata.
+
+    Requirements are stored in metadata.requirements as a list of objects:
+    [{"id": "req-1", "type": "acceptance", "text": "..."}, ...]
+
+    Each requirement has:
+    - id: Auto-generated unique ID (e.g., "req-1", "req-2")
+    - type: Requirement type (acceptance, technical, constraint)
+    - text: Requirement description text
+
+    Args:
+        spec_id: Specification ID containing the task.
+        task_id: Task ID to update.
+        action: Action to perform ("add" or "remove"). Default: "add".
+        requirement_type: Requirement type (required for add). One of:
+            acceptance, technical, constraint.
+        text: Requirement text (required for add).
+        requirement_id: Requirement ID to remove (required for remove action).
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"task_id": ..., "action": ..., "requirement": {...}, ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate action
+    if action not in ("add", "remove"):
+        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"
+
+    # Validate parameters based on action
+    if action == "add":
+        if requirement_type is None:
+            return None, "requirement_type is required for add action"
+        if not isinstance(requirement_type, str):
+            return None, "requirement_type must be a string"
+        requirement_type = requirement_type.lower().strip()
+        if requirement_type not in REQUIREMENT_TYPES:
+            return None, f"Invalid requirement_type '{requirement_type}'. Must be one of: {', '.join(REQUIREMENT_TYPES)}"
+
+        if text is None:
+            return None, "text is required for add action"
+        if not isinstance(text, str) or not text.strip():
+            return None, "text must be a non-empty string"
+        text = text.strip()
+
+    elif action == "remove":
+        if requirement_id is None:
+            return None, "requirement_id is required for remove action"
+        if not isinstance(requirement_id, str) or not requirement_id.strip():
+            return None, "requirement_id must be a non-empty string"
+        requirement_id = requirement_id.strip()
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate task exists
+    task = hierarchy.get(task_id)
+    if task is None:
+        return None, f"Task '{task_id}' not found"
+
+    # Validate task type (can only update task, subtask, verify)
+    task_type = task.get("type")
+    if task_type not in ("task", "subtask", "verify"):
+        return None, f"Cannot update requirements for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."
+
+    # Get or create metadata
+    metadata = task.get("metadata")
+    if metadata is None:
+        metadata = {}
+        task["metadata"] = metadata
+
+    # Get or create requirements list
+    requirements = metadata.get("requirements")
+    if requirements is None:
+        requirements = []
+        metadata["requirements"] = requirements
+    elif not isinstance(requirements, list):
+        requirements = []
+        metadata["requirements"] = requirements
+
+    # Perform the action
+    if action == "add":
+        # Check limit
+        if len(requirements) >= MAX_REQUIREMENTS_PER_TASK:
+            return None, f"Cannot add requirement: task already has {MAX_REQUIREMENTS_PER_TASK} requirements (maximum)"
+
+        # Generate new requirement ID
+        new_id = _generate_requirement_id(requirements)
+
+        # Create requirement object
+        new_requirement = {
+            "id": new_id,
+            "type": requirement_type,
+            "text": text,
+        }
+
+        # Add to list
+        requirements.append(new_requirement)
+
+        result = {
+            "spec_id": spec_id,
+            "task_id": task_id,
+            "action": "add",
+            "requirement": new_requirement,
+            "total_requirements": len(requirements),
+            "dry_run": dry_run,
+        }
+
+    elif action == "remove":
+        # Find requirement by ID
+        found_index = None
+        removed_requirement = None
+        for i, req in enumerate(requirements):
+            if req.get("id") == requirement_id:
+                found_index = i
+                removed_requirement = req
+                break
+
+        if found_index is None:
+            return None, f"Requirement '{requirement_id}' not found in task '{task_id}'"
+
+        # Remove from list
+        requirements.pop(found_index)
+
+        result = {
+            "spec_id": spec_id,
+            "task_id": task_id,
+            "action": "remove",
+            "requirement": removed_requirement,
+            "total_requirements": len(requirements),
+            "dry_run": dry_run,
+        }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+# Valid statuses for batch filtering
+BATCH_ALLOWED_STATUSES = {"pending", "in_progress", "completed", "blocked"}
+
+# Safety constraints for batch operations
+MAX_PATTERN_LENGTH = 256
+DEFAULT_MAX_MATCHES = 100
+
+
+def _match_tasks_for_batch(
+    hierarchy: Dict[str, Any],
+    *,
+    status_filter: Optional[str] = None,
+    parent_filter: Optional[str] = None,
+    pattern: Optional[str] = None,
+) -> List[str]:
+    """Find tasks matching filter criteria (AND logic). Returns sorted task IDs."""
+    compiled_pattern = None
+    if pattern:
+        try:
+            compiled_pattern = re.compile(pattern, re.IGNORECASE)
+        except re.error:
+            return []
+
+    matched = []
+    target_types = {"task", "subtask", "verify"}
+
+    valid_descendants: Optional[set] = None
+    if parent_filter:
+        parent_node = hierarchy.get(parent_filter)
+        if not parent_node:
+            return []
+        valid_descendants = set()
+        to_visit = list(parent_node.get("children", []))
+        while to_visit:
+            child_id = to_visit.pop()
+            if child_id in valid_descendants:
+                continue
+            valid_descendants.add(child_id)
+            child_node = hierarchy.get(child_id)
+            if child_node:
+                to_visit.extend(child_node.get("children", []))
+
+    for node_id, node_data in hierarchy.items():
+        if node_data.get("type") not in target_types:
+            continue
+        if status_filter and node_data.get("status") != status_filter:
+            continue
+        if valid_descendants is not None and node_id not in valid_descendants:
+            continue
+        if compiled_pattern:
+            title = node_data.get("title", "")
+            if not (compiled_pattern.search(title) or compiled_pattern.search(node_id)):
+                continue
+        matched.append(node_id)
+
+    return sorted(matched)
+
+
+def batch_update_tasks(
+    spec_id: str,
+    *,
+    status_filter: Optional[str] = None,
+    parent_filter: Optional[str] = None,
+    pattern: Optional[str] = None,
+    description: Optional[str] = None,
+    file_path: Optional[str] = None,
+    estimated_hours: Optional[float] = None,
+    category: Optional[str] = None,
+    labels: Optional[Dict[str, str]] = None,
+    owners: Optional[List[str]] = None,
+    custom_metadata: Optional[Dict[str, Any]] = None,
+    dry_run: bool = False,
+    max_matches: int = DEFAULT_MAX_MATCHES,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """Batch update metadata across tasks matching filters (AND logic)."""
+    # Validate filters
+    if not any([status_filter, parent_filter, pattern]):
+        return None, "At least one filter must be provided: status_filter, parent_filter, or pattern"
+    if status_filter and status_filter not in BATCH_ALLOWED_STATUSES:
+        return None, f"Invalid status_filter '{status_filter}'. Must be one of: {sorted(BATCH_ALLOWED_STATUSES)}"
+    if pattern:
+        if not isinstance(pattern, str) or not pattern.strip():
+            return None, "pattern must be a non-empty string"
+        pattern = pattern.strip()
+        if len(pattern) > MAX_PATTERN_LENGTH:
+            return None, f"pattern exceeds maximum length of {MAX_PATTERN_LENGTH} characters"
+        try:
+            re.compile(pattern)
+        except re.error as e:
+            return None, f"Invalid regex pattern: {e}"
+    if parent_filter:
+        if not isinstance(parent_filter, str) or not parent_filter.strip():
+            return None, "parent_filter must be a non-empty string"
+        parent_filter = parent_filter.strip()
+
+    # Collect metadata updates
+    metadata_updates: Dict[str, Any] = {}
+    if description is not None:
+        metadata_updates["description"] = description.strip() if description else None
+    if file_path is not None:
+        metadata_updates["file_path"] = file_path.strip() if file_path else None
+    if estimated_hours is not None:
+        if not isinstance(estimated_hours, (int, float)) or estimated_hours < 0:
+            return None, "estimated_hours must be a non-negative number"
+        metadata_updates["estimated_hours"] = float(estimated_hours)
+    if category is not None:
+        metadata_updates["category"] = category.strip() if category else None
+    if labels is not None:
+        if not isinstance(labels, dict) or not all(isinstance(k, str) and isinstance(v, str) for k, v in labels.items()):
+            return None, "labels must be a dict with string keys and values"
+        metadata_updates["labels"] = labels
+    if owners is not None:
+        if not isinstance(owners, list) or not all(isinstance(o, str) for o in owners):
+            return None, "owners must be a list of strings"
+        metadata_updates["owners"] = owners
+    if custom_metadata:
+        if not isinstance(custom_metadata, dict):
+            return None, "custom_metadata must be a dict"
+        for key, value in custom_metadata.items():
+            if key not in metadata_updates:
+                metadata_updates[key] = value
+
+    if not metadata_updates:
+        return None, "At least one metadata field must be provided"
+    if max_matches <= 0:
+        return None, "max_matches must be a positive integer"
+
+    # Load spec
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+    if specs_dir is None:
+        return None, "No specs directory found"
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if not spec_path:
+        return None, f"Specification '{spec_id}' not found"
+    spec_data = load_spec(spec_id, specs_dir)
+    if not spec_data:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+    if parent_filter and parent_filter not in hierarchy:
+        return None, f"Parent '{parent_filter}' not found in specification"
+
+    matched_ids = _match_tasks_for_batch(hierarchy, status_filter=status_filter, parent_filter=parent_filter, pattern=pattern)
+    warnings: List[str] = []
+    skipped_ids = []
+    if len(matched_ids) > max_matches:
+        warnings.append(f"Found {len(matched_ids)} matches, limiting to {max_matches}")
+        skipped_ids = matched_ids[max_matches:]
+        matched_ids = matched_ids[:max_matches]
+
+    if not matched_ids:
+        return {"spec_id": spec_id, "matched_count": 0, "updated_count": 0, "skipped_count": len(skipped_ids),
+                "nodes": [], "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
+                "metadata_applied": metadata_updates, "dry_run": dry_run, "message": "No tasks matched"}, None
+
+    # Capture originals and build result
+    original_metadata: Dict[str, Dict[str, Any]] = {}
+    updated_nodes: List[Dict[str, Any]] = []
+    for node_id in matched_ids:
+        node = hierarchy.get(node_id, {})
+        existing_meta = node.get("metadata", {}) or {}
+        original_metadata[node_id] = {k: existing_meta.get(k) for k in metadata_updates}
+        diff = {k: {"old": original_metadata[node_id].get(k), "new": v} for k, v in metadata_updates.items() if original_metadata[node_id].get(k) != v}
+        updated_nodes.append({"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
+                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys()), "diff": diff} if diff else
+                             {"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
+                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys())})
+        if not dry_run:
+            if "metadata" not in node:
+                node["metadata"] = {}
+            node["metadata"].update(metadata_updates)
+
+    if not dry_run:
+        if not save_spec(spec_id, spec_data, specs_dir):
+            for nid, orig in original_metadata.items():
+                n = hierarchy.get(nid, {})
+                if "metadata" in n:
+                    for k, v in orig.items():
+                        if v is None:
+                            n["metadata"].pop(k, None)
+                        else:
+                            n["metadata"][k] = v
+            return None, "Failed to save; changes rolled back"
+
+    if len(matched_ids) > 50:
+        warnings.append(f"Updated {len(matched_ids)} tasks")
+
+    result = {"spec_id": spec_id, "matched_count": len(matched_ids), "updated_count": len(matched_ids) if not dry_run else 0,
+              "skipped_count": len(skipped_ids), "nodes": updated_nodes, "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
+              "metadata_applied": metadata_updates, "dry_run": dry_run}
+    if warnings:
+        result["warnings"] = warnings
+    if skipped_ids:
+        result["skipped_tasks"] = skipped_ids
+    return result, None
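
Finally, a hedged sketch of the batch API: preview a category change on every pending task under phase-1 whose title or ID matches "migration" (filters AND together; nothing is written while dry_run=True, and the spec ID is hypothetical):

from foundry_mcp.core.task import batch_update_tasks

result, err = batch_update_tasks(
    "example-spec",                     # hypothetical spec ID
    status_filter="pending",
    parent_filter="phase-1",
    pattern=r"migration",
    category="refactoring",
    dry_run=True,
)
if result:
    print(result["matched_count"], "matched,", result["updated_count"], "written")
    for node in result["nodes"]:
        print(node["node_id"], node.get("diff"))  # per-field old/new preview
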