foundry-mcp 0.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135) hide show
  1. foundry_mcp/__init__.py +7 -0
  2. foundry_mcp/cli/__init__.py +80 -0
  3. foundry_mcp/cli/__main__.py +9 -0
  4. foundry_mcp/cli/agent.py +96 -0
  5. foundry_mcp/cli/commands/__init__.py +37 -0
  6. foundry_mcp/cli/commands/cache.py +137 -0
  7. foundry_mcp/cli/commands/dashboard.py +148 -0
  8. foundry_mcp/cli/commands/dev.py +446 -0
  9. foundry_mcp/cli/commands/journal.py +377 -0
  10. foundry_mcp/cli/commands/lifecycle.py +274 -0
  11. foundry_mcp/cli/commands/modify.py +824 -0
  12. foundry_mcp/cli/commands/plan.py +633 -0
  13. foundry_mcp/cli/commands/pr.py +393 -0
  14. foundry_mcp/cli/commands/review.py +652 -0
  15. foundry_mcp/cli/commands/session.py +479 -0
  16. foundry_mcp/cli/commands/specs.py +856 -0
  17. foundry_mcp/cli/commands/tasks.py +807 -0
  18. foundry_mcp/cli/commands/testing.py +676 -0
  19. foundry_mcp/cli/commands/validate.py +982 -0
  20. foundry_mcp/cli/config.py +98 -0
  21. foundry_mcp/cli/context.py +259 -0
  22. foundry_mcp/cli/flags.py +266 -0
  23. foundry_mcp/cli/logging.py +212 -0
  24. foundry_mcp/cli/main.py +44 -0
  25. foundry_mcp/cli/output.py +122 -0
  26. foundry_mcp/cli/registry.py +110 -0
  27. foundry_mcp/cli/resilience.py +178 -0
  28. foundry_mcp/cli/transcript.py +217 -0
  29. foundry_mcp/config.py +850 -0
  30. foundry_mcp/core/__init__.py +144 -0
  31. foundry_mcp/core/ai_consultation.py +1636 -0
  32. foundry_mcp/core/cache.py +195 -0
  33. foundry_mcp/core/capabilities.py +446 -0
  34. foundry_mcp/core/concurrency.py +898 -0
  35. foundry_mcp/core/context.py +540 -0
  36. foundry_mcp/core/discovery.py +1603 -0
  37. foundry_mcp/core/error_collection.py +728 -0
  38. foundry_mcp/core/error_store.py +592 -0
  39. foundry_mcp/core/feature_flags.py +592 -0
  40. foundry_mcp/core/health.py +749 -0
  41. foundry_mcp/core/journal.py +694 -0
  42. foundry_mcp/core/lifecycle.py +412 -0
  43. foundry_mcp/core/llm_config.py +1350 -0
  44. foundry_mcp/core/llm_patterns.py +510 -0
  45. foundry_mcp/core/llm_provider.py +1569 -0
  46. foundry_mcp/core/logging_config.py +374 -0
  47. foundry_mcp/core/metrics_persistence.py +584 -0
  48. foundry_mcp/core/metrics_registry.py +327 -0
  49. foundry_mcp/core/metrics_store.py +641 -0
  50. foundry_mcp/core/modifications.py +224 -0
  51. foundry_mcp/core/naming.py +123 -0
  52. foundry_mcp/core/observability.py +1216 -0
  53. foundry_mcp/core/otel.py +452 -0
  54. foundry_mcp/core/otel_stubs.py +264 -0
  55. foundry_mcp/core/pagination.py +255 -0
  56. foundry_mcp/core/progress.py +317 -0
  57. foundry_mcp/core/prometheus.py +577 -0
  58. foundry_mcp/core/prompts/__init__.py +464 -0
  59. foundry_mcp/core/prompts/fidelity_review.py +546 -0
  60. foundry_mcp/core/prompts/markdown_plan_review.py +511 -0
  61. foundry_mcp/core/prompts/plan_review.py +623 -0
  62. foundry_mcp/core/providers/__init__.py +225 -0
  63. foundry_mcp/core/providers/base.py +476 -0
  64. foundry_mcp/core/providers/claude.py +460 -0
  65. foundry_mcp/core/providers/codex.py +619 -0
  66. foundry_mcp/core/providers/cursor_agent.py +642 -0
  67. foundry_mcp/core/providers/detectors.py +488 -0
  68. foundry_mcp/core/providers/gemini.py +405 -0
  69. foundry_mcp/core/providers/opencode.py +616 -0
  70. foundry_mcp/core/providers/opencode_wrapper.js +302 -0
  71. foundry_mcp/core/providers/package-lock.json +24 -0
  72. foundry_mcp/core/providers/package.json +25 -0
  73. foundry_mcp/core/providers/registry.py +607 -0
  74. foundry_mcp/core/providers/test_provider.py +171 -0
  75. foundry_mcp/core/providers/validation.py +729 -0
  76. foundry_mcp/core/rate_limit.py +427 -0
  77. foundry_mcp/core/resilience.py +600 -0
  78. foundry_mcp/core/responses.py +934 -0
  79. foundry_mcp/core/review.py +366 -0
  80. foundry_mcp/core/security.py +438 -0
  81. foundry_mcp/core/spec.py +1650 -0
  82. foundry_mcp/core/task.py +1289 -0
  83. foundry_mcp/core/testing.py +450 -0
  84. foundry_mcp/core/validation.py +2081 -0
  85. foundry_mcp/dashboard/__init__.py +32 -0
  86. foundry_mcp/dashboard/app.py +119 -0
  87. foundry_mcp/dashboard/components/__init__.py +17 -0
  88. foundry_mcp/dashboard/components/cards.py +88 -0
  89. foundry_mcp/dashboard/components/charts.py +234 -0
  90. foundry_mcp/dashboard/components/filters.py +136 -0
  91. foundry_mcp/dashboard/components/tables.py +195 -0
  92. foundry_mcp/dashboard/data/__init__.py +11 -0
  93. foundry_mcp/dashboard/data/stores.py +433 -0
  94. foundry_mcp/dashboard/launcher.py +289 -0
  95. foundry_mcp/dashboard/views/__init__.py +12 -0
  96. foundry_mcp/dashboard/views/errors.py +217 -0
  97. foundry_mcp/dashboard/views/metrics.py +174 -0
  98. foundry_mcp/dashboard/views/overview.py +160 -0
  99. foundry_mcp/dashboard/views/providers.py +83 -0
  100. foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
  101. foundry_mcp/dashboard/views/tool_usage.py +139 -0
  102. foundry_mcp/prompts/__init__.py +9 -0
  103. foundry_mcp/prompts/workflows.py +525 -0
  104. foundry_mcp/resources/__init__.py +9 -0
  105. foundry_mcp/resources/specs.py +591 -0
  106. foundry_mcp/schemas/__init__.py +38 -0
  107. foundry_mcp/schemas/sdd-spec-schema.json +386 -0
  108. foundry_mcp/server.py +164 -0
  109. foundry_mcp/tools/__init__.py +10 -0
  110. foundry_mcp/tools/unified/__init__.py +71 -0
  111. foundry_mcp/tools/unified/authoring.py +1487 -0
  112. foundry_mcp/tools/unified/context_helpers.py +98 -0
  113. foundry_mcp/tools/unified/documentation_helpers.py +198 -0
  114. foundry_mcp/tools/unified/environment.py +939 -0
  115. foundry_mcp/tools/unified/error.py +462 -0
  116. foundry_mcp/tools/unified/health.py +225 -0
  117. foundry_mcp/tools/unified/journal.py +841 -0
  118. foundry_mcp/tools/unified/lifecycle.py +632 -0
  119. foundry_mcp/tools/unified/metrics.py +777 -0
  120. foundry_mcp/tools/unified/plan.py +745 -0
  121. foundry_mcp/tools/unified/pr.py +294 -0
  122. foundry_mcp/tools/unified/provider.py +629 -0
  123. foundry_mcp/tools/unified/review.py +685 -0
  124. foundry_mcp/tools/unified/review_helpers.py +299 -0
  125. foundry_mcp/tools/unified/router.py +102 -0
  126. foundry_mcp/tools/unified/server.py +580 -0
  127. foundry_mcp/tools/unified/spec.py +808 -0
  128. foundry_mcp/tools/unified/task.py +2202 -0
  129. foundry_mcp/tools/unified/test.py +370 -0
  130. foundry_mcp/tools/unified/verification.py +520 -0
  131. foundry_mcp-0.3.3.dist-info/METADATA +337 -0
  132. foundry_mcp-0.3.3.dist-info/RECORD +135 -0
  133. foundry_mcp-0.3.3.dist-info/WHEEL +4 -0
  134. foundry_mcp-0.3.3.dist-info/entry_points.txt +3 -0
  135. foundry_mcp-0.3.3.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,1289 @@
1
+ """
2
+ Task discovery and dependency operations for SDD workflows.
3
+ Provides finding next tasks, dependency checking, and task preparation.
4
+ """
5
+
6
+ import re
7
+ from dataclasses import asdict
8
+ from pathlib import Path
9
+ from typing import Optional, Dict, Any, Tuple, List
10
+
11
+ from foundry_mcp.core.spec import load_spec, save_spec, find_spec_file, find_specs_directory, get_node
12
+ from foundry_mcp.core.responses import success_response, error_response
13
+
14
+ # Valid task types for add_task
15
+ TASK_TYPES = ("task", "subtask", "verify")
16
+
17
+
18
def is_unblocked(spec_data: Dict[str, Any], task_id: str, task_data: Dict[str, Any]) -> bool:
    """
    Check if all blocking dependencies are completed.

    This checks both task-level dependencies and phase-level dependencies.
    A task is blocked if:
    1. Any of its direct task dependencies are not completed, OR
    2. Its parent phase is blocked by an incomplete phase

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier
        task_data: Task data dictionary

    Returns:
        True if task has no blockers or all blockers are completed
    """
    hierarchy = spec_data.get("hierarchy", {})

    def _blockers_done(blocker_ids: List[str]) -> bool:
        # A missing blocker node counts as incomplete (conservative).
        return all(
            (hierarchy.get(blocker_id) or {}).get("status") == "completed"
            for blocker_id in blocker_ids
        )

    # Direct task-level blockers must all be finished.
    if not _blockers_done(task_data.get("dependencies", {}).get("blocked_by", [])):
        return False

    # Locate the nearest ancestor phase (if any) by climbing parent links.
    node = task_data
    phase = None
    while node is not None:
        parent = hierarchy.get(node.get("parent") or "")
        if parent is None:
            break
        if parent.get("type") == "phase":
            phase = parent
            break
        node = parent

    # If the task belongs to a phase, that phase's blockers must be done too.
    if phase is not None:
        return _blockers_done(phase.get("dependencies", {}).get("blocked_by", []))
    return True
71
+
72
+
73
def is_in_current_phase(spec_data: Dict[str, Any], task_id: str, phase_id: str) -> bool:
    """
    Check if task belongs to current phase (including nested groups).

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier
        phase_id: Phase identifier to check against

    Returns:
        True if task is within the phase hierarchy
    """
    hierarchy = spec_data.get("hierarchy", {})
    node = hierarchy.get(task_id)

    # Climb the ancestor chain; membership means phase_id shows up as a parent.
    while node:
        parent_id = node.get("parent")
        if parent_id == phase_id:
            return True
        if not parent_id:
            return False
        node = hierarchy.get(parent_id)
    return False
100
+
101
+
102
def get_next_task(spec_data: Dict[str, Any]) -> Optional[Tuple[str, Dict[str, Any]]]:
    """
    Find the next actionable task.

    Searches phases in order (in_progress first, then pending).
    Within each phase, finds leaf tasks (no children) before parent tasks.
    Only returns unblocked tasks with pending status.

    Args:
        spec_data: JSON spec file data

    Returns:
        Tuple of (task_id, task_data) or None if no task available
    """
    hierarchy = spec_data.get("hierarchy", {})

    # Phase ordering comes from the spec root's children list.
    phase_order = hierarchy.get("spec-root", {}).get("children", [])

    def _phases_with_status(status: str) -> List[str]:
        return [
            pid for pid in phase_order
            if hierarchy.get(pid, {}).get("type") == "phase"
            and hierarchy.get(pid, {}).get("status") == status
        ]

    # Work active phases first, then not-yet-started ones.
    ordered_phases = _phases_with_status("in_progress") + _phases_with_status("pending")
    if not ordered_phases:
        return None

    for phase_id in ordered_phases:
        # Collect pending, unblocked tasks inside this phase.
        candidates = [
            (len(node.get("children", [])) > 0, node_id, node)
            for node_id, node in hierarchy.items()
            if node.get("type") in ("task", "subtask", "verify")
            and node.get("status") == "pending"
            and is_unblocked(spec_data, node_id, node)
            and is_in_current_phase(spec_data, node_id, phase_id)
        ]
        if candidates:
            # Prefer leaves (no children), then lowest ID.
            _, best_id, best_node = min(candidates, key=lambda c: (c[0], c[1]))
            return (best_id, best_node)

    # No actionable tasks found in any phase.
    return None
160
+
161
+
162
def check_dependencies(spec_data: Dict[str, Any], task_id: str) -> Dict[str, Any]:
    """
    Check dependency status for a task.

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier

    Returns:
        Dictionary with dependency analysis including:
        - task_id: The task being checked
        - can_start: Whether the task is unblocked
        - blocked_by: List of blocking task info
        - soft_depends: List of soft dependency info
        - blocks: List of tasks this blocks
        On an unknown task_id, returns {"error": ...} instead.
    """
    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)

    if not task:
        return {"error": f"Task {task_id} not found"}

    def _describe(dep_ids: List[str]) -> List[Dict[str, Any]]:
        # Summarize each referenced task; IDs missing from the hierarchy are
        # silently skipped (matches the previous behavior of all three lists).
        infos: List[Dict[str, Any]] = []
        for dep_id in dep_ids:
            dep_task = hierarchy.get(dep_id)
            if dep_task:
                infos.append({
                    "id": dep_id,
                    "title": dep_task.get("title", ""),
                    "status": dep_task.get("status", ""),
                    "file": dep_task.get("metadata", {}).get("file_path", ""),
                })
        return infos

    deps = task.get("dependencies", {})
    return {
        "task_id": task_id,
        "can_start": is_unblocked(spec_data, task_id, task),
        "blocked_by": _describe(deps.get("blocked_by", [])),
        "soft_depends": _describe(deps.get("depends", [])),
        "blocks": _describe(deps.get("blocks", [])),
    }
231
+
232
+
233
+ def _get_sibling_ids(
234
+ hierarchy: Dict[str, Dict[str, Any]],
235
+ parent_id: str,
236
+ parent_node: Dict[str, Any],
237
+ ) -> List[str]:
238
+ """Return sibling IDs for a parent, falling back to scanning the hierarchy."""
239
+ children = parent_node.get("children", [])
240
+ if isinstance(children, list) and children:
241
+ return [child_id for child_id in children if child_id in hierarchy]
242
+
243
+ return [
244
+ node_id
245
+ for node_id, node in hierarchy.items()
246
+ if node.get("parent") == parent_id
247
+ ]
248
+
249
+
250
+ def _get_latest_journal_excerpt(
251
+ journal_entries: List[Dict[str, Any]],
252
+ task_id: str,
253
+ ) -> Optional[Dict[str, Any]]:
254
+ """Return the most recent journal entry for the given task."""
255
+ if not journal_entries:
256
+ return None
257
+
258
+ filtered = [
259
+ entry for entry in journal_entries if entry.get("task_id") == task_id
260
+ ]
261
+ if not filtered:
262
+ return None
263
+
264
+ filtered.sort(key=lambda entry: entry.get("timestamp") or "", reverse=True)
265
+ latest = filtered[0]
266
+ summary = (latest.get("content") or "").strip()
267
+
268
+ return {
269
+ "timestamp": latest.get("timestamp"),
270
+ "entry_type": latest.get("entry_type"),
271
+ "summary": summary,
272
+ }
273
+
274
+
275
+ def _find_phase_node(hierarchy: Dict[str, Dict[str, Any]], task_node: Dict[str, Any]) -> Optional[Dict[str, Any]]:
276
+ """Walk ancestor chain to find the nearest phase node."""
277
+ current = task_node
278
+ while current:
279
+ parent_id = current.get("parent")
280
+ if not parent_id:
281
+ return None
282
+ parent = hierarchy.get(parent_id)
283
+ if not parent:
284
+ return None
285
+ if parent.get("type") == "phase":
286
+ return parent
287
+ current = parent
288
+ return None
289
+
290
+
291
def get_previous_sibling(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return metadata about the previous sibling for the given task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary describing the previous sibling or None when the task is
        first in its group / has no siblings.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    parent_id = task.get("parent") if task else None
    if not parent_id:
        return None

    siblings = _get_sibling_ids(hierarchy, parent_id, hierarchy.get(parent_id, {}))
    # Not listed, or listed first: there is no "previous" sibling.
    if task_id not in siblings:
        return None
    index = siblings.index(task_id)
    if index == 0:
        return None

    prev_id = siblings[index - 1]
    prev = hierarchy.get(prev_id)
    if not prev:
        return None

    meta = prev.get("metadata", {}) or {}
    return {
        "id": prev_id,
        "title": prev.get("title", ""),
        "status": prev.get("status", ""),
        "type": prev.get("type", ""),
        "file_path": meta.get("file_path"),
        "completed_at": meta.get("completed_at"),
        "journal_excerpt": _get_latest_journal_excerpt(
            spec_data.get("journal", []), prev_id
        ),
    }
348
+
349
+
350
def get_parent_context(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return contextual information about the parent node for a task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary with parent metadata or None if the task has no parent.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return None

    parent_id = task.get("parent")
    parent = hierarchy.get(parent_id) if parent_id else None
    if not parent:
        return None

    # Description may live in metadata (preferred) or directly on the node.
    meta = parent.get("metadata", {}) or {}
    description = meta.get("description") or meta.get("note") or parent.get("description")

    child_ids = _get_sibling_ids(hierarchy, parent_id, parent)
    children_entries = []
    for cid in child_ids:
        child = hierarchy.get(cid, {})
        children_entries.append({
            "id": cid,
            "title": child.get("title", ""),
            "status": child.get("status", ""),
        })

    # Human-readable "3 of 5 subtasks" style position marker.
    position_label = None
    if task_id in child_ids:
        noun = "subtasks" if parent.get("type") == "task" else "children"
        position_label = f"{child_ids.index(task_id) + 1} of {len(child_ids)} {noun}"

    completed = parent.get("completed_tasks")
    total = parent.get("total_tasks")
    remaining = None
    if isinstance(completed, int) and isinstance(total, int):
        remaining = max(total - completed, 0)

    return {
        "id": parent_id,
        "title": parent.get("title", ""),
        "type": parent.get("type", ""),
        "status": parent.get("status", ""),
        "description": description,
        "completed_tasks": completed,
        "total_tasks": total,
        "remaining_tasks": remaining,
        "position_label": position_label,
        "children": children_entries,
    }
419
+
420
+
421
def get_phase_context(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return phase-level context for a task, including progress metrics.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary with phase data or None if the task does not belong to a phase.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return None

    phase_node = _find_phase_node(hierarchy, task)
    if not phase_node:
        return None

    # Nodes do not store their own ID, so recover it by identity scan.
    phase_id = next(
        (nid for nid, node in hierarchy.items() if node is phase_node), None
    )

    meta = phase_node.get("metadata", {}) or {}
    summary = meta.get("description") or meta.get("note") or phase_node.get("description")
    blockers = phase_node.get("dependencies", {}).get("blocked_by", []) or []

    completed = phase_node.get("completed_tasks")
    total = phase_node.get("total_tasks")
    percentage = None
    if isinstance(completed, int) and isinstance(total, int) and total > 0:
        percentage = int((completed / total) * 100)

    # 1-based position of the phase within the spec root's ordering.
    phase_list = hierarchy.get("spec-root", {}).get("children", [])
    sequence_index = None
    if isinstance(phase_list, list) and phase_id in phase_list:
        sequence_index = phase_list.index(phase_id) + 1

    return {
        "id": phase_id,
        "title": phase_node.get("title", ""),
        "status": phase_node.get("status", ""),
        "sequence_index": sequence_index,
        "completed_tasks": completed,
        "total_tasks": total,
        "percentage": percentage,
        "summary": summary,
        "blockers": blockers,
    }
481
+
482
+
483
def get_task_journal_summary(
    spec_data: Dict[str, Any],
    task_id: str,
    max_entries: int = 3,
) -> Dict[str, Any]:
    """
    Return a compact summary of journal entries for a task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: Task identifier.
        max_entries: Maximum entries to include in summary.

    Returns:
        Dictionary with entry_count and entries[]
    """
    if not spec_data or not task_id:
        return {"entry_count": 0, "entries": []}

    matching = [
        entry for entry in (spec_data.get("journal", []) or [])
        if entry.get("task_id") == task_id
    ]
    if not matching:
        return {"entry_count": 0, "entries": []}

    # Newest first; missing timestamps compare as "" and sink to the end.
    matching.sort(key=lambda entry: entry.get("timestamp") or "", reverse=True)
    entries = [
        {
            "timestamp": entry.get("timestamp"),
            "entry_type": entry.get("entry_type"),
            "title": entry.get("title"),
            "summary": (entry.get("content") or "").strip(),
            "author": entry.get("author"),
        }
        for entry in matching[:max_entries]
    ]

    # entry_count reflects ALL matches, not just the truncated excerpt.
    return {"entry_count": len(matching), "entries": entries}
527
+
528
+
529
def prepare_task(
    spec_id: str,
    specs_dir: Path,
    task_id: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Prepare complete context for task implementation.

    Combines task discovery, dependency checking, and context gathering.

    Args:
        spec_id: Specification ID
        specs_dir: Path to specs directory
        task_id: Optional task ID (auto-discovers if not provided)

    Returns:
        Complete task preparation data with context.
    """
    # Resolve and load the spec file.
    if not find_spec_file(spec_id, specs_dir):
        return asdict(error_response(f"Spec file not found for {spec_id}"))

    spec_data = load_spec(spec_id, specs_dir)
    if not spec_data:
        return asdict(error_response("Failed to load spec"))

    # Auto-discover the next actionable task when none was given.
    if not task_id:
        discovered = get_next_task(spec_data)
        if discovered is None:
            # Nothing actionable: distinguish "spec finished" from "stuck".
            hierarchy = spec_data.get("hierarchy", {})
            statuses = [
                node.get("status")
                for node in hierarchy.values()
                if node.get("type") in ("task", "subtask", "verify")
            ]
            if statuses.count("pending") == 0 and statuses.count("completed") > 0:
                return asdict(success_response(
                    task_id=None,
                    spec_complete=True
                ))
            return asdict(error_response("No actionable tasks found"))
        task_id = discovered[0]

    task_data = get_node(spec_data, task_id)
    if not task_data:
        return asdict(error_response(f"Task {task_id} not found"))

    # Gather surrounding context for the implementer.
    context = {
        "previous_sibling": get_previous_sibling(spec_data, task_id),
        "parent_task": get_parent_context(spec_data, task_id),
        "phase": get_phase_context(spec_data, task_id),
        "task_journal": get_task_journal_summary(spec_data, task_id),
    }

    return asdict(success_response(
        task_id=task_id,
        task_data=task_data,
        dependencies=check_dependencies(spec_data, task_id),
        spec_complete=False,
        context=context
    ))
602
+
603
+
604
+ def _generate_task_id(parent_id: str, existing_children: List[str], task_type: str) -> str:
605
+ """
606
+ Generate a new task ID based on parent and existing siblings.
607
+
608
+ For task IDs:
609
+ - If parent is phase-N, generate task-N-M where M is next available
610
+ - If parent is task-N-M, generate task-N-M-P where P is next available
611
+
612
+ For verify IDs:
613
+ - Same pattern but with "verify-" prefix
614
+
615
+ Args:
616
+ parent_id: Parent node ID
617
+ existing_children: List of existing child IDs
618
+ task_type: Type of task (task, subtask, verify)
619
+
620
+ Returns:
621
+ New task ID string
622
+ """
623
+ prefix = "verify" if task_type == "verify" else "task"
624
+
625
+ # Extract numeric parts from parent
626
+ if parent_id.startswith("phase-"):
627
+ # Parent is phase-N, new task is task-N-1, task-N-2, etc.
628
+ phase_num = parent_id.replace("phase-", "")
629
+ base = f"{prefix}-{phase_num}"
630
+ elif parent_id.startswith("task-") or parent_id.startswith("verify-"):
631
+ # Parent is task-N-M or verify-N-M, new task appends next number
632
+ # Remove the prefix (task- or verify-) to get the numeric path
633
+ if parent_id.startswith("task-"):
634
+ base = f"{prefix}-{parent_id[5:]}" # len("task-") = 5
635
+ else:
636
+ base = f"{prefix}-{parent_id[7:]}" # len("verify-") = 7
637
+ else:
638
+ # Unknown parent type, generate based on existing children count
639
+ base = f"{prefix}-1"
640
+
641
+ # Find the next available index
642
+ pattern = re.compile(rf"^{re.escape(base)}-(\d+)$")
643
+ max_index = 0
644
+ for child_id in existing_children:
645
+ match = pattern.match(child_id)
646
+ if match:
647
+ index = int(match.group(1))
648
+ max_index = max(max_index, index)
649
+
650
+ return f"{base}-{max_index + 1}"
651
+
652
+
653
+ def _update_ancestor_counts(hierarchy: Dict[str, Any], node_id: str, delta: int = 1) -> None:
654
+ """
655
+ Walk up the hierarchy and increment total_tasks for all ancestors.
656
+
657
+ Args:
658
+ hierarchy: The spec hierarchy dict
659
+ node_id: Starting node ID
660
+ delta: Amount to add to total_tasks (default 1)
661
+ """
662
+ current_id = node_id
663
+ visited = set()
664
+
665
+ while current_id:
666
+ if current_id in visited:
667
+ break
668
+ visited.add(current_id)
669
+
670
+ node = hierarchy.get(current_id)
671
+ if not node:
672
+ break
673
+
674
+ # Increment total_tasks
675
+ current_total = node.get("total_tasks", 0)
676
+ node["total_tasks"] = current_total + delta
677
+
678
+ # Move to parent
679
+ current_id = node.get("parent")
680
+
681
+
682
def add_task(
    spec_id: str,
    parent_id: str,
    title: str,
    description: Optional[str] = None,
    task_type: str = "task",
    estimated_hours: Optional[float] = None,
    position: Optional[int] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add a new task to a specification's hierarchy.

    Creates a new task, subtask, or verify node under the specified parent.
    Automatically generates the task ID and updates ancestor task counts.

    Args:
        spec_id: Specification ID to add task to.
        parent_id: Parent node ID (phase or task).
        title: Task title.
        description: Optional task description.
        task_type: Type of task (task, subtask, verify). Default: task.
        estimated_hours: Optional estimated hours.
        position: Optional position in parent's children list (0-based).
            Out-of-range values fall back to appending at the end.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "parent": ..., ...}, None); the
        "position" key reports the index the task actually landed at.
        On failure: (None, "error message")
    """
    # Validate inputs before touching the filesystem.
    if task_type not in TASK_TYPES:
        return None, f"Invalid task_type '{task_type}'. Must be one of: {', '.join(TASK_TYPES)}"

    if not title or not title.strip():
        return None, "Title is required"

    title = title.strip()

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate parent exists
    parent = hierarchy.get(parent_id)
    if parent is None:
        return None, f"Parent node '{parent_id}' not found"

    # Validate parent type (can add tasks to phases, groups, or tasks)
    parent_type = parent.get("type")
    if parent_type not in ("phase", "group", "task"):
        return None, f"Cannot add tasks to node type '{parent_type}'. Parent must be a phase, group, or task."

    # Normalize a malformed children field to an empty list.
    existing_children = parent.get("children", [])
    if not isinstance(existing_children, list):
        existing_children = []

    # Generate task ID
    task_id = _generate_task_id(parent_id, existing_children, task_type)

    # Build metadata
    metadata: Dict[str, Any] = {}
    if description:
        metadata["description"] = description.strip()
    if estimated_hours is not None:
        metadata["estimated_hours"] = estimated_hours

    # Create the task node
    task_node = {
        "type": task_type,
        "title": title,
        "status": "pending",
        "parent": parent_id,
        "children": [],
        "total_tasks": 1,  # Counts itself
        "completed_tasks": 0,
        "metadata": metadata,
        "dependencies": {
            "blocks": [],
            "blocked_by": [],
            "depends": [],
        },
    }

    # Add to hierarchy
    hierarchy[task_id] = task_node

    # Update parent's children list; invalid positions append instead.
    if position is not None and 0 <= position <= len(existing_children):
        existing_children.insert(position, task_id)
    else:
        existing_children.append(task_id)
    parent["children"] = existing_children

    # Update ancestor task counts
    _update_ancestor_counts(hierarchy, parent_id, delta=1)

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "task_id": task_id,
        "parent": parent_id,
        "title": title,
        "type": task_type,
        # Bug fix: report the index the task actually landed at. Previously an
        # out-of-range `position` was echoed back verbatim even though the
        # task had been appended to the end instead.
        "position": existing_children.index(task_id),
    }, None
808
+
809
+
810
+ def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
811
+ """
812
+ Recursively collect all descendant node IDs for a given node.
813
+
814
+ Args:
815
+ hierarchy: The spec hierarchy dict
816
+ node_id: Starting node ID
817
+
818
+ Returns:
819
+ List of all descendant node IDs (not including the starting node)
820
+ """
821
+ descendants = []
822
+ node = hierarchy.get(node_id)
823
+ if not node:
824
+ return descendants
825
+
826
+ children = node.get("children", [])
827
+ if not isinstance(children, list):
828
+ return descendants
829
+
830
+ for child_id in children:
831
+ descendants.append(child_id)
832
+ descendants.extend(_collect_descendants(hierarchy, child_id))
833
+
834
+ return descendants
835
+
836
+
837
+ def _count_tasks_in_subtree(hierarchy: Dict[str, Any], node_ids: List[str]) -> Tuple[int, int]:
838
+ """
839
+ Count total and completed tasks in a list of nodes.
840
+
841
+ Args:
842
+ hierarchy: The spec hierarchy dict
843
+ node_ids: List of node IDs to count
844
+
845
+ Returns:
846
+ Tuple of (total_count, completed_count)
847
+ """
848
+ total = 0
849
+ completed = 0
850
+
851
+ for node_id in node_ids:
852
+ node = hierarchy.get(node_id)
853
+ if not node:
854
+ continue
855
+ node_type = node.get("type")
856
+ if node_type in ("task", "subtask", "verify"):
857
+ total += 1
858
+ if node.get("status") == "completed":
859
+ completed += 1
860
+
861
+ return total, completed
862
+
863
+
864
+ def _decrement_ancestor_counts(
865
+ hierarchy: Dict[str, Any],
866
+ node_id: str,
867
+ total_delta: int,
868
+ completed_delta: int,
869
+ ) -> None:
870
+ """
871
+ Walk up the hierarchy and decrement task counts for all ancestors.
872
+
873
+ Args:
874
+ hierarchy: The spec hierarchy dict
875
+ node_id: Starting node ID (the parent of the removed node)
876
+ total_delta: Amount to subtract from total_tasks
877
+ completed_delta: Amount to subtract from completed_tasks
878
+ """
879
+ current_id = node_id
880
+ visited = set()
881
+
882
+ while current_id:
883
+ if current_id in visited:
884
+ break
885
+ visited.add(current_id)
886
+
887
+ node = hierarchy.get(current_id)
888
+ if not node:
889
+ break
890
+
891
+ # Decrement counts
892
+ current_total = node.get("total_tasks", 0)
893
+ current_completed = node.get("completed_tasks", 0)
894
+ node["total_tasks"] = max(0, current_total - total_delta)
895
+ node["completed_tasks"] = max(0, current_completed - completed_delta)
896
+
897
+ # Move to parent
898
+ current_id = node.get("parent")
899
+
900
+
901
+ def _remove_dependency_references(hierarchy: Dict[str, Any], removed_ids: List[str]) -> None:
902
+ """
903
+ Remove references to deleted nodes from all dependency lists.
904
+
905
+ Args:
906
+ hierarchy: The spec hierarchy dict
907
+ removed_ids: List of node IDs being removed
908
+ """
909
+ removed_set = set(removed_ids)
910
+
911
+ for node_id, node in hierarchy.items():
912
+ deps = node.get("dependencies")
913
+ if not deps or not isinstance(deps, dict):
914
+ continue
915
+
916
+ for key in ("blocks", "blocked_by", "depends"):
917
+ dep_list = deps.get(key)
918
+ if isinstance(dep_list, list):
919
+ deps[key] = [d for d in dep_list if d not in removed_set]
920
+
921
+
922
def remove_task(
    spec_id: str,
    task_id: str,
    cascade: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Remove a task from a specification's hierarchy.

    Removes the specified task and optionally all its descendants.
    Updates ancestor task counts and cleans up dependency references.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to remove.
        cascade: If True, also remove all child tasks recursively.
            If False and task has children, returns an error.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "children_removed": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only remove task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot remove node type '{task_type}'. Only task, subtask, or verify nodes can be removed."

    # Refuse to orphan children unless the caller opted into cascade removal.
    children = task.get("children", [])
    if isinstance(children, list) and len(children) > 0 and not cascade:
        return None, f"Task '{task_id}' has {len(children)} children. Use cascade=True to remove them."

    # Collect all nodes to remove (target first, then its subtree).
    nodes_to_remove = [task_id]
    if cascade:
        nodes_to_remove.extend(_collect_descendants(hierarchy, task_id))

    # Count tasks being removed. BUG FIX: nodes_to_remove already contains
    # task_id, and its type is guaranteed countable by the validation above,
    # so _count_tasks_in_subtree counts the target itself. The previous code
    # added an extra +1 for the target on top of that, double-counting it and
    # over-decrementing every ancestor's total/completed counts.
    total_removed, completed_removed = _count_tasks_in_subtree(hierarchy, nodes_to_remove)

    # Get parent before removing
    parent_id = task.get("parent")

    # Remove nodes from hierarchy
    for node_id in nodes_to_remove:
        hierarchy.pop(node_id, None)

    # Update parent's children list
    if parent_id:
        parent = hierarchy.get(parent_id)
        if parent:
            parent_children = parent.get("children", [])
            if isinstance(parent_children, list) and task_id in parent_children:
                parent_children.remove(task_id)
                parent["children"] = parent_children

    # Update ancestor task counts (handles parent_id of None as a no-op).
    _decrement_ancestor_counts(hierarchy, parent_id, total_removed, completed_removed)

    # Clean up dependency references
    _remove_dependency_references(hierarchy, nodes_to_remove)

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "task_id": task_id,
        "spec_id": spec_id,
        "cascade": cascade,
        "children_removed": len(nodes_to_remove) - 1,  # Exclude the target itself
        "total_tasks_removed": total_removed,
    }, None
1027
+
1028
+
1029
# Valid complexity levels for update_estimate.
# Caller input is lowercased and stripped before being checked against this tuple.
COMPLEXITY_LEVELS = ("low", "medium", "high")
1031
+
1032
+
1033
def update_estimate(
    spec_id: str,
    task_id: str,
    estimated_hours: Optional[float] = None,
    complexity: Optional[str] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Update effort/time estimates for a task.

    Updates the estimated_hours and/or complexity metadata for a task.
    At least one of estimated_hours or complexity must be provided.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to update.
        estimated_hours: Optional estimated hours (float, must be >= 0).
        complexity: Optional complexity level (low, medium, high).
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "hours": ..., "complexity": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Guard: the call must actually change something.
    if estimated_hours is None and complexity is None:
        return None, "At least one of estimated_hours or complexity must be provided"

    # Validate the hours value before touching any files.
    if estimated_hours is not None:
        if not isinstance(estimated_hours, (int, float)):
            return None, "estimated_hours must be a number"
        if estimated_hours < 0:
            return None, "estimated_hours must be >= 0"

    # Normalize and validate the complexity value.
    if complexity is not None:
        complexity = complexity.lower().strip()
        if complexity not in COMPLEXITY_LEVELS:
            return None, f"Invalid complexity '{complexity}'. Must be one of: {', '.join(COMPLEXITY_LEVELS)}"

    # Resolve the specs directory (explicit argument wins over auto-detect).
    resolved_dir = specs_dir if specs_dir is not None else find_specs_directory()
    if resolved_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Locate and load the specification document.
    if find_spec_file(spec_id, resolved_dir) is None:
        return None, f"Specification '{spec_id}' not found"

    document = load_spec(spec_id, resolved_dir)
    if document is None:
        return None, f"Failed to load specification '{spec_id}'"

    tree = document.get("hierarchy", {})

    node = tree.get(task_id)
    if node is None:
        return None, f"Task '{task_id}' not found"

    # Estimates only make sense on leaf-style nodes.
    node_type = node.get("type")
    if node_type not in ("task", "subtask", "verify"):
        return None, f"Cannot update estimates for node type '{node_type}'. Only task, subtask, or verify nodes can be updated."

    # Ensure a metadata dict exists on the node.
    meta = node.get("metadata")
    if meta is None:
        meta = {}
        node["metadata"] = meta

    # Remember old values so the caller can see what changed.
    prior_hours = meta.get("estimated_hours")
    prior_complexity = meta.get("complexity")

    if estimated_hours is not None:
        meta["estimated_hours"] = float(estimated_hours)
    if complexity is not None:
        meta["complexity"] = complexity

    # Persist the mutated document.
    if not save_spec(spec_id, document, resolved_dir):
        return None, "Failed to save specification"

    outcome: Dict[str, Any] = {
        "spec_id": spec_id,
        "task_id": task_id,
    }
    if estimated_hours is not None:
        outcome["hours"] = float(estimated_hours)
        outcome["previous_hours"] = prior_hours
    if complexity is not None:
        outcome["complexity"] = complexity
        outcome["previous_complexity"] = prior_complexity

    return outcome, None
1139
+
1140
+
1141
# Valid verification types for update_task_metadata.
# Caller input is lowercased and stripped before being checked against this tuple.
VERIFICATION_TYPES = ("auto", "manual", "none")

# Valid task categories for update_task_metadata.
# Also normalized to lowercase before validation.
TASK_CATEGORIES = ("implementation", "testing", "documentation", "investigation", "refactoring", "design")
1146
+
1147
+
1148
def update_task_metadata(
    spec_id: str,
    task_id: str,
    file_path: Optional[str] = None,
    description: Optional[str] = None,
    task_category: Optional[str] = None,
    actual_hours: Optional[float] = None,
    status_note: Optional[str] = None,
    verification_type: Optional[str] = None,
    command: Optional[str] = None,
    custom_metadata: Optional[Dict[str, Any]] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Update arbitrary metadata fields on a task.

    Updates various metadata fields on a task including file path, description,
    category, hours, notes, verification type, and custom fields.
    At least one field must be provided.

    Passing an empty string for a string field stages the value None; the
    apply loop below only writes None when the key already exists in the
    task's metadata, which effectively clears that existing field.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to update.
        file_path: Optional file path associated with the task.
        description: Optional task description.
        task_category: Optional task category (implementation, testing, etc.).
        actual_hours: Optional actual hours spent on task (must be >= 0).
        status_note: Optional status note or completion note.
        verification_type: Optional verification type (auto, manual, none).
        command: Optional command executed for the task.
        custom_metadata: Optional dict of custom metadata fields to merge.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "fields_updated": [...], ...}, None)
        On failure: (None, "error message")
    """
    # Collect all provided fields. Strings are stripped; empty strings
    # collapse to None (clear-the-field semantics, see docstring).
    updates: Dict[str, Any] = {}
    if file_path is not None:
        updates["file_path"] = file_path.strip() if file_path else None
    if description is not None:
        updates["description"] = description.strip() if description else None
    if task_category is not None:
        updates["task_category"] = task_category
    if actual_hours is not None:
        updates["actual_hours"] = actual_hours
    if status_note is not None:
        updates["status_note"] = status_note.strip() if status_note else None
    if verification_type is not None:
        updates["verification_type"] = verification_type
    if command is not None:
        updates["command"] = command.strip() if command else None

    # Validate at least one field is provided
    if not updates and not custom_metadata:
        return None, "At least one metadata field must be provided"

    # Validate actual_hours
    if actual_hours is not None:
        if not isinstance(actual_hours, (int, float)):
            return None, "actual_hours must be a number"
        if actual_hours < 0:
            return None, "actual_hours must be >= 0"

    # Validate task_category; the normalized (lowercased) value replaces the
    # raw value staged in `updates` above.
    if task_category is not None:
        task_category_lower = task_category.lower().strip()
        if task_category_lower not in TASK_CATEGORIES:
            return None, f"Invalid task_category '{task_category}'. Must be one of: {', '.join(TASK_CATEGORIES)}"
        updates["task_category"] = task_category_lower

    # Validate verification_type; same normalize-then-overwrite pattern.
    if verification_type is not None:
        verification_type_lower = verification_type.lower().strip()
        if verification_type_lower not in VERIFICATION_TYPES:
            return None, f"Invalid verification_type '{verification_type}'. Must be one of: {', '.join(VERIFICATION_TYPES)}"
        updates["verification_type"] = verification_type_lower

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only update task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot update metadata for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."

    # Get or create metadata
    metadata = task.get("metadata")
    if metadata is None:
        metadata = {}
        task["metadata"] = metadata

    # Track which fields were updated
    fields_updated = []

    # Apply updates. A None value is only written when the key already
    # exists (clearing it); otherwise a None staged from an empty-string
    # input is silently skipped and not reported in fields_updated.
    for key, value in updates.items():
        if value is not None or key in metadata:
            metadata[key] = value
            fields_updated.append(key)

    # Apply custom metadata. NOTE(review): the blocked names guard core node
    # fields, but metadata is a separate dict from the node itself, so this
    # is a conservative reservation rather than a structural necessity.
    if custom_metadata and isinstance(custom_metadata, dict):
        for key, value in custom_metadata.items():
            # Don't allow overwriting core fields via custom_metadata
            if key not in ("type", "title", "status", "parent", "children", "dependencies"):
                metadata[key] = value
                if key not in fields_updated:
                    fields_updated.append(key)

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "spec_id": spec_id,
        "task_id": task_id,
        "fields_updated": fields_updated,
    }, None