foundry-mcp 0.8.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- foundry_mcp/__init__.py +13 -0
- foundry_mcp/cli/__init__.py +67 -0
- foundry_mcp/cli/__main__.py +9 -0
- foundry_mcp/cli/agent.py +96 -0
- foundry_mcp/cli/commands/__init__.py +37 -0
- foundry_mcp/cli/commands/cache.py +137 -0
- foundry_mcp/cli/commands/dashboard.py +148 -0
- foundry_mcp/cli/commands/dev.py +446 -0
- foundry_mcp/cli/commands/journal.py +377 -0
- foundry_mcp/cli/commands/lifecycle.py +274 -0
- foundry_mcp/cli/commands/modify.py +824 -0
- foundry_mcp/cli/commands/plan.py +640 -0
- foundry_mcp/cli/commands/pr.py +393 -0
- foundry_mcp/cli/commands/review.py +667 -0
- foundry_mcp/cli/commands/session.py +472 -0
- foundry_mcp/cli/commands/specs.py +686 -0
- foundry_mcp/cli/commands/tasks.py +807 -0
- foundry_mcp/cli/commands/testing.py +676 -0
- foundry_mcp/cli/commands/validate.py +982 -0
- foundry_mcp/cli/config.py +98 -0
- foundry_mcp/cli/context.py +298 -0
- foundry_mcp/cli/logging.py +212 -0
- foundry_mcp/cli/main.py +44 -0
- foundry_mcp/cli/output.py +122 -0
- foundry_mcp/cli/registry.py +110 -0
- foundry_mcp/cli/resilience.py +178 -0
- foundry_mcp/cli/transcript.py +217 -0
- foundry_mcp/config.py +1454 -0
- foundry_mcp/core/__init__.py +144 -0
- foundry_mcp/core/ai_consultation.py +1773 -0
- foundry_mcp/core/batch_operations.py +1202 -0
- foundry_mcp/core/cache.py +195 -0
- foundry_mcp/core/capabilities.py +446 -0
- foundry_mcp/core/concurrency.py +898 -0
- foundry_mcp/core/context.py +540 -0
- foundry_mcp/core/discovery.py +1603 -0
- foundry_mcp/core/error_collection.py +728 -0
- foundry_mcp/core/error_store.py +592 -0
- foundry_mcp/core/health.py +749 -0
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/journal.py +700 -0
- foundry_mcp/core/lifecycle.py +412 -0
- foundry_mcp/core/llm_config.py +1376 -0
- foundry_mcp/core/llm_patterns.py +510 -0
- foundry_mcp/core/llm_provider.py +1569 -0
- foundry_mcp/core/logging_config.py +374 -0
- foundry_mcp/core/metrics_persistence.py +584 -0
- foundry_mcp/core/metrics_registry.py +327 -0
- foundry_mcp/core/metrics_store.py +641 -0
- foundry_mcp/core/modifications.py +224 -0
- foundry_mcp/core/naming.py +146 -0
- foundry_mcp/core/observability.py +1216 -0
- foundry_mcp/core/otel.py +452 -0
- foundry_mcp/core/otel_stubs.py +264 -0
- foundry_mcp/core/pagination.py +255 -0
- foundry_mcp/core/progress.py +387 -0
- foundry_mcp/core/prometheus.py +564 -0
- foundry_mcp/core/prompts/__init__.py +464 -0
- foundry_mcp/core/prompts/fidelity_review.py +691 -0
- foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
- foundry_mcp/core/prompts/plan_review.py +627 -0
- foundry_mcp/core/providers/__init__.py +237 -0
- foundry_mcp/core/providers/base.py +515 -0
- foundry_mcp/core/providers/claude.py +472 -0
- foundry_mcp/core/providers/codex.py +637 -0
- foundry_mcp/core/providers/cursor_agent.py +630 -0
- foundry_mcp/core/providers/detectors.py +515 -0
- foundry_mcp/core/providers/gemini.py +426 -0
- foundry_mcp/core/providers/opencode.py +718 -0
- foundry_mcp/core/providers/opencode_wrapper.js +308 -0
- foundry_mcp/core/providers/package-lock.json +24 -0
- foundry_mcp/core/providers/package.json +25 -0
- foundry_mcp/core/providers/registry.py +607 -0
- foundry_mcp/core/providers/test_provider.py +171 -0
- foundry_mcp/core/providers/validation.py +857 -0
- foundry_mcp/core/rate_limit.py +427 -0
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +528 -0
- foundry_mcp/core/research/models.py +1234 -0
- foundry_mcp/core/research/providers/__init__.py +40 -0
- foundry_mcp/core/research/providers/base.py +242 -0
- foundry_mcp/core/research/providers/google.py +507 -0
- foundry_mcp/core/research/providers/perplexity.py +442 -0
- foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
- foundry_mcp/core/research/providers/tavily.py +383 -0
- foundry_mcp/core/research/workflows/__init__.py +25 -0
- foundry_mcp/core/research/workflows/base.py +298 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +539 -0
- foundry_mcp/core/research/workflows/deep_research.py +4142 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/resilience.py +600 -0
- foundry_mcp/core/responses.py +1624 -0
- foundry_mcp/core/review.py +366 -0
- foundry_mcp/core/security.py +438 -0
- foundry_mcp/core/spec.py +4119 -0
- foundry_mcp/core/task.py +2463 -0
- foundry_mcp/core/testing.py +839 -0
- foundry_mcp/core/validation.py +2357 -0
- foundry_mcp/dashboard/__init__.py +32 -0
- foundry_mcp/dashboard/app.py +119 -0
- foundry_mcp/dashboard/components/__init__.py +17 -0
- foundry_mcp/dashboard/components/cards.py +88 -0
- foundry_mcp/dashboard/components/charts.py +177 -0
- foundry_mcp/dashboard/components/filters.py +136 -0
- foundry_mcp/dashboard/components/tables.py +195 -0
- foundry_mcp/dashboard/data/__init__.py +11 -0
- foundry_mcp/dashboard/data/stores.py +433 -0
- foundry_mcp/dashboard/launcher.py +300 -0
- foundry_mcp/dashboard/views/__init__.py +12 -0
- foundry_mcp/dashboard/views/errors.py +217 -0
- foundry_mcp/dashboard/views/metrics.py +164 -0
- foundry_mcp/dashboard/views/overview.py +96 -0
- foundry_mcp/dashboard/views/providers.py +83 -0
- foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
- foundry_mcp/dashboard/views/tool_usage.py +139 -0
- foundry_mcp/prompts/__init__.py +9 -0
- foundry_mcp/prompts/workflows.py +525 -0
- foundry_mcp/resources/__init__.py +9 -0
- foundry_mcp/resources/specs.py +591 -0
- foundry_mcp/schemas/__init__.py +38 -0
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +414 -0
- foundry_mcp/server.py +150 -0
- foundry_mcp/tools/__init__.py +10 -0
- foundry_mcp/tools/unified/__init__.py +92 -0
- foundry_mcp/tools/unified/authoring.py +3620 -0
- foundry_mcp/tools/unified/context_helpers.py +98 -0
- foundry_mcp/tools/unified/documentation_helpers.py +268 -0
- foundry_mcp/tools/unified/environment.py +1341 -0
- foundry_mcp/tools/unified/error.py +479 -0
- foundry_mcp/tools/unified/health.py +225 -0
- foundry_mcp/tools/unified/journal.py +841 -0
- foundry_mcp/tools/unified/lifecycle.py +640 -0
- foundry_mcp/tools/unified/metrics.py +777 -0
- foundry_mcp/tools/unified/plan.py +876 -0
- foundry_mcp/tools/unified/pr.py +294 -0
- foundry_mcp/tools/unified/provider.py +589 -0
- foundry_mcp/tools/unified/research.py +1283 -0
- foundry_mcp/tools/unified/review.py +1042 -0
- foundry_mcp/tools/unified/review_helpers.py +314 -0
- foundry_mcp/tools/unified/router.py +102 -0
- foundry_mcp/tools/unified/server.py +565 -0
- foundry_mcp/tools/unified/spec.py +1283 -0
- foundry_mcp/tools/unified/task.py +3846 -0
- foundry_mcp/tools/unified/test.py +431 -0
- foundry_mcp/tools/unified/verification.py +520 -0
- foundry_mcp-0.8.22.dist-info/METADATA +344 -0
- foundry_mcp-0.8.22.dist-info/RECORD +153 -0
- foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
- foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
- foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
foundry_mcp/core/task.py
ADDED
@@ -0,0 +1,2463 @@
"""
Task discovery and dependency operations for SDD workflows.

Provides finding next tasks, dependency checking, and task preparation.
"""

import re
from dataclasses import asdict
from pathlib import Path
from typing import Optional, Dict, Any, Tuple, List

from foundry_mcp.core.spec import (
    CATEGORIES,
    load_spec,
    save_spec,
    find_spec_file,
    find_specs_directory,
    get_node,
)
from foundry_mcp.core.responses import success_response, error_response

# Valid task types for add_task
TASK_TYPES = ("task", "subtask", "verify", "research")


def is_unblocked(spec_data: Dict[str, Any], task_id: str, task_data: Dict[str, Any]) -> bool:
    """
    Check if all blocking dependencies are completed.

    This checks both task-level dependencies and phase-level dependencies.
    A task is blocked if:
    1. Any of its direct task dependencies are not completed, OR
    2. Its parent phase is blocked by an incomplete phase

    Research nodes have special blocking behavior based on blocking_mode:
    - "none": Research doesn't block dependents
    - "soft": Research is informational, doesn't block (default)
    - "hard": Research must complete before dependents can start

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier
        task_data: Task data dictionary

    Returns:
        True if task has no blockers or all blockers are completed
    """
    hierarchy = spec_data.get("hierarchy", {})

    # Check task-level dependencies
    blocked_by = task_data.get("dependencies", {}).get("blocked_by", [])
    for blocker_id in blocked_by:
        blocker = hierarchy.get(blocker_id)
        if not blocker:
            continue

        # Special handling for research nodes based on blocking_mode
        if blocker.get("type") == "research":
            blocking_mode = blocker.get("metadata", {}).get("blocking_mode", "soft")
            if blocking_mode in ("none", "soft"):
                # Research with "none" or "soft" blocking mode doesn't block
                continue
            # "hard" mode falls through to standard completion check

        if blocker.get("status") != "completed":
            return False

    # Check phase-level dependencies
    # Walk up to find the parent phase
    parent_phase_id = None
    current = task_data
    while current:
        parent_id = current.get("parent")
        if not parent_id:
            break
        parent = hierarchy.get(parent_id)
        if not parent:
            break
        if parent.get("type") == "phase":
            parent_phase_id = parent_id
            break
        current = parent

    # If task belongs to a phase, check if that phase is blocked
    if parent_phase_id:
        parent_phase = hierarchy.get(parent_phase_id)
        if parent_phase:
            phase_blocked_by = parent_phase.get("dependencies", {}).get("blocked_by", [])
            for blocker_id in phase_blocked_by:
                blocker = hierarchy.get(blocker_id)
                if not blocker or blocker.get("status") != "completed":
                    return False

    return True
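
# Illustrative usage (editor's sketch, not part of the packaged source).
# A "hard" research blocker gates its dependent until completed; the default
# "soft" mode is informational only. Assuming the hierarchy shape documented
# above:
#
#   spec = {"hierarchy": {
#       "research-1-1": {"type": "research", "status": "pending",
#                        "metadata": {"blocking_mode": "hard"}},
#       "task-1-1": {"type": "task", "status": "pending",
#                    "dependencies": {"blocked_by": ["research-1-1"]}},
#   }}
#   is_unblocked(spec, "task-1-1", spec["hierarchy"]["task-1-1"])  # False
#   spec["hierarchy"]["research-1-1"]["metadata"]["blocking_mode"] = "soft"
#   is_unblocked(spec, "task-1-1", spec["hierarchy"]["task-1-1"])  # True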


def is_in_current_phase(spec_data: Dict[str, Any], task_id: str, phase_id: str) -> bool:
    """
    Check if task belongs to current phase (including nested groups).

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier
        phase_id: Phase identifier to check against

    Returns:
        True if task is within the phase hierarchy
    """
    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return False

    # Walk up parent chain to find phase
    current = task
    while current:
        parent_id = current.get("parent")
        if parent_id == phase_id:
            return True
        if not parent_id:
            return False
        current = hierarchy.get(parent_id)
    return False


def get_next_task(spec_data: Dict[str, Any]) -> Optional[Tuple[str, Dict[str, Any]]]:
    """
    Find the next actionable task.

    Searches phases in order (in_progress first, then pending).
    Within each phase, finds leaf tasks (no children) before parent tasks.
    Only returns unblocked tasks with pending status.

    Args:
        spec_data: JSON spec file data

    Returns:
        Tuple of (task_id, task_data) or None if no task available
    """
    hierarchy = spec_data.get("hierarchy", {})

    # Get all phases in order
    spec_root = hierarchy.get("spec-root", {})
    phase_order = spec_root.get("children", [])

    # Build list of phases to check: in_progress first, then pending
    phases_to_check = []

    # First, add any in_progress phases
    for phase_id in phase_order:
        phase = hierarchy.get(phase_id, {})
        if phase.get("type") == "phase" and phase.get("status") == "in_progress":
            phases_to_check.append(phase_id)

    # Then add pending phases
    for phase_id in phase_order:
        phase = hierarchy.get(phase_id, {})
        if phase.get("type") == "phase" and phase.get("status") == "pending":
            phases_to_check.append(phase_id)

    if not phases_to_check:
        return None

    # Try each phase until we find actionable tasks
    for current_phase in phases_to_check:
        # Find first available task or subtask in current phase
        # Prefer leaf tasks (no children) over parent tasks
        candidates = []
        for key, value in hierarchy.items():
            if (value.get("type") in ["task", "subtask", "verify"] and
                    value.get("status") == "pending" and
                    is_unblocked(spec_data, key, value) and
                    is_in_current_phase(spec_data, key, current_phase)):
                has_children = len(value.get("children", [])) > 0
                candidates.append((key, value, has_children))

        if candidates:
            # Sort: leaf tasks first (has_children=False), then by ID
            candidates.sort(key=lambda x: (x[2], x[0]))
            return (candidates[0][0], candidates[0][1])

    # No actionable tasks found in any phase
    return None
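
# Illustrative usage (editor's sketch, not part of the packaged source).
# Discovery walks phases in order and skips completed work, so the pending
# leaf task below is returned:
#
#   spec = {"hierarchy": {
#       "spec-root": {"children": ["phase-1"]},
#       "phase-1": {"type": "phase", "status": "pending",
#                   "children": ["task-1-1", "task-1-2"]},
#       "task-1-1": {"type": "task", "status": "completed", "parent": "phase-1"},
#       "task-1-2": {"type": "task", "status": "pending", "parent": "phase-1"},
#   }}
#   get_next_task(spec)  # -> ("task-1-2", {...})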


def check_dependencies(spec_data: Dict[str, Any], task_id: str) -> Dict[str, Any]:
    """
    Check dependency status for a task.

    Args:
        spec_data: JSON spec file data
        task_id: Task identifier

    Returns:
        Dictionary with dependency analysis including:
        - task_id: The task being checked
        - can_start: Whether the task is unblocked
        - blocked_by: List of blocking task info
        - soft_depends: List of soft dependency info
        - blocks: List of tasks this blocks
    """
    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)

    if not task:
        return {"error": f"Task {task_id} not found"}

    deps = task.get("dependencies", {})
    blocked_by = deps.get("blocked_by", [])
    depends = deps.get("depends", [])
    blocks = deps.get("blocks", [])

    result = {
        "task_id": task_id,
        "can_start": is_unblocked(spec_data, task_id, task),
        "blocked_by": [],
        "soft_depends": [],
        "blocks": []
    }

    # Get info for blocking tasks
    for dep_id in blocked_by:
        dep_task = hierarchy.get(dep_id)
        if dep_task:
            result["blocked_by"].append({
                "id": dep_id,
                "title": dep_task.get("title", ""),
                "status": dep_task.get("status", ""),
                "file": dep_task.get("metadata", {}).get("file_path", "")
            })

    # Get info for soft dependencies
    for dep_id in depends:
        dep_task = hierarchy.get(dep_id)
        if dep_task:
            result["soft_depends"].append({
                "id": dep_id,
                "title": dep_task.get("title", ""),
                "status": dep_task.get("status", ""),
                "file": dep_task.get("metadata", {}).get("file_path", "")
            })

    # Get info for tasks this blocks
    for dep_id in blocks:
        dep_task = hierarchy.get(dep_id)
        if dep_task:
            result["blocks"].append({
                "id": dep_id,
                "title": dep_task.get("title", ""),
                "status": dep_task.get("status", ""),
                "file": dep_task.get("metadata", {}).get("file_path", "")
            })

    return result
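
# Illustrative usage (editor's sketch, not part of the packaged source).
# For the small spec sketched above, the unblocked pending task reports an
# empty dependency analysis:
#
#   check_dependencies(spec, "task-1-2")
#   # -> {"task_id": "task-1-2", "can_start": True,
#   #     "blocked_by": [], "soft_depends": [], "blocks": []}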


def _get_sibling_ids(
    hierarchy: Dict[str, Dict[str, Any]],
    parent_id: str,
    parent_node: Dict[str, Any],
) -> List[str]:
    """Return sibling IDs for a parent, falling back to scanning the hierarchy."""
    children = parent_node.get("children", [])
    if isinstance(children, list) and children:
        return [child_id for child_id in children if child_id in hierarchy]

    return [
        node_id
        for node_id, node in hierarchy.items()
        if node.get("parent") == parent_id
    ]


def _get_latest_journal_excerpt(
    journal_entries: List[Dict[str, Any]],
    task_id: str,
) -> Optional[Dict[str, Any]]:
    """Return the most recent journal entry for the given task."""
    if not journal_entries:
        return None

    filtered = [
        entry for entry in journal_entries if entry.get("task_id") == task_id
    ]
    if not filtered:
        return None

    filtered.sort(key=lambda entry: entry.get("timestamp") or "", reverse=True)
    latest = filtered[0]
    summary = (latest.get("content") or "").strip()

    return {
        "timestamp": latest.get("timestamp"),
        "entry_type": latest.get("entry_type"),
        "summary": summary,
    }


def _find_phase_node(hierarchy: Dict[str, Dict[str, Any]], task_node: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Walk ancestor chain to find the nearest phase node."""
    current = task_node
    while current:
        parent_id = current.get("parent")
        if not parent_id:
            return None
        parent = hierarchy.get(parent_id)
        if not parent:
            return None
        if parent.get("type") == "phase":
            return parent
        current = parent
    return None


def get_previous_sibling(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return metadata about the previous sibling for the given task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary describing the previous sibling or None when the task is
        first in its group / has no siblings.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return None

    parent_id = task.get("parent")
    if not parent_id:
        return None

    parent = hierarchy.get(parent_id, {})
    sibling_ids = _get_sibling_ids(hierarchy, parent_id, parent)
    if not sibling_ids:
        return None

    try:
        task_index = sibling_ids.index(task_id)
    except ValueError:
        return None

    if task_index == 0:
        return None

    previous_id = sibling_ids[task_index - 1]
    previous_task = hierarchy.get(previous_id)
    if not previous_task:
        return None

    metadata = previous_task.get("metadata", {}) or {}
    journal_excerpt = _get_latest_journal_excerpt(
        spec_data.get("journal", []),
        previous_id,
    )

    return {
        "id": previous_id,
        "title": previous_task.get("title", ""),
        "status": previous_task.get("status", ""),
        "type": previous_task.get("type", ""),
        "file_path": metadata.get("file_path"),
        "completed_at": metadata.get("completed_at"),
        "journal_excerpt": journal_excerpt,
    }


def get_parent_context(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return contextual information about the parent node for a task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary with parent metadata or None if the task has no parent.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return None

    parent_id = task.get("parent")
    if not parent_id:
        return None

    parent = hierarchy.get(parent_id)
    if not parent:
        return None

    parent_metadata = parent.get("metadata", {}) or {}
    description = (
        parent_metadata.get("description")
        or parent_metadata.get("note")
        or parent.get("description")
    )

    children_ids = _get_sibling_ids(hierarchy, parent_id, parent)
    children_entries = [
        {
            "id": child_id,
            "title": hierarchy.get(child_id, {}).get("title", ""),
            "status": hierarchy.get(child_id, {}).get("status", ""),
        }
        for child_id in children_ids
    ]

    position_label = None
    if children_ids and task_id in children_ids:
        index = children_ids.index(task_id) + 1
        total = len(children_ids)
        label = "subtasks" if parent.get("type") == "task" else "children"
        position_label = f"{index} of {total} {label}"

    remaining_tasks = None
    completed_tasks = parent.get("completed_tasks")
    total_tasks = parent.get("total_tasks")
    if isinstance(completed_tasks, int) and isinstance(total_tasks, int):
        remaining_tasks = max(total_tasks - completed_tasks, 0)

    return {
        "id": parent_id,
        "title": parent.get("title", ""),
        "type": parent.get("type", ""),
        "status": parent.get("status", ""),
        "description": description,
        "completed_tasks": completed_tasks,
        "total_tasks": total_tasks,
        "remaining_tasks": remaining_tasks,
        "position_label": position_label,
        "children": children_entries,
    }


def get_phase_context(spec_data: Dict[str, Any], task_id: str) -> Optional[Dict[str, Any]]:
    """
    Return phase-level context for a task, including progress metrics.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: ID of the current task.

    Returns:
        Dictionary with phase data or None if the task does not belong to a phase.
    """
    if not spec_data:
        return None

    hierarchy = spec_data.get("hierarchy", {})
    task = hierarchy.get(task_id)
    if not task:
        return None

    phase_node = _find_phase_node(hierarchy, task)
    if not phase_node:
        return None

    phase_id = None
    for node_id, node in hierarchy.items():
        if node is phase_node:
            phase_id = node_id
            break

    phase_metadata = phase_node.get("metadata", {}) or {}
    summary = (
        phase_metadata.get("description")
        or phase_metadata.get("note")
        or phase_node.get("description")
    )
    blockers = phase_node.get("dependencies", {}).get("blocked_by", []) or []

    completed = phase_node.get("completed_tasks")
    total = phase_node.get("total_tasks")
    percentage = None
    if isinstance(completed, int) and isinstance(total, int) and total > 0:
        percentage = int((completed / total) * 100)

    spec_root = hierarchy.get("spec-root", {})
    sequence_index = None
    phase_list = spec_root.get("children", [])
    if isinstance(phase_list, list) and phase_id in phase_list:
        sequence_index = phase_list.index(phase_id) + 1

    return {
        "id": phase_id,
        "title": phase_node.get("title", ""),
        "status": phase_node.get("status", ""),
        "sequence_index": sequence_index,
        "completed_tasks": completed,
        "total_tasks": total,
        "percentage": percentage,
        "summary": summary,
        "blockers": blockers,
    }
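
# Illustrative usage (editor's sketch, not part of the packaged source).
# Progress metrics come straight from the phase node's counters:
#
#   spec = {"hierarchy": {
#       "spec-root": {"children": ["phase-1"]},
#       "phase-1": {"type": "phase", "status": "in_progress",
#                   "parent": "spec-root",
#                   "completed_tasks": 2, "total_tasks": 5},
#       "task-1-3": {"type": "task", "status": "pending", "parent": "phase-1"},
#   }}
#   ctx = get_phase_context(spec, "task-1-3")
#   ctx["percentage"]      # 40
#   ctx["sequence_index"]  # 1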


def get_task_journal_summary(
    spec_data: Dict[str, Any],
    task_id: str,
    max_entries: int = 3,
) -> Dict[str, Any]:
    """
    Return a compact summary of journal entries for a task.

    Args:
        spec_data: Loaded JSON spec dictionary.
        task_id: Task identifier.
        max_entries: Maximum entries to include in summary.

    Returns:
        Dictionary with entry_count and entries[]
    """
    if not spec_data or not task_id:
        return {"entry_count": 0, "entries": []}

    journal = spec_data.get("journal", []) or []
    filtered = [
        entry for entry in journal
        if entry.get("task_id") == task_id
    ]

    if not filtered:
        return {"entry_count": 0, "entries": []}

    filtered.sort(key=lambda entry: entry.get("timestamp") or "", reverse=True)
    entries = []
    for entry in filtered[:max_entries]:
        summary = (entry.get("content") or "").strip()
        entries.append({
            "timestamp": entry.get("timestamp"),
            "entry_type": entry.get("entry_type"),
            "title": entry.get("title"),
            "summary": summary,
            "author": entry.get("author"),
        })

    return {
        "entry_count": len(filtered),
        "entries": entries,
    }


def _compute_auto_mode_hints(
    spec_data: Dict[str, Any],
    task_id: str,
    task_data: Dict[str, Any],
) -> Dict[str, Any]:
    """
    Compute hints for autonomous mode execution.

    These hints help an autonomous agent decide whether to proceed
    without user input or pause for confirmation.

    Args:
        spec_data: Loaded spec data
        task_id: Current task ID
        task_data: Task node data

    Returns:
        Dictionary with autonomous mode hints:
        - estimated_complexity: "low", "medium", or "high"
        - has_sibling_verify: bool (phase has verify tasks)
        - may_require_user_input: bool (task category suggests user input needed)
    """
    hierarchy = spec_data.get("hierarchy", {})
    metadata = task_data.get("metadata", {}) or {}

    # Compute estimated_complexity
    complexity = metadata.get("complexity", "").lower()
    estimated_hours = metadata.get("estimated_hours")

    if complexity in ("complex", "high"):
        estimated_complexity = "high"
    elif complexity in ("medium", "moderate"):
        estimated_complexity = "medium"
    elif complexity in ("simple", "low"):
        estimated_complexity = "low"
    elif estimated_hours is not None:
        # Derive from hours if explicit complexity not set
        if estimated_hours > 2:
            estimated_complexity = "high"
        elif estimated_hours > 0.5:
            estimated_complexity = "medium"
        else:
            estimated_complexity = "low"
    else:
        # Default to medium if no hints
        estimated_complexity = "medium"

    # Check has_sibling_verify - look for verify tasks in same phase
    parent_id = task_data.get("parent")
    has_sibling_verify = False
    if parent_id:
        parent = hierarchy.get(parent_id, {})
        children = parent.get("children", [])
        for sibling_id in children:
            if sibling_id != task_id:
                sibling = hierarchy.get(sibling_id, {})
                if sibling.get("type") == "verify":
                    has_sibling_verify = True
                    break

    # Check may_require_user_input based on task_category
    task_category = metadata.get("task_category", "").lower()
    may_require_user_input = task_category in (
        "decision",
        "investigation",
        "planning",
        "design",
    )

    return {
        "estimated_complexity": estimated_complexity,
        "has_sibling_verify": has_sibling_verify,
        "may_require_user_input": may_require_user_input,
    }
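
# Illustrative usage (editor's sketch, not part of the packaged source).
# With no explicit complexity, the hour thresholds decide (>2h high,
# >0.5h medium, else low):
#
#   task = {"metadata": {"estimated_hours": 3.0}}
#   _compute_auto_mode_hints({"hierarchy": {}}, "task-x", task)
#   # -> {"estimated_complexity": "high", "has_sibling_verify": False,
#   #     "may_require_user_input": False}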


def prepare_task(
    spec_id: str,
    specs_dir: Path,
    task_id: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Prepare complete context for task implementation.

    Combines task discovery, dependency checking, and context gathering.

    Args:
        spec_id: Specification ID
        specs_dir: Path to specs directory
        task_id: Optional task ID (auto-discovers if not provided)

    Returns:
        Complete task preparation data with context.
    """
    # Find and load spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if not spec_path:
        return asdict(error_response(f"Spec file not found for {spec_id}"))

    spec_data = load_spec(spec_id, specs_dir)
    if not spec_data:
        return asdict(error_response("Failed to load spec"))

    # Get task ID if not provided
    if not task_id:
        next_task = get_next_task(spec_data)
        if not next_task:
            # Check if spec is complete
            hierarchy = spec_data.get("hierarchy", {})
            all_tasks = [
                node for node in hierarchy.values()
                if node.get("type") in ["task", "subtask", "verify"]
            ]
            completed = sum(1 for t in all_tasks if t.get("status") == "completed")
            pending = sum(1 for t in all_tasks if t.get("status") == "pending")

            if pending == 0 and completed > 0:
                return asdict(success_response(
                    task_id=None,
                    spec_complete=True
                ))
            else:
                return asdict(error_response("No actionable tasks found"))

        task_id, _ = next_task

    # Get task info
    task_data = get_node(spec_data, task_id)
    if not task_data:
        return asdict(error_response(f"Task {task_id} not found"))

    # Check dependencies
    deps = check_dependencies(spec_data, task_id)

    # Gather context
    context = {
        "previous_sibling": get_previous_sibling(spec_data, task_id),
        "parent_task": get_parent_context(spec_data, task_id),
        "phase": get_phase_context(spec_data, task_id),
        "task_journal": get_task_journal_summary(spec_data, task_id),
    }

    # Compute autonomous mode hints
    auto_mode_hints = _compute_auto_mode_hints(spec_data, task_id, task_data)

    return asdict(success_response(
        task_id=task_id,
        task_data=task_data,
        dependencies=deps,
        spec_complete=False,
        context=context,
        auto_mode_hints=auto_mode_hints,
    ))
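
# Illustrative usage (editor's sketch; the spec ID and directory below are
# hypothetical). With no task_id, the next actionable task is auto-discovered;
# the returned dict mirrors the success_response/error_response payloads:
#
#   prep = prepare_task("my-spec", Path("./specs"))
#   prep = prepare_task("my-spec", Path("./specs"), task_id="task-1-2")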


def _generate_task_id(parent_id: str, existing_children: List[str], task_type: str) -> str:
    """
    Generate a new task ID based on parent and existing siblings.

    For task IDs:
    - If parent is phase-N, generate task-N-M where M is next available
    - If parent is task-N-M, generate task-N-M-P where P is next available

    For verify IDs:
    - Same pattern but with "verify-" prefix

    For research IDs:
    - Same pattern but with "research-" prefix

    Args:
        parent_id: Parent node ID
        existing_children: List of existing child IDs
        task_type: Type of task (task, subtask, verify, research)

    Returns:
        New task ID string
    """
    # Map task_type to ID prefix
    prefix_map = {"verify": "verify", "research": "research"}
    prefix = prefix_map.get(task_type, "task")

    # Extract numeric parts from parent
    if parent_id.startswith("phase-"):
        # Parent is phase-N, new task is task-N-1, task-N-2, etc.
        phase_num = parent_id.replace("phase-", "")
        base = f"{prefix}-{phase_num}"
    elif parent_id.startswith(("task-", "verify-", "research-")):
        # Parent is task-N-M, verify-N-M, or research-N-M; new task appends next number
        # Remove the prefix to get the numeric path
        if parent_id.startswith("task-"):
            base = f"{prefix}-{parent_id[5:]}"  # len("task-") = 5
        elif parent_id.startswith("verify-"):
            base = f"{prefix}-{parent_id[7:]}"  # len("verify-") = 7
        else:  # research-
            base = f"{prefix}-{parent_id[9:]}"  # len("research-") = 9
    else:
        # Unknown parent type, generate based on existing children count
        base = f"{prefix}-1"

    # Find the next available index
    pattern = re.compile(rf"^{re.escape(base)}-(\d+)$")
    max_index = 0
    for child_id in existing_children:
        match = pattern.match(child_id)
        if match:
            index = int(match.group(1))
            max_index = max(max_index, index)

    return f"{base}-{max_index + 1}"


def _update_ancestor_counts(hierarchy: Dict[str, Any], node_id: str, delta: int = 1) -> None:
    """
    Walk up the hierarchy and increment total_tasks for all ancestors.

    Args:
        hierarchy: The spec hierarchy dict
        node_id: Starting node ID
        delta: Amount to add to total_tasks (default 1)
    """
    current_id = node_id
    visited = set()

    while current_id:
        if current_id in visited:
            break
        visited.add(current_id)

        node = hierarchy.get(current_id)
        if not node:
            break

        # Increment total_tasks
        current_total = node.get("total_tasks", 0)
        node["total_tasks"] = current_total + delta

        # Move to parent
        current_id = node.get("parent")


def add_task(
    spec_id: str,
    parent_id: str,
    title: str,
    description: Optional[str] = None,
    task_type: str = "task",
    estimated_hours: Optional[float] = None,
    position: Optional[int] = None,
    file_path: Optional[str] = None,
    specs_dir: Optional[Path] = None,
    # Research-specific parameters
    research_type: Optional[str] = None,
    blocking_mode: Optional[str] = None,
    query: Optional[str] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add a new task to a specification's hierarchy.

    Creates a new task, subtask, verify, or research node under the specified parent.
    Automatically generates the task ID and updates ancestor task counts.

    Args:
        spec_id: Specification ID to add task to.
        parent_id: Parent node ID (phase or task).
        title: Task title.
        description: Optional task description.
        task_type: Type of task (task, subtask, verify, research). Default: task.
        estimated_hours: Optional estimated hours.
        position: Optional position in parent's children list (0-based).
        file_path: Optional file path associated with this task.
        specs_dir: Path to specs directory (auto-detected if not provided).
        research_type: For research nodes - workflow type (chat, consensus, etc).
        blocking_mode: For research nodes - blocking behavior (none, soft, hard).
        query: For research nodes - the research question/topic.

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "parent": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate task_type
    if task_type not in TASK_TYPES:
        return None, f"Invalid task_type '{task_type}'. Must be one of: {', '.join(TASK_TYPES)}"

    # Validate research-specific parameters
    if task_type == "research":
        from foundry_mcp.core.validation import VALID_RESEARCH_TYPES, RESEARCH_BLOCKING_MODES

        if research_type and research_type not in VALID_RESEARCH_TYPES:
            return None, f"Invalid research_type '{research_type}'. Must be one of: {', '.join(sorted(VALID_RESEARCH_TYPES))}"
        if blocking_mode and blocking_mode not in RESEARCH_BLOCKING_MODES:
            return None, f"Invalid blocking_mode '{blocking_mode}'. Must be one of: {', '.join(sorted(RESEARCH_BLOCKING_MODES))}"

    # Validate title
    if not title or not title.strip():
        return None, "Title is required"

    title = title.strip()

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate parent exists
    parent = hierarchy.get(parent_id)
    if parent is None:
        return None, f"Parent node '{parent_id}' not found"

    # Validate parent type (can add tasks to phases, groups, or tasks)
    parent_type = parent.get("type")
    if parent_type not in ("phase", "group", "task"):
        return None, f"Cannot add tasks to node type '{parent_type}'. Parent must be a phase, group, or task."

    # Get existing children
    existing_children = parent.get("children", [])
    if not isinstance(existing_children, list):
        existing_children = []

    # Generate task ID
    task_id = _generate_task_id(parent_id, existing_children, task_type)

    # Build metadata
    metadata: Dict[str, Any] = {}
    if description:
        metadata["description"] = description.strip()
    if estimated_hours is not None:
        metadata["estimated_hours"] = estimated_hours
    if file_path:
        metadata["file_path"] = file_path.strip()

    # Add research-specific metadata
    if task_type == "research":
        metadata["research_type"] = research_type or "consensus"  # Default to consensus
        metadata["blocking_mode"] = blocking_mode or "soft"  # Default to soft blocking
        if query:
            metadata["query"] = query.strip()
        metadata["research_history"] = []  # Empty history initially
        metadata["findings"] = {}  # Empty findings initially

    # Create the task node
    task_node = {
        "type": task_type,
        "title": title,
        "status": "pending",
        "parent": parent_id,
        "children": [],
        "total_tasks": 1,  # Counts itself
        "completed_tasks": 0,
        "metadata": metadata,
        "dependencies": {
            "blocks": [],
            "blocked_by": [],
            "depends": [],
        },
    }

    # Add to hierarchy
    hierarchy[task_id] = task_node

    # Update parent's children list
    if position is not None and 0 <= position <= len(existing_children):
        existing_children.insert(position, task_id)
    else:
        existing_children.append(task_id)
    parent["children"] = existing_children

    # Update ancestor task counts
    _update_ancestor_counts(hierarchy, parent_id, delta=1)

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "task_id": task_id,
        "parent": parent_id,
        "title": title,
        "type": task_type,
        "position": position if position is not None else len(existing_children) - 1,
        "file_path": file_path.strip() if file_path else None,
    }, None
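
# Illustrative usage (editor's sketch; spec ID, parent, and path are
# hypothetical). The (result, error) tuple convention means callers branch on
# the second element:
#
#   result, err = add_task("my-spec", "phase-1", "Write unit tests",
#                          task_type="verify", specs_dir=Path("./specs"))
#   if err is None:
#       print(result["task_id"])  # e.g. "verify-1-3"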


def _collect_descendants(hierarchy: Dict[str, Any], node_id: str) -> List[str]:
    """
    Recursively collect all descendant node IDs for a given node.

    Args:
        hierarchy: The spec hierarchy dict
        node_id: Starting node ID

    Returns:
        List of all descendant node IDs (not including the starting node)
    """
    descendants = []
    node = hierarchy.get(node_id)
    if not node:
        return descendants

    children = node.get("children", [])
    if not isinstance(children, list):
        return descendants

    for child_id in children:
        descendants.append(child_id)
        descendants.extend(_collect_descendants(hierarchy, child_id))

    return descendants


def _count_tasks_in_subtree(hierarchy: Dict[str, Any], node_ids: List[str]) -> Tuple[int, int]:
    """
    Count total and completed tasks in a list of nodes.

    Args:
        hierarchy: The spec hierarchy dict
        node_ids: List of node IDs to count

    Returns:
        Tuple of (total_count, completed_count)
    """
    total = 0
    completed = 0

    for node_id in node_ids:
        node = hierarchy.get(node_id)
        if not node:
            continue
        node_type = node.get("type")
        if node_type in ("task", "subtask", "verify"):
            total += 1
            if node.get("status") == "completed":
                completed += 1

    return total, completed


def _decrement_ancestor_counts(
    hierarchy: Dict[str, Any],
    node_id: str,
    total_delta: int,
    completed_delta: int,
) -> None:
    """
    Walk up the hierarchy and decrement task counts for all ancestors.

    Args:
        hierarchy: The spec hierarchy dict
        node_id: Starting node ID (the parent of the removed node)
        total_delta: Amount to subtract from total_tasks
        completed_delta: Amount to subtract from completed_tasks
    """
    current_id = node_id
    visited = set()

    while current_id:
        if current_id in visited:
            break
        visited.add(current_id)

        node = hierarchy.get(current_id)
        if not node:
            break

        # Decrement counts
        current_total = node.get("total_tasks", 0)
        current_completed = node.get("completed_tasks", 0)
        node["total_tasks"] = max(0, current_total - total_delta)
        node["completed_tasks"] = max(0, current_completed - completed_delta)

        # Move to parent
        current_id = node.get("parent")


def _remove_dependency_references(hierarchy: Dict[str, Any], removed_ids: List[str]) -> None:
    """
    Remove references to deleted nodes from all dependency lists.

    Args:
        hierarchy: The spec hierarchy dict
        removed_ids: List of node IDs being removed
    """
    removed_set = set(removed_ids)

    for node_id, node in hierarchy.items():
        deps = node.get("dependencies")
        if not deps or not isinstance(deps, dict):
            continue

        for key in ("blocks", "blocked_by", "depends"):
            dep_list = deps.get(key)
            if isinstance(dep_list, list):
                deps[key] = [d for d in dep_list if d not in removed_set]


def remove_task(
    spec_id: str,
    task_id: str,
    cascade: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Remove a task from a specification's hierarchy.

    Removes the specified task and optionally all its descendants.
    Updates ancestor task counts and cleans up dependency references.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to remove.
        cascade: If True, also remove all child tasks recursively.
            If False and task has children, returns an error.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "children_removed": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only remove task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        hint = " Use `authoring action=\"phase-remove\"` instead." if task_type == "phase" else ""
        return None, f"Cannot remove node type '{task_type}'. Only task, subtask, or verify nodes can be removed.{hint}"

    # Check for children
    children = task.get("children", [])
    if isinstance(children, list) and len(children) > 0 and not cascade:
        return None, f"Task '{task_id}' has {len(children)} children. Use cascade=True to remove them."

    # Collect all nodes to remove
    nodes_to_remove = [task_id]
    if cascade:
        nodes_to_remove.extend(_collect_descendants(hierarchy, task_id))

    # Count tasks being removed; nodes_to_remove already includes the target
    # node itself, so no separate increment is needed (counting it again would
    # double-count the removed task).
    total_removed, completed_removed = _count_tasks_in_subtree(hierarchy, nodes_to_remove)

    # Get parent before removing
    parent_id = task.get("parent")

    # Remove nodes from hierarchy
    for node_id in nodes_to_remove:
        if node_id in hierarchy:
            del hierarchy[node_id]

    # Update parent's children list
    if parent_id:
        parent = hierarchy.get(parent_id)
        if parent:
            parent_children = parent.get("children", [])
            if isinstance(parent_children, list) and task_id in parent_children:
                parent_children.remove(task_id)
                parent["children"] = parent_children

        # Update ancestor task counts
        _decrement_ancestor_counts(hierarchy, parent_id, total_removed, completed_removed)

    # Clean up dependency references
    _remove_dependency_references(hierarchy, nodes_to_remove)

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    return {
        "task_id": task_id,
        "spec_id": spec_id,
        "cascade": cascade,
        "children_removed": len(nodes_to_remove) - 1,  # Exclude the target itself
        "total_tasks_removed": total_removed,
    }, None
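
# Illustrative usage (editor's sketch; IDs and path are hypothetical).
# Without cascade=True the call fails if the task has children:
#
#   result, err = remove_task("my-spec", "task-1-2", cascade=True,
#                             specs_dir=Path("./specs"))
#   # result["children_removed"] counts removed descendants only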


# Valid complexity levels for update_estimate
COMPLEXITY_LEVELS = ("low", "medium", "high")


def update_estimate(
    spec_id: str,
    task_id: str,
    estimated_hours: Optional[float] = None,
    complexity: Optional[str] = None,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Update effort/time estimates for a task.

    Updates the estimated_hours and/or complexity metadata for a task.
    At least one of estimated_hours or complexity must be provided.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to update.
        estimated_hours: Optional estimated hours (float, must be >= 0).
        complexity: Optional complexity level (low, medium, high).
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "hours": ..., "complexity": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate at least one field is provided
    if estimated_hours is None and complexity is None:
        return None, "At least one of estimated_hours or complexity must be provided"

    # Validate estimated_hours
    if estimated_hours is not None:
        if not isinstance(estimated_hours, (int, float)):
            return None, "estimated_hours must be a number"
        if estimated_hours < 0:
            return None, "estimated_hours must be >= 0"

    # Validate complexity
    if complexity is not None:
        complexity = complexity.lower().strip()
        if complexity not in COMPLEXITY_LEVELS:
            return None, f"Invalid complexity '{complexity}'. Must be one of: {', '.join(COMPLEXITY_LEVELS)}"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only update task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot update estimates for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."

    # Get or create metadata
    metadata = task.get("metadata")
    if metadata is None:
        metadata = {}
        task["metadata"] = metadata

    # Track previous values for response
    previous_hours = metadata.get("estimated_hours")
    previous_complexity = metadata.get("complexity")

    # Update fields
    if estimated_hours is not None:
        metadata["estimated_hours"] = float(estimated_hours)

    if complexity is not None:
        metadata["complexity"] = complexity

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification"

    result: Dict[str, Any] = {
        "spec_id": spec_id,
        "task_id": task_id,
    }

    if estimated_hours is not None:
        result["hours"] = float(estimated_hours)
        result["previous_hours"] = previous_hours

    if complexity is not None:
        result["complexity"] = complexity
        result["previous_complexity"] = previous_complexity

    return result, None
|
|
1280
|
+
|
|
1281
|
+
|
|
1282
|
+
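
# Illustrative usage sketch (not part of the released module). The enclosing
# function's def line falls outside this hunk, so the name
# `update_task_estimates` below is an assumption, and the IDs are placeholders.
def _example_update_estimates() -> None:
    result, error = update_task_estimates(  # hypothetical name; see note above
        spec_id="spec-001",
        task_id="task-1-1",
        estimated_hours=4.5,
        complexity="medium",
    )
    if error is None:
        # Previous values are echoed back alongside the new ones.
        print(result["hours"], result["previous_hours"])
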
# Valid verification types for update_task_metadata
VERIFICATION_TYPES = ("run-tests", "fidelity", "manual")

# Valid task categories
TASK_CATEGORIES = CATEGORIES

# Valid dependency types for manage_task_dependency
DEPENDENCY_TYPES = ("blocks", "blocked_by", "depends")

# Valid requirement types for update_task_requirements
REQUIREMENT_TYPES = ("acceptance", "technical", "constraint")

# Maximum number of requirements per task (to prevent unbounded growth)
MAX_REQUIREMENTS_PER_TASK = 50

def _would_create_circular_dependency(
    hierarchy: Dict[str, Any],
    source_id: str,
    target_id: str,
    dep_type: str,
) -> bool:
    """
    Check if adding a dependency would create a circular reference.

    For blocking dependencies:
    - Adding A blocks B means B is blocked_by A
    - Circular if B already blocks A (directly or transitively)

    Uses breadth-first search to detect cycles in the dependency graph.

    Args:
        hierarchy: The spec hierarchy dict
        source_id: Source task ID
        target_id: Target task ID
        dep_type: Type of dependency being added

    Returns:
        True if adding this dependency would create a cycle
    """
    if source_id == target_id:
        return True

    # For "blocks": source blocks target, so target cannot already block source
    # For "blocked_by": source is blocked_by target, so source cannot already block target
    # For "depends": soft dependency, check for cycles in depends chain

    if dep_type == "blocks":
        # If source blocks target, check if target already blocks source (transitively)
        # i.e., walk from target's "blocks" chain to see if we reach source
        return _can_reach_via_dependency(hierarchy, target_id, source_id, "blocks")
    elif dep_type == "blocked_by":
        # If source is blocked_by target, check if source already blocks target (transitively)
        return _can_reach_via_dependency(hierarchy, source_id, target_id, "blocks")
    elif dep_type == "depends":
        # Check for cycles in depends chain
        return _can_reach_via_dependency(hierarchy, target_id, source_id, "depends")

    return False

def _can_reach_via_dependency(
    hierarchy: Dict[str, Any],
    start_id: str,
    target_id: str,
    dep_key: str,
) -> bool:
    """
    Check if target_id can be reached from start_id via dependency chains.

    Uses BFS to traverse the dependency graph.

    Args:
        hierarchy: The spec hierarchy dict
        start_id: Starting node ID
        target_id: Target node ID to find
        dep_key: Which dependency list to follow ("blocks", "blocked_by", "depends")

    Returns:
        True if target_id is reachable from start_id
    """
    visited = set()
    queue = [start_id]

    while queue:
        current_id = queue.pop(0)
        if current_id in visited:
            continue
        visited.add(current_id)

        if current_id == target_id:
            return True

        node = hierarchy.get(current_id)
        if not node:
            continue

        deps = node.get("dependencies", {})
        next_ids = deps.get(dep_key, [])
        if isinstance(next_ids, list):
            for next_id in next_ids:
                if next_id not in visited:
                    queue.append(next_id)

    return False

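
# Minimal sketch of the cycle check on a toy hierarchy (illustrative only;
# the node shape mirrors the "dependencies" layout the helpers above expect).
def _example_cycle_check() -> None:
    hierarchy = {
        "t1": {"type": "task", "dependencies": {"blocks": ["t2"], "blocked_by": [], "depends": []}},
        "t2": {"type": "task", "dependencies": {"blocks": [], "blocked_by": ["t1"], "depends": []}},
    }
    # t1 already blocks t2, so adding "t2 blocks t1" would close a cycle.
    assert _would_create_circular_dependency(hierarchy, "t2", "t1", "blocks") is True
    # A fresh third task can still block t1 safely.
    hierarchy["t3"] = {"type": "task", "dependencies": {"blocks": [], "blocked_by": [], "depends": []}}
    assert _would_create_circular_dependency(hierarchy, "t3", "t1", "blocks") is False
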
def manage_task_dependency(
    spec_id: str,
    source_task_id: str,
    target_task_id: str,
    dependency_type: str,
    action: str = "add",
    dry_run: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add or remove a dependency relationship between two tasks.

    Manages blocks, blocked_by, and depends relationships between tasks.
    Updates both source and target tasks atomically.

    Dependency types:
    - blocks: Source task blocks target (target cannot start until source completes)
    - blocked_by: Source task is blocked by target (source cannot start until target completes)
    - depends: Soft dependency (informational, doesn't block)

    When adding:
    - blocks: Adds target to source.blocks AND source to target.blocked_by
    - blocked_by: Adds target to source.blocked_by AND source to target.blocks
    - depends: Only adds target to source.depends (soft, no reciprocal)

    Args:
        spec_id: Specification ID containing the tasks.
        source_task_id: Source task ID.
        target_task_id: Target task ID.
        dependency_type: Type of dependency (blocks, blocked_by, depends).
        action: Action to perform (add or remove). Default: add.
        dry_run: If True, validate and return preview without saving changes.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"source_task": ..., "target_task": ..., "dependency_type": ..., ...}, None)
        On failure: (None, "error message")
    """
    # Validate action
    if action not in ("add", "remove"):
        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"

    # Validate dependency_type
    if dependency_type not in DEPENDENCY_TYPES:
        return None, f"Invalid dependency_type '{dependency_type}'. Must be one of: {', '.join(DEPENDENCY_TYPES)}"

    # Prevent self-reference
    if source_task_id == target_task_id:
        return None, f"Cannot add dependency: task '{source_task_id}' cannot depend on itself"

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate source task exists
    source_task = hierarchy.get(source_task_id)
    if source_task is None:
        return None, f"Source task '{source_task_id}' not found"

    # Validate source task type
    source_type = source_task.get("type")
    if source_type not in ("task", "subtask", "verify", "phase"):
        return None, f"Cannot manage dependencies for node type '{source_type}'"

    # Validate target task exists
    target_task = hierarchy.get(target_task_id)
    if target_task is None:
        return None, f"Target task '{target_task_id}' not found"

    # Validate target task type
    target_type = target_task.get("type")
    if target_type not in ("task", "subtask", "verify", "phase"):
        return None, f"Cannot add dependency to node type '{target_type}'"

    # Get or create dependencies for both tasks
    source_deps = source_task.get("dependencies")
    if source_deps is None:
        source_deps = {"blocks": [], "blocked_by": [], "depends": []}
        source_task["dependencies"] = source_deps

    target_deps = target_task.get("dependencies")
    if target_deps is None:
        target_deps = {"blocks": [], "blocked_by": [], "depends": []}
        target_task["dependencies"] = target_deps

    # Ensure lists exist
    for key in DEPENDENCY_TYPES:
        if not isinstance(source_deps.get(key), list):
            source_deps[key] = []
        if not isinstance(target_deps.get(key), list):
            target_deps[key] = []

    # Determine the reciprocal relationship
    reciprocal_type = None
    if dependency_type == "blocks":
        reciprocal_type = "blocked_by"
    elif dependency_type == "blocked_by":
        reciprocal_type = "blocks"
    # depends has no reciprocal

    if action == "add":
        # Check for circular dependencies
        if _would_create_circular_dependency(hierarchy, source_task_id, target_task_id, dependency_type):
            return None, f"Cannot add dependency: would create circular reference between '{source_task_id}' and '{target_task_id}'"

        # Check if dependency already exists
        if target_task_id in source_deps[dependency_type]:
            return None, f"Dependency already exists: {source_task_id} {dependency_type} {target_task_id}"

        # Add the dependency
        source_deps[dependency_type].append(target_task_id)

        # Add reciprocal if applicable (blocks <-> blocked_by)
        if reciprocal_type:
            if source_task_id not in target_deps[reciprocal_type]:
                target_deps[reciprocal_type].append(source_task_id)

    elif action == "remove":
        # Check if dependency exists
        if target_task_id not in source_deps[dependency_type]:
            return None, f"Dependency does not exist: {source_task_id} {dependency_type} {target_task_id}"

        # Remove the dependency
        source_deps[dependency_type].remove(target_task_id)

        # Remove reciprocal if applicable
        if reciprocal_type and source_task_id in target_deps[reciprocal_type]:
            target_deps[reciprocal_type].remove(source_task_id)

    # Build result
    result = {
        "spec_id": spec_id,
        "source_task": source_task_id,
        "target_task": target_task_id,
        "dependency_type": dependency_type,
        "action": action,
        "dry_run": dry_run,
        "source_dependencies": {
            "blocks": source_deps["blocks"],
            "blocked_by": source_deps["blocked_by"],
            "depends": source_deps["depends"],
        },
        "target_dependencies": {
            "blocks": target_deps["blocks"],
            "blocked_by": target_deps["blocked_by"],
            "depends": target_deps["depends"],
        },
    }

    # Save the spec (unless dry_run)
    if dry_run:
        result["message"] = "Dry run - changes not saved"
    else:
        success = save_spec(spec_id, spec_data, specs_dir)
        if not success:
            return None, "Failed to save specification"

    return result, None

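
# Usage sketch for the helper above (illustrative; "spec-001" and the task IDs
# are placeholders and assume a spec already on disk).
def _example_add_dependency() -> None:
    result, error = manage_task_dependency(
        spec_id="spec-001",
        source_task_id="task-2",
        target_task_id="task-1",
        dependency_type="blocked_by",
        dry_run=True,  # validate and preview only; nothing is saved
    )
    if error:
        print(f"rejected: {error}")
    else:
        # The reciprocal edge is mirrored on the target task.
        print(result["source_dependencies"]["blocked_by"])  # ["task-1"]
        print(result["target_dependencies"]["blocks"])      # ["task-2"]
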
def _is_descendant(hierarchy: Dict[str, Any], ancestor_id: str, potential_descendant_id: str) -> bool:
    """
    Check if a node is a descendant of another node.

    Used to prevent circular references when moving tasks.

    Args:
        hierarchy: The spec hierarchy dict
        ancestor_id: The potential ancestor node ID
        potential_descendant_id: The node to check if it's a descendant

    Returns:
        True if potential_descendant_id is a descendant of ancestor_id
    """
    if ancestor_id == potential_descendant_id:
        return True

    descendants = _collect_descendants(hierarchy, ancestor_id)
    return potential_descendant_id in descendants

def _get_phase_for_node(hierarchy: Dict[str, Any], node_id: str) -> Optional[str]:
    """
    Walk up the hierarchy to find the phase containing a node.

    Args:
        hierarchy: The spec hierarchy dict
        node_id: The node to find the phase for

    Returns:
        Phase ID if found, None otherwise
    """
    current_id = node_id
    visited = set()

    while current_id:
        if current_id in visited:
            break
        visited.add(current_id)

        node = hierarchy.get(current_id)
        if not node:
            break

        if node.get("type") == "phase":
            return current_id

        current_id = node.get("parent")

    return None

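
# Quick sketch of the phase walk on a toy hierarchy (illustrative only).
def _example_phase_lookup() -> None:
    hierarchy = {
        "phase-1": {"type": "phase", "parent": None},
        "task-1": {"type": "task", "parent": "phase-1"},
        "subtask-1": {"type": "subtask", "parent": "task-1"},
    }
    # The walk follows parent links until it hits a node of type "phase".
    assert _get_phase_for_node(hierarchy, "subtask-1") == "phase-1"
    assert _get_phase_for_node(hierarchy, "missing") is None
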
def _check_cross_phase_dependencies(
    hierarchy: Dict[str, Any],
    task_id: str,
    old_phase_id: Optional[str],
    new_phase_id: Optional[str],
) -> List[str]:
    """
    Check for potential dependency issues when moving across phases.

    Args:
        hierarchy: The spec hierarchy dict
        task_id: The task being moved
        old_phase_id: The original phase ID
        new_phase_id: The target phase ID

    Returns:
        List of warning messages about potential dependency issues
    """
    warnings = []

    if old_phase_id == new_phase_id:
        return warnings

    task = hierarchy.get(task_id)
    if not task:
        return warnings

    deps = task.get("dependencies", {})

    # Check blocked_by dependencies
    blocked_by = deps.get("blocked_by", [])
    for dep_id in blocked_by:
        dep_phase = _get_phase_for_node(hierarchy, dep_id)
        if dep_phase and dep_phase != new_phase_id:
            dep_node = hierarchy.get(dep_id, {})
            warnings.append(
                f"Task '{task_id}' is blocked by '{dep_id}' ({dep_node.get('title', '')}) "
                f"which is in a different phase ('{dep_phase}')"
            )

    # Check blocks dependencies
    blocks = deps.get("blocks", [])
    for dep_id in blocks:
        dep_phase = _get_phase_for_node(hierarchy, dep_id)
        if dep_phase and dep_phase != new_phase_id:
            dep_node = hierarchy.get(dep_id, {})
            warnings.append(
                f"Task '{task_id}' blocks '{dep_id}' ({dep_node.get('title', '')}) "
                f"which is in a different phase ('{dep_phase}')"
            )

    return warnings

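
# Sketch: a task whose blocker stays behind in another phase triggers a
# warning (toy data; illustrative only).
def _example_cross_phase_warnings() -> None:
    hierarchy = {
        "phase-1": {"type": "phase", "parent": None},
        "phase-2": {"type": "phase", "parent": None},
        "blocker": {"type": "task", "parent": "phase-1"},
        "mover": {"type": "task", "parent": "phase-1",
                  "dependencies": {"blocked_by": ["blocker"], "blocks": []}},
    }
    msgs = _check_cross_phase_dependencies(hierarchy, "mover", "phase-1", "phase-2")
    print(msgs)  # one warning: 'blocker' remains in phase 'phase-1'
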
def update_task_metadata(
    spec_id: str,
    task_id: str,
    title: Optional[str] = None,
    file_path: Optional[str] = None,
    description: Optional[str] = None,
    acceptance_criteria: Optional[List[str]] = None,
    task_category: Optional[str] = None,
    actual_hours: Optional[float] = None,
    status_note: Optional[str] = None,
    verification_type: Optional[str] = None,
    command: Optional[str] = None,
    custom_metadata: Optional[Dict[str, Any]] = None,
    dry_run: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Update arbitrary metadata fields on a task.

    Updates various metadata fields on a task including title, file path, description,
    category, hours, notes, verification type, and custom fields.
    At least one field must be provided.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to update.
        title: Optional new title for the task (cannot be empty/whitespace-only).
        file_path: Optional file path associated with the task.
        description: Optional task description.
        acceptance_criteria: Optional acceptance criteria list.
        task_category: Optional task category (implementation, refactoring, investigation, decision, research).
        actual_hours: Optional actual hours spent on task (must be >= 0).
        status_note: Optional status note or completion note.
        verification_type: Optional verification type (run-tests, fidelity, manual).
        command: Optional command executed for the task.
        custom_metadata: Optional dict of custom metadata fields to merge.
        dry_run: If True, validate and return preview without saving changes.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "fields_updated": [...], "previous_values": {...}, ...}, None)
        On failure: (None, "error message")
    """
    # Validate title if provided (cannot be empty/whitespace-only)
    title_update: Optional[str] = None
    if title is not None:
        title_stripped = title.strip()
        if not title_stripped:
            return None, "Title cannot be empty or whitespace-only"
        title_update = title_stripped

    # Collect all provided metadata fields
    updates: Dict[str, Any] = {}
    if file_path is not None:
        updates["file_path"] = file_path.strip() if file_path else None
    if description is not None:
        updates["description"] = description.strip() if description else None
    if acceptance_criteria is not None:
        updates["acceptance_criteria"] = acceptance_criteria
    if task_category is not None:
        updates["task_category"] = task_category
    if actual_hours is not None:
        updates["actual_hours"] = actual_hours
    if status_note is not None:
        updates["status_note"] = status_note.strip() if status_note else None
    if verification_type is not None:
        updates["verification_type"] = verification_type
    if command is not None:
        updates["command"] = command.strip() if command else None

    # Validate at least one field is provided (title or metadata fields)
    if title_update is None and not updates and not custom_metadata:
        return None, "At least one field must be provided (title or metadata fields)"

    # Validate actual_hours
    if actual_hours is not None:
        if not isinstance(actual_hours, (int, float)):
            return None, "actual_hours must be a number"
        if actual_hours < 0:
            return None, "actual_hours must be >= 0"

    if acceptance_criteria is not None:
        if not isinstance(acceptance_criteria, list):
            return None, "acceptance_criteria must be a list of strings"
        cleaned_criteria = []
        for item in acceptance_criteria:
            if not isinstance(item, str) or not item.strip():
                return None, "acceptance_criteria must be a list of non-empty strings"
            cleaned_criteria.append(item.strip())
        updates["acceptance_criteria"] = cleaned_criteria

    # Validate task_category
    if task_category is not None:
        task_category_lower = task_category.lower().strip()
        if task_category_lower not in TASK_CATEGORIES:
            return None, f"Invalid task_category '{task_category}'. Must be one of: {', '.join(TASK_CATEGORIES)}"
        updates["task_category"] = task_category_lower

    # Validate verification_type
    if verification_type is not None:
        verification_type_lower = verification_type.lower().strip()
        if verification_type_lower not in VERIFICATION_TYPES:
            return None, f"Invalid verification_type '{verification_type}'. Must be one of: {', '.join(VERIFICATION_TYPES)}"
        updates["verification_type"] = verification_type_lower

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only update task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot update metadata for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."

    # Get or create metadata
    metadata = task.get("metadata")
    if metadata is None:
        metadata = {}
        task["metadata"] = metadata

    # Track which fields were updated and their previous values
    fields_updated = []
    previous_values: Dict[str, Any] = {}

    # Apply title update (core field on task, not metadata)
    if title_update is not None:
        previous_values["title"] = task.get("title")
        task["title"] = title_update
        fields_updated.append("title")

    # Apply metadata updates
    for key, value in updates.items():
        if value is not None or key in metadata:
            previous_values[key] = metadata.get(key)
            metadata[key] = value
            fields_updated.append(key)

    # Apply custom metadata
    if custom_metadata and isinstance(custom_metadata, dict):
        for key, value in custom_metadata.items():
            # Don't allow overwriting core fields via custom_metadata
            if key not in ("type", "title", "status", "parent", "children", "dependencies"):
                if key not in previous_values:
                    previous_values[key] = metadata.get(key)
                metadata[key] = value
                if key not in fields_updated:
                    fields_updated.append(key)

    # Build result
    result = {
        "spec_id": spec_id,
        "task_id": task_id,
        "fields_updated": fields_updated,
        "previous_values": previous_values,
        "dry_run": dry_run,
    }

    # Save the spec (unless dry_run)
    if dry_run:
        result["message"] = "Dry run - changes not saved"
    else:
        success = save_spec(spec_id, spec_data, specs_dir)
        if not success:
            return None, "Failed to save specification"

    return result, None

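
# Usage sketch (illustrative; IDs are placeholders). A dry run reports the
# would-be changes via fields_updated/previous_values without touching disk.
def _example_update_metadata() -> None:
    result, error = update_task_metadata(
        spec_id="spec-001",
        task_id="task-1-1",
        title="Implement retry logic",
        verification_type="run-tests",
        custom_metadata={"ticket": "ENG-123"},  # merged; core fields are protected
        dry_run=True,
    )
    if not error:
        print(result["fields_updated"])   # ["title", "verification_type", "ticket"]
        print(result["previous_values"])
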
def move_task(
    spec_id: str,
    task_id: str,
    new_parent: Optional[str] = None,
    position: Optional[int] = None,
    dry_run: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str], List[str]]:
    """
    Move a task to a new position within its parent or to a different parent.

    Supports two modes:
    1. Reorder within parent: only specify position (new_parent=None)
    2. Reparent to different phase/task: specify new_parent, optionally position

    Updates task counts on affected parents. Prevents circular references.
    Emits warnings for cross-phase moves that might affect dependencies.

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to move.
        new_parent: Optional new parent ID (phase or task). If None, reorders
            within current parent.
        position: Optional position in parent's children list (1-based).
            If None, appends to end.
        dry_run: If True, validate and return preview without saving changes.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message, warnings_list).
        On success: ({"task_id": ..., "old_parent": ..., "new_parent": ..., ...}, None, [warnings])
        On failure: (None, "error message", [])
    """
    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.", []

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found", []

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'", []

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found", []

    # Validate task type (can only move task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot move node type '{task_type}'. Only task, subtask, or verify nodes can be moved.", []

    old_parent_id = task.get("parent")
    if not old_parent_id:
        return None, f"Task '{task_id}' has no parent and cannot be moved", []

    old_parent = hierarchy.get(old_parent_id)
    if not old_parent:
        return None, f"Task's current parent '{old_parent_id}' not found", []

    # Determine effective new parent
    effective_new_parent_id = new_parent if new_parent else old_parent_id
    is_reparenting = effective_new_parent_id != old_parent_id

    # Validate new parent exists
    new_parent_node = hierarchy.get(effective_new_parent_id)
    if new_parent_node is None:
        return None, f"Target parent '{effective_new_parent_id}' not found", []

    # Validate new parent type (can add tasks to phases, groups, or tasks)
    new_parent_type = new_parent_node.get("type")
    if new_parent_type not in ("phase", "group", "task"):
        return None, f"Cannot move to node type '{new_parent_type}'. Target must be a phase, group, or task.", []

    # Prevent self-reference
    if task_id == effective_new_parent_id:
        return None, f"Task '{task_id}' cannot be moved to itself", []

    # Prevent circular reference (can't move a task to one of its descendants)
    if _is_descendant(hierarchy, task_id, effective_new_parent_id):
        return None, f"Cannot move '{task_id}' to '{effective_new_parent_id}': would create circular reference", []

    # Get current children lists
    old_children = old_parent.get("children", [])
    if not isinstance(old_children, list):
        old_children = []

    new_children = new_parent_node.get("children", []) if is_reparenting else old_children.copy()
    if not isinstance(new_children, list):
        new_children = []

    # Validate position
    # Remove task from old position first to calculate valid range
    old_position = None
    if task_id in old_children:
        old_position = old_children.index(task_id)

    # For position validation, consider the list after removal
    max_position = len(new_children) if is_reparenting else len(new_children) - 1
    if position is not None:
        # Convert to 0-based for internal use (user provides 1-based)
        position_0based = position - 1
        if position_0based < 0 or position_0based > max_position:
            return None, f"Invalid position {position}. Must be 1-{max_position + 1}", []
    else:
        # Default: append to end
        position_0based = max_position

    # Check for cross-phase dependency warnings
    warnings: List[str] = []
    if is_reparenting:
        old_phase = _get_phase_for_node(hierarchy, task_id)
        new_phase = _get_phase_for_node(hierarchy, effective_new_parent_id)
        if new_phase != old_phase:
            warnings = _check_cross_phase_dependencies(hierarchy, task_id, old_phase, new_phase)

    # Calculate task counts for the subtree being moved (including the task itself)
    descendants = _collect_descendants(hierarchy, task_id)
    all_moved_nodes = [task_id] + descendants
    total_moved, completed_moved = _count_tasks_in_subtree(hierarchy, all_moved_nodes)

    # Build result for dry run or actual move
    result: Dict[str, Any] = {
        "spec_id": spec_id,
        "task_id": task_id,
        "old_parent": old_parent_id,
        "new_parent": effective_new_parent_id,
        "old_position": old_position + 1 if old_position is not None else None,  # 1-based for output
        "new_position": position_0based + 1,  # 1-based for output
        "is_reparenting": is_reparenting,
        "tasks_in_subtree": total_moved,
        "dry_run": dry_run,
    }

    if dry_run:
        result["message"] = "Dry run - changes not saved"
        if warnings:
            result["dependency_warnings"] = warnings
        return result, None, warnings

    # Perform the move

    # 1. Remove from old parent's children list
    if task_id in old_children:
        old_children.remove(task_id)
        old_parent["children"] = old_children

    # 2. Add to new parent's children list at specified position
    if is_reparenting:
        # Fresh list from new parent
        new_children = new_parent_node.get("children", [])
        if not isinstance(new_children, list):
            new_children = []
    else:
        # Same parent, already removed
        new_children = old_children

    # Insert at position
    if position_0based >= len(new_children):
        new_children.append(task_id)
    else:
        new_children.insert(position_0based, task_id)

    if is_reparenting:
        new_parent_node["children"] = new_children
    else:
        old_parent["children"] = new_children

    # 3. Update task's parent reference
    if is_reparenting:
        task["parent"] = effective_new_parent_id

    # 4. Update ancestor task counts
    # Decrement old parent's ancestors
    _decrement_ancestor_counts(hierarchy, old_parent_id, total_moved, completed_moved)
    # Increment new parent's ancestors
    _update_ancestor_counts(hierarchy, effective_new_parent_id, delta=total_moved)
    # Update completed counts for new ancestors
    if completed_moved > 0:
        current_id = effective_new_parent_id
        visited = set()
        while current_id:
            if current_id in visited:
                break
            visited.add(current_id)
            node = hierarchy.get(current_id)
            if not node:
                break
            current_completed = node.get("completed_tasks", 0)
            node["completed_tasks"] = current_completed + completed_moved
            current_id = node.get("parent")

    # Save the spec
    success = save_spec(spec_id, spec_data, specs_dir)
    if not success:
        return None, "Failed to save specification", []

    if warnings:
        result["dependency_warnings"] = warnings

    return result, None, warnings

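
# Usage sketch (illustrative placeholders). Note the 3-tuple return: the third
# element carries cross-phase dependency warnings even on success.
def _example_move_task() -> None:
    result, error, warnings = move_task(
        spec_id="spec-001",
        task_id="task-1-3",
        new_parent="phase-2",
        position=1,       # 1-based; None appends to the end
        dry_run=True,
    )
    if error:
        print(f"move rejected: {error}")
    else:
        print(result["is_reparenting"], result["new_position"])
        for w in warnings:
            print("warning:", w)
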
def _generate_requirement_id(existing_requirements: List[Dict[str, Any]]) -> str:
    """
    Generate a unique requirement ID based on existing requirements.

    Args:
        existing_requirements: List of existing requirement dictionaries

    Returns:
        New requirement ID string (e.g., "req-1", "req-2")
    """
    if not existing_requirements:
        return "req-1"

    max_index = 0
    pattern = re.compile(r"^req-(\d+)$")

    for req in existing_requirements:
        req_id = req.get("id", "")
        match = pattern.match(req_id)
        if match:
            index = int(match.group(1))
            max_index = max(max_index, index)

    return f"req-{max_index + 1}"

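
# The ID generator scans for the highest "req-N" and increments it; IDs that
# don't match the pattern are ignored rather than raising (illustrative only).
def _example_requirement_ids() -> None:
    assert _generate_requirement_id([]) == "req-1"
    existing = [{"id": "req-1"}, {"id": "req-7"}, {"id": "custom"}]
    assert _generate_requirement_id(existing) == "req-8"
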
def update_task_requirements(
    spec_id: str,
    task_id: str,
    action: str = "add",
    requirement_type: Optional[str] = None,
    text: Optional[str] = None,
    requirement_id: Optional[str] = None,
    dry_run: bool = False,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """
    Add or remove a structured requirement from a task's metadata.

    Requirements are stored in metadata.requirements as a list of objects:
    [{"id": "req-1", "type": "acceptance", "text": "..."}, ...]

    Each requirement has:
    - id: Auto-generated unique ID (e.g., "req-1", "req-2")
    - type: Requirement type (acceptance, technical, constraint)
    - text: Requirement description text

    Args:
        spec_id: Specification ID containing the task.
        task_id: Task ID to update.
        action: Action to perform ("add" or "remove"). Default: "add".
        requirement_type: Requirement type (required for add). One of:
            acceptance, technical, constraint.
        text: Requirement text (required for add).
        requirement_id: Requirement ID to remove (required for remove action).
        dry_run: If True, validate and return preview without saving changes.
        specs_dir: Path to specs directory (auto-detected if not provided).

    Returns:
        Tuple of (result_dict, error_message).
        On success: ({"task_id": ..., "action": ..., "requirement": {...}, ...}, None)
        On failure: (None, "error message")
    """
    # Validate action
    if action not in ("add", "remove"):
        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"

    # Validate parameters based on action
    if action == "add":
        if requirement_type is None:
            return None, "requirement_type is required for add action"
        if not isinstance(requirement_type, str):
            return None, "requirement_type must be a string"
        requirement_type = requirement_type.lower().strip()
        if requirement_type not in REQUIREMENT_TYPES:
            return None, f"Invalid requirement_type '{requirement_type}'. Must be one of: {', '.join(REQUIREMENT_TYPES)}"

        if text is None:
            return None, "text is required for add action"
        if not isinstance(text, str) or not text.strip():
            return None, "text must be a non-empty string"
        text = text.strip()

    elif action == "remove":
        if requirement_id is None:
            return None, "requirement_id is required for remove action"
        if not isinstance(requirement_id, str) or not requirement_id.strip():
            return None, "requirement_id must be a non-empty string"
        requirement_id = requirement_id.strip()

    # Find specs directory
    if specs_dir is None:
        specs_dir = find_specs_directory()

    if specs_dir is None:
        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."

    # Find and load the spec
    spec_path = find_spec_file(spec_id, specs_dir)
    if spec_path is None:
        return None, f"Specification '{spec_id}' not found"

    spec_data = load_spec(spec_id, specs_dir)
    if spec_data is None:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})

    # Validate task exists
    task = hierarchy.get(task_id)
    if task is None:
        return None, f"Task '{task_id}' not found"

    # Validate task type (can only update task, subtask, verify)
    task_type = task.get("type")
    if task_type not in ("task", "subtask", "verify"):
        return None, f"Cannot update requirements for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."

    # Get or create metadata
    metadata = task.get("metadata")
    if metadata is None:
        metadata = {}
        task["metadata"] = metadata

    # Get or create requirements list
    requirements = metadata.get("requirements")
    if requirements is None:
        requirements = []
        metadata["requirements"] = requirements
    elif not isinstance(requirements, list):
        requirements = []
        metadata["requirements"] = requirements

    # Perform the action
    if action == "add":
        # Check limit
        if len(requirements) >= MAX_REQUIREMENTS_PER_TASK:
            return None, f"Cannot add requirement: task already has {MAX_REQUIREMENTS_PER_TASK} requirements (maximum)"

        # Generate new requirement ID
        new_id = _generate_requirement_id(requirements)

        # Create requirement object
        new_requirement = {
            "id": new_id,
            "type": requirement_type,
            "text": text,
        }

        # Add to list
        requirements.append(new_requirement)

        result = {
            "spec_id": spec_id,
            "task_id": task_id,
            "action": "add",
            "requirement": new_requirement,
            "total_requirements": len(requirements),
            "dry_run": dry_run,
        }

    elif action == "remove":
        # Find requirement by ID
        found_index = None
        removed_requirement = None
        for i, req in enumerate(requirements):
            if req.get("id") == requirement_id:
                found_index = i
                removed_requirement = req
                break

        if found_index is None:
            return None, f"Requirement '{requirement_id}' not found in task '{task_id}'"

        # Remove from list
        requirements.pop(found_index)

        result = {
            "spec_id": spec_id,
            "task_id": task_id,
            "action": "remove",
            "requirement": removed_requirement,
            "total_requirements": len(requirements),
            "dry_run": dry_run,
        }

    # Save the spec (unless dry_run)
    if dry_run:
        result["message"] = "Dry run - changes not saved"
    else:
        success = save_spec(spec_id, spec_data, specs_dir)
        if not success:
            return None, "Failed to save specification"

    return result, None

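
# Usage sketch (illustrative placeholders): add a requirement, then remove it
# by the auto-generated ID echoed back in the result.
def _example_requirements_roundtrip() -> None:
    added, error = update_task_requirements(
        spec_id="spec-001",
        task_id="task-1-1",
        action="add",
        requirement_type="acceptance",
        text="Returns 429 after 5 failed attempts",
    )
    if not error:
        req_id = added["requirement"]["id"]  # e.g. "req-1"
        update_task_requirements(
            spec_id="spec-001",
            task_id="task-1-1",
            action="remove",
            requirement_id=req_id,
        )
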
# Valid statuses for batch filtering
BATCH_ALLOWED_STATUSES = {"pending", "in_progress", "completed", "blocked"}

# Safety constraints for batch operations
MAX_PATTERN_LENGTH = 256
DEFAULT_MAX_MATCHES = 100

def _match_tasks_for_batch(
    hierarchy: Dict[str, Any],
    *,
    status_filter: Optional[str] = None,
    parent_filter: Optional[str] = None,
    pattern: Optional[str] = None,
) -> List[str]:
    """Find tasks matching filter criteria (AND logic). Returns sorted task IDs."""
    compiled_pattern = None
    if pattern:
        try:
            compiled_pattern = re.compile(pattern, re.IGNORECASE)
        except re.error:
            return []

    matched = []
    target_types = {"task", "subtask", "verify"}

    valid_descendants: Optional[set] = None
    if parent_filter:
        parent_node = hierarchy.get(parent_filter)
        if not parent_node:
            return []
        valid_descendants = set()
        to_visit = list(parent_node.get("children", []))
        while to_visit:
            child_id = to_visit.pop()
            if child_id in valid_descendants:
                continue
            valid_descendants.add(child_id)
            child_node = hierarchy.get(child_id)
            if child_node:
                to_visit.extend(child_node.get("children", []))

    for node_id, node_data in hierarchy.items():
        if node_data.get("type") not in target_types:
            continue
        if status_filter and node_data.get("status") != status_filter:
            continue
        if valid_descendants is not None and node_id not in valid_descendants:
            continue
        if compiled_pattern:
            title = node_data.get("title", "")
            if not (compiled_pattern.search(title) or compiled_pattern.search(node_id)):
                continue
        matched.append(node_id)

    return sorted(matched)

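
# Matching sketch on a toy hierarchy: the filters AND together, and results
# come back sorted by node ID (illustrative only).
def _example_batch_matching() -> None:
    hierarchy = {
        "phase-1": {"type": "phase", "children": ["task-a", "task-b"]},
        "task-a": {"type": "task", "status": "pending", "title": "Wire up API", "children": []},
        "task-b": {"type": "task", "status": "completed", "title": "API docs", "children": []},
    }
    ids = _match_tasks_for_batch(
        hierarchy,
        status_filter="pending",
        parent_filter="phase-1",
        pattern="api",  # case-insensitive, matched against title and node ID
    )
    assert ids == ["task-a"]
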
def batch_update_tasks(
    spec_id: str,
    *,
    status_filter: Optional[str] = None,
    parent_filter: Optional[str] = None,
    pattern: Optional[str] = None,
    description: Optional[str] = None,
    file_path: Optional[str] = None,
    estimated_hours: Optional[float] = None,
    category: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    owners: Optional[List[str]] = None,
    custom_metadata: Optional[Dict[str, Any]] = None,
    dry_run: bool = False,
    max_matches: int = DEFAULT_MAX_MATCHES,
    specs_dir: Optional[Path] = None,
) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
    """Batch update metadata across tasks matching filters (AND logic)."""
    # Validate filters
    if not any([status_filter, parent_filter, pattern]):
        return None, "At least one filter must be provided: status_filter, parent_filter, or pattern"
    if status_filter and status_filter not in BATCH_ALLOWED_STATUSES:
        return None, f"Invalid status_filter '{status_filter}'. Must be one of: {sorted(BATCH_ALLOWED_STATUSES)}"
    if pattern:
        if not isinstance(pattern, str) or not pattern.strip():
            return None, "pattern must be a non-empty string"
        pattern = pattern.strip()
        if len(pattern) > MAX_PATTERN_LENGTH:
            return None, f"pattern exceeds maximum length of {MAX_PATTERN_LENGTH} characters"
        try:
            re.compile(pattern)
        except re.error as e:
            return None, f"Invalid regex pattern: {e}"
    if parent_filter:
        if not isinstance(parent_filter, str) or not parent_filter.strip():
            return None, "parent_filter must be a non-empty string"
        parent_filter = parent_filter.strip()

    # Collect metadata updates
    metadata_updates: Dict[str, Any] = {}
    if description is not None:
        metadata_updates["description"] = description.strip() if description else None
    if file_path is not None:
        metadata_updates["file_path"] = file_path.strip() if file_path else None
    if estimated_hours is not None:
        if not isinstance(estimated_hours, (int, float)) or estimated_hours < 0:
            return None, "estimated_hours must be a non-negative number"
        metadata_updates["estimated_hours"] = float(estimated_hours)
    if category is not None:
        metadata_updates["category"] = category.strip() if category else None
    if labels is not None:
        if not isinstance(labels, dict) or not all(isinstance(k, str) and isinstance(v, str) for k, v in labels.items()):
            return None, "labels must be a dict with string keys and values"
        metadata_updates["labels"] = labels
    if owners is not None:
        if not isinstance(owners, list) or not all(isinstance(o, str) for o in owners):
            return None, "owners must be a list of strings"
        metadata_updates["owners"] = owners
    if custom_metadata:
        if not isinstance(custom_metadata, dict):
            return None, "custom_metadata must be a dict"
        for key, value in custom_metadata.items():
            if key not in metadata_updates:
                metadata_updates[key] = value

    if not metadata_updates:
        return None, "At least one metadata field must be provided"
    if max_matches <= 0:
        return None, "max_matches must be a positive integer"

    # Load spec
    if specs_dir is None:
        specs_dir = find_specs_directory()
    if specs_dir is None:
        return None, "No specs directory found"
    spec_path = find_spec_file(spec_id, specs_dir)
    if not spec_path:
        return None, f"Specification '{spec_id}' not found"
    spec_data = load_spec(spec_id, specs_dir)
    if not spec_data:
        return None, f"Failed to load specification '{spec_id}'"

    hierarchy = spec_data.get("hierarchy", {})
    if parent_filter and parent_filter not in hierarchy:
        return None, f"Parent '{parent_filter}' not found in specification"

    matched_ids = _match_tasks_for_batch(hierarchy, status_filter=status_filter, parent_filter=parent_filter, pattern=pattern)
    warnings: List[str] = []
    skipped_ids = []
    if len(matched_ids) > max_matches:
        warnings.append(f"Found {len(matched_ids)} matches, limiting to {max_matches}")
        skipped_ids = matched_ids[max_matches:]
        matched_ids = matched_ids[:max_matches]

    if not matched_ids:
        return {"spec_id": spec_id, "matched_count": 0, "updated_count": 0, "skipped_count": len(skipped_ids),
                "nodes": [], "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
                "metadata_applied": metadata_updates, "dry_run": dry_run, "message": "No tasks matched"}, None

    # Capture originals and build result
    original_metadata: Dict[str, Dict[str, Any]] = {}
    updated_nodes: List[Dict[str, Any]] = []
    for node_id in matched_ids:
        node = hierarchy.get(node_id, {})
        existing_meta = node.get("metadata", {}) or {}
        original_metadata[node_id] = {k: existing_meta.get(k) for k in metadata_updates}
        diff = {k: {"old": original_metadata[node_id].get(k), "new": v} for k, v in metadata_updates.items() if original_metadata[node_id].get(k) != v}
        updated_nodes.append({"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys()), "diff": diff} if diff else
                             {"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys())})
        if not dry_run:
            if "metadata" not in node:
                node["metadata"] = {}
            node["metadata"].update(metadata_updates)

    if not dry_run:
        if not save_spec(spec_id, spec_data, specs_dir):
            for nid, orig in original_metadata.items():
                n = hierarchy.get(nid, {})
                if "metadata" in n:
                    for k, v in orig.items():
                        if v is None:
                            n["metadata"].pop(k, None)
                        else:
                            n["metadata"][k] = v
            return None, "Failed to save; changes rolled back"

    if len(matched_ids) > 50:
        warnings.append(f"Updated {len(matched_ids)} tasks")

    result = {"spec_id": spec_id, "matched_count": len(matched_ids), "updated_count": len(matched_ids) if not dry_run else 0,
              "skipped_count": len(skipped_ids), "nodes": updated_nodes, "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
              "metadata_applied": metadata_updates, "dry_run": dry_run}
    if warnings:
        result["warnings"] = warnings
    if skipped_ids:
        result["skipped_tasks"] = skipped_ids
    return result, None

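
# Usage sketch (illustrative placeholders): dry-run a bulk label update over
# every pending task under one phase, capped at 25 matches.
def _example_batch_update() -> None:
    result, error = batch_update_tasks(
        "spec-001",
        status_filter="pending",
        parent_filter="phase-1",
        labels={"team": "platform"},
        max_matches=25,
        dry_run=True,
    )
    if not error:
        # updated_count stays 0 on dry runs; per-node diffs show what would change.
        print(result["matched_count"], result["updated_count"])
        for node in result["nodes"]:
            print(node["node_id"], node.get("diff"))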