foundry-mcp 0.3.3-py3-none-any.whl → 0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- foundry_mcp/__init__.py +7 -1
- foundry_mcp/cli/commands/plan.py +10 -3
- foundry_mcp/cli/commands/review.py +19 -4
- foundry_mcp/cli/commands/specs.py +38 -208
- foundry_mcp/cli/output.py +3 -3
- foundry_mcp/config.py +235 -5
- foundry_mcp/core/ai_consultation.py +146 -9
- foundry_mcp/core/discovery.py +6 -6
- foundry_mcp/core/error_store.py +2 -2
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/llm_config.py +20 -2
- foundry_mcp/core/metrics_store.py +2 -2
- foundry_mcp/core/progress.py +70 -0
- foundry_mcp/core/prompts/fidelity_review.py +149 -4
- foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
- foundry_mcp/core/prompts/plan_review.py +5 -1
- foundry_mcp/core/providers/claude.py +6 -47
- foundry_mcp/core/providers/codex.py +6 -57
- foundry_mcp/core/providers/cursor_agent.py +3 -44
- foundry_mcp/core/providers/gemini.py +6 -57
- foundry_mcp/core/providers/opencode.py +35 -5
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +425 -0
- foundry_mcp/core/research/models.py +437 -0
- foundry_mcp/core/research/workflows/__init__.py +22 -0
- foundry_mcp/core/research/workflows/base.py +204 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +396 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/responses.py +450 -0
- foundry_mcp/core/spec.py +2438 -236
- foundry_mcp/core/task.py +1064 -19
- foundry_mcp/core/testing.py +512 -123
- foundry_mcp/core/validation.py +313 -42
- foundry_mcp/dashboard/components/charts.py +0 -57
- foundry_mcp/dashboard/launcher.py +11 -0
- foundry_mcp/dashboard/views/metrics.py +25 -35
- foundry_mcp/dashboard/views/overview.py +1 -65
- foundry_mcp/resources/specs.py +25 -25
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +33 -5
- foundry_mcp/server.py +38 -0
- foundry_mcp/tools/unified/__init__.py +4 -2
- foundry_mcp/tools/unified/authoring.py +2423 -267
- foundry_mcp/tools/unified/documentation_helpers.py +69 -6
- foundry_mcp/tools/unified/environment.py +235 -6
- foundry_mcp/tools/unified/error.py +18 -1
- foundry_mcp/tools/unified/lifecycle.py +8 -0
- foundry_mcp/tools/unified/plan.py +113 -1
- foundry_mcp/tools/unified/research.py +658 -0
- foundry_mcp/tools/unified/review.py +370 -16
- foundry_mcp/tools/unified/spec.py +367 -0
- foundry_mcp/tools/unified/task.py +1163 -48
- foundry_mcp/tools/unified/test.py +69 -8
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/METADATA +7 -1
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/RECORD +60 -48
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/WHEEL +0 -0
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/entry_points.txt +0 -0
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/licenses/LICENSE +0 -0
foundry_mcp/core/task.py
CHANGED
@@ -8,7 +8,14 @@ from dataclasses import asdict
 from pathlib import Path
 from typing import Optional, Dict, Any, Tuple, List
 
-from foundry_mcp.core.spec import
+from foundry_mcp.core.spec import (
+    CATEGORIES,
+    load_spec,
+    save_spec,
+    find_spec_file,
+    find_specs_directory,
+    get_node,
+)
 from foundry_mcp.core.responses import success_response, error_response
 
 # Valid task types for add_task
@@ -687,6 +694,7 @@ def add_task(
     task_type: str = "task",
     estimated_hours: Optional[float] = None,
     position: Optional[int] = None,
+    file_path: Optional[str] = None,
     specs_dir: Optional[Path] = None,
 ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
     """
@@ -703,6 +711,7 @@ def add_task(
         task_type: Type of task (task, subtask, verify). Default: task.
         estimated_hours: Optional estimated hours.
         position: Optional position in parent's children list (0-based).
+        file_path: Optional file path associated with this task.
         specs_dir: Path to specs directory (auto-detected if not provided).
 
     Returns:
@@ -762,6 +771,8 @@ def add_task(
         metadata["description"] = description.strip()
     if estimated_hours is not None:
         metadata["estimated_hours"] = estimated_hours
+    if file_path:
+        metadata["file_path"] = file_path.strip()
 
     # Create the task node
     task_node = {
@@ -804,6 +815,7 @@ def add_task(
         "title": title,
         "type": task_type,
         "position": position if position is not None else len(existing_children) - 1,
+        "file_path": file_path.strip() if file_path else None,
     }, None
 
 
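The hunks above thread an optional file_path through add_task: the signature, the docstring, the stored metadata, and the returned summary. A minimal usage sketch under assumptions — the leading parameter names before task_type (spec ID, parent, title) are not shown in these hunks and are guessed here:

    # Hypothetical IDs and leading parameter names; only task_type,
    # estimated_hours, position, file_path, and specs_dir are confirmed above.
    result, err = add_task(
        spec_id="spec-001",
        parent_id="phase-1",
        title="Wire up config loader",
        task_type="task",
        estimated_hours=2.0,
        file_path="src/config.py",  # new in 0.7.0; stored as metadata["file_path"]
    )
    if err is None:
        print(result["file_path"])  # "src/config.py"
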
@@ -1139,56 +1151,453 @@ def update_estimate(
 
 
 # Valid verification types for update_task_metadata
-VERIFICATION_TYPES = ("
+VERIFICATION_TYPES = ("run-tests", "fidelity", "manual")
 
 # Valid task categories
-TASK_CATEGORIES =
+TASK_CATEGORIES = CATEGORIES
+
+# Valid dependency types for manage_task_dependency
+DEPENDENCY_TYPES = ("blocks", "blocked_by", "depends")
+
+# Valid requirement types for update_task_requirements
+REQUIREMENT_TYPES = ("acceptance", "technical", "constraint")
+
+# Maximum number of requirements per task (to prevent unbounded growth)
+MAX_REQUIREMENTS_PER_TASK = 50
+
+
+def _would_create_circular_dependency(
+    hierarchy: Dict[str, Any],
+    source_id: str,
+    target_id: str,
+    dep_type: str,
+) -> bool:
+    """
+    Check if adding a dependency would create a circular reference.
+
+    For blocking dependencies:
+    - Adding A blocks B means B is blocked_by A
+    - Circular if B already blocks A (directly or transitively)
+
+    Uses breadth-first search to detect cycles in the dependency graph.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        source_id: Source task ID
+        target_id: Target task ID
+        dep_type: Type of dependency being added
+
+    Returns:
+        True if adding this dependency would create a cycle
+    """
+    if source_id == target_id:
+        return True
+
+    # For "blocks": source blocks target, so target cannot already block source
+    # For "blocked_by": source is blocked_by target, so source cannot already block target
+    # For "depends": soft dependency, check for cycles in depends chain
+
+    if dep_type == "blocks":
+        # If source blocks target, check if target already blocks source (transitively)
+        # i.e., walk from target's "blocks" chain to see if we reach source
+        return _can_reach_via_dependency(hierarchy, target_id, source_id, "blocks")
+    elif dep_type == "blocked_by":
+        # If source is blocked_by target, check if source already blocks target (transitively)
+        return _can_reach_via_dependency(hierarchy, source_id, target_id, "blocks")
+    elif dep_type == "depends":
+        # Check for cycles in depends chain
+        return _can_reach_via_dependency(hierarchy, target_id, source_id, "depends")
+
+    return False
+
+
+def _can_reach_via_dependency(
+    hierarchy: Dict[str, Any],
+    start_id: str,
+    target_id: str,
+    dep_key: str,
+) -> bool:
+    """
+    Check if target_id can be reached from start_id via dependency chains.
+
+    Uses BFS to traverse the dependency graph.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        start_id: Starting node ID
+        target_id: Target node ID to find
+        dep_key: Which dependency list to follow ("blocks", "blocked_by", "depends")
+
+    Returns:
+        True if target_id is reachable from start_id
+    """
+    visited = set()
+    queue = [start_id]
+
+    while queue:
+        current_id = queue.pop(0)
+        if current_id in visited:
+            continue
+        visited.add(current_id)
+
+        if current_id == target_id:
+            return True
+
+        node = hierarchy.get(current_id)
+        if not node:
+            continue
+
+        deps = node.get("dependencies", {})
+        next_ids = deps.get(dep_key, [])
+        if isinstance(next_ids, list):
+            for next_id in next_ids:
+                if next_id not in visited:
+                    queue.append(next_id)
+
+    return False
+
+
+def manage_task_dependency(
+    spec_id: str,
+    source_task_id: str,
+    target_task_id: str,
+    dependency_type: str,
+    action: str = "add",
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Add or remove a dependency relationship between two tasks.
+
+    Manages blocks, blocked_by, and depends relationships between tasks.
+    Updates both source and target tasks atomically.
+
+    Dependency types:
+    - blocks: Source task blocks target (target cannot start until source completes)
+    - blocked_by: Source task is blocked by target (source cannot start until target completes)
+    - depends: Soft dependency (informational, doesn't block)
+
+    When adding:
+    - blocks: Adds target to source.blocks AND source to target.blocked_by
+    - blocked_by: Adds target to source.blocked_by AND source to target.blocks
+    - depends: Only adds target to source.depends (soft, no reciprocal)
+
+    Args:
+        spec_id: Specification ID containing the tasks.
+        source_task_id: Source task ID.
+        target_task_id: Target task ID.
+        dependency_type: Type of dependency (blocks, blocked_by, depends).
+        action: Action to perform (add or remove). Default: add.
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"source_task": ..., "target_task": ..., "dependency_type": ..., ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate action
+    if action not in ("add", "remove"):
+        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"
+
+    # Validate dependency_type
+    if dependency_type not in DEPENDENCY_TYPES:
+        return None, f"Invalid dependency_type '{dependency_type}'. Must be one of: {', '.join(DEPENDENCY_TYPES)}"
+
+    # Prevent self-reference
+    if source_task_id == target_task_id:
+        return None, f"Cannot add dependency: task '{source_task_id}' cannot depend on itself"
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate source task exists
+    source_task = hierarchy.get(source_task_id)
+    if source_task is None:
+        return None, f"Source task '{source_task_id}' not found"
+
+    # Validate source task type
+    source_type = source_task.get("type")
+    if source_type not in ("task", "subtask", "verify", "phase"):
+        return None, f"Cannot manage dependencies for node type '{source_type}'"
+
+    # Validate target task exists
+    target_task = hierarchy.get(target_task_id)
+    if target_task is None:
+        return None, f"Target task '{target_task_id}' not found"
+
+    # Validate target task type
+    target_type = target_task.get("type")
+    if target_type not in ("task", "subtask", "verify", "phase"):
+        return None, f"Cannot add dependency to node type '{target_type}'"
+
+    # Get or create dependencies for both tasks
+    source_deps = source_task.get("dependencies")
+    if source_deps is None:
+        source_deps = {"blocks": [], "blocked_by": [], "depends": []}
+        source_task["dependencies"] = source_deps
+
+    target_deps = target_task.get("dependencies")
+    if target_deps is None:
+        target_deps = {"blocks": [], "blocked_by": [], "depends": []}
+        target_task["dependencies"] = target_deps
+
+    # Ensure lists exist
+    for key in DEPENDENCY_TYPES:
+        if not isinstance(source_deps.get(key), list):
+            source_deps[key] = []
+        if not isinstance(target_deps.get(key), list):
+            target_deps[key] = []
+
+    # Determine the reciprocal relationship
+    reciprocal_type = None
+    if dependency_type == "blocks":
+        reciprocal_type = "blocked_by"
+    elif dependency_type == "blocked_by":
+        reciprocal_type = "blocks"
+    # depends has no reciprocal
+
+    if action == "add":
+        # Check for circular dependencies
+        if _would_create_circular_dependency(hierarchy, source_task_id, target_task_id, dependency_type):
+            return None, f"Cannot add dependency: would create circular reference between '{source_task_id}' and '{target_task_id}'"
+
+        # Check if dependency already exists
+        if target_task_id in source_deps[dependency_type]:
+            return None, f"Dependency already exists: {source_task_id} {dependency_type} {target_task_id}"
+
+        # Add the dependency
+        source_deps[dependency_type].append(target_task_id)
+
+        # Add reciprocal if applicable (blocks <-> blocked_by)
+        if reciprocal_type:
+            if source_task_id not in target_deps[reciprocal_type]:
+                target_deps[reciprocal_type].append(source_task_id)
+
+    elif action == "remove":
+        # Check if dependency exists
+        if target_task_id not in source_deps[dependency_type]:
+            return None, f"Dependency does not exist: {source_task_id} {dependency_type} {target_task_id}"
+
+        # Remove the dependency
+        source_deps[dependency_type].remove(target_task_id)
+
+        # Remove reciprocal if applicable
+        if reciprocal_type and source_task_id in target_deps[reciprocal_type]:
+            target_deps[reciprocal_type].remove(source_task_id)
+
+    # Build result
+    result = {
+        "spec_id": spec_id,
+        "source_task": source_task_id,
+        "target_task": target_task_id,
+        "dependency_type": dependency_type,
+        "action": action,
+        "dry_run": dry_run,
+        "source_dependencies": {
+            "blocks": source_deps["blocks"],
+            "blocked_by": source_deps["blocked_by"],
+            "depends": source_deps["depends"],
+        },
+        "target_dependencies": {
+            "blocks": target_deps["blocks"],
+            "blocked_by": target_deps["blocked_by"],
+            "depends": target_deps["depends"],
+        },
+    }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+def _is_descendant(hierarchy: Dict[str, Any], ancestor_id: str, potential_descendant_id: str) -> bool:
+    """
+    Check if a node is a descendant of another node.
+
+    Used to prevent circular references when moving tasks.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        ancestor_id: The potential ancestor node ID
+        potential_descendant_id: The node to check if it's a descendant
+
+    Returns:
+        True if potential_descendant_id is a descendant of ancestor_id
+    """
+    if ancestor_id == potential_descendant_id:
+        return True
+
+    descendants = _collect_descendants(hierarchy, ancestor_id)
+    return potential_descendant_id in descendants
+
+
+def _get_phase_for_node(hierarchy: Dict[str, Any], node_id: str) -> Optional[str]:
+    """
+    Walk up the hierarchy to find the phase containing a node.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        node_id: The node to find the phase for
+
+    Returns:
+        Phase ID if found, None otherwise
+    """
+    current_id = node_id
+    visited = set()
+
+    while current_id:
+        if current_id in visited:
+            break
+        visited.add(current_id)
+
+        node = hierarchy.get(current_id)
+        if not node:
+            break
+
+        if node.get("type") == "phase":
+            return current_id
+
+        current_id = node.get("parent")
+
+    return None
+
+
+def _check_cross_phase_dependencies(
+    hierarchy: Dict[str, Any],
+    task_id: str,
+    old_phase_id: Optional[str],
+    new_phase_id: Optional[str],
+) -> List[str]:
+    """
+    Check for potential dependency issues when moving across phases.
+
+    Args:
+        hierarchy: The spec hierarchy dict
+        task_id: The task being moved
+        old_phase_id: The original phase ID
+        new_phase_id: The target phase ID
+
+    Returns:
+        List of warning messages about potential dependency issues
+    """
+    warnings = []
+
+    if old_phase_id == new_phase_id:
+        return warnings
+
+    task = hierarchy.get(task_id)
+    if not task:
+        return warnings
+
+    deps = task.get("dependencies", {})
+
+    # Check blocked_by dependencies
+    blocked_by = deps.get("blocked_by", [])
+    for dep_id in blocked_by:
+        dep_phase = _get_phase_for_node(hierarchy, dep_id)
+        if dep_phase and dep_phase != new_phase_id:
+            dep_node = hierarchy.get(dep_id, {})
+            warnings.append(
+                f"Task '{task_id}' is blocked by '{dep_id}' ({dep_node.get('title', '')}) "
+                f"which is in a different phase ('{dep_phase}')"
+            )
+
+    # Check blocks dependencies
+    blocks = deps.get("blocks", [])
+    for dep_id in blocks:
+        dep_phase = _get_phase_for_node(hierarchy, dep_id)
+        if dep_phase and dep_phase != new_phase_id:
+            dep_node = hierarchy.get(dep_id, {})
+            warnings.append(
+                f"Task '{task_id}' blocks '{dep_id}' ({dep_node.get('title', '')}) "
+                f"which is in a different phase ('{dep_phase}')"
+            )
+
+    return warnings
 
 
 def update_task_metadata(
     spec_id: str,
     task_id: str,
+    title: Optional[str] = None,
     file_path: Optional[str] = None,
     description: Optional[str] = None,
+    acceptance_criteria: Optional[List[str]] = None,
     task_category: Optional[str] = None,
     actual_hours: Optional[float] = None,
     status_note: Optional[str] = None,
     verification_type: Optional[str] = None,
     command: Optional[str] = None,
     custom_metadata: Optional[Dict[str, Any]] = None,
+    dry_run: bool = False,
     specs_dir: Optional[Path] = None,
 ) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
     """
     Update arbitrary metadata fields on a task.
 
-    Updates various metadata fields on a task including file path, description,
+    Updates various metadata fields on a task including title, file path, description,
     category, hours, notes, verification type, and custom fields.
     At least one field must be provided.
 
     Args:
         spec_id: Specification ID containing the task.
         task_id: Task ID to update.
+        title: Optional new title for the task (cannot be empty/whitespace-only).
         file_path: Optional file path associated with the task.
         description: Optional task description.
-
+        acceptance_criteria: Optional acceptance criteria list.
+        task_category: Optional task category (implementation, refactoring, investigation, decision, research).
         actual_hours: Optional actual hours spent on task (must be >= 0).
         status_note: Optional status note or completion note.
-        verification_type: Optional verification type (
+        verification_type: Optional verification type (run-tests, fidelity, manual).
         command: Optional command executed for the task.
         custom_metadata: Optional dict of custom metadata fields to merge.
+        dry_run: If True, validate and return preview without saving changes.
         specs_dir: Path to specs directory (auto-detected if not provided).
 
     Returns:
         Tuple of (result_dict, error_message).
-        On success: ({"task_id": ..., "fields_updated": [...], ...}, None)
+        On success: ({"task_id": ..., "fields_updated": [...], "previous_values": {...}, ...}, None)
         On failure: (None, "error message")
     """
-    #
+    # Validate title if provided (cannot be empty/whitespace-only)
+    title_update: Optional[str] = None
+    if title is not None:
+        title_stripped = title.strip()
+        if not title_stripped:
+            return None, "Title cannot be empty or whitespace-only"
+        title_update = title_stripped
+
+    # Collect all provided metadata fields
     updates: Dict[str, Any] = {}
     if file_path is not None:
         updates["file_path"] = file_path.strip() if file_path else None
     if description is not None:
         updates["description"] = description.strip() if description else None
+    if acceptance_criteria is not None:
+        updates["acceptance_criteria"] = acceptance_criteria
     if task_category is not None:
         updates["task_category"] = task_category
     if actual_hours is not None:
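manage_task_dependency, added above, keeps blocks and blocked_by reciprocal and uses the BFS helpers to reject cycles. A behavior sketch with hypothetical spec and task IDs:

    # Adding "task-1 blocks task-2" writes both edges:
    result, err = manage_task_dependency(
        spec_id="spec-001",
        source_task_id="task-1",
        target_task_id="task-2",
        dependency_type="blocks",
    )
    # result["source_dependencies"]["blocks"] contains "task-2"
    # result["target_dependencies"]["blocked_by"] contains "task-1"

    # The reverse edge is now refused by _would_create_circular_dependency:
    result, err = manage_task_dependency(
        spec_id="spec-001",
        source_task_id="task-2",
        target_task_id="task-1",
        dependency_type="blocks",
    )
    assert result is None and "circular" in err
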
@@ -1200,9 +1609,9 @@ def update_task_metadata(
     if command is not None:
         updates["command"] = command.strip() if command else None
 
-    # Validate at least one field is provided
-    if not updates and not custom_metadata:
-        return None, "At least one
+    # Validate at least one field is provided (title or metadata fields)
+    if title_update is None and not updates and not custom_metadata:
+        return None, "At least one field must be provided (title or metadata fields)"
 
     # Validate actual_hours
     if actual_hours is not None:
@@ -1211,6 +1620,16 @@ def update_task_metadata(
         if actual_hours < 0:
             return None, "actual_hours must be >= 0"
 
+    if acceptance_criteria is not None:
+        if not isinstance(acceptance_criteria, list):
+            return None, "acceptance_criteria must be a list of strings"
+        cleaned_criteria = []
+        for item in acceptance_criteria:
+            if not isinstance(item, str) or not item.strip():
+                return None, "acceptance_criteria must be a list of non-empty strings"
+            cleaned_criteria.append(item.strip())
+        updates["acceptance_criteria"] = cleaned_criteria
+
     # Validate task_category
     if task_category is not None:
         task_category_lower = task_category.lower().strip()
@@ -1259,12 +1678,20 @@ def update_task_metadata(
         metadata = {}
         task["metadata"] = metadata
 
-    # Track which fields were updated
+    # Track which fields were updated and their previous values
     fields_updated = []
+    previous_values: Dict[str, Any] = {}
 
-    # Apply
+    # Apply title update (core field on task, not metadata)
+    if title_update is not None:
+        previous_values["title"] = task.get("title")
+        task["title"] = title_update
+        fields_updated.append("title")
+
+    # Apply metadata updates
     for key, value in updates.items():
         if value is not None or key in metadata:
+            previous_values[key] = metadata.get(key)
             metadata[key] = value
             fields_updated.append(key)
 
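With the hunks above, update_task_metadata can also rename a task, accepts acceptance_criteria, and records the previous value of every field it touches; dry_run returns a preview without saving. A sketch with hypothetical IDs:

    preview, err = update_task_metadata(
        spec_id="spec-001",
        task_id="task-1",
        title="Clearer task title",
        acceptance_criteria=["returns 200", "logs the request"],
        dry_run=True,  # validate and preview; nothing is written
    )
    # preview["fields_updated"]   -> ["title", "acceptance_criteria"]
    # preview["previous_values"]  -> old title and old criteria, per field
    # preview["message"]          -> "Dry run - changes not saved"
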
@@ -1273,17 +1700,635 @@ def update_task_metadata(
         for key, value in custom_metadata.items():
             # Don't allow overwriting core fields via custom_metadata
             if key not in ("type", "title", "status", "parent", "children", "dependencies"):
+                if key not in previous_values:
+                    previous_values[key] = metadata.get(key)
                 metadata[key] = value
                 if key not in fields_updated:
                     fields_updated.append(key)
 
+    # Build result
+    result = {
+        "spec_id": spec_id,
+        "task_id": task_id,
+        "fields_updated": fields_updated,
+        "previous_values": previous_values,
+        "dry_run": dry_run,
+    }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+def move_task(
+    spec_id: str,
+    task_id: str,
+    new_parent: Optional[str] = None,
+    position: Optional[int] = None,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str], List[str]]:
+    """
+    Move a task to a new position within its parent or to a different parent.
+
+    Supports two modes:
+    1. Reorder within parent: only specify position (new_parent=None)
+    2. Reparent to different phase/task: specify new_parent, optionally position
+
+    Updates task counts on affected parents. Prevents circular references.
+    Emits warnings for cross-phase moves that might affect dependencies.
+
+    Args:
+        spec_id: Specification ID containing the task.
+        task_id: Task ID to move.
+        new_parent: Optional new parent ID (phase or task). If None, reorders
+            within current parent.
+        position: Optional position in parent's children list (1-based).
+            If None, appends to end.
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message, warnings_list).
+        On success: ({"task_id": ..., "old_parent": ..., "new_parent": ..., ...}, None, [warnings])
+        On failure: (None, "error message", [])
+    """
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR.", []
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found", []
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'", []
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate task exists
+    task = hierarchy.get(task_id)
+    if task is None:
+        return None, f"Task '{task_id}' not found", []
+
+    # Validate task type (can only move task, subtask, verify)
+    task_type = task.get("type")
+    if task_type not in ("task", "subtask", "verify"):
+        return None, f"Cannot move node type '{task_type}'. Only task, subtask, or verify nodes can be moved.", []
+
+    old_parent_id = task.get("parent")
+    if not old_parent_id:
+        return None, f"Task '{task_id}' has no parent and cannot be moved", []
+
+    old_parent = hierarchy.get(old_parent_id)
+    if not old_parent:
+        return None, f"Task's current parent '{old_parent_id}' not found", []
+
+    # Determine effective new parent
+    effective_new_parent_id = new_parent if new_parent else old_parent_id
+    is_reparenting = effective_new_parent_id != old_parent_id
+
+    # Validate new parent exists
+    new_parent_node = hierarchy.get(effective_new_parent_id)
+    if new_parent_node is None:
+        return None, f"Target parent '{effective_new_parent_id}' not found", []
+
+    # Validate new parent type (can add tasks to phases, groups, or tasks)
+    new_parent_type = new_parent_node.get("type")
+    if new_parent_type not in ("phase", "group", "task"):
+        return None, f"Cannot move to node type '{new_parent_type}'. Target must be a phase, group, or task.", []
+
+    # Prevent self-reference
+    if task_id == effective_new_parent_id:
+        return None, f"Task '{task_id}' cannot be moved to itself", []
+
+    # Prevent circular reference (can't move a task to one of its descendants)
+    if _is_descendant(hierarchy, task_id, effective_new_parent_id):
+        return None, f"Cannot move '{task_id}' to '{effective_new_parent_id}': would create circular reference", []
+
+    # Get current children lists
+    old_children = old_parent.get("children", [])
+    if not isinstance(old_children, list):
+        old_children = []
+
+    new_children = new_parent_node.get("children", []) if is_reparenting else old_children.copy()
+    if not isinstance(new_children, list):
+        new_children = []
+
+    # Validate position
+    # Remove task from old position first to calculate valid range
+    old_position = None
+    if task_id in old_children:
+        old_position = old_children.index(task_id)
+
+    # For position validation, consider the list after removal
+    max_position = len(new_children) if is_reparenting else len(new_children) - 1
+    if position is not None:
+        # Convert to 0-based for internal use (user provides 1-based)
+        position_0based = position - 1
+        if position_0based < 0 or position_0based > max_position:
+            return None, f"Invalid position {position}. Must be 1-{max_position + 1}", []
+    else:
+        # Default: append to end
+        position_0based = max_position
+
+    # Check for cross-phase dependency warnings
+    warnings: List[str] = []
+    if is_reparenting:
+        old_phase = _get_phase_for_node(hierarchy, task_id)
+        new_phase = _get_phase_for_node(hierarchy, effective_new_parent_id)
+        if new_phase != old_phase:
+            warnings = _check_cross_phase_dependencies(hierarchy, task_id, old_phase, new_phase)
+
+    # Calculate task counts for the subtree being moved (including the task itself)
+    descendants = _collect_descendants(hierarchy, task_id)
+    all_moved_nodes = [task_id] + descendants
+    total_moved, completed_moved = _count_tasks_in_subtree(hierarchy, all_moved_nodes)
+
+    # Build result for dry run or actual move
+    result: Dict[str, Any] = {
+        "spec_id": spec_id,
+        "task_id": task_id,
+        "old_parent": old_parent_id,
+        "new_parent": effective_new_parent_id,
+        "old_position": old_position + 1 if old_position is not None else None,  # 1-based for output
+        "new_position": position_0based + 1,  # 1-based for output
+        "is_reparenting": is_reparenting,
+        "tasks_in_subtree": total_moved,
+        "dry_run": dry_run,
+    }
+
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+        if warnings:
+            result["dependency_warnings"] = warnings
+        return result, None, warnings
+
+    # Perform the move
+
+    # 1. Remove from old parent's children list
+    if task_id in old_children:
+        old_children.remove(task_id)
+    old_parent["children"] = old_children
+
+    # 2. Add to new parent's children list at specified position
+    if is_reparenting:
+        # Fresh list from new parent
+        new_children = new_parent_node.get("children", [])
+        if not isinstance(new_children, list):
+            new_children = []
+    else:
+        # Same parent, already removed
+        new_children = old_children
+
+    # Insert at position
+    if position_0based >= len(new_children):
+        new_children.append(task_id)
+    else:
+        new_children.insert(position_0based, task_id)
+
+    if is_reparenting:
+        new_parent_node["children"] = new_children
+    else:
+        old_parent["children"] = new_children
+
+    # 3. Update task's parent reference
+    if is_reparenting:
+        task["parent"] = effective_new_parent_id
+
+    # 4. Update ancestor task counts
+    # Decrement old parent's ancestors
+    _decrement_ancestor_counts(hierarchy, old_parent_id, total_moved, completed_moved)
+    # Increment new parent's ancestors
+    _update_ancestor_counts(hierarchy, effective_new_parent_id, delta=total_moved)
+    # Update completed counts for new ancestors
+    if completed_moved > 0:
+        current_id = effective_new_parent_id
+        visited = set()
+        while current_id:
+            if current_id in visited:
+                break
+            visited.add(current_id)
+            node = hierarchy.get(current_id)
+            if not node:
+                break
+            current_completed = node.get("completed_tasks", 0)
+            node["completed_tasks"] = current_completed + completed_moved
+            current_id = node.get("parent")
+
     # Save the spec
     success = save_spec(spec_id, spec_data, specs_dir)
     if not success:
-        return None, "Failed to save specification"
+        return None, "Failed to save specification", []
 
-
-    "
-
-
-
+    if warnings:
+        result["dependency_warnings"] = warnings
+
+    return result, None, warnings
+
+
+def _generate_requirement_id(existing_requirements: List[Dict[str, Any]]) -> str:
+    """
+    Generate a unique requirement ID based on existing requirements.
+
+    Args:
+        existing_requirements: List of existing requirement dictionaries
+
+    Returns:
+        New requirement ID string (e.g., "req-1", "req-2")
+    """
+    if not existing_requirements:
+        return "req-1"
+
+    max_index = 0
+    pattern = re.compile(r"^req-(\d+)$")
+
+    for req in existing_requirements:
+        req_id = req.get("id", "")
+        match = pattern.match(req_id)
+        if match:
+            index = int(match.group(1))
+            max_index = max(max_index, index)
+
+    return f"req-{max_index + 1}"
+
+
+def update_task_requirements(
+    spec_id: str,
+    task_id: str,
+    action: str = "add",
+    requirement_type: Optional[str] = None,
+    text: Optional[str] = None,
+    requirement_id: Optional[str] = None,
+    dry_run: bool = False,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """
+    Add or remove a structured requirement from a task's metadata.
+
+    Requirements are stored in metadata.requirements as a list of objects:
+    [{"id": "req-1", "type": "acceptance", "text": "..."}, ...]
+
+    Each requirement has:
+    - id: Auto-generated unique ID (e.g., "req-1", "req-2")
+    - type: Requirement type (acceptance, technical, constraint)
+    - text: Requirement description text
+
+    Args:
+        spec_id: Specification ID containing the task.
+        task_id: Task ID to update.
+        action: Action to perform ("add" or "remove"). Default: "add".
+        requirement_type: Requirement type (required for add). One of:
+            acceptance, technical, constraint.
+        text: Requirement text (required for add).
+        requirement_id: Requirement ID to remove (required for remove action).
+        dry_run: If True, validate and return preview without saving changes.
+        specs_dir: Path to specs directory (auto-detected if not provided).
+
+    Returns:
+        Tuple of (result_dict, error_message).
+        On success: ({"task_id": ..., "action": ..., "requirement": {...}, ...}, None)
+        On failure: (None, "error message")
+    """
+    # Validate action
+    if action not in ("add", "remove"):
+        return None, f"Invalid action '{action}'. Must be 'add' or 'remove'"
+
+    # Validate parameters based on action
+    if action == "add":
+        if requirement_type is None:
+            return None, "requirement_type is required for add action"
+        if not isinstance(requirement_type, str):
+            return None, "requirement_type must be a string"
+        requirement_type = requirement_type.lower().strip()
+        if requirement_type not in REQUIREMENT_TYPES:
+            return None, f"Invalid requirement_type '{requirement_type}'. Must be one of: {', '.join(REQUIREMENT_TYPES)}"
+
+        if text is None:
+            return None, "text is required for add action"
+        if not isinstance(text, str) or not text.strip():
+            return None, "text must be a non-empty string"
+        text = text.strip()
+
+    elif action == "remove":
+        if requirement_id is None:
+            return None, "requirement_id is required for remove action"
+        if not isinstance(requirement_id, str) or not requirement_id.strip():
+            return None, "requirement_id must be a non-empty string"
+        requirement_id = requirement_id.strip()
+
+    # Find specs directory
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+
+    if specs_dir is None:
+        return None, "No specs directory found. Use specs_dir parameter or set SDD_SPECS_DIR."
+
+    # Find and load the spec
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if spec_path is None:
+        return None, f"Specification '{spec_id}' not found"
+
+    spec_data = load_spec(spec_id, specs_dir)
+    if spec_data is None:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+
+    # Validate task exists
+    task = hierarchy.get(task_id)
+    if task is None:
+        return None, f"Task '{task_id}' not found"
+
+    # Validate task type (can only update task, subtask, verify)
+    task_type = task.get("type")
+    if task_type not in ("task", "subtask", "verify"):
+        return None, f"Cannot update requirements for node type '{task_type}'. Only task, subtask, or verify nodes can be updated."
+
+    # Get or create metadata
+    metadata = task.get("metadata")
+    if metadata is None:
+        metadata = {}
+        task["metadata"] = metadata
+
+    # Get or create requirements list
+    requirements = metadata.get("requirements")
+    if requirements is None:
+        requirements = []
+        metadata["requirements"] = requirements
+    elif not isinstance(requirements, list):
+        requirements = []
+        metadata["requirements"] = requirements
+
+    # Perform the action
+    if action == "add":
+        # Check limit
+        if len(requirements) >= MAX_REQUIREMENTS_PER_TASK:
+            return None, f"Cannot add requirement: task already has {MAX_REQUIREMENTS_PER_TASK} requirements (maximum)"
+
+        # Generate new requirement ID
+        new_id = _generate_requirement_id(requirements)
+
+        # Create requirement object
+        new_requirement = {
+            "id": new_id,
+            "type": requirement_type,
+            "text": text,
+        }
+
+        # Add to list
+        requirements.append(new_requirement)
+
+        result = {
+            "spec_id": spec_id,
+            "task_id": task_id,
+            "action": "add",
+            "requirement": new_requirement,
+            "total_requirements": len(requirements),
+            "dry_run": dry_run,
+        }
+
+    elif action == "remove":
+        # Find requirement by ID
+        found_index = None
+        removed_requirement = None
+        for i, req in enumerate(requirements):
+            if req.get("id") == requirement_id:
+                found_index = i
+                removed_requirement = req
+                break
+
+        if found_index is None:
+            return None, f"Requirement '{requirement_id}' not found in task '{task_id}'"
+
+        # Remove from list
+        requirements.pop(found_index)
+
+        result = {
+            "spec_id": spec_id,
+            "task_id": task_id,
+            "action": "remove",
+            "requirement": removed_requirement,
+            "total_requirements": len(requirements),
+            "dry_run": dry_run,
+        }
+
+    # Save the spec (unless dry_run)
+    if dry_run:
+        result["message"] = "Dry run - changes not saved"
+    else:
+        success = save_spec(spec_id, spec_data, specs_dir)
+        if not success:
+            return None, "Failed to save specification"
+
+    return result, None
+
+
+# Valid statuses for batch filtering
+BATCH_ALLOWED_STATUSES = {"pending", "in_progress", "completed", "blocked"}
+
+# Safety constraints for batch operations
+MAX_PATTERN_LENGTH = 256
+DEFAULT_MAX_MATCHES = 100
+
+
+def _match_tasks_for_batch(
+    hierarchy: Dict[str, Any],
+    *,
+    status_filter: Optional[str] = None,
+    parent_filter: Optional[str] = None,
+    pattern: Optional[str] = None,
+) -> List[str]:
+    """Find tasks matching filter criteria (AND logic). Returns sorted task IDs."""
+    compiled_pattern = None
+    if pattern:
+        try:
+            compiled_pattern = re.compile(pattern, re.IGNORECASE)
+        except re.error:
+            return []
+
+    matched = []
+    target_types = {"task", "subtask", "verify"}
+
+    valid_descendants: Optional[set] = None
+    if parent_filter:
+        parent_node = hierarchy.get(parent_filter)
+        if not parent_node:
+            return []
+        valid_descendants = set()
+        to_visit = list(parent_node.get("children", []))
+        while to_visit:
+            child_id = to_visit.pop()
+            if child_id in valid_descendants:
+                continue
+            valid_descendants.add(child_id)
+            child_node = hierarchy.get(child_id)
+            if child_node:
+                to_visit.extend(child_node.get("children", []))
+
+    for node_id, node_data in hierarchy.items():
+        if node_data.get("type") not in target_types:
+            continue
+        if status_filter and node_data.get("status") != status_filter:
+            continue
+        if valid_descendants is not None and node_id not in valid_descendants:
+            continue
+        if compiled_pattern:
+            title = node_data.get("title", "")
+            if not (compiled_pattern.search(title) or compiled_pattern.search(node_id)):
+                continue
+        matched.append(node_id)
+
+    return sorted(matched)
+
+
+def batch_update_tasks(
+    spec_id: str,
+    *,
+    status_filter: Optional[str] = None,
+    parent_filter: Optional[str] = None,
+    pattern: Optional[str] = None,
+    description: Optional[str] = None,
+    file_path: Optional[str] = None,
+    estimated_hours: Optional[float] = None,
+    category: Optional[str] = None,
+    labels: Optional[Dict[str, str]] = None,
+    owners: Optional[List[str]] = None,
+    custom_metadata: Optional[Dict[str, Any]] = None,
+    dry_run: bool = False,
+    max_matches: int = DEFAULT_MAX_MATCHES,
+    specs_dir: Optional[Path] = None,
+) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
+    """Batch update metadata across tasks matching filters (AND logic)."""
+    # Validate filters
+    if not any([status_filter, parent_filter, pattern]):
+        return None, "At least one filter must be provided: status_filter, parent_filter, or pattern"
+    if status_filter and status_filter not in BATCH_ALLOWED_STATUSES:
+        return None, f"Invalid status_filter '{status_filter}'. Must be one of: {sorted(BATCH_ALLOWED_STATUSES)}"
+    if pattern:
+        if not isinstance(pattern, str) or not pattern.strip():
+            return None, "pattern must be a non-empty string"
+        pattern = pattern.strip()
+        if len(pattern) > MAX_PATTERN_LENGTH:
+            return None, f"pattern exceeds maximum length of {MAX_PATTERN_LENGTH} characters"
+        try:
+            re.compile(pattern)
+        except re.error as e:
+            return None, f"Invalid regex pattern: {e}"
+    if parent_filter:
+        if not isinstance(parent_filter, str) or not parent_filter.strip():
+            return None, "parent_filter must be a non-empty string"
+        parent_filter = parent_filter.strip()
+
+    # Collect metadata updates
+    metadata_updates: Dict[str, Any] = {}
+    if description is not None:
+        metadata_updates["description"] = description.strip() if description else None
+    if file_path is not None:
+        metadata_updates["file_path"] = file_path.strip() if file_path else None
+    if estimated_hours is not None:
+        if not isinstance(estimated_hours, (int, float)) or estimated_hours < 0:
+            return None, "estimated_hours must be a non-negative number"
+        metadata_updates["estimated_hours"] = float(estimated_hours)
+    if category is not None:
+        metadata_updates["category"] = category.strip() if category else None
+    if labels is not None:
+        if not isinstance(labels, dict) or not all(isinstance(k, str) and isinstance(v, str) for k, v in labels.items()):
+            return None, "labels must be a dict with string keys and values"
+        metadata_updates["labels"] = labels
+    if owners is not None:
+        if not isinstance(owners, list) or not all(isinstance(o, str) for o in owners):
+            return None, "owners must be a list of strings"
+        metadata_updates["owners"] = owners
+    if custom_metadata:
+        if not isinstance(custom_metadata, dict):
+            return None, "custom_metadata must be a dict"
+        for key, value in custom_metadata.items():
+            if key not in metadata_updates:
+                metadata_updates[key] = value
+
+    if not metadata_updates:
+        return None, "At least one metadata field must be provided"
+    if max_matches <= 0:
+        return None, "max_matches must be a positive integer"
+
+    # Load spec
+    if specs_dir is None:
+        specs_dir = find_specs_directory()
+    if specs_dir is None:
+        return None, "No specs directory found"
+    spec_path = find_spec_file(spec_id, specs_dir)
+    if not spec_path:
+        return None, f"Specification '{spec_id}' not found"
+    spec_data = load_spec(spec_id, specs_dir)
+    if not spec_data:
+        return None, f"Failed to load specification '{spec_id}'"
+
+    hierarchy = spec_data.get("hierarchy", {})
+    if parent_filter and parent_filter not in hierarchy:
+        return None, f"Parent '{parent_filter}' not found in specification"
+
+    matched_ids = _match_tasks_for_batch(hierarchy, status_filter=status_filter, parent_filter=parent_filter, pattern=pattern)
+    warnings: List[str] = []
+    skipped_ids = []
+    if len(matched_ids) > max_matches:
+        warnings.append(f"Found {len(matched_ids)} matches, limiting to {max_matches}")
+        skipped_ids = matched_ids[max_matches:]
+        matched_ids = matched_ids[:max_matches]
+
+    if not matched_ids:
+        return {"spec_id": spec_id, "matched_count": 0, "updated_count": 0, "skipped_count": len(skipped_ids),
+                "nodes": [], "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
+                "metadata_applied": metadata_updates, "dry_run": dry_run, "message": "No tasks matched"}, None
+
+    # Capture originals and build result
+    original_metadata: Dict[str, Dict[str, Any]] = {}
+    updated_nodes: List[Dict[str, Any]] = []
+    for node_id in matched_ids:
+        node = hierarchy.get(node_id, {})
+        existing_meta = node.get("metadata", {}) or {}
+        original_metadata[node_id] = {k: existing_meta.get(k) for k in metadata_updates}
+        diff = {k: {"old": original_metadata[node_id].get(k), "new": v} for k, v in metadata_updates.items() if original_metadata[node_id].get(k) != v}
+        updated_nodes.append({"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
+                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys()), "diff": diff} if diff else
+                             {"node_id": node_id, "title": node.get("title", ""), "type": node.get("type", ""),
+                              "status": node.get("status", ""), "fields_updated": list(metadata_updates.keys())})
+        if not dry_run:
+            if "metadata" not in node:
+                node["metadata"] = {}
+            node["metadata"].update(metadata_updates)
+
+    if not dry_run:
+        if not save_spec(spec_id, spec_data, specs_dir):
+            for nid, orig in original_metadata.items():
+                n = hierarchy.get(nid, {})
+                if "metadata" in n:
+                    for k, v in orig.items():
+                        if v is None:
+                            n["metadata"].pop(k, None)
+                        else:
+                            n["metadata"][k] = v
+            return None, "Failed to save; changes rolled back"
+
+    if len(matched_ids) > 50:
+        warnings.append(f"Updated {len(matched_ids)} tasks")
+
+    result = {"spec_id": spec_id, "matched_count": len(matched_ids), "updated_count": len(matched_ids) if not dry_run else 0,
+              "skipped_count": len(skipped_ids), "nodes": updated_nodes, "filters": {"status_filter": status_filter, "parent_filter": parent_filter, "pattern": pattern},
+              "metadata_applied": metadata_updates, "dry_run": dry_run}
+    if warnings:
+        result["warnings"] = warnings
+    if skipped_ids:
+        result["skipped_tasks"] = skipped_ids
+    return result, None
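The rest of the hunk rounds out the 0.7.0 task API: move_task (1-based positions, a third return element carrying cross-phase warnings), update_task_requirements (auto-generated "req-N" IDs), and batch_update_tasks (AND-combined filters with rollback on a failed save). A combined sketch, IDs hypothetical:

    # Reorder within the current parent; positions are 1-based.
    result, err, warns = move_task("spec-001", "task-3", position=1)

    # Reparent into another phase; dependency warnings surface in the third element.
    result, err, warns = move_task("spec-001", "task-3", new_parent="phase-2")
    for w in warns:
        print("warning:", w)

    # Structured requirements get auto-generated IDs ("req-1", "req-2", ...).
    result, err = update_task_requirements(
        "spec-001", "task-3",
        action="add", requirement_type="acceptance", text="Survives a restart",
    )

    # Batch-apply metadata to pending tasks under phase-2; filters AND together.
    result, err = batch_update_tasks(
        "spec-001",
        status_filter="pending",
        parent_filter="phase-2",
        labels={"component": "core"},
        dry_run=True,  # preview: updated_count stays 0, per-node diffs reported
    )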