foundry-mcp 0.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135) hide show
  1. foundry_mcp/__init__.py +7 -0
  2. foundry_mcp/cli/__init__.py +80 -0
  3. foundry_mcp/cli/__main__.py +9 -0
  4. foundry_mcp/cli/agent.py +96 -0
  5. foundry_mcp/cli/commands/__init__.py +37 -0
  6. foundry_mcp/cli/commands/cache.py +137 -0
  7. foundry_mcp/cli/commands/dashboard.py +148 -0
  8. foundry_mcp/cli/commands/dev.py +446 -0
  9. foundry_mcp/cli/commands/journal.py +377 -0
  10. foundry_mcp/cli/commands/lifecycle.py +274 -0
  11. foundry_mcp/cli/commands/modify.py +824 -0
  12. foundry_mcp/cli/commands/plan.py +633 -0
  13. foundry_mcp/cli/commands/pr.py +393 -0
  14. foundry_mcp/cli/commands/review.py +652 -0
  15. foundry_mcp/cli/commands/session.py +479 -0
  16. foundry_mcp/cli/commands/specs.py +856 -0
  17. foundry_mcp/cli/commands/tasks.py +807 -0
  18. foundry_mcp/cli/commands/testing.py +676 -0
  19. foundry_mcp/cli/commands/validate.py +982 -0
  20. foundry_mcp/cli/config.py +98 -0
  21. foundry_mcp/cli/context.py +259 -0
  22. foundry_mcp/cli/flags.py +266 -0
  23. foundry_mcp/cli/logging.py +212 -0
  24. foundry_mcp/cli/main.py +44 -0
  25. foundry_mcp/cli/output.py +122 -0
  26. foundry_mcp/cli/registry.py +110 -0
  27. foundry_mcp/cli/resilience.py +178 -0
  28. foundry_mcp/cli/transcript.py +217 -0
  29. foundry_mcp/config.py +850 -0
  30. foundry_mcp/core/__init__.py +144 -0
  31. foundry_mcp/core/ai_consultation.py +1636 -0
  32. foundry_mcp/core/cache.py +195 -0
  33. foundry_mcp/core/capabilities.py +446 -0
  34. foundry_mcp/core/concurrency.py +898 -0
  35. foundry_mcp/core/context.py +540 -0
  36. foundry_mcp/core/discovery.py +1603 -0
  37. foundry_mcp/core/error_collection.py +728 -0
  38. foundry_mcp/core/error_store.py +592 -0
  39. foundry_mcp/core/feature_flags.py +592 -0
  40. foundry_mcp/core/health.py +749 -0
  41. foundry_mcp/core/journal.py +694 -0
  42. foundry_mcp/core/lifecycle.py +412 -0
  43. foundry_mcp/core/llm_config.py +1350 -0
  44. foundry_mcp/core/llm_patterns.py +510 -0
  45. foundry_mcp/core/llm_provider.py +1569 -0
  46. foundry_mcp/core/logging_config.py +374 -0
  47. foundry_mcp/core/metrics_persistence.py +584 -0
  48. foundry_mcp/core/metrics_registry.py +327 -0
  49. foundry_mcp/core/metrics_store.py +641 -0
  50. foundry_mcp/core/modifications.py +224 -0
  51. foundry_mcp/core/naming.py +123 -0
  52. foundry_mcp/core/observability.py +1216 -0
  53. foundry_mcp/core/otel.py +452 -0
  54. foundry_mcp/core/otel_stubs.py +264 -0
  55. foundry_mcp/core/pagination.py +255 -0
  56. foundry_mcp/core/progress.py +317 -0
  57. foundry_mcp/core/prometheus.py +577 -0
  58. foundry_mcp/core/prompts/__init__.py +464 -0
  59. foundry_mcp/core/prompts/fidelity_review.py +546 -0
  60. foundry_mcp/core/prompts/markdown_plan_review.py +511 -0
  61. foundry_mcp/core/prompts/plan_review.py +623 -0
  62. foundry_mcp/core/providers/__init__.py +225 -0
  63. foundry_mcp/core/providers/base.py +476 -0
  64. foundry_mcp/core/providers/claude.py +460 -0
  65. foundry_mcp/core/providers/codex.py +619 -0
  66. foundry_mcp/core/providers/cursor_agent.py +642 -0
  67. foundry_mcp/core/providers/detectors.py +488 -0
  68. foundry_mcp/core/providers/gemini.py +405 -0
  69. foundry_mcp/core/providers/opencode.py +616 -0
  70. foundry_mcp/core/providers/opencode_wrapper.js +302 -0
  71. foundry_mcp/core/providers/package-lock.json +24 -0
  72. foundry_mcp/core/providers/package.json +25 -0
  73. foundry_mcp/core/providers/registry.py +607 -0
  74. foundry_mcp/core/providers/test_provider.py +171 -0
  75. foundry_mcp/core/providers/validation.py +729 -0
  76. foundry_mcp/core/rate_limit.py +427 -0
  77. foundry_mcp/core/resilience.py +600 -0
  78. foundry_mcp/core/responses.py +934 -0
  79. foundry_mcp/core/review.py +366 -0
  80. foundry_mcp/core/security.py +438 -0
  81. foundry_mcp/core/spec.py +1650 -0
  82. foundry_mcp/core/task.py +1289 -0
  83. foundry_mcp/core/testing.py +450 -0
  84. foundry_mcp/core/validation.py +2081 -0
  85. foundry_mcp/dashboard/__init__.py +32 -0
  86. foundry_mcp/dashboard/app.py +119 -0
  87. foundry_mcp/dashboard/components/__init__.py +17 -0
  88. foundry_mcp/dashboard/components/cards.py +88 -0
  89. foundry_mcp/dashboard/components/charts.py +234 -0
  90. foundry_mcp/dashboard/components/filters.py +136 -0
  91. foundry_mcp/dashboard/components/tables.py +195 -0
  92. foundry_mcp/dashboard/data/__init__.py +11 -0
  93. foundry_mcp/dashboard/data/stores.py +433 -0
  94. foundry_mcp/dashboard/launcher.py +289 -0
  95. foundry_mcp/dashboard/views/__init__.py +12 -0
  96. foundry_mcp/dashboard/views/errors.py +217 -0
  97. foundry_mcp/dashboard/views/metrics.py +174 -0
  98. foundry_mcp/dashboard/views/overview.py +160 -0
  99. foundry_mcp/dashboard/views/providers.py +83 -0
  100. foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
  101. foundry_mcp/dashboard/views/tool_usage.py +139 -0
  102. foundry_mcp/prompts/__init__.py +9 -0
  103. foundry_mcp/prompts/workflows.py +525 -0
  104. foundry_mcp/resources/__init__.py +9 -0
  105. foundry_mcp/resources/specs.py +591 -0
  106. foundry_mcp/schemas/__init__.py +38 -0
  107. foundry_mcp/schemas/sdd-spec-schema.json +386 -0
  108. foundry_mcp/server.py +164 -0
  109. foundry_mcp/tools/__init__.py +10 -0
  110. foundry_mcp/tools/unified/__init__.py +71 -0
  111. foundry_mcp/tools/unified/authoring.py +1487 -0
  112. foundry_mcp/tools/unified/context_helpers.py +98 -0
  113. foundry_mcp/tools/unified/documentation_helpers.py +198 -0
  114. foundry_mcp/tools/unified/environment.py +939 -0
  115. foundry_mcp/tools/unified/error.py +462 -0
  116. foundry_mcp/tools/unified/health.py +225 -0
  117. foundry_mcp/tools/unified/journal.py +841 -0
  118. foundry_mcp/tools/unified/lifecycle.py +632 -0
  119. foundry_mcp/tools/unified/metrics.py +777 -0
  120. foundry_mcp/tools/unified/plan.py +745 -0
  121. foundry_mcp/tools/unified/pr.py +294 -0
  122. foundry_mcp/tools/unified/provider.py +629 -0
  123. foundry_mcp/tools/unified/review.py +685 -0
  124. foundry_mcp/tools/unified/review_helpers.py +299 -0
  125. foundry_mcp/tools/unified/router.py +102 -0
  126. foundry_mcp/tools/unified/server.py +580 -0
  127. foundry_mcp/tools/unified/spec.py +808 -0
  128. foundry_mcp/tools/unified/task.py +2202 -0
  129. foundry_mcp/tools/unified/test.py +370 -0
  130. foundry_mcp/tools/unified/verification.py +520 -0
  131. foundry_mcp-0.3.3.dist-info/METADATA +337 -0
  132. foundry_mcp-0.3.3.dist-info/RECORD +135 -0
  133. foundry_mcp-0.3.3.dist-info/WHEEL +4 -0
  134. foundry_mcp-0.3.3.dist-info/entry_points.txt +3 -0
  135. foundry_mcp-0.3.3.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,694 @@
1
+ """
2
+ Journal and blocker operations for SDD spec files.
3
+ Provides journal entry management, task blocking, and unblocking.
4
+ """
5
+
6
+ from dataclasses import dataclass, field
7
+ from datetime import datetime, timezone
8
+ from pathlib import Path
9
+ from typing import Any, Dict, List, Optional
10
+ import json
11
+
12
+
13
+ # Data structures
14
+
15
@dataclass
class JournalEntry:
    """
    A journal entry in the spec file.

    Entries record notable events (status changes, deviations, blockers,
    decisions, free-form notes) with a timestamp, author, and optional
    task association.
    """
    timestamp: str  # ISO-8601 creation time (UTC, "Z"-suffixed; see _get_timestamp)
    entry_type: str  # status_change, deviation, blocker, decision, note
    title: str  # short summary of the entry
    content: str  # full entry body
    author: str = "claude-code"  # who recorded the entry
    task_id: Optional[str] = None  # associated task, if any
    metadata: Dict[str, Any] = field(default_factory=dict)  # extra structured data
27
+
28
+
29
@dataclass
class BlockerInfo:
    """
    Information about a task blocker.

    Represents the currently active blocker on a task whose status is
    "blocked"; resolved blockers are represented by ResolvedBlocker.
    """
    blocked_at: str  # ISO-8601 time the task was marked blocked
    blocker_type: str  # dependency, technical, resource, decision
    description: str  # human-readable reason for the block
    ticket: Optional[str] = None  # external ticket/issue reference, if any
    blocked_by_external: bool = False  # True for resource/dependency blockers
39
+
40
+
41
@dataclass
class ResolvedBlocker:
    """
    Information about a resolved blocker.

    Archived history entry created when a blocked task is unblocked;
    stored under the task's metadata "resolved_blockers" list.
    """
    blocked_at: str  # ISO-8601 time the task was originally blocked
    blocker_type: str  # dependency, technical, resource, decision
    description: str  # reason the task was blocked
    resolved_at: str  # ISO-8601 time the blocker was resolved
    resolution: str  # how the blocker was resolved
    ticket: Optional[str] = None  # external ticket/issue reference, if any
52
+
53
+
54
+ # Constants
55
+
56
# Allowed values for JournalEntry.entry_type.
VALID_ENTRY_TYPES = {"status_change", "deviation", "blocker", "decision", "note"}
# Allowed values for BlockerInfo.blocker_type.
VALID_BLOCKER_TYPES = {"dependency", "technical", "resource", "decision"}
# Task statuses accepted by update_task_status().
VALID_STATUSES = {"pending", "in_progress", "completed", "blocked"}
59
+
60
+
61
+ # Journal operations
62
+
63
def add_journal_entry(
    spec_data: Dict[str, Any],
    title: str,
    content: str,
    entry_type: str = "note",
    task_id: Optional[str] = None,
    author: str = "claude-code",
    metadata: Optional[Dict[str, Any]] = None,
) -> JournalEntry:
    """
    Append a single journal entry to the spec data.

    Args:
        spec_data: Spec data dictionary (modified in place)
        title: Entry title
        content: Entry content
        entry_type: Type of entry (status_change, deviation, blocker, decision, note)
        task_id: Optional associated task ID
        author: Author of the entry
        metadata: Optional additional metadata

    Returns:
        The created JournalEntry
    """
    now = _get_timestamp()

    record: Dict[str, Any] = {
        "timestamp": now,
        "entry_type": entry_type,
        "title": title,
        "content": content,
        "author": author,
        "metadata": metadata or {},
    }
    if task_id:
        record["task_id"] = task_id

    # Normalize the journal container before appending.
    if not isinstance(spec_data.get("journal"), list):
        spec_data["journal"] = []
    spec_data["journal"].append(record)

    # Any journal write bumps the spec-level timestamp.
    spec_data["last_updated"] = now

    # Journaling against a task satisfies its needs_journaling flag.
    if task_id:
        _clear_journaling_flag(spec_data, task_id, now)

    return JournalEntry(
        timestamp=now,
        entry_type=entry_type,
        title=title,
        content=content,
        author=author,
        task_id=task_id,
        metadata=metadata or {},
    )
123
+
124
+
125
def get_journal_entries(
    spec_data: Dict[str, Any],
    task_id: Optional[str] = None,
    entry_type: Optional[str] = None,
    limit: Optional[int] = None,
) -> List[JournalEntry]:
    """
    Get journal entries from spec data.

    Args:
        spec_data: Spec data dictionary
        task_id: Optional filter by task ID
        entry_type: Optional filter by entry type
        limit: Optional cap on the number of entries returned;
            non-positive values are ignored (all entries returned)

    Returns:
        List of JournalEntry objects (most recent first)
    """
    journal = spec_data.get("journal", []) or []

    # Keep only entries matching the requested filters.
    filtered = [
        entry
        for entry in journal
        if not (task_id and entry.get("task_id") != task_id)
        and not (entry_type and entry.get("entry_type") != entry_type)
    ]

    # ISO-8601 timestamps sort lexicographically; reverse for most recent first.
    filtered.sort(key=lambda e: e.get("timestamp", ""), reverse=True)

    # Guard against negative limits: `filtered[:limit]` with a negative limit
    # would silently drop entries from the end of the list.
    if limit is not None and limit > 0:
        filtered = filtered[:limit]

    return [
        JournalEntry(
            timestamp=e.get("timestamp", ""),
            entry_type=e.get("entry_type", "note"),
            title=e.get("title", ""),
            content=e.get("content", ""),
            author=e.get("author", ""),
            task_id=e.get("task_id"),
            metadata=e.get("metadata", {}),
        )
        for e in filtered
    ]
174
+
175
+
176
def bulk_journal(
    spec_data: Dict[str, Any],
    entries: List[Dict[str, Any]],
) -> List[JournalEntry]:
    """
    Add multiple journal entries to the spec data in a single operation.

    More efficient than repeated add_journal_entry calls: all entries share
    one timestamp and the spec-level bookkeeping is updated once.

    Args:
        spec_data: Spec data dictionary (modified in place)
        entries: List of entry dicts, each with keys:
            - title (required): Entry title
            - content (required): Entry content
            - entry_type (optional): Type of entry (default: "note")
            - task_id (optional): Associated task ID
            - author (optional): Entry author (default: "claude-code")
            - metadata (optional): Additional metadata dict

    Returns:
        List of created JournalEntry objects

    Example:
        >>> entries = [
        ...     {"title": "First entry", "content": "Content 1", "task_id": "task-1"},
        ...     {"title": "Second entry", "content": "Content 2", "task_id": "task-2"},
        ... ]
        >>> results = bulk_journal(spec_data, entries)
        >>> print(f"Added {len(results)} entries")
    """
    if not entries:
        return []

    # Normalize the journal container before appending.
    if not isinstance(spec_data.get("journal"), list):
        spec_data["journal"] = []

    now = _get_timestamp()
    created: List[JournalEntry] = []
    flagged_tasks: List[str] = []

    for item in entries:
        # Silently skip malformed items (non-dicts, missing title/content).
        if not isinstance(item, dict):
            continue
        title = item.get("title", "")
        content = item.get("content", "")
        if not (title and content):
            continue

        kind = item.get("entry_type", "note")
        tid = item.get("task_id")
        who = item.get("author", "claude-code")
        extra = item.get("metadata", {})

        record: Dict[str, Any] = {
            "timestamp": now,
            "entry_type": kind,
            "title": title,
            "content": content,
            "author": who,
            "metadata": extra,
        }
        if tid:
            record["task_id"] = tid
            flagged_tasks.append(tid)
        spec_data["journal"].append(record)

        created.append(
            JournalEntry(
                timestamp=now,
                entry_type=kind,
                title=title,
                content=content,
                author=who,
                task_id=tid,
                metadata=extra,
            )
        )

    # Single spec-level timestamp update for the whole batch.
    spec_data["last_updated"] = now

    # Journaling against a task satisfies its needs_journaling flag.
    for tid in flagged_tasks:
        _clear_journaling_flag(spec_data, tid, now)

    return created
266
+
267
+
268
def get_latest_journal_entry(
    spec_data: Dict[str, Any],
    task_id: str,
) -> Optional[JournalEntry]:
    """
    Get the most recent journal entry for a task.

    Args:
        spec_data: Spec data dictionary
        task_id: Task ID to get entry for

    Returns:
        JournalEntry or None if no entries found
    """
    # get_journal_entries returns most-recent-first, so one entry is enough.
    matches = get_journal_entries(spec_data, task_id=task_id, limit=1)
    if not matches:
        return None
    return matches[0]
284
+
285
+
286
+ # Blocker operations
287
+
288
def mark_blocked(
    spec_data: Dict[str, Any],
    task_id: str,
    reason: str,
    blocker_type: str = "dependency",
    ticket: Optional[str] = None,
) -> bool:
    """
    Mark a task as blocked.

    Args:
        spec_data: Spec data dictionary (modified in place)
        task_id: Task to mark as blocked
        reason: Description of the blocker
        blocker_type: Type of blocker (dependency, technical, resource, decision)
        ticket: Optional ticket/issue reference

    Returns:
        True if successful, False if task not found
    """
    nodes = spec_data.get("hierarchy", {})
    if task_id not in nodes:
        return False
    task = nodes[task_id]

    now = _get_timestamp()

    details: Dict[str, Any] = {
        "blocked_at": now,
        "blocker_type": blocker_type,
        "blocker_description": reason,
        # Resource/dependency blockers are treated as external to the task.
        "blocked_by_external": blocker_type in {"resource", "dependency"},
    }
    if ticket:
        details["blocker_ticket"] = ticket

    # Flip the task into the blocked state and record the blocker details.
    task["status"] = "blocked"
    task.setdefault("metadata", {}).update(details)

    spec_data["last_updated"] = now
    _recalculate_counts(spec_data)
    return True
338
+
339
+
340
def unblock(
    spec_data: Dict[str, Any],
    task_id: str,
    resolution: Optional[str] = None,
    new_status: str = "pending",
) -> bool:
    """
    Unblock a task and optionally set its new status.

    Args:
        spec_data: Spec data dictionary (modified in place)
        task_id: Task to unblock
        resolution: Optional description of how blocker was resolved
        new_status: Status to set after unblocking (default: pending)

    Returns:
        True if successful, False if task not found or not blocked
    """
    nodes = spec_data.get("hierarchy", {})
    if task_id not in nodes:
        return False
    task = nodes[task_id]
    if task.get("status") != "blocked":
        return False

    now = _get_timestamp()
    meta = dict(task.get("metadata", {}))

    # Archive the active blocker (if any) into the resolved history.
    if "blocker_description" in meta:
        history = meta.setdefault("resolved_blockers", [])
        history.append({
            "blocked_at": meta.get("blocked_at"),
            "blocker_type": meta.get("blocker_type"),
            "description": meta.get("blocker_description"),
            "ticket": meta.get("blocker_ticket"),
            "resolved_at": now,
            "resolution": resolution if resolution else "Blocker resolved",
        })

    # Strip the active-blocker fields now that they are archived.
    for field_name in (
        "blocked_at",
        "blocker_type",
        "blocker_description",
        "blocker_ticket",
        "blocked_by_external",
    ):
        meta.pop(field_name, None)

    task["status"] = new_status
    task["metadata"] = meta

    spec_data["last_updated"] = now
    _recalculate_counts(spec_data)
    return True
396
+
397
+
398
def get_blocker_info(
    spec_data: Dict[str, Any],
    task_id: str,
) -> Optional[BlockerInfo]:
    """
    Get blocker information for a task.

    Args:
        spec_data: Spec data dictionary
        task_id: Task ID to check

    Returns:
        BlockerInfo if task is blocked, None otherwise
    """
    task = spec_data.get("hierarchy", {}).get(task_id)

    # Only tasks currently in the blocked state carry active blocker info.
    if not task or task.get("status") != "blocked":
        return None

    meta = task.get("metadata", {})
    if "blocker_description" not in meta:
        return None

    return BlockerInfo(
        blocked_at=meta.get("blocked_at", ""),
        blocker_type=meta.get("blocker_type", ""),
        description=meta.get("blocker_description", ""),
        ticket=meta.get("blocker_ticket"),
        blocked_by_external=meta.get("blocked_by_external", False),
    )
429
+
430
+
431
def get_resolved_blockers(
    spec_data: Dict[str, Any],
    task_id: str,
) -> List[ResolvedBlocker]:
    """
    Get history of resolved blockers for a task.

    Args:
        spec_data: Spec data dictionary
        task_id: Task ID to check

    Returns:
        List of ResolvedBlocker objects
    """
    task = spec_data.get("hierarchy", {}).get(task_id)
    if not task:
        return []

    history = task.get("metadata", {}).get("resolved_blockers", [])

    result: List[ResolvedBlocker] = []
    for item in history:
        result.append(
            ResolvedBlocker(
                blocked_at=item.get("blocked_at", ""),
                blocker_type=item.get("blocker_type", ""),
                description=item.get("description", ""),
                resolved_at=item.get("resolved_at", ""),
                resolution=item.get("resolution", ""),
                ticket=item.get("ticket"),
            )
        )
    return result
465
+
466
+
467
def list_blocked_tasks(spec_data: Dict[str, Any]) -> List[Dict[str, Any]]:
    """
    List all blocked tasks in the spec.

    Args:
        spec_data: Spec data dictionary

    Returns:
        List of dicts with task_id, title, and blocker info
    """
    def summarize(node_id: str, node: Dict[str, Any]) -> Dict[str, Any]:
        # Flatten the node's blocker metadata into a summary dict.
        meta = node.get("metadata", {})
        return {
            "task_id": node_id,
            "title": node.get("title", ""),
            "blocker_type": meta.get("blocker_type", "unknown"),
            "blocker_description": meta.get("blocker_description", ""),
            "blocked_at": meta.get("blocked_at", ""),
            "ticket": meta.get("blocker_ticket"),
        }

    return [
        summarize(node_id, node)
        for node_id, node in spec_data.get("hierarchy", {}).items()
        if node.get("status") == "blocked"
    ]
493
+
494
+
495
+ # Status update with journaling
496
+
497
def update_task_status(
    spec_data: Dict[str, Any],
    task_id: str,
    new_status: str,
    note: Optional[str] = None,
) -> bool:
    """
    Update a task's status with automatic progress recalculation.

    Args:
        spec_data: Spec data dictionary (modified in place)
        task_id: Task to update
        new_status: New status (pending, in_progress, completed, blocked)
        note: Optional note about the status change

    Returns:
        True if successful, False if task not found or invalid status
    """
    # Reject anything outside the known status vocabulary up front.
    if new_status not in VALID_STATUSES:
        return False

    nodes = spec_data.get("hierarchy", {})
    if task_id not in nodes:
        return False
    task = nodes[task_id]

    now = _get_timestamp()
    task["status"] = new_status

    meta = task.setdefault("metadata", {})
    if new_status == "in_progress":
        meta["started_at"] = now
    elif new_status == "completed":
        meta["completed_at"] = now
        # Completed work must be journaled before it is considered done.
        meta["needs_journaling"] = True

    if note:
        meta["status_note"] = note

    spec_data["last_updated"] = now
    _recalculate_counts(spec_data)
    return True
547
+
548
+
549
def mark_task_journaled(
    spec_data: Dict[str, Any],
    task_id: str,
) -> bool:
    """
    Mark a task as journaled (clear needs_journaling flag).

    Args:
        spec_data: Spec data dictionary (modified in place)
        task_id: Task to mark as journaled

    Returns:
        True if successful, False if task not found
    """
    nodes = spec_data.get("hierarchy", {})
    if task_id not in nodes:
        return False

    meta = nodes[task_id].setdefault("metadata", {})
    # Only tasks that carry the flag get it cleared; others are left as-is.
    if "needs_journaling" in meta:
        meta["needs_journaling"] = False
        meta["journaled_at"] = _get_timestamp()

    return True
575
+
576
+
577
def find_unjournaled_tasks(spec_data: Dict[str, Any]) -> List[Dict[str, str]]:
    """
    Find all completed tasks that need journaling.

    Args:
        spec_data: Spec data dictionary

    Returns:
        List of dicts with task_id and title
    """
    pending: List[Dict[str, str]] = []

    for node_id, node in spec_data.get("hierarchy", {}).items():
        # Only completed tasks can owe a journal entry.
        if node.get("status") != "completed":
            continue
        meta = node.get("metadata", {})
        if not meta.get("needs_journaling", False):
            continue
        pending.append({
            "task_id": node_id,
            "title": node.get("title", ""),
            "completed_at": meta.get("completed_at", ""),
        })

    return pending
601
+
602
+
603
+ # Utility functions
604
+
605
def save_journal(
    spec_data: Dict[str, Any],
    spec_path: str,
    create_backup: bool = True,
) -> bool:
    """
    Save spec data with journal to disk.

    Args:
        spec_data: Spec data dictionary
        spec_path: Path to spec file
        create_backup: Whether to create a ``.json.backup`` copy of the
            existing file before overwriting it

    Returns:
        True if successful, False otherwise
    """
    if create_backup:
        backup_path = Path(spec_path).with_suffix(".json.backup")
        try:
            # Copy in binary mode: a text-mode read with the platform's
            # default encoding could raise UnicodeDecodeError (not caught
            # by `except OSError`) on non-UTF-8 locales.
            backup_path.write_bytes(Path(spec_path).read_bytes())
        except OSError:
            pass  # Continue even if backup fails (best-effort)

    try:
        # Explicit UTF-8 avoids platform-dependent default encodings.
        with open(spec_path, "w", encoding="utf-8") as f:
            json.dump(spec_data, f, indent=2)
        return True
    except OSError:
        return False
637
+
638
+
639
+ # Helper functions
640
+
641
+ def _get_timestamp() -> str:
642
+ """Get current timestamp in ISO 8601 format."""
643
+ return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
644
+
645
+
646
+ def _clear_journaling_flag(spec_data: Dict[str, Any], task_id: str, timestamp: str) -> None:
647
+ """Clear the needs_journaling flag for a task."""
648
+ hierarchy = spec_data.get("hierarchy", {})
649
+ task = hierarchy.get(task_id)
650
+
651
+ if task:
652
+ metadata = task.setdefault("metadata", {})
653
+ if "needs_journaling" in metadata:
654
+ metadata["needs_journaling"] = False
655
+ metadata["journaled_at"] = timestamp
656
+
657
+
658
+ def _recalculate_counts(spec_data: Dict[str, Any]) -> None:
659
+ """Recalculate task counts for all nodes in hierarchy."""
660
+ hierarchy = spec_data.get("hierarchy", {})
661
+ if not hierarchy:
662
+ return
663
+
664
+ def calculate_node(node_id: str) -> tuple:
665
+ """Return (total_tasks, completed_tasks) for a node."""
666
+ node = hierarchy.get(node_id, {})
667
+ children = node.get("children", [])
668
+ node_type = node.get("type", "")
669
+ status = node.get("status", "")
670
+
671
+ if not children:
672
+ # Leaf node
673
+ if node_type in {"task", "subtask", "verify"}:
674
+ total = 1
675
+ completed = 1 if status == "completed" else 0
676
+ else:
677
+ total = 0
678
+ completed = 0
679
+ else:
680
+ # Parent node: sum children
681
+ total = 0
682
+ completed = 0
683
+ for child_id in children:
684
+ if child_id in hierarchy:
685
+ child_total, child_completed = calculate_node(child_id)
686
+ total += child_total
687
+ completed += child_completed
688
+
689
+ node["total_tasks"] = total
690
+ node["completed_tasks"] = completed
691
+ return total, completed
692
+
693
+ if "spec-root" in hierarchy:
694
+ calculate_node("spec-root")