foundry-mcp 0.3.3__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (60)
  1. foundry_mcp/__init__.py +7 -1
  2. foundry_mcp/cli/commands/plan.py +10 -3
  3. foundry_mcp/cli/commands/review.py +19 -4
  4. foundry_mcp/cli/commands/specs.py +38 -208
  5. foundry_mcp/cli/output.py +3 -3
  6. foundry_mcp/config.py +235 -5
  7. foundry_mcp/core/ai_consultation.py +146 -9
  8. foundry_mcp/core/discovery.py +6 -6
  9. foundry_mcp/core/error_store.py +2 -2
  10. foundry_mcp/core/intake.py +933 -0
  11. foundry_mcp/core/llm_config.py +20 -2
  12. foundry_mcp/core/metrics_store.py +2 -2
  13. foundry_mcp/core/progress.py +70 -0
  14. foundry_mcp/core/prompts/fidelity_review.py +149 -4
  15. foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
  16. foundry_mcp/core/prompts/plan_review.py +5 -1
  17. foundry_mcp/core/providers/claude.py +6 -47
  18. foundry_mcp/core/providers/codex.py +6 -57
  19. foundry_mcp/core/providers/cursor_agent.py +3 -44
  20. foundry_mcp/core/providers/gemini.py +6 -57
  21. foundry_mcp/core/providers/opencode.py +35 -5
  22. foundry_mcp/core/research/__init__.py +68 -0
  23. foundry_mcp/core/research/memory.py +425 -0
  24. foundry_mcp/core/research/models.py +437 -0
  25. foundry_mcp/core/research/workflows/__init__.py +22 -0
  26. foundry_mcp/core/research/workflows/base.py +204 -0
  27. foundry_mcp/core/research/workflows/chat.py +271 -0
  28. foundry_mcp/core/research/workflows/consensus.py +396 -0
  29. foundry_mcp/core/research/workflows/ideate.py +682 -0
  30. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  31. foundry_mcp/core/responses.py +450 -0
  32. foundry_mcp/core/spec.py +2438 -236
  33. foundry_mcp/core/task.py +1064 -19
  34. foundry_mcp/core/testing.py +512 -123
  35. foundry_mcp/core/validation.py +313 -42
  36. foundry_mcp/dashboard/components/charts.py +0 -57
  37. foundry_mcp/dashboard/launcher.py +11 -0
  38. foundry_mcp/dashboard/views/metrics.py +25 -35
  39. foundry_mcp/dashboard/views/overview.py +1 -65
  40. foundry_mcp/resources/specs.py +25 -25
  41. foundry_mcp/schemas/intake-schema.json +89 -0
  42. foundry_mcp/schemas/sdd-spec-schema.json +33 -5
  43. foundry_mcp/server.py +38 -0
  44. foundry_mcp/tools/unified/__init__.py +4 -2
  45. foundry_mcp/tools/unified/authoring.py +2423 -267
  46. foundry_mcp/tools/unified/documentation_helpers.py +69 -6
  47. foundry_mcp/tools/unified/environment.py +235 -6
  48. foundry_mcp/tools/unified/error.py +18 -1
  49. foundry_mcp/tools/unified/lifecycle.py +8 -0
  50. foundry_mcp/tools/unified/plan.py +113 -1
  51. foundry_mcp/tools/unified/research.py +658 -0
  52. foundry_mcp/tools/unified/review.py +370 -16
  53. foundry_mcp/tools/unified/spec.py +367 -0
  54. foundry_mcp/tools/unified/task.py +1163 -48
  55. foundry_mcp/tools/unified/test.py +69 -8
  56. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/METADATA +7 -1
  57. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/RECORD +60 -48
  58. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/WHEEL +0 -0
  59. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/entry_points.txt +0 -0
  60. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.7.0.dist-info}/licenses/LICENSE +0 -0
foundry_mcp/core/research/memory.py (new file)
@@ -0,0 +1,425 @@
+ """File-based storage backend for research workflows.
+
+ Provides thread-safe persistence for conversation threads, investigation states,
+ and ideation sessions using file locking.
+ """
+
+ import json
+ import logging
+ from datetime import datetime, timedelta
+ from pathlib import Path
+ from typing import Generic, Optional, TypeVar
+
+ from filelock import FileLock
+
+ from foundry_mcp.core.research.models import (
+     ConsensusState,
+     ConversationThread,
+     IdeationState,
+     ThinkDeepState,
+     ThreadStatus,
+ )
+
+ logger = logging.getLogger(__name__)
+
+ T = TypeVar("T")
+
+
+ class FileStorageBackend(Generic[T]):
+     """Generic file-based storage with locking and TTL support."""
+
+     def __init__(
+         self,
+         storage_path: Path,
+         model_class: type[T],
+         ttl_hours: Optional[int] = 24,
+     ) -> None:
+         """Initialize storage backend.
+
+         Args:
+             storage_path: Directory to store files
+             model_class: Pydantic model class for serialization
+             ttl_hours: Time-to-live in hours (None for no expiry)
+         """
+         self.storage_path = storage_path
+         self.model_class = model_class
+         self.ttl_hours = ttl_hours
+         self._ensure_directory()
+
+     def _ensure_directory(self) -> None:
+         """Create storage directory if it doesn't exist."""
+         self.storage_path.mkdir(parents=True, exist_ok=True)
+
+     def _get_file_path(self, item_id: str) -> Path:
+         """Get file path for an item ID."""
+         # Sanitize ID to prevent path traversal
+         safe_id = "".join(c for c in item_id if c.isalnum() or c in "-_")
+         return self.storage_path / f"{safe_id}.json"
+
+     def _get_lock_path(self, item_id: str) -> Path:
+         """Get lock file path for an item ID."""
+         return self._get_file_path(item_id).with_suffix(".lock")
+
+     def _is_expired(self, file_path: Path) -> bool:
+         """Check if a file has expired based on TTL."""
+         if self.ttl_hours is None:
+             return False
+
+         try:
+             mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
+             expiry = mtime + timedelta(hours=self.ttl_hours)
+             return datetime.now() > expiry
+         except OSError:
+             return True
+
+     def save(self, item_id: str, item: T) -> None:
+         """Save an item to storage with locking.
+
+         Args:
+             item_id: Unique identifier for the item
+             item: Pydantic model instance to save
+         """
+         file_path = self._get_file_path(item_id)
+         lock_path = self._get_lock_path(item_id)
+
+         with FileLock(lock_path, timeout=10):
+             data = item.model_dump(mode="json")
+             file_path.write_text(json.dumps(data, indent=2, default=str))
+             logger.debug("Saved %s to %s", item_id, file_path)
+
+     def load(self, item_id: str) -> Optional[T]:
+         """Load an item from storage with locking.
+
+         Args:
+             item_id: Unique identifier for the item
+
+         Returns:
+             The loaded item or None if not found/expired
+         """
+         file_path = self._get_file_path(item_id)
+         lock_path = self._get_lock_path(item_id)
+
+         if not file_path.exists():
+             return None
+
+         if self._is_expired(file_path):
+             logger.debug("Item %s has expired, removing", item_id)
+             self.delete(item_id)
+             return None
+
+         with FileLock(lock_path, timeout=10):
+             try:
+                 data = json.loads(file_path.read_text())
+                 return self.model_class.model_validate(data)
+             except (json.JSONDecodeError, ValueError) as exc:
+                 logger.warning("Failed to load %s: %s", item_id, exc)
+                 return None
+
+     def delete(self, item_id: str) -> bool:
+         """Delete an item from storage.
+
+         Args:
+             item_id: Unique identifier for the item
+
+         Returns:
+             True if deleted, False if not found
+         """
+         file_path = self._get_file_path(item_id)
+         lock_path = self._get_lock_path(item_id)
+
+         if not file_path.exists():
+             return False
+
+         with FileLock(lock_path, timeout=10):
+             try:
+                 file_path.unlink()
+                 logger.debug("Deleted %s", item_id)
+                 # Clean up lock file
+                 if lock_path.exists():
+                     lock_path.unlink()
+                 return True
+             except OSError as exc:
+                 logger.warning("Failed to delete %s: %s", item_id, exc)
+                 return False
+
+     def list_ids(self) -> list[str]:
+         """List all item IDs in storage.
+
+         Returns:
+             List of item IDs (without .json extension)
+         """
+         if not self.storage_path.exists():
+             return []
+
+         ids = []
+         for file_path in self.storage_path.glob("*.json"):
+             item_id = file_path.stem
+             # Skip expired items
+             if not self._is_expired(file_path):
+                 ids.append(item_id)
+         return sorted(ids)
+
+     def cleanup_expired(self) -> int:
+         """Remove all expired items from storage.
+
+         Returns:
+             Number of items removed
+         """
+         if self.ttl_hours is None:
+             return 0
+
+         removed = 0
+         for file_path in self.storage_path.glob("*.json"):
+             if self._is_expired(file_path):
+                 item_id = file_path.stem
+                 if self.delete(item_id):
+                     removed += 1
+         return removed
+
+
+ class ResearchMemory:
+     """Unified memory interface for all research workflow states.
+
+     Provides CRUD operations for conversation threads, investigation states,
+     ideation sessions, and consensus states.
+     """
+
+     def __init__(
+         self,
+         base_path: Optional[Path] = None,
+         ttl_hours: int = 24,
+     ) -> None:
+         """Initialize research memory.
+
+         Args:
+             base_path: Base directory for all storage (default: ~/.foundry-mcp/research)
+             ttl_hours: Default TTL for all storages
+         """
+         if base_path is None:
+             base_path = Path.home() / ".foundry-mcp" / "research"
+
+         self.base_path = base_path
+         self.ttl_hours = ttl_hours
+
+         # Initialize storage backends for each type
+         self._threads = FileStorageBackend(
+             storage_path=base_path / "threads",
+             model_class=ConversationThread,
+             ttl_hours=ttl_hours,
+         )
+         self._investigations = FileStorageBackend(
+             storage_path=base_path / "investigations",
+             model_class=ThinkDeepState,
+             ttl_hours=ttl_hours,
+         )
+         self._ideations = FileStorageBackend(
+             storage_path=base_path / "ideations",
+             model_class=IdeationState,
+             ttl_hours=ttl_hours,
+         )
+         self._consensus = FileStorageBackend(
+             storage_path=base_path / "consensus",
+             model_class=ConsensusState,
+             ttl_hours=ttl_hours,
+         )
+
+     # =========================================================================
+     # Thread operations (CHAT workflow)
+     # =========================================================================
+
+     def save_thread(self, thread: ConversationThread) -> None:
+         """Save a conversation thread."""
+         self._threads.save(thread.id, thread)
+
+     def load_thread(self, thread_id: str) -> Optional[ConversationThread]:
+         """Load a conversation thread by ID."""
+         return self._threads.load(thread_id)
+
+     def delete_thread(self, thread_id: str) -> bool:
+         """Delete a conversation thread."""
+         return self._threads.delete(thread_id)
+
+     def list_threads(
+         self,
+         status: Optional[ThreadStatus] = None,
+         limit: Optional[int] = None,
+     ) -> list[ConversationThread]:
+         """List conversation threads, optionally filtered by status.
+
+         Args:
+             status: Filter by thread status
+             limit: Maximum number of threads to return
+
+         Returns:
+             List of conversation threads
+         """
+         threads = []
+         for thread_id in self._threads.list_ids():
+             thread = self._threads.load(thread_id)
+             if thread is not None:
+                 if status is None or thread.status == status:
+                     threads.append(thread)
+
+         # Sort by updated_at descending
+         threads.sort(key=lambda t: t.updated_at, reverse=True)
+
+         if limit is not None:
+             threads = threads[:limit]
+
+         return threads
+
+     # =========================================================================
+     # Investigation operations (THINKDEEP workflow)
+     # =========================================================================
+
+     def save_investigation(self, investigation: ThinkDeepState) -> None:
+         """Save an investigation state."""
+         self._investigations.save(investigation.id, investigation)
+
+     def load_investigation(self, investigation_id: str) -> Optional[ThinkDeepState]:
+         """Load an investigation state by ID."""
+         return self._investigations.load(investigation_id)
+
+     def delete_investigation(self, investigation_id: str) -> bool:
+         """Delete an investigation state."""
+         return self._investigations.delete(investigation_id)
+
+     def list_investigations(
+         self,
+         limit: Optional[int] = None,
+     ) -> list[ThinkDeepState]:
+         """List investigation states.
+
+         Args:
+             limit: Maximum number of investigations to return
+
+         Returns:
+             List of investigation states
+         """
+         investigations = []
+         for inv_id in self._investigations.list_ids():
+             inv = self._investigations.load(inv_id)
+             if inv is not None:
+                 investigations.append(inv)
+
+         # Sort by updated_at descending
+         investigations.sort(key=lambda i: i.updated_at, reverse=True)
+
+         if limit is not None:
+             investigations = investigations[:limit]
+
+         return investigations
+
+     # =========================================================================
+     # Ideation operations (IDEATE workflow)
+     # =========================================================================
+
+     def save_ideation(self, ideation: IdeationState) -> None:
+         """Save an ideation state."""
+         self._ideations.save(ideation.id, ideation)
+
+     def load_ideation(self, ideation_id: str) -> Optional[IdeationState]:
+         """Load an ideation state by ID."""
+         return self._ideations.load(ideation_id)
+
+     def delete_ideation(self, ideation_id: str) -> bool:
+         """Delete an ideation state."""
+         return self._ideations.delete(ideation_id)
+
+     def list_ideations(
+         self,
+         limit: Optional[int] = None,
+     ) -> list[IdeationState]:
+         """List ideation states.
+
+         Args:
+             limit: Maximum number of ideations to return
+
+         Returns:
+             List of ideation states
+         """
+         ideations = []
+         for ide_id in self._ideations.list_ids():
+             ide = self._ideations.load(ide_id)
+             if ide is not None:
+                 ideations.append(ide)
+
+         # Sort by updated_at descending
+         ideations.sort(key=lambda i: i.updated_at, reverse=True)
+
+         if limit is not None:
+             ideations = ideations[:limit]
+
+         return ideations
+
+     # =========================================================================
+     # Consensus operations (CONSENSUS workflow)
+     # =========================================================================
+
+     def save_consensus(self, consensus: ConsensusState) -> None:
+         """Save a consensus state."""
+         self._consensus.save(consensus.id, consensus)
+
+     def load_consensus(self, consensus_id: str) -> Optional[ConsensusState]:
+         """Load a consensus state by ID."""
+         return self._consensus.load(consensus_id)
+
+     def delete_consensus(self, consensus_id: str) -> bool:
+         """Delete a consensus state."""
+         return self._consensus.delete(consensus_id)
+
+     def list_consensus(
+         self,
+         limit: Optional[int] = None,
+     ) -> list[ConsensusState]:
+         """List consensus states.
+
+         Args:
+             limit: Maximum number of consensus states to return
+
+         Returns:
+             List of consensus states
+         """
+         states = []
+         for cons_id in self._consensus.list_ids():
+             cons = self._consensus.load(cons_id)
+             if cons is not None:
+                 states.append(cons)
+
+         # Sort by created_at descending
+         states.sort(key=lambda c: c.created_at, reverse=True)
+
+         if limit is not None:
+             states = states[:limit]
+
+         return states
+
+     # =========================================================================
+     # Maintenance operations
+     # =========================================================================
+
+     def cleanup_all_expired(self) -> dict[str, int]:
+         """Remove expired items from all storages.
+
+         Returns:
+             Dict with counts of removed items per storage type
+         """
+         return {
+             "threads": self._threads.cleanup_expired(),
+             "investigations": self._investigations.cleanup_expired(),
+             "ideations": self._ideations.cleanup_expired(),
+             "consensus": self._consensus.cleanup_expired(),
+         }
+
+     def get_storage_stats(self) -> dict[str, int]:
+         """Get count of items in each storage.
+
+         Returns:
+             Dict with counts per storage type
+         """
+         return {
+             "threads": len(self._threads.list_ids()),
+             "investigations": len(self._investigations.list_ids()),
+             "ideations": len(self._ideations.list_ids()),
+             "consensus": len(self._consensus.list_ids()),
+         }
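
For orientation, here is a minimal usage sketch of the two classes added in this file, written against the API exactly as it appears in the diff above. The Note model and the /tmp/foundry-demo paths are illustrative stand-ins and are not part of foundry-mcp; the constructor parameters and method names are taken from the code shown.

from pathlib import Path

from pydantic import BaseModel

from foundry_mcp.core.research.memory import FileStorageBackend, ResearchMemory


class Note(BaseModel):
    """Hypothetical stand-in model; not part of foundry-mcp."""
    id: str
    text: str


# Generic backend: one directory of JSON files, guarded by per-item lock files.
backend = FileStorageBackend(
    storage_path=Path("/tmp/foundry-demo/notes"),
    model_class=Note,
    ttl_hours=1,  # items older than one hour are treated as expired
)
backend.save("note-1", Note(id="note-1", text="hello"))
print(backend.load("note-1"))     # Note instance, or None if missing/expired
print(backend.list_ids())         # ['note-1']
print(backend.cleanup_expired())  # number of expired items removed
backend.delete("note-1")

# Unified facade: four backends (threads, investigations, ideations, consensus)
# rooted at a common base path.
memory = ResearchMemory(base_path=Path("/tmp/foundry-demo/research"), ttl_hours=24)
print(memory.get_storage_stats())    # e.g. {'threads': 0, 'investigations': 0, ...}
print(memory.cleanup_all_expired())  # per-storage counts of removed items

Because items are keyed by sanitized IDs and expiry is derived from file mtime, callers that want long-lived state would pass ttl_hours=None, as the constructor in the diff allows.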