foundry-mcp 0.3.3__py3-none-any.whl → 0.8.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. foundry_mcp/__init__.py +7 -1
  2. foundry_mcp/cli/__init__.py +0 -13
  3. foundry_mcp/cli/commands/plan.py +10 -3
  4. foundry_mcp/cli/commands/review.py +19 -4
  5. foundry_mcp/cli/commands/session.py +1 -8
  6. foundry_mcp/cli/commands/specs.py +38 -208
  7. foundry_mcp/cli/context.py +39 -0
  8. foundry_mcp/cli/output.py +3 -3
  9. foundry_mcp/config.py +615 -11
  10. foundry_mcp/core/ai_consultation.py +146 -9
  11. foundry_mcp/core/batch_operations.py +1196 -0
  12. foundry_mcp/core/discovery.py +7 -7
  13. foundry_mcp/core/error_store.py +2 -2
  14. foundry_mcp/core/intake.py +933 -0
  15. foundry_mcp/core/llm_config.py +28 -2
  16. foundry_mcp/core/metrics_store.py +2 -2
  17. foundry_mcp/core/naming.py +25 -2
  18. foundry_mcp/core/progress.py +70 -0
  19. foundry_mcp/core/prometheus.py +0 -13
  20. foundry_mcp/core/prompts/fidelity_review.py +149 -4
  21. foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
  22. foundry_mcp/core/prompts/plan_review.py +5 -1
  23. foundry_mcp/core/providers/__init__.py +12 -0
  24. foundry_mcp/core/providers/base.py +39 -0
  25. foundry_mcp/core/providers/claude.py +51 -48
  26. foundry_mcp/core/providers/codex.py +70 -60
  27. foundry_mcp/core/providers/cursor_agent.py +25 -47
  28. foundry_mcp/core/providers/detectors.py +34 -7
  29. foundry_mcp/core/providers/gemini.py +69 -58
  30. foundry_mcp/core/providers/opencode.py +101 -47
  31. foundry_mcp/core/providers/package-lock.json +4 -4
  32. foundry_mcp/core/providers/package.json +1 -1
  33. foundry_mcp/core/providers/validation.py +128 -0
  34. foundry_mcp/core/research/__init__.py +68 -0
  35. foundry_mcp/core/research/memory.py +528 -0
  36. foundry_mcp/core/research/models.py +1220 -0
  37. foundry_mcp/core/research/providers/__init__.py +40 -0
  38. foundry_mcp/core/research/providers/base.py +242 -0
  39. foundry_mcp/core/research/providers/google.py +507 -0
  40. foundry_mcp/core/research/providers/perplexity.py +442 -0
  41. foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
  42. foundry_mcp/core/research/providers/tavily.py +383 -0
  43. foundry_mcp/core/research/workflows/__init__.py +25 -0
  44. foundry_mcp/core/research/workflows/base.py +298 -0
  45. foundry_mcp/core/research/workflows/chat.py +271 -0
  46. foundry_mcp/core/research/workflows/consensus.py +539 -0
  47. foundry_mcp/core/research/workflows/deep_research.py +4020 -0
  48. foundry_mcp/core/research/workflows/ideate.py +682 -0
  49. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  50. foundry_mcp/core/responses.py +690 -0
  51. foundry_mcp/core/spec.py +2439 -236
  52. foundry_mcp/core/task.py +1205 -31
  53. foundry_mcp/core/testing.py +512 -123
  54. foundry_mcp/core/validation.py +319 -43
  55. foundry_mcp/dashboard/components/charts.py +0 -57
  56. foundry_mcp/dashboard/launcher.py +11 -0
  57. foundry_mcp/dashboard/views/metrics.py +25 -35
  58. foundry_mcp/dashboard/views/overview.py +1 -65
  59. foundry_mcp/resources/specs.py +25 -25
  60. foundry_mcp/schemas/intake-schema.json +89 -0
  61. foundry_mcp/schemas/sdd-spec-schema.json +33 -5
  62. foundry_mcp/server.py +0 -14
  63. foundry_mcp/tools/unified/__init__.py +39 -18
  64. foundry_mcp/tools/unified/authoring.py +2371 -248
  65. foundry_mcp/tools/unified/documentation_helpers.py +69 -6
  66. foundry_mcp/tools/unified/environment.py +434 -32
  67. foundry_mcp/tools/unified/error.py +18 -1
  68. foundry_mcp/tools/unified/lifecycle.py +8 -0
  69. foundry_mcp/tools/unified/plan.py +133 -2
  70. foundry_mcp/tools/unified/provider.py +0 -40
  71. foundry_mcp/tools/unified/research.py +1283 -0
  72. foundry_mcp/tools/unified/review.py +374 -17
  73. foundry_mcp/tools/unified/review_helpers.py +16 -1
  74. foundry_mcp/tools/unified/server.py +9 -24
  75. foundry_mcp/tools/unified/spec.py +367 -0
  76. foundry_mcp/tools/unified/task.py +1664 -30
  77. foundry_mcp/tools/unified/test.py +69 -8
  78. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/METADATA +8 -1
  79. foundry_mcp-0.8.10.dist-info/RECORD +153 -0
  80. foundry_mcp/cli/flags.py +0 -266
  81. foundry_mcp/core/feature_flags.py +0 -592
  82. foundry_mcp-0.3.3.dist-info/RECORD +0 -135
  83. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/WHEEL +0 -0
  84. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/entry_points.txt +0 -0
  85. {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,528 @@
1
+ """File-based storage backend for research workflows.
2
+
3
+ Provides thread-safe persistence for conversation threads, investigation states,
4
+ and ideation sessions using file locking.
5
+ """
6
+
7
+ import json
8
+ import logging
9
+ from datetime import datetime, timedelta
10
+ from pathlib import Path
11
+ from typing import Generic, Optional, TypeVar
12
+
13
+ from filelock import FileLock
14
+
15
+ from foundry_mcp.core.research.models import (
16
+ ConsensusState,
17
+ ConversationThread,
18
+ DeepResearchState,
19
+ IdeationState,
20
+ ThinkDeepState,
21
+ ThreadStatus,
22
+ )
23
+
24
+ logger = logging.getLogger(__name__)
25
+
26
+ T = TypeVar("T")
27
+
28
+
29
class FileStorageBackend(Generic[T]):
    """Generic file-based storage with locking and TTL support.

    Items are serialized as ``<safe_id>.json`` files inside ``storage_path``.
    Every read/write/delete is guarded by a per-item ``FileLock`` so that
    concurrent processes do not interleave writes or read partial files.
    """

    def __init__(
        self,
        storage_path: Path,
        model_class: type[T],
        ttl_hours: Optional[int] = 24,
    ) -> None:
        """Initialize storage backend.

        Args:
            storage_path: Directory to store files
            model_class: Pydantic model class for serialization
            ttl_hours: Time-to-live in hours (None for no expiry)
        """
        self.storage_path = storage_path
        self.model_class = model_class
        self.ttl_hours = ttl_hours
        self._ensure_directory()

    def _ensure_directory(self) -> None:
        """Create storage directory if it doesn't exist."""
        self.storage_path.mkdir(parents=True, exist_ok=True)

    def _get_file_path(self, item_id: str) -> Path:
        """Get file path for an item ID.

        Raises:
            ValueError: If ``item_id`` contains no filesystem-safe characters
                (the sanitized name would collapse to the hidden file ``.json``).
        """
        # Sanitize ID to prevent path traversal (strips "/", "..", etc.).
        safe_id = "".join(c for c in item_id if c.isalnum() or c in "-_")
        if not safe_id:
            raise ValueError(f"item_id {item_id!r} contains no usable characters")
        return self.storage_path / f"{safe_id}.json"

    def _get_lock_path(self, item_id: str) -> Path:
        """Get lock file path for an item ID."""
        return self._get_file_path(item_id).with_suffix(".lock")

    def _is_expired(self, file_path: Path) -> bool:
        """Check if a file has expired based on TTL.

        A file that cannot be stat'ed (e.g. already removed) is treated as
        expired so callers clean it up instead of retrying forever.
        """
        if self.ttl_hours is None:
            return False

        try:
            mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
            expiry = mtime + timedelta(hours=self.ttl_hours)
            return datetime.now() > expiry
        except OSError:
            return True

    def save(self, item_id: str, item: T) -> None:
        """Save an item to storage with locking.

        The payload is written to a sibling temp file and atomically swapped
        into place, so a crash mid-write never leaves a truncated JSON file.

        Args:
            item_id: Unique identifier for the item
            item: Pydantic model instance to save
        """
        file_path = self._get_file_path(item_id)
        lock_path = self._get_lock_path(item_id)

        with FileLock(lock_path, timeout=10):
            data = item.model_dump(mode="json")
            tmp_path = file_path.with_name(file_path.name + ".tmp")
            tmp_path.write_text(json.dumps(data, indent=2, default=str))
            tmp_path.replace(file_path)  # atomic on POSIX and Windows
            logger.debug("Saved %s to %s", item_id, file_path)

    def load(self, item_id: str) -> Optional[T]:
        """Load an item from storage with locking.

        Args:
            item_id: Unique identifier for the item

        Returns:
            The loaded item or None if not found/expired
        """
        file_path = self._get_file_path(item_id)
        lock_path = self._get_lock_path(item_id)

        # Acquire the lock before the existence/expiry checks so a concurrent
        # save/delete cannot race between the check and the read (TOCTOU).
        with FileLock(lock_path, timeout=10):
            if not file_path.exists():
                return None

            if self._is_expired(file_path):
                logger.debug("Item %s has expired, removing", item_id)
                # Unlink directly; calling self.delete() here would try to
                # re-acquire the same lock file and deadlock.
                try:
                    file_path.unlink()
                except OSError:
                    pass
                return None

            try:
                data = json.loads(file_path.read_text())
                return self.model_class.model_validate(data)
            except (json.JSONDecodeError, ValueError) as exc:
                # pydantic's ValidationError subclasses ValueError.
                logger.warning("Failed to load %s: %s", item_id, exc)
                return None

    def delete(self, item_id: str) -> bool:
        """Delete an item from storage.

        Args:
            item_id: Unique identifier for the item

        Returns:
            True if deleted, False if not found
        """
        file_path = self._get_file_path(item_id)
        lock_path = self._get_lock_path(item_id)

        if not file_path.exists():
            return False

        with FileLock(lock_path, timeout=10):
            try:
                file_path.unlink()
                logger.debug("Deleted %s", item_id)
                # Best-effort cleanup of the companion lock file.
                if lock_path.exists():
                    lock_path.unlink()
                return True
            except OSError as exc:
                logger.warning("Failed to delete %s: %s", item_id, exc)
                return False

    def list_ids(self) -> list[str]:
        """List all item IDs in storage.

        Returns:
            Sorted list of item IDs (without .json extension), excluding
            items whose files have already expired.
        """
        if not self.storage_path.exists():
            return []

        return sorted(
            file_path.stem
            for file_path in self.storage_path.glob("*.json")
            if not self._is_expired(file_path)
        )

    def cleanup_expired(self) -> int:
        """Remove all expired items from storage.

        Returns:
            Number of items removed
        """
        if self.ttl_hours is None:
            return 0

        removed = 0
        for file_path in self.storage_path.glob("*.json"):
            if self._is_expired(file_path) and self.delete(file_path.stem):
                removed += 1
        return removed
179
+
180
+
181
class ResearchMemory:
    """Unified memory interface for all research workflow states.

    Provides CRUD operations for conversation threads, investigation states,
    ideation sessions, and consensus states. Each workflow type is persisted
    by its own :class:`FileStorageBackend` under ``base_path``.
    """

    def __init__(
        self,
        base_path: Optional[Path] = None,
        ttl_hours: int = 24,
    ) -> None:
        """Initialize research memory.

        Args:
            base_path: Base directory for all storage (default: ~/.foundry-mcp/research)
            ttl_hours: Default TTL for all storages
        """
        if base_path is None:
            base_path = Path.home() / ".foundry-mcp" / "research"

        self.base_path = base_path
        self.ttl_hours = ttl_hours

        # One backend per workflow type, each in its own subdirectory.
        self._threads = FileStorageBackend(
            storage_path=base_path / "threads",
            model_class=ConversationThread,
            ttl_hours=ttl_hours,
        )
        self._investigations = FileStorageBackend(
            storage_path=base_path / "investigations",
            model_class=ThinkDeepState,
            ttl_hours=ttl_hours,
        )
        self._ideations = FileStorageBackend(
            storage_path=base_path / "ideations",
            model_class=IdeationState,
            ttl_hours=ttl_hours,
        )
        self._consensus = FileStorageBackend(
            storage_path=base_path / "consensus",
            model_class=ConsensusState,
            ttl_hours=ttl_hours,
        )
        self._deep_research = FileStorageBackend(
            storage_path=base_path / "deep_research",
            model_class=DeepResearchState,
            ttl_hours=ttl_hours,
        )

    # =========================================================================
    # Internal helpers
    # =========================================================================

    @staticmethod
    def _list_all(backend, sort_key, limit=None, predicate=None):
        """Load every live item from *backend*, filter, sort, and truncate.

        Shared implementation behind all ``list_*`` methods.

        Args:
            backend: FileStorageBackend to enumerate
            sort_key: Key callable; results are sorted by it descending
            limit: Optional maximum number of items to return
            predicate: Optional filter applied to each loaded item

        Returns:
            List of loaded items (items that fail to load are skipped)
        """
        items = []
        for item_id in backend.list_ids():
            item = backend.load(item_id)
            if item is None:
                continue
            if predicate is not None and not predicate(item):
                continue
            items.append(item)
        items.sort(key=sort_key, reverse=True)
        return items if limit is None else items[:limit]

    def _backends(self):
        """Map of storage-type name -> backend, for maintenance operations."""
        return {
            "threads": self._threads,
            "investigations": self._investigations,
            "ideations": self._ideations,
            "consensus": self._consensus,
            "deep_research": self._deep_research,
        }

    # =========================================================================
    # Thread operations (CHAT workflow)
    # =========================================================================

    def save_thread(self, thread: ConversationThread) -> None:
        """Save a conversation thread."""
        self._threads.save(thread.id, thread)

    def load_thread(self, thread_id: str) -> Optional[ConversationThread]:
        """Load a conversation thread by ID."""
        return self._threads.load(thread_id)

    def delete_thread(self, thread_id: str) -> bool:
        """Delete a conversation thread."""
        return self._threads.delete(thread_id)

    def list_threads(
        self,
        status: Optional[ThreadStatus] = None,
        limit: Optional[int] = None,
    ) -> list[ConversationThread]:
        """List conversation threads, optionally filtered by status.

        Args:
            status: Filter by thread status
            limit: Maximum number of threads to return

        Returns:
            List of conversation threads, newest (updated_at) first
        """
        return self._list_all(
            self._threads,
            sort_key=lambda t: t.updated_at,
            limit=limit,
            predicate=None if status is None else (lambda t: t.status == status),
        )

    # =========================================================================
    # Investigation operations (THINKDEEP workflow)
    # =========================================================================

    def save_investigation(self, investigation: ThinkDeepState) -> None:
        """Save an investigation state."""
        self._investigations.save(investigation.id, investigation)

    def load_investigation(self, investigation_id: str) -> Optional[ThinkDeepState]:
        """Load an investigation state by ID."""
        return self._investigations.load(investigation_id)

    def delete_investigation(self, investigation_id: str) -> bool:
        """Delete an investigation state."""
        return self._investigations.delete(investigation_id)

    def list_investigations(
        self,
        limit: Optional[int] = None,
    ) -> list[ThinkDeepState]:
        """List investigation states.

        Args:
            limit: Maximum number of investigations to return

        Returns:
            List of investigation states, newest (updated_at) first
        """
        return self._list_all(
            self._investigations,
            sort_key=lambda i: i.updated_at,
            limit=limit,
        )

    # =========================================================================
    # Ideation operations (IDEATE workflow)
    # =========================================================================

    def save_ideation(self, ideation: IdeationState) -> None:
        """Save an ideation state."""
        self._ideations.save(ideation.id, ideation)

    def load_ideation(self, ideation_id: str) -> Optional[IdeationState]:
        """Load an ideation state by ID."""
        return self._ideations.load(ideation_id)

    def delete_ideation(self, ideation_id: str) -> bool:
        """Delete an ideation state."""
        return self._ideations.delete(ideation_id)

    def list_ideations(
        self,
        limit: Optional[int] = None,
    ) -> list[IdeationState]:
        """List ideation states.

        Args:
            limit: Maximum number of ideations to return

        Returns:
            List of ideation states, newest (updated_at) first
        """
        return self._list_all(
            self._ideations,
            sort_key=lambda i: i.updated_at,
            limit=limit,
        )

    # =========================================================================
    # Consensus operations (CONSENSUS workflow)
    # =========================================================================

    def save_consensus(self, consensus: ConsensusState) -> None:
        """Save a consensus state."""
        self._consensus.save(consensus.id, consensus)

    def load_consensus(self, consensus_id: str) -> Optional[ConsensusState]:
        """Load a consensus state by ID."""
        return self._consensus.load(consensus_id)

    def delete_consensus(self, consensus_id: str) -> bool:
        """Delete a consensus state."""
        return self._consensus.delete(consensus_id)

    def list_consensus(
        self,
        limit: Optional[int] = None,
    ) -> list[ConsensusState]:
        """List consensus states.

        Args:
            limit: Maximum number of consensus states to return

        Returns:
            List of consensus states, newest (created_at) first
        """
        # NOTE: consensus states sort by created_at, unlike the other types.
        return self._list_all(
            self._consensus,
            sort_key=lambda c: c.created_at,
            limit=limit,
        )

    # =========================================================================
    # Deep research operations (DEEP_RESEARCH workflow)
    # =========================================================================

    def save_deep_research(self, deep_research: DeepResearchState) -> None:
        """Save a deep research state."""
        self._deep_research.save(deep_research.id, deep_research)

    def load_deep_research(self, deep_research_id: str) -> Optional[DeepResearchState]:
        """Load a deep research state by ID."""
        return self._deep_research.load(deep_research_id)

    def delete_deep_research(self, deep_research_id: str) -> bool:
        """Delete a deep research state."""
        return self._deep_research.delete(deep_research_id)

    def list_deep_research(
        self,
        limit: Optional[int] = None,
        cursor: Optional[str] = None,
        completed_only: bool = False,
    ) -> list[DeepResearchState]:
        """List deep research states.

        Args:
            limit: Maximum number of states to return
            cursor: Pagination cursor (research_id to start after)
            completed_only: Filter to only completed research

        Returns:
            List of deep research states, newest (updated_at) first
        """
        states = self._list_all(
            self._deep_research,
            sort_key=lambda s: s.updated_at,
            predicate=(
                (lambda s: s.completed_at is not None) if completed_only else None
            ),
        )

        if cursor is not None:
            # Keep only states strictly after the cursor ID. An unknown
            # cursor intentionally yields an empty page.
            ids = [s.id for s in states]
            try:
                states = states[ids.index(cursor) + 1 :]
            except ValueError:
                states = []

        # Limit is applied after cursor filtering (page size semantics).
        if limit is not None:
            states = states[:limit]

        return states

    # =========================================================================
    # Maintenance operations
    # =========================================================================

    def cleanup_all_expired(self) -> dict[str, int]:
        """Remove expired items from all storages.

        Returns:
            Dict with counts of removed items per storage type
        """
        return {
            name: backend.cleanup_expired()
            for name, backend in self._backends().items()
        }

    def get_storage_stats(self) -> dict[str, int]:
        """Get count of items in each storage.

        Returns:
            Dict with counts per storage type
        """
        return {
            name: len(backend.list_ids())
            for name, backend in self._backends().items()
        }

    # =========================================================================
    # Universal session lookup
    # =========================================================================

    def load_session_by_id(
        self, session_id: str
    ) -> Optional[
        ConversationThread
        | ThinkDeepState
        | IdeationState
        | ConsensusState
        | DeepResearchState
    ]:
        """Load any research session by its ID prefix.

        Determines the session type from the ID prefix and loads from
        the appropriate storage backend.

        Args:
            session_id: Session ID with type prefix (e.g., "thread-xxx", "consensus-xxx")

        Returns:
            The session state object, or None if not found
        """
        # Prefix -> loader dispatch; first match wins.
        loaders = {
            "thread-": self.load_thread,
            "investigation-": self.load_investigation,
            "ideation-": self.load_ideation,
            "consensus-": self.load_consensus,
            "deepres-": self.load_deep_research,
        }
        for prefix, loader in loaders.items():
            if session_id.startswith(prefix):
                return loader(session_id)
        return None