foundry_mcp-0.8.22-py3-none-any.whl
This diff shows the content of a publicly released package version as it appears in its public registry. It is provided for informational purposes only.
Potentially problematic release: this version of foundry-mcp has been flagged as potentially problematic.
- foundry_mcp/__init__.py +13 -0
- foundry_mcp/cli/__init__.py +67 -0
- foundry_mcp/cli/__main__.py +9 -0
- foundry_mcp/cli/agent.py +96 -0
- foundry_mcp/cli/commands/__init__.py +37 -0
- foundry_mcp/cli/commands/cache.py +137 -0
- foundry_mcp/cli/commands/dashboard.py +148 -0
- foundry_mcp/cli/commands/dev.py +446 -0
- foundry_mcp/cli/commands/journal.py +377 -0
- foundry_mcp/cli/commands/lifecycle.py +274 -0
- foundry_mcp/cli/commands/modify.py +824 -0
- foundry_mcp/cli/commands/plan.py +640 -0
- foundry_mcp/cli/commands/pr.py +393 -0
- foundry_mcp/cli/commands/review.py +667 -0
- foundry_mcp/cli/commands/session.py +472 -0
- foundry_mcp/cli/commands/specs.py +686 -0
- foundry_mcp/cli/commands/tasks.py +807 -0
- foundry_mcp/cli/commands/testing.py +676 -0
- foundry_mcp/cli/commands/validate.py +982 -0
- foundry_mcp/cli/config.py +98 -0
- foundry_mcp/cli/context.py +298 -0
- foundry_mcp/cli/logging.py +212 -0
- foundry_mcp/cli/main.py +44 -0
- foundry_mcp/cli/output.py +122 -0
- foundry_mcp/cli/registry.py +110 -0
- foundry_mcp/cli/resilience.py +178 -0
- foundry_mcp/cli/transcript.py +217 -0
- foundry_mcp/config.py +1454 -0
- foundry_mcp/core/__init__.py +144 -0
- foundry_mcp/core/ai_consultation.py +1773 -0
- foundry_mcp/core/batch_operations.py +1202 -0
- foundry_mcp/core/cache.py +195 -0
- foundry_mcp/core/capabilities.py +446 -0
- foundry_mcp/core/concurrency.py +898 -0
- foundry_mcp/core/context.py +540 -0
- foundry_mcp/core/discovery.py +1603 -0
- foundry_mcp/core/error_collection.py +728 -0
- foundry_mcp/core/error_store.py +592 -0
- foundry_mcp/core/health.py +749 -0
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/journal.py +700 -0
- foundry_mcp/core/lifecycle.py +412 -0
- foundry_mcp/core/llm_config.py +1376 -0
- foundry_mcp/core/llm_patterns.py +510 -0
- foundry_mcp/core/llm_provider.py +1569 -0
- foundry_mcp/core/logging_config.py +374 -0
- foundry_mcp/core/metrics_persistence.py +584 -0
- foundry_mcp/core/metrics_registry.py +327 -0
- foundry_mcp/core/metrics_store.py +641 -0
- foundry_mcp/core/modifications.py +224 -0
- foundry_mcp/core/naming.py +146 -0
- foundry_mcp/core/observability.py +1216 -0
- foundry_mcp/core/otel.py +452 -0
- foundry_mcp/core/otel_stubs.py +264 -0
- foundry_mcp/core/pagination.py +255 -0
- foundry_mcp/core/progress.py +387 -0
- foundry_mcp/core/prometheus.py +564 -0
- foundry_mcp/core/prompts/__init__.py +464 -0
- foundry_mcp/core/prompts/fidelity_review.py +691 -0
- foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
- foundry_mcp/core/prompts/plan_review.py +627 -0
- foundry_mcp/core/providers/__init__.py +237 -0
- foundry_mcp/core/providers/base.py +515 -0
- foundry_mcp/core/providers/claude.py +472 -0
- foundry_mcp/core/providers/codex.py +637 -0
- foundry_mcp/core/providers/cursor_agent.py +630 -0
- foundry_mcp/core/providers/detectors.py +515 -0
- foundry_mcp/core/providers/gemini.py +426 -0
- foundry_mcp/core/providers/opencode.py +718 -0
- foundry_mcp/core/providers/opencode_wrapper.js +308 -0
- foundry_mcp/core/providers/package-lock.json +24 -0
- foundry_mcp/core/providers/package.json +25 -0
- foundry_mcp/core/providers/registry.py +607 -0
- foundry_mcp/core/providers/test_provider.py +171 -0
- foundry_mcp/core/providers/validation.py +857 -0
- foundry_mcp/core/rate_limit.py +427 -0
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +528 -0
- foundry_mcp/core/research/models.py +1234 -0
- foundry_mcp/core/research/providers/__init__.py +40 -0
- foundry_mcp/core/research/providers/base.py +242 -0
- foundry_mcp/core/research/providers/google.py +507 -0
- foundry_mcp/core/research/providers/perplexity.py +442 -0
- foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
- foundry_mcp/core/research/providers/tavily.py +383 -0
- foundry_mcp/core/research/workflows/__init__.py +25 -0
- foundry_mcp/core/research/workflows/base.py +298 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +539 -0
- foundry_mcp/core/research/workflows/deep_research.py +4142 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/resilience.py +600 -0
- foundry_mcp/core/responses.py +1624 -0
- foundry_mcp/core/review.py +366 -0
- foundry_mcp/core/security.py +438 -0
- foundry_mcp/core/spec.py +4119 -0
- foundry_mcp/core/task.py +2463 -0
- foundry_mcp/core/testing.py +839 -0
- foundry_mcp/core/validation.py +2357 -0
- foundry_mcp/dashboard/__init__.py +32 -0
- foundry_mcp/dashboard/app.py +119 -0
- foundry_mcp/dashboard/components/__init__.py +17 -0
- foundry_mcp/dashboard/components/cards.py +88 -0
- foundry_mcp/dashboard/components/charts.py +177 -0
- foundry_mcp/dashboard/components/filters.py +136 -0
- foundry_mcp/dashboard/components/tables.py +195 -0
- foundry_mcp/dashboard/data/__init__.py +11 -0
- foundry_mcp/dashboard/data/stores.py +433 -0
- foundry_mcp/dashboard/launcher.py +300 -0
- foundry_mcp/dashboard/views/__init__.py +12 -0
- foundry_mcp/dashboard/views/errors.py +217 -0
- foundry_mcp/dashboard/views/metrics.py +164 -0
- foundry_mcp/dashboard/views/overview.py +96 -0
- foundry_mcp/dashboard/views/providers.py +83 -0
- foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
- foundry_mcp/dashboard/views/tool_usage.py +139 -0
- foundry_mcp/prompts/__init__.py +9 -0
- foundry_mcp/prompts/workflows.py +525 -0
- foundry_mcp/resources/__init__.py +9 -0
- foundry_mcp/resources/specs.py +591 -0
- foundry_mcp/schemas/__init__.py +38 -0
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +414 -0
- foundry_mcp/server.py +150 -0
- foundry_mcp/tools/__init__.py +10 -0
- foundry_mcp/tools/unified/__init__.py +92 -0
- foundry_mcp/tools/unified/authoring.py +3620 -0
- foundry_mcp/tools/unified/context_helpers.py +98 -0
- foundry_mcp/tools/unified/documentation_helpers.py +268 -0
- foundry_mcp/tools/unified/environment.py +1341 -0
- foundry_mcp/tools/unified/error.py +479 -0
- foundry_mcp/tools/unified/health.py +225 -0
- foundry_mcp/tools/unified/journal.py +841 -0
- foundry_mcp/tools/unified/lifecycle.py +640 -0
- foundry_mcp/tools/unified/metrics.py +777 -0
- foundry_mcp/tools/unified/plan.py +876 -0
- foundry_mcp/tools/unified/pr.py +294 -0
- foundry_mcp/tools/unified/provider.py +589 -0
- foundry_mcp/tools/unified/research.py +1283 -0
- foundry_mcp/tools/unified/review.py +1042 -0
- foundry_mcp/tools/unified/review_helpers.py +314 -0
- foundry_mcp/tools/unified/router.py +102 -0
- foundry_mcp/tools/unified/server.py +565 -0
- foundry_mcp/tools/unified/spec.py +1283 -0
- foundry_mcp/tools/unified/task.py +3846 -0
- foundry_mcp/tools/unified/test.py +431 -0
- foundry_mcp/tools/unified/verification.py +520 -0
- foundry_mcp-0.8.22.dist-info/METADATA +344 -0
- foundry_mcp-0.8.22.dist-info/RECORD +153 -0
- foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
- foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
- foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
foundry_mcp/core/error_store.py
@@ -0,0 +1,592 @@
"""
Error storage backends for the error collection infrastructure.

Provides abstract base class and concrete implementations for persisting
error records collected by ErrorCollector.
"""

from __future__ import annotations

import fcntl
import json
import logging
import threading
from abc import ABC, abstractmethod
from dataclasses import asdict
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Optional

from .error_collection import ErrorRecord

logger = logging.getLogger(__name__)


class ErrorStore(ABC):
    """Abstract base class for error storage backends."""

    @abstractmethod
    def append(self, record: ErrorRecord) -> None:
        """
        Append an error record to storage.

        Args:
            record: The error record to store
        """
        pass

    @abstractmethod
    def get(self, error_id: str) -> Optional[ErrorRecord]:
        """
        Retrieve an error record by ID.

        Args:
            error_id: The error ID to look up

        Returns:
            ErrorRecord if found, None otherwise
        """
        pass

    @abstractmethod
    def query(
        self,
        *,
        tool_name: Optional[str] = None,
        error_code: Optional[str] = None,
        error_type: Optional[str] = None,
        fingerprint: Optional[str] = None,
        provider_id: Optional[str] = None,
        since: Optional[str] = None,
        until: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> list[ErrorRecord]:
        """
        Query error records with filtering.

        Args:
            tool_name: Filter by tool name
            error_code: Filter by error code
            error_type: Filter by error type
            fingerprint: Filter by fingerprint
            provider_id: Filter by provider ID
            since: ISO 8601 timestamp - include records after this time
            until: ISO 8601 timestamp - include records before this time
            limit: Maximum number of records to return
            offset: Number of records to skip

        Returns:
            List of matching ErrorRecords
        """
        pass

    @abstractmethod
    def get_stats(self) -> dict[str, Any]:
        """
        Get aggregated error statistics.

        Returns:
            Dictionary with statistics grouped by various dimensions
        """
        pass

    @abstractmethod
    def get_patterns(self, min_count: int = 3) -> list[dict[str, Any]]:
        """
        Get recurring error patterns.

        Args:
            min_count: Minimum occurrence count to include

        Returns:
            List of patterns with fingerprint, count, and metadata
        """
        pass

    @abstractmethod
    def cleanup(self, retention_days: int, max_errors: int) -> int:
        """
        Clean up old records based on retention policy.

        Args:
            retention_days: Delete records older than this many days
            max_errors: Maximum number of errors to keep

        Returns:
            Number of records deleted
        """
        pass

    @abstractmethod
    def count(self) -> int:
        """
        Get total count of error records.

        Returns:
            Total number of stored error records
        """
        pass


class FileErrorStore(ErrorStore):
    """
    JSONL-based error storage implementation.

    Stores errors in append-only JSONL format with separate index and stats files
    for efficient querying. Thread-safe with file locking for concurrent access.

    Directory structure:
        ~/.foundry-mcp/errors/
            errors.jsonl - Append-only error log
            index.json - Fingerprint -> metadata mapping
            stats.json - Pre-computed statistics (updated periodically)
    """

    def __init__(self, storage_path: str | Path):
        """
        Initialize the file-based error store.

        Args:
            storage_path: Directory path for error storage
        """
        self.storage_path = Path(storage_path).expanduser()
        self.storage_path.mkdir(parents=True, exist_ok=True)

        self.errors_file = self.storage_path / "errors.jsonl"
        self.index_file = self.storage_path / "index.json"
        self.stats_file = self.storage_path / "stats.json"

        self._lock = threading.Lock()
        self._index: dict[str, dict[str, Any]] = {}
        self._id_index: dict[str, int] = {}  # error_id -> line number
        self._stats_dirty = False
        self._last_stats_update: Optional[datetime] = None

        # Load index on initialization
        self._load_index()

    def _load_index(self) -> None:
        """Load the index from disk."""
        if self.index_file.exists():
            try:
                with open(self.index_file, "r") as f:
                    data = json.load(f)
                    self._index = data.get("fingerprints", {})
                    self._id_index = data.get("ids", {})
            except (json.JSONDecodeError, OSError) as e:
                logger.warning(f"Failed to load error index, rebuilding: {e}")
                self._rebuild_index()
        else:
            # First run or index deleted - rebuild from errors file
            self._rebuild_index()

    def _rebuild_index(self) -> None:
        """Rebuild index from the errors JSONL file."""
        self._index = {}
        self._id_index = {}

        if not self.errors_file.exists():
            self._save_index()
            return

        line_num = 0
        try:
            with open(self.errors_file, "r") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        line_num += 1
                        continue

                    try:
                        record_dict = json.loads(line)
                        error_id = record_dict.get("id", "")
                        fingerprint = record_dict.get("fingerprint", "")
                        timestamp = record_dict.get("timestamp", "")

                        if error_id:
                            self._id_index[error_id] = line_num

                        if fingerprint:
                            if fingerprint not in self._index:
                                self._index[fingerprint] = {
                                    "count": 0,
                                    "first_seen": timestamp,
                                    "last_seen": timestamp,
                                    "error_ids": [],
                                    "tool_name": record_dict.get("tool_name"),
                                    "error_code": record_dict.get("error_code"),
                                }

                            self._index[fingerprint]["count"] += 1
                            self._index[fingerprint]["last_seen"] = timestamp
                            # Keep last 10 error IDs per fingerprint
                            ids = self._index[fingerprint]["error_ids"]
                            if len(ids) >= 10:
                                ids.pop(0)
                            ids.append(error_id)

                    except json.JSONDecodeError:
                        logger.warning(f"Invalid JSON at line {line_num}")

                    line_num += 1

        except OSError as e:
            logger.error(f"Failed to rebuild index: {e}")

        self._save_index()
        logger.info(f"Rebuilt error index: {len(self._index)} patterns, {len(self._id_index)} records")

    def _save_index(self) -> None:
        """Save the index to disk."""
        try:
            data = {
                "fingerprints": self._index,
                "ids": self._id_index,
                "updated_at": datetime.now(timezone.utc).isoformat(),
            }
            # Atomic write via temp file
            temp_file = self.index_file.with_suffix(".tmp")
            with open(temp_file, "w") as f:
                json.dump(data, f, indent=2)
            temp_file.rename(self.index_file)
        except OSError as e:
            logger.error(f"Failed to save error index: {e}")

    def append(self, record: ErrorRecord) -> None:
        """Append an error record to storage."""
        with self._lock:
            record_dict = asdict(record)

            # Append to JSONL file with file locking
            try:
                with open(self.errors_file, "a") as f:
                    fcntl.flock(f.fileno(), fcntl.LOCK_EX)
                    try:
                        # Get current line number before writing
                        line_num = sum(1 for _ in open(self.errors_file, "r")) if self.errors_file.exists() else 0
                        f.write(json.dumps(record_dict, default=str) + "\n")
                        f.flush()
                    finally:
                        fcntl.flock(f.fileno(), fcntl.LOCK_UN)

            except OSError as e:
                logger.error(f"Failed to append error record: {e}")
                return

            # Update index
            self._id_index[record.id] = line_num

            if record.fingerprint not in self._index:
                self._index[record.fingerprint] = {
                    "count": 0,
                    "first_seen": record.timestamp,
                    "last_seen": record.timestamp,
                    "error_ids": [],
                    "tool_name": record.tool_name,
                    "error_code": record.error_code,
                }

            fp_data = self._index[record.fingerprint]
            fp_data["count"] += 1
            fp_data["last_seen"] = record.timestamp
            if len(fp_data["error_ids"]) >= 10:
                fp_data["error_ids"].pop(0)
            fp_data["error_ids"].append(record.id)

            self._save_index()
            self._stats_dirty = True

    def get(self, error_id: str) -> Optional[ErrorRecord]:
        """Retrieve an error record by ID."""
        with self._lock:
            line_num = self._id_index.get(error_id)
            if line_num is None:
                return None

            try:
                with open(self.errors_file, "r") as f:
                    for i, line in enumerate(f):
                        if i == line_num:
                            record_dict = json.loads(line.strip())
                            return ErrorRecord(**record_dict)
            except (OSError, json.JSONDecodeError) as e:
                logger.error(f"Failed to retrieve error {error_id}: {e}")

            return None

    def query(
        self,
        *,
        tool_name: Optional[str] = None,
        error_code: Optional[str] = None,
        error_type: Optional[str] = None,
        fingerprint: Optional[str] = None,
        provider_id: Optional[str] = None,
        since: Optional[str] = None,
        until: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> list[ErrorRecord]:
        """Query error records with filtering."""
        results: list[ErrorRecord] = []
        skipped = 0

        # Parse time filters
        since_dt = datetime.fromisoformat(since.replace("Z", "+00:00")) if since else None
        until_dt = datetime.fromisoformat(until.replace("Z", "+00:00")) if until else None

        with self._lock:
            if not self.errors_file.exists():
                return []

            try:
                with open(self.errors_file, "r") as f:
                    for line in f:
                        line = line.strip()
                        if not line:
                            continue

                        try:
                            record_dict = json.loads(line)
                        except json.JSONDecodeError:
                            continue

                        # Apply filters
                        if tool_name and record_dict.get("tool_name") != tool_name:
                            continue
                        if error_code and record_dict.get("error_code") != error_code:
                            continue
                        if error_type and record_dict.get("error_type") != error_type:
                            continue
                        if fingerprint and record_dict.get("fingerprint") != fingerprint:
                            continue
                        if provider_id and record_dict.get("provider_id") != provider_id:
                            continue

                        # Time filters
                        if since_dt or until_dt:
                            try:
                                ts = record_dict.get("timestamp", "")
                                record_dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                                if since_dt and record_dt < since_dt:
                                    continue
                                if until_dt and record_dt > until_dt:
                                    continue
                            except (ValueError, TypeError):
                                continue

                        # Apply offset
                        if skipped < offset:
                            skipped += 1
                            continue

                        # Check limit
                        if len(results) >= limit:
                            break

                        results.append(ErrorRecord(**record_dict))

            except OSError as e:
                logger.error(f"Failed to query errors: {e}")

        return results

    def get_stats(self) -> dict[str, Any]:
        """Get aggregated error statistics."""
        # Check if we have recent cached stats
        if self.stats_file.exists() and not self._stats_dirty:
            try:
                with open(self.stats_file, "r") as f:
                    cached = json.load(f)
                    updated_at = cached.get("updated_at", "")
                    if updated_at:
                        updated_dt = datetime.fromisoformat(updated_at.replace("Z", "+00:00"))
                        # Use cached stats if less than 30 seconds old
                        if datetime.now(timezone.utc) - updated_dt < timedelta(seconds=30):
                            return cached
            except (OSError, json.JSONDecodeError):
                pass

        # Compute fresh stats
        stats = self._compute_stats()

        # Cache stats
        try:
            with open(self.stats_file, "w") as f:
                json.dump(stats, f, indent=2)
            self._stats_dirty = False
        except OSError as e:
            logger.warning(f"Failed to cache stats: {e}")

        return stats

    def _compute_stats(self) -> dict[str, Any]:
        """Compute error statistics from index."""
        with self._lock:
            total_errors = sum(fp["count"] for fp in self._index.values())
            unique_patterns = len(self._index)

            # Group by tool
            by_tool: dict[str, int] = {}
            by_error_code: dict[str, int] = {}

            for fp_data in self._index.values():
                tool = fp_data.get("tool_name", "unknown")
                code = fp_data.get("error_code", "unknown")
                count = fp_data.get("count", 0)

                by_tool[tool] = by_tool.get(tool, 0) + count
                by_error_code[code] = by_error_code.get(code, 0) + count

            # Top patterns
            top_patterns = sorted(
                [
                    {
                        "fingerprint": fp,
                        "count": data["count"],
                        "tool_name": data.get("tool_name"),
                        "error_code": data.get("error_code"),
                        "first_seen": data.get("first_seen"),
                        "last_seen": data.get("last_seen"),
                    }
                    for fp, data in self._index.items()
                ],
                key=lambda x: x["count"],
                reverse=True,
            )[:20]

            return {
                "total_errors": total_errors,
                "unique_patterns": unique_patterns,
                "by_tool": by_tool,
                "by_error_code": by_error_code,
                "top_patterns": top_patterns,
                "updated_at": datetime.now(timezone.utc).isoformat(),
            }

    def get_patterns(self, min_count: int = 3) -> list[dict[str, Any]]:
        """Get recurring error patterns."""
        with self._lock:
            patterns = []

            for fp, data in self._index.items():
                if data.get("count", 0) >= min_count:
                    patterns.append(
                        {
                            "fingerprint": fp,
                            "count": data["count"],
                            "tool_name": data.get("tool_name"),
                            "error_code": data.get("error_code"),
                            "first_seen": data.get("first_seen"),
                            "last_seen": data.get("last_seen"),
                            "sample_ids": data.get("error_ids", [])[-5:],
                        }
                    )

            # Sort by count descending
            patterns.sort(key=lambda x: x["count"], reverse=True)
            return patterns

    def cleanup(self, retention_days: int, max_errors: int) -> int:
        """Clean up old records based on retention policy."""
        with self._lock:
            if not self.errors_file.exists():
                return 0

            cutoff_dt = datetime.now(timezone.utc) - timedelta(days=retention_days)
            kept_records: list[str] = []
            deleted_count = 0

            try:
                # Read all records
                with open(self.errors_file, "r") as f:
                    lines = f.readlines()

                for line in lines:
                    line = line.strip()
                    if not line:
                        continue

                    try:
                        record_dict = json.loads(line)
                        ts = record_dict.get("timestamp", "")
                        record_dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))

                        # Keep if within retention period
                        if record_dt >= cutoff_dt:
                            kept_records.append(line)
                        else:
                            deleted_count += 1

                    except (json.JSONDecodeError, ValueError):
                        # Keep malformed records to avoid data loss
                        kept_records.append(line)

                # Enforce max_errors limit (keep most recent)
                if len(kept_records) > max_errors:
                    deleted_count += len(kept_records) - max_errors
                    kept_records = kept_records[-max_errors:]

                # Write back
                temp_file = self.errors_file.with_suffix(".tmp")
                with open(temp_file, "w") as f:
                    for line in kept_records:
                        f.write(line + "\n")
                temp_file.rename(self.errors_file)

                # Rebuild index after cleanup
                self._rebuild_index()

                logger.info(f"Cleaned up {deleted_count} error records")
                return deleted_count

            except OSError as e:
                logger.error(f"Failed to cleanup errors: {e}")
                return 0

    def count(self) -> int:
        """Get total count of error records."""
        with self._lock:
            return len(self._id_index)

    def get_total_count(self) -> int:
        """Get total error count from all patterns (single source of truth)."""
        with self._lock:
            return sum(fp.get("count", 0) for fp in self._index.values())


# Global store instance
_error_store: Optional[ErrorStore] = None
_store_lock = threading.Lock()


def get_error_store(storage_path: Optional[str | Path] = None) -> ErrorStore:
    """
    Get the global error store instance.

    Args:
        storage_path: Optional path to initialize the store. If not provided
            on first call, uses default path.

    Returns:
        The ErrorStore instance
    """
    global _error_store

    with _store_lock:
        if _error_store is None:
            if storage_path is None:
                # Default path
                storage_path = Path.home() / ".foundry-mcp" / "errors"
            _error_store = FileErrorStore(storage_path)

        return _error_store


def reset_error_store() -> None:
    """Reset the global error store (for testing)."""
    global _error_store
    with _store_lock:
        _error_store = None