foundry_mcp-0.8.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of foundry-mcp might be problematic.

Files changed (153)
  1. foundry_mcp/__init__.py +13 -0
  2. foundry_mcp/cli/__init__.py +67 -0
  3. foundry_mcp/cli/__main__.py +9 -0
  4. foundry_mcp/cli/agent.py +96 -0
  5. foundry_mcp/cli/commands/__init__.py +37 -0
  6. foundry_mcp/cli/commands/cache.py +137 -0
  7. foundry_mcp/cli/commands/dashboard.py +148 -0
  8. foundry_mcp/cli/commands/dev.py +446 -0
  9. foundry_mcp/cli/commands/journal.py +377 -0
  10. foundry_mcp/cli/commands/lifecycle.py +274 -0
  11. foundry_mcp/cli/commands/modify.py +824 -0
  12. foundry_mcp/cli/commands/plan.py +640 -0
  13. foundry_mcp/cli/commands/pr.py +393 -0
  14. foundry_mcp/cli/commands/review.py +667 -0
  15. foundry_mcp/cli/commands/session.py +472 -0
  16. foundry_mcp/cli/commands/specs.py +686 -0
  17. foundry_mcp/cli/commands/tasks.py +807 -0
  18. foundry_mcp/cli/commands/testing.py +676 -0
  19. foundry_mcp/cli/commands/validate.py +982 -0
  20. foundry_mcp/cli/config.py +98 -0
  21. foundry_mcp/cli/context.py +298 -0
  22. foundry_mcp/cli/logging.py +212 -0
  23. foundry_mcp/cli/main.py +44 -0
  24. foundry_mcp/cli/output.py +122 -0
  25. foundry_mcp/cli/registry.py +110 -0
  26. foundry_mcp/cli/resilience.py +178 -0
  27. foundry_mcp/cli/transcript.py +217 -0
  28. foundry_mcp/config.py +1454 -0
  29. foundry_mcp/core/__init__.py +144 -0
  30. foundry_mcp/core/ai_consultation.py +1773 -0
  31. foundry_mcp/core/batch_operations.py +1202 -0
  32. foundry_mcp/core/cache.py +195 -0
  33. foundry_mcp/core/capabilities.py +446 -0
  34. foundry_mcp/core/concurrency.py +898 -0
  35. foundry_mcp/core/context.py +540 -0
  36. foundry_mcp/core/discovery.py +1603 -0
  37. foundry_mcp/core/error_collection.py +728 -0
  38. foundry_mcp/core/error_store.py +592 -0
  39. foundry_mcp/core/health.py +749 -0
  40. foundry_mcp/core/intake.py +933 -0
  41. foundry_mcp/core/journal.py +700 -0
  42. foundry_mcp/core/lifecycle.py +412 -0
  43. foundry_mcp/core/llm_config.py +1376 -0
  44. foundry_mcp/core/llm_patterns.py +510 -0
  45. foundry_mcp/core/llm_provider.py +1569 -0
  46. foundry_mcp/core/logging_config.py +374 -0
  47. foundry_mcp/core/metrics_persistence.py +584 -0
  48. foundry_mcp/core/metrics_registry.py +327 -0
  49. foundry_mcp/core/metrics_store.py +641 -0
  50. foundry_mcp/core/modifications.py +224 -0
  51. foundry_mcp/core/naming.py +146 -0
  52. foundry_mcp/core/observability.py +1216 -0
  53. foundry_mcp/core/otel.py +452 -0
  54. foundry_mcp/core/otel_stubs.py +264 -0
  55. foundry_mcp/core/pagination.py +255 -0
  56. foundry_mcp/core/progress.py +387 -0
  57. foundry_mcp/core/prometheus.py +564 -0
  58. foundry_mcp/core/prompts/__init__.py +464 -0
  59. foundry_mcp/core/prompts/fidelity_review.py +691 -0
  60. foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
  61. foundry_mcp/core/prompts/plan_review.py +627 -0
  62. foundry_mcp/core/providers/__init__.py +237 -0
  63. foundry_mcp/core/providers/base.py +515 -0
  64. foundry_mcp/core/providers/claude.py +472 -0
  65. foundry_mcp/core/providers/codex.py +637 -0
  66. foundry_mcp/core/providers/cursor_agent.py +630 -0
  67. foundry_mcp/core/providers/detectors.py +515 -0
  68. foundry_mcp/core/providers/gemini.py +426 -0
  69. foundry_mcp/core/providers/opencode.py +718 -0
  70. foundry_mcp/core/providers/opencode_wrapper.js +308 -0
  71. foundry_mcp/core/providers/package-lock.json +24 -0
  72. foundry_mcp/core/providers/package.json +25 -0
  73. foundry_mcp/core/providers/registry.py +607 -0
  74. foundry_mcp/core/providers/test_provider.py +171 -0
  75. foundry_mcp/core/providers/validation.py +857 -0
  76. foundry_mcp/core/rate_limit.py +427 -0
  77. foundry_mcp/core/research/__init__.py +68 -0
  78. foundry_mcp/core/research/memory.py +528 -0
  79. foundry_mcp/core/research/models.py +1234 -0
  80. foundry_mcp/core/research/providers/__init__.py +40 -0
  81. foundry_mcp/core/research/providers/base.py +242 -0
  82. foundry_mcp/core/research/providers/google.py +507 -0
  83. foundry_mcp/core/research/providers/perplexity.py +442 -0
  84. foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
  85. foundry_mcp/core/research/providers/tavily.py +383 -0
  86. foundry_mcp/core/research/workflows/__init__.py +25 -0
  87. foundry_mcp/core/research/workflows/base.py +298 -0
  88. foundry_mcp/core/research/workflows/chat.py +271 -0
  89. foundry_mcp/core/research/workflows/consensus.py +539 -0
  90. foundry_mcp/core/research/workflows/deep_research.py +4142 -0
  91. foundry_mcp/core/research/workflows/ideate.py +682 -0
  92. foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
  93. foundry_mcp/core/resilience.py +600 -0
  94. foundry_mcp/core/responses.py +1624 -0
  95. foundry_mcp/core/review.py +366 -0
  96. foundry_mcp/core/security.py +438 -0
  97. foundry_mcp/core/spec.py +4119 -0
  98. foundry_mcp/core/task.py +2463 -0
  99. foundry_mcp/core/testing.py +839 -0
  100. foundry_mcp/core/validation.py +2357 -0
  101. foundry_mcp/dashboard/__init__.py +32 -0
  102. foundry_mcp/dashboard/app.py +119 -0
  103. foundry_mcp/dashboard/components/__init__.py +17 -0
  104. foundry_mcp/dashboard/components/cards.py +88 -0
  105. foundry_mcp/dashboard/components/charts.py +177 -0
  106. foundry_mcp/dashboard/components/filters.py +136 -0
  107. foundry_mcp/dashboard/components/tables.py +195 -0
  108. foundry_mcp/dashboard/data/__init__.py +11 -0
  109. foundry_mcp/dashboard/data/stores.py +433 -0
  110. foundry_mcp/dashboard/launcher.py +300 -0
  111. foundry_mcp/dashboard/views/__init__.py +12 -0
  112. foundry_mcp/dashboard/views/errors.py +217 -0
  113. foundry_mcp/dashboard/views/metrics.py +164 -0
  114. foundry_mcp/dashboard/views/overview.py +96 -0
  115. foundry_mcp/dashboard/views/providers.py +83 -0
  116. foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
  117. foundry_mcp/dashboard/views/tool_usage.py +139 -0
  118. foundry_mcp/prompts/__init__.py +9 -0
  119. foundry_mcp/prompts/workflows.py +525 -0
  120. foundry_mcp/resources/__init__.py +9 -0
  121. foundry_mcp/resources/specs.py +591 -0
  122. foundry_mcp/schemas/__init__.py +38 -0
  123. foundry_mcp/schemas/intake-schema.json +89 -0
  124. foundry_mcp/schemas/sdd-spec-schema.json +414 -0
  125. foundry_mcp/server.py +150 -0
  126. foundry_mcp/tools/__init__.py +10 -0
  127. foundry_mcp/tools/unified/__init__.py +92 -0
  128. foundry_mcp/tools/unified/authoring.py +3620 -0
  129. foundry_mcp/tools/unified/context_helpers.py +98 -0
  130. foundry_mcp/tools/unified/documentation_helpers.py +268 -0
  131. foundry_mcp/tools/unified/environment.py +1341 -0
  132. foundry_mcp/tools/unified/error.py +479 -0
  133. foundry_mcp/tools/unified/health.py +225 -0
  134. foundry_mcp/tools/unified/journal.py +841 -0
  135. foundry_mcp/tools/unified/lifecycle.py +640 -0
  136. foundry_mcp/tools/unified/metrics.py +777 -0
  137. foundry_mcp/tools/unified/plan.py +876 -0
  138. foundry_mcp/tools/unified/pr.py +294 -0
  139. foundry_mcp/tools/unified/provider.py +589 -0
  140. foundry_mcp/tools/unified/research.py +1283 -0
  141. foundry_mcp/tools/unified/review.py +1042 -0
  142. foundry_mcp/tools/unified/review_helpers.py +314 -0
  143. foundry_mcp/tools/unified/router.py +102 -0
  144. foundry_mcp/tools/unified/server.py +565 -0
  145. foundry_mcp/tools/unified/spec.py +1283 -0
  146. foundry_mcp/tools/unified/task.py +3846 -0
  147. foundry_mcp/tools/unified/test.py +431 -0
  148. foundry_mcp/tools/unified/verification.py +520 -0
  149. foundry_mcp-0.8.22.dist-info/METADATA +344 -0
  150. foundry_mcp-0.8.22.dist-info/RECORD +153 -0
  151. foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
  152. foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
  153. foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
foundry_mcp/core/metrics_store.py
@@ -0,0 +1,641 @@
+"""
+Metrics storage backends for the metrics persistence infrastructure.
+
+Provides abstract base class and concrete implementations for persisting
+metric data points to enable time-series analysis across server restarts.
+"""
+
+from __future__ import annotations
+
+import fcntl
+import json
+import logging
+import threading
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, asdict, field
+from datetime import datetime, timedelta, timezone
+from pathlib import Path
+from typing import Any, Optional
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class MetricDataPoint:
+    """
+    A single metric data point or aggregated bucket.
+
+    Attributes:
+        metric_name: Name of the metric (e.g., "tool_invocations_total")
+        timestamp: ISO 8601 timestamp when recorded
+        value: Current value or delta for the bucket
+        metric_type: Type of metric (counter, gauge, histogram)
+        labels: Label key-value pairs
+        bucket_start: Aggregation bucket start time (ISO 8601)
+        bucket_end: Aggregation bucket end time (ISO 8601)
+        sample_count: Number of samples aggregated in this bucket
+    """
+
+    metric_name: str
+    timestamp: str
+    value: float
+    metric_type: str = "counter"
+    labels: dict[str, str] = field(default_factory=dict)
+    bucket_start: str = ""
+    bucket_end: str = ""
+    sample_count: int = 1
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for JSON serialization."""
+        return asdict(self)
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "MetricDataPoint":
+        """Create from dictionary."""
+        return cls(
+            metric_name=data.get("metric_name", ""),
+            timestamp=data.get("timestamp", ""),
+            value=float(data.get("value", 0.0)),
+            metric_type=data.get("metric_type", "counter"),
+            labels=data.get("labels", {}),
+            bucket_start=data.get("bucket_start", ""),
+            bucket_end=data.get("bucket_end", ""),
+            sample_count=int(data.get("sample_count", 1)),
+        )
+
+
+class MetricsStore(ABC):
+    """Abstract base class for metrics storage backends."""
+
+    @abstractmethod
+    def append(self, data_point: MetricDataPoint) -> None:
+        """
+        Append a metric data point to storage.
+
+        Args:
+            data_point: The metric data point to store
+        """
+        pass
+
+    @abstractmethod
+    def append_batch(self, data_points: list[MetricDataPoint]) -> None:
+        """
+        Append multiple metric data points atomically.
+
+        Args:
+            data_points: List of metric data points to store
+        """
+        pass
+
+    @abstractmethod
+    def query(
+        self,
+        *,
+        metric_name: Optional[str] = None,
+        labels: Optional[dict[str, str]] = None,
+        since: Optional[str] = None,
+        until: Optional[str] = None,
+        limit: int = 100,
+        offset: int = 0,
+    ) -> list[MetricDataPoint]:
+        """
+        Query metric data points with filtering.
+
+        Args:
+            metric_name: Filter by metric name
+            labels: Filter by label key-value pairs
+            since: ISO 8601 timestamp - include records after this time
+            until: ISO 8601 timestamp - include records before this time
+            limit: Maximum number of records to return
+            offset: Number of records to skip
+
+        Returns:
+            List of matching MetricDataPoints
+        """
+        pass
+
+    @abstractmethod
+    def list_metrics(self) -> list[dict[str, Any]]:
+        """
+        List all persisted metrics with metadata.
+
+        Returns:
+            List of metric metadata objects with names and counts
+        """
+        pass
+
+    @abstractmethod
+    def get_summary(
+        self,
+        metric_name: str,
+        *,
+        labels: Optional[dict[str, str]] = None,
+        since: Optional[str] = None,
+        until: Optional[str] = None,
+    ) -> dict[str, Any]:
+        """
+        Get aggregated statistics for a metric.
+
+        Args:
+            metric_name: Name of the metric
+            labels: Filter by label key-value pairs
+            since: ISO 8601 timestamp - include records after this time
+            until: ISO 8601 timestamp - include records before this time
+
+        Returns:
+            Dictionary with min, max, avg, sum, count statistics
+        """
+        pass
+
+    @abstractmethod
+    def cleanup(self, retention_days: int, max_records: int) -> int:
+        """
+        Clean up old records based on retention policy.
+
+        Args:
+            retention_days: Delete records older than this many days
+            max_records: Maximum number of records to keep
+
+        Returns:
+            Number of records deleted
+        """
+        pass
+
+    @abstractmethod
+    def count(self) -> int:
+        """
+        Get total count of metric data points.
+
+        Returns:
+            Total number of stored metric data points
+        """
+        pass
+
+
+class FileMetricsStore(MetricsStore):
+    """
+    JSONL-based metrics storage implementation.
+
+    Stores metrics in append-only JSONL format with separate index file
+    for efficient querying. Thread-safe with file locking for concurrent access.
+
+    Directory structure:
+        ~/.foundry-mcp/metrics/
+            metrics.jsonl - Append-only metrics log
+            index.json - Metric name -> metadata mapping
+    """
+
+    def __init__(self, storage_path: str | Path):
+        """
+        Initialize the file-based metrics store.
+
+        Args:
+            storage_path: Directory path for metrics storage
+        """
+        self.storage_path = Path(storage_path).expanduser()
+        self.storage_path.mkdir(parents=True, exist_ok=True)
+
+        self.metrics_file = self.storage_path / "metrics.jsonl"
+        self.index_file = self.storage_path / "index.json"
+
+        self._lock = threading.Lock()
+        self._index: dict[str, dict[str, Any]] = {}  # metric_name -> metadata
+        self._record_count = 0
+
+        # Load index on initialization
+        self._load_index()
+
+    def _load_index(self) -> None:
+        """Load the index from disk."""
+        if self.index_file.exists():
+            try:
+                with open(self.index_file, "r") as f:
+                    data = json.load(f)
+                self._index = data.get("metrics", {})
+                self._record_count = data.get("record_count", 0)
+            except (json.JSONDecodeError, OSError) as e:
+                logger.warning(f"Failed to load metrics index, rebuilding: {e}")
+                self._rebuild_index()
+        else:
+            # First run or index deleted - rebuild from metrics file
+            self._rebuild_index()
+
+    def _rebuild_index(self) -> None:
+        """Rebuild index from the metrics JSONL file."""
+        self._index = {}
+        self._record_count = 0
+
+        if not self.metrics_file.exists():
+            self._save_index()
+            return
+
+        try:
+            with open(self.metrics_file, "r") as f:
+                for line in f:
+                    line = line.strip()
+                    if not line:
+                        continue
+
+                    try:
+                        record_dict = json.loads(line)
+                        metric_name = record_dict.get("metric_name", "")
+                        timestamp = record_dict.get("timestamp", "")
+                        metric_type = record_dict.get("metric_type", "counter")
+
+                        if metric_name:
+                            if metric_name not in self._index:
+                                self._index[metric_name] = {
+                                    "count": 0,
+                                    "first_seen": timestamp,
+                                    "last_seen": timestamp,
+                                    "metric_type": metric_type,
+                                    "label_keys": set(),
+                                }
+
+                            self._index[metric_name]["count"] += 1
+                            self._index[metric_name]["last_seen"] = timestamp
+
+                            # Track label keys
+                            labels = record_dict.get("labels", {})
+                            if isinstance(labels, dict):
+                                self._index[metric_name]["label_keys"].update(labels.keys())
+
+                        self._record_count += 1
+
+                    except json.JSONDecodeError:
+                        logger.warning(f"Invalid JSON in metrics file at line {self._record_count}")
+
+        except OSError as e:
+            logger.error(f"Failed to rebuild metrics index: {e}")
+
+        # Convert label_keys sets to lists for JSON serialization
+        for metric_data in self._index.values():
+            if isinstance(metric_data.get("label_keys"), set):
+                metric_data["label_keys"] = list(metric_data["label_keys"])
+
+        self._save_index()
+        logger.info(f"Rebuilt metrics index: {len(self._index)} metrics, {self._record_count} records")
+
+    def _save_index(self) -> None:
+        """Save the index to disk."""
+        try:
+            # Convert label_keys to lists for JSON serialization
+            index_copy = {}
+            for name, data in self._index.items():
+                data_copy = dict(data)
+                if isinstance(data_copy.get("label_keys"), set):
+                    data_copy["label_keys"] = list(data_copy["label_keys"])
+                index_copy[name] = data_copy
+
+            data = {
+                "metrics": index_copy,
+                "record_count": self._record_count,
+                "updated_at": datetime.now(timezone.utc).isoformat(),
+            }
+            # Atomic write via temp file
+            temp_file = self.index_file.with_suffix(".tmp")
+            with open(temp_file, "w") as f:
+                json.dump(data, f, indent=2)
+            temp_file.rename(self.index_file)
+        except OSError as e:
+            logger.error(f"Failed to save metrics index: {e}")
+
+    def append(self, data_point: MetricDataPoint) -> None:
+        """Append a metric data point to storage."""
+        with self._lock:
+            record_dict = data_point.to_dict()
+
+            # Append to JSONL file with file locking
+            try:
+                with open(self.metrics_file, "a") as f:
+                    fcntl.flock(f.fileno(), fcntl.LOCK_EX)
+                    try:
+                        f.write(json.dumps(record_dict, default=str) + "\n")
+                        f.flush()
+                    finally:
+                        fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+
+            except OSError as e:
+                logger.error(f"Failed to append metric data point: {e}")
+                return
+
+            # Update index
+            metric_name = data_point.metric_name
+            if metric_name not in self._index:
+                self._index[metric_name] = {
+                    "count": 0,
+                    "first_seen": data_point.timestamp,
+                    "last_seen": data_point.timestamp,
+                    "metric_type": data_point.metric_type,
+                    "label_keys": set(),
+                }
+
+            self._index[metric_name]["count"] += 1
+            self._index[metric_name]["last_seen"] = data_point.timestamp
+
+            # Track label keys
+            if isinstance(self._index[metric_name].get("label_keys"), list):
+                self._index[metric_name]["label_keys"] = set(self._index[metric_name]["label_keys"])
+            self._index[metric_name]["label_keys"].update(data_point.labels.keys())
+
+            self._record_count += 1
+            self._save_index()
+
+    def append_batch(self, data_points: list[MetricDataPoint]) -> None:
+        """Append multiple metric data points atomically."""
+        if not data_points:
+            return
+
+        with self._lock:
+            # Append to JSONL file with file locking
+            try:
+                with open(self.metrics_file, "a") as f:
+                    fcntl.flock(f.fileno(), fcntl.LOCK_EX)
+                    try:
+                        for data_point in data_points:
+                            record_dict = data_point.to_dict()
+                            f.write(json.dumps(record_dict, default=str) + "\n")
+                        f.flush()
+                    finally:
+                        fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+
+            except OSError as e:
+                logger.error(f"Failed to append metric batch: {e}")
+                return
+
+            # Update index
+            for data_point in data_points:
+                metric_name = data_point.metric_name
+                if metric_name not in self._index:
+                    self._index[metric_name] = {
+                        "count": 0,
+                        "first_seen": data_point.timestamp,
+                        "last_seen": data_point.timestamp,
+                        "metric_type": data_point.metric_type,
+                        "label_keys": set(),
+                    }
+
+                self._index[metric_name]["count"] += 1
+                self._index[metric_name]["last_seen"] = data_point.timestamp
+
+                if isinstance(self._index[metric_name].get("label_keys"), list):
+                    self._index[metric_name]["label_keys"] = set(self._index[metric_name]["label_keys"])
+                self._index[metric_name]["label_keys"].update(data_point.labels.keys())
+
+                self._record_count += 1
+
+            self._save_index()
+
+    def query(
+        self,
+        *,
+        metric_name: Optional[str] = None,
+        labels: Optional[dict[str, str]] = None,
+        since: Optional[str] = None,
+        until: Optional[str] = None,
+        limit: int = 100,
+        offset: int = 0,
+    ) -> list[MetricDataPoint]:
+        """Query metric data points with filtering."""
+        results: list[MetricDataPoint] = []
+        skipped = 0
+
+        # Parse time filters
+        since_dt = datetime.fromisoformat(since.replace("Z", "+00:00")) if since else None
+        until_dt = datetime.fromisoformat(until.replace("Z", "+00:00")) if until else None
+
+        with self._lock:
+            if not self.metrics_file.exists():
+                return []
+
+            try:
+                with open(self.metrics_file, "r") as f:
+                    for line in f:
+                        line = line.strip()
+                        if not line:
+                            continue
+
+                        try:
+                            record_dict = json.loads(line)
+                        except json.JSONDecodeError:
+                            continue
+
+                        # Apply metric name filter
+                        if metric_name and record_dict.get("metric_name") != metric_name:
+                            continue
+
+                        # Apply label filters
+                        if labels:
+                            record_labels = record_dict.get("labels", {})
+                            if not all(
+                                record_labels.get(k) == v
+                                for k, v in labels.items()
+                            ):
+                                continue
+
+                        # Time filters
+                        if since_dt or until_dt:
+                            try:
+                                ts = record_dict.get("timestamp", "")
+                                record_dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
+                                if since_dt and record_dt < since_dt:
+                                    continue
+                                if until_dt and record_dt > until_dt:
+                                    continue
+                            except (ValueError, TypeError):
+                                continue
+
+                        # Apply offset
+                        if skipped < offset:
+                            skipped += 1
+                            continue
+
+                        # Check limit
+                        if len(results) >= limit:
+                            break
+
+                        results.append(MetricDataPoint.from_dict(record_dict))
+
+            except OSError as e:
+                logger.error(f"Failed to query metrics: {e}")
+
+        return results
+
+    def list_metrics(self) -> list[dict[str, Any]]:
+        """List all persisted metrics with metadata."""
+        with self._lock:
+            metrics_list = []
+            for name, data in self._index.items():
+                label_keys = data.get("label_keys", [])
+                if isinstance(label_keys, set):
+                    label_keys = list(label_keys)
+
+                metrics_list.append({
+                    "metric_name": name,
+                    "count": data.get("count", 0),
+                    "first_seen": data.get("first_seen"),
+                    "last_seen": data.get("last_seen"),
+                    "metric_type": data.get("metric_type"),
+                    "label_keys": label_keys,
+                })
+
+            # Sort by count descending
+            metrics_list.sort(key=lambda x: x["count"], reverse=True)
+            return metrics_list
+
+    def get_summary(
+        self,
+        metric_name: str,
+        *,
+        labels: Optional[dict[str, str]] = None,
+        since: Optional[str] = None,
+        until: Optional[str] = None,
+    ) -> dict[str, Any]:
+        """Get aggregated statistics for a metric."""
+        # Query all matching data points
+        data_points = self.query(
+            metric_name=metric_name,
+            labels=labels,
+            since=since,
+            until=until,
+            limit=100000,  # Get all matching
+            offset=0,
+        )
+
+        if not data_points:
+            return {
+                "metric_name": metric_name,
+                "count": 0,
+                "min": None,
+                "max": None,
+                "avg": None,
+                "sum": None,
+                "sample_count": 0,
+            }
+
+        values = [dp.value for dp in data_points]
+        total_samples = sum(dp.sample_count for dp in data_points)
+
+        return {
+            "metric_name": metric_name,
+            "count": len(data_points),
+            "min": min(values),
+            "max": max(values),
+            "avg": sum(values) / len(values) if values else None,
+            "sum": sum(values),
+            "sample_count": total_samples,
+            "first_timestamp": data_points[0].timestamp if data_points else None,
+            "last_timestamp": data_points[-1].timestamp if data_points else None,
+        }
+
+    def cleanup(self, retention_days: int, max_records: int) -> int:
+        """Clean up old records based on retention policy."""
+        with self._lock:
+            if not self.metrics_file.exists():
+                return 0
+
+            cutoff_dt = datetime.now(timezone.utc) - timedelta(days=retention_days)
+            kept_records: list[str] = []
+            deleted_count = 0
+
+            try:
+                # Read all records
+                with open(self.metrics_file, "r") as f:
+                    lines = f.readlines()
+
+                for line in lines:
+                    line = line.strip()
+                    if not line:
+                        continue
+
+                    try:
+                        record_dict = json.loads(line)
+                        ts = record_dict.get("timestamp", "")
+                        record_dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
+
+                        # Keep if within retention period
+                        if record_dt >= cutoff_dt:
+                            kept_records.append(line)
+                        else:
+                            deleted_count += 1
+
+                    except (json.JSONDecodeError, ValueError):
+                        # Keep malformed records to avoid data loss
+                        kept_records.append(line)
+
+                # Enforce max_records limit (keep most recent)
+                if len(kept_records) > max_records:
+                    deleted_count += len(kept_records) - max_records
+                    kept_records = kept_records[-max_records:]
+
+                # Write back
+                temp_file = self.metrics_file.with_suffix(".tmp")
+                with open(temp_file, "w") as f:
+                    for line in kept_records:
+                        f.write(line + "\n")
+                temp_file.rename(self.metrics_file)
+
+                # Rebuild index after cleanup
+                self._rebuild_index()
+
+                logger.info(f"Cleaned up {deleted_count} metric records")
+                return deleted_count
+
+            except OSError as e:
+                logger.error(f"Failed to cleanup metrics: {e}")
+                return 0
+
+    def count(self) -> int:
+        """Get total count of metric data points."""
+        with self._lock:
+            return self._record_count
+
+    def get_total_count(self, metric_name: Optional[str] = None) -> int:
+        """Get total count for a specific metric or all metrics (single source of truth).
+
+        Args:
+            metric_name: If provided, returns count for that metric only.
+                If None, returns total count across all metrics.
+
+        Returns:
+            Total count of metric records
+        """
+        with self._lock:
+            if metric_name is not None:
+                return self._index.get(metric_name, {}).get("count", 0)
+            return sum(m.get("count", 0) for m in self._index.values())
+
+
+# Global store instance
+_metrics_store: Optional[MetricsStore] = None
+_store_lock = threading.Lock()
+
+
+def get_metrics_store(storage_path: Optional[str | Path] = None) -> MetricsStore:
+    """
+    Get the global metrics store instance.
+
+    Args:
+        storage_path: Optional path to initialize the store. If not provided
+            on first call, uses default path.
+
+    Returns:
+        The MetricsStore instance
+    """
+    global _metrics_store
+
+    with _store_lock:
+        if _metrics_store is None:
+            if storage_path is None:
+                # Default path
+                storage_path = Path.home() / ".foundry-mcp" / "metrics"
+            _metrics_store = FileMetricsStore(storage_path)
+
+        return _metrics_store


def reset_metrics_store() -> None:
    """Reset the global metrics store (for testing)."""
    global _metrics_store
    with _store_lock:
        _metrics_store = None
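
For orientation, here is a minimal usage sketch of the store this diff introduces. It is illustrative only, not an example from the package's documentation: the import path follows the file list above (foundry_mcp/core/metrics_store.py), and the storage directory is a hypothetical scratch path rather than the default ~/.foundry-mcp/metrics. Note that the module imports fcntl, so FileMetricsStore assumes a POSIX host.

from datetime import datetime, timezone

from foundry_mcp.core.metrics_store import (
    MetricDataPoint,
    get_metrics_store,
    reset_metrics_store,
)

# Point the module-level singleton at a scratch directory (hypothetical path).
store = get_metrics_store("/tmp/foundry-metrics-demo")

# Record one counter increment with a label.
store.append(
    MetricDataPoint(
        metric_name="tool_invocations_total",
        timestamp=datetime.now(timezone.utc).isoformat(),
        value=1.0,
        labels={"tool": "spec"},
    )
)

# Read it back: a filtered query, then aggregate statistics.
points = store.query(metric_name="tool_invocations_total", limit=10)
summary = store.get_summary("tool_invocations_total")
print(len(points), summary["count"], summary["sum"])

# Clear the singleton between tests, as the module itself provides for.
reset_metrics_store()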