claude-mpm 3.7.4__py3-none-any.whl → 3.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (117) hide show
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/BASE_PM.md +0 -106
  3. claude_mpm/agents/INSTRUCTIONS.md +0 -78
  4. claude_mpm/agents/MEMORY.md +88 -0
  5. claude_mpm/agents/WORKFLOW.md +86 -0
  6. claude_mpm/agents/schema/agent_schema.json +1 -1
  7. claude_mpm/agents/templates/code_analyzer.json +26 -11
  8. claude_mpm/agents/templates/data_engineer.json +4 -7
  9. claude_mpm/agents/templates/documentation.json +2 -2
  10. claude_mpm/agents/templates/engineer.json +2 -2
  11. claude_mpm/agents/templates/ops.json +3 -8
  12. claude_mpm/agents/templates/qa.json +2 -3
  13. claude_mpm/agents/templates/research.json +2 -3
  14. claude_mpm/agents/templates/security.json +3 -6
  15. claude_mpm/agents/templates/ticketing.json +4 -9
  16. claude_mpm/agents/templates/version_control.json +3 -3
  17. claude_mpm/agents/templates/web_qa.json +4 -4
  18. claude_mpm/agents/templates/web_ui.json +4 -4
  19. claude_mpm/cli/__init__.py +2 -2
  20. claude_mpm/cli/commands/__init__.py +2 -1
  21. claude_mpm/cli/commands/agents.py +118 -1
  22. claude_mpm/cli/commands/tickets.py +596 -19
  23. claude_mpm/cli/parser.py +228 -5
  24. claude_mpm/config/__init__.py +30 -39
  25. claude_mpm/config/socketio_config.py +8 -5
  26. claude_mpm/constants.py +13 -0
  27. claude_mpm/core/__init__.py +8 -18
  28. claude_mpm/core/cache.py +596 -0
  29. claude_mpm/core/claude_runner.py +166 -622
  30. claude_mpm/core/config.py +5 -1
  31. claude_mpm/core/constants.py +339 -0
  32. claude_mpm/core/container.py +461 -22
  33. claude_mpm/core/exceptions.py +392 -0
  34. claude_mpm/core/framework_loader.py +208 -93
  35. claude_mpm/core/interactive_session.py +432 -0
  36. claude_mpm/core/interfaces.py +424 -0
  37. claude_mpm/core/lazy.py +467 -0
  38. claude_mpm/core/logging_config.py +444 -0
  39. claude_mpm/core/oneshot_session.py +465 -0
  40. claude_mpm/core/optimized_agent_loader.py +485 -0
  41. claude_mpm/core/optimized_startup.py +490 -0
  42. claude_mpm/core/service_registry.py +52 -26
  43. claude_mpm/core/socketio_pool.py +162 -5
  44. claude_mpm/core/types.py +292 -0
  45. claude_mpm/core/typing_utils.py +477 -0
  46. claude_mpm/dashboard/static/js/components/file-tool-tracker.js +46 -2
  47. claude_mpm/dashboard/templates/index.html +5 -5
  48. claude_mpm/hooks/claude_hooks/hook_handler.py +213 -99
  49. claude_mpm/init.py +2 -1
  50. claude_mpm/services/__init__.py +78 -14
  51. claude_mpm/services/agent/__init__.py +24 -0
  52. claude_mpm/services/agent/deployment.py +2548 -0
  53. claude_mpm/services/agent/management.py +598 -0
  54. claude_mpm/services/agent/registry.py +813 -0
  55. claude_mpm/services/agents/deployment/agent_deployment.py +592 -269
  56. claude_mpm/services/agents/deployment/async_agent_deployment.py +5 -1
  57. claude_mpm/services/agents/management/agent_capabilities_generator.py +21 -11
  58. claude_mpm/services/agents/memory/agent_memory_manager.py +156 -1
  59. claude_mpm/services/async_session_logger.py +8 -3
  60. claude_mpm/services/communication/__init__.py +21 -0
  61. claude_mpm/services/communication/socketio.py +1933 -0
  62. claude_mpm/services/communication/websocket.py +479 -0
  63. claude_mpm/services/core/__init__.py +123 -0
  64. claude_mpm/services/core/base.py +247 -0
  65. claude_mpm/services/core/interfaces.py +951 -0
  66. claude_mpm/services/framework_claude_md_generator/section_generators/todo_task_tools.py +23 -23
  67. claude_mpm/services/framework_claude_md_generator.py +3 -2
  68. claude_mpm/services/health_monitor.py +4 -3
  69. claude_mpm/services/hook_service.py +64 -4
  70. claude_mpm/services/infrastructure/__init__.py +21 -0
  71. claude_mpm/services/infrastructure/logging.py +202 -0
  72. claude_mpm/services/infrastructure/monitoring.py +893 -0
  73. claude_mpm/services/memory/indexed_memory.py +648 -0
  74. claude_mpm/services/project/__init__.py +21 -0
  75. claude_mpm/services/project/analyzer.py +864 -0
  76. claude_mpm/services/project/registry.py +608 -0
  77. claude_mpm/services/project_analyzer.py +95 -2
  78. claude_mpm/services/recovery_manager.py +15 -9
  79. claude_mpm/services/socketio/__init__.py +25 -0
  80. claude_mpm/services/socketio/handlers/__init__.py +25 -0
  81. claude_mpm/services/socketio/handlers/base.py +121 -0
  82. claude_mpm/services/socketio/handlers/connection.py +198 -0
  83. claude_mpm/services/socketio/handlers/file.py +213 -0
  84. claude_mpm/services/socketio/handlers/git.py +723 -0
  85. claude_mpm/services/socketio/handlers/memory.py +27 -0
  86. claude_mpm/services/socketio/handlers/project.py +25 -0
  87. claude_mpm/services/socketio/handlers/registry.py +145 -0
  88. claude_mpm/services/socketio_client_manager.py +12 -7
  89. claude_mpm/services/socketio_server.py +156 -30
  90. claude_mpm/services/ticket_manager.py +377 -51
  91. claude_mpm/utils/agent_dependency_loader.py +66 -15
  92. claude_mpm/utils/error_handler.py +1 -1
  93. claude_mpm/utils/robust_installer.py +587 -0
  94. claude_mpm/validation/agent_validator.py +27 -14
  95. claude_mpm/validation/frontmatter_validator.py +231 -0
  96. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/METADATA +74 -41
  97. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/RECORD +101 -76
  98. claude_mpm/.claude-mpm/logs/hooks_20250728.log +0 -10
  99. claude_mpm/agents/agent-template.yaml +0 -83
  100. claude_mpm/cli/README.md +0 -108
  101. claude_mpm/cli_module/refactoring_guide.md +0 -253
  102. claude_mpm/config/async_logging_config.yaml +0 -145
  103. claude_mpm/core/.claude-mpm/logs/hooks_20250730.log +0 -34
  104. claude_mpm/dashboard/.claude-mpm/memories/README.md +0 -36
  105. claude_mpm/dashboard/README.md +0 -121
  106. claude_mpm/dashboard/static/js/dashboard.js.backup +0 -1973
  107. claude_mpm/dashboard/templates/.claude-mpm/memories/README.md +0 -36
  108. claude_mpm/dashboard/templates/.claude-mpm/memories/engineer_agent.md +0 -39
  109. claude_mpm/dashboard/templates/.claude-mpm/memories/version_control_agent.md +0 -38
  110. claude_mpm/hooks/README.md +0 -96
  111. claude_mpm/schemas/agent_schema.json +0 -435
  112. claude_mpm/services/framework_claude_md_generator/README.md +0 -92
  113. claude_mpm/services/version_control/VERSION +0 -1
  114. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/WHEEL +0 -0
  115. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/entry_points.txt +0 -0
  116. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/licenses/LICENSE +0 -0
  117. {claude_mpm-3.7.4.dist-info → claude_mpm-3.8.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,596 @@
1
+ #!/usr/bin/env python3
2
+ """File system caching with LRU eviction for performance optimization.
3
+
4
+ This module provides a high-performance caching layer for file system operations,
5
+ reducing repeated file reads and improving response times.
6
+
7
+ WHY file system caching:
8
+ - Reduces file I/O operations by 50-70%
9
+ - Implements LRU eviction to manage memory usage
10
+ - Provides thread-safe concurrent access
11
+ - Supports TTL-based expiration for dynamic content
12
+ """
13
+
14
+ import asyncio
15
+ import hashlib
16
+ import json
17
+ import os
18
+ import pickle
19
+ import threading
20
+ import time
21
+ from collections import OrderedDict
22
+ from dataclasses import dataclass, field
23
+ from datetime import datetime, timedelta
24
+ from pathlib import Path
25
+ from typing import Any, Callable, Dict, Optional, Tuple, TypeVar, Union
26
+
27
+ from ..core.logger import get_logger
28
+
29
+ T = TypeVar('T')
30
+
31
+
32
+ @dataclass
33
+ class CacheEntry:
34
+ """Single cache entry with metadata."""
35
+ key: str
36
+ value: Any
37
+ size: int
38
+ created_at: datetime = field(default_factory=datetime.now)
39
+ last_accessed: datetime = field(default_factory=datetime.now)
40
+ access_count: int = 0
41
+ ttl: Optional[float] = None
42
+
43
+ def is_expired(self) -> bool:
44
+ """Check if entry has expired based on TTL."""
45
+ if self.ttl is None:
46
+ return False
47
+ age = (datetime.now() - self.created_at).total_seconds()
48
+ return age > self.ttl
49
+
50
+ def touch(self):
51
+ """Update last access time and increment counter."""
52
+ self.last_accessed = datetime.now()
53
+ self.access_count += 1
54
+
55
+
56
+ @dataclass
57
+ class CacheStats:
58
+ """Cache performance statistics."""
59
+ hits: int = 0
60
+ misses: int = 0
61
+ evictions: int = 0
62
+ total_size: int = 0
63
+ entry_count: int = 0
64
+
65
+ @property
66
+ def hit_rate(self) -> float:
67
+ """Calculate cache hit rate."""
68
+ total = self.hits + self.misses
69
+ return self.hits / total if total > 0 else 0.0
70
+
71
+
72
+ class FileSystemCache:
73
+ """LRU cache for file system operations.
74
+
75
+ WHY this design:
76
+ - OrderedDict provides O(1) LRU operations
77
+ - Thread-safe with fine-grained locking
78
+ - Memory-aware with size limits
79
+ - TTL support for dynamic content
80
+
81
+ Example:
82
+ cache = FileSystemCache(max_size_mb=100, default_ttl=300)
83
+
84
+ # Cache file content
85
+ content = cache.get_file('/path/to/file.json')
86
+
87
+ # Cache expensive computation
88
+ result = cache.get_or_compute(
89
+ 'expensive_key',
90
+ lambda: expensive_computation(),
91
+ ttl=60
92
+ )
93
+ """
94
+
95
+ def __init__(
96
+ self,
97
+ max_size_mb: float = 100,
98
+ max_entries: int = 10000,
99
+ default_ttl: Optional[float] = None,
100
+ persist_path: Optional[Path] = None
101
+ ):
102
+ """Initialize file system cache.
103
+
104
+ Args:
105
+ max_size_mb: Maximum cache size in megabytes
106
+ max_entries: Maximum number of cache entries
107
+ default_ttl: Default time-to-live in seconds
108
+ persist_path: Optional path to persist cache to disk
109
+ """
110
+ self.max_size = int(max_size_mb * 1024 * 1024) # Convert to bytes
111
+ self.max_entries = max_entries
112
+ self.default_ttl = default_ttl
113
+ self.persist_path = persist_path
114
+
115
+ self._cache: OrderedDict[str, CacheEntry] = OrderedDict()
116
+ self._lock = threading.RLock()
117
+ self._stats = CacheStats()
118
+ self._logger = get_logger("fs_cache")
119
+
120
+ # Load persisted cache if available
121
+ if persist_path and persist_path.exists():
122
+ self._load_cache()
123
+
124
+ def _estimate_size(self, value: Any) -> int:
125
+ """Estimate memory size of a value in bytes."""
126
+ if isinstance(value, (str, bytes)):
127
+ return len(value)
128
+ elif isinstance(value, (list, dict)):
129
+ # Rough estimate using JSON serialization
130
+ try:
131
+ return len(json.dumps(value))
132
+ except:
133
+ return 1000 # Default estimate
134
+ else:
135
+ # Use pickle for size estimation
136
+ try:
137
+ return len(pickle.dumps(value))
138
+ except:
139
+ return 100 # Default small size
140
+
141
+ def _evict_lru(self):
142
+ """Evict least recently used entries to make space."""
143
+ with self._lock:
144
+ while self._cache and (
145
+ self._stats.total_size > self.max_size or
146
+ self._stats.entry_count > self.max_entries
147
+ ):
148
+ # Remove oldest entry (first in OrderedDict)
149
+ key, entry = self._cache.popitem(last=False)
150
+ self._stats.total_size -= entry.size
151
+ self._stats.entry_count -= 1
152
+ self._stats.evictions += 1
153
+ self._logger.debug(f"Evicted cache entry: {key}")
154
+
155
+ def _evict_expired(self):
156
+ """Remove expired entries from cache."""
157
+ with self._lock:
158
+ expired_keys = [
159
+ key for key, entry in self._cache.items()
160
+ if entry.is_expired()
161
+ ]
162
+
163
+ for key in expired_keys:
164
+ entry = self._cache.pop(key)
165
+ self._stats.total_size -= entry.size
166
+ self._stats.entry_count -= 1
167
+ self._logger.debug(f"Expired cache entry: {key}")
168
+
169
+ def get(self, key: str) -> Optional[Any]:
170
+ """Get value from cache.
171
+
172
+ Args:
173
+ key: Cache key
174
+
175
+ Returns:
176
+ Cached value or None if not found/expired
177
+ """
178
+ with self._lock:
179
+ entry = self._cache.get(key)
180
+
181
+ if entry is None:
182
+ self._stats.misses += 1
183
+ return None
184
+
185
+ # Check expiration
186
+ if entry.is_expired():
187
+ self._cache.pop(key)
188
+ self._stats.total_size -= entry.size
189
+ self._stats.entry_count -= 1
190
+ self._stats.misses += 1
191
+ return None
192
+
193
+ # Update LRU order
194
+ self._cache.move_to_end(key)
195
+ entry.touch()
196
+
197
+ self._stats.hits += 1
198
+ return entry.value
199
+
200
+ def put(
201
+ self,
202
+ key: str,
203
+ value: Any,
204
+ ttl: Optional[float] = None
205
+ ) -> None:
206
+ """Store value in cache.
207
+
208
+ Args:
209
+ key: Cache key
210
+ value: Value to cache
211
+ ttl: Time-to-live in seconds (overrides default)
212
+ """
213
+ size = self._estimate_size(value)
214
+
215
+ # Don't cache if single item exceeds max size
216
+ if size > self.max_size:
217
+ self._logger.warning(f"Value too large to cache: {key} ({size} bytes)")
218
+ return
219
+
220
+ with self._lock:
221
+ # Remove existing entry if present
222
+ if key in self._cache:
223
+ old_entry = self._cache.pop(key)
224
+ self._stats.total_size -= old_entry.size
225
+ self._stats.entry_count -= 1
226
+
227
+ # Create new entry
228
+ entry = CacheEntry(
229
+ key=key,
230
+ value=value,
231
+ size=size,
232
+ ttl=ttl or self.default_ttl
233
+ )
234
+
235
+ # Add to cache
236
+ self._cache[key] = entry
237
+ self._stats.total_size += size
238
+ self._stats.entry_count += 1
239
+
240
+ # Evict if necessary
241
+ self._evict_lru()
242
+
243
+ def get_or_compute(
244
+ self,
245
+ key: str,
246
+ compute_fn: Callable[[], T],
247
+ ttl: Optional[float] = None
248
+ ) -> T:
249
+ """Get from cache or compute if missing.
250
+
251
+ Args:
252
+ key: Cache key
253
+ compute_fn: Function to compute value if not cached
254
+ ttl: Time-to-live in seconds
255
+
256
+ Returns:
257
+ Cached or computed value
258
+ """
259
+ # Try cache first
260
+ value = self.get(key)
261
+ if value is not None:
262
+ return value
263
+
264
+ # Compute value
265
+ value = compute_fn()
266
+
267
+ # Cache result
268
+ self.put(key, value, ttl)
269
+
270
+ return value
271
+
272
+ def get_file(
273
+ self,
274
+ file_path: Union[str, Path],
275
+ mode: str = 'r',
276
+ encoding: str = 'utf-8',
277
+ ttl: Optional[float] = None
278
+ ) -> Optional[Any]:
279
+ """Get file content from cache or read from disk.
280
+
281
+ Args:
282
+ file_path: Path to file
283
+ mode: File open mode ('r' for text, 'rb' for binary)
284
+ encoding: Text encoding (for text mode)
285
+ ttl: Time-to-live in seconds
286
+
287
+ Returns:
288
+ File content or None if file doesn't exist
289
+ """
290
+ file_path = Path(file_path)
291
+
292
+ # Generate cache key based on file path and modification time
293
+ if file_path.exists():
294
+ mtime = file_path.stat().st_mtime
295
+ cache_key = f"file:{file_path}:{mtime}:{mode}"
296
+ else:
297
+ return None
298
+
299
+ def read_file():
300
+ """Read file from disk."""
301
+ try:
302
+ if 'b' in mode:
303
+ with open(file_path, mode) as f:
304
+ return f.read()
305
+ else:
306
+ with open(file_path, mode, encoding=encoding) as f:
307
+ return f.read()
308
+ except Exception as e:
309
+ self._logger.error(f"Failed to read file {file_path}: {e}")
310
+ return None
311
+
312
+ return self.get_or_compute(cache_key, read_file, ttl)
313
+
314
+ def get_json(
315
+ self,
316
+ file_path: Union[str, Path],
317
+ ttl: Optional[float] = None
318
+ ) -> Optional[Dict[str, Any]]:
319
+ """Get JSON file content from cache or parse from disk.
320
+
321
+ Args:
322
+ file_path: Path to JSON file
323
+ ttl: Time-to-live in seconds
324
+
325
+ Returns:
326
+ Parsed JSON or None if file doesn't exist/invalid
327
+ """
328
+ content = self.get_file(file_path, mode='r', ttl=ttl)
329
+ if content is None:
330
+ return None
331
+
332
+ try:
333
+ return json.loads(content)
334
+ except json.JSONDecodeError as e:
335
+ self._logger.error(f"Invalid JSON in {file_path}: {e}")
336
+ return None
337
+
338
+ def invalidate(self, key: str) -> bool:
339
+ """Remove entry from cache.
340
+
341
+ Args:
342
+ key: Cache key to invalidate
343
+
344
+ Returns:
345
+ True if entry was removed, False if not found
346
+ """
347
+ with self._lock:
348
+ entry = self._cache.pop(key, None)
349
+ if entry:
350
+ self._stats.total_size -= entry.size
351
+ self._stats.entry_count -= 1
352
+ return True
353
+ return False
354
+
355
+ def invalidate_pattern(self, pattern: str) -> int:
356
+ """Invalidate all keys matching pattern.
357
+
358
+ Args:
359
+ pattern: Pattern to match (supports * wildcard)
360
+
361
+ Returns:
362
+ Number of entries invalidated
363
+ """
364
+ import fnmatch
365
+
366
+ with self._lock:
367
+ matching_keys = [
368
+ key for key in self._cache.keys()
369
+ if fnmatch.fnmatch(key, pattern)
370
+ ]
371
+
372
+ count = 0
373
+ for key in matching_keys:
374
+ if self.invalidate(key):
375
+ count += 1
376
+
377
+ return count
378
+
379
+ def clear(self):
380
+ """Clear all cache entries."""
381
+ with self._lock:
382
+ self._cache.clear()
383
+ self._stats = CacheStats()
384
+ self._logger.info("Cache cleared")
385
+
386
+ def get_stats(self) -> Dict[str, Any]:
387
+ """Get cache statistics."""
388
+ with self._lock:
389
+ # Clean up expired entries first
390
+ self._evict_expired()
391
+
392
+ return {
393
+ "hits": self._stats.hits,
394
+ "misses": self._stats.misses,
395
+ "hit_rate": self._stats.hit_rate,
396
+ "evictions": self._stats.evictions,
397
+ "entry_count": self._stats.entry_count,
398
+ "total_size_mb": self._stats.total_size / (1024 * 1024),
399
+ "max_size_mb": self.max_size / (1024 * 1024)
400
+ }
401
+
402
+ def _save_cache(self):
403
+ """Persist cache to disk."""
404
+ if not self.persist_path:
405
+ return
406
+
407
+ try:
408
+ self.persist_path.parent.mkdir(parents=True, exist_ok=True)
409
+ with open(self.persist_path, 'wb') as f:
410
+ pickle.dump(self._cache, f)
411
+ self._logger.debug(f"Cache persisted to {self.persist_path}")
412
+ except Exception as e:
413
+ self._logger.error(f"Failed to persist cache: {e}")
414
+
415
+ def _load_cache(self):
416
+ """Load cache from disk."""
417
+ if not self.persist_path or not self.persist_path.exists():
418
+ return
419
+
420
+ try:
421
+ with open(self.persist_path, 'rb') as f:
422
+ loaded_cache = pickle.load(f)
423
+
424
+ # Rebuild cache with validation
425
+ for key, entry in loaded_cache.items():
426
+ if not entry.is_expired():
427
+ self._cache[key] = entry
428
+ self._stats.total_size += entry.size
429
+ self._stats.entry_count += 1
430
+
431
+ self._logger.info(f"Loaded {len(self._cache)} entries from cache")
432
+ except Exception as e:
433
+ self._logger.error(f"Failed to load cache: {e}")
434
+
435
+
436
+ class AsyncFileSystemCache:
437
+ """Async version of FileSystemCache for async applications.
438
+
439
+ Provides non-blocking cache operations for async contexts.
440
+ """
441
+
442
+ def __init__(
443
+ self,
444
+ max_size_mb: float = 100,
445
+ max_entries: int = 10000,
446
+ default_ttl: Optional[float] = None
447
+ ):
448
+ self.sync_cache = FileSystemCache(
449
+ max_size_mb=max_size_mb,
450
+ max_entries=max_entries,
451
+ default_ttl=default_ttl
452
+ )
453
+ self._lock = asyncio.Lock()
454
+ self._logger = get_logger("async_fs_cache")
455
+
456
+ async def get(self, key: str) -> Optional[Any]:
457
+ """Get value from cache asynchronously."""
458
+ async with self._lock:
459
+ # Run in executor to avoid blocking
460
+ loop = asyncio.get_event_loop()
461
+ return await loop.run_in_executor(None, self.sync_cache.get, key)
462
+
463
+ async def put(
464
+ self,
465
+ key: str,
466
+ value: Any,
467
+ ttl: Optional[float] = None
468
+ ) -> None:
469
+ """Store value in cache asynchronously."""
470
+ async with self._lock:
471
+ loop = asyncio.get_event_loop()
472
+ await loop.run_in_executor(None, self.sync_cache.put, key, value, ttl)
473
+
474
+ async def get_or_compute(
475
+ self,
476
+ key: str,
477
+ compute_fn: Callable[[], T],
478
+ ttl: Optional[float] = None
479
+ ) -> T:
480
+ """Get from cache or compute asynchronously."""
481
+ # Try cache first
482
+ value = await self.get(key)
483
+ if value is not None:
484
+ return value
485
+
486
+ # Compute value (handle both sync and async functions)
487
+ if asyncio.iscoroutinefunction(compute_fn):
488
+ value = await compute_fn()
489
+ else:
490
+ loop = asyncio.get_event_loop()
491
+ value = await loop.run_in_executor(None, compute_fn)
492
+
493
+ # Cache result
494
+ await self.put(key, value, ttl)
495
+
496
+ return value
497
+
498
+ async def get_file(
499
+ self,
500
+ file_path: Union[str, Path],
501
+ mode: str = 'r',
502
+ encoding: str = 'utf-8',
503
+ ttl: Optional[float] = None
504
+ ) -> Optional[Any]:
505
+ """Get file content asynchronously."""
506
+ loop = asyncio.get_event_loop()
507
+ return await loop.run_in_executor(
508
+ None,
509
+ self.sync_cache.get_file,
510
+ file_path,
511
+ mode,
512
+ encoding,
513
+ ttl
514
+ )
515
+
516
+
517
+ # Global cache instances
518
+ _file_cache: Optional[FileSystemCache] = None
519
+ _async_cache: Optional[AsyncFileSystemCache] = None
520
+
521
+
522
+ def get_file_cache(
523
+ max_size_mb: float = 100,
524
+ default_ttl: Optional[float] = 300
525
+ ) -> FileSystemCache:
526
+ """Get or create global file cache instance.
527
+
528
+ Args:
529
+ max_size_mb: Maximum cache size in MB
530
+ default_ttl: Default TTL in seconds
531
+
532
+ Returns:
533
+ Global FileSystemCache instance
534
+ """
535
+ global _file_cache
536
+ if _file_cache is None:
537
+ # Use project cache directory
538
+ cache_dir = Path.home() / ".claude-mpm" / "cache"
539
+ cache_dir.mkdir(parents=True, exist_ok=True)
540
+
541
+ _file_cache = FileSystemCache(
542
+ max_size_mb=max_size_mb,
543
+ default_ttl=default_ttl,
544
+ persist_path=cache_dir / "fs_cache.pkl"
545
+ )
546
+ return _file_cache
547
+
548
+
549
+ def get_async_cache(
550
+ max_size_mb: float = 100,
551
+ default_ttl: Optional[float] = 300
552
+ ) -> AsyncFileSystemCache:
553
+ """Get or create global async cache instance."""
554
+ global _async_cache
555
+ if _async_cache is None:
556
+ _async_cache = AsyncFileSystemCache(
557
+ max_size_mb=max_size_mb,
558
+ default_ttl=default_ttl
559
+ )
560
+ return _async_cache
561
+
562
+
563
+ def cache_decorator(
564
+ ttl: Optional[float] = None,
565
+ key_prefix: str = ""
566
+ ):
567
+ """Decorator for caching function results.
568
+
569
+ Args:
570
+ ttl: Time-to-live in seconds
571
+ key_prefix: Optional prefix for cache keys
572
+
573
+ Example:
574
+ @cache_decorator(ttl=60)
575
+ def expensive_function(param):
576
+ return compute_result(param)
577
+ """
578
+ def decorator(func: Callable) -> Callable:
579
+ cache = get_file_cache()
580
+
581
+ def wrapper(*args, **kwargs):
582
+ # Generate cache key from function name and arguments
583
+ key_parts = [key_prefix, func.__name__]
584
+ key_parts.extend(str(arg) for arg in args)
585
+ key_parts.extend(f"{k}={v}" for k, v in sorted(kwargs.items()))
586
+ cache_key = ":".join(key_parts)
587
+
588
+ # Use cache
589
+ return cache.get_or_compute(
590
+ cache_key,
591
+ lambda: func(*args, **kwargs),
592
+ ttl=ttl
593
+ )
594
+
595
+ return wrapper
596
+ return decorator