claude-mpm 4.1.2-py3-none-any.whl → 4.1.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/templates/engineer.json +33 -11
- claude_mpm/cli/commands/agents.py +556 -1009
- claude_mpm/cli/commands/memory.py +248 -927
- claude_mpm/cli/commands/run.py +139 -484
- claude_mpm/cli/startup_logging.py +76 -0
- claude_mpm/core/agent_registry.py +6 -10
- claude_mpm/core/framework_loader.py +114 -595
- claude_mpm/core/logging_config.py +2 -4
- claude_mpm/hooks/claude_hooks/event_handlers.py +7 -117
- claude_mpm/hooks/claude_hooks/hook_handler.py +91 -755
- claude_mpm/hooks/claude_hooks/hook_handler_original.py +1040 -0
- claude_mpm/hooks/claude_hooks/hook_handler_refactored.py +347 -0
- claude_mpm/hooks/claude_hooks/services/__init__.py +13 -0
- claude_mpm/hooks/claude_hooks/services/connection_manager.py +190 -0
- claude_mpm/hooks/claude_hooks/services/duplicate_detector.py +106 -0
- claude_mpm/hooks/claude_hooks/services/state_manager.py +282 -0
- claude_mpm/hooks/claude_hooks/services/subagent_processor.py +374 -0
- claude_mpm/services/agents/deployment/agent_deployment.py +42 -454
- claude_mpm/services/agents/deployment/base_agent_locator.py +132 -0
- claude_mpm/services/agents/deployment/deployment_results_manager.py +185 -0
- claude_mpm/services/agents/deployment/single_agent_deployer.py +315 -0
- claude_mpm/services/agents/memory/agent_memory_manager.py +42 -508
- claude_mpm/services/agents/memory/memory_categorization_service.py +165 -0
- claude_mpm/services/agents/memory/memory_file_service.py +103 -0
- claude_mpm/services/agents/memory/memory_format_service.py +201 -0
- claude_mpm/services/agents/memory/memory_limits_service.py +99 -0
- claude_mpm/services/agents/registry/__init__.py +1 -1
- claude_mpm/services/cli/__init__.py +18 -0
- claude_mpm/services/cli/agent_cleanup_service.py +407 -0
- claude_mpm/services/cli/agent_dependency_service.py +395 -0
- claude_mpm/services/cli/agent_listing_service.py +463 -0
- claude_mpm/services/cli/agent_output_formatter.py +605 -0
- claude_mpm/services/cli/agent_validation_service.py +589 -0
- claude_mpm/services/cli/dashboard_launcher.py +424 -0
- claude_mpm/services/cli/memory_crud_service.py +617 -0
- claude_mpm/services/cli/memory_output_formatter.py +604 -0
- claude_mpm/services/cli/session_manager.py +513 -0
- claude_mpm/services/cli/socketio_manager.py +498 -0
- claude_mpm/services/cli/startup_checker.py +370 -0
- claude_mpm/services/core/cache_manager.py +311 -0
- claude_mpm/services/core/memory_manager.py +637 -0
- claude_mpm/services/core/path_resolver.py +498 -0
- claude_mpm/services/core/service_container.py +520 -0
- claude_mpm/services/core/service_interfaces.py +436 -0
- claude_mpm/services/diagnostics/checks/agent_check.py +65 -19
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/RECORD +52 -22
- claude_mpm/cli/commands/run_config_checker.py +0 -159
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.3.dist-info}/top_level.txt +0 -0
claude_mpm/services/cli/startup_checker.py (new file)
@@ -0,0 +1,370 @@
+"""Startup checking service for CLI commands.
+
+WHY: This service extracts startup validation logic from run.py to improve
+separation of concerns, testability, and reusability across CLI commands.
+
+DESIGN DECISIONS:
+- Interface-based design for dependency injection
+- Single responsibility: startup validation only
+- Returns structured warnings for better handling
+- Non-blocking: warns but doesn't prevent execution
+- Reusable across multiple CLI commands
+"""
+
+import os
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from pathlib import Path
+from typing import List, Optional
+
+from claude_mpm.core.logger import get_logger
+
+
+# Interface Definition
+class IStartupChecker(ABC):
+    """Interface for startup checking service."""
+
+    @abstractmethod
+    def check_configuration(self) -> List["StartupWarning"]:
+        """Validate configuration and return warnings."""
+
+    @abstractmethod
+    def check_memory(self, resume_enabled: bool = False) -> Optional["StartupWarning"]:
+        """Check Claude.json memory usage."""
+
+    @abstractmethod
+    def check_environment(self) -> List["StartupWarning"]:
+        """Validate environment and paths."""
+
+    @abstractmethod
+    def get_startup_warnings(
+        self, resume_enabled: bool = False
+    ) -> List["StartupWarning"]:
+        """Collect all startup warnings."""
+
+
+@dataclass
+class StartupWarning:
+    """Structured warning information."""
+
+    category: str  # 'config', 'memory', 'environment'
+    message: str
+    suggestion: Optional[str] = None
+    severity: str = "warning"  # 'info', 'warning', 'error'
+
+
+class StartupCheckerService(IStartupChecker):
+    """Service for startup validation and health checks."""
+
+    def __init__(self, config_service):
+        """Initialize the startup checker.
+
+        Args:
+            config_service: Configuration service instance (IConfigurationService)
+        """
+        self.config_service = config_service
+        self.logger = get_logger("StartupChecker")
+
+    def check_configuration(self) -> List[StartupWarning]:
+        """Validate configuration and return warnings.
+
+        Checks:
+        - Response logging directory exists and is writable
+        - Agent deployment configuration
+        - Memory management configuration
+        - Common configuration issues
+
+        Returns:
+            List of configuration warnings
+        """
+        warnings = []
+
+        try:
+            # Check response logging configuration
+            response_logging = self.config_service.get("response_logging", {})
+            if response_logging.get("enabled", False):
+                log_dir = response_logging.get("directory")
+                if log_dir:
+                    warnings.extend(self._check_log_directory(log_dir))
+
+            # Check memory management configuration
+            memory_config = self.config_service.get("memory_management", {})
+            if memory_config.get("auto_cleanup", False):
+                cleanup_threshold = memory_config.get("cleanup_threshold_mb", 100)
+                if cleanup_threshold < 50:
+                    warnings.append(
+                        StartupWarning(
+                            category="config",
+                            message=f"Memory cleanup threshold very low: {cleanup_threshold}MB",
+                            suggestion="Consider increasing to at least 50MB",
+                            severity="warning",
+                        )
+                    )
+
+            # Check for deprecated configuration keys
+            warnings.extend(self._check_deprecated_keys())
+
+            # Check configuration file access
+            warnings.extend(self._check_config_file_access())
+
+        except Exception as e:
+            self.logger.warning(f"Configuration check failed: {e}")
+            warnings.append(
+                StartupWarning(
+                    category="config",
+                    message=f"Configuration check failed: {e}",
+                    severity="info",
+                )
+            )
+
+        return warnings
+
+    def check_memory(self, resume_enabled: bool = False) -> Optional[StartupWarning]:
+        """Check .claude.json memory usage.
+
+        WHY: Large .claude.json files (>500KB) cause significant memory issues
+        when using --resume. Claude Code loads the entire conversation history
+        into memory, leading to 2GB+ memory consumption.
+
+        Args:
+            resume_enabled: Whether --mpm-resume is being used
+
+        Returns:
+            Warning if memory issue detected, None otherwise
+        """
+        if not resume_enabled:
+            return None
+
+        try:
+            claude_json_path = Path.cwd() / ".claude.json"
+            if not claude_json_path.exists():
+                self.logger.debug("No .claude.json file found")
+                return None
+
+            file_size = claude_json_path.stat().st_size
+
+            # Only warn if file is larger than 500KB
+            if file_size > 500 * 1024:
+                formatted_size = self._format_file_size(file_size)
+                return StartupWarning(
+                    category="memory",
+                    message=f"Large .claude.json file detected ({formatted_size})",
+                    suggestion="Consider running 'claude-mpm cleanup-memory' to archive old conversations",
+                    severity="warning",
+                )
+
+            self.logger.info(f".claude.json size: {self._format_file_size(file_size)}")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to check .claude.json size: {e}")
+
+        return None
+
+    def check_environment(self) -> List[StartupWarning]:
+        """Validate environment and paths.
+
+        Checks:
+        - Python version compatibility
+        - Virtual environment activation
+        - Required directories exist
+        - File permissions
+
+        Returns:
+            List of environment warnings
+        """
+        warnings = []
+
+        try:
+            # Check Python version
+            import sys
+
+            if sys.version_info < (3, 8):
+                warnings.append(
+                    StartupWarning(
+                        category="environment",
+                        message=f"Python {sys.version_info.major}.{sys.version_info.minor} detected",
+                        suggestion="Python 3.8+ is recommended for optimal performance",
+                        severity="info",
+                    )
+                )
+
+            # Check for common missing directories
+            warnings.extend(self._check_required_directories())
+
+        except Exception as e:
+            self.logger.warning(f"Environment check failed: {e}")
+            warnings.append(
+                StartupWarning(
+                    category="environment",
+                    message=f"Environment check failed: {e}",
+                    severity="info",
+                )
+            )
+
+        return warnings
+
+    def get_startup_warnings(
+        self, resume_enabled: bool = False
+    ) -> List[StartupWarning]:
+        """Collect all startup warnings.
+
+        Args:
+            resume_enabled: Whether --mpm-resume is being used
+
+        Returns:
+            Complete list of startup warnings
+        """
+        all_warnings = []
+
+        # Collect configuration warnings
+        all_warnings.extend(self.check_configuration())
+
+        # Check memory if resume is enabled
+        memory_warning = self.check_memory(resume_enabled)
+        if memory_warning:
+            all_warnings.append(memory_warning)
+
+        # Collect environment warnings
+        all_warnings.extend(self.check_environment())
+
+        return all_warnings
+
+    def display_warnings(self, warnings: List[StartupWarning]) -> None:
+        """Display warnings to the user.
+
+        Args:
+            warnings: List of warnings to display
+        """
+        if not warnings:
+            return
+
+        # Group warnings by severity
+        errors = [w for w in warnings if w.severity == "error"]
+        warnings_list = [w for w in warnings if w.severity == "warning"]
+        info = [w for w in warnings if w.severity == "info"]
+
+        # Display errors first
+        for warning in errors:
+            print(f"❌ {warning.message}")
+            if warning.suggestion:
+                print(f" {warning.suggestion}")
+
+        # Display warnings
+        for warning in warnings_list:
+            print(f"⚠️ {warning.message}")
+            if warning.suggestion:
+                print(f" 💡 {warning.suggestion}")
+
+        # Display info last
+        for warning in info:
+            print(f"ℹ️ {warning.message}")
+            if warning.suggestion:
+                print(f" {warning.suggestion}")
+
+        if errors or warnings_list:
+            print()  # Add spacing after warnings
+
+    # Private helper methods
+
+    def _check_log_directory(self, log_dir: str) -> List[StartupWarning]:
+        """Check if log directory exists and is writable."""
+        warnings = []
+        log_path = Path(log_dir)
+
+        if not log_path.exists():
+            warnings.append(
+                StartupWarning(
+                    category="config",
+                    message=f"Response logging directory does not exist: {log_path}",
+                    suggestion=f"Run: mkdir -p {log_path}",
+                    severity="warning",
+                )
+            )
+        elif not log_path.is_dir():
+            warnings.append(
+                StartupWarning(
+                    category="config",
+                    message=f"Response logging path is not a directory: {log_path}",
+                    severity="warning",
+                )
+            )
+        elif not os.access(log_path, os.W_OK):
+            warnings.append(
+                StartupWarning(
+                    category="config",
+                    message=f"Response logging directory is not writable: {log_path}",
+                    suggestion=f"Run: chmod 755 {log_path}",
+                    severity="warning",
+                )
+            )
+
+        return warnings
+
+    def _check_deprecated_keys(self) -> List[StartupWarning]:
+        """Check for deprecated configuration keys."""
+        warnings = []
+        deprecated_keys = ["legacy_mode", "old_agent_format", "deprecated_logging"]
+
+        for key in deprecated_keys:
+            if self.config_service.get(key) is not None:
+                warnings.append(
+                    StartupWarning(
+                        category="config",
+                        message=f"Deprecated configuration key found: {key}",
+                        suggestion="Consider removing this key from your configuration",
+                        severity="info",
+                    )
+                )
+
+        return warnings
+
+    def _check_config_file_access(self) -> List[StartupWarning]:
+        """Check configuration file accessibility."""
+        warnings = []
+
+        try:
+            # Try to get config file path from config service
+            config_file = getattr(self.config_service, "config_file", None)
+            if config_file and Path(config_file).exists():
+                if not os.access(config_file, os.R_OK):
+                    warnings.append(
+                        StartupWarning(
+                            category="config",
+                            message=f"Configuration file is not readable: {config_file}",
+                            suggestion=f"Run: chmod 644 {config_file}",
+                            severity="warning",
+                        )
+                    )
+        except Exception as e:
+            self.logger.debug(f"Config file access check failed: {e}")
+
+        return warnings
+
+    def _check_required_directories(self) -> List[StartupWarning]:
+        """Check for required directories."""
+        warnings = []
+
+        # Check .claude directory
+        claude_dir = Path.cwd() / ".claude"
+        if not claude_dir.exists():
+            self.logger.debug(
+                ".claude directory does not exist (will be created on first use)"
+            )
+        elif not claude_dir.is_dir():
+            warnings.append(
+                StartupWarning(
+                    category="environment",
+                    message=".claude path exists but is not a directory",
+                    severity="warning",
+                )
+            )
+
+        return warnings
+
+    def _format_file_size(self, size_bytes: int) -> str:
+        """Format file size in human readable format."""
+        if size_bytes < 1024:
+            return f"{size_bytes} B"
+        if size_bytes < 1024 * 1024:
+            return f"{size_bytes / 1024:.1f} KB"
+        return f"{size_bytes / (1024 * 1024):.1f} MB"
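
For context, a minimal sketch of how a CLI command might drive the new StartupCheckerService. This is illustrative only: the SimpleConfig stub below is not part of the package, and the only assumption about the injected config service is that it exposes a dict-style get(key, default) method, as the service uses above.

    from claude_mpm.services.cli.startup_checker import StartupCheckerService

    class SimpleConfig:
        """Illustrative stand-in for the real configuration service."""

        def get(self, key, default=None):
            # Canned values; the real service reads the project configuration.
            settings = {"response_logging": {"enabled": False}}
            return settings.get(key, default)

    checker = StartupCheckerService(SimpleConfig())
    warnings = checker.get_startup_warnings(resume_enabled=True)
    checker.display_warnings(warnings)  # prints grouped ❌ / ⚠️ / ℹ️ lines; non-blocking by design
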
claude_mpm/services/core/cache_manager.py (new file)
@@ -0,0 +1,311 @@
+"""
+Cache Manager Service for Framework Loader
+==========================================
+
+WHY: This service extracts and centralizes cache management logic from the FrameworkLoader,
+providing a clean interface for managing multiple cache types with different TTLs.
+
+DESIGN DECISION: Built on top of the existing FileSystemCache infrastructure while
+providing a specialized interface for framework-specific caching needs.
+
+EXTRACTED FROM: core/framework_loader.py (lines 89-140)
+- Reduces FrameworkLoader complexity by ~50 lines
+- Provides thread-safe, type-specific cache management
+
+NOTE: The ICacheManager interface has been moved to service_interfaces.py for
+better organization and to support dependency injection.
+"""
+
+import threading
+import time
+from typing import Any, Dict, Optional, Set, Tuple
+
+from claude_mpm.core.cache import FileSystemCache
+from claude_mpm.core.logger import get_logger
+
+# Import interface from consolidated location
+from .service_interfaces import ICacheManager
+
+
+class CacheManager(ICacheManager):
+    """
+    Centralized cache management service for framework loader.
+
+    This service manages multiple cache types with different TTLs:
+    - Agent capabilities (60s TTL)
+    - Deployed agents (30s TTL)
+    - Agent metadata (60s TTL)
+    - Memories (60s TTL)
+
+    Thread-safe implementation using locks for concurrent access.
+    """
+
+    # Default TTL values (in seconds)
+    DEFAULT_CAPABILITIES_TTL = 60
+    DEFAULT_DEPLOYED_AGENTS_TTL = 30
+    DEFAULT_METADATA_TTL = 60
+    DEFAULT_MEMORIES_TTL = 60
+
+    def __init__(
+        self,
+        capabilities_ttl: float = DEFAULT_CAPABILITIES_TTL,
+        deployed_agents_ttl: float = DEFAULT_DEPLOYED_AGENTS_TTL,
+        metadata_ttl: float = DEFAULT_METADATA_TTL,
+        memories_ttl: float = DEFAULT_MEMORIES_TTL,
+    ):
+        """
+        Initialize cache manager with configurable TTLs.
+
+        Args:
+            capabilities_ttl: TTL for agent capabilities cache
+            deployed_agents_ttl: TTL for deployed agents cache
+            metadata_ttl: TTL for agent metadata cache
+            memories_ttl: TTL for memories cache
+        """
+        self.logger = get_logger("cache_manager")
+
+        # TTL configuration
+        self.capabilities_ttl = capabilities_ttl
+        self.deployed_agents_ttl = deployed_agents_ttl
+        self.metadata_ttl = metadata_ttl
+        self.memories_ttl = memories_ttl
+
+        # Cache storage with timestamps
+        self._capabilities_cache: Optional[str] = None
+        self._capabilities_cache_time: float = 0
+
+        self._deployed_agents_cache: Optional[Set[str]] = None
+        self._deployed_agents_cache_time: float = 0
+
+        self._agent_metadata_cache: Dict[
+            str, Tuple[Optional[Dict[str, Any]], float]
+        ] = {}
+
+        self._memories_cache: Optional[Dict[str, Any]] = None
+        self._memories_cache_time: float = 0
+
+        # Thread safety
+        self._lock = threading.RLock()
+
+        # Underlying file system cache (optional, for persistence)
+        self._fs_cache = FileSystemCache(
+            max_size_mb=50,  # 50MB for framework caches
+            default_ttl=max(
+                capabilities_ttl, deployed_agents_ttl, metadata_ttl, memories_ttl
+            ),
+        )
+
+        self.logger.debug(
+            f"Cache manager initialized with TTLs: "
+            f"capabilities={capabilities_ttl}s, "
+            f"deployed_agents={deployed_agents_ttl}s, "
+            f"metadata={metadata_ttl}s, "
+            f"memories={memories_ttl}s"
+        )
+
+    def is_cache_valid(self, cache_time: float, ttl: float) -> bool:
+        """Check if a cache entry is still valid based on its timestamp and TTL."""
+        current_time = time.time()
+        age = current_time - cache_time
+        return age < ttl
+
+    # Agent capabilities cache
+    def get_capabilities(self) -> Optional[str]:
+        """Get cached agent capabilities."""
+        with self._lock:
+            if self._capabilities_cache is not None and self.is_cache_valid(
+                self._capabilities_cache_time, self.capabilities_ttl
+            ):
+                age = time.time() - self._capabilities_cache_time
+                self.logger.debug(f"Cache hit: agent capabilities (age: {age:.1f}s)")
+                return self._capabilities_cache
+
+            self.logger.debug("Cache miss: agent capabilities")
+            return None
+
+    def set_capabilities(self, value: str) -> None:
+        """Set agent capabilities cache."""
+        with self._lock:
+            self._capabilities_cache = value
+            self._capabilities_cache_time = time.time()
+            self.logger.debug("Updated agent capabilities cache")
+
+    # Deployed agents cache
+    def get_deployed_agents(self) -> Optional[Set[str]]:
+        """Get cached deployed agents set."""
+        with self._lock:
+            if self._deployed_agents_cache is not None and self.is_cache_valid(
+                self._deployed_agents_cache_time, self.deployed_agents_ttl
+            ):
+                age = time.time() - self._deployed_agents_cache_time
+                self.logger.debug(f"Cache hit: deployed agents (age: {age:.1f}s)")
+                return (
+                    self._deployed_agents_cache.copy()
+                )  # Return a copy to prevent external modification
+
+            self.logger.debug("Cache miss: deployed agents")
+            return None
+
+    def set_deployed_agents(self, agents: Set[str]) -> None:
+        """Set deployed agents cache."""
+        with self._lock:
+            self._deployed_agents_cache = agents.copy()  # Store a copy
+            self._deployed_agents_cache_time = time.time()
+            self.logger.debug(
+                f"Updated deployed agents cache with {len(agents)} agents"
+            )
+
+    # Agent metadata cache
+    def get_agent_metadata(
+        self, agent_file: str
+    ) -> Optional[Tuple[Optional[Dict[str, Any]], float]]:
+        """Get cached agent metadata for a specific file."""
+        with self._lock:
+            if agent_file in self._agent_metadata_cache:
+                cached_data, cached_mtime = self._agent_metadata_cache[agent_file]
+                # Check if cache is still valid
+                if self.is_cache_valid(cached_mtime, self.metadata_ttl):
+                    self.logger.debug(f"Cache hit: metadata for {agent_file}")
+                    return cached_data, cached_mtime
+
+            self.logger.debug(f"Cache miss: metadata for {agent_file}")
+            return None
+
+    def set_agent_metadata(
+        self, agent_file: str, metadata: Optional[Dict[str, Any]], mtime: float
+    ) -> None:
+        """Set agent metadata cache for a specific file."""
+        with self._lock:
+            self._agent_metadata_cache[agent_file] = (metadata, mtime)
+            self.logger.debug(f"Updated metadata cache for {agent_file}")
+
+    # Memories cache
+    def get_memories(self) -> Optional[Dict[str, Any]]:
+        """Get cached memories."""
+        with self._lock:
+            if self._memories_cache is not None and self.is_cache_valid(
+                self._memories_cache_time, self.memories_ttl
+            ):
+                age = time.time() - self._memories_cache_time
+                self.logger.debug(f"Cache hit: memories (age: {age:.1f}s)")
+                return self._memories_cache.copy()  # Return a copy
+
+            self.logger.debug("Cache miss: memories")
+            return None
+
+    def set_memories(self, memories: Dict[str, Any]) -> None:
+        """Set memories cache."""
+        with self._lock:
+            self._memories_cache = memories.copy()  # Store a copy
+            self._memories_cache_time = time.time()
+            self.logger.debug("Updated memories cache")
+
+    # Cache clearing methods
+    def clear_all(self) -> None:
+        """Clear all caches."""
+        with self._lock:
+            self.logger.info("Clearing all caches")
+
+            # Clear capabilities
+            self._capabilities_cache = None
+            self._capabilities_cache_time = 0
+
+            # Clear deployed agents
+            self._deployed_agents_cache = None
+            self._deployed_agents_cache_time = 0
+
+            # Clear metadata
+            self._agent_metadata_cache.clear()
+
+            # Clear memories
+            self._memories_cache = None
+            self._memories_cache_time = 0
+
+            # Clear underlying file system cache
+            self._fs_cache.clear()
+
+    def clear_agent_caches(self) -> None:
+        """Clear agent-related caches only."""
+        with self._lock:
+            self.logger.info("Clearing agent-related caches")
+
+            # Clear capabilities
+            self._capabilities_cache = None
+            self._capabilities_cache_time = 0
+
+            # Clear deployed agents
+            self._deployed_agents_cache = None
+            self._deployed_agents_cache_time = 0
+
+            # Clear metadata
+            self._agent_metadata_cache.clear()
+
+    def clear_memory_caches(self) -> None:
+        """Clear memory-related caches only."""
+        with self._lock:
+            self.logger.info("Clearing memory caches")
+
+            self._memories_cache = None
+            self._memories_cache_time = 0
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get cache statistics."""
+        with self._lock:
+            current_time = time.time()
+
+            stats = {
+                "capabilities": {
+                    "cached": self._capabilities_cache is not None,
+                    "age": (
+                        current_time - self._capabilities_cache_time
+                        if self._capabilities_cache
+                        else None
+                    ),
+                    "valid": (
+                        self.is_cache_valid(
+                            self._capabilities_cache_time, self.capabilities_ttl
+                        )
+                        if self._capabilities_cache
+                        else False
+                    ),
+                },
+                "deployed_agents": {
+                    "cached": self._deployed_agents_cache is not None,
+                    "count": (
+                        len(self._deployed_agents_cache)
+                        if self._deployed_agents_cache
+                        else 0
+                    ),
+                    "age": (
+                        current_time - self._deployed_agents_cache_time
+                        if self._deployed_agents_cache
+                        else None
+                    ),
+                    "valid": (
+                        self.is_cache_valid(
+                            self._deployed_agents_cache_time, self.deployed_agents_ttl
+                        )
+                        if self._deployed_agents_cache
+                        else False
+                    ),
+                },
+                "metadata": {"entries": len(self._agent_metadata_cache)},
+                "memories": {
+                    "cached": self._memories_cache is not None,
+                    "age": (
+                        current_time - self._memories_cache_time
+                        if self._memories_cache
+                        else None
+                    ),
+                    "valid": (
+                        self.is_cache_valid(
+                            self._memories_cache_time, self.memories_ttl
+                        )
+                        if self._memories_cache
+                        else False
+                    ),
+                },
+                "fs_cache": self._fs_cache.get_stats() if self._fs_cache else {},
+            }
+
+            return stats
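
Similarly, the new CacheManager can be exercised on its own once claude-mpm 4.1.3 is installed (it imports FileSystemCache and ICacheManager from the package at import time). A minimal sketch using only the methods shown above; the TTLs and cached strings are arbitrary example values, not package defaults beyond those documented:

    from claude_mpm.services.core.cache_manager import CacheManager

    cache = CacheManager(capabilities_ttl=60, deployed_agents_ttl=30)

    # Typical read-through pattern: miss, compute, store.
    caps = cache.get_capabilities()
    if caps is None:
        caps = "## Available Agents\n- engineer\n- research"  # placeholder content
        cache.set_capabilities(caps)

    cache.set_deployed_agents({"engineer", "research"})
    stats = cache.get_stats()
    print(stats["deployed_agents"]["count"])  # 2

    cache.clear_agent_caches()  # clears capabilities, deployed agents, and metadata; keeps memories
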