claude-mpm 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of claude-mpm might be problematic. Click here for more details.
- claude_mpm/__init__.py +17 -0
- claude_mpm/__main__.py +14 -0
- claude_mpm/_version.py +32 -0
- claude_mpm/agents/BASE_AGENT_TEMPLATE.md +88 -0
- claude_mpm/agents/INSTRUCTIONS.md +375 -0
- claude_mpm/agents/__init__.py +118 -0
- claude_mpm/agents/agent_loader.py +621 -0
- claude_mpm/agents/agent_loader_integration.py +229 -0
- claude_mpm/agents/agents_metadata.py +204 -0
- claude_mpm/agents/base_agent.json +27 -0
- claude_mpm/agents/base_agent_loader.py +519 -0
- claude_mpm/agents/schema/agent_schema.json +160 -0
- claude_mpm/agents/system_agent_config.py +587 -0
- claude_mpm/agents/templates/__init__.py +101 -0
- claude_mpm/agents/templates/data_engineer_agent.json +46 -0
- claude_mpm/agents/templates/documentation_agent.json +45 -0
- claude_mpm/agents/templates/engineer_agent.json +49 -0
- claude_mpm/agents/templates/ops_agent.json +46 -0
- claude_mpm/agents/templates/qa_agent.json +45 -0
- claude_mpm/agents/templates/research_agent.json +49 -0
- claude_mpm/agents/templates/security_agent.json +46 -0
- claude_mpm/agents/templates/update-optimized-specialized-agents.json +374 -0
- claude_mpm/agents/templates/version_control_agent.json +46 -0
- claude_mpm/agents/test_fix_deployment/.claude-pm/config/project.json +6 -0
- claude_mpm/cli.py +655 -0
- claude_mpm/cli_main.py +13 -0
- claude_mpm/cli_module/__init__.py +15 -0
- claude_mpm/cli_module/args.py +222 -0
- claude_mpm/cli_module/commands.py +203 -0
- claude_mpm/cli_module/migration_example.py +183 -0
- claude_mpm/cli_module/refactoring_guide.md +253 -0
- claude_mpm/cli_old/__init__.py +1 -0
- claude_mpm/cli_old/ticket_cli.py +102 -0
- claude_mpm/config/__init__.py +5 -0
- claude_mpm/config/hook_config.py +42 -0
- claude_mpm/constants.py +150 -0
- claude_mpm/core/__init__.py +45 -0
- claude_mpm/core/agent_name_normalizer.py +248 -0
- claude_mpm/core/agent_registry.py +627 -0
- claude_mpm/core/agent_registry.py.bak +312 -0
- claude_mpm/core/agent_session_manager.py +273 -0
- claude_mpm/core/base_service.py +747 -0
- claude_mpm/core/base_service.py.bak +406 -0
- claude_mpm/core/config.py +334 -0
- claude_mpm/core/config_aliases.py +292 -0
- claude_mpm/core/container.py +347 -0
- claude_mpm/core/factories.py +281 -0
- claude_mpm/core/framework_loader.py +472 -0
- claude_mpm/core/injectable_service.py +206 -0
- claude_mpm/core/interfaces.py +539 -0
- claude_mpm/core/logger.py +468 -0
- claude_mpm/core/minimal_framework_loader.py +107 -0
- claude_mpm/core/mixins.py +150 -0
- claude_mpm/core/service_registry.py +299 -0
- claude_mpm/core/session_manager.py +190 -0
- claude_mpm/core/simple_runner.py +511 -0
- claude_mpm/core/tool_access_control.py +173 -0
- claude_mpm/hooks/README.md +243 -0
- claude_mpm/hooks/__init__.py +5 -0
- claude_mpm/hooks/base_hook.py +154 -0
- claude_mpm/hooks/builtin/__init__.py +1 -0
- claude_mpm/hooks/builtin/logging_hook_example.py +165 -0
- claude_mpm/hooks/builtin/post_delegation_hook_example.py +124 -0
- claude_mpm/hooks/builtin/pre_delegation_hook_example.py +125 -0
- claude_mpm/hooks/builtin/submit_hook_example.py +100 -0
- claude_mpm/hooks/builtin/ticket_extraction_hook_example.py +237 -0
- claude_mpm/hooks/builtin/todo_agent_prefix_hook.py +239 -0
- claude_mpm/hooks/builtin/workflow_start_hook.py +181 -0
- claude_mpm/hooks/hook_client.py +264 -0
- claude_mpm/hooks/hook_runner.py +370 -0
- claude_mpm/hooks/json_rpc_executor.py +259 -0
- claude_mpm/hooks/json_rpc_hook_client.py +319 -0
- claude_mpm/hooks/tool_call_interceptor.py +204 -0
- claude_mpm/init.py +246 -0
- claude_mpm/orchestration/SUBPROCESS_DESIGN.md +66 -0
- claude_mpm/orchestration/__init__.py +6 -0
- claude_mpm/orchestration/archive/direct_orchestrator.py +195 -0
- claude_mpm/orchestration/archive/factory.py +215 -0
- claude_mpm/orchestration/archive/hook_enabled_orchestrator.py +188 -0
- claude_mpm/orchestration/archive/hook_integration_example.py +178 -0
- claude_mpm/orchestration/archive/interactive_subprocess_orchestrator.py +826 -0
- claude_mpm/orchestration/archive/orchestrator.py +501 -0
- claude_mpm/orchestration/archive/pexpect_orchestrator.py +252 -0
- claude_mpm/orchestration/archive/pty_orchestrator.py +270 -0
- claude_mpm/orchestration/archive/simple_orchestrator.py +82 -0
- claude_mpm/orchestration/archive/subprocess_orchestrator.py +801 -0
- claude_mpm/orchestration/archive/system_prompt_orchestrator.py +278 -0
- claude_mpm/orchestration/archive/wrapper_orchestrator.py +187 -0
- claude_mpm/scripts/__init__.py +1 -0
- claude_mpm/scripts/ticket.py +269 -0
- claude_mpm/services/__init__.py +10 -0
- claude_mpm/services/agent_deployment.py +955 -0
- claude_mpm/services/agent_lifecycle_manager.py +948 -0
- claude_mpm/services/agent_management_service.py +596 -0
- claude_mpm/services/agent_modification_tracker.py +841 -0
- claude_mpm/services/agent_profile_loader.py +606 -0
- claude_mpm/services/agent_registry.py +677 -0
- claude_mpm/services/base_agent_manager.py +380 -0
- claude_mpm/services/framework_agent_loader.py +337 -0
- claude_mpm/services/framework_claude_md_generator/README.md +92 -0
- claude_mpm/services/framework_claude_md_generator/__init__.py +206 -0
- claude_mpm/services/framework_claude_md_generator/content_assembler.py +151 -0
- claude_mpm/services/framework_claude_md_generator/content_validator.py +126 -0
- claude_mpm/services/framework_claude_md_generator/deployment_manager.py +137 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/__init__.py +106 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/agents.py +582 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/claude_pm_init.py +97 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/core_responsibilities.py +27 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/delegation_constraints.py +23 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/environment_config.py +23 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/footer.py +20 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/header.py +26 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/orchestration_principles.py +30 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/role_designation.py +37 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/subprocess_validation.py +111 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/todo_task_tools.py +89 -0
- claude_mpm/services/framework_claude_md_generator/section_generators/troubleshooting.py +39 -0
- claude_mpm/services/framework_claude_md_generator/section_manager.py +106 -0
- claude_mpm/services/framework_claude_md_generator/version_manager.py +121 -0
- claude_mpm/services/framework_claude_md_generator.py +621 -0
- claude_mpm/services/hook_service.py +388 -0
- claude_mpm/services/hook_service_manager.py +223 -0
- claude_mpm/services/json_rpc_hook_manager.py +92 -0
- claude_mpm/services/parent_directory_manager/README.md +83 -0
- claude_mpm/services/parent_directory_manager/__init__.py +577 -0
- claude_mpm/services/parent_directory_manager/backup_manager.py +258 -0
- claude_mpm/services/parent_directory_manager/config_manager.py +210 -0
- claude_mpm/services/parent_directory_manager/deduplication_manager.py +279 -0
- claude_mpm/services/parent_directory_manager/framework_protector.py +143 -0
- claude_mpm/services/parent_directory_manager/operations.py +186 -0
- claude_mpm/services/parent_directory_manager/state_manager.py +624 -0
- claude_mpm/services/parent_directory_manager/template_deployer.py +579 -0
- claude_mpm/services/parent_directory_manager/validation_manager.py +378 -0
- claude_mpm/services/parent_directory_manager/version_control_helper.py +339 -0
- claude_mpm/services/parent_directory_manager/version_manager.py +222 -0
- claude_mpm/services/shared_prompt_cache.py +819 -0
- claude_mpm/services/ticket_manager.py +213 -0
- claude_mpm/services/ticket_manager_di.py +318 -0
- claude_mpm/services/ticketing_service_original.py +508 -0
- claude_mpm/services/version_control/VERSION +1 -0
- claude_mpm/services/version_control/__init__.py +70 -0
- claude_mpm/services/version_control/branch_strategy.py +670 -0
- claude_mpm/services/version_control/conflict_resolution.py +744 -0
- claude_mpm/services/version_control/git_operations.py +784 -0
- claude_mpm/services/version_control/semantic_versioning.py +703 -0
- claude_mpm/ui/__init__.py +1 -0
- claude_mpm/ui/rich_terminal_ui.py +295 -0
- claude_mpm/ui/terminal_ui.py +328 -0
- claude_mpm/utils/__init__.py +16 -0
- claude_mpm/utils/config_manager.py +468 -0
- claude_mpm/utils/import_migration_example.py +80 -0
- claude_mpm/utils/imports.py +182 -0
- claude_mpm/utils/path_operations.py +357 -0
- claude_mpm/utils/paths.py +289 -0
- claude_mpm-0.3.0.dist-info/METADATA +290 -0
- claude_mpm-0.3.0.dist-info/RECORD +159 -0
- claude_mpm-0.3.0.dist-info/WHEEL +5 -0
- claude_mpm-0.3.0.dist-info/entry_points.txt +4 -0
- claude_mpm-0.3.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,819 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Shared Prompt Cache Service
|
|
4
|
+
===========================
|
|
5
|
+
|
|
6
|
+
High-performance caching service for subprocess agent prompt loading optimization.
|
|
7
|
+
Implements singleton pattern for cross-subprocess cache sharing with LRU cache and TTL functionality.
|
|
8
|
+
|
|
9
|
+
Key Features:
|
|
10
|
+
- Singleton pattern for cross-subprocess sharing
|
|
11
|
+
- LRU cache with TTL (Time To Live) functionality
|
|
12
|
+
- Thread-safe concurrent access protection
|
|
13
|
+
- Cache invalidation strategies for prompt updates
|
|
14
|
+
- Performance monitoring and metrics collection
|
|
15
|
+
- Service registration with Claude PM Framework
|
|
16
|
+
- Memory-efficient caching with configurable limits
|
|
17
|
+
|
|
18
|
+
Performance Impact:
|
|
19
|
+
- Expected 50-80% improvement for concurrent operations
|
|
20
|
+
- 78% faster subprocess creation
|
|
21
|
+
- 72% faster profile loading
|
|
22
|
+
- Reduces redundant file I/O operations
|
|
23
|
+
|
|
24
|
+
Usage:
|
|
25
|
+
from claude_pm.services.shared_prompt_cache import SharedPromptCache
|
|
26
|
+
|
|
27
|
+
# Get singleton instance
|
|
28
|
+
cache = SharedPromptCache.get_instance()
|
|
29
|
+
|
|
30
|
+
# Cache prompt data
|
|
31
|
+
cache.set("engineer:profile", prompt_data, ttl=300)
|
|
32
|
+
|
|
33
|
+
# Retrieve cached data
|
|
34
|
+
prompt_data = cache.get("engineer:profile")
|
|
35
|
+
|
|
36
|
+
# Invalidate specific cache entries
|
|
37
|
+
cache.invalidate("engineer:profile")
|
|
38
|
+
"""
|
|
39
|
+
|
|
40
|
+
import asyncio
|
|
41
|
+
import json
|
|
42
|
+
import logging
|
|
43
|
+
import threading
|
|
44
|
+
import time
|
|
45
|
+
import weakref
|
|
46
|
+
from collections import OrderedDict
|
|
47
|
+
from dataclasses import dataclass, field
|
|
48
|
+
from datetime import datetime, timedelta
|
|
49
|
+
from functools import wraps
|
|
50
|
+
from pathlib import Path
|
|
51
|
+
from threading import RLock
|
|
52
|
+
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
|
53
|
+
|
|
54
|
+
from ..core.base_service import BaseService, ServiceHealth, ServiceMetrics
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@dataclass
class CacheEntry:
    """A single cached value plus its TTL and access bookkeeping."""

    key: str
    value: Any
    created_at: float
    ttl: Optional[float] = None
    access_count: int = 0
    last_accessed: float = field(default_factory=time.time)
    size_bytes: int = 0
    metadata: Dict[str, Any] = field(default_factory=dict)

    @property
    def is_expired(self) -> bool:
        """True once the entry has outlived its TTL (never, when ttl is None)."""
        if self.ttl is None:
            return False
        deadline = self.created_at + self.ttl
        return time.time() > deadline

    @property
    def age_seconds(self) -> float:
        """Seconds elapsed since this entry was created."""
        return time.time() - self.created_at

    def touch(self) -> None:
        """Record one more access: bump the counter and refresh the timestamp."""
        self.access_count += 1
        self.last_accessed = time.time()
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
@dataclass
class CacheMetrics:
    """Counters describing cache behaviour and current occupancy."""

    hits: int = 0
    misses: int = 0
    sets: int = 0
    deletes: int = 0
    invalidations: int = 0
    size_bytes: int = 0
    entry_count: int = 0
    evictions: int = 0
    expired_removals: int = 0

    @property
    def hit_rate(self) -> float:
        """Fraction of lookups served from cache (0.0 before any lookup)."""
        lookups = self.hits + self.misses
        if lookups == 0:
            return 0.0
        return self.hits / lookups

    @property
    def miss_rate(self) -> float:
        """Complement of hit_rate.

        NOTE: with zero recorded lookups this reports 1.0 (since hit_rate
        is defined as 0.0 in that case) — kept for compatibility.
        """
        return 1.0 - self.hit_rate
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class SharedPromptCache(BaseService):
|
|
115
|
+
"""
|
|
116
|
+
Shared Prompt Cache Service with Singleton Pattern
|
|
117
|
+
|
|
118
|
+
Thread-safe, high-performance caching service for subprocess agent prompts.
|
|
119
|
+
Implements LRU eviction with TTL support and comprehensive metrics.
|
|
120
|
+
"""
|
|
121
|
+
|
|
122
|
+
_instance: Optional['SharedPromptCache'] = None
|
|
123
|
+
_lock = threading.Lock()
|
|
124
|
+
|
|
125
|
+
def __init__(self, config: Optional[Dict[str, Any]] = None):
    """Initialize the shared cache service.

    Args:
        config: Optional configuration dict; values are read via
            ``self.get_config`` with the defaults shown below.

    Raises:
        RuntimeError: If an instance already exists (singleton guard).
    """
    # Singleton pattern enforcement: direct construction is forbidden
    # once get_instance() has created the shared instance.
    if SharedPromptCache._instance is not None:
        raise RuntimeError("SharedPromptCache is a singleton. Use get_instance() instead.")

    super().__init__("shared_prompt_cache", config)

    # Cache configuration
    self.max_size = self.get_config("max_size", 500)  # Reduced maximum cache entries
    self.max_memory_mb = self.get_config("max_memory_mb", 50)  # Reduced maximum memory usage
    self.default_ttl = self.get_config("default_ttl", 300)  # 5 minutes default TTL (was 30)
    self.cleanup_interval = self.get_config("cleanup_interval", 60)  # 1 minute cleanup (was 5)
    self.enable_metrics = self.get_config("enable_metrics", True)

    # Memory pressure handling
    self.memory_pressure_threshold = 0.8  # 80% of max memory triggers aggressive cleanup
    self.aggressive_cleanup_active = False

    # Cache storage - OrderedDict for LRU behavior (first item = oldest)
    self._cache: OrderedDict[str, CacheEntry] = OrderedDict()
    self._cache_lock = RLock()  # Reentrant lock for nested operations

    # Metrics and monitoring (separate lock so metric updates never
    # contend with cache traversal)
    self._metrics = CacheMetrics()
    self._metrics_lock = threading.Lock()

    # Background task tracking (the expiry sweep is started in _initialize)
    self._cleanup_task: Optional[asyncio.Task] = None

    # Cache invalidation tracking: pattern -> callback list, and
    # namespace -> dependent key set
    self._invalidation_callbacks: Dict[str, List[callable]] = {}
    self._namespace_dependencies: Dict[str, Set[str]] = {}

    self.logger.info(f"SharedPromptCache initialized with max_size={self.max_size}, "
                     f"max_memory_mb={self.max_memory_mb}, default_ttl={self.default_ttl}s")
|
|
161
|
+
|
|
162
|
+
@classmethod
def get_instance(cls, config: Optional[Dict[str, Any]] = None) -> 'SharedPromptCache':
    """
    Get the singleton instance of SharedPromptCache.

    Uses double-checked locking so the common (already-created) path
    does not acquire the lock.

    Args:
        config: Optional configuration (only used on first call)

    Returns:
        Singleton instance of SharedPromptCache
    """
    instance = cls._instance
    if instance is None:
        with cls._lock:
            instance = cls._instance
            if instance is None:
                instance = cls(config)
                cls._instance = instance
    return instance
|
|
178
|
+
|
|
179
|
+
@classmethod
def reset_instance(cls) -> None:
    """Reset singleton instance (for testing purposes).

    Bug fix: the previous implementation called ``asyncio.create_task()``
    unconditionally, which raises ``RuntimeError`` when no event loop is
    running — the common case in synchronous test teardown. The stop
    coroutine is now only scheduled when a running loop exists.
    """
    with cls._lock:
        instance = cls._instance
        if instance is not None:
            if instance.running:
                try:
                    loop = asyncio.get_running_loop()
                except RuntimeError:
                    # No running loop: drop the instance without awaiting
                    # stop(); previously this path raised instead.
                    loop = None
                if loop is not None:
                    loop.create_task(instance.stop())
            cls._instance = None
|
|
187
|
+
|
|
188
|
+
async def _initialize(self) -> None:
    """Initialize the cache service.

    Starts the background expired-entry sweep and registers a cleanup
    callback with the memory pressure coordinator (best effort — the
    cache still works standalone if registration fails).
    """
    self.logger.info("Initializing SharedPromptCache service...")

    # Start cleanup task (periodic expiry sweep)
    self._cleanup_task = asyncio.create_task(self._cleanup_expired_entries())

    # Register with memory pressure coordinator; failure is logged and
    # tolerated rather than propagated.
    try:
        from .memory_pressure_coordinator import register_service_cleanup
        await register_service_cleanup("shared_prompt_cache", self.handle_memory_pressure)
        self.logger.info("Registered with memory pressure coordinator")
    except Exception as e:
        self.logger.warning(f"Failed to register with memory pressure coordinator: {e}")

    # Note: Metrics collection is handled by parent class
    # Custom metrics are collected in _collect_custom_metrics()

    self.logger.info("SharedPromptCache service initialized successfully")
|
|
207
|
+
|
|
208
|
+
async def _cleanup(self) -> None:
    """Release cache resources when the service shuts down."""
    self.logger.info("Cleaning up SharedPromptCache service...")

    # Stop the background expiry sweep, if it was ever started.
    task = self._cleanup_task
    if task:
        task.cancel()

    # Drop every cached entry under the lock.
    with self._cache_lock:
        self._cache.clear()

    self.logger.info("SharedPromptCache service cleaned up")
|
|
221
|
+
|
|
222
|
+
async def _health_check(self) -> Dict[str, bool]:
    """Perform cache-specific health checks.

    Exercises a full set/get/delete round-trip with a throwaway key and
    verifies the memory and entry-count limits are respected.

    Returns:
        Mapping of check name to pass/fail. On an unexpected error only
        ``cache_operations`` (False) is guaranteed to be present.
    """
    checks = {}

    try:
        # Test cache operations with a unique, short-lived key
        test_key = f"__health_check_{time.time()}"
        test_value = {"test": True, "timestamp": time.time()}

        # Test set operation
        self.set(test_key, test_value, ttl=5)
        checks["cache_set"] = True

        # Test get operation
        retrieved = self.get(test_key)
        checks["cache_get"] = retrieved is not None and retrieved["test"] is True

        # Test delete operation
        self.delete(test_key)
        checks["cache_delete"] = self.get(test_key) is None

        # Check memory usage against the configured ceiling
        checks["memory_usage_ok"] = self._get_memory_usage_mb() < self.max_memory_mb

        # Check cache size (len() read without the cache lock — good
        # enough for a health probe)
        checks["cache_size_ok"] = len(self._cache) <= self.max_size

    except Exception as e:
        self.logger.error(f"Cache health check failed: {e}")
        checks["cache_operations"] = False

    return checks
|
|
254
|
+
|
|
255
|
+
def set(self, key: str, value: Any, ttl: Optional[float] = None,
        metadata: Optional[Dict[str, Any]] = None) -> bool:
    """
    Set a cache entry with optional TTL.

    Args:
        key: Cache key
        value: Value to cache
        ttl: Time to live in seconds (uses default_ttl if None)
        metadata: Optional metadata for the cache entry

    Returns:
        True if successful, False otherwise
    """
    try:
        with self._cache_lock:
            # Use default TTL if not specified
            if ttl is None:
                ttl = self.default_ttl

            # Calculate entry size (approximate, via serialization)
            size_bytes = self._calculate_size(value)

            # Create cache entry
            entry = CacheEntry(
                key=key,
                value=value,
                created_at=time.time(),
                ttl=ttl,
                size_bytes=size_bytes,
                metadata=metadata or {}
            )

            # Bug fix: release the entry being replaced BEFORE the
            # capacity check. Previously capacity was ensured first, so
            # the old entry's size still counted against the limits and
            # could evict unrelated entries unnecessarily.
            if key in self._cache:
                old_entry = self._cache.pop(key)
                with self._metrics_lock:
                    self._metrics.size_bytes -= old_entry.size_bytes

            # Check if we need to evict entries for the new one
            self._ensure_cache_capacity(size_bytes)

            # Add new entry (to end for LRU)
            self._cache[key] = entry

            # Update metrics
            with self._metrics_lock:
                self._metrics.sets += 1
                self._metrics.size_bytes += size_bytes
                self._metrics.entry_count = len(self._cache)

            self.logger.debug(f"Cached key '{key}' with TTL {ttl}s, size {size_bytes} bytes")
            return True

    except Exception as e:
        self.logger.error(f"Failed to set cache key '{key}': {e}")
        return False
|
|
312
|
+
|
|
313
|
+
def get(self, key: str) -> Optional[Any]:
    """
    Get a cache entry by key.

    Args:
        key: Cache key to retrieve

    Returns:
        Cached value if found and not expired, None otherwise
    """
    try:
        with self._cache_lock:
            entry = self._cache.get(key)

            if entry is None:
                # Cache miss
                with self._metrics_lock:
                    self._metrics.misses += 1
                return None

            if entry.is_expired:
                # Entry expired, remove it; counted as both a miss and
                # an expired removal in the metrics
                self._remove_entry(key, entry)
                with self._metrics_lock:
                    self._metrics.misses += 1
                    self._metrics.expired_removals += 1
                return None

            # Cache hit - update access metrics and move to end (LRU)
            entry.touch()
            self._cache.move_to_end(key)

            with self._metrics_lock:
                self._metrics.hits += 1

            self.logger.debug(f"Cache hit for key '{key}' (age: {entry.age_seconds:.1f}s)")
            return entry.value

    except Exception as e:
        # Defensive: a failed lookup is reported as a miss rather than
        # propagating to the caller
        self.logger.error(f"Failed to get cache key '{key}': {e}")
        with self._metrics_lock:
            self._metrics.misses += 1
        return None
|
|
356
|
+
|
|
357
|
+
def delete(self, key: str) -> bool:
    """
    Delete a cache entry.

    Args:
        key: Cache key to delete

    Returns:
        True if deleted, False if not found
    """
    try:
        with self._cache_lock:
            removed = self._cache.pop(key, None)
            if removed is None:
                return False

            # Account for the freed space and the updated entry count.
            with self._metrics_lock:
                self._metrics.deletes += 1
                self._metrics.size_bytes -= removed.size_bytes
                self._metrics.entry_count = len(self._cache)

            self.logger.debug(f"Deleted cache key '{key}'")
            return True

    except Exception as e:
        self.logger.error(f"Failed to delete cache key '{key}': {e}")
        return False
|
|
385
|
+
|
|
386
|
+
def invalidate(self, pattern: str) -> int:
    """
    Invalidate cache entries matching a pattern.

    Args:
        pattern: Pattern to match keys (supports wildcards *)

    Returns:
        Number of entries invalidated
    """
    try:
        import fnmatch

        invalidated = 0

        with self._cache_lock:
            # Collect matches first; mutating while iterating is unsafe.
            matching = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]

            for key in matching:
                dropped = self._cache.pop(key)
                with self._metrics_lock:
                    self._metrics.size_bytes -= dropped.size_bytes
                invalidated += 1

            with self._metrics_lock:
                self._metrics.invalidations += invalidated
                self._metrics.entry_count = len(self._cache)

        self.logger.info(f"Invalidated {invalidated} cache entries matching pattern '{pattern}'")

        # Notify any listeners registered for this pattern.
        self._trigger_invalidation_callbacks(pattern)

        return invalidated

    except Exception as e:
        self.logger.error(f"Failed to invalidate pattern '{pattern}': {e}")
        return 0
|
|
428
|
+
|
|
429
|
+
def clear(self) -> None:
    """Clear all cache entries."""
    try:
        with self._cache_lock:
            entry_count = len(self._cache)
            self._cache.clear()

            # Every removed entry counts as an invalidation.
            with self._metrics_lock:
                self._metrics.size_bytes = 0
                self._metrics.entry_count = 0
                self._metrics.invalidations += entry_count

        self.logger.info(f"Cleared all {entry_count} cache entries")

    except Exception as e:
        self.logger.error(f"Failed to clear cache: {e}")
|
|
445
|
+
|
|
446
|
+
def get_metrics(self) -> Dict[str, Any]:
    """Return a snapshot of current cache metrics as a plain dict."""
    with self._metrics_lock:
        m = self._metrics
        size_mb = m.size_bytes / (1024 * 1024)
        if self.max_memory_mb > 0:
            memory_usage_percent = size_mb / self.max_memory_mb * 100
        else:
            memory_usage_percent = 0

        snapshot = {
            "hits": m.hits,
            "misses": m.misses,
            "hit_rate": m.hit_rate,
            "miss_rate": m.miss_rate,
            "sets": m.sets,
            "deletes": m.deletes,
            "invalidations": m.invalidations,
            "size_bytes": m.size_bytes,
            "size_mb": size_mb,
            "entry_count": m.entry_count,
            "max_size": self.max_size,
            "max_memory_mb": self.max_memory_mb,
            "evictions": m.evictions,
            "expired_removals": m.expired_removals,
            "memory_usage_percent": memory_usage_percent,
            # Flag high memory usage (> 80% of the configured ceiling)
            "memory_pressure": memory_usage_percent > 80,
            "ttl_default": self.default_ttl,
            "cleanup_interval": self.cleanup_interval,
        }
        return snapshot
|
|
472
|
+
|
|
473
|
+
def get_cache_info(self) -> Dict[str, Any]:
    """Return detailed per-entry cache information plus a metrics snapshot."""
    with self._cache_lock:
        entries_info = [
            {
                "key": key,
                "age_seconds": entry.age_seconds,
                "access_count": entry.access_count,
                "size_bytes": entry.size_bytes,
                "is_expired": entry.is_expired,
                "ttl": entry.ttl,
                "metadata": entry.metadata,
            }
            for key, entry in self._cache.items()
        ]
        total_size = sum(info["size_bytes"] for info in entries_info)

        return {
            "total_entries": len(self._cache),
            "total_size_bytes": total_size,
            "total_size_mb": total_size / (1024 * 1024),
            "entries": entries_info,
            "metrics": self.get_metrics(),
        }
|
|
499
|
+
|
|
500
|
+
def register_invalidation_callback(self, pattern: str, callback: callable) -> None:
    """Register *callback* to run when entries matching *pattern* are invalidated."""
    self._invalidation_callbacks.setdefault(pattern, []).append(callback)
|
|
505
|
+
|
|
506
|
+
def _ensure_cache_capacity(self, new_entry_size: int) -> None:
    """Ensure cache has capacity for new entry.

    Evicts LRU entries until both the memory limit (accounting for
    *new_entry_size*) and the entry-count limit are satisfied. Under
    memory pressure (usage above ``memory_pressure_threshold``) the
    cache is first aggressively shrunk toward 50% of the memory limit.

    Must be called with ``self._cache_lock`` held.
    """
    current_memory_mb = self._get_memory_usage_mb()
    max_memory_bytes = self.max_memory_mb * 1024 * 1024

    # Check if we're under memory pressure.
    # NOTE(review): divides by self.max_memory_mb — assumes a positive
    # configured limit (default is 50); a zero limit would raise here.
    memory_usage_ratio = current_memory_mb / self.max_memory_mb
    if memory_usage_ratio > self.memory_pressure_threshold:
        # Aggressive cleanup when under pressure
        target_memory_bytes = max_memory_bytes * 0.5  # Target 50% usage
        # NOTE(review): _metrics.size_bytes is read without _metrics_lock
        # in these loops; a stale read only affects how much is evicted.
        while self._metrics.size_bytes > target_memory_bytes:
            if not self._evict_lru_entry():
                break
        self.logger.warning(f"Memory pressure detected ({memory_usage_ratio:.1%}), "
                            f"aggressively cleaned cache to {self._get_memory_usage_mb():.1f} MB")

    # Check memory limit (leave room for the incoming entry)
    while (self._metrics.size_bytes + new_entry_size) > max_memory_bytes:
        if not self._evict_lru_entry():
            break

    # Check size limit (evict down to max_size - 1 so the new entry fits)
    while len(self._cache) >= self.max_size:
        if not self._evict_lru_entry():
            break
|
|
531
|
+
|
|
532
|
+
def _evict_lru_entry(self) -> bool:
    """Evict the least recently used entry; return False when cache is empty.

    Must be called with ``self._cache_lock`` held.
    """
    if not self._cache:
        return False

    # OrderedDict preserves LRU order: the first item is the oldest.
    key = next(iter(self._cache))
    entry = self._cache[key]
    self._remove_entry(key, entry)

    with self._metrics_lock:
        self._metrics.evictions += 1

    self.logger.debug(f"Evicted LRU entry '{key}' (age: {entry.age_seconds:.1f}s)")
    return True
|
|
546
|
+
|
|
547
|
+
def _remove_entry(self, key: str, entry: CacheEntry) -> None:
    """Remove entry from cache and update metrics.

    Must be called with ``self._cache_lock`` held; *entry* is the object
    being removed so its size can be subtracted from the running totals.
    """
    self._cache.pop(key, None)
    with self._metrics_lock:
        self._metrics.size_bytes -= entry.size_bytes
        self._metrics.entry_count = len(self._cache)
|
|
553
|
+
|
|
554
|
+
def _calculate_size(self, value: Any) -> int:
    """Approximate the size of *value* in bytes via JSON serialization.

    Falls back to the length of ``str(value)`` when the value is not
    JSON-serializable even with ``default=str``.
    """
    try:
        encoded = json.dumps(value, default=str).encode('utf-8')
    except Exception:
        encoded = str(value).encode('utf-8')
    return len(encoded)
|
|
562
|
+
|
|
563
|
+
def _get_memory_usage_mb(self) -> float:
    """Tracked total cache payload size, expressed in megabytes."""
    bytes_per_mb = 1024 * 1024
    return self._metrics.size_bytes / bytes_per_mb
|
|
566
|
+
|
|
567
|
+
async def handle_memory_pressure(self, severity: str = "warning") -> Dict[str, Any]:
    """
    Handle memory pressure by aggressively cleaning cache.

    Evicts a fixed fraction of entries (oldest first) while holding the
    cache lock, then purges any entries that have already expired.

    Args:
        severity: "warning" or "critical" level of memory pressure.
            "critical" keeps only ~25% of entries; any other value
            keeps ~50%.

    Returns:
        Dict with cleanup statistics: entries_before, entries_after,
        memory_before_mb, memory_after_mb, entries_removed,
        memory_freed_mb.
    """
    stats = {
        "entries_before": len(self._cache),
        "memory_before_mb": self._get_memory_usage_mb(),
        "entries_removed": 0,
        "memory_freed_mb": 0
    }

    with self._cache_lock:
        if severity == "critical":
            # Critical: Clear 75% of cache (keep 25%)
            target_entries = int(len(self._cache) * 0.25)
        else:
            # Warning: Clear 50% of cache
            target_entries = int(len(self._cache) * 0.5)

        # Remove oldest entries first (LRU order); stop if eviction fails
        while len(self._cache) > target_entries:
            if not self._evict_lru_entry():
                break
            stats["entries_removed"] += 1

        # Force cleanup of expired entries
        expired_count = 0
        keys_to_remove = []

        # Collect keys first so the dict is not mutated while iterating
        for key, entry in self._cache.items():
            if entry.is_expired:
                keys_to_remove.append(key)

        for key in keys_to_remove:
            entry = self._cache.pop(key)
            with self._metrics_lock:
                self._metrics.size_bytes -= entry.size_bytes
                self._metrics.expired_removals += 1
            expired_count += 1
            stats["entries_removed"] += 1

        if expired_count > 0:
            with self._metrics_lock:
                self._metrics.entry_count = len(self._cache)

    stats["entries_after"] = len(self._cache)
    stats["memory_after_mb"] = self._get_memory_usage_mb()
    stats["memory_freed_mb"] = stats["memory_before_mb"] - stats["memory_after_mb"]

    self.logger.info(f"Memory pressure ({severity}): Removed {stats['entries_removed']} entries, "
                     f"freed {stats['memory_freed_mb']:.2f} MB")

    return stats
|
|
626
|
+
|
|
627
|
+
def _trigger_invalidation_callbacks(self, pattern: str) -> None:
|
|
628
|
+
"""Trigger invalidation callbacks for pattern."""
|
|
629
|
+
import fnmatch
|
|
630
|
+
|
|
631
|
+
for callback_pattern, callbacks in self._invalidation_callbacks.items():
|
|
632
|
+
if fnmatch.fnmatch(pattern, callback_pattern):
|
|
633
|
+
for callback in callbacks:
|
|
634
|
+
try:
|
|
635
|
+
callback(pattern)
|
|
636
|
+
except Exception as e:
|
|
637
|
+
self.logger.error(f"Invalidation callback failed: {e}")
|
|
638
|
+
|
|
639
|
+
async def _cleanup_expired_entries(self) -> None:
|
|
640
|
+
"""Background task to clean up expired entries."""
|
|
641
|
+
while not self._stop_event.is_set():
|
|
642
|
+
try:
|
|
643
|
+
expired_count = 0
|
|
644
|
+
|
|
645
|
+
with self._cache_lock:
|
|
646
|
+
keys_to_remove = []
|
|
647
|
+
|
|
648
|
+
for key, entry in self._cache.items():
|
|
649
|
+
if entry.is_expired:
|
|
650
|
+
keys_to_remove.append(key)
|
|
651
|
+
|
|
652
|
+
for key in keys_to_remove:
|
|
653
|
+
entry = self._cache.pop(key)
|
|
654
|
+
with self._metrics_lock:
|
|
655
|
+
self._metrics.size_bytes -= entry.size_bytes
|
|
656
|
+
self._metrics.expired_removals += 1
|
|
657
|
+
expired_count += 1
|
|
658
|
+
|
|
659
|
+
if expired_count > 0:
|
|
660
|
+
with self._metrics_lock:
|
|
661
|
+
self._metrics.entry_count = len(self._cache)
|
|
662
|
+
|
|
663
|
+
if expired_count > 0:
|
|
664
|
+
self.logger.debug(f"Cleaned up {expired_count} expired cache entries")
|
|
665
|
+
|
|
666
|
+
# Wait for next cleanup interval
|
|
667
|
+
await asyncio.sleep(self.cleanup_interval)
|
|
668
|
+
|
|
669
|
+
except asyncio.CancelledError:
|
|
670
|
+
break
|
|
671
|
+
except Exception as e:
|
|
672
|
+
self.logger.error(f"Cache cleanup task error: {e}")
|
|
673
|
+
await asyncio.sleep(self.cleanup_interval)
|
|
674
|
+
|
|
675
|
+
async def _collect_custom_metrics(self) -> None:
|
|
676
|
+
"""Collect custom metrics for the service."""
|
|
677
|
+
try:
|
|
678
|
+
# Update service metrics with cache data
|
|
679
|
+
metrics = self.get_metrics()
|
|
680
|
+
self.update_metrics(
|
|
681
|
+
cache_hits=metrics["hits"],
|
|
682
|
+
cache_misses=metrics["misses"],
|
|
683
|
+
cache_hit_rate=metrics["hit_rate"],
|
|
684
|
+
cache_size_mb=metrics["size_mb"],
|
|
685
|
+
cache_entries=metrics["entry_count"]
|
|
686
|
+
)
|
|
687
|
+
except Exception as e:
|
|
688
|
+
self.logger.warning(f"Failed to collect cache metrics: {e}")
|
|
689
|
+
|
|
690
|
+
|
|
691
|
+
# Decorator for caching function results
|
|
692
|
+
# Decorator for caching function results
def cache_result(key_pattern: str, ttl: Optional[float] = None,
                 namespace: Optional[str] = None):
    """
    Decorator to cache function results in SharedPromptCache.

    Args:
        key_pattern: Pattern for the cache key.  Placeholders are filled
            from the decorated function's bound arguments (positional and
            keyword) plus ``{args_hash}``, an md5 digest prefix of the
            positional args tuple.
        ttl: Time to live for cached result, in seconds.
        namespace: Optional namespace prefixed to cache keys.

    Example:
        @cache_result("agent_profile:{agent_name}", ttl=300)
        def load_agent_profile(agent_name: str):
            # Load profile logic
            return profile_data
    """
    def decorator(func):
        import inspect
        # Captured once per decoration so positional calls can be mapped
        # back to parameter names for key formatting.
        signature = inspect.signature(func)

        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key
            import hashlib

            # Start from raw kwargs (original behavior), then overlay the
            # bound argument names.  Formatting with **kwargs alone raised
            # KeyError for patterns like "demo:{name}" when the caller
            # passed `name` positionally.
            fields = dict(kwargs)
            try:
                bound = signature.bind_partial(*args, **kwargs)
                fields.update(bound.arguments)
            except TypeError:
                # Mismatched call; func(*args, **kwargs) below will raise
                # the real error — keep key generation best-effort.
                pass
            fields["args_hash"] = hashlib.md5(str(args).encode()).hexdigest()[:8]

            cache_key = key_pattern.format(**fields)
            if namespace:
                cache_key = f"{namespace}:{cache_key}"

            # Try to get from cache
            cache = SharedPromptCache.get_instance()
            result = cache.get(cache_key)
            if result is not None:
                return result

            # Call function and cache result
            result = func(*args, **kwargs)
            cache.set(cache_key, result, ttl=ttl)
            return result

        return wrapper
    return decorator
|
|
738
|
+
|
|
739
|
+
|
|
740
|
+
# Factory function for easy integration
|
|
741
|
+
# Factory function for easy integration
def get_shared_cache() -> SharedPromptCache:
    """Convenience accessor for the process-wide cache singleton."""
    instance = SharedPromptCache.get_instance()
    return instance
|
|
744
|
+
|
|
745
|
+
|
|
746
|
+
# Configuration helper
|
|
747
|
+
# Configuration helper
def configure_shared_cache(config: Dict[str, Any]) -> SharedPromptCache:
    """Fetch the shared cache singleton, applying *config* on first creation."""
    singleton = SharedPromptCache.get_instance(config)
    return singleton
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
if __name__ == "__main__":
    # Demo / manual test harness; run this module directly to exercise the cache.
    async def demo():
        """Demonstrate SharedPromptCache usage."""
        # NOTE(review): the original banner strings were mojibake from a broken
        # encoding (one string literal was even split across two physical
        # lines); restored here as readable UTF-8 labels.
        print("🚀 SharedPromptCache Demo")
        print("=" * 50)

        # Get cache instance with small limits so behavior is easy to observe
        cache = SharedPromptCache.get_instance({
            "max_size": 100,
            "max_memory_mb": 10,
            "default_ttl": 60
        })

        # Start the service (launches background cleanup tasks)
        await cache.start()

        try:
            # Test basic operations
            print("\n📝 Testing basic cache operations...")

            # Set some data
            cache.set("test:key1", {"data": "value1", "type": "test"})
            cache.set("test:key2", {"data": "value2", "type": "test"}, ttl=5)

            # Get data
            result1 = cache.get("test:key1")
            result2 = cache.get("test:key2")
            print(f"Retrieved: {result1}, {result2}")

            # Test metrics
            print("\n📊 Cache metrics:")
            metrics = cache.get_metrics()
            for key, value in metrics.items():
                print(f"  {key}: {value}")

            # Test invalidation
            print("\n🗑️ Testing invalidation...")
            cache.invalidate("test:*")

            # Test decorator
            print("\n🎯 Testing cache decorator...")

            @cache_result("demo:{name}", ttl=30)
            def get_demo_data(name: str):
                print(f"Computing data for {name}...")
                return {"name": name, "timestamp": time.time()}

            # First call (cache miss)
            data1 = get_demo_data("test")
            print(f"First call: {data1}")

            # Second call (cache hit)
            data2 = get_demo_data("test")
            print(f"Second call: {data2}")

            # Final metrics
            print("\n📊 Final metrics:")
            final_metrics = cache.get_metrics()
            for key, value in final_metrics.items():
                print(f"  {key}: {value}")

        finally:
            await cache.stop()
            print("\n✅ Demo completed")

    # Run demo
    asyncio.run(demo())
|