kweaver-dolphin 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- DolphinLanguageSDK/__init__.py +58 -0
- dolphin/__init__.py +62 -0
- dolphin/cli/__init__.py +20 -0
- dolphin/cli/args/__init__.py +9 -0
- dolphin/cli/args/parser.py +567 -0
- dolphin/cli/builtin_agents/__init__.py +22 -0
- dolphin/cli/commands/__init__.py +4 -0
- dolphin/cli/interrupt/__init__.py +8 -0
- dolphin/cli/interrupt/handler.py +205 -0
- dolphin/cli/interrupt/keyboard.py +82 -0
- dolphin/cli/main.py +49 -0
- dolphin/cli/multimodal/__init__.py +34 -0
- dolphin/cli/multimodal/clipboard.py +327 -0
- dolphin/cli/multimodal/handler.py +249 -0
- dolphin/cli/multimodal/image_processor.py +214 -0
- dolphin/cli/multimodal/input_parser.py +149 -0
- dolphin/cli/runner/__init__.py +8 -0
- dolphin/cli/runner/runner.py +989 -0
- dolphin/cli/ui/__init__.py +10 -0
- dolphin/cli/ui/console.py +2795 -0
- dolphin/cli/ui/input.py +340 -0
- dolphin/cli/ui/layout.py +425 -0
- dolphin/cli/ui/stream_renderer.py +302 -0
- dolphin/cli/utils/__init__.py +8 -0
- dolphin/cli/utils/helpers.py +135 -0
- dolphin/cli/utils/version.py +49 -0
- dolphin/core/__init__.py +107 -0
- dolphin/core/agent/__init__.py +10 -0
- dolphin/core/agent/agent_state.py +69 -0
- dolphin/core/agent/base_agent.py +970 -0
- dolphin/core/code_block/__init__.py +0 -0
- dolphin/core/code_block/agent_init_block.py +0 -0
- dolphin/core/code_block/assign_block.py +98 -0
- dolphin/core/code_block/basic_code_block.py +1865 -0
- dolphin/core/code_block/explore_block.py +1327 -0
- dolphin/core/code_block/explore_block_v2.py +712 -0
- dolphin/core/code_block/explore_strategy.py +672 -0
- dolphin/core/code_block/judge_block.py +220 -0
- dolphin/core/code_block/prompt_block.py +32 -0
- dolphin/core/code_block/skill_call_deduplicator.py +291 -0
- dolphin/core/code_block/tool_block.py +129 -0
- dolphin/core/common/__init__.py +17 -0
- dolphin/core/common/constants.py +176 -0
- dolphin/core/common/enums.py +1173 -0
- dolphin/core/common/exceptions.py +133 -0
- dolphin/core/common/multimodal.py +539 -0
- dolphin/core/common/object_type.py +165 -0
- dolphin/core/common/output_format.py +432 -0
- dolphin/core/common/types.py +36 -0
- dolphin/core/config/__init__.py +16 -0
- dolphin/core/config/global_config.py +1289 -0
- dolphin/core/config/ontology_config.py +133 -0
- dolphin/core/context/__init__.py +12 -0
- dolphin/core/context/context.py +1580 -0
- dolphin/core/context/context_manager.py +161 -0
- dolphin/core/context/var_output.py +82 -0
- dolphin/core/context/variable_pool.py +356 -0
- dolphin/core/context_engineer/__init__.py +41 -0
- dolphin/core/context_engineer/config/__init__.py +5 -0
- dolphin/core/context_engineer/config/settings.py +402 -0
- dolphin/core/context_engineer/core/__init__.py +7 -0
- dolphin/core/context_engineer/core/budget_manager.py +327 -0
- dolphin/core/context_engineer/core/context_assembler.py +583 -0
- dolphin/core/context_engineer/core/context_manager.py +637 -0
- dolphin/core/context_engineer/core/tokenizer_service.py +260 -0
- dolphin/core/context_engineer/example/incremental_example.py +267 -0
- dolphin/core/context_engineer/example/traditional_example.py +334 -0
- dolphin/core/context_engineer/services/__init__.py +5 -0
- dolphin/core/context_engineer/services/compressor.py +399 -0
- dolphin/core/context_engineer/utils/__init__.py +6 -0
- dolphin/core/context_engineer/utils/context_utils.py +441 -0
- dolphin/core/context_engineer/utils/message_formatter.py +270 -0
- dolphin/core/context_engineer/utils/token_utils.py +139 -0
- dolphin/core/coroutine/__init__.py +15 -0
- dolphin/core/coroutine/context_snapshot.py +154 -0
- dolphin/core/coroutine/context_snapshot_profile.py +922 -0
- dolphin/core/coroutine/context_snapshot_store.py +268 -0
- dolphin/core/coroutine/execution_frame.py +145 -0
- dolphin/core/coroutine/execution_state_registry.py +161 -0
- dolphin/core/coroutine/resume_handle.py +101 -0
- dolphin/core/coroutine/step_result.py +101 -0
- dolphin/core/executor/__init__.py +18 -0
- dolphin/core/executor/debug_controller.py +630 -0
- dolphin/core/executor/dolphin_executor.py +1063 -0
- dolphin/core/executor/executor.py +624 -0
- dolphin/core/flags/__init__.py +27 -0
- dolphin/core/flags/definitions.py +49 -0
- dolphin/core/flags/manager.py +113 -0
- dolphin/core/hook/__init__.py +95 -0
- dolphin/core/hook/expression_evaluator.py +499 -0
- dolphin/core/hook/hook_dispatcher.py +380 -0
- dolphin/core/hook/hook_types.py +248 -0
- dolphin/core/hook/isolated_variable_pool.py +284 -0
- dolphin/core/interfaces.py +53 -0
- dolphin/core/llm/__init__.py +0 -0
- dolphin/core/llm/llm.py +495 -0
- dolphin/core/llm/llm_call.py +100 -0
- dolphin/core/llm/llm_client.py +1285 -0
- dolphin/core/llm/message_sanitizer.py +120 -0
- dolphin/core/logging/__init__.py +20 -0
- dolphin/core/logging/logger.py +526 -0
- dolphin/core/message/__init__.py +8 -0
- dolphin/core/message/compressor.py +749 -0
- dolphin/core/parser/__init__.py +8 -0
- dolphin/core/parser/parser.py +405 -0
- dolphin/core/runtime/__init__.py +10 -0
- dolphin/core/runtime/runtime_graph.py +926 -0
- dolphin/core/runtime/runtime_instance.py +446 -0
- dolphin/core/skill/__init__.py +14 -0
- dolphin/core/skill/context_retention.py +157 -0
- dolphin/core/skill/skill_function.py +686 -0
- dolphin/core/skill/skill_matcher.py +282 -0
- dolphin/core/skill/skillkit.py +700 -0
- dolphin/core/skill/skillset.py +72 -0
- dolphin/core/trajectory/__init__.py +10 -0
- dolphin/core/trajectory/recorder.py +189 -0
- dolphin/core/trajectory/trajectory.py +522 -0
- dolphin/core/utils/__init__.py +9 -0
- dolphin/core/utils/cache_kv.py +212 -0
- dolphin/core/utils/tools.py +340 -0
- dolphin/lib/__init__.py +93 -0
- dolphin/lib/debug/__init__.py +8 -0
- dolphin/lib/debug/visualizer.py +409 -0
- dolphin/lib/memory/__init__.py +28 -0
- dolphin/lib/memory/async_processor.py +220 -0
- dolphin/lib/memory/llm_calls.py +195 -0
- dolphin/lib/memory/manager.py +78 -0
- dolphin/lib/memory/sandbox.py +46 -0
- dolphin/lib/memory/storage.py +245 -0
- dolphin/lib/memory/utils.py +51 -0
- dolphin/lib/ontology/__init__.py +12 -0
- dolphin/lib/ontology/basic/__init__.py +0 -0
- dolphin/lib/ontology/basic/base.py +102 -0
- dolphin/lib/ontology/basic/concept.py +130 -0
- dolphin/lib/ontology/basic/object.py +11 -0
- dolphin/lib/ontology/basic/relation.py +63 -0
- dolphin/lib/ontology/datasource/__init__.py +27 -0
- dolphin/lib/ontology/datasource/datasource.py +66 -0
- dolphin/lib/ontology/datasource/oracle_datasource.py +338 -0
- dolphin/lib/ontology/datasource/sql.py +845 -0
- dolphin/lib/ontology/mapping.py +177 -0
- dolphin/lib/ontology/ontology.py +733 -0
- dolphin/lib/ontology/ontology_context.py +16 -0
- dolphin/lib/ontology/ontology_manager.py +107 -0
- dolphin/lib/skill_results/__init__.py +31 -0
- dolphin/lib/skill_results/cache_backend.py +559 -0
- dolphin/lib/skill_results/result_processor.py +181 -0
- dolphin/lib/skill_results/result_reference.py +179 -0
- dolphin/lib/skill_results/skillkit_hook.py +324 -0
- dolphin/lib/skill_results/strategies.py +328 -0
- dolphin/lib/skill_results/strategy_registry.py +150 -0
- dolphin/lib/skillkits/__init__.py +44 -0
- dolphin/lib/skillkits/agent_skillkit.py +155 -0
- dolphin/lib/skillkits/cognitive_skillkit.py +82 -0
- dolphin/lib/skillkits/env_skillkit.py +250 -0
- dolphin/lib/skillkits/mcp_adapter.py +616 -0
- dolphin/lib/skillkits/mcp_skillkit.py +771 -0
- dolphin/lib/skillkits/memory_skillkit.py +650 -0
- dolphin/lib/skillkits/noop_skillkit.py +31 -0
- dolphin/lib/skillkits/ontology_skillkit.py +89 -0
- dolphin/lib/skillkits/plan_act_skillkit.py +452 -0
- dolphin/lib/skillkits/resource/__init__.py +52 -0
- dolphin/lib/skillkits/resource/models/__init__.py +6 -0
- dolphin/lib/skillkits/resource/models/skill_config.py +109 -0
- dolphin/lib/skillkits/resource/models/skill_meta.py +127 -0
- dolphin/lib/skillkits/resource/resource_skillkit.py +393 -0
- dolphin/lib/skillkits/resource/skill_cache.py +215 -0
- dolphin/lib/skillkits/resource/skill_loader.py +395 -0
- dolphin/lib/skillkits/resource/skill_validator.py +406 -0
- dolphin/lib/skillkits/resource_skillkit.py +11 -0
- dolphin/lib/skillkits/search_skillkit.py +163 -0
- dolphin/lib/skillkits/sql_skillkit.py +274 -0
- dolphin/lib/skillkits/system_skillkit.py +509 -0
- dolphin/lib/skillkits/vm_skillkit.py +65 -0
- dolphin/lib/utils/__init__.py +9 -0
- dolphin/lib/utils/data_process.py +207 -0
- dolphin/lib/utils/handle_progress.py +178 -0
- dolphin/lib/utils/security.py +139 -0
- dolphin/lib/utils/text_retrieval.py +462 -0
- dolphin/lib/vm/__init__.py +11 -0
- dolphin/lib/vm/env_executor.py +895 -0
- dolphin/lib/vm/python_session_manager.py +453 -0
- dolphin/lib/vm/vm.py +610 -0
- dolphin/sdk/__init__.py +60 -0
- dolphin/sdk/agent/__init__.py +12 -0
- dolphin/sdk/agent/agent_factory.py +236 -0
- dolphin/sdk/agent/dolphin_agent.py +1106 -0
- dolphin/sdk/api/__init__.py +4 -0
- dolphin/sdk/runtime/__init__.py +8 -0
- dolphin/sdk/runtime/env.py +363 -0
- dolphin/sdk/skill/__init__.py +10 -0
- dolphin/sdk/skill/global_skills.py +706 -0
- dolphin/sdk/skill/traditional_toolkit.py +260 -0
- kweaver_dolphin-0.1.0.dist-info/METADATA +521 -0
- kweaver_dolphin-0.1.0.dist-info/RECORD +199 -0
- kweaver_dolphin-0.1.0.dist-info/WHEEL +5 -0
- kweaver_dolphin-0.1.0.dist-info/entry_points.txt +27 -0
- kweaver_dolphin-0.1.0.dist-info/licenses/LICENSE.txt +201 -0
- kweaver_dolphin-0.1.0.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,559 @@
|
|
|
1
|
+
"""Cache backend module
|
|
2
|
+
Provides tool result caching functionality, supports multiple storage backends
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import json
|
|
7
|
+
import pickle
|
|
8
|
+
import hashlib
|
|
9
|
+
import sqlite3
|
|
10
|
+
from abc import ABC, abstractmethod
|
|
11
|
+
from typing import Any, Optional, Dict
|
|
12
|
+
from dataclasses import dataclass, asdict
|
|
13
|
+
from datetime import datetime, timedelta
|
|
14
|
+
from dolphin.core.logging.logger import get_logger
|
|
15
|
+
|
|
16
|
+
logger = get_logger("skill_results")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class CacheEntry:
    """A single cached tool result with optional time-to-live.

    Attributes:
        reference_id: Unique key identifying the cached result.
        full_result: The complete tool result payload (arbitrary object).
        metadata: Extra information attached to the result.
        created_at: Creation timestamp (naive local time).
        tool_name: Name of the tool that produced the result.
        size: Approximate payload size; derived from ``str(full_result)``
            when left at the default 0.
        ttl: Expiration time in hours; ``None`` means the entry never expires.
    """

    reference_id: str
    full_result: Any
    metadata: Dict[str, Any]
    created_at: datetime
    tool_name: str
    size: int = 0
    ttl: Optional[int] = None  # Expiration time (hours)

    def __post_init__(self):
        # Derive an approximate size from the string form when not provided.
        if self.size == 0:
            self.size = len(str(self.full_result))

    def is_expired(self) -> bool:
        """Return True if the entry has outlived its TTL (in hours)."""
        if self.ttl is None:
            return False
        return datetime.now() > self.created_at + timedelta(hours=self.ttl)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a JSON-serializable dictionary (created_at as ISO string)."""
        data = asdict(self)
        data["created_at"] = self.created_at.isoformat()
        return data

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "CacheEntry":
        """Create an entry from a dictionary produced by :meth:`to_dict`.

        The input mapping is not mutated.
        """
        # Copy before converting so the caller's dict keeps its string
        # ``created_at`` (the previous implementation mutated it in place).
        data = dict(data)
        data["created_at"] = datetime.fromisoformat(data["created_at"])
        return cls(**data)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class CacheBackend(ABC):
    """Abstract interface that every cache backend must implement.

    Concrete backends (memory, file, database) provide storage, lookup,
    deletion, expiry cleanup, existence checks and statistics.
    """

    @abstractmethod
    def store(self, entry: CacheEntry) -> bool:
        """Persist *entry*; return True on success."""
        ...

    @abstractmethod
    def get(self, reference_id: str) -> Optional[CacheEntry]:
        """Return the entry for *reference_id*, or None if absent/expired."""
        ...

    @abstractmethod
    def delete(self, reference_id: str) -> bool:
        """Remove the entry for *reference_id*; return True if removed."""
        ...

    @abstractmethod
    def cleanup(self, max_age_hours: int = 24) -> int:
        """Purge entries older than *max_age_hours*; return how many were removed."""
        ...

    @abstractmethod
    def exists(self, reference_id: str) -> bool:
        """Report whether an entry for *reference_id* is present."""
        ...

    @abstractmethod
    def get_stats(self) -> Dict[str, Any]:
        """Return backend-specific statistics as a dictionary."""
        ...
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class MemoryCacheBackend(CacheBackend):
    """In-memory cache backend with LRU-style eviction.

    Entries live in a plain dict; when the cache is full, the entry with
    the oldest recorded access time is evicted before a new one is stored.
    NOTE(review): not thread-safe — callers must serialize access if shared.
    """

    def __init__(self, max_size: int = 1000):
        # Upper bound on the number of entries held at once.
        self.max_size = max_size
        # reference_id -> entry
        self.cache: Dict[str, CacheEntry] = {}
        # reference_id -> last access time, drives LRU eviction
        self.access_times: Dict[str, datetime] = {}

    def store(self, entry: CacheEntry) -> bool:
        """Store a cache entry, evicting the least recently used one if full."""
        try:
            # Make room before inserting so the size bound is respected.
            if len(self.cache) >= self.max_size:
                self._evict_oldest()

            self.cache[entry.reference_id] = entry
            self.access_times[entry.reference_id] = datetime.now()
            logger.debug(f"Stored cache entry: {entry.reference_id}")
            return True
        except Exception as e:
            logger.error(f"Failed to store cache entry: {e}")
            return False

    def get(self, reference_id: str) -> Optional[CacheEntry]:
        """Return the entry and refresh its access time; purge it if expired."""
        try:
            entry = self.cache.get(reference_id)
            if entry and not entry.is_expired():
                # Touch the entry so LRU eviction sees it as recently used.
                self.access_times[reference_id] = datetime.now()
                return entry
            elif entry and entry.is_expired():
                # Lazily drop expired entries on read.
                self.delete(reference_id)
            return None
        except Exception as e:
            logger.error(f"Failed to get cache entry {reference_id}: {e}")
            return None

    def delete(self, reference_id: str) -> bool:
        """Delete an entry; return True if it was present.

        Uses ``pop`` with a default for both maps so they can never get out
        of sync: the previous unguarded ``del`` could raise KeyError on the
        second map after the first had already been modified.
        """
        try:
            entry = self.cache.pop(reference_id, None)
            self.access_times.pop(reference_id, None)
            if entry is not None:
                logger.debug(f"Deleted cache entry: {reference_id}")
                return True
            return False
        except Exception as e:
            logger.error(f"Failed to delete cache entry {reference_id}: {e}")
            return False

    def cleanup(self, max_age_hours: int = 24) -> int:
        """Remove expired entries and entries older than *max_age_hours*.

        Returns the number of entries removed.
        """
        try:
            cutoff_time = datetime.now() - timedelta(hours=max_age_hours)
            # Collect keys first: deleting while iterating a dict is invalid.
            expired_keys = [
                ref_id
                for ref_id, entry in self.cache.items()
                if entry.is_expired() or entry.created_at < cutoff_time
            ]

            for ref_id in expired_keys:
                self.delete(ref_id)

            logger.debug(f"Cleaned up {len(expired_keys)} expired cache entries")
            return len(expired_keys)
        except Exception as e:
            logger.error(f"Failed to cleanup cache: {e}")
            return 0

    def exists(self, reference_id: str) -> bool:
        """Return True if a live (non-expired) entry exists.

        Delegates to :meth:`get`, so it also refreshes the access time and
        purges the entry if it turns out to be expired.
        """
        entry = self.get(reference_id)
        return entry is not None

    def get_stats(self) -> Dict[str, Any]:
        """Return entry count, capacity and percentage utilization."""
        return {
            "total_entries": len(self.cache),
            "max_size": self.max_size,
            "usage_percent": (
                (len(self.cache) / self.max_size) * 100 if self.max_size > 0 else 0
            ),
        }

    def _evict_oldest(self):
        """Evict the entry with the oldest access time (LRU victim)."""
        if not self.access_times:
            return

        oldest_key = min(self.access_times.keys(), key=lambda k: self.access_times[k])
        self.delete(oldest_key)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
class FileCacheBackend(CacheBackend):
    """File-based cache backend.

    Each entry is pickled to ``<cache_dir>/<md5(reference_id)>.cache``.
    The MD5 hash is used only to produce short, filesystem-safe names,
    not for security.
    """

    def __init__(
        self, cache_dir: str = "./cache", max_file_size: int = 100 * 1024 * 1024
    ):  # 100MB
        self.cache_dir = cache_dir
        # Maximum serialized size of a single entry, in bytes.
        self.max_file_size = max_file_size
        self._ensure_cache_dir()

    def store(self, entry: CacheEntry) -> bool:
        """Store a cache entry to a file; return True on success.

        The size limit is enforced on the data being written.  (The previous
        implementation checked the size of any *existing* file at the target
        path, so an oversized entry was written unchecked the first time and
        a small overwrite of a formerly large file was wrongly rejected.)
        """
        try:
            file_path = self._get_file_path(entry.reference_id)

            # Serialize first so the limit applies to what we actually write.
            data = {
                "reference_id": entry.reference_id,
                "full_result": entry.full_result,
                "metadata": entry.metadata,
                "created_at": entry.created_at.isoformat(),
                "tool_name": entry.tool_name,
                "size": entry.size,
                "ttl": entry.ttl,
            }
            payload = pickle.dumps(data)

            # Check file size limit
            if len(payload) > self.max_file_size:
                logger.warning(f"Cache file too large, skipping: {entry.reference_id}")
                return False

            # Write to file
            with open(file_path, "wb") as f:
                f.write(payload)

            logger.debug(f"Stored cache entry to file: {entry.reference_id}")
            return True
        except Exception as e:
            logger.error(f"Failed to store cache entry to file: {e}")
            return False

    def get(self, reference_id: str) -> Optional[CacheEntry]:
        """Read a cache entry from its file; return None if absent or expired."""
        try:
            file_path = self._get_file_path(reference_id)

            if not os.path.exists(file_path):
                return None

            # NOTE: pickle only loads data this process wrote into its own
            # cache dir; do not point cache_dir at untrusted files.
            with open(file_path, "rb") as f:
                data = pickle.load(f)

            entry = CacheEntry(
                reference_id=data["reference_id"],
                full_result=data["full_result"],
                metadata=data["metadata"],
                created_at=datetime.fromisoformat(data["created_at"]),
                tool_name=data["tool_name"],
                size=data.get("size", 0),
                ttl=data.get("ttl"),
            )

            # Lazily purge expired entries on read.
            if entry.is_expired():
                self.delete(reference_id)
                return None

            logger.debug(f"Retrieved cache entry from file: {reference_id}")
            return entry
        except Exception as e:
            logger.error(f"Failed to get cache entry from file {reference_id}: {e}")
            return None

    def delete(self, reference_id: str) -> bool:
        """Delete the entry's cache file; return True if a file was removed."""
        try:
            file_path = self._get_file_path(reference_id)
            if os.path.exists(file_path):
                os.remove(file_path)
                logger.debug(f"Deleted cache file: {reference_id}")
                return True
            return False
        except Exception as e:
            logger.error(f"Failed to delete cache file {reference_id}: {e}")
            return False

    def cleanup(self, max_age_hours: int = 24) -> int:
        """Delete cache files older than *max_age_hours*; return the count.

        Age is judged by file modification time, which may differ slightly
        from the entry's ``created_at``; cleanup is best-effort.
        """
        try:
            cutoff_time = datetime.now() - timedelta(hours=max_age_hours)
            deleted_count = 0

            for filename in os.listdir(self.cache_dir):
                if filename.endswith(".cache"):
                    file_path = os.path.join(self.cache_dir, filename)
                    try:
                        if os.path.getmtime(file_path) < cutoff_time.timestamp():
                            os.remove(file_path)
                            deleted_count += 1
                    except Exception as e:
                        # Include the filename so the failure is diagnosable
                        # (the old message logged a literal "(unknown)").
                        logger.warning(f"Failed to process cache file {filename}: {e}")

            logger.debug(f"Cleaned up {deleted_count} expired cache files")
            return deleted_count
        except Exception as e:
            logger.error(f"Failed to cleanup cache files: {e}")
            return 0

    def exists(self, reference_id: str) -> bool:
        """Return True if a cache file exists (expiry is NOT checked here)."""
        file_path = self._get_file_path(reference_id)
        return os.path.exists(file_path)

    def get_stats(self) -> Dict[str, Any]:
        """Return file count, total size (bytes and MB) and the cache dir."""
        try:
            total_files = 0
            total_size = 0

            for filename in os.listdir(self.cache_dir):
                if filename.endswith(".cache"):
                    file_path = os.path.join(self.cache_dir, filename)
                    total_files += 1
                    total_size += os.path.getsize(file_path)

            return {
                "total_entries": total_files,
                "total_size_bytes": total_size,
                "total_size_mb": total_size / (1024 * 1024),
                "cache_dir": self.cache_dir,
            }
        except Exception as e:
            logger.error(f"Failed to get cache stats: {e}")
            return {}

    def _ensure_cache_dir(self):
        """Create the cache directory if it does not exist yet."""
        os.makedirs(self.cache_dir, exist_ok=True)

    def _get_file_path(self, reference_id: str) -> str:
        """Map a reference id to its cache file path.

        Hashing keeps filenames short and filesystem-safe regardless of
        what characters the reference id contains.
        """
        hash_id = hashlib.md5(reference_id.encode()).hexdigest()
        return os.path.join(self.cache_dir, f"{hash_id}.cache")
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
class DatabaseCacheBackend(CacheBackend):
    """SQLite-backed cache backend.

    Results are pickled into a BLOB column; metadata is stored as JSON.
    A new connection is opened per operation, so the backend is safe to
    use from multiple threads (each call gets its own connection).
    """

    def __init__(self, db_path: str = "./cache.db"):
        self.db_path = db_path
        self._init_database()

    def store(self, entry: CacheEntry) -> bool:
        """Insert or replace a cache entry in the database."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Pickle the payload: results are arbitrary Python objects.
                result_data = pickle.dumps(entry.full_result)

                cursor.execute(
                    """
                    INSERT OR REPLACE INTO cache_entries
                    (reference_id, full_result, metadata, created_at, tool_name, size, ttl)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                """,
                    (
                        entry.reference_id,
                        result_data,
                        json.dumps(entry.metadata),
                        entry.created_at.isoformat(),
                        entry.tool_name,
                        entry.size,
                        entry.ttl,
                    ),
                )

                conn.commit()
                logger.debug(f"Stored cache entry to database: {entry.reference_id}")
                return True
        except Exception as e:
            logger.error(f"Failed to store cache entry to database: {e}")
            return False

    def get(self, reference_id: str) -> Optional[CacheEntry]:
        """Read a cache entry from the database; None if absent or expired."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute(
                    """
                    SELECT full_result, metadata, created_at, tool_name, size, ttl
                    FROM cache_entries WHERE reference_id = ?
                """,
                    (reference_id,),
                )

                row = cursor.fetchone()
                if not row:
                    return None

                result_data, metadata_json, created_at_str, tool_name, size, ttl = row

                # NOTE: pickle only loads data this process stored in its
                # own database; do not point db_path at an untrusted file.
                full_result = pickle.loads(result_data)
                metadata = json.loads(metadata_json)
                created_at = datetime.fromisoformat(created_at_str)

                entry = CacheEntry(
                    reference_id=reference_id,
                    full_result=full_result,
                    metadata=metadata,
                    created_at=created_at,
                    tool_name=tool_name,
                    size=size or 0,
                    ttl=ttl,
                )

                # Lazily purge expired entries on read.
                if entry.is_expired():
                    self.delete(reference_id)
                    return None

                logger.debug(f"Retrieved cache entry from database: {reference_id}")
                return entry
        except Exception as e:
            logger.error(f"Failed to get cache entry from database {reference_id}: {e}")
            return None

    def delete(self, reference_id: str) -> bool:
        """Delete a cache entry; return True if a row was removed."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "DELETE FROM cache_entries WHERE reference_id = ?", (reference_id,)
                )
                conn.commit()

                deleted = cursor.rowcount > 0
                if deleted:
                    logger.debug(f"Deleted cache entry from database: {reference_id}")
                return deleted
        except Exception as e:
            logger.error(
                f"Failed to delete cache entry from database {reference_id}: {e}"
            )
            return False

    def cleanup(self, max_age_hours: int = 24) -> int:
        """Delete rows older than *max_age_hours* or past their TTL.

        The ``created_at < ?`` comparison is a string compare, which is
        correct because ISO-8601 timestamps sort lexicographically.
        """
        try:
            cutoff_time = datetime.now() - timedelta(hours=max_age_hours)

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Delete entries past the age cutoff OR past their own TTL.
                cursor.execute(
                    """
                    DELETE FROM cache_entries
                    WHERE created_at < ? OR
                          (ttl IS NOT NULL AND datetime(created_at, '+' || ttl || ' hours') < datetime('now'))
                """,
                    (cutoff_time.isoformat(),),
                )

                deleted_count = cursor.rowcount
                conn.commit()

                logger.debug(
                    f"Cleaned up {deleted_count} expired cache entries from database"
                )
                return deleted_count
        except Exception as e:
            logger.error(f"Failed to cleanup database cache: {e}")
            return 0

    def exists(self, reference_id: str) -> bool:
        """Return True if a row exists (expiry is NOT checked here)."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT 1 FROM cache_entries WHERE reference_id = ?",
                    (reference_id,),
                )
                return cursor.fetchone() is not None
        except Exception as e:
            logger.error(f"Failed to check cache entry existence {reference_id}: {e}")
            return False

    def get_stats(self) -> Dict[str, Any]:
        """Return entry count, total size and per-tool entry counts."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Total number of entries
                cursor.execute("SELECT COUNT(*) FROM cache_entries")
                total_entries = cursor.fetchone()[0]

                # Total size
                cursor.execute("SELECT COALESCE(SUM(size), 0) FROM cache_entries")
                total_size = cursor.fetchone()[0]

                # Group by tool type
                cursor.execute(
                    """
                    SELECT tool_name, COUNT(*) as count
                    FROM cache_entries
                    GROUP BY tool_name
                """
                )
                tool_stats = dict(cursor.fetchall())

                return {
                    "total_entries": total_entries,
                    "total_size_bytes": total_size,
                    "total_size_mb": (
                        total_size / (1024 * 1024) if total_size > 0 else 0
                    ),
                    "tool_stats": tool_stats,
                    "db_path": self.db_path,
                }
        except Exception as e:
            logger.error(f"Failed to get database cache stats: {e}")
            return {}

    def _init_database(self):
        """Create the cache table and its indexes if they do not exist.

        The previous schema declared a ``created_at_index TEXT GENERATED
        ALWAYS AS (created_at) VIRTUAL`` column, which merely duplicated
        ``created_at``, was never queried, and required SQLite >= 3.31
        (generated-column support).  It has been dropped; the real index
        on ``created_at`` is created below.  Existing databases are
        unaffected because the table is created with IF NOT EXISTS.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute(
                    """
                    CREATE TABLE IF NOT EXISTS cache_entries (
                        reference_id TEXT PRIMARY KEY,
                        full_result BLOB NOT NULL,
                        metadata TEXT NOT NULL,
                        created_at TEXT NOT NULL,
                        tool_name TEXT NOT NULL,
                        size INTEGER DEFAULT 0,
                        ttl INTEGER
                    )
                """
                )

                # Indexes speed up cleanup (created_at) and stats (tool_name).
                cursor.execute(
                    """
                    CREATE INDEX IF NOT EXISTS idx_cache_entries_created_at
                    ON cache_entries(created_at)
                """
                )

                cursor.execute(
                    """
                    CREATE INDEX IF NOT EXISTS idx_cache_entries_tool_name
                    ON cache_entries(tool_name)
                """
                )

                conn.commit()
                logger.debug("Database cache initialized")
        except Exception as e:
            logger.error(f"Failed to initialize database cache: {e}")
            raise
|