aiecs 1.2.2-py3-none-any.whl → 1.3.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiecs might be problematic.
- aiecs/__init__.py +1 -1
- aiecs/llm/clients/vertex_client.py +22 -2
- aiecs/main.py +2 -2
- aiecs/scripts/tools_develop/README.md +111 -2
- aiecs/scripts/tools_develop/TOOL_AUTO_DISCOVERY.md +234 -0
- aiecs/scripts/tools_develop/validate_tool_schemas.py +80 -21
- aiecs/scripts/tools_develop/verify_tools.py +347 -0
- aiecs/tools/__init__.py +94 -30
- aiecs/tools/apisource/__init__.py +106 -0
- aiecs/tools/apisource/intelligence/__init__.py +20 -0
- aiecs/tools/apisource/intelligence/data_fusion.py +378 -0
- aiecs/tools/apisource/intelligence/query_analyzer.py +387 -0
- aiecs/tools/apisource/intelligence/search_enhancer.py +384 -0
- aiecs/tools/apisource/monitoring/__init__.py +12 -0
- aiecs/tools/apisource/monitoring/metrics.py +308 -0
- aiecs/tools/apisource/providers/__init__.py +114 -0
- aiecs/tools/apisource/providers/base.py +684 -0
- aiecs/tools/apisource/providers/census.py +412 -0
- aiecs/tools/apisource/providers/fred.py +575 -0
- aiecs/tools/apisource/providers/newsapi.py +402 -0
- aiecs/tools/apisource/providers/worldbank.py +346 -0
- aiecs/tools/apisource/reliability/__init__.py +14 -0
- aiecs/tools/apisource/reliability/error_handler.py +362 -0
- aiecs/tools/apisource/reliability/fallback_strategy.py +420 -0
- aiecs/tools/apisource/tool.py +814 -0
- aiecs/tools/apisource/utils/__init__.py +12 -0
- aiecs/tools/apisource/utils/validators.py +343 -0
- aiecs/tools/langchain_adapter.py +95 -17
- aiecs/tools/search_tool/__init__.py +102 -0
- aiecs/tools/search_tool/analyzers.py +583 -0
- aiecs/tools/search_tool/cache.py +280 -0
- aiecs/tools/search_tool/constants.py +127 -0
- aiecs/tools/search_tool/context.py +219 -0
- aiecs/tools/search_tool/core.py +773 -0
- aiecs/tools/search_tool/deduplicator.py +123 -0
- aiecs/tools/search_tool/error_handler.py +257 -0
- aiecs/tools/search_tool/metrics.py +375 -0
- aiecs/tools/search_tool/rate_limiter.py +177 -0
- aiecs/tools/search_tool/schemas.py +297 -0
- aiecs/tools/statistics/data_loader_tool.py +2 -2
- aiecs/tools/statistics/data_transformer_tool.py +1 -1
- aiecs/tools/task_tools/__init__.py +8 -8
- aiecs/tools/task_tools/report_tool.py +1 -1
- aiecs/tools/tool_executor/__init__.py +2 -0
- aiecs/tools/tool_executor/tool_executor.py +284 -14
- aiecs/utils/__init__.py +11 -0
- aiecs/utils/cache_provider.py +698 -0
- aiecs/utils/execution_utils.py +5 -5
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/METADATA +1 -1
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/RECORD +54 -22
- aiecs/tools/task_tools/search_tool.py +0 -1123
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/WHEEL +0 -0
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/entry_points.txt +0 -0
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/licenses/LICENSE +0 -0
- {aiecs-1.2.2.dist-info → aiecs-1.3.3.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ aiecs/utils/cache_provider.py
@@ -0,0 +1,698 @@
+"""
+Cache Provider Interface and Implementations
+
+Provides a unified caching interface that supports multiple caching strategies and storage backends.
+All cache implementations should implement the ICacheProvider interface so they can integrate with ToolExecutor and other components.
+
+Architecture:
+    ICacheProvider (Interface)
+    ├── LRUCacheProvider (Default: wraps ExecutionUtils)
+    ├── DualLayerCacheProvider (L1: Memory + L2: Custom)
+    └── Custom implementations (e.g., IntelligentCacheProvider)
+
+Usage:
+    # Use default LRU cache
+    from aiecs.utils.cache_provider import LRUCacheProvider
+    cache = LRUCacheProvider(execution_utils)
+
+    # Use dual-layer cache
+    from aiecs.utils.cache_provider import DualLayerCacheProvider
+    cache = DualLayerCacheProvider(l1_provider, l2_provider)
+"""
+
+from abc import ABC, abstractmethod
+from typing import Any, Dict, Optional
+import logging
+import threading
+
+logger = logging.getLogger(__name__)
+
+
+class ICacheProvider(ABC):
+    """
+    Cache provider interface.
+
+    All cache implementations should implement this interface so they can integrate with ToolExecutor and other components.
+    The interface defines the core cache operations: get, set, invalidate, and statistics.
+
+    Both synchronous and asynchronous interfaces are supported:
+    - Synchronous interface (get, set, invalidate): for backward compatibility and simple scenarios
+    - Asynchronous interface (get_async, set_async, invalidate_async): for async operation and high-performance scenarios
+
+    Example:
+        class MyCacheProvider(ICacheProvider):
+            def get(self, key: str) -> Optional[Any]:
+                # Implement synchronous get logic
+                pass
+
+            async def get_async(self, key: str) -> Optional[Any]:
+                # Implement asynchronous get logic
+                pass
+
+            def set(self, key: str, value: Any, ttl: Optional[int] = None):
+                # Implement synchronous set logic
+                pass
+
+            async def set_async(self, key: str, value: Any, ttl: Optional[int] = None):
+                # Implement asynchronous set logic
+                pass
+    """
+
+    @abstractmethod
+    def get(self, key: str) -> Optional[Any]:
+        """
+        Get a cached value (synchronous interface).
+
+        Args:
+            key: Cache key
+
+        Returns:
+            The cached value, or None if it does not exist or has expired
+        """
+        pass
+
+    @abstractmethod
+    def set(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Set a cached value (synchronous interface).
+
+        Args:
+            key: Cache key
+            value: Value to cache
+            ttl: Expiration time in seconds; None means use the default TTL
+        """
+        pass
+
+    @abstractmethod
+    def invalidate(self, key: str):
+        """
+        Invalidate a cache entry (synchronous interface).
+
+        Args:
+            key: Cache key
+        """
+        pass
+
+    @abstractmethod
+    def get_stats(self) -> Dict[str, Any]:
+        """
+        Get cache statistics.
+
+        Returns:
+            A dictionary of cache statistics that should at least contain:
+            - type: cache type
+            - hits: number of hits (optional)
+            - misses: number of misses (optional)
+            - hit_rate: hit rate (optional)
+        """
+        pass
+
+    def clear(self):
+        """
+        Clear all cached entries (optional to implement).
+
+        The default implementation is a no-op; subclasses may override it as needed.
+        """
+        pass
+
+    # Async interface (optional, with default implementations)
+    async def get_async(self, key: str) -> Optional[Any]:
+        """
+        Get a cached value (asynchronous interface).
+
+        The default implementation calls the synchronous method. Subclasses should override it to provide a truly asynchronous implementation.
+
+        Args:
+            key: Cache key
+
+        Returns:
+            The cached value, or None if it does not exist or has expired
+        """
+        return self.get(key)
+
+    async def set_async(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Set a cached value (asynchronous interface).
+
+        The default implementation calls the synchronous method. Subclasses should override it to provide a truly asynchronous implementation.
+
+        Args:
+            key: Cache key
+            value: Value to cache
+            ttl: Expiration time in seconds; None means use the default TTL
+        """
+        self.set(key, value, ttl)
+
+    async def invalidate_async(self, key: str):
+        """
+        Invalidate a cache entry (asynchronous interface).
+
+        The default implementation calls the synchronous method. Subclasses should override it to provide a truly asynchronous implementation.
+
+        Args:
+            key: Cache key
+        """
+        self.invalidate(key)
+
+
+class LRUCacheProvider(ICacheProvider):
+    """
+    LRU cache provider backed by ExecutionUtils.
+
+    This is the default cache implementation; it wraps the existing ExecutionUtils caching logic.
+    Uses an LRU (Least Recently Used) eviction policy and TTL-based expiration.
+
+    Features:
+    - LRU eviction policy
+    - TTL expiration
+    - Thread safety
+    - In-memory caching
+
+    Example:
+        from aiecs.utils.execution_utils import ExecutionUtils
+        from aiecs.utils.cache_provider import LRUCacheProvider
+
+        execution_utils = ExecutionUtils(cache_size=100, cache_ttl=3600)
+        cache = LRUCacheProvider(execution_utils)
+
+        # Use the cache
+        cache.set("key1", "value1", ttl=300)
+        value = cache.get("key1")
+    """
+
+    def __init__(self, execution_utils):
+        """
+        Initialize the LRU cache provider.
+
+        Args:
+            execution_utils: ExecutionUtils instance
+        """
+        self.execution_utils = execution_utils
+        self._hits = 0
+        self._misses = 0
+
+    def get(self, key: str) -> Optional[Any]:
+        """Get a value from the ExecutionUtils cache."""
+        result = self.execution_utils.get_from_cache(key)
+        if result is not None:
+            self._hits += 1
+            logger.debug(f"Cache hit: {key}")
+        else:
+            self._misses += 1
+            logger.debug(f"Cache miss: {key}")
+        return result
+
+    def set(self, key: str, value: Any, ttl: Optional[int] = None):
+        """Set a value in the ExecutionUtils cache."""
+        self.execution_utils.add_to_cache(key, value, ttl)
+        logger.debug(f"Cache set: {key} (ttl={ttl})")
+
+    def invalidate(self, key: str):
+        """
+        Invalidate a cache entry.
+
+        ExecutionUtils has no direct invalidate method,
+        so the cache entry is removed directly.
+        """
+        if hasattr(self.execution_utils, '_cache') and self.execution_utils._cache:
+            with self.execution_utils._cache_lock:
+                if key in self.execution_utils._cache:
+                    del self.execution_utils._cache[key]
+                    logger.debug(f"Cache invalidated: {key}")
+                if key in self.execution_utils._cache_ttl_dict:
+                    del self.execution_utils._cache_ttl_dict[key]
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get cache statistics."""
+        cache_size = 0
+        if hasattr(self.execution_utils, '_cache') and self.execution_utils._cache:
+            cache_size = len(self.execution_utils._cache)
+
+        total_requests = self._hits + self._misses
+        hit_rate = self._hits / total_requests if total_requests > 0 else 0.0
+
+        return {
+            'type': 'lru',
+            'backend': 'memory',
+            'size': cache_size,
+            'max_size': self.execution_utils.cache_size,
+            'hits': self._hits,
+            'misses': self._misses,
+            'hit_rate': hit_rate
+        }
+
+    def clear(self):
+        """Clear all cached entries."""
+        if hasattr(self.execution_utils, '_cache') and self.execution_utils._cache:
+            with self.execution_utils._cache_lock:
+                self.execution_utils._cache.clear()
+                self.execution_utils._cache_ttl_dict.clear()
+            logger.info("LRU cache cleared")
+
+
+class DualLayerCacheProvider(ICacheProvider):
+    """
+    Dual-layer cache provider.
+
+    Implements a two-layer cache architecture:
+    - L1: fast in-memory cache (usually LRUCacheProvider)
+    - L2: intelligent cache (e.g., Redis + intent-aware TTL)
+
+    Caching strategy:
+    1. Reads check L1 first; on a hit the value is returned directly
+    2. On an L1 miss, L2 is checked; on a hit the value is written back to L1
+    3. Writes go to both L1 and L2
+
+    Designed for tools such as SearchTool that need an advanced caching strategy.
+
+    Example:
+        from aiecs.utils.cache_provider import DualLayerCacheProvider, LRUCacheProvider
+
+        l1_cache = LRUCacheProvider(execution_utils)
+        l2_cache = IntelligentCacheProvider(redis_client)
+
+        dual_cache = DualLayerCacheProvider(
+            l1_provider=l1_cache,
+            l2_provider=l2_cache,
+            l1_ttl=300  # cache in L1 for 5 minutes
+        )
+
+        # Use the dual-layer cache
+        dual_cache.set("key1", "value1")  # writes to L1 and L2
+        value = dual_cache.get("key1")  # checks L1 first, then L2
+    """
+
+    def __init__(
+        self,
+        l1_provider: ICacheProvider,
+        l2_provider: ICacheProvider,
+        l1_ttl: int = 300
+    ):
+        """
+        Initialize the dual-layer cache.
+
+        Args:
+            l1_provider: L1 cache provider (usually LRUCacheProvider)
+            l2_provider: L2 cache provider (e.g., IntelligentCacheProvider)
+            l1_ttl: TTL of the L1 cache in seconds, default 5 minutes
+        """
+        self.l1 = l1_provider
+        self.l2 = l2_provider
+        self.l1_ttl = l1_ttl
+        self._l1_hits = 0
+        self._l2_hits = 0
+        self._misses = 0
+
+    def get(self, key: str) -> Optional[Any]:
+        """
+        Dual-layer cache get.
+
+        1. Check the L1 cache first
+        2. On an L1 miss, check the L2 cache
+        3. On an L2 hit, warm the L1 cache
+        """
+        # Try L1
+        result = self.l1.get(key)
+        if result is not None:
+            self._l1_hits += 1
+            logger.debug(f"L1 cache hit: {key}")
+            return result
+
+        # Try L2
+        result = self.l2.get(key)
+        if result is not None:
+            self._l2_hits += 1
+            logger.debug(f"L2 cache hit: {key}, warming L1")
+            # Warm L1 (with the shorter TTL)
+            self.l1.set(key, result, ttl=self.l1_ttl)
+            return result
+
+        self._misses += 1
+        logger.debug(f"Cache miss (L1 + L2): {key}")
+        return None
+
+    def set(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Dual-layer cache set.
+
+        Writes to both L1 and L2:
+        - L2 uses the provided TTL (possibly computed intelligently)
+        - L1 uses a fixed short TTL
+        """
+        # Write to L2 (using the intelligent TTL)
+        self.l2.set(key, value, ttl)
+        logger.debug(f"L2 cache set: {key} (ttl={ttl})")
+
+        # Write to L1 (using the short TTL)
+        self.l1.set(key, value, ttl=self.l1_ttl)
+        logger.debug(f"L1 cache set: {key} (ttl={self.l1_ttl})")
+
+    def invalidate(self, key: str):
+        """Invalidate the entry in both cache layers."""
+        self.l1.invalidate(key)
+        self.l2.invalidate(key)
+        logger.debug(f"Cache invalidated (L1 + L2): {key}")
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get dual-layer cache statistics."""
+        l1_stats = self.l1.get_stats()
+        l2_stats = self.l2.get_stats()
+
+        total_hits = self._l1_hits + self._l2_hits
+        total_requests = total_hits + self._misses
+
+        return {
+            'type': 'dual_layer',
+            'l1': l1_stats,
+            'l2': l2_stats,
+            'l1_hits': self._l1_hits,
+            'l2_hits': self._l2_hits,
+            'misses': self._misses,
+            'total_requests': total_requests,
+            'hit_rate': total_hits / total_requests if total_requests > 0 else 0.0,
+            'l1_hit_rate': self._l1_hits / total_requests if total_requests > 0 else 0.0,
+            'l2_hit_rate': self._l2_hits / total_requests if total_requests > 0 else 0.0
+        }
+
+    def clear(self):
+        """Clear both cache layers."""
+        self.l1.clear()
+        self.l2.clear()
+        logger.info("Dual-layer cache cleared")
+
+    # Async interface
+    async def get_async(self, key: str) -> Optional[Any]:
+        """
+        Dual-layer cache async get.
+
+        1. Check the L1 cache first (synchronously)
+        2. On an L1 miss, check the L2 cache (asynchronously)
+        3. On an L2 hit, warm the L1 cache
+        """
+        # Try L1 (synchronous)
+        result = self.l1.get(key)
+        if result is not None:
+            self._l1_hits += 1
+            logger.debug(f"L1 cache hit (async): {key}")
+            return result
+
+        # Try L2 (asynchronous)
+        result = await self.l2.get_async(key)
+        if result is not None:
+            self._l2_hits += 1
+            logger.debug(f"L2 cache hit (async): {key}, warming L1")
+            # Warm L1 (with the shorter TTL)
+            self.l1.set(key, result, ttl=self.l1_ttl)
+            return result
+
+        self._misses += 1
+        logger.debug(f"Cache miss (L1 + L2, async): {key}")
+        return None
+
+    async def set_async(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Dual-layer cache async set.
+
+        Writes to both L1 and L2:
+        - L2 uses the provided TTL (possibly computed intelligently) - async write
+        - L1 uses a fixed short TTL - sync write
+        """
+        # Write to L2 (async, using the intelligent TTL)
+        await self.l2.set_async(key, value, ttl)
+        logger.debug(f"L2 cache set (async): {key} (ttl={ttl})")
+
+        # Write to L1 (sync, using the short TTL)
+        self.l1.set(key, value, ttl=self.l1_ttl)
+        logger.debug(f"L1 cache set (async): {key} (ttl={self.l1_ttl})")
+
+    async def invalidate_async(self, key: str):
+        """Invalidate the entry in both cache layers (async)."""
+        self.l1.invalidate(key)
+        await self.l2.invalidate_async(key)
+        logger.debug(f"Cache invalidated (L1 + L2, async): {key}")
+
+    async def clear_async(self):
+        """Clear both cache layers asynchronously."""
+        self.l1.clear()
+        if hasattr(self.l2, 'clear_async'):
+            await self.l2.clear_async()
+        else:
+            self.l2.clear()
+        logger.info("Dual-layer cache cleared (async)")
+
+
+class RedisCacheProvider(ICacheProvider):
+    """
+    Cache provider backed by the global Redis client.
+
+    Uses the global RedisClient singleton to avoid creating duplicate connection pools.
+    Suitable for scenarios that need persistent caching or distributed cache sharing.
+
+    Features:
+    - Uses the global Redis singleton
+    - Persistent caching
+    - Distributed sharing
+    - TTL support
+
+    Example:
+        from aiecs.utils.cache_provider import RedisCacheProvider
+
+        # Use the global Redis client
+        cache = await RedisCacheProvider.create(
+            prefix="my_app:",
+            default_ttl=3600
+        )
+
+        # Use the cache
+        await cache.set_async("key1", "value1", ttl=300)
+        value = await cache.get_async("key1")
+
+    Note:
+        - initialize_redis_client() must be called first to initialize the global Redis client
+        - Provides a synchronous interface (in-memory fallback) and an asynchronous interface (Redis)
+    """
+
+    _instance: Optional['RedisCacheProvider'] = None
+    _lock = threading.Lock()
+
+    def __init__(self, redis_client, prefix: str = "", default_ttl: int = 3600):
+        """
+        Initialize the Redis cache provider.
+
+        Args:
+            redis_client: RedisClient instance
+            prefix: Cache key prefix
+            default_ttl: Default TTL in seconds
+        """
+        self.redis_client = redis_client
+        self.prefix = prefix
+        self.default_ttl = default_ttl
+        self._sync_cache: Dict[str, Any] = {}  # in-memory fallback for the synchronous interface
+        self._hits = 0
+        self._misses = 0
+
+    @classmethod
+    async def create(
+        cls,
+        prefix: str = "",
+        default_ttl: int = 3600,
+        use_singleton: bool = True
+    ) -> 'RedisCacheProvider':
+        """
+        Create a RedisCacheProvider instance.
+
+        Args:
+            prefix: Cache key prefix
+            default_ttl: Default TTL in seconds
+            use_singleton: Whether to use the singleton pattern
+
+        Returns:
+            A RedisCacheProvider instance
+
+        Raises:
+            RuntimeError: If the global Redis client has not been initialized
+        """
+        if use_singleton and cls._instance is not None:
+            return cls._instance
+
+        try:
+            from aiecs.infrastructure.persistence import get_redis_client
+            redis_client = await get_redis_client()
+
+            instance = cls(redis_client, prefix, default_ttl)
+
+            if use_singleton:
+                with cls._lock:
+                    cls._instance = instance
+
+            logger.info(f"RedisCacheProvider created (prefix={prefix}, ttl={default_ttl})")
+            return instance
+
+        except Exception as e:
+            logger.error(f"Failed to create RedisCacheProvider: {e}")
+            raise
+
+    def _make_key(self, key: str) -> str:
+        """Build the prefixed cache key."""
+        return f"{self.prefix}{key}"
+
+    # Synchronous interface (required by ICacheProvider)
+    def get(self, key: str) -> Optional[Any]:
+        """
+        Synchronous get (uses the in-memory fallback).
+
+        Note: For Redis operations, prefer get_async()
+        """
+        result = self._sync_cache.get(key)
+        if result is not None:
+            self._hits += 1
+        else:
+            self._misses += 1
+        return result
+
+    def set(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Synchronous set (uses the in-memory fallback).
+
+        Note: For Redis operations, prefer set_async()
+        """
+        self._sync_cache[key] = value
+
+    def invalidate(self, key: str):
+        """Synchronous invalidation (uses the in-memory fallback)."""
+        if key in self._sync_cache:
+            del self._sync_cache[key]
+
+    # Asynchronous interface (recommended)
+    async def get_async(self, key: str) -> Optional[Any]:
+        """
+        Get a cached value asynchronously.
+
+        Args:
+            key: Cache key
+
+        Returns:
+            The cached value, or None if it does not exist or has expired
+        """
+        try:
+            redis = await self.redis_client.get_client()
+            full_key = self._make_key(key)
+
+            value = await redis.get(full_key)
+            if value is not None:
+                self._hits += 1
+                logger.debug(f"Redis cache hit: {key}")
+                # Try to deserialize JSON
+                try:
+                    import json
+                    return json.loads(value)
+                except (json.JSONDecodeError, TypeError):
+                    return value
+            else:
+                self._misses += 1
+                logger.debug(f"Redis cache miss: {key}")
+                return None
+
+        except Exception as e:
+            logger.warning(f"Redis get error for key {key}: {e}")
+            self._misses += 1
+            return None
+
+    async def set_async(self, key: str, value: Any, ttl: Optional[int] = None):
+        """
+        Set a cached value asynchronously.
+
+        Args:
+            key: Cache key
+            value: Value to cache
+            ttl: Expiration time in seconds; None means use the default TTL
+        """
+        try:
+            redis = await self.redis_client.get_client()
+            full_key = self._make_key(key)
+            ttl = ttl if ttl is not None else self.default_ttl
+
+            # Serialize to JSON
+            import json
+            try:
+                serialized_value = json.dumps(value)
+            except (TypeError, ValueError):
+                serialized_value = str(value)
+
+            if ttl > 0:
+                await redis.setex(full_key, ttl, serialized_value)
+            else:
+                await redis.set(full_key, serialized_value)
+
+            logger.debug(f"Redis cache set: {key} (ttl={ttl})")
+
+        except Exception as e:
+            logger.warning(f"Redis set error for key {key}: {e}")
+
+    async def invalidate_async(self, key: str):
+        """
+        Invalidate a cache entry asynchronously.
+
+        Args:
+            key: Cache key
+        """
+        try:
+            redis = await self.redis_client.get_client()
+            full_key = self._make_key(key)
+            await redis.delete(full_key)
+            logger.debug(f"Redis cache invalidated: {key}")
+
+        except Exception as e:
+            logger.warning(f"Redis invalidate error for key {key}: {e}")
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get cache statistics."""
+        total_requests = self._hits + self._misses
+        hit_rate = self._hits / total_requests if total_requests > 0 else 0.0
+
+        return {
+            'type': 'redis',
+            'backend': 'redis',
+            'prefix': self.prefix,
+            'default_ttl': self.default_ttl,
+            'hits': self._hits,
+            'misses': self._misses,
+            'hit_rate': hit_rate,
+            'sync_cache_size': len(self._sync_cache)
+        }
+
+    def clear(self):
+        """Clear the synchronous cache (use clear_async for the Redis cache)."""
+        self._sync_cache.clear()
+
+    async def clear_async(self, pattern: Optional[str] = None):
+        """
+        Clear the cache asynchronously.
+
+        Args:
+            pattern: Key pattern; None means clear all keys with this prefix
+        """
+        try:
+            redis = await self.redis_client.get_client()
+            pattern = pattern or f"{self.prefix}*"
+
+            keys_to_delete = []
+            async for key in redis.scan_iter(match=pattern):
+                keys_to_delete.append(key)
+
+            if keys_to_delete:
+                await redis.delete(*keys_to_delete)
+                logger.info(f"Redis cache cleared: {len(keys_to_delete)} keys deleted")
+
+        except Exception as e:
+            logger.warning(f"Redis clear error: {e}")
+
+
+# Exported interface and implementations
+__all__ = [
+    'ICacheProvider',
+    'LRUCacheProvider',
+    'DualLayerCacheProvider',
+    'RedisCacheProvider'
+]
+
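
For orientation, here is a minimal sketch of how the new providers in aiecs/utils/cache_provider.py compose, based only on the classes and signatures shown in the hunk above. It assumes the global Redis client has already been initialized (per the RedisCacheProvider note about initialize_redis_client()) and that ExecutionUtils is importable from aiecs.utils.execution_utils as the LRUCacheProvider docstring example suggests; it is an illustrative sketch, not an official usage recipe from the package.

import asyncio

from aiecs.utils.execution_utils import ExecutionUtils
from aiecs.utils.cache_provider import (
    DualLayerCacheProvider,
    LRUCacheProvider,
    RedisCacheProvider,
)


async def main():
    # L1: in-memory LRU cache wrapping ExecutionUtils (sizes taken from the docstring example)
    l1 = LRUCacheProvider(ExecutionUtils(cache_size=100, cache_ttl=3600))

    # L2: Redis-backed cache; requires the global Redis client to be initialized beforehand
    l2 = await RedisCacheProvider.create(prefix="my_app:", default_ttl=3600)

    # Reads check L1 first, fall back to L2, and warm L1 on an L2 hit; writes go to both layers
    cache = DualLayerCacheProvider(l1_provider=l1, l2_provider=l2, l1_ttl=300)

    await cache.set_async("key1", {"answer": 42}, ttl=600)
    print(await cache.get_async("key1"))
    print(cache.get_stats())


asyncio.run(main())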