zen-ai-pentest 2.0.0 (zen_ai_pentest-2.0.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agents/__init__.py +28 -0
- agents/agent_base.py +239 -0
- agents/agent_orchestrator.py +346 -0
- agents/analysis_agent.py +225 -0
- agents/cli.py +258 -0
- agents/exploit_agent.py +224 -0
- agents/integration.py +211 -0
- agents/post_scan_agent.py +937 -0
- agents/react_agent.py +384 -0
- agents/react_agent_enhanced.py +616 -0
- agents/react_agent_vm.py +298 -0
- agents/research_agent.py +176 -0
- api/__init__.py +11 -0
- api/auth.py +123 -0
- api/main.py +1027 -0
- api/schemas.py +357 -0
- api/websocket.py +97 -0
- autonomous/__init__.py +122 -0
- autonomous/agent.py +253 -0
- autonomous/agent_loop.py +1370 -0
- autonomous/exploit_validator.py +1537 -0
- autonomous/memory.py +448 -0
- autonomous/react.py +339 -0
- autonomous/tool_executor.py +488 -0
- backends/__init__.py +16 -0
- backends/chatgpt_direct.py +133 -0
- backends/claude_direct.py +130 -0
- backends/duckduckgo.py +138 -0
- backends/openrouter.py +120 -0
- benchmarks/__init__.py +149 -0
- benchmarks/benchmark_engine.py +904 -0
- benchmarks/ci_benchmark.py +785 -0
- benchmarks/comparison.py +729 -0
- benchmarks/metrics.py +553 -0
- benchmarks/run_benchmarks.py +809 -0
- ci_cd/__init__.py +2 -0
- core/__init__.py +17 -0
- core/async_pool.py +282 -0
- core/asyncio_fix.py +222 -0
- core/cache.py +472 -0
- core/container.py +277 -0
- core/database.py +114 -0
- core/input_validator.py +353 -0
- core/models.py +288 -0
- core/orchestrator.py +611 -0
- core/plugin_manager.py +571 -0
- core/rate_limiter.py +405 -0
- core/secure_config.py +328 -0
- core/shield_integration.py +296 -0
- modules/__init__.py +46 -0
- modules/cve_database.py +362 -0
- modules/exploit_assist.py +330 -0
- modules/nuclei_integration.py +480 -0
- modules/osint.py +604 -0
- modules/protonvpn.py +554 -0
- modules/recon.py +165 -0
- modules/sql_injection_db.py +826 -0
- modules/tool_orchestrator.py +498 -0
- modules/vuln_scanner.py +292 -0
- modules/wordlist_generator.py +566 -0
- risk_engine/__init__.py +99 -0
- risk_engine/business_impact.py +267 -0
- risk_engine/business_impact_calculator.py +563 -0
- risk_engine/cvss.py +156 -0
- risk_engine/epss.py +190 -0
- risk_engine/example_usage.py +294 -0
- risk_engine/false_positive_engine.py +1073 -0
- risk_engine/scorer.py +304 -0
- web_ui/backend/main.py +471 -0
- zen_ai_pentest-2.0.0.dist-info/METADATA +795 -0
- zen_ai_pentest-2.0.0.dist-info/RECORD +75 -0
- zen_ai_pentest-2.0.0.dist-info/WHEEL +5 -0
- zen_ai_pentest-2.0.0.dist-info/entry_points.txt +2 -0
- zen_ai_pentest-2.0.0.dist-info/licenses/LICENSE +21 -0
- zen_ai_pentest-2.0.0.dist-info/top_level.txt +10 -0
core/cache.py
ADDED
@@ -0,0 +1,472 @@
"""
Caching Strategy - Redis/Memory/SQLite backends
"""

import asyncio
import hashlib
import json
import logging
import pickle
from datetime import datetime, timedelta
from functools import wraps
from pathlib import Path
from typing import Any, Callable, Optional, Union

try:
    import redis.asyncio as redis

    REDIS_AVAILABLE = True
except ImportError:
    REDIS_AVAILABLE = False

import aiosqlite

logger = logging.getLogger(__name__)


class CacheBackend:
    """Abstract cache backend interface"""

    async def get(self, key: str) -> Optional[Any]:
        raise NotImplementedError()

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        raise NotImplementedError()

    async def delete(self, key: str) -> bool:
        raise NotImplementedError()

    async def exists(self, key: str) -> bool:
        raise NotImplementedError()

    async def clear(self) -> bool:
        raise NotImplementedError()

    async def close(self):
        pass


class MemoryCache(CacheBackend):
    """In-memory cache with TTL support"""

    def __init__(self, max_size: int = 1000):
        self._cache: dict = {}
        self._expiry: dict = {}
        self._max_size = max_size
        self._lock = asyncio.Lock()

    async def get(self, key: str) -> Optional[Any]:
        async with self._lock:
            # Check expiry
            if key in self._expiry:
                if datetime.utcnow() > self._expiry[key]:
                    del self._cache[key]
                    del self._expiry[key]
                    return None

            return self._cache.get(key)

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        async with self._lock:
            # Evict oldest if at capacity
            if len(self._cache) >= self._max_size and key not in self._cache:
                self._evict_oldest()

            self._cache[key] = value

            if ttl:
                self._expiry[key] = datetime.utcnow() + timedelta(seconds=ttl)

            return True

    async def delete(self, key: str) -> bool:
        async with self._lock:
            self._cache.pop(key, None)
            self._expiry.pop(key, None)
            return True

    async def exists(self, key: str) -> bool:
        return await self.get(key) is not None

    async def clear(self) -> bool:
        async with self._lock:
            self._cache.clear()
            self._expiry.clear()
            return True

    def _evict_oldest(self):
        """Remove oldest entries"""
        if not self._cache:
            return

        # Remove expired entries first
        now = datetime.utcnow()
        expired = [k for k, exp in self._expiry.items() if exp and now > exp]
        for k in expired:
            del self._cache[k]
            del self._expiry[k]

        # If still at capacity, evict the entry closest to expiry, falling
        # back to the oldest-inserted key: min() over _expiry would raise
        # ValueError when no entry carries a TTL.
        if len(self._cache) >= self._max_size:
            if self._expiry:
                oldest = min(self._expiry, key=self._expiry.get)
            else:
                oldest = next(iter(self._cache))
            self._cache.pop(oldest, None)
            self._expiry.pop(oldest, None)
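
A minimal usage sketch of the TTL semantics above (editorial illustration, not part of cache.py; it assumes the wheel's core package is importable):

import asyncio

from core.cache import MemoryCache


async def demo() -> None:
    cache = MemoryCache(max_size=100)
    await cache.set("host", "10.0.0.5", ttl=1)  # entry expires one second after the write
    print(await cache.get("host"))  # -> "10.0.0.5"
    await asyncio.sleep(1.1)
    print(await cache.get("host"))  # -> None; the expired entry is dropped lazily on read


asyncio.run(demo())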


class SQLiteCache(CacheBackend):
    """SQLite-based persistent cache"""

    def __init__(self, db_path: Optional[Path] = None):
        self.db_path = db_path or Path.home() / ".cache" / "zen-ai-pentest" / "cache.db"
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._db: Optional[aiosqlite.Connection] = None
        self._lock = asyncio.Lock()

    async def _get_db(self) -> aiosqlite.Connection:
        if self._db is None:
            self._db = await aiosqlite.connect(self.db_path)
            await self._db.execute("""
                CREATE TABLE IF NOT EXISTS cache (
                    key TEXT PRIMARY KEY,
                    value BLOB,
                    expires TIMESTAMP
                )
            """)
            await self._db.execute("""
                CREATE INDEX IF NOT EXISTS idx_expires ON cache(expires)
            """)
            await self._db.commit()
        return self._db

    async def get(self, key: str) -> Optional[Any]:
        async with self._lock:
            db = await self._get_db()

            cursor = await db.execute(
                "SELECT value, expires FROM cache WHERE key = ?", (key,)
            )
            row = await cursor.fetchone()

            if row is None:
                return None

            value, expires = row

            # Check expiry. Delete inline: calling self.delete() here would
            # try to re-acquire the non-reentrant lock and deadlock.
            if expires and datetime.utcnow() > datetime.fromisoformat(expires):
                await db.execute("DELETE FROM cache WHERE key = ?", (key,))
                await db.commit()
                return None

            return pickle.loads(value)

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        async with self._lock:
            db = await self._get_db()

            expires = None
            if ttl:
                expires = (datetime.utcnow() + timedelta(seconds=ttl)).isoformat()

            serialized = pickle.dumps(value)

            await db.execute(
                """INSERT OR REPLACE INTO cache (key, value, expires)
                   VALUES (?, ?, ?)""",
                (key, serialized, expires),
            )
            await db.commit()
            return True

    async def delete(self, key: str) -> bool:
        async with self._lock:
            db = await self._get_db()
            await db.execute("DELETE FROM cache WHERE key = ?", (key,))
            await db.commit()
            return True

    async def exists(self, key: str) -> bool:
        return await self.get(key) is not None

    async def clear(self) -> bool:
        async with self._lock:
            db = await self._get_db()
            await db.execute("DELETE FROM cache")
            await db.commit()
            return True

    async def cleanup_expired(self):
        """Remove expired entries"""
        async with self._lock:
            db = await self._get_db()
            await db.execute(
                "DELETE FROM cache WHERE expires < ?", (datetime.utcnow().isoformat(),)
            )
            await db.commit()

    async def close(self):
        if self._db:
            await self._db.close()
            self._db = None


class RedisCache(CacheBackend):
    """Redis cache backend"""

    def __init__(
        self,
        host: str = "localhost",
        port: int = 6379,
        db: int = 0,
        password: Optional[str] = None,
    ):
        if not REDIS_AVAILABLE:
            raise ImportError("redis not installed: pip install redis")

        self.host = host
        self.port = port
        self.db = db
        self.password = password
        self._client: Optional[redis.Redis] = None

    async def _get_client(self) -> redis.Redis:
        if self._client is None:
            self._client = redis.Redis(
                host=self.host,
                port=self.port,
                db=self.db,
                password=self.password,
                decode_responses=False,
            )
        return self._client

    async def get(self, key: str) -> Optional[Any]:
        try:
            client = await self._get_client()
            value = await client.get(key)
            if value:
                return pickle.loads(value)
            return None
        except Exception as e:
            logger.error(f"Redis get error: {e}")
            return None

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        try:
            client = await self._get_client()
            serialized = pickle.dumps(value)
            await client.set(key, serialized, ex=ttl)
            return True
        except Exception as e:
            logger.error(f"Redis set error: {e}")
            return False

    async def delete(self, key: str) -> bool:
        try:
            client = await self._get_client()
            await client.delete(key)
            return True
        except Exception as e:
            logger.error(f"Redis delete error: {e}")
            return False

    async def exists(self, key: str) -> bool:
        try:
            client = await self._get_client()
            return await client.exists(key) > 0
        except Exception as e:
            logger.error(f"Redis exists error: {e}")
            return False

    async def clear(self) -> bool:
        try:
            client = await self._get_client()
            await client.flushdb()
            return True
        except Exception as e:
            logger.error(f"Redis clear error: {e}")
            return False

    async def close(self):
        if self._client:
            await self._client.close()
            self._client = None


class MultiTierCache:
    """
    Multi-tier caching (L1: Memory, L2: SQLite, L3: Redis)
    """

    def __init__(
        self,
        memory_size: int = 100,
        sqlite_path: Optional[Path] = None,
        redis_config: Optional[dict] = None,
    ):
        self.l1 = MemoryCache(max_size=memory_size)
        self.l2 = SQLiteCache(sqlite_path) if sqlite_path else None
        self.l3 = None

        if redis_config and REDIS_AVAILABLE:
            try:
                self.l3 = RedisCache(**redis_config)
            except Exception as e:
                logger.warning(f"Redis cache unavailable: {e}")

    async def get(self, key: str) -> Optional[Any]:
        """Get from cache (L1 -> L2 -> L3)"""
        # Try L1
        value = await self.l1.get(key)
        if value is not None:
            return value

        # Try L2
        if self.l2:
            value = await self.l2.get(key)
            if value is not None:
                # Promote to L1
                await self.l1.set(key, value)
                return value

        # Try L3
        if self.l3:
            value = await self.l3.get(key)
            if value is not None:
                # Promote to L1/L2
                await self.l1.set(key, value)
                if self.l2:
                    await self.l2.set(key, value)
                return value

        return None

    async def set(
        self,
        key: str,
        value: Any,
        ttl: Optional[int] = None,
        tiers: str = "all",  # "all", "memory", "persistent"
    ) -> bool:
        """Set in cache tiers"""
        success = True

        if tiers in ("all", "memory"):
            success = await self.l1.set(key, value, ttl) and success

        if tiers in ("all", "persistent") and self.l2:
            success = await self.l2.set(key, value, ttl) and success

        if tiers == "all" and self.l3:
            success = await self.l3.set(key, value, ttl) and success

        return success

    async def delete(self, key: str) -> bool:
        """Delete from all tiers"""
        await self.l1.delete(key)
        if self.l2:
            await self.l2.delete(key)
        if self.l3:
            await self.l3.delete(key)
        return True

    async def close(self):
        if self.l2:
            await self.l2.close()
        if self.l3:
            await self.l3.close()
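
A sketch of tiered reads and writes with MultiTierCache (illustration only; the SQLite path and Redis settings here are assumptions for the demo, not package defaults):

import asyncio
from pathlib import Path

from core.cache import MultiTierCache


async def demo() -> None:
    cache = MultiTierCache(
        memory_size=100,
        sqlite_path=Path("/tmp/zen-cache.db"),  # assumed demo path
        redis_config={"host": "localhost", "port": 6379},  # optional; ignored if redis is not installed
    )
    await cache.set("scan:example.com", {"open_ports": [22, 443]}, ttl=300)
    # Reads fall through L1 -> L2 -> L3, and lower-tier hits are promoted upward.
    print(await cache.get("scan:example.com"))
    await cache.close()


asyncio.run(demo())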


def generate_cache_key(*args, **kwargs) -> str:
    """Generate cache key from function arguments"""
    # default=str keeps non-JSON-serializable arguments from raising;
    # MD5 is acceptable here because the digest is only a cache key.
    key_data = json.dumps({"args": args, "kwargs": kwargs}, sort_keys=True, default=str)
    return hashlib.md5(key_data.encode()).hexdigest()


def cached(
    backend: Union[CacheBackend, str] = "memory",
    ttl: int = 3600,
    key_func: Optional[Callable] = None,
):
    """
    Decorator for caching function results

    Args:
        backend: Cache backend or "memory"/"sqlite"/"redis"
        ttl: Time to live in seconds
        key_func: Custom key generation function
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            cache = _get_cache_backend(backend)

            if key_func:
                key = key_func(*args, **kwargs)
            else:
                key = f"{func.__module__}.{func.__name__}:{generate_cache_key(*args, **kwargs)}"

            # Try cache
            cached_value = await cache.get(key)
            if cached_value is not None:
                logger.debug(f"Cache hit: {key}")
                return cached_value

            # Execute and cache. Only await when func returned a coroutine;
            # awaiting the plain return value of a sync func would raise TypeError.
            result = func(*args, **kwargs)
            if asyncio.iscoroutine(result):
                result = await result
            await cache.set(key, result, ttl)
            return result

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            # For sync functions, run the async cache logic in a fresh event
            # loop; this cannot be nested inside an already-running loop.
            return asyncio.run(async_wrapper(*args, **kwargs))

        return async_wrapper if asyncio.iscoroutinefunction(func) else sync_wrapper

    return decorator
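
The decorator in use on an async function (a sketch; fetch_cve here is a hypothetical stand-in, not part of this module):

import asyncio

from core.cache import cached


@cached(backend="memory", ttl=600)
async def fetch_cve(cve_id: str) -> dict:
    # Hypothetical slow lookup; real code would call an external API here.
    await asyncio.sleep(1)
    return {"id": cve_id, "cvss": 9.8}


async def demo() -> None:
    await fetch_cve("CVE-2021-44228")  # slow path; the result is cached for 600 s
    await fetch_cve("CVE-2021-44228")  # served from the memory tier


asyncio.run(demo())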


# Global cache instances
_memory_cache = None
_sqlite_cache = None
_redis_cache = None


def _get_cache_backend(backend: Union[CacheBackend, str]) -> CacheBackend:
    """Get or create cache backend"""
    global _memory_cache, _sqlite_cache, _redis_cache

    if isinstance(backend, CacheBackend):
        return backend

    if backend == "memory":
        if _memory_cache is None:
            _memory_cache = MemoryCache()
        return _memory_cache

    elif backend == "sqlite":
        if _sqlite_cache is None:
            _sqlite_cache = SQLiteCache()
        return _sqlite_cache

    elif backend == "redis":
        if _redis_cache is None:
            _redis_cache = RedisCache()
        return _redis_cache

    raise ValueError(f"Unknown cache backend: {backend}")


# Convenience functions for CVE caching
async def get_cached_cve(cve_id: str) -> Optional[dict]:
    """Get CVE from cache"""
    cache = _get_cache_backend("sqlite")
    return await cache.get(f"cve:{cve_id.upper()}")


async def cache_cve(cve_id: str, data: dict, ttl: int = 86400 * 7):
    """Cache CVE data (default TTL: 7 days)"""
    cache = _get_cache_backend("sqlite")
    await cache.set(f"cve:{cve_id.upper()}", data, ttl)
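
Putting the CVE helpers together (the record is illustrative data; the helpers write through the module's default SQLite cache under ~/.cache/zen-ai-pentest/):

import asyncio

from core.cache import cache_cve, get_cached_cve


async def demo() -> None:
    record = {"id": "CVE-2021-44228", "cvss": 10.0}
    await cache_cve("cve-2021-44228", record)  # the key is upper-cased internally
    print(await get_cached_cve("CVE-2021-44228"))  # read back from the SQLite cache


asyncio.run(demo())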