kailash 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +3 -3
- kailash/api/custom_nodes_secure.py +3 -3
- kailash/api/gateway.py +1 -1
- kailash/api/studio.py +2 -3
- kailash/api/workflow_api.py +3 -4
- kailash/core/resilience/bulkhead.py +460 -0
- kailash/core/resilience/circuit_breaker.py +92 -10
- kailash/edge/discovery.py +86 -0
- kailash/mcp_server/__init__.py +334 -0
- kailash/mcp_server/advanced_features.py +1022 -0
- kailash/{mcp → mcp_server}/ai_registry_server.py +29 -4
- kailash/mcp_server/auth.py +789 -0
- kailash/mcp_server/client.py +712 -0
- kailash/mcp_server/discovery.py +1593 -0
- kailash/mcp_server/errors.py +673 -0
- kailash/mcp_server/oauth.py +1727 -0
- kailash/mcp_server/protocol.py +1126 -0
- kailash/mcp_server/registry_integration.py +587 -0
- kailash/mcp_server/server.py +1747 -0
- kailash/{mcp → mcp_server}/servers/ai_registry.py +2 -2
- kailash/mcp_server/transports.py +1169 -0
- kailash/mcp_server/utils/cache.py +510 -0
- kailash/middleware/auth/auth_manager.py +3 -3
- kailash/middleware/communication/api_gateway.py +2 -9
- kailash/middleware/communication/realtime.py +1 -1
- kailash/middleware/mcp/client_integration.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +2 -2
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/admin/audit_log.py +6 -6
- kailash/nodes/admin/permission_check.py +8 -8
- kailash/nodes/admin/role_management.py +32 -28
- kailash/nodes/admin/schema.sql +6 -1
- kailash/nodes/admin/schema_manager.py +13 -13
- kailash/nodes/admin/security_event.py +16 -20
- kailash/nodes/admin/tenant_isolation.py +3 -3
- kailash/nodes/admin/transaction_utils.py +3 -3
- kailash/nodes/admin/user_management.py +21 -22
- kailash/nodes/ai/a2a.py +11 -11
- kailash/nodes/ai/ai_providers.py +9 -12
- kailash/nodes/ai/embedding_generator.py +13 -14
- kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
- kailash/nodes/ai/iterative_llm_agent.py +3 -3
- kailash/nodes/ai/llm_agent.py +213 -36
- kailash/nodes/ai/self_organizing.py +2 -2
- kailash/nodes/alerts/discord.py +4 -4
- kailash/nodes/api/graphql.py +6 -6
- kailash/nodes/api/http.py +12 -17
- kailash/nodes/api/rate_limiting.py +4 -4
- kailash/nodes/api/rest.py +15 -15
- kailash/nodes/auth/mfa.py +3 -4
- kailash/nodes/auth/risk_assessment.py +2 -2
- kailash/nodes/auth/session_management.py +5 -5
- kailash/nodes/auth/sso.py +143 -0
- kailash/nodes/base.py +6 -2
- kailash/nodes/base_async.py +16 -2
- kailash/nodes/base_with_acl.py +2 -2
- kailash/nodes/cache/__init__.py +9 -0
- kailash/nodes/cache/cache.py +1172 -0
- kailash/nodes/cache/cache_invalidation.py +870 -0
- kailash/nodes/cache/redis_pool_manager.py +595 -0
- kailash/nodes/code/async_python.py +2 -1
- kailash/nodes/code/python.py +196 -35
- kailash/nodes/compliance/data_retention.py +6 -6
- kailash/nodes/compliance/gdpr.py +5 -5
- kailash/nodes/data/__init__.py +10 -0
- kailash/nodes/data/optimistic_locking.py +906 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/redis.py +349 -0
- kailash/nodes/data/sql.py +314 -3
- kailash/nodes/data/streaming.py +21 -0
- kailash/nodes/enterprise/__init__.py +8 -0
- kailash/nodes/enterprise/audit_logger.py +285 -0
- kailash/nodes/enterprise/batch_processor.py +22 -3
- kailash/nodes/enterprise/data_lineage.py +1 -1
- kailash/nodes/enterprise/mcp_executor.py +205 -0
- kailash/nodes/enterprise/service_discovery.py +150 -0
- kailash/nodes/enterprise/tenant_assignment.py +108 -0
- kailash/nodes/logic/async_operations.py +2 -2
- kailash/nodes/logic/convergence.py +1 -1
- kailash/nodes/logic/operations.py +1 -1
- kailash/nodes/monitoring/__init__.py +11 -1
- kailash/nodes/monitoring/health_check.py +456 -0
- kailash/nodes/monitoring/log_processor.py +817 -0
- kailash/nodes/monitoring/metrics_collector.py +627 -0
- kailash/nodes/monitoring/performance_benchmark.py +137 -11
- kailash/nodes/rag/advanced.py +7 -7
- kailash/nodes/rag/agentic.py +49 -2
- kailash/nodes/rag/conversational.py +3 -3
- kailash/nodes/rag/evaluation.py +3 -3
- kailash/nodes/rag/federated.py +3 -3
- kailash/nodes/rag/graph.py +3 -3
- kailash/nodes/rag/multimodal.py +3 -3
- kailash/nodes/rag/optimized.py +5 -5
- kailash/nodes/rag/privacy.py +3 -3
- kailash/nodes/rag/query_processing.py +6 -6
- kailash/nodes/rag/realtime.py +1 -1
- kailash/nodes/rag/registry.py +2 -6
- kailash/nodes/rag/router.py +1 -1
- kailash/nodes/rag/similarity.py +7 -7
- kailash/nodes/rag/strategies.py +4 -4
- kailash/nodes/security/abac_evaluator.py +6 -6
- kailash/nodes/security/behavior_analysis.py +5 -6
- kailash/nodes/security/credential_manager.py +1 -1
- kailash/nodes/security/rotating_credentials.py +11 -11
- kailash/nodes/security/threat_detection.py +8 -8
- kailash/nodes/testing/credential_testing.py +2 -2
- kailash/nodes/transform/processors.py +5 -5
- kailash/runtime/local.py +162 -14
- kailash/runtime/parameter_injection.py +425 -0
- kailash/runtime/parameter_injector.py +657 -0
- kailash/runtime/testing.py +2 -2
- kailash/testing/fixtures.py +2 -2
- kailash/workflow/builder.py +99 -18
- kailash/workflow/builder_improvements.py +207 -0
- kailash/workflow/input_handling.py +170 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/METADATA +21 -8
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/RECORD +126 -101
- kailash/mcp/__init__.py +0 -53
- kailash/mcp/client.py +0 -445
- kailash/mcp/server.py +0 -292
- kailash/mcp/server_enhanced.py +0 -449
- kailash/mcp/utils/cache.py +0 -267
- /kailash/{mcp → mcp_server}/client_new.py +0 -0
- /kailash/{mcp → mcp_server}/utils/__init__.py +0 -0
- /kailash/{mcp → mcp_server}/utils/config.py +0 -0
- /kailash/{mcp → mcp_server}/utils/formatters.py +0 -0
- /kailash/{mcp → mcp_server}/utils/metrics.py +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,510 @@
|
|
1
|
+
"""
|
2
|
+
Caching utilities for MCP servers.
|
3
|
+
|
4
|
+
Provides LRU cache, TTL support, and decorators for method-level caching.
|
5
|
+
Based on patterns from production MCP server implementations.
|
6
|
+
"""
|
7
|
+
|
8
|
+
import asyncio
|
9
|
+
import functools
|
10
|
+
import json
|
11
|
+
import logging
|
12
|
+
import threading
|
13
|
+
import time
|
14
|
+
from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, TypeVar
|
15
|
+
|
16
|
+
logger = logging.getLogger(__name__)
|
17
|
+
|
18
|
+
F = TypeVar("F", bound=Callable[..., Any])
|
19
|
+
|
20
|
+
|
21
|
+
class LRUCache:
    """
    Thread-safe LRU cache with TTL (time-to-live) support.

    Features:
    - Configurable maximum size
    - TTL expiration for entries (instance default plus per-entry override)
    - Thread-safe operations
    - Performance statistics
    """

    def __init__(self, max_size: int = 128, ttl: int = 300):
        """
        Initialize LRU cache.

        Args:
            max_size: Maximum number of entries to store
            ttl: Default time-to-live in seconds (0 = no expiration)
        """
        self.max_size = max_size
        self.ttl = ttl
        # key -> (value, insert timestamp, effective TTL for this entry)
        self._cache: Dict[str, Tuple[Any, float, int]] = {}
        # key -> last access time; used to pick the LRU victim on eviction
        self._access_order: Dict[str, float] = {}
        self._lock = threading.RLock()

        # Statistics
        self._hits = 0
        self._misses = 0
        self._evictions = 0

    def get(self, key: str) -> Optional[Any]:
        """Get value from cache if it exists and hasn't expired.

        Note: a stored value of ``None`` is indistinguishable from a miss.
        """
        with self._lock:
            entry = self._cache.get(key)
            if entry is None:
                self._misses += 1
                return None

            value, timestamp, entry_ttl = entry

            # Check TTL expiration using the TTL stored with the entry
            # (0 or negative means "never expires").
            if entry_ttl > 0 and time.time() - timestamp > entry_ttl:
                del self._cache[key]
                del self._access_order[key]
                self._misses += 1
                return None

            # Update access time for LRU
            self._access_order[key] = time.time()
            self._hits += 1
            return value

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Set value in cache, evicting LRU items if necessary.

        Args:
            key: Cache key
            value: Value to cache
            ttl: Time-to-live in seconds (uses instance default if None)
        """
        with self._lock:
            current_time = time.time()
            # BUGFIX: the ttl argument was previously accepted but ignored
            # (expiration always used the instance default). It is now
            # stored with the entry and honored by get().
            effective_ttl = self.ttl if ttl is None else ttl

            # If key exists, update it in place
            if key in self._cache:
                self._cache[key] = (value, current_time, effective_ttl)
                self._access_order[key] = current_time
                return

            # Make room for the new entry if the cache is full
            if len(self._cache) >= self.max_size:
                self._evict_lru()

            # Add new entry
            self._cache[key] = (value, current_time, effective_ttl)
            self._access_order[key] = current_time

    def _evict_lru(self) -> None:
        """Evict the least recently used item."""
        if not self._access_order:
            return

        lru_key = min(self._access_order.keys(), key=self._access_order.get)
        del self._cache[lru_key]
        del self._access_order[lru_key]
        self._evictions += 1

    def clear(self) -> None:
        """Clear all entries from cache."""
        with self._lock:
            self._cache.clear()
            self._access_order.clear()

    def stats(self) -> Dict[str, Any]:
        """Get cache performance statistics."""
        with self._lock:
            total_requests = self._hits + self._misses
            hit_rate = self._hits / total_requests if total_requests > 0 else 0

            return {
                "hits": self._hits,
                "misses": self._misses,
                "evictions": self._evictions,
                "hit_rate": hit_rate,
                "size": len(self._cache),
                "max_size": self.max_size,
                "ttl": self.ttl,
            }
|
128
|
+
|
129
|
+
|
130
|
+
class UnifiedCache:
    """
    Unified cache interface that works with both Redis and in-memory LRU cache.

    This provides a consistent interface regardless of the backend.
    Includes cache stampede prevention using single-flight pattern.
    """

    def __init__(
        self,
        name: str,
        ttl: int = 300,
        redis_client=None,
        redis_prefix: str = "mcp:",
        lru_cache=None,
    ):
        """Initialize unified cache.

        Args:
            name: Cache name
            ttl: Default TTL
            redis_client: Redis client (if using Redis backend)
            redis_prefix: Redis key prefix
            lru_cache: LRU cache instance (if using memory backend)
        """
        self.name = name
        self.ttl = ttl
        self.redis_client = redis_client
        self.redis_prefix = redis_prefix
        self.lru_cache = lru_cache
        self.is_redis = redis_client is not None

        # Single-flight bookkeeping for stampede prevention:
        # key -> Future that resolves to the value being computed for it.
        self._in_flight: Dict[str, asyncio.Future] = {}
        self._flight_lock = asyncio.Lock()

    def _make_key(self, key: str) -> str:
        """Make cache key with name prefix (Redis backend only)."""
        if self.is_redis:
            return f"{self.redis_prefix}{self.name}:{key}"
        return key

    def get(self, key: str):
        """Get value from cache (synchronous).

        The Redis backend requires async I/O, so this sync accessor always
        misses there; use aget() for Redis-backed caches.
        """
        if self.is_redis:
            return None  # Fallback: sync access unsupported for Redis
        return self.lru_cache.get(key)

    def set(self, key: str, value, ttl: Optional[int] = None):
        """Set value in cache (synchronous).

        A no-op for the Redis backend; use aset() there.
        """
        if self.is_redis:
            pass  # Fallback: sync access unsupported for Redis
        else:
            self.lru_cache.set(key, value, ttl or self.ttl)

    async def aget(self, key: str):
        """Async get value from cache.

        Returns None on a miss or on any Redis error (logged).
        """
        if self.is_redis:
            try:
                redis_key = self._make_key(key)
                value = await self.redis_client.get(redis_key)
                return json.loads(value) if value else None
            except Exception as e:
                logger.error(f"Redis get error: {e}")
                return None
        else:
            return self.lru_cache.get(key)

    async def aset(self, key: str, value, ttl: Optional[int] = None):
        """Async set value in cache.

        Returns:
            True on success, False on a Redis error (logged).
        """
        if self.is_redis:
            try:
                redis_key = self._make_key(key)
                serialized_value = json.dumps(value)
                cache_ttl = ttl or self.ttl
                await self.redis_client.setex(redis_key, cache_ttl, serialized_value)
                return True
            except Exception as e:
                logger.error(f"Redis set error: {e}")
                return False
        else:
            self.lru_cache.set(key, value, ttl or self.ttl)
            return True

    async def get_or_compute(
        self,
        key: str,
        compute_func: Callable[[], Awaitable[Any]],
        ttl: Optional[int] = None,
    ) -> Any:
        """Get value from cache or compute it if not present (with stampede prevention).

        This method implements single-flight pattern to prevent cache stampede.
        If multiple requests come in for the same key while it's being computed,
        only one will actually execute the compute function.

        Note: a cached value of None is indistinguishable from a miss, so
        computations returning None are re-run on every call.

        Args:
            key: Cache key
            compute_func: Async function to compute the value if not in cache
            ttl: TTL for cached value

        Returns:
            The cached or computed value
        """
        # First try to get from cache
        cached_value = await self.aget(key)
        if cached_value is not None:
            return cached_value

        # Register (or discover) the in-flight computation while holding the
        # lock, but never await anything else under it.
        async with self._flight_lock:
            existing = self._in_flight.get(key)
            if existing is None:
                future = asyncio.get_running_loop().create_future()
                self._in_flight[key] = future

        if existing is not None:
            # BUGFIX: previously this await happened while still holding
            # _flight_lock, which serialized single-flight registration for
            # *all* keys behind one slow computation. Wait outside the lock.
            logger.debug(f"Cache key {key} already being computed, waiting...")
            return await existing

        try:
            # Compute the value
            logger.debug(f"Computing value for cache key {key}")
            value = await compute_func()

            # Cache the result
            await self.aset(key, value, ttl)

            # Notify waiting requests
            future.set_result(value)
            return value

        except Exception as e:
            # Notify waiting requests of the error
            future.set_exception(e)
            raise
        finally:
            # Clean up in-flight tracking
            async with self._flight_lock:
                self._in_flight.pop(key, None)

    def clear(self):
        """Clear cache (memory backend only; Redis clearing needs async I/O)."""
        if self.is_redis:
            # For async operations, this would need to be implemented separately
            pass
        else:
            self.lru_cache.clear()

    def stats(self):
        """Get cache statistics."""
        if self.is_redis:
            return {"backend": "redis", "name": self.name}
        else:
            return self.lru_cache.stats()
|
290
|
+
|
291
|
+
|
292
|
+
class CacheManager:
    """
    High-level cache management with multiple caching strategies.

    Provides easy-to-use caching for MCP servers with different cache types
    for different use cases.
    """

    def __init__(
        self,
        enabled: bool = True,
        default_ttl: int = 300,
        backend: str = "memory",
        config: Optional[Dict[str, Any]] = None,
    ):
        """
        Initialize cache manager.

        Args:
            enabled: Whether caching is enabled
            default_ttl: Default TTL for cache entries
            backend: Cache backend ("memory" or "redis")
            config: Backend-specific configuration
        """
        self.enabled = enabled
        self.default_ttl = default_ttl
        self.backend = backend
        self.config = config or {}
        self._caches: Dict[str, UnifiedCache] = {}

        # A Redis client is only created for the "redis" backend while
        # caching is enabled; otherwise it stays None and the in-memory
        # LRU path is used.
        self._redis = None
        if enabled and backend == "redis":
            self._init_redis()

    def get_cache(
        self, name: str, max_size: int = 128, ttl: Optional[int] = None
    ) -> UnifiedCache:
        """Get or create a named cache."""
        existing = self._caches.get(name)
        if existing is not None:
            return existing

        effective_ttl = self.default_ttl if ttl is None else ttl
        if self.backend == "redis" and self._redis:
            new_cache = UnifiedCache(
                name=name,
                ttl=effective_ttl,
                redis_client=self._redis,
                redis_prefix=self.config.get("prefix", "mcp:"),
            )
        else:
            new_cache = UnifiedCache(
                name=name,
                ttl=effective_ttl,
                lru_cache=LRUCache(max_size=max_size, ttl=effective_ttl),
            )
        self._caches[name] = new_cache
        return new_cache

    def cached(self, cache_name: str = "default", ttl: Optional[int] = None):
        """
        Decorator to cache function results.

        Args:
            cache_name: Name of cache to use
            ttl: TTL for this specific cache

        Returns:
            Decorated function with caching
        """

        def decorator(func: F) -> F:
            # Caching disabled: hand the function back untouched.
            if not self.enabled:
                return func

            target = self.get_cache(cache_name, ttl=ttl)

            if asyncio.iscoroutinefunction(func):

                @functools.wraps(func)
                async def async_wrapper(*args, **kwargs):
                    # Key derived from the function name and its arguments.
                    key = self._create_cache_key(func.__name__, args, kwargs)

                    hit = target.get(key)
                    if hit is not None:
                        logger.debug(f"Cache hit for {func.__name__}: {key}")
                        return hit

                    logger.debug(f"Cache miss for {func.__name__}: {key}")
                    value = await func(*args, **kwargs)
                    target.set(key, value)
                    return value

                return async_wrapper

            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs):
                # Key derived from the function name and its arguments.
                key = self._create_cache_key(func.__name__, args, kwargs)

                hit = target.get(key)
                if hit is not None:
                    logger.debug(f"Cache hit for {func.__name__}: {key}")
                    return hit

                logger.debug(f"Cache miss for {func.__name__}: {key}")
                value = func(*args, **kwargs)
                target.set(key, value)
                return value

            return sync_wrapper

        return decorator

    def _init_redis(self):
        """Initialize Redis connection (disables caching on failure)."""
        try:
            import redis.asyncio as redis

            redis_url = self.config.get("redis_url", "redis://localhost:6379")
            self._redis = redis.from_url(redis_url, decode_responses=True)
            logger.info(f"Initialized Redis cache backend: {redis_url}")
        except ImportError:
            logger.warning("Redis not available. Install with: pip install redis")
            self.enabled = False
        except Exception as e:
            logger.error(f"Failed to initialize Redis: {e}")
            self.enabled = False

    async def get_redis(self, key: str) -> Optional[Any]:
        """Get value from Redis cache."""
        if not self._redis:
            return None
        try:
            raw = await self._redis.get(self._make_redis_key(key))
            return json.loads(raw) if raw else None
        except Exception as e:
            logger.error(f"Redis get error: {e}")
            return None

    async def set_redis(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Set value in Redis cache."""
        if not self._redis:
            return False
        try:
            redis_key = self._make_redis_key(key)
            payload = json.dumps(value)
            if ttl:
                await self._redis.setex(redis_key, ttl, payload)
            else:
                await self._redis.set(redis_key, payload)
            return True
        except Exception as e:
            logger.error(f"Redis set error: {e}")
            return False

    def _make_redis_key(self, key: str) -> str:
        """Create Redis key with prefix."""
        return f"{self.config.get('prefix', 'mcp:')}{key}"

    def _create_cache_key(self, func_name: str, args: tuple, kwargs: dict) -> str:
        """Create a cache key from function name and arguments."""
        # Keyword arguments are sorted so argument order doesn't change the key.
        args_part = str(args) if args else ""
        kwargs_part = str(sorted(kwargs.items())) if kwargs else ""
        return f"{func_name}:{args_part}:{kwargs_part}"

    def clear_all(self) -> None:
        """Clear all caches."""
        for managed in self._caches.values():
            managed.clear()

    def stats(self) -> Dict[str, Dict[str, Any]]:
        """Get statistics for all caches."""
        report: Dict[str, Dict[str, Any]] = {}
        for cache_name, managed in self._caches.items():
            report[cache_name] = managed.stats()
        return report
|
470
|
+
|
471
|
+
|
472
|
+
# Global cache manager instance shared by the module-level convenience
# helpers below (cached_query, get_cache_stats, clear_all_caches).
# Constructed with defaults: enabled, in-memory backend, 300-second TTL.
_global_cache_manager = CacheManager()
|
474
|
+
|
475
|
+
|
476
|
+
def cached_query(cache_name: str = "query", ttl: int = 300, enabled: bool = True):
    """
    Simple decorator for caching query results.

    This is a convenience decorator that uses the global cache manager.

    Args:
        cache_name: Name of cache to use
        ttl: Time-to-live for cache entries
        enabled: Whether caching is enabled

    Example:
        @cached_query("search", ttl=600)
        async def search_data(query: str) -> list:
            # Expensive search operation
            return results
    """

    def decorator(func: F) -> F:
        # Delegate to the global manager's decorator when caching is on;
        # otherwise return the function unchanged.
        if enabled:
            wrap = _global_cache_manager.cached(cache_name, ttl=ttl)
            return wrap(func)
        return func

    return decorator
|
501
|
+
|
502
|
+
|
503
|
+
def get_cache_stats() -> Dict[str, Dict[str, Any]]:
    """Return per-cache statistics from the global cache manager."""
    manager = _global_cache_manager
    return manager.stats()
|
506
|
+
|
507
|
+
|
508
|
+
def clear_all_caches() -> None:
    """Clear every cache held by the global cache manager."""
    manager = _global_cache_manager
    manager.clear_all()
|
@@ -229,7 +229,7 @@ class MiddlewareAuthManager:
|
|
229
229
|
api_key = f"sk_{secrets.token_urlsafe(32)}"
|
230
230
|
|
231
231
|
# Store API key metadata using credential manager
|
232
|
-
result = self.credential_manager.
|
232
|
+
result = self.credential_manager.execute(
|
233
233
|
operation="store_credential",
|
234
234
|
credential_name=api_key,
|
235
235
|
credential_data={
|
@@ -274,7 +274,7 @@ class MiddlewareAuthManager:
|
|
274
274
|
|
275
275
|
try:
|
276
276
|
# Verify using credential manager since rotating credential node doesn't have verify
|
277
|
-
result = self.credential_manager.
|
277
|
+
result = self.credential_manager.execute(
|
278
278
|
operation="get_credential", credential_name=api_key
|
279
279
|
)
|
280
280
|
|
@@ -309,7 +309,7 @@ class MiddlewareAuthManager:
|
|
309
309
|
Returns:
|
310
310
|
True if permission is granted
|
311
311
|
"""
|
312
|
-
result = self.permission_checker.
|
312
|
+
result = self.permission_checker.execute(
|
313
313
|
user_context={"user_id": user_id},
|
314
314
|
permission=permission,
|
315
315
|
resource=resource or {},
|
@@ -15,14 +15,7 @@ from datetime import datetime, timezone
|
|
15
15
|
from typing import Any, Dict, List, Optional, Union
|
16
16
|
from urllib.parse import parse_qs
|
17
17
|
|
18
|
-
from fastapi import
|
19
|
-
Depends,
|
20
|
-
FastAPI,
|
21
|
-
HTTPException,
|
22
|
-
Request,
|
23
|
-
WebSocket,
|
24
|
-
WebSocketDisconnect,
|
25
|
-
)
|
18
|
+
from fastapi import Depends, FastAPI, HTTPException, Request, WebSocket, WebSocketDisconnect
|
26
19
|
from fastapi.middleware.cors import CORSMiddleware
|
27
20
|
from fastapi.responses import JSONResponse, StreamingResponse
|
28
21
|
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
@@ -819,7 +812,7 @@ def create_gateway(
|
|
819
812
|
>>> # Or use default auth
|
820
813
|
>>> gateway = create_gateway(title="My App")
|
821
814
|
>>>
|
822
|
-
>>> gateway.
|
815
|
+
>>> gateway.execute(port=8000)
|
823
816
|
"""
|
824
817
|
# Pass auth_manager to APIGateway
|
825
818
|
if auth_manager is not None:
|
@@ -378,7 +378,7 @@ class WebhookManager:
|
|
378
378
|
|
379
379
|
try:
|
380
380
|
# Use HTTPRequestNode for delivery (it handles retries internally)
|
381
|
-
response = self.http_node.
|
381
|
+
response = self.http_node.execute(
|
382
382
|
url=url, method="POST", json_data=payload, headers=headers
|
383
383
|
)
|
384
384
|
|
@@ -24,8 +24,8 @@ from kailash.workflow.builder import WorkflowBuilder
|
|
24
24
|
|
25
25
|
# Import existing Kailash MCP components
|
26
26
|
try:
|
27
|
-
from kailash.
|
28
|
-
from kailash.
|
27
|
+
from kailash.mcp_server import MCPServer
|
28
|
+
from kailash.mcp_server.utils import CacheManager, ConfigManager, MetricsCollector
|
29
29
|
|
30
30
|
_KAILASH_MCP_AVAILABLE = True
|
31
31
|
except ImportError:
|
kailash/nodes/__init__.py
CHANGED
kailash/nodes/admin/audit_log.py
CHANGED
@@ -169,7 +169,7 @@ class EnterpriseAuditLogNode(Node):
|
|
169
169
|
... "ip_address": "192.168.1.100"
|
170
170
|
... }
|
171
171
|
... )
|
172
|
-
>>> result = node.
|
172
|
+
>>> result = node.execute()
|
173
173
|
>>> event_id = result["event"]["event_id"]
|
174
174
|
|
175
175
|
>>> # Query security events
|
@@ -184,7 +184,7 @@ class EnterpriseAuditLogNode(Node):
|
|
184
184
|
... },
|
185
185
|
... pagination={"page": 1, "size": 50}
|
186
186
|
... )
|
187
|
-
>>> result = node.
|
187
|
+
>>> result = node.execute()
|
188
188
|
>>> events = result["events"]
|
189
189
|
|
190
190
|
>>> # Generate compliance report
|
@@ -196,7 +196,7 @@ class EnterpriseAuditLogNode(Node):
|
|
196
196
|
... },
|
197
197
|
... export_format="json"
|
198
198
|
... )
|
199
|
-
>>> result = node.
|
199
|
+
>>> result = node.execute()
|
200
200
|
>>> report = result["report"]
|
201
201
|
"""
|
202
202
|
|
@@ -436,7 +436,7 @@ class EnterpriseAuditLogNode(Node):
|
|
436
436
|
}
|
437
437
|
)
|
438
438
|
|
439
|
-
db_result = self._db_node.
|
439
|
+
db_result = self._db_node.execute()
|
440
440
|
|
441
441
|
return {
|
442
442
|
"result": {
|
@@ -571,14 +571,14 @@ class EnterpriseAuditLogNode(Node):
|
|
571
571
|
self._db_node.config.update(
|
572
572
|
{"query": count_query, "params": params, "fetch_mode": "one"}
|
573
573
|
)
|
574
|
-
count_result = self._db_node.
|
574
|
+
count_result = self._db_node.execute()
|
575
575
|
total_count = count_result["result"]["data"]["total"]
|
576
576
|
|
577
577
|
# Execute data query
|
578
578
|
self._db_node.config.update(
|
579
579
|
{"query": data_query, "params": params, "fetch_mode": "all"}
|
580
580
|
)
|
581
|
-
data_result = self._db_node.
|
581
|
+
data_result = self._db_node.execute()
|
582
582
|
logs = data_result["result"]["data"]
|
583
583
|
|
584
584
|
# Calculate pagination info
|