kailash 0.6.3__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +3 -3
- kailash/api/custom_nodes_secure.py +3 -3
- kailash/api/gateway.py +1 -1
- kailash/api/studio.py +2 -3
- kailash/api/workflow_api.py +3 -4
- kailash/core/resilience/bulkhead.py +460 -0
- kailash/core/resilience/circuit_breaker.py +92 -10
- kailash/edge/discovery.py +86 -0
- kailash/mcp_server/__init__.py +309 -33
- kailash/mcp_server/advanced_features.py +1022 -0
- kailash/mcp_server/ai_registry_server.py +27 -2
- kailash/mcp_server/auth.py +789 -0
- kailash/mcp_server/client.py +645 -378
- kailash/mcp_server/discovery.py +1593 -0
- kailash/mcp_server/errors.py +673 -0
- kailash/mcp_server/oauth.py +1727 -0
- kailash/mcp_server/protocol.py +1126 -0
- kailash/mcp_server/registry_integration.py +587 -0
- kailash/mcp_server/server.py +1213 -98
- kailash/mcp_server/transports.py +1169 -0
- kailash/mcp_server/utils/__init__.py +6 -1
- kailash/mcp_server/utils/cache.py +250 -7
- kailash/middleware/auth/auth_manager.py +3 -3
- kailash/middleware/communication/api_gateway.py +2 -9
- kailash/middleware/communication/realtime.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +1 -1
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/admin/audit_log.py +6 -6
- kailash/nodes/admin/permission_check.py +8 -8
- kailash/nodes/admin/role_management.py +32 -28
- kailash/nodes/admin/schema.sql +6 -1
- kailash/nodes/admin/schema_manager.py +13 -13
- kailash/nodes/admin/security_event.py +16 -20
- kailash/nodes/admin/tenant_isolation.py +3 -3
- kailash/nodes/admin/transaction_utils.py +3 -3
- kailash/nodes/admin/user_management.py +21 -22
- kailash/nodes/ai/a2a.py +11 -11
- kailash/nodes/ai/ai_providers.py +9 -12
- kailash/nodes/ai/embedding_generator.py +13 -14
- kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
- kailash/nodes/ai/iterative_llm_agent.py +2 -2
- kailash/nodes/ai/llm_agent.py +210 -33
- kailash/nodes/ai/self_organizing.py +2 -2
- kailash/nodes/alerts/discord.py +4 -4
- kailash/nodes/api/graphql.py +6 -6
- kailash/nodes/api/http.py +12 -17
- kailash/nodes/api/rate_limiting.py +4 -4
- kailash/nodes/api/rest.py +15 -15
- kailash/nodes/auth/mfa.py +3 -4
- kailash/nodes/auth/risk_assessment.py +2 -2
- kailash/nodes/auth/session_management.py +5 -5
- kailash/nodes/auth/sso.py +143 -0
- kailash/nodes/base.py +6 -2
- kailash/nodes/base_async.py +16 -2
- kailash/nodes/base_with_acl.py +2 -2
- kailash/nodes/cache/__init__.py +9 -0
- kailash/nodes/cache/cache.py +1172 -0
- kailash/nodes/cache/cache_invalidation.py +870 -0
- kailash/nodes/cache/redis_pool_manager.py +595 -0
- kailash/nodes/code/async_python.py +2 -1
- kailash/nodes/code/python.py +196 -35
- kailash/nodes/compliance/data_retention.py +6 -6
- kailash/nodes/compliance/gdpr.py +5 -5
- kailash/nodes/data/__init__.py +10 -0
- kailash/nodes/data/optimistic_locking.py +906 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/redis.py +349 -0
- kailash/nodes/data/sql.py +314 -3
- kailash/nodes/data/streaming.py +21 -0
- kailash/nodes/enterprise/__init__.py +8 -0
- kailash/nodes/enterprise/audit_logger.py +285 -0
- kailash/nodes/enterprise/batch_processor.py +22 -3
- kailash/nodes/enterprise/data_lineage.py +1 -1
- kailash/nodes/enterprise/mcp_executor.py +205 -0
- kailash/nodes/enterprise/service_discovery.py +150 -0
- kailash/nodes/enterprise/tenant_assignment.py +108 -0
- kailash/nodes/logic/async_operations.py +2 -2
- kailash/nodes/logic/convergence.py +1 -1
- kailash/nodes/logic/operations.py +1 -1
- kailash/nodes/monitoring/__init__.py +11 -1
- kailash/nodes/monitoring/health_check.py +456 -0
- kailash/nodes/monitoring/log_processor.py +817 -0
- kailash/nodes/monitoring/metrics_collector.py +627 -0
- kailash/nodes/monitoring/performance_benchmark.py +137 -11
- kailash/nodes/rag/advanced.py +7 -7
- kailash/nodes/rag/agentic.py +49 -2
- kailash/nodes/rag/conversational.py +3 -3
- kailash/nodes/rag/evaluation.py +3 -3
- kailash/nodes/rag/federated.py +3 -3
- kailash/nodes/rag/graph.py +3 -3
- kailash/nodes/rag/multimodal.py +3 -3
- kailash/nodes/rag/optimized.py +5 -5
- kailash/nodes/rag/privacy.py +3 -3
- kailash/nodes/rag/query_processing.py +6 -6
- kailash/nodes/rag/realtime.py +1 -1
- kailash/nodes/rag/registry.py +2 -6
- kailash/nodes/rag/router.py +1 -1
- kailash/nodes/rag/similarity.py +7 -7
- kailash/nodes/rag/strategies.py +4 -4
- kailash/nodes/security/abac_evaluator.py +6 -6
- kailash/nodes/security/behavior_analysis.py +5 -6
- kailash/nodes/security/credential_manager.py +1 -1
- kailash/nodes/security/rotating_credentials.py +11 -11
- kailash/nodes/security/threat_detection.py +8 -8
- kailash/nodes/testing/credential_testing.py +2 -2
- kailash/nodes/transform/processors.py +5 -5
- kailash/runtime/local.py +162 -14
- kailash/runtime/parameter_injection.py +425 -0
- kailash/runtime/parameter_injector.py +657 -0
- kailash/runtime/testing.py +2 -2
- kailash/testing/fixtures.py +2 -2
- kailash/workflow/builder.py +99 -18
- kailash/workflow/builder_improvements.py +207 -0
- kailash/workflow/input_handling.py +170 -0
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/METADATA +22 -9
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/RECORD +120 -94
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
kailash/mcp_server/utils/__init__.py CHANGED
@@ -10,7 +10,12 @@ This module provides production-ready utilities for MCP servers including:
 
 from .cache import CacheManager, LRUCache, cached_query
 from .config import ConfigManager
-from .formatters import
+from .formatters import (
+    format_response,
+    json_formatter,
+    markdown_formatter,
+    search_formatter,
+)
 from .metrics import MetricsCollector
 
 __all__ = [
kailash/mcp_server/utils/cache.py CHANGED
@@ -7,10 +7,11 @@ Based on patterns from production MCP server implementations.
 
 import asyncio
 import functools
+import json
 import logging
 import threading
 import time
-from typing import Any, Callable, Dict, Optional, Tuple, TypeVar
+from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, TypeVar
 
 logger = logging.getLogger(__name__)
 
@@ -68,8 +69,14 @@ class LRUCache:
             self._hits += 1
             return value
 
-    def set(self, key: str, value: Any) -> None:
-        """Set value in cache, evicting LRU items if necessary.
+    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
+        """Set value in cache, evicting LRU items if necessary.
+
+        Args:
+            key: Cache key
+            value: Value to cache
+            ttl: Time-to-live in seconds (uses instance default if None)
+        """
         with self._lock:
             current_time = time.time()
 
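Note: the per-entry ttl parameter above is new in 0.6.4; previously expiry was fixed per cache instance. A minimal usage sketch, assuming only what this diff shows (the LRUCache(max_size=..., ttl=...) constructor appears verbatim in the get_cache hunk further down; the keys and values here are illustrative):

    from kailash.mcp_server.utils.cache import LRUCache

    cache = LRUCache(max_size=128, ttl=300)  # instance default TTL: 300 s
    cache.set("schema", {"version": 1})      # expires after the 300 s default
    cache.set("token", "abc123", ttl=30)     # per-entry override: expires after 30 s
    assert cache.get("schema") == {"version": 1}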
@@ -120,6 +127,168 @@ class LRUCache:
         }
 
 
+class UnifiedCache:
+    """
+    Unified cache interface that works with both Redis and in-memory LRU cache.
+
+    This provides a consistent interface regardless of the backend.
+    Includes cache stampede prevention using single-flight pattern.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        ttl: int = 300,
+        redis_client=None,
+        redis_prefix: str = "mcp:",
+        lru_cache=None,
+    ):
+        """Initialize unified cache.
+
+        Args:
+            name: Cache name
+            ttl: Default TTL
+            redis_client: Redis client (if using Redis backend)
+            redis_prefix: Redis key prefix
+            lru_cache: LRU cache instance (if using memory backend)
+        """
+        self.name = name
+        self.ttl = ttl
+        self.redis_client = redis_client
+        self.redis_prefix = redis_prefix
+        self.lru_cache = lru_cache
+        self.is_redis = redis_client is not None
+
+        # Single-flight pattern for stampede prevention
+        self._in_flight: Dict[str, asyncio.Future] = {}
+        self._flight_lock = asyncio.Lock()
+
+    def _make_key(self, key: str) -> str:
+        """Make cache key with name prefix."""
+        if self.is_redis:
+            return f"{self.redis_prefix}{self.name}:{key}"
+        return key
+
+    def get(self, key: str):
+        """Get value from cache."""
+        if self.is_redis:
+            # For Redis, we need async operations but this is called synchronously
+            # We'll implement async versions for the server to use
+            return None  # Fallback for now
+        else:
+            return self.lru_cache.get(key)
+
+    def set(self, key: str, value, ttl: Optional[int] = None):
+        """Set value in cache."""
+        if self.is_redis:
+            # For Redis, we need async operations but this is called synchronously
+            # We'll implement async versions for the server to use
+            pass  # Fallback for now
+        else:
+            self.lru_cache.set(key, value, ttl or self.ttl)
+
+    async def aget(self, key: str):
+        """Async get value from cache."""
+        if self.is_redis:
+            try:
+                redis_key = self._make_key(key)
+                value = await self.redis_client.get(redis_key)
+                return json.loads(value) if value else None
+            except Exception as e:
+                logger.error(f"Redis get error: {e}")
+                return None
+        else:
+            return self.lru_cache.get(key)
+
+    async def aset(self, key: str, value, ttl: Optional[int] = None):
+        """Async set value in cache."""
+        if self.is_redis:
+            try:
+                redis_key = self._make_key(key)
+                serialized_value = json.dumps(value)
+                cache_ttl = ttl or self.ttl
+                await self.redis_client.setex(redis_key, cache_ttl, serialized_value)
+                return True
+            except Exception as e:
+                logger.error(f"Redis set error: {e}")
+                return False
+        else:
+            self.lru_cache.set(key, value, ttl or self.ttl)
+            return True
+
+    async def get_or_compute(
+        self,
+        key: str,
+        compute_func: Callable[[], Awaitable[Any]],
+        ttl: Optional[int] = None,
+    ) -> Any:
+        """Get value from cache or compute it if not present (with stampede prevention).
+
+        This method implements single-flight pattern to prevent cache stampede.
+        If multiple requests come in for the same key while it's being computed,
+        only one will actually execute the compute function.
+
+        Args:
+            key: Cache key
+            compute_func: Async function to compute the value if not in cache
+            ttl: TTL for cached value
+
+        Returns:
+            The cached or computed value
+        """
+        # First try to get from cache
+        cached_value = await self.aget(key)
+        if cached_value is not None:
+            return cached_value
+
+        # Check if computation is already in flight
+        async with self._flight_lock:
+            if key in self._in_flight:
+                # Wait for the existing computation
+                logger.debug(f"Cache key {key} already being computed, waiting...")
+                return await self._in_flight[key]
+
+            # Start new computation
+            future = asyncio.Future()
+            self._in_flight[key] = future
+
+        try:
+            # Compute the value
+            logger.debug(f"Computing value for cache key {key}")
+            value = await compute_func()
+
+            # Cache the result
+            await self.aset(key, value, ttl)
+
+            # Notify waiting requests
+            future.set_result(value)
+            return value
+
+        except Exception as e:
+            # Notify waiting requests of the error
+            future.set_exception(e)
+            raise
+        finally:
+            # Clean up in-flight tracking
+            async with self._flight_lock:
+                self._in_flight.pop(key, None)
+
+    def clear(self):
+        """Clear cache."""
+        if self.is_redis:
+            # For async operations, this would need to be implemented separately
+            pass
+        else:
+            self.lru_cache.clear()
+
+    def stats(self):
+        """Get cache statistics."""
+        if self.is_redis:
+            return {"backend": "redis", "name": self.name}
+        else:
+            return self.lru_cache.stats()
+
+
 class CacheManager:
     """
     High-level cache management with multiple caching strategies.
@@ -128,25 +297,52 @@ class CacheManager:
     for different use cases.
     """
 
-    def __init__(
+    def __init__(
+        self,
+        enabled: bool = True,
+        default_ttl: int = 300,
+        backend: str = "memory",
+        config: Optional[Dict[str, Any]] = None,
+    ):
         """
         Initialize cache manager.
 
         Args:
             enabled: Whether caching is enabled
             default_ttl: Default TTL for cache entries
+            backend: Cache backend ("memory" or "redis")
+            config: Backend-specific configuration
         """
         self.enabled = enabled
         self.default_ttl = default_ttl
-        self.
+        self.backend = backend
+        self.config = config or {}
+        self._caches: Dict[str, UnifiedCache] = {}
+
+        # Initialize Redis if specified
+        self._redis = None
+        if backend == "redis" and enabled:
+            self._init_redis()
 
     def get_cache(
         self, name: str, max_size: int = 128, ttl: Optional[int] = None
-    ) ->
+    ) -> UnifiedCache:
         """Get or create a named cache."""
         if name not in self._caches:
             cache_ttl = ttl if ttl is not None else self.default_ttl
-            self.
+            if self.backend == "redis" and self._redis:
+                self._caches[name] = UnifiedCache(
+                    name=name,
+                    ttl=cache_ttl,
+                    redis_client=self._redis,
+                    redis_prefix=self.config.get("prefix", "mcp:"),
+                )
+            else:
+                self._caches[name] = UnifiedCache(
+                    name=name,
+                    ttl=cache_ttl,
+                    lru_cache=LRUCache(max_size=max_size, ttl=cache_ttl),
+                )
         return self._caches[name]
 
     def cached(self, cache_name: str = "default", ttl: Optional[int] = None):
@@ -209,6 +405,53 @@ class CacheManager:
 
         return decorator
 
+    def _init_redis(self):
+        """Initialize Redis connection."""
+        try:
+            import redis.asyncio as redis
+
+            redis_url = self.config.get("redis_url", "redis://localhost:6379")
+            self._redis = redis.from_url(redis_url, decode_responses=True)
+            logger.info(f"Initialized Redis cache backend: {redis_url}")
+        except ImportError:
+            logger.warning("Redis not available. Install with: pip install redis")
+            self.enabled = False
+        except Exception as e:
+            logger.error(f"Failed to initialize Redis: {e}")
+            self.enabled = False
+
+    async def get_redis(self, key: str) -> Optional[Any]:
+        """Get value from Redis cache."""
+        if not self._redis:
+            return None
+        try:
+            value = await self._redis.get(self._make_redis_key(key))
+            return json.loads(value) if value else None
+        except Exception as e:
+            logger.error(f"Redis get error: {e}")
+            return None
+
+    async def set_redis(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
+        """Set value in Redis cache."""
+        if not self._redis:
+            return False
+        try:
+            redis_key = self._make_redis_key(key)
+            serialized_value = json.dumps(value)
+            if ttl:
+                await self._redis.setex(redis_key, ttl, serialized_value)
+            else:
+                await self._redis.set(redis_key, serialized_value)
+            return True
+        except Exception as e:
+            logger.error(f"Redis set error: {e}")
+            return False
+
+    def _make_redis_key(self, key: str) -> str:
+        """Create Redis key with prefix."""
+        prefix = self.config.get("prefix", "mcp:")
+        return f"{prefix}{key}"
+
     def _create_cache_key(self, func_name: str, args: tuple, kwargs: dict) -> str:
         """Create a cache key from function name and arguments."""
         # Convert args and kwargs to string representation
kailash/middleware/auth/auth_manager.py CHANGED
@@ -229,7 +229,7 @@ class MiddlewareAuthManager:
         api_key = f"sk_{secrets.token_urlsafe(32)}"
 
         # Store API key metadata using credential manager
-        result = self.credential_manager.
+        result = self.credential_manager.execute(
             operation="store_credential",
             credential_name=api_key,
             credential_data={
@@ -274,7 +274,7 @@ class MiddlewareAuthManager:
 
         try:
             # Verify using credential manager since rotating credential node doesn't have verify
-            result = self.credential_manager.
+            result = self.credential_manager.execute(
                 operation="get_credential", credential_name=api_key
             )
 
@@ -309,7 +309,7 @@ class MiddlewareAuthManager:
         Returns:
             True if permission is granted
         """
-        result = self.permission_checker.
+        result = self.permission_checker.execute(
            user_context={"user_id": user_id},
            permission=permission,
            resource=resource or {},
kailash/middleware/communication/api_gateway.py CHANGED
@@ -15,14 +15,7 @@ from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Union
 from urllib.parse import parse_qs
 
-from fastapi import (
-    Depends,
-    FastAPI,
-    HTTPException,
-    Request,
-    WebSocket,
-    WebSocketDisconnect,
-)
+from fastapi import Depends, FastAPI, HTTPException, Request, WebSocket, WebSocketDisconnect
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse, StreamingResponse
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
@@ -819,7 +812,7 @@ def create_gateway(
     >>> # Or use default auth
     >>> gateway = create_gateway(title="My App")
     >>>
-    >>> gateway.
+    >>> gateway.execute(port=8000)
     """
     # Pass auth_manager to APIGateway
     if auth_manager is not None:
kailash/middleware/communication/realtime.py CHANGED
@@ -378,7 +378,7 @@ class WebhookManager:
 
         try:
             # Use HTTPRequestNode for delivery (it handles retries internally)
-            response = self.http_node.
+            response = self.http_node.execute(
                 url=url, method="POST", json_data=payload, headers=headers
             )
 
kailash/middleware/mcp/enhanced_server.py CHANGED
@@ -24,7 +24,7 @@ from kailash.workflow.builder import WorkflowBuilder
 
 # Import existing Kailash MCP components
 try:
-    from kailash.mcp_server import MCPServer
+    from kailash.mcp_server import MCPServer
     from kailash.mcp_server.utils import CacheManager, ConfigManager, MetricsCollector
 
     _KAILASH_MCP_AVAILABLE = True
kailash/nodes/__init__.py CHANGED
kailash/nodes/admin/audit_log.py CHANGED
@@ -169,7 +169,7 @@ class EnterpriseAuditLogNode(Node):
     ...         "ip_address": "192.168.1.100"
     ...     }
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> event_id = result["event"]["event_id"]
 
    >>> # Query security events
@@ -184,7 +184,7 @@ class EnterpriseAuditLogNode(Node):
     ...     },
     ...     pagination={"page": 1, "size": 50}
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> events = result["events"]
 
    >>> # Generate compliance report
@@ -196,7 +196,7 @@ class EnterpriseAuditLogNode(Node):
     ...     },
     ...     export_format="json"
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> report = result["report"]
    """
 
@@ -436,7 +436,7 @@ class EnterpriseAuditLogNode(Node):
             }
         )
 
-        db_result = self._db_node.
+        db_result = self._db_node.execute()
 
         return {
             "result": {
@@ -571,14 +571,14 @@ class EnterpriseAuditLogNode(Node):
         self._db_node.config.update(
             {"query": count_query, "params": params, "fetch_mode": "one"}
         )
-        count_result = self._db_node.
+        count_result = self._db_node.execute()
         total_count = count_result["result"]["data"]["total"]
 
         # Execute data query
         self._db_node.config.update(
             {"query": data_query, "params": params, "fetch_mode": "all"}
         )
-        data_result = self._db_node.
+        data_result = self._db_node.execute()
         logs = data_result["result"]["data"]
 
         # Calculate pagination info
kailash/nodes/admin/permission_check.py CHANGED
@@ -165,7 +165,7 @@ class PermissionCheckNode(Node):
     ...     cache_ttl=300,
     ...     explain=True
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> allowed = result["check"]["allowed"]
    >>> explanation = result["explanation"]
 
@@ -177,7 +177,7 @@ class PermissionCheckNode(Node):
     ...     permissions=["read", "write", "delete"],
     ...     cache_level="full"
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> results = result["batch_results"]
 
    >>> # Bulk user permission check
@@ -187,7 +187,7 @@ class PermissionCheckNode(Node):
     ...     resource_id="workflow_execute",
     ...     permission="execute"
     ... )
-    >>> result = node.
+    >>> result = node.execute()
    >>> access_matrix = result["access_matrix"]
    """
 
@@ -738,7 +738,7 @@ class PermissionCheckNode(Node):
 
         try:
             # Get user data - strict tenant check
-            user_result = self._db_node.
+            user_result = self._db_node.execute(
                 query=user_query, parameters=[user_id, tenant_id], result_format="dict"
             )
 
@@ -758,7 +758,7 @@ class PermissionCheckNode(Node):
             return None
 
         # Get assigned roles - also with strict tenant isolation
-        roles_result = self._db_node.
+        roles_result = self._db_node.execute(
            query=roles_query, parameters=[user_id, tenant_id], result_format="dict"
        )
 
@@ -888,7 +888,7 @@ class PermissionCheckNode(Node):
         """
 
         try:
-            result = self._db_node.
+            result = self._db_node.execute(
                 query=query,
                 parameters=[role_id, tenant_id, tenant_id, tenant_id],
                 result_format="dict",
@@ -1246,7 +1246,7 @@ class PermissionCheckNode(Node):
         """
 
        try:
-            result = self._db_node.
+            result = self._db_node.execute(
                 query=query, parameters=[role_id, tenant_id], result_format="dict"
             )
             role_rows = result.get("data", [])
@@ -1709,7 +1709,7 @@ class PermissionCheckNode(Node):
         ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
         """
 
-        self._db_node.
+        self._db_node.execute(
             query=audit_query,
             parameters=[
                 user_id,