kailash 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/access_control.py +64 -46
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/api/workflow_api.py +34 -3
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +57 -18
- kailash/middleware/communication/api_gateway.py +23 -3
- kailash/middleware/communication/realtime.py +83 -0
- kailash/middleware/core/agent_ui.py +1 -1
- kailash/middleware/gateway/storage_backends.py +393 -0
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/cli/__init__.py +5 -0
- kailash/nexus/cli/__main__.py +6 -0
- kailash/nexus/cli/main.py +176 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +8 -5
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base.py +29 -5
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/code/python.py +50 -6
- kailash/nodes/data/async_sql.py +90 -0
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/security/behavior_analysis.py +414 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/access_controlled.py +9 -7
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/runtime/runner.py +6 -4
- kailash/runtime/testing.py +1 -1
- kailash/security.py +22 -3
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +522 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +293 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +382 -15
- kailash/workflow/cyclic_runner.py +102 -10
- kailash/workflow/validation.py +144 -8
- kailash/workflow/visualization.py +99 -27
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
- kailash/workflow/builder_improvements.py +0 -207
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
kailash/nodes/data/query_cache.py
@@ -0,0 +1,512 @@
+"""Query Result Caching for Database Nodes.
+
+This module provides Redis-based caching for database query results
+with TTL management and cache invalidation strategies.
+
+Key Features:
+- Redis-based query result caching
+- Cache key generation from queries
+- TTL management
+- Cache invalidation strategies
+- Support for different cache patterns
+"""
+
+import hashlib
+import json
+import logging
+from datetime import datetime, timedelta
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+import redis
+from redis.exceptions import RedisError
+
+from kailash.sdk_exceptions import NodeExecutionError
+
+logger = logging.getLogger(__name__)
+
+
+class CachePattern(Enum):
+    """Cache patterns for different use cases."""
+
+    WRITE_THROUGH = "write_through"  # Update cache on write
+    WRITE_BEHIND = "write_behind"  # Async cache updates
+    CACHE_ASIDE = "cache_aside"  # Manual cache management
+    REFRESH_AHEAD = "refresh_ahead"  # Proactive cache refresh
+
+
+class CacheInvalidationStrategy(Enum):
+    """Cache invalidation strategies."""
+
+    TTL = "ttl"  # Time-based expiration
+    MANUAL = "manual"  # Manual invalidation
+    WRITE_THROUGH = "write_through"  # Invalidate on write
+    PATTERN_BASED = "pattern_based"  # Invalidate by pattern
+    EVENT_BASED = "event_based"  # Invalidate on events
+
+
+class QueryCacheKey:
+    """Generates cache keys for database queries."""
+
+    def __init__(self, prefix: str = "kailash:query"):
+        """Initialize cache key generator.
+
+        Args:
+            prefix: Prefix for cache keys
+        """
+        self.prefix = prefix
+
+    def generate(
+        self, query: str, parameters: List[Any], tenant_id: Optional[str] = None
+    ) -> str:
+        """Generate cache key for a query.
+
+        Args:
+            query: SQL query string
+            parameters: Query parameters
+            tenant_id: Optional tenant ID for multi-tenant caching
+
+        Returns:
+            Cache key string
+        """
+        # Create a consistent representation of the query and parameters
+        query_data = {
+            "query": query.strip(),
+            "parameters": self._normalize_parameters(parameters),
+            "tenant_id": tenant_id,
+        }
+
+        # Create hash of the query data
+        query_json = json.dumps(query_data, sort_keys=True)
+        query_hash = hashlib.sha256(query_json.encode()).hexdigest()[:16]
+
+        # Build cache key
+        key_parts = [self.prefix]
+        if tenant_id:
+            key_parts.append(f"tenant:{tenant_id}")
+        key_parts.append(query_hash)
+
+        return ":".join(key_parts)
+
+    def generate_pattern(self, table_name: str, tenant_id: Optional[str] = None) -> str:
+        """Generate cache key pattern for invalidation.
+
+        Args:
+            table_name: Database table name
+            tenant_id: Optional tenant ID
+
+        Returns:
+            Cache key pattern
+        """
+        pattern_parts = [self.prefix]
+        if tenant_id:
+            pattern_parts.append(f"tenant:{tenant_id}")
+        pattern_parts.append(f"table:{table_name}")
+        pattern_parts.append("*")
+
+        return ":".join(pattern_parts)
+
+    def _normalize_parameters(self, parameters: List[Any]) -> List[Any]:
+        """Normalize parameters for consistent hashing."""
+        normalized = []
+        for param in parameters:
+            if isinstance(param, datetime):
+                normalized.append(param.isoformat())
+            elif isinstance(param, (dict, list)):
+                normalized.append(json.dumps(param, sort_keys=True))
+            else:
+                normalized.append(param)
+        return normalized
+
+
+class QueryCache:
+    """Redis-based query result cache."""
+
+    def __init__(
+        self,
+        redis_host: str = "localhost",
+        redis_port: int = 6379,
+        redis_db: int = 0,
+        redis_password: Optional[str] = None,
+        default_ttl: int = 3600,
+        cache_pattern: CachePattern = CachePattern.CACHE_ASIDE,
+        invalidation_strategy: CacheInvalidationStrategy = CacheInvalidationStrategy.TTL,
+        key_prefix: str = "kailash:query",
+    ):
+        """Initialize query cache.
+
+        Args:
+            redis_host: Redis server host
+            redis_port: Redis server port
+            redis_db: Redis database number
+            redis_password: Redis password (optional)
+            default_ttl: Default TTL in seconds
+            cache_pattern: Cache pattern to use
+            invalidation_strategy: Cache invalidation strategy
+            key_prefix: Prefix for cache keys
+        """
+        self.redis_host = redis_host
+        self.redis_port = redis_port
+        self.redis_db = redis_db
+        self.redis_password = redis_password
+        self.default_ttl = default_ttl
+        self.cache_pattern = cache_pattern
+        self.invalidation_strategy = invalidation_strategy
+
+        self.key_generator = QueryCacheKey(key_prefix)
+        self._redis: Optional[redis.Redis] = None
+
+    def _get_redis(self) -> redis.Redis:
+        """Get Redis connection."""
+        if self._redis is None:
+            self._redis = redis.Redis(
+                host=self.redis_host,
+                port=self.redis_port,
+                db=self.redis_db,
+                password=self.redis_password,
+                decode_responses=True,
+            )
+        return self._redis
+
+    def get(
+        self, query: str, parameters: List[Any], tenant_id: Optional[str] = None
+    ) -> Optional[Dict[str, Any]]:
+        """Get cached query result.
+
+        Args:
+            query: SQL query string
+            parameters: Query parameters
+            tenant_id: Optional tenant ID
+
+        Returns:
+            Cached result or None if not found
+        """
+        try:
+            redis_client = self._get_redis()
+            cache_key = self.key_generator.generate(query, parameters, tenant_id)
+
+            cached_data = redis_client.get(cache_key)
+            if cached_data:
+                logger.debug(f"Cache hit for key: {cache_key}")
+                return json.loads(cached_data)
+            else:
+                logger.debug(f"Cache miss for key: {cache_key}")
+                return None
+
+        except (RedisError, json.JSONDecodeError) as e:
+            logger.warning(f"Cache get error: {e}")
+            return None
+
+    def set(
+        self,
+        query: str,
+        parameters: List[Any],
+        result: Dict[str, Any],
+        tenant_id: Optional[str] = None,
+        ttl: Optional[int] = None,
+    ) -> bool:
+        """Set cached query result.
+
+        Args:
+            query: SQL query string
+            parameters: Query parameters
+            result: Query result to cache
+            tenant_id: Optional tenant ID
+            ttl: TTL in seconds (uses default if not specified)
+
+        Returns:
+            True if cached successfully, False otherwise
+        """
+        try:
+            redis_client = self._get_redis()
+            cache_key = self.key_generator.generate(query, parameters, tenant_id)
+
+            # Prepare cache data
+            cache_data = {
+                "result": result,
+                "cached_at": datetime.now().isoformat(),
+                "query_hash": hashlib.sha256(query.encode()).hexdigest()[:8],
+            }
+
+            # Set with TTL
+            actual_ttl = ttl or self.default_ttl
+            success = redis_client.setex(cache_key, actual_ttl, json.dumps(cache_data))
+
+            if success:
+                logger.debug(f"Cache set for key: {cache_key} (TTL: {actual_ttl}s)")
+
+                # Add to table-based index for pattern invalidation
+                if (
+                    self.invalidation_strategy
+                    == CacheInvalidationStrategy.PATTERN_BASED
+                ):
+                    self._add_to_table_index(query, cache_key, tenant_id)
+
+            return success
+
+        except (RedisError, TypeError, ValueError) as e:
+            logger.warning(f"Cache set error: {e}")
+            return False
+
+    def invalidate(
+        self, query: str, parameters: List[Any], tenant_id: Optional[str] = None
+    ) -> bool:
+        """Invalidate specific cached query.
+
+        Args:
+            query: SQL query string
+            parameters: Query parameters
+            tenant_id: Optional tenant ID
+
+        Returns:
+            True if invalidated successfully, False otherwise
+        """
+        try:
+            redis_client = self._get_redis()
+            cache_key = self.key_generator.generate(query, parameters, tenant_id)
+
+            deleted = redis_client.delete(cache_key)
+            if deleted:
+                logger.debug(f"Cache invalidated for key: {cache_key}")
+
+            return deleted > 0
+
+        except RedisError as e:
+            logger.warning(f"Cache invalidation error: {e}")
+            return False
+
+    def invalidate_table(self, table_name: str, tenant_id: Optional[str] = None) -> int:
+        """Invalidate all cached queries for a table.
+
+        Args:
+            table_name: Database table name
+            tenant_id: Optional tenant ID
+
+        Returns:
+            Number of keys invalidated
+        """
+        try:
+            redis_client = self._get_redis()
+
+            if self.invalidation_strategy == CacheInvalidationStrategy.PATTERN_BASED:
+                # Use index-based invalidation for pattern-based strategy
+                index_key = f"{self.key_generator.prefix}:index:table:{table_name}"
+                if tenant_id:
+                    index_key += f":tenant:{tenant_id}"
+
+                keys = redis_client.smembers(index_key)
+                if keys:
+                    # Delete the actual cache keys
+                    deleted = redis_client.delete(*keys)
+                    # Also delete the index
+                    redis_client.delete(index_key)
+                    logger.debug(
+                        f"Cache invalidated {deleted} keys for table: {table_name}"
+                    )
+                    return deleted
+                else:
+                    return 0
+            else:
+                # Use pattern-based invalidation for other strategies
+                pattern = self.key_generator.generate_pattern(table_name, tenant_id)
+                keys = redis_client.keys(pattern)
+                if keys:
+                    deleted = redis_client.delete(*keys)
+                    logger.debug(
+                        f"Cache invalidated {deleted} keys for table: {table_name}"
+                    )
+                    return deleted
+                else:
+                    return 0
+
+        except RedisError as e:
+            logger.warning(f"Cache table invalidation error: {e}")
+            return 0
+
+    def clear_all(self, tenant_id: Optional[str] = None) -> int:
+        """Clear all cached queries for a tenant or globally.
+
+        Args:
+            tenant_id: Optional tenant ID (clears all if None)
+
+        Returns:
+            Number of keys cleared
+        """
+        try:
+            redis_client = self._get_redis()
+
+            if tenant_id:
+                pattern = f"{self.key_generator.prefix}:tenant:{tenant_id}:*"
+            else:
+                pattern = f"{self.key_generator.prefix}:*"
+
+            keys = redis_client.keys(pattern)
+            if keys:
+                deleted = redis_client.delete(*keys)
+                logger.info(f"Cache cleared {deleted} keys for tenant: {tenant_id}")
+                return deleted
+            else:
+                return 0
+
+        except RedisError as e:
+            logger.warning(f"Cache clear error: {e}")
+            return 0
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get cache statistics.
+
+        Returns:
+            Dictionary with cache statistics
+        """
+        try:
+            redis_client = self._get_redis()
+
+            # Get Redis info
+            info = redis_client.info()
+
+            # Count our keys
+            pattern = f"{self.key_generator.prefix}:*"
+            keys = redis_client.keys(pattern)
+
+            stats = {
+                "total_keys": len(keys),
+                "redis_memory_used": info.get("used_memory_human", "unknown"),
+                "redis_connected_clients": info.get("connected_clients", 0),
+                "redis_keyspace_hits": info.get("keyspace_hits", 0),
+                "redis_keyspace_misses": info.get("keyspace_misses", 0),
+                "cache_pattern": self.cache_pattern.value,
+                "invalidation_strategy": self.invalidation_strategy.value,
+                "default_ttl": self.default_ttl,
+            }
+
+            # Calculate hit rate
+            hits = stats["redis_keyspace_hits"]
+            misses = stats["redis_keyspace_misses"]
+            if hits + misses > 0:
+                stats["hit_rate"] = hits / (hits + misses)
+            else:
+                stats["hit_rate"] = 0.0
+
+            return stats
+
+        except RedisError as e:
+            logger.warning(f"Cache stats error: {e}")
+            return {"error": str(e), "total_keys": 0, "hit_rate": 0.0}
+
+    def health_check(self) -> Dict[str, Any]:
+        """Check cache health.
+
+        Returns:
+            Dictionary with health status
+        """
+        try:
+            redis_client = self._get_redis()
+
+            # Try to ping Redis
+            pong = redis_client.ping()
+            if pong:
+                # Test basic operations
+                test_key = f"{self.key_generator.prefix}:health_check"
+                redis_client.setex(test_key, 10, "test")
+                test_value = redis_client.get(test_key)
+                redis_client.delete(test_key)
+
+                return {
+                    "status": "healthy",
+                    "redis_ping": True,
+                    "read_write_test": test_value == "test",
+                    "connection": "active",
+                }
+            else:
+                return {
+                    "status": "unhealthy",
+                    "redis_ping": False,
+                    "error": "Redis ping failed",
+                }
+
+        except RedisError as e:
+            return {"status": "unhealthy", "redis_ping": False, "error": str(e)}
+
+    def _add_to_table_index(
+        self, query: str, cache_key: str, tenant_id: Optional[str] = None
+    ) -> None:
+        """Add cache key to table-based index for pattern invalidation."""
+        try:
+            # Extract table name from query (simple heuristic)
+            table_name = self._extract_table_name(query)
+            if table_name:
+                redis_client = self._get_redis()
+                index_key = f"{self.key_generator.prefix}:index:table:{table_name}"
+                if tenant_id:
+                    index_key += f":tenant:{tenant_id}"
+
+                redis_client.sadd(index_key, cache_key)
+                redis_client.expire(
+                    index_key, self.default_ttl * 2
+                )  # Index lives longer
+
+        except Exception as e:
+            logger.warning(f"Failed to add to table index: {e}")
+
+    def _extract_table_name(self, query: str) -> Optional[str]:
+        """Extract table name from SQL query (simple heuristic)."""
+        try:
+            query_lower = query.lower().strip()
+
+            # Handle SELECT queries
+            if query_lower.startswith("select"):
+                from_index = query_lower.find("from")
+                if from_index != -1:
+                    from_part = query_lower[from_index + 4 :].strip()
+                    table_name = from_part.split()[0].strip()
+                    return table_name
+
+            # Handle INSERT queries
+            elif query_lower.startswith("insert into"):
+                into_part = query_lower[11:].strip()
+                table_name = into_part.split()[0].strip()
+                return table_name
+
+            # Handle UPDATE queries
+            elif query_lower.startswith("update"):
+                update_part = query_lower[6:].strip()
+                table_name = update_part.split()[0].strip()
+                return table_name
+
+            # Handle DELETE queries
+            elif query_lower.startswith("delete from"):
+                from_part = query_lower[11:].strip()
+                table_name = from_part.split()[0].strip()
+                return table_name
+
+            return None
+
+        except Exception:
+            return None
+
+
+# Factory function for creating query cache
+def create_query_cache(config: Dict[str, Any] = None) -> QueryCache:
+    """Create a query cache instance with configuration.
+
+    Args:
+        config: Configuration dictionary
+
+    Returns:
+        QueryCache instance
+    """
+    if config is None:
+        config = {}
+
+    return QueryCache(
+        redis_host=config.get("redis_host", "localhost"),
+        redis_port=config.get("redis_port", 6379),
+        redis_db=config.get("redis_db", 0),
+        redis_password=config.get("redis_password"),
+        default_ttl=config.get("default_ttl", 3600),
+        cache_pattern=CachePattern(config.get("cache_pattern", "cache_aside")),
+        invalidation_strategy=CacheInvalidationStrategy(
+            config.get("invalidation_strategy", "ttl")
+        ),
+        key_prefix=config.get("key_prefix", "kailash:query"),
+    )
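For orientation, here is a minimal usage sketch of the QueryCache API introduced above. The Redis location, the acme tenant ID, and the users query are illustrative assumptions, not part of the diff:

# Hypothetical usage sketch; assumes a Redis server on localhost:6379.
from kailash.nodes.data.query_cache import create_query_cache

cache = create_query_cache(
    {"default_ttl": 600, "invalidation_strategy": "pattern_based"}
)

query = "SELECT * FROM users WHERE id = $1"
params = [42]

# get() returns the stored wrapper dict ("result", "cached_at",
# "query_hash") or None on a miss.
cached = cache.get(query, params, tenant_id="acme")
if cached is None:
    rows = {"rows": [{"id": 42, "name": "Ada"}]}  # stand-in for a real DB call
    cache.set(query, params, rows, tenant_id="acme")

# With the pattern_based strategy, set() also indexed the key under its
# table name, so a write to users can drop every cached users query at once.
cache.invalidate_table("users", tenant_id="acme")

print(cache.get_stats()["hit_rate"])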
kailash/nodes/monitoring/__init__.py
@@ -1,15 +1,25 @@
 """Monitoring nodes for connection and workflow visualization."""

 from .connection_dashboard import ConnectionDashboardNode
+from .deadlock_detector import DeadlockDetectorNode
 from .health_check import HealthCheckNode
 from .log_processor import LogProcessorNode
 from .metrics_collector import MetricsCollectorNode
+from .performance_anomaly import PerformanceAnomalyNode
 from .performance_benchmark import PerformanceBenchmarkNode
+from .race_condition_detector import RaceConditionDetectorNode
+from .transaction_metrics import TransactionMetricsNode
+from .transaction_monitor import TransactionMonitorNode

 __all__ = [
     "ConnectionDashboardNode",
+    "DeadlockDetectorNode",
     "HealthCheckNode",
     "LogProcessorNode",
     "MetricsCollectorNode",
+    "PerformanceAnomalyNode",
     "PerformanceBenchmarkNode",
+    "RaceConditionDetectorNode",
+    "TransactionMetricsNode",
+    "TransactionMonitorNode",
 ]
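After this change, the five new monitoring nodes are importable directly from the subpackage; the names below are taken verbatim from the diff:

# All five nodes newly re-exported by kailash.nodes.monitoring in 0.8.0.
from kailash.nodes.monitoring import (
    DeadlockDetectorNode,
    PerformanceAnomalyNode,
    RaceConditionDetectorNode,
    TransactionMetricsNode,
    TransactionMonitorNode,
)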