kailash 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131) hide show
  1. kailash/__init__.py +3 -3
  2. kailash/api/custom_nodes_secure.py +3 -3
  3. kailash/api/gateway.py +1 -1
  4. kailash/api/studio.py +2 -3
  5. kailash/api/workflow_api.py +3 -4
  6. kailash/core/resilience/bulkhead.py +460 -0
  7. kailash/core/resilience/circuit_breaker.py +92 -10
  8. kailash/edge/discovery.py +86 -0
  9. kailash/mcp_server/__init__.py +334 -0
  10. kailash/mcp_server/advanced_features.py +1022 -0
  11. kailash/{mcp → mcp_server}/ai_registry_server.py +29 -4
  12. kailash/mcp_server/auth.py +789 -0
  13. kailash/mcp_server/client.py +712 -0
  14. kailash/mcp_server/discovery.py +1593 -0
  15. kailash/mcp_server/errors.py +673 -0
  16. kailash/mcp_server/oauth.py +1727 -0
  17. kailash/mcp_server/protocol.py +1126 -0
  18. kailash/mcp_server/registry_integration.py +587 -0
  19. kailash/mcp_server/server.py +1747 -0
  20. kailash/{mcp → mcp_server}/servers/ai_registry.py +2 -2
  21. kailash/mcp_server/transports.py +1169 -0
  22. kailash/mcp_server/utils/cache.py +510 -0
  23. kailash/middleware/auth/auth_manager.py +3 -3
  24. kailash/middleware/communication/api_gateway.py +2 -9
  25. kailash/middleware/communication/realtime.py +1 -1
  26. kailash/middleware/mcp/client_integration.py +1 -1
  27. kailash/middleware/mcp/enhanced_server.py +2 -2
  28. kailash/nodes/__init__.py +2 -0
  29. kailash/nodes/admin/audit_log.py +6 -6
  30. kailash/nodes/admin/permission_check.py +8 -8
  31. kailash/nodes/admin/role_management.py +32 -28
  32. kailash/nodes/admin/schema.sql +6 -1
  33. kailash/nodes/admin/schema_manager.py +13 -13
  34. kailash/nodes/admin/security_event.py +16 -20
  35. kailash/nodes/admin/tenant_isolation.py +3 -3
  36. kailash/nodes/admin/transaction_utils.py +3 -3
  37. kailash/nodes/admin/user_management.py +21 -22
  38. kailash/nodes/ai/a2a.py +11 -11
  39. kailash/nodes/ai/ai_providers.py +9 -12
  40. kailash/nodes/ai/embedding_generator.py +13 -14
  41. kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
  42. kailash/nodes/ai/iterative_llm_agent.py +3 -3
  43. kailash/nodes/ai/llm_agent.py +213 -36
  44. kailash/nodes/ai/self_organizing.py +2 -2
  45. kailash/nodes/alerts/discord.py +4 -4
  46. kailash/nodes/api/graphql.py +6 -6
  47. kailash/nodes/api/http.py +12 -17
  48. kailash/nodes/api/rate_limiting.py +4 -4
  49. kailash/nodes/api/rest.py +15 -15
  50. kailash/nodes/auth/mfa.py +3 -4
  51. kailash/nodes/auth/risk_assessment.py +2 -2
  52. kailash/nodes/auth/session_management.py +5 -5
  53. kailash/nodes/auth/sso.py +143 -0
  54. kailash/nodes/base.py +6 -2
  55. kailash/nodes/base_async.py +16 -2
  56. kailash/nodes/base_with_acl.py +2 -2
  57. kailash/nodes/cache/__init__.py +9 -0
  58. kailash/nodes/cache/cache.py +1172 -0
  59. kailash/nodes/cache/cache_invalidation.py +870 -0
  60. kailash/nodes/cache/redis_pool_manager.py +595 -0
  61. kailash/nodes/code/async_python.py +2 -1
  62. kailash/nodes/code/python.py +196 -35
  63. kailash/nodes/compliance/data_retention.py +6 -6
  64. kailash/nodes/compliance/gdpr.py +5 -5
  65. kailash/nodes/data/__init__.py +10 -0
  66. kailash/nodes/data/optimistic_locking.py +906 -0
  67. kailash/nodes/data/readers.py +8 -8
  68. kailash/nodes/data/redis.py +349 -0
  69. kailash/nodes/data/sql.py +314 -3
  70. kailash/nodes/data/streaming.py +21 -0
  71. kailash/nodes/enterprise/__init__.py +8 -0
  72. kailash/nodes/enterprise/audit_logger.py +285 -0
  73. kailash/nodes/enterprise/batch_processor.py +22 -3
  74. kailash/nodes/enterprise/data_lineage.py +1 -1
  75. kailash/nodes/enterprise/mcp_executor.py +205 -0
  76. kailash/nodes/enterprise/service_discovery.py +150 -0
  77. kailash/nodes/enterprise/tenant_assignment.py +108 -0
  78. kailash/nodes/logic/async_operations.py +2 -2
  79. kailash/nodes/logic/convergence.py +1 -1
  80. kailash/nodes/logic/operations.py +1 -1
  81. kailash/nodes/monitoring/__init__.py +11 -1
  82. kailash/nodes/monitoring/health_check.py +456 -0
  83. kailash/nodes/monitoring/log_processor.py +817 -0
  84. kailash/nodes/monitoring/metrics_collector.py +627 -0
  85. kailash/nodes/monitoring/performance_benchmark.py +137 -11
  86. kailash/nodes/rag/advanced.py +7 -7
  87. kailash/nodes/rag/agentic.py +49 -2
  88. kailash/nodes/rag/conversational.py +3 -3
  89. kailash/nodes/rag/evaluation.py +3 -3
  90. kailash/nodes/rag/federated.py +3 -3
  91. kailash/nodes/rag/graph.py +3 -3
  92. kailash/nodes/rag/multimodal.py +3 -3
  93. kailash/nodes/rag/optimized.py +5 -5
  94. kailash/nodes/rag/privacy.py +3 -3
  95. kailash/nodes/rag/query_processing.py +6 -6
  96. kailash/nodes/rag/realtime.py +1 -1
  97. kailash/nodes/rag/registry.py +2 -6
  98. kailash/nodes/rag/router.py +1 -1
  99. kailash/nodes/rag/similarity.py +7 -7
  100. kailash/nodes/rag/strategies.py +4 -4
  101. kailash/nodes/security/abac_evaluator.py +6 -6
  102. kailash/nodes/security/behavior_analysis.py +5 -6
  103. kailash/nodes/security/credential_manager.py +1 -1
  104. kailash/nodes/security/rotating_credentials.py +11 -11
  105. kailash/nodes/security/threat_detection.py +8 -8
  106. kailash/nodes/testing/credential_testing.py +2 -2
  107. kailash/nodes/transform/processors.py +5 -5
  108. kailash/runtime/local.py +162 -14
  109. kailash/runtime/parameter_injection.py +425 -0
  110. kailash/runtime/parameter_injector.py +657 -0
  111. kailash/runtime/testing.py +2 -2
  112. kailash/testing/fixtures.py +2 -2
  113. kailash/workflow/builder.py +99 -18
  114. kailash/workflow/builder_improvements.py +207 -0
  115. kailash/workflow/input_handling.py +170 -0
  116. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/METADATA +21 -8
  117. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/RECORD +126 -101
  118. kailash/mcp/__init__.py +0 -53
  119. kailash/mcp/client.py +0 -445
  120. kailash/mcp/server.py +0 -292
  121. kailash/mcp/server_enhanced.py +0 -449
  122. kailash/mcp/utils/cache.py +0 -267
  123. /kailash/{mcp → mcp_server}/client_new.py +0 -0
  124. /kailash/{mcp → mcp_server}/utils/__init__.py +0 -0
  125. /kailash/{mcp → mcp_server}/utils/config.py +0 -0
  126. /kailash/{mcp → mcp_server}/utils/formatters.py +0 -0
  127. /kailash/{mcp → mcp_server}/utils/metrics.py +0 -0
  128. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
  129. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
  130. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
  131. {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,870 @@
1
+ """Cache invalidation node for intelligent cache management.
2
+
3
+ This module provides advanced cache invalidation strategies including
4
+ pattern-based invalidation, cascade invalidation, event-driven clearing,
5
+ and tag-based invalidation for complex cache hierarchies.
6
+ """
7
+
8
+ import asyncio
9
+ import fnmatch
10
+ import re
11
+ import time
12
+ from datetime import UTC, datetime, timedelta
13
+ from enum import Enum
14
+ from typing import Any, Dict, List, Optional, Set, Union
15
+
16
+ from kailash.nodes.base import NodeParameter, register_node
17
+ from kailash.nodes.base_async import AsyncNode
18
+ from kailash.sdk_exceptions import NodeExecutionError
19
+
20
+ try:
21
+ import redis.asyncio as redis
22
+
23
+ REDIS_AVAILABLE = True
24
+ except ImportError:
25
+ REDIS_AVAILABLE = False
26
+
27
+
28
class InvalidationStrategy(Enum):
    """How a cache entry should be removed or retired."""

    IMMEDIATE = "immediate"      # delete the entry right away
    LAZY = "lazy"                # mark the entry and defer the delete
    TTL_REFRESH = "ttl_refresh"  # shorten the TTL instead of deleting
    CASCADE = "cascade"          # also invalidate dependent entries
    TAG_BASED = "tag_based"      # invalidate everything sharing a tag
36
+
37
+
38
class InvalidationScope(Enum):
    """Which set of cache keys an invalidation request targets."""

    SINGLE = "single"          # one explicit key (or explicit key list)
    PATTERN = "pattern"        # every key matching a glob pattern
    TAG = "tag"                # every key registered under given tags
    DEPENDENCY = "dependency"  # keys reachable through the dependency graph
    TIME_BASED = "time_based"  # keys older than a supplied age
46
+
47
+
48
class EventType(Enum):
    """Data-change events that can trigger an invalidation."""

    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    ACCESS = "access"
    EXPIRE = "expire"
56
+
57
+
58
+ @register_node()
59
+ class CacheInvalidationNode(AsyncNode):
60
+ """Node for intelligent cache invalidation and management.
61
+
62
+ This node provides comprehensive cache invalidation capabilities including:
63
+ - Pattern-based invalidation with wildcard and regex support
64
+ - Tag-based cache invalidation for complex hierarchies
65
+ - Cascade invalidation for dependent cache entries
66
+ - Event-driven invalidation based on data changes
67
+ - Time-based invalidation strategies
68
+ - Dependency tracking and management
69
+ - Statistics and monitoring for invalidation operations
70
+ - Support for multiple cache backends (Redis, memory, hybrid)
71
+
72
+ Design Purpose:
73
+ - Maintain cache consistency across complex applications
74
+ - Provide flexible invalidation strategies
75
+ - Support real-time and batch invalidation operations
76
+ - Enable efficient cache management for microservices
77
+
78
+ Examples:
79
+ >>> # Pattern-based invalidation
80
+ >>> invalidator = CacheInvalidationNode()
81
+ >>> result = await invalidator.execute(
82
+ ... strategy="immediate",
83
+ ... scope="pattern",
84
+ ... pattern="user:*:profile",
85
+ ... reason="User profile updated"
86
+ ... )
87
+
88
+ >>> # Tag-based invalidation
89
+ >>> result = await invalidator.execute(
90
+ ... strategy="cascade",
91
+ ... scope="tag",
92
+ ... tags=["user:123", "profile", "session"],
93
+ ... cascade_patterns=["session:*", "cache:user:123:*"]
94
+ ... )
95
+
96
+ >>> # Event-driven invalidation
97
+ >>> result = await invalidator.execute(
98
+ ... strategy="immediate",
99
+ ... scope="dependency",
100
+ ... event_type="update",
101
+ ... source_key="user:123",
102
+ ... dependencies=["user:123:profile", "user:123:preferences"]
103
+ ... )
104
+ """
105
+
106
def __init__(self, **kwargs):
    """Set up the in-memory bookkeeping used to drive invalidations."""
    super().__init__(**kwargs)

    # Lazily-created Redis connection; see _ensure_backend.
    self._redis_client = None
    # Local cache contents plus the metadata that invalidation consults.
    self._memory_cache = {}
    self._tag_registry = {}      # tag -> set of keys carrying that tag
    self._dependency_graph = {}  # key -> set of keys that depend on it
    self._invalidation_log = []
    # Running counters surfaced in every result's "stats" field.
    self._stats = dict.fromkeys(
        (
            "invalidations",
            "cascade_invalidations",
            "pattern_matches",
            "tag_matches",
            "dependency_cascades",
        ),
        0,
    )
    self.logger.info(f"Initialized CacheInvalidationNode: {self.id}")
122
+
123
def get_parameters(self) -> Dict[str, NodeParameter]:
    """Declare the input schema for cache-invalidation requests.

    Only ``strategy`` and ``scope`` are mandatory; the remaining inputs
    feed whichever scope was selected (``pattern`` for pattern scope,
    ``tags`` for tag scope, and so on).
    """

    def req(name: str, type_, description: str) -> NodeParameter:
        # Factory for the two required parameters.
        return NodeParameter(
            name=name, type=type_, required=True, description=description
        )

    def opt(name: str, type_, description: str, **extra) -> NodeParameter:
        # Factory for optional parameters; keeps the schema table terse.
        return NodeParameter(
            name=name, type=type_, required=False, description=description, **extra
        )

    return {
        "strategy": req(
            "strategy",
            str,
            "Invalidation strategy (immediate, lazy, ttl_refresh, cascade, tag_based)",
        ),
        "scope": req(
            "scope",
            str,
            "Invalidation scope (single, pattern, tag, dependency, time_based)",
        ),
        "key": opt("key", str, "Single cache key to invalidate"),
        "keys": opt("keys", list, "Multiple cache keys to invalidate"),
        "pattern": opt(
            "pattern", str, "Pattern for key matching (supports wildcards and regex)"
        ),
        "tags": opt("tags", list, "Tags to invalidate"),
        "dependencies": opt("dependencies", list, "Dependent keys to invalidate"),
        "cascade_patterns": opt(
            "cascade_patterns", list, "Patterns for cascade invalidation"
        ),
        "max_age": opt(
            "max_age", int, "Maximum age in seconds for time-based invalidation"
        ),
        "reason": opt(
            "reason",
            str,
            "Reason for invalidation (for logging)",
            default="Manual invalidation",
        ),
        "event_type": opt(
            "event_type", str, "Event type that triggered invalidation"
        ),
        "source_key": opt(
            "source_key", str, "Source key that triggered the invalidation"
        ),
        "backend": opt(
            "backend", str, "Cache backend (memory, redis, hybrid)", default="memory"
        ),
        "redis_url": opt(
            "redis_url",
            str,
            "Redis connection URL",
            default="redis://localhost:6379",
        ),
        "namespace": opt("namespace", str, "Key namespace prefix", default=""),
        "dry_run": opt(
            "dry_run",
            bool,
            "Simulate invalidation without executing",
            default=False,
        ),
        "batch_size": opt(
            "batch_size",
            int,
            "Batch size for large invalidation operations",
            default=1000,
        ),
    }
235
+
236
def get_output_schema(self) -> Dict[str, NodeParameter]:
    """Declare the result fields produced by ``async_run``."""

    def out(name: str, type_, description: str) -> NodeParameter:
        # Every output shares the same shape: name / type / description.
        return NodeParameter(name=name, type=type_, description=description)

    return {
        "success": out("success", bool, "Whether the invalidation succeeded"),
        "invalidated_count": out(
            "invalidated_count", int, "Number of cache entries invalidated"
        ),
        "cascade_count": out(
            "cascade_count", int, "Number of cascade invalidations performed"
        ),
        "invalidated_keys": out(
            "invalidated_keys", list, "List of invalidated cache keys"
        ),
        "cascade_keys": out(
            "cascade_keys", list, "List of cascade invalidated keys"
        ),
        "strategy_used": out(
            "strategy_used", str, "Invalidation strategy that was applied"
        ),
        "scope_used": out(
            "scope_used", str, "Invalidation scope that was applied"
        ),
        "execution_time": out(
            "execution_time", float, "Time taken to execute invalidation"
        ),
        "stats": out("stats", dict, "Invalidation statistics"),
        "dry_run": out("dry_run", bool, "Whether this was a dry run"),
        "reason": out("reason", str, "Reason for invalidation"),
        "timestamp": out("timestamp", str, "ISO timestamp of invalidation"),
    }
300
+
301
async def async_run(self, **kwargs) -> Dict[str, Any]:
    """Route an invalidation request to the handler for its scope.

    Raises:
        NodeExecutionError: if backend setup or the invalidation itself
            fails (the original exception is logged first).
    """
    # Enum conversion happens before the try block, so an invalid
    # strategy/scope string surfaces as a plain ValueError to the caller.
    strategy = InvalidationStrategy(kwargs["strategy"])
    scope = InvalidationScope(kwargs["scope"])
    dry_run = kwargs.get("dry_run", False)
    reason = kwargs.get("reason", "Manual invalidation")

    started = time.time()

    # Scope -> coroutine dispatch table (replaces a long if/elif chain).
    handlers = {
        InvalidationScope.SINGLE: self._invalidate_single,
        InvalidationScope.PATTERN: self._invalidate_pattern,
        InvalidationScope.TAG: self._invalidate_tag,
        InvalidationScope.DEPENDENCY: self._invalidate_dependency,
        InvalidationScope.TIME_BASED: self._invalidate_time_based,
    }

    try:
        # Make sure the requested backend (memory/redis/hybrid) is usable.
        await self._ensure_backend(kwargs)

        handler = handlers.get(scope)
        if handler is None:
            raise ValueError(f"Unsupported invalidation scope: {scope}")
        result = await handler(strategy, kwargs, dry_run)

        elapsed = time.time() - started

        # Dry runs are not recorded in the audit log, but stats still move.
        if not dry_run:
            self._log_invalidation(strategy, scope, result, reason, kwargs)
        self._update_stats(result)

        return {
            "success": True,
            "invalidated_count": result.get("invalidated_count", 0),
            "cascade_count": result.get("cascade_count", 0),
            "invalidated_keys": result.get("invalidated_keys", []),
            "cascade_keys": result.get("cascade_keys", []),
            "strategy_used": strategy.value,
            "scope_used": scope.value,
            "execution_time": elapsed,
            "stats": dict(self._stats),
            "dry_run": dry_run,
            "reason": reason,
            "timestamp": datetime.now(UTC).isoformat(),
        }

    except Exception as e:
        self.logger.error(f"Cache invalidation failed: {str(e)}")
        raise NodeExecutionError(f"Cache invalidation failed: {str(e)}")
355
+
356
async def _ensure_backend(self, kwargs: Dict[str, Any]) -> None:
    """Make sure the requested cache backend is ready to use.

    For ``memory`` this is a no-op.  For ``redis``/``hybrid`` it verifies
    the existing client with a short ping and reconnects if needed.  On a
    pure ``redis`` backend connection failures raise
    ``NodeExecutionError``; on ``hybrid`` they degrade to memory-only with
    a warning.
    """
    backend = kwargs.get("backend", "memory")

    if backend in ["redis", "hybrid"]:
        if not REDIS_AVAILABLE:
            if backend == "redis":
                raise NodeExecutionError(
                    "Redis is not available. Install with: pip install redis"
                )
            else:
                self.logger.warning("Redis not available, using memory cache only")
                return

        redis_url = kwargs.get("redis_url", "redis://localhost:6379")

        # Only recreate the Redis client if the current one is unhealthy.
        if self._redis_client:
            try:
                # Quick liveness probe on the existing connection.
                await asyncio.wait_for(self._redis_client.ping(), timeout=1.0)
                return  # connection is good, reuse it
            except Exception:
                # Fix: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.  Connection is bad —
                # close it (best effort) and fall through to reconnect.
                try:
                    await self._redis_client.aclose()
                except Exception:
                    pass  # ignore errors when closing the old client

        try:
            self._redis_client = redis.from_url(redis_url)
            try:
                await asyncio.wait_for(self._redis_client.ping(), timeout=2.0)
                self.logger.debug(f"Fresh Redis connection established to {redis_url}")
            except (asyncio.TimeoutError, RuntimeError) as e:
                if "Event loop is closed" in str(e):
                    # Event-loop teardown race: keep an unpinged client
                    # rather than failing outright.
                    self._redis_client = redis.from_url(redis_url)
                    self.logger.debug(
                        "Created Redis client without ping test due to event loop issue"
                    )
                else:
                    raise
        except Exception as e:
            if backend == "redis":
                raise NodeExecutionError(f"Failed to connect to Redis: {str(e)}")
            else:
                self.logger.warning(
                    f"Redis connection failed, using memory cache: {str(e)}"
                )
                self._redis_client = None
407
+
408
def _build_key(self, key: str, namespace: str = "") -> str:
    """Return *key* prefixed with *namespace* (colon-separated) when set."""
    return f"{namespace}:{key}" if namespace else key
413
+
414
async def _invalidate_single(
    self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
) -> Dict[str, Any]:
    """Invalidate one explicit key, or an explicit list of keys.

    Raises:
        ValueError: if neither ``key`` nor ``keys`` was supplied.
    """
    key = kwargs.get("key")
    keys = kwargs.get("keys", [])
    namespace = kwargs.get("namespace", "")

    if not key and not keys:
        raise ValueError(
            "Either 'key' or 'keys' must be provided for single invalidation"
        )

    # `key` wins over `keys` when both are present; all targets get the
    # namespace prefix applied.
    targets = [self._build_key(k, namespace) for k in ([key] if key else keys)]

    removed = []
    cascaded = []

    for full_key in targets:
        if dry_run:
            # Dry run: report what WOULD be touched, change nothing.
            removed.append(full_key)
            if strategy == InvalidationStrategy.CASCADE:
                cascaded.extend(await self._get_cascade_keys(full_key, kwargs))
            continue

        if await self._execute_invalidation(strategy, full_key, kwargs):
            removed.append(full_key)
        # Cascade runs whether or not the primary delete reported success.
        if strategy == InvalidationStrategy.CASCADE:
            cascaded.extend(await self._cascade_invalidate(full_key, kwargs))

    return {
        "invalidated_count": len(removed),
        "cascade_count": len(cascaded),
        "invalidated_keys": removed,
        "cascade_keys": cascaded,
    }
456
+
457
async def _invalidate_pattern(
    self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
) -> Dict[str, Any]:
    """Invalidate every cached key matching a glob-style pattern.

    Raises:
        ValueError: if no ``pattern`` was supplied.
    """
    pattern = kwargs.get("pattern")
    namespace = kwargs.get("namespace", "")
    backend = kwargs.get("backend", "memory")
    batch_size = kwargs.get("batch_size", 1000)

    if not pattern:
        raise ValueError("Pattern must be provided for pattern invalidation")

    if namespace:
        pattern = f"{namespace}:{pattern}"

    candidates = await self._find_matching_keys(pattern, backend)

    removed = []
    cascaded = []

    # Walk the matches one batch at a time so a huge key space does not
    # turn into one unbounded burst of backend calls.
    for start in range(0, len(candidates), batch_size):
        for cache_key in candidates[start : start + batch_size]:
            if dry_run:
                removed.append(cache_key)
                if strategy == InvalidationStrategy.CASCADE:
                    cascaded.extend(await self._get_cascade_keys(cache_key, kwargs))
                continue

            if await self._execute_invalidation(strategy, cache_key, kwargs):
                removed.append(cache_key)
            if strategy == InvalidationStrategy.CASCADE:
                cascaded.extend(await self._cascade_invalidate(cache_key, kwargs))

    return {
        "invalidated_count": len(removed),
        "cascade_count": len(cascaded),
        "invalidated_keys": removed,
        "cascade_keys": cascaded,
    }
503
+
504
async def _invalidate_tag(
    self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
) -> Dict[str, Any]:
    """Invalidate every key registered under any of the supplied tags.

    Raises:
        ValueError: if ``tags`` is missing or empty.
    """
    tags = kwargs.get("tags", [])
    if not tags:
        raise ValueError("Tags must be provided for tag-based invalidation")

    removed = []
    cascaded = []

    for tag in tags:
        # Keys are looked up in the in-process tag registry; a key listed
        # under several requested tags is processed once per tag.
        for cache_key in self._tag_registry.get(tag, set()):
            if dry_run:
                removed.append(cache_key)
                if strategy == InvalidationStrategy.CASCADE:
                    cascaded.extend(await self._get_cascade_keys(cache_key, kwargs))
                continue

            if await self._execute_invalidation(strategy, cache_key, kwargs):
                removed.append(cache_key)
            if strategy == InvalidationStrategy.CASCADE:
                cascaded.extend(await self._cascade_invalidate(cache_key, kwargs))

    return {
        "invalidated_count": len(removed),
        "cascade_count": len(cascaded),
        "invalidated_keys": removed,
        "cascade_keys": cascaded,
    }
541
+
542
async def _invalidate_dependency(
    self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
) -> Dict[str, Any]:
    """Invalidate keys that depend on a changed source key.

    Targets are the union of the explicit ``dependencies`` list and the
    entries recorded for ``source_key`` in the dependency graph.

    Raises:
        ValueError: if neither ``source_key`` nor ``dependencies`` given.
    """
    source_key = kwargs.get("source_key")
    dependencies = kwargs.get("dependencies", [])

    if not source_key and not dependencies:
        raise ValueError("Either source_key or dependencies must be provided")

    targets = set(dependencies) if dependencies else set()
    if source_key:
        targets |= self._dependency_graph.get(source_key, set())

    removed = []
    cascaded = []

    for cache_key in targets:
        if dry_run:
            removed.append(cache_key)
            if strategy == InvalidationStrategy.CASCADE:
                cascaded.extend(await self._get_cascade_keys(cache_key, kwargs))
            continue

        if await self._execute_invalidation(strategy, cache_key, kwargs):
            removed.append(cache_key)
        if strategy == InvalidationStrategy.CASCADE:
            cascaded.extend(await self._cascade_invalidate(cache_key, kwargs))

    return {
        "invalidated_count": len(removed),
        "cascade_count": len(cascaded),
        "invalidated_keys": removed,
        "cascade_keys": cascaded,
    }
583
+
584
async def _invalidate_time_based(
    self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
) -> Dict[str, Any]:
    """Invalidate memory-cache entries older than ``max_age`` seconds.

    Only entries that carry a ``_cache_timestamp`` field are considered.
    The pure-Redis backend is not supported (Redis keeps no insertion
    timestamp we can query) and only logs a warning.

    Raises:
        ValueError: if ``max_age`` was not supplied.
    """
    max_age = kwargs.get("max_age")
    backend = kwargs.get("backend", "memory")

    # Fix: the previous truthiness check (`if not max_age`) also rejected
    # the legitimate value 0 ("everything cached before now is stale").
    # Only a missing max_age is an error.
    if max_age is None:
        raise ValueError("max_age must be provided for time-based invalidation")

    cutoff_time = time.time() - max_age
    invalidated_keys = []
    cascade_keys = []

    if backend == "redis" and self._redis_client:
        # Redis doesn't directly support age-based queries; we would need
        # to store metadata or use Redis streams.
        self.logger.warning(
            "Time-based invalidation not fully supported for Redis backend"
        )

    elif backend in ["memory", "hybrid"]:
        # Collect expired keys first so we never mutate the cache dict
        # while iterating it.
        expired_keys = [
            key
            for key, value in self._memory_cache.items()
            if isinstance(value, dict)
            and "_cache_timestamp" in value
            and value["_cache_timestamp"] < cutoff_time
        ]

        for key in expired_keys:
            if not dry_run:
                if await self._execute_invalidation(strategy, key, kwargs):
                    invalidated_keys.append(key)
                if strategy == InvalidationStrategy.CASCADE:
                    cascade_keys.extend(await self._cascade_invalidate(key, kwargs))
            else:
                invalidated_keys.append(key)
                if strategy == InvalidationStrategy.CASCADE:
                    cascade_keys.extend(await self._get_cascade_keys(key, kwargs))

    return {
        "invalidated_count": len(invalidated_keys),
        "cascade_count": len(cascade_keys),
        "invalidated_keys": invalidated_keys,
        "cascade_keys": cascade_keys,
    }
635
+
636
async def _execute_invalidation(
    self, strategy: InvalidationStrategy, key: str, kwargs: Dict[str, Any]
) -> bool:
    """Apply *strategy* to a single cache *key* on the configured backend.

    Returns True on success (including when the strategy has no branch
    for the key/backend combination), False if the backend call raised.
    NOTE(review): TAG_BASED has no branch here, so it reaches `return
    True` without touching the backend — confirm whether tag-based
    deletion is meant to happen entirely in _invalidate_tag.
    """
    backend = kwargs.get("backend", "memory")

    try:
        if strategy == InvalidationStrategy.IMMEDIATE:
            # Delete immediately
            if backend == "redis" and self._redis_client:
                await self._redis_client.delete(key)
            elif backend in ["memory", "hybrid"]:
                # pop with default: deleting an absent key is not an error
                self._memory_cache.pop(key, None)
                if backend == "hybrid" and self._redis_client:
                    # hybrid keeps both stores in sync
                    await self._redis_client.delete(key)

        elif strategy == InvalidationStrategy.LAZY:
            # Mark for lazy deletion (could set a special flag)
            if backend == "redis" and self._redis_client:
                # marker key with a 1-second TTL; the entry itself stays
                await self._redis_client.set(f"{key}:_lazy_delete", "1", ex=1)
            elif backend in ["memory", "hybrid"]:
                if key in self._memory_cache:
                    # Replace with lazy deletion marker
                    self._memory_cache[key] = {
                        "_lazy_delete": True,
                        "_timestamp": time.time(),
                    }

        elif strategy == InvalidationStrategy.TTL_REFRESH:
            # Reset TTL to expire soon
            new_ttl = kwargs.get("new_ttl", 1)  # 1 second
            if backend == "redis" and self._redis_client:
                await self._redis_client.expire(key, new_ttl)
            elif backend in ["memory", "hybrid"]:
                # only dict-shaped entries carry the TTL metadata fields
                if key in self._memory_cache and isinstance(
                    self._memory_cache[key], dict
                ):
                    self._memory_cache[key]["_cache_ttl"] = new_ttl
                    self._memory_cache[key]["_cache_timestamp"] = time.time()

        elif strategy == InvalidationStrategy.CASCADE:
            # CASCADE strategy should also immediately delete the key
            # The cascade dependencies will be handled separately
            if backend == "redis" and self._redis_client:
                await self._redis_client.delete(key)
            elif backend in ["memory", "hybrid"]:
                self._memory_cache.pop(key, None)
                if backend == "hybrid" and self._redis_client:
                    await self._redis_client.delete(key)

        return True

    except Exception as e:
        # Deliberate best-effort: log and report failure, never raise,
        # so one bad key cannot abort a batch invalidation.
        self.logger.error(f"Failed to invalidate key '{key}': {str(e)}")
        return False
691
+
692
async def _cascade_invalidate(self, key: str, kwargs: Dict[str, Any]) -> List[str]:
    """Immediately invalidate everything that cascades from *key*.

    Returns the list of keys that were actually removed.
    """
    removed = []

    # Pattern-driven cascades: each "{key}" placeholder resolves to the
    # source key before matching.
    for raw_pattern in kwargs.get("cascade_patterns", []):
        matches = await self._find_matching_keys(
            raw_pattern.replace("{key}", key), kwargs.get("backend", "memory")
        )
        for match_key in matches:
            if await self._execute_invalidation(
                InvalidationStrategy.IMMEDIATE, match_key, kwargs
            ):
                removed.append(match_key)

    # Graph-driven cascades: anything registered as depending on *key*.
    for dep_key in self._dependency_graph.get(key, set()):
        if await self._execute_invalidation(
            InvalidationStrategy.IMMEDIATE, dep_key, kwargs
        ):
            removed.append(dep_key)

    return removed
722
+
723
async def _get_cascade_keys(self, key: str, kwargs: Dict[str, Any]) -> List[str]:
    """Preview the cascade targets of *key* without deleting anything.

    Used by dry runs: same resolution logic as _cascade_invalidate, but
    purely read-only.
    """
    preview = []

    for raw_pattern in kwargs.get("cascade_patterns", []):
        preview.extend(
            await self._find_matching_keys(
                raw_pattern.replace("{key}", key), kwargs.get("backend", "memory")
            )
        )

    # Plus everything recorded as depending on *key*.
    preview.extend(self._dependency_graph.get(key, set()))

    return preview
739
+
740
async def _find_matching_keys(self, pattern: str, backend: str) -> List[str]:
    """Return cache keys matching *pattern* on the given backend.

    Lookup failures are logged and yield whatever was collected so far
    (possibly an empty list) rather than raising.
    """
    found = []

    try:
        if backend == "redis" and self._redis_client:
            # Redis KEYS scan (note: can be expensive on large key spaces).
            raw = await self._redis_client.keys(pattern)
            found.extend(k.decode() if isinstance(k, bytes) else k for k in raw)

        elif backend in ["memory", "hybrid"]:
            # Glob matching over the in-process cache.
            found.extend(
                k for k in self._memory_cache.keys() if fnmatch.fnmatch(k, pattern)
            )

            # Hybrid also consults Redis, de-duplicating against memory hits.
            if backend == "hybrid" and self._redis_client:
                for raw_key in await self._redis_client.keys(pattern):
                    text = raw_key.decode() if isinstance(raw_key, bytes) else raw_key
                    if text not in found:
                        found.append(text)

    except Exception as e:
        self.logger.error(
            f"Failed to find matching keys for pattern '{pattern}': {str(e)}"
        )

    return found
772
+
773
+ def _log_invalidation(
774
+ self,
775
+ strategy: InvalidationStrategy,
776
+ scope: InvalidationScope,
777
+ result: Dict[str, Any],
778
+ reason: str,
779
+ kwargs: Dict[str, Any],
780
+ ):
781
+ """Log invalidation operation."""
782
+ log_entry = {
783
+ "timestamp": datetime.now(UTC).isoformat(),
784
+ "strategy": strategy.value,
785
+ "scope": scope.value,
786
+ "reason": reason,
787
+ "invalidated_count": result.get("invalidated_count", 0),
788
+ "cascade_count": result.get("cascade_count", 0),
789
+ "source_key": kwargs.get("source_key"),
790
+ "pattern": kwargs.get("pattern"),
791
+ "tags": kwargs.get("tags"),
792
+ }
793
+
794
+ self._invalidation_log.append(log_entry)
795
+
796
+ # Keep only last 1000 log entries
797
+ if len(self._invalidation_log) > 1000:
798
+ self._invalidation_log = self._invalidation_log[-1000:]
799
+
800
+ def _update_stats(self, result: Dict[str, Any]):
801
+ """Update invalidation statistics."""
802
+ self._stats["invalidations"] += result.get("invalidated_count", 0)
803
+ self._stats["cascade_invalidations"] += result.get("cascade_count", 0)
804
+
805
+ if result.get("invalidated_count", 0) > 0:
806
+ self._stats["pattern_matches"] += 1
807
+
808
+ def add_tag(self, key: str, tag: str):
809
+ """Add a tag association for a cache key."""
810
+ if tag not in self._tag_registry:
811
+ self._tag_registry[tag] = set()
812
+ self._tag_registry[tag].add(key)
813
+
814
+ def remove_tag(self, key: str, tag: str):
815
+ """Remove a tag association for a cache key."""
816
+ if tag in self._tag_registry:
817
+ self._tag_registry[tag].discard(key)
818
+ if not self._tag_registry[tag]:
819
+ del self._tag_registry[tag]
820
+
821
+ def add_dependency(self, parent_key: str, dependent_key: str):
822
+ """Add a dependency relationship."""
823
+ if parent_key not in self._dependency_graph:
824
+ self._dependency_graph[parent_key] = set()
825
+ self._dependency_graph[parent_key].add(dependent_key)
826
+
827
+ def remove_dependency(self, parent_key: str, dependent_key: str):
828
+ """Remove a dependency relationship."""
829
+ if parent_key in self._dependency_graph:
830
+ self._dependency_graph[parent_key].discard(dependent_key)
831
+ if not self._dependency_graph[parent_key]:
832
+ del self._dependency_graph[parent_key]
833
+
834
+ def run(self, **kwargs) -> Dict[str, Any]:
835
+ """Synchronous wrapper for compatibility."""
836
+ try:
837
+ # Try to get current event loop
838
+ loop = asyncio.get_running_loop()
839
+ except RuntimeError:
840
+ # No event loop running, safe to use asyncio.run()
841
+ return asyncio.run(self.async_run(**kwargs))
842
+ else:
843
+ # Event loop is running, schedule the coroutine
844
+ import concurrent.futures
845
+ import threading
846
+
847
+ result_holder = {}
848
+ exception_holder = {}
849
+
850
+ def run_in_new_loop():
851
+ try:
852
+ # Create a new event loop for this thread
853
+ new_loop = asyncio.new_event_loop()
854
+ asyncio.set_event_loop(new_loop)
855
+ try:
856
+ result = new_loop.run_until_complete(self.async_run(**kwargs))
857
+ result_holder['result'] = result
858
+ finally:
859
+ new_loop.close()
860
+ except Exception as e:
861
+ exception_holder['error'] = e
862
+
863
+ thread = threading.Thread(target=run_in_new_loop)
864
+ thread.start()
865
+ thread.join()
866
+
867
+ if 'error' in exception_holder:
868
+ raise exception_holder['error']
869
+
870
+ return result_holder['result']