kailash 0.6.3__py3-none-any.whl → 0.6.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122) hide show
  1. kailash/__init__.py +3 -3
  2. kailash/api/custom_nodes_secure.py +3 -3
  3. kailash/api/gateway.py +1 -1
  4. kailash/api/studio.py +1 -1
  5. kailash/api/workflow_api.py +2 -2
  6. kailash/core/resilience/bulkhead.py +475 -0
  7. kailash/core/resilience/circuit_breaker.py +92 -10
  8. kailash/core/resilience/health_monitor.py +578 -0
  9. kailash/edge/discovery.py +86 -0
  10. kailash/mcp_server/__init__.py +309 -33
  11. kailash/mcp_server/advanced_features.py +1022 -0
  12. kailash/mcp_server/ai_registry_server.py +27 -2
  13. kailash/mcp_server/auth.py +789 -0
  14. kailash/mcp_server/client.py +645 -378
  15. kailash/mcp_server/discovery.py +1593 -0
  16. kailash/mcp_server/errors.py +673 -0
  17. kailash/mcp_server/oauth.py +1727 -0
  18. kailash/mcp_server/protocol.py +1126 -0
  19. kailash/mcp_server/registry_integration.py +587 -0
  20. kailash/mcp_server/server.py +1228 -96
  21. kailash/mcp_server/transports.py +1169 -0
  22. kailash/mcp_server/utils/__init__.py +6 -1
  23. kailash/mcp_server/utils/cache.py +250 -7
  24. kailash/middleware/auth/auth_manager.py +3 -3
  25. kailash/middleware/communication/api_gateway.py +1 -1
  26. kailash/middleware/communication/realtime.py +1 -1
  27. kailash/middleware/mcp/enhanced_server.py +1 -1
  28. kailash/nodes/__init__.py +2 -0
  29. kailash/nodes/admin/audit_log.py +6 -6
  30. kailash/nodes/admin/permission_check.py +8 -8
  31. kailash/nodes/admin/role_management.py +32 -28
  32. kailash/nodes/admin/schema.sql +6 -1
  33. kailash/nodes/admin/schema_manager.py +13 -13
  34. kailash/nodes/admin/security_event.py +15 -15
  35. kailash/nodes/admin/tenant_isolation.py +3 -3
  36. kailash/nodes/admin/transaction_utils.py +3 -3
  37. kailash/nodes/admin/user_management.py +21 -21
  38. kailash/nodes/ai/a2a.py +11 -11
  39. kailash/nodes/ai/ai_providers.py +9 -12
  40. kailash/nodes/ai/embedding_generator.py +13 -14
  41. kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
  42. kailash/nodes/ai/iterative_llm_agent.py +2 -2
  43. kailash/nodes/ai/llm_agent.py +210 -33
  44. kailash/nodes/ai/self_organizing.py +2 -2
  45. kailash/nodes/alerts/discord.py +4 -4
  46. kailash/nodes/api/graphql.py +6 -6
  47. kailash/nodes/api/http.py +10 -10
  48. kailash/nodes/api/rate_limiting.py +4 -4
  49. kailash/nodes/api/rest.py +15 -15
  50. kailash/nodes/auth/mfa.py +3 -3
  51. kailash/nodes/auth/risk_assessment.py +2 -2
  52. kailash/nodes/auth/session_management.py +5 -5
  53. kailash/nodes/auth/sso.py +143 -0
  54. kailash/nodes/base.py +8 -2
  55. kailash/nodes/base_async.py +16 -2
  56. kailash/nodes/base_with_acl.py +2 -2
  57. kailash/nodes/cache/__init__.py +9 -0
  58. kailash/nodes/cache/cache.py +1172 -0
  59. kailash/nodes/cache/cache_invalidation.py +874 -0
  60. kailash/nodes/cache/redis_pool_manager.py +595 -0
  61. kailash/nodes/code/async_python.py +2 -1
  62. kailash/nodes/code/python.py +194 -30
  63. kailash/nodes/compliance/data_retention.py +6 -6
  64. kailash/nodes/compliance/gdpr.py +5 -5
  65. kailash/nodes/data/__init__.py +10 -0
  66. kailash/nodes/data/async_sql.py +1956 -129
  67. kailash/nodes/data/optimistic_locking.py +906 -0
  68. kailash/nodes/data/readers.py +8 -8
  69. kailash/nodes/data/redis.py +378 -0
  70. kailash/nodes/data/sql.py +314 -3
  71. kailash/nodes/data/streaming.py +21 -0
  72. kailash/nodes/enterprise/__init__.py +8 -0
  73. kailash/nodes/enterprise/audit_logger.py +285 -0
  74. kailash/nodes/enterprise/batch_processor.py +22 -3
  75. kailash/nodes/enterprise/data_lineage.py +1 -1
  76. kailash/nodes/enterprise/mcp_executor.py +205 -0
  77. kailash/nodes/enterprise/service_discovery.py +150 -0
  78. kailash/nodes/enterprise/tenant_assignment.py +108 -0
  79. kailash/nodes/logic/async_operations.py +2 -2
  80. kailash/nodes/logic/convergence.py +1 -1
  81. kailash/nodes/logic/operations.py +1 -1
  82. kailash/nodes/monitoring/__init__.py +11 -1
  83. kailash/nodes/monitoring/health_check.py +456 -0
  84. kailash/nodes/monitoring/log_processor.py +817 -0
  85. kailash/nodes/monitoring/metrics_collector.py +627 -0
  86. kailash/nodes/monitoring/performance_benchmark.py +137 -11
  87. kailash/nodes/rag/advanced.py +7 -7
  88. kailash/nodes/rag/agentic.py +49 -2
  89. kailash/nodes/rag/conversational.py +3 -3
  90. kailash/nodes/rag/evaluation.py +3 -3
  91. kailash/nodes/rag/federated.py +3 -3
  92. kailash/nodes/rag/graph.py +3 -3
  93. kailash/nodes/rag/multimodal.py +3 -3
  94. kailash/nodes/rag/optimized.py +5 -5
  95. kailash/nodes/rag/privacy.py +3 -3
  96. kailash/nodes/rag/query_processing.py +6 -6
  97. kailash/nodes/rag/realtime.py +1 -1
  98. kailash/nodes/rag/registry.py +1 -1
  99. kailash/nodes/rag/router.py +1 -1
  100. kailash/nodes/rag/similarity.py +7 -7
  101. kailash/nodes/rag/strategies.py +4 -4
  102. kailash/nodes/security/abac_evaluator.py +6 -6
  103. kailash/nodes/security/behavior_analysis.py +5 -5
  104. kailash/nodes/security/credential_manager.py +1 -1
  105. kailash/nodes/security/rotating_credentials.py +11 -11
  106. kailash/nodes/security/threat_detection.py +8 -8
  107. kailash/nodes/testing/credential_testing.py +2 -2
  108. kailash/nodes/transform/processors.py +5 -5
  109. kailash/runtime/local.py +163 -9
  110. kailash/runtime/parameter_injection.py +425 -0
  111. kailash/runtime/parameter_injector.py +657 -0
  112. kailash/runtime/testing.py +2 -2
  113. kailash/testing/fixtures.py +2 -2
  114. kailash/workflow/builder.py +99 -14
  115. kailash/workflow/builder_improvements.py +207 -0
  116. kailash/workflow/input_handling.py +170 -0
  117. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/METADATA +22 -9
  118. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/RECORD +122 -95
  119. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/WHEEL +0 -0
  120. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/entry_points.txt +0 -0
  121. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/licenses/LICENSE +0 -0
  122. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,874 @@
1
+ """Cache invalidation node for intelligent cache management.
2
+
3
+ This module provides advanced cache invalidation strategies including
4
+ pattern-based invalidation, cascade invalidation, event-driven clearing,
5
+ and tag-based invalidation for complex cache hierarchies.
6
+ """
7
+
8
+ import asyncio
9
+ import fnmatch
10
+ import re
11
+ import time
12
+ from datetime import UTC, datetime, timedelta
13
+ from enum import Enum
14
+ from typing import Any, Dict, List, Optional, Set, Union
15
+
16
+ from kailash.nodes.base import NodeParameter, register_node
17
+ from kailash.nodes.base_async import AsyncNode
18
+ from kailash.sdk_exceptions import NodeExecutionError
19
+
20
+ try:
21
+ import redis.asyncio as redis
22
+
23
+ REDIS_AVAILABLE = True
24
+ except ImportError:
25
+ REDIS_AVAILABLE = False
26
+
27
+
28
class InvalidationStrategy(Enum):
    """Supported strategies for removing or refreshing cache entries."""

    IMMEDIATE = "immediate"  # Delete the entry right away
    LAZY = "lazy"  # Mark the entry for deferred deletion
    TTL_REFRESH = "ttl_refresh"  # Reset the entry's TTL so it expires soon
    CASCADE = "cascade"  # Also invalidate dependent entries
    TAG_BASED = "tag_based"  # Invalidate every key sharing a tag
36
+
37
+
38
class InvalidationScope(Enum):
    """How wide an invalidation operation reaches."""

    SINGLE = "single"  # One explicitly named key (or key list)
    PATTERN = "pattern"  # All keys matching a wildcard pattern
    TAG = "tag"  # All keys registered under given tags
    DEPENDENCY = "dependency"  # Keys reachable through the dependency graph
    TIME_BASED = "time_based"  # Keys older than a maximum age
46
+
47
+
48
class EventType(Enum):
    """Data-change events that can trigger cache invalidation."""

    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    ACCESS = "access"
    EXPIRE = "expire"
56
+
57
+
58
+ @register_node()
59
+ class CacheInvalidationNode(AsyncNode):
60
+ """Node for intelligent cache invalidation and management.
61
+
62
+ This node provides comprehensive cache invalidation capabilities including:
63
+ - Pattern-based invalidation with wildcard and regex support
64
+ - Tag-based cache invalidation for complex hierarchies
65
+ - Cascade invalidation for dependent cache entries
66
+ - Event-driven invalidation based on data changes
67
+ - Time-based invalidation strategies
68
+ - Dependency tracking and management
69
+ - Statistics and monitoring for invalidation operations
70
+ - Support for multiple cache backends (Redis, memory, hybrid)
71
+
72
+ Design Purpose:
73
+ - Maintain cache consistency across complex applications
74
+ - Provide flexible invalidation strategies
75
+ - Support real-time and batch invalidation operations
76
+ - Enable efficient cache management for microservices
77
+
78
+ Examples:
79
+ >>> # Pattern-based invalidation
80
+ >>> invalidator = CacheInvalidationNode()
81
+ >>> result = await invalidator.execute(
82
+ ... strategy="immediate",
83
+ ... scope="pattern",
84
+ ... pattern="user:*:profile",
85
+ ... reason="User profile updated"
86
+ ... )
87
+
88
+ >>> # Tag-based invalidation
89
+ >>> result = await invalidator.execute(
90
+ ... strategy="cascade",
91
+ ... scope="tag",
92
+ ... tags=["user:123", "profile", "session"],
93
+ ... cascade_patterns=["session:*", "cache:user:123:*"]
94
+ ... )
95
+
96
+ >>> # Event-driven invalidation
97
+ >>> result = await invalidator.execute(
98
+ ... strategy="immediate",
99
+ ... scope="dependency",
100
+ ... event_type="update",
101
+ ... source_key="user:123",
102
+ ... dependencies=["user:123:profile", "user:123:preferences"]
103
+ ... )
104
+ """
105
+
106
+ def __init__(self, **kwargs):
107
+ """Initialize the cache invalidation node."""
108
+ super().__init__(**kwargs)
109
+ self._redis_client = None
110
+ self._memory_cache = {}
111
+ self._tag_registry = {} # tag -> set of keys
112
+ self._dependency_graph = {} # key -> set of dependent keys
113
+ self._invalidation_log = []
114
+ self._stats = {
115
+ "invalidations": 0,
116
+ "cascade_invalidations": 0,
117
+ "pattern_matches": 0,
118
+ "tag_matches": 0,
119
+ "dependency_cascades": 0,
120
+ }
121
+ self.logger.info(f"Initialized CacheInvalidationNode: {self.id}")
122
+
123
+ def get_parameters(self) -> Dict[str, NodeParameter]:
124
+ """Define the parameters this node accepts."""
125
+ return {
126
+ "strategy": NodeParameter(
127
+ name="strategy",
128
+ type=str,
129
+ required=True,
130
+ description="Invalidation strategy (immediate, lazy, ttl_refresh, cascade, tag_based)",
131
+ ),
132
+ "scope": NodeParameter(
133
+ name="scope",
134
+ type=str,
135
+ required=True,
136
+ description="Invalidation scope (single, pattern, tag, dependency, time_based)",
137
+ ),
138
+ "key": NodeParameter(
139
+ name="key",
140
+ type=str,
141
+ required=False,
142
+ description="Single cache key to invalidate",
143
+ ),
144
+ "keys": NodeParameter(
145
+ name="keys",
146
+ type=list,
147
+ required=False,
148
+ description="Multiple cache keys to invalidate",
149
+ ),
150
+ "pattern": NodeParameter(
151
+ name="pattern",
152
+ type=str,
153
+ required=False,
154
+ description="Pattern for key matching (supports wildcards and regex)",
155
+ ),
156
+ "tags": NodeParameter(
157
+ name="tags",
158
+ type=list,
159
+ required=False,
160
+ description="Tags to invalidate",
161
+ ),
162
+ "dependencies": NodeParameter(
163
+ name="dependencies",
164
+ type=list,
165
+ required=False,
166
+ description="Dependent keys to invalidate",
167
+ ),
168
+ "cascade_patterns": NodeParameter(
169
+ name="cascade_patterns",
170
+ type=list,
171
+ required=False,
172
+ description="Patterns for cascade invalidation",
173
+ ),
174
+ "max_age": NodeParameter(
175
+ name="max_age",
176
+ type=int,
177
+ required=False,
178
+ description="Maximum age in seconds for time-based invalidation",
179
+ ),
180
+ "reason": NodeParameter(
181
+ name="reason",
182
+ type=str,
183
+ required=False,
184
+ default="Manual invalidation",
185
+ description="Reason for invalidation (for logging)",
186
+ ),
187
+ "event_type": NodeParameter(
188
+ name="event_type",
189
+ type=str,
190
+ required=False,
191
+ description="Event type that triggered invalidation",
192
+ ),
193
+ "source_key": NodeParameter(
194
+ name="source_key",
195
+ type=str,
196
+ required=False,
197
+ description="Source key that triggered the invalidation",
198
+ ),
199
+ "backend": NodeParameter(
200
+ name="backend",
201
+ type=str,
202
+ required=False,
203
+ default="memory",
204
+ description="Cache backend (memory, redis, hybrid)",
205
+ ),
206
+ "redis_url": NodeParameter(
207
+ name="redis_url",
208
+ type=str,
209
+ required=False,
210
+ default="redis://localhost:6379",
211
+ description="Redis connection URL",
212
+ ),
213
+ "namespace": NodeParameter(
214
+ name="namespace",
215
+ type=str,
216
+ required=False,
217
+ default="",
218
+ description="Key namespace prefix",
219
+ ),
220
+ "dry_run": NodeParameter(
221
+ name="dry_run",
222
+ type=bool,
223
+ required=False,
224
+ default=False,
225
+ description="Simulate invalidation without executing",
226
+ ),
227
+ "batch_size": NodeParameter(
228
+ name="batch_size",
229
+ type=int,
230
+ required=False,
231
+ default=1000,
232
+ description="Batch size for large invalidation operations",
233
+ ),
234
+ }
235
+
236
+ def get_output_schema(self) -> Dict[str, NodeParameter]:
237
+ """Define the output schema for this node."""
238
+ return {
239
+ "success": NodeParameter(
240
+ name="success",
241
+ type=bool,
242
+ description="Whether the invalidation succeeded",
243
+ ),
244
+ "invalidated_count": NodeParameter(
245
+ name="invalidated_count",
246
+ type=int,
247
+ description="Number of cache entries invalidated",
248
+ ),
249
+ "cascade_count": NodeParameter(
250
+ name="cascade_count",
251
+ type=int,
252
+ description="Number of cascade invalidations performed",
253
+ ),
254
+ "invalidated_keys": NodeParameter(
255
+ name="invalidated_keys",
256
+ type=list,
257
+ description="List of invalidated cache keys",
258
+ ),
259
+ "cascade_keys": NodeParameter(
260
+ name="cascade_keys",
261
+ type=list,
262
+ description="List of cascade invalidated keys",
263
+ ),
264
+ "strategy_used": NodeParameter(
265
+ name="strategy_used",
266
+ type=str,
267
+ description="Invalidation strategy that was applied",
268
+ ),
269
+ "scope_used": NodeParameter(
270
+ name="scope_used",
271
+ type=str,
272
+ description="Invalidation scope that was applied",
273
+ ),
274
+ "execution_time": NodeParameter(
275
+ name="execution_time",
276
+ type=float,
277
+ description="Time taken to execute invalidation",
278
+ ),
279
+ "stats": NodeParameter(
280
+ name="stats",
281
+ type=dict,
282
+ description="Invalidation statistics",
283
+ ),
284
+ "dry_run": NodeParameter(
285
+ name="dry_run",
286
+ type=bool,
287
+ description="Whether this was a dry run",
288
+ ),
289
+ "reason": NodeParameter(
290
+ name="reason",
291
+ type=str,
292
+ description="Reason for invalidation",
293
+ ),
294
+ "timestamp": NodeParameter(
295
+ name="timestamp",
296
+ type=str,
297
+ description="ISO timestamp of invalidation",
298
+ ),
299
+ }
300
+
301
+ async def async_run(self, **kwargs) -> Dict[str, Any]:
302
+ """Execute cache invalidation based on strategy and scope."""
303
+ strategy = InvalidationStrategy(kwargs["strategy"])
304
+ scope = InvalidationScope(kwargs["scope"])
305
+ dry_run = kwargs.get("dry_run", False)
306
+ reason = kwargs.get("reason", "Manual invalidation")
307
+
308
+ start_time = time.time()
309
+
310
+ try:
311
+ # Initialize backend if needed
312
+ await self._ensure_backend(kwargs)
313
+
314
+ # Execute invalidation based on scope
315
+ if scope == InvalidationScope.SINGLE:
316
+ result = await self._invalidate_single(strategy, kwargs, dry_run)
317
+ elif scope == InvalidationScope.PATTERN:
318
+ result = await self._invalidate_pattern(strategy, kwargs, dry_run)
319
+ elif scope == InvalidationScope.TAG:
320
+ result = await self._invalidate_tag(strategy, kwargs, dry_run)
321
+ elif scope == InvalidationScope.DEPENDENCY:
322
+ result = await self._invalidate_dependency(strategy, kwargs, dry_run)
323
+ elif scope == InvalidationScope.TIME_BASED:
324
+ result = await self._invalidate_time_based(strategy, kwargs, dry_run)
325
+ else:
326
+ raise ValueError(f"Unsupported invalidation scope: {scope}")
327
+
328
+ execution_time = time.time() - start_time
329
+
330
+ # Log invalidation
331
+ if not dry_run:
332
+ self._log_invalidation(strategy, scope, result, reason, kwargs)
333
+
334
+ # Update statistics
335
+ self._update_stats(result)
336
+
337
+ return {
338
+ "success": True,
339
+ "invalidated_count": result.get("invalidated_count", 0),
340
+ "cascade_count": result.get("cascade_count", 0),
341
+ "invalidated_keys": result.get("invalidated_keys", []),
342
+ "cascade_keys": result.get("cascade_keys", []),
343
+ "strategy_used": strategy.value,
344
+ "scope_used": scope.value,
345
+ "execution_time": execution_time,
346
+ "stats": dict(self._stats),
347
+ "dry_run": dry_run,
348
+ "reason": reason,
349
+ "timestamp": datetime.now(UTC).isoformat(),
350
+ }
351
+
352
+ except Exception as e:
353
+ self.logger.error(f"Cache invalidation failed: {str(e)}")
354
+ raise NodeExecutionError(f"Cache invalidation failed: {str(e)}")
355
+
356
+ async def _ensure_backend(self, kwargs: Dict[str, Any]):
357
+ """Ensure cache backend is initialized."""
358
+ backend = kwargs.get("backend", "memory")
359
+
360
+ if backend in ["redis", "hybrid"]:
361
+ if not REDIS_AVAILABLE:
362
+ if backend == "redis":
363
+ raise NodeExecutionError(
364
+ "Redis is not available. Install with: pip install redis"
365
+ )
366
+ else:
367
+ self.logger.warning("Redis not available, using memory cache only")
368
+ return
369
+
370
+ redis_url = kwargs.get("redis_url", "redis://localhost:6379")
371
+
372
+ # Only recreate Redis client if the current one is problematic
373
+ if self._redis_client:
374
+ try:
375
+ # Test if current connection is still valid
376
+ await asyncio.wait_for(self._redis_client.ping(), timeout=1.0)
377
+ # Connection is good, reuse it
378
+ return
379
+ except:
380
+ # Connection is bad, close and recreate
381
+ try:
382
+ await self._redis_client.aclose()
383
+ except:
384
+ pass # Ignore errors when closing old client
385
+
386
+ try:
387
+ self._redis_client = redis.from_url(redis_url)
388
+ # Test connection with proper error handling
389
+ try:
390
+ await asyncio.wait_for(self._redis_client.ping(), timeout=2.0)
391
+ self.logger.debug(
392
+ f"Fresh Redis connection established to {redis_url}"
393
+ )
394
+ except (asyncio.TimeoutError, RuntimeError) as e:
395
+ if "Event loop is closed" in str(e):
396
+ # Event loop issue - create new client without ping test
397
+ self._redis_client = redis.from_url(redis_url)
398
+ self.logger.debug(
399
+ "Created Redis client without ping test due to event loop issue"
400
+ )
401
+ else:
402
+ raise
403
+ except Exception as e:
404
+ if backend == "redis":
405
+ raise NodeExecutionError(f"Failed to connect to Redis: {str(e)}")
406
+ else:
407
+ self.logger.warning(
408
+ f"Redis connection failed, using memory cache: {str(e)}"
409
+ )
410
+ self._redis_client = None
411
+
412
+ def _build_key(self, key: str, namespace: str = "") -> str:
413
+ """Build a namespaced cache key."""
414
+ if namespace:
415
+ return f"{namespace}:{key}"
416
+ return key
417
+
418
+ async def _invalidate_single(
419
+ self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
420
+ ) -> Dict[str, Any]:
421
+ """Invalidate a single cache key."""
422
+ key = kwargs.get("key")
423
+ keys = kwargs.get("keys", [])
424
+ namespace = kwargs.get("namespace", "")
425
+
426
+ if not key and not keys:
427
+ raise ValueError(
428
+ "Either 'key' or 'keys' must be provided for single invalidation"
429
+ )
430
+
431
+ target_keys = [key] if key else keys
432
+ full_keys = [self._build_key(k, namespace) for k in target_keys]
433
+
434
+ invalidated_keys = []
435
+ cascade_keys = []
436
+
437
+ for full_key in full_keys:
438
+ if not dry_run:
439
+ success = await self._execute_invalidation(strategy, full_key, kwargs)
440
+ if success:
441
+ invalidated_keys.append(full_key)
442
+
443
+ # Handle cascade if strategy supports it
444
+ if strategy == InvalidationStrategy.CASCADE:
445
+ cascaded = await self._cascade_invalidate(full_key, kwargs)
446
+ cascade_keys.extend(cascaded)
447
+ else:
448
+ # Dry run - just collect what would be invalidated
449
+ invalidated_keys.append(full_key)
450
+ if strategy == InvalidationStrategy.CASCADE:
451
+ cascaded = await self._get_cascade_keys(full_key, kwargs)
452
+ cascade_keys.extend(cascaded)
453
+
454
+ return {
455
+ "invalidated_count": len(invalidated_keys),
456
+ "cascade_count": len(cascade_keys),
457
+ "invalidated_keys": invalidated_keys,
458
+ "cascade_keys": cascade_keys,
459
+ }
460
+
461
+ async def _invalidate_pattern(
462
+ self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
463
+ ) -> Dict[str, Any]:
464
+ """Invalidate cache keys matching a pattern."""
465
+ pattern = kwargs.get("pattern")
466
+ namespace = kwargs.get("namespace", "")
467
+ backend = kwargs.get("backend", "memory")
468
+ batch_size = kwargs.get("batch_size", 1000)
469
+
470
+ if not pattern:
471
+ raise ValueError("Pattern must be provided for pattern invalidation")
472
+
473
+ if namespace:
474
+ pattern = f"{namespace}:{pattern}"
475
+
476
+ # Find matching keys
477
+ matching_keys = await self._find_matching_keys(pattern, backend)
478
+
479
+ invalidated_keys = []
480
+ cascade_keys = []
481
+
482
+ # Process in batches
483
+ for i in range(0, len(matching_keys), batch_size):
484
+ batch = matching_keys[i : i + batch_size]
485
+
486
+ for key in batch:
487
+ if not dry_run:
488
+ success = await self._execute_invalidation(strategy, key, kwargs)
489
+ if success:
490
+ invalidated_keys.append(key)
491
+
492
+ if strategy == InvalidationStrategy.CASCADE:
493
+ cascaded = await self._cascade_invalidate(key, kwargs)
494
+ cascade_keys.extend(cascaded)
495
+ else:
496
+ invalidated_keys.append(key)
497
+ if strategy == InvalidationStrategy.CASCADE:
498
+ cascaded = await self._get_cascade_keys(key, kwargs)
499
+ cascade_keys.extend(cascaded)
500
+
501
+ return {
502
+ "invalidated_count": len(invalidated_keys),
503
+ "cascade_count": len(cascade_keys),
504
+ "invalidated_keys": invalidated_keys,
505
+ "cascade_keys": cascade_keys,
506
+ }
507
+
508
+ async def _invalidate_tag(
509
+ self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
510
+ ) -> Dict[str, Any]:
511
+ """Invalidate cache keys associated with specific tags."""
512
+ tags = kwargs.get("tags", [])
513
+
514
+ if not tags:
515
+ raise ValueError("Tags must be provided for tag-based invalidation")
516
+
517
+ invalidated_keys = []
518
+ cascade_keys = []
519
+
520
+ for tag in tags:
521
+ # Get keys associated with this tag
522
+ tag_keys = self._tag_registry.get(tag, set())
523
+
524
+ for key in tag_keys:
525
+ if not dry_run:
526
+ success = await self._execute_invalidation(strategy, key, kwargs)
527
+ if success:
528
+ invalidated_keys.append(key)
529
+
530
+ if strategy == InvalidationStrategy.CASCADE:
531
+ cascaded = await self._cascade_invalidate(key, kwargs)
532
+ cascade_keys.extend(cascaded)
533
+ else:
534
+ invalidated_keys.append(key)
535
+ if strategy == InvalidationStrategy.CASCADE:
536
+ cascaded = await self._get_cascade_keys(key, kwargs)
537
+ cascade_keys.extend(cascaded)
538
+
539
+ return {
540
+ "invalidated_count": len(invalidated_keys),
541
+ "cascade_count": len(cascade_keys),
542
+ "invalidated_keys": invalidated_keys,
543
+ "cascade_keys": cascade_keys,
544
+ }
545
+
546
+ async def _invalidate_dependency(
547
+ self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
548
+ ) -> Dict[str, Any]:
549
+ """Invalidate cache keys based on dependency relationships."""
550
+ source_key = kwargs.get("source_key")
551
+ dependencies = kwargs.get("dependencies", [])
552
+
553
+ if not source_key and not dependencies:
554
+ raise ValueError("Either source_key or dependencies must be provided")
555
+
556
+ invalidated_keys = []
557
+ cascade_keys = []
558
+
559
+ # Get all dependent keys
560
+ dependent_keys = set(dependencies) if dependencies else set()
561
+
562
+ if source_key:
563
+ # Add dependencies from dependency graph
564
+ dependent_keys.update(self._dependency_graph.get(source_key, set()))
565
+
566
+ for key in dependent_keys:
567
+ if not dry_run:
568
+ success = await self._execute_invalidation(strategy, key, kwargs)
569
+ if success:
570
+ invalidated_keys.append(key)
571
+
572
+ if strategy == InvalidationStrategy.CASCADE:
573
+ cascaded = await self._cascade_invalidate(key, kwargs)
574
+ cascade_keys.extend(cascaded)
575
+ else:
576
+ invalidated_keys.append(key)
577
+ if strategy == InvalidationStrategy.CASCADE:
578
+ cascaded = await self._get_cascade_keys(key, kwargs)
579
+ cascade_keys.extend(cascaded)
580
+
581
+ return {
582
+ "invalidated_count": len(invalidated_keys),
583
+ "cascade_count": len(cascade_keys),
584
+ "invalidated_keys": invalidated_keys,
585
+ "cascade_keys": cascade_keys,
586
+ }
587
+
588
+ async def _invalidate_time_based(
589
+ self, strategy: InvalidationStrategy, kwargs: Dict[str, Any], dry_run: bool
590
+ ) -> Dict[str, Any]:
591
+ """Invalidate cache keys based on age."""
592
+ max_age = kwargs.get("max_age")
593
+ backend = kwargs.get("backend", "memory")
594
+
595
+ if not max_age:
596
+ raise ValueError("max_age must be provided for time-based invalidation")
597
+
598
+ cutoff_time = time.time() - max_age
599
+ invalidated_keys = []
600
+ cascade_keys = []
601
+
602
+ # Find expired keys based on backend
603
+ if backend == "redis" and self._redis_client:
604
+ # Redis doesn't directly support age-based queries
605
+ # We would need to store metadata or use Redis streams
606
+ self.logger.warning(
607
+ "Time-based invalidation not fully supported for Redis backend"
608
+ )
609
+
610
+ elif backend in ["memory", "hybrid"]:
611
+ # Check memory cache for old entries
612
+ expired_keys = []
613
+ for key, value in self._memory_cache.items():
614
+ if isinstance(value, dict) and "_cache_timestamp" in value:
615
+ if value["_cache_timestamp"] < cutoff_time:
616
+ expired_keys.append(key)
617
+
618
+ for key in expired_keys:
619
+ if not dry_run:
620
+ success = await self._execute_invalidation(strategy, key, kwargs)
621
+ if success:
622
+ invalidated_keys.append(key)
623
+
624
+ if strategy == InvalidationStrategy.CASCADE:
625
+ cascaded = await self._cascade_invalidate(key, kwargs)
626
+ cascade_keys.extend(cascaded)
627
+ else:
628
+ invalidated_keys.append(key)
629
+ if strategy == InvalidationStrategy.CASCADE:
630
+ cascaded = await self._get_cascade_keys(key, kwargs)
631
+ cascade_keys.extend(cascaded)
632
+
633
+ return {
634
+ "invalidated_count": len(invalidated_keys),
635
+ "cascade_count": len(cascade_keys),
636
+ "invalidated_keys": invalidated_keys,
637
+ "cascade_keys": cascade_keys,
638
+ }
639
+
640
    async def _execute_invalidation(
        self, strategy: InvalidationStrategy, key: str, kwargs: Dict[str, Any]
    ) -> bool:
        """Execute the actual invalidation based on strategy.

        Applies one strategy to one key on the configured backend:
        IMMEDIATE and CASCADE delete the entry; LAZY leaves a deletion
        marker; TAG_BASED is a no-op here (handled at scope level).

        Args:
            strategy: Which invalidation behavior to apply.
            key: Fully namespaced cache key.
            kwargs: Node inputs; reads ``backend`` and ``new_ttl``.

        Returns:
            True on success (including no-op strategies); False if any
            backend operation raised (the error is logged, not re-raised).
        """
        backend = kwargs.get("backend", "memory")

        try:
            if strategy == InvalidationStrategy.IMMEDIATE:
                # Delete immediately
                if backend == "redis" and self._redis_client:
                    await self._redis_client.delete(key)
                elif backend in ["memory", "hybrid"]:
                    self._memory_cache.pop(key, None)
                    # Hybrid keeps both stores in sync: delete from Redis too.
                    if backend == "hybrid" and self._redis_client:
                        await self._redis_client.delete(key)

            elif strategy == InvalidationStrategy.LAZY:
                # Mark for lazy deletion (could set a special flag)
                # NOTE(review): on Redis this sets a short-lived sidecar key
                # rather than touching the entry itself — consumers must
                # check for it; confirm readers honor the marker.
                if backend == "redis" and self._redis_client:
                    await self._redis_client.set(f"{key}:_lazy_delete", "1", ex=1)
                elif backend in ["memory", "hybrid"]:
                    if key in self._memory_cache:
                        # Replace with lazy deletion marker
                        self._memory_cache[key] = {
                            "_lazy_delete": True,
                            "_timestamp": time.time(),
                        }

            elif strategy == InvalidationStrategy.TTL_REFRESH:
                # Reset TTL to expire soon
                new_ttl = kwargs.get("new_ttl", 1)  # 1 second
                if backend == "redis" and self._redis_client:
                    await self._redis_client.expire(key, new_ttl)
                elif backend in ["memory", "hybrid"]:
                    # Memory entries only carry TTL metadata when stored as
                    # dicts; plain values are left untouched.
                    if key in self._memory_cache and isinstance(
                        self._memory_cache[key], dict
                    ):
                        self._memory_cache[key]["_cache_ttl"] = new_ttl
                        self._memory_cache[key]["_cache_timestamp"] = time.time()

            elif strategy == InvalidationStrategy.CASCADE:
                # CASCADE strategy should also immediately delete the key
                # The cascade dependencies will be handled separately
                if backend == "redis" and self._redis_client:
                    await self._redis_client.delete(key)
                elif backend in ["memory", "hybrid"]:
                    self._memory_cache.pop(key, None)
                    if backend == "hybrid" and self._redis_client:
                        await self._redis_client.delete(key)

            return True

        except Exception as e:
            # Best-effort: failures are reported via the False return value.
            self.logger.error(f"Failed to invalidate key '{key}': {str(e)}")
            return False
695
+
696
+ async def _cascade_invalidate(self, key: str, kwargs: Dict[str, Any]) -> List[str]:
697
+ """Perform cascade invalidation for dependent keys."""
698
+ cascade_patterns = kwargs.get("cascade_patterns", [])
699
+ cascaded_keys = []
700
+
701
+ # Invalidate based on cascade patterns
702
+ for pattern in cascade_patterns:
703
+ # Replace placeholder with actual key
704
+ resolved_pattern = pattern.replace("{key}", key)
705
+ matching_keys = await self._find_matching_keys(
706
+ resolved_pattern, kwargs.get("backend", "memory")
707
+ )
708
+
709
+ for match_key in matching_keys:
710
+ success = await self._execute_invalidation(
711
+ InvalidationStrategy.IMMEDIATE, match_key, kwargs
712
+ )
713
+ if success:
714
+ cascaded_keys.append(match_key)
715
+
716
+ # Invalidate dependencies from dependency graph
717
+ dependent_keys = self._dependency_graph.get(key, set())
718
+ for dep_key in dependent_keys:
719
+ success = await self._execute_invalidation(
720
+ InvalidationStrategy.IMMEDIATE, dep_key, kwargs
721
+ )
722
+ if success:
723
+ cascaded_keys.append(dep_key)
724
+
725
+ return cascaded_keys
726
+
727
+ async def _get_cascade_keys(self, key: str, kwargs: Dict[str, Any]) -> List[str]:
728
+ """Get keys that would be cascade invalidated (for dry run)."""
729
+ cascade_patterns = kwargs.get("cascade_patterns", [])
730
+ cascade_keys = []
731
+
732
+ for pattern in cascade_patterns:
733
+ resolved_pattern = pattern.replace("{key}", key)
734
+ matching_keys = await self._find_matching_keys(
735
+ resolved_pattern, kwargs.get("backend", "memory")
736
+ )
737
+ cascade_keys.extend(matching_keys)
738
+
739
+ # Add dependencies
740
+ cascade_keys.extend(self._dependency_graph.get(key, set()))
741
+
742
+ return cascade_keys
743
+
744
+ async def _find_matching_keys(self, pattern: str, backend: str) -> List[str]:
745
+ """Find cache keys matching a pattern."""
746
+ matching_keys = []
747
+
748
+ try:
749
+ if backend == "redis" and self._redis_client:
750
+ # Use Redis KEYS command (note: this can be expensive)
751
+ redis_keys = await self._redis_client.keys(pattern)
752
+ matching_keys.extend(
753
+ [k.decode() if isinstance(k, bytes) else k for k in redis_keys]
754
+ )
755
+
756
+ elif backend in ["memory", "hybrid"]:
757
+ # Use fnmatch for memory cache
758
+ for key in self._memory_cache.keys():
759
+ if fnmatch.fnmatch(key, pattern):
760
+ matching_keys.append(key)
761
+
762
+ # Also check Redis for hybrid
763
+ if backend == "hybrid" and self._redis_client:
764
+ redis_keys = await self._redis_client.keys(pattern)
765
+ for k in redis_keys:
766
+ decoded_key = k.decode() if isinstance(k, bytes) else k
767
+ if decoded_key not in matching_keys:
768
+ matching_keys.append(decoded_key)
769
+
770
+ except Exception as e:
771
+ self.logger.error(
772
+ f"Failed to find matching keys for pattern '{pattern}': {str(e)}"
773
+ )
774
+
775
+ return matching_keys
776
+
777
+ def _log_invalidation(
778
+ self,
779
+ strategy: InvalidationStrategy,
780
+ scope: InvalidationScope,
781
+ result: Dict[str, Any],
782
+ reason: str,
783
+ kwargs: Dict[str, Any],
784
+ ):
785
+ """Log invalidation operation."""
786
+ log_entry = {
787
+ "timestamp": datetime.now(UTC).isoformat(),
788
+ "strategy": strategy.value,
789
+ "scope": scope.value,
790
+ "reason": reason,
791
+ "invalidated_count": result.get("invalidated_count", 0),
792
+ "cascade_count": result.get("cascade_count", 0),
793
+ "source_key": kwargs.get("source_key"),
794
+ "pattern": kwargs.get("pattern"),
795
+ "tags": kwargs.get("tags"),
796
+ }
797
+
798
+ self._invalidation_log.append(log_entry)
799
+
800
+ # Keep only last 1000 log entries
801
+ if len(self._invalidation_log) > 1000:
802
+ self._invalidation_log = self._invalidation_log[-1000:]
803
+
804
+ def _update_stats(self, result: Dict[str, Any]):
805
+ """Update invalidation statistics."""
806
+ self._stats["invalidations"] += result.get("invalidated_count", 0)
807
+ self._stats["cascade_invalidations"] += result.get("cascade_count", 0)
808
+
809
+ if result.get("invalidated_count", 0) > 0:
810
+ self._stats["pattern_matches"] += 1
811
+
812
+ def add_tag(self, key: str, tag: str):
813
+ """Add a tag association for a cache key."""
814
+ if tag not in self._tag_registry:
815
+ self._tag_registry[tag] = set()
816
+ self._tag_registry[tag].add(key)
817
+
818
+ def remove_tag(self, key: str, tag: str):
819
+ """Remove a tag association for a cache key."""
820
+ if tag in self._tag_registry:
821
+ self._tag_registry[tag].discard(key)
822
+ if not self._tag_registry[tag]:
823
+ del self._tag_registry[tag]
824
+
825
+ def add_dependency(self, parent_key: str, dependent_key: str):
826
+ """Add a dependency relationship."""
827
+ if parent_key not in self._dependency_graph:
828
+ self._dependency_graph[parent_key] = set()
829
+ self._dependency_graph[parent_key].add(dependent_key)
830
+
831
+ def remove_dependency(self, parent_key: str, dependent_key: str):
832
+ """Remove a dependency relationship."""
833
+ if parent_key in self._dependency_graph:
834
+ self._dependency_graph[parent_key].discard(dependent_key)
835
+ if not self._dependency_graph[parent_key]:
836
+ del self._dependency_graph[parent_key]
837
+
838
+ def run(self, **kwargs) -> Dict[str, Any]:
839
+ """Synchronous wrapper for compatibility."""
840
+ try:
841
+ # Try to get current event loop
842
+ loop = asyncio.get_running_loop()
843
+ except RuntimeError:
844
+ # No event loop running, safe to use asyncio.run()
845
+ return asyncio.run(self.async_run(**kwargs))
846
+ else:
847
+ # Event loop is running, schedule the coroutine
848
+ import concurrent.futures
849
+ import threading
850
+
851
+ result_holder = {}
852
+ exception_holder = {}
853
+
854
+ def run_in_new_loop():
855
+ try:
856
+ # Create a new event loop for this thread
857
+ new_loop = asyncio.new_event_loop()
858
+ asyncio.set_event_loop(new_loop)
859
+ try:
860
+ result = new_loop.run_until_complete(self.async_run(**kwargs))
861
+ result_holder["result"] = result
862
+ finally:
863
+ new_loop.close()
864
+ except Exception as e:
865
+ exception_holder["error"] = e
866
+
867
+ thread = threading.Thread(target=run_in_new_loop)
868
+ thread.start()
869
+ thread.join()
870
+
871
+ if "error" in exception_holder:
872
+ raise exception_holder["error"]
873
+
874
+ return result_holder["result"]