kailash-0.6.5-py3-none-any.whl → kailash-0.7.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. kailash/__init__.py +35 -4
  2. kailash/adapters/__init__.py +5 -0
  3. kailash/adapters/mcp_platform_adapter.py +273 -0
  4. kailash/channels/__init__.py +21 -0
  5. kailash/channels/api_channel.py +409 -0
  6. kailash/channels/base.py +271 -0
  7. kailash/channels/cli_channel.py +661 -0
  8. kailash/channels/event_router.py +496 -0
  9. kailash/channels/mcp_channel.py +648 -0
  10. kailash/channels/session.py +423 -0
  11. kailash/mcp_server/discovery.py +1 -1
  12. kailash/middleware/core/agent_ui.py +5 -0
  13. kailash/middleware/mcp/enhanced_server.py +22 -16
  14. kailash/nexus/__init__.py +21 -0
  15. kailash/nexus/factory.py +413 -0
  16. kailash/nexus/gateway.py +545 -0
  17. kailash/nodes/__init__.py +2 -0
  18. kailash/nodes/ai/iterative_llm_agent.py +988 -17
  19. kailash/nodes/ai/llm_agent.py +29 -9
  20. kailash/nodes/api/__init__.py +2 -2
  21. kailash/nodes/api/monitoring.py +1 -1
  22. kailash/nodes/base_async.py +54 -14
  23. kailash/nodes/code/async_python.py +1 -1
  24. kailash/nodes/data/bulk_operations.py +939 -0
  25. kailash/nodes/data/query_builder.py +373 -0
  26. kailash/nodes/data/query_cache.py +512 -0
  27. kailash/nodes/monitoring/__init__.py +10 -0
  28. kailash/nodes/monitoring/deadlock_detector.py +964 -0
  29. kailash/nodes/monitoring/performance_anomaly.py +1078 -0
  30. kailash/nodes/monitoring/race_condition_detector.py +1151 -0
  31. kailash/nodes/monitoring/transaction_metrics.py +790 -0
  32. kailash/nodes/monitoring/transaction_monitor.py +931 -0
  33. kailash/nodes/system/__init__.py +17 -0
  34. kailash/nodes/system/command_parser.py +820 -0
  35. kailash/nodes/transaction/__init__.py +48 -0
  36. kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
  37. kailash/nodes/transaction/saga_coordinator.py +652 -0
  38. kailash/nodes/transaction/saga_state_storage.py +411 -0
  39. kailash/nodes/transaction/saga_step.py +467 -0
  40. kailash/nodes/transaction/transaction_context.py +756 -0
  41. kailash/nodes/transaction/two_phase_commit.py +978 -0
  42. kailash/nodes/transform/processors.py +17 -1
  43. kailash/nodes/validation/__init__.py +21 -0
  44. kailash/nodes/validation/test_executor.py +532 -0
  45. kailash/nodes/validation/validation_nodes.py +447 -0
  46. kailash/resources/factory.py +1 -1
  47. kailash/runtime/async_local.py +84 -21
  48. kailash/runtime/local.py +21 -2
  49. kailash/runtime/parameter_injector.py +187 -31
  50. kailash/security.py +16 -1
  51. kailash/servers/__init__.py +32 -0
  52. kailash/servers/durable_workflow_server.py +430 -0
  53. kailash/servers/enterprise_workflow_server.py +466 -0
  54. kailash/servers/gateway.py +183 -0
  55. kailash/servers/workflow_server.py +290 -0
  56. kailash/utils/data_validation.py +192 -0
  57. kailash/workflow/builder.py +291 -12
  58. kailash/workflow/validation.py +144 -8
  59. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/METADATA +1 -1
  60. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/RECORD +64 -26
  61. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/WHEEL +0 -0
  62. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/entry_points.txt +0 -0
  63. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/licenses/LICENSE +0 -0
  64. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,411 @@
+"""Saga State Storage implementations for persistence and recovery.
+
+Provides different storage backends for saga state persistence including
+in-memory, Redis, and database storage options.
+"""
+
+import json
+import logging
+from abc import ABC, abstractmethod
+from datetime import UTC, datetime
+from typing import Any, Dict, List, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class SagaStateStorage(ABC):
+    """Abstract base class for saga state storage implementations."""
+
+    @abstractmethod
+    async def save_state(self, saga_id: str, state_data: Dict[str, Any]) -> bool:
+        """Save saga state."""
+        pass
+
+    @abstractmethod
+    async def load_state(self, saga_id: str) -> Optional[Dict[str, Any]]:
+        """Load saga state."""
+        pass
+
+    @abstractmethod
+    async def delete_state(self, saga_id: str) -> bool:
+        """Delete saga state."""
+        pass
+
+    @abstractmethod
+    async def list_sagas(
+        self, filter_criteria: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        """List saga IDs matching criteria."""
+        pass
+
+
+class InMemoryStateStorage(SagaStateStorage):
+    """In-memory saga state storage for development and testing."""
+
+    def __init__(self):
+        self._storage: Dict[str, Dict[str, Any]] = {}
+
+    async def save_state(self, saga_id: str, state_data: Dict[str, Any]) -> bool:
+        """Save saga state to memory."""
+        try:
+            self._storage[saga_id] = state_data
+            return True
+        except Exception as e:
+            logger.error(f"Failed to save state for saga {saga_id}: {e}")
+            return False
+
+    async def load_state(self, saga_id: str) -> Optional[Dict[str, Any]]:
+        """Load saga state from memory."""
+        return self._storage.get(saga_id)
+
+    async def delete_state(self, saga_id: str) -> bool:
+        """Delete saga state from memory."""
+        if saga_id in self._storage:
+            del self._storage[saga_id]
+            return True
+        return False
+
+    async def list_sagas(
+        self, filter_criteria: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        """List saga IDs in memory."""
+        if not filter_criteria:
+            return list(self._storage.keys())
+
+        # Simple filtering
+        result = []
+        for saga_id, state in self._storage.items():
+            match = True
+            for key, value in filter_criteria.items():
+                if state.get(key) != value:
+                    match = False
+                    break
+            if match:
+                result.append(saga_id)
+        return result
+
+
+class RedisStateStorage(SagaStateStorage):
+    """Redis-based saga state storage for distributed systems."""
+
+    def __init__(self, redis_client: Any, key_prefix: str = "saga:state:"):
+        """Initialize Redis storage.
+
+        Args:
+            redis_client: Redis client instance (can be sync or async)
+            key_prefix: Prefix for Redis keys
+        """
+        self.redis = redis_client
+        self.key_prefix = key_prefix
+        # Detect if client is async by checking for execute method or common async methods
+        self.is_async_client = hasattr(redis_client, "execute") or hasattr(
+            redis_client, "aset"
+        )
+
+    async def save_state(self, saga_id: str, state_data: Dict[str, Any]) -> bool:
+        """Save saga state to Redis."""
+        try:
+            key = f"{self.key_prefix}{saga_id}"
+
+            # Add metadata
+            state_data["_last_updated"] = datetime.now(UTC).isoformat()
+
+            # Serialize to JSON
+            json_data = json.dumps(state_data)
+
+            if self.is_async_client:
+                # Use async Redis client
+                if state_data.get("state") in ["completed", "compensated"]:
+                    await self.redis.setex(key, 604800, json_data)  # 7 days
+                else:
+                    await self.redis.set(key, json_data)
+
+                # Add to saga index
+                await self.redis.sadd(f"{self.key_prefix}index", saga_id)
+
+                # Add to state-specific index
+                state = state_data.get("state", "unknown")
+                await self.redis.sadd(f"{self.key_prefix}state:{state}", saga_id)
+            else:
+                # Use sync Redis client
+                if state_data.get("state") in ["completed", "compensated"]:
+                    self.redis.setex(key, 604800, json_data)  # 7 days
+                else:
+                    self.redis.set(key, json_data)
+
+                # Add to saga index
+                self.redis.sadd(f"{self.key_prefix}index", saga_id)
+
+                # Add to state-specific index
+                state = state_data.get("state", "unknown")
+                self.redis.sadd(f"{self.key_prefix}state:{state}", saga_id)
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to save state to Redis for saga {saga_id}: {e}")
+            return False
+
+    async def load_state(self, saga_id: str) -> Optional[Dict[str, Any]]:
+        """Load saga state from Redis."""
+        try:
+            key = f"{self.key_prefix}{saga_id}"
+
+            if self.is_async_client:
+                json_data = await self.redis.get(key)
+            else:
+                json_data = self.redis.get(key)
+
+            if json_data:
+                return json.loads(json_data)
+            return None
+
+        except Exception as e:
+            logger.error(f"Failed to load state from Redis for saga {saga_id}: {e}")
+            return None
+
+    async def delete_state(self, saga_id: str) -> bool:
+        """Delete saga state from Redis."""
+        try:
+            key = f"{self.key_prefix}{saga_id}"
+
+            # Get current state for index cleanup
+            state_data = await self.load_state(saga_id)
+
+            # Delete the state
+            if self.is_async_client:
+                deleted = await self.redis.delete(key) > 0
+            else:
+                deleted = self.redis.delete(key) > 0
+
+            if deleted:
+                # Remove from indexes
+                if self.is_async_client:
+                    await self.redis.srem(f"{self.key_prefix}index", saga_id)
+
+                    if state_data:
+                        state = state_data.get("state", "unknown")
+                        await self.redis.srem(
+                            f"{self.key_prefix}state:{state}", saga_id
+                        )
+                else:
+                    self.redis.srem(f"{self.key_prefix}index", saga_id)
+
+                    if state_data:
+                        state = state_data.get("state", "unknown")
+                        self.redis.srem(f"{self.key_prefix}state:{state}", saga_id)
+
+            return deleted
+
+        except Exception as e:
+            logger.error(f"Failed to delete state from Redis for saga {saga_id}: {e}")
+            return False
+
+    async def list_sagas(
+        self, filter_criteria: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        """List saga IDs from Redis."""
+        try:
+            if not filter_criteria:
+                # Return all saga IDs
+                if self.is_async_client:
+                    saga_ids = await self.redis.smembers(f"{self.key_prefix}index")
+                else:
+                    saga_ids = self.redis.smembers(f"{self.key_prefix}index")
+                return list(saga_ids)
+
+            # Filter by state if specified
+            if "state" in filter_criteria:
+                state = filter_criteria["state"]
+                if self.is_async_client:
+                    saga_ids = await self.redis.smembers(
+                        f"{self.key_prefix}state:{state}"
+                    )
+                else:
+                    saga_ids = self.redis.smembers(f"{self.key_prefix}state:{state}")
+                return list(saga_ids)
+
+            # For other criteria, load and filter
+            if self.is_async_client:
+                all_saga_ids = await self.redis.smembers(f"{self.key_prefix}index")
+            else:
+                all_saga_ids = self.redis.smembers(f"{self.key_prefix}index")
+
+            result = []
+
+            for saga_id in all_saga_ids:
+                state_data = await self.load_state(saga_id)
+                if state_data:
+                    match = True
+                    for key, value in filter_criteria.items():
+                        if state_data.get(key) != value:
+                            match = False
+                            break
+                    if match:
+                        result.append(saga_id)
+
+            return result
+
+        except Exception as e:
+            logger.error(f"Failed to list sagas from Redis: {e}")
+            return []
+
+
+class DatabaseStateStorage(SagaStateStorage):
+    """Database-based saga state storage for persistent storage."""
+
+    def __init__(self, db_pool: Any, table_name: str = "saga_states"):
+        """Initialize database storage.
+
+        Args:
+            db_pool: Database connection pool
+            table_name: Name of the table for saga states
+        """
+        self.db_pool = db_pool
+        self.table_name = table_name
+        self._ensure_table_exists()
+
+    def _ensure_table_exists(self):
+        """Ensure the saga states table exists."""
+        # Table creation is handled externally in tests
+        # In production, this would use proper database migrations
+        pass
+
+    async def save_state(self, saga_id: str, state_data: Dict[str, Any]) -> bool:
+        """Save saga state to database."""
+        try:
+            async with self.db_pool.acquire() as conn:
+                # PostgreSQL example with JSONB
+                query = f"""
+                    INSERT INTO {self.table_name}
+                    (saga_id, saga_name, state, state_data, updated_at)
+                    VALUES ($1, $2, $3, $4, $5)
+                    ON CONFLICT (saga_id)
+                    DO UPDATE SET
+                        saga_name = EXCLUDED.saga_name,
+                        state = EXCLUDED.state,
+                        state_data = EXCLUDED.state_data,
+                        updated_at = EXCLUDED.updated_at
+                """
+
+                await conn.execute(
+                    query,
+                    saga_id,
+                    state_data.get("saga_name", ""),
+                    state_data.get("state", ""),
+                    json.dumps(state_data),
+                    datetime.now(UTC),
+                )
+
+                return True
+
+        except Exception as e:
+            logger.error(f"Failed to save state to database for saga {saga_id}: {e}")
+            return False
+
+    async def load_state(self, saga_id: str) -> Optional[Dict[str, Any]]:
+        """Load saga state from database."""
+        try:
+            async with self.db_pool.acquire() as conn:
+                query = f"""
+                    SELECT state_data
+                    FROM {self.table_name}
+                    WHERE saga_id = $1
+                """
+
+                row = await conn.fetchrow(query, saga_id)
+
+                if row:
+                    return json.loads(row["state_data"])
+                return None
+
+        except Exception as e:
+            logger.error(f"Failed to load state from database for saga {saga_id}: {e}")
+            return None
+
+    async def delete_state(self, saga_id: str) -> bool:
+        """Delete saga state from database."""
+        try:
+            async with self.db_pool.acquire() as conn:
+                query = f"DELETE FROM {self.table_name} WHERE saga_id = $1"
+                result = await conn.execute(query, saga_id)
+
+                # Check if any rows were deleted
+                return result.split()[-1] != "0"
+
+        except Exception as e:
+            logger.error(
+                f"Failed to delete state from database for saga {saga_id}: {e}"
+            )
+            return False
+
+    async def list_sagas(
+        self, filter_criteria: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        """List saga IDs from database."""
+        try:
+            async with self.db_pool.acquire() as conn:
+                if not filter_criteria:
+                    query = f"SELECT saga_id FROM {self.table_name}"
+                    rows = await conn.fetch(query)
+                else:
+                    # Build WHERE clause
+                    conditions = []
+                    params = []
+                    param_count = 0
+
+                    for key, value in filter_criteria.items():
+                        param_count += 1
+                        if key in ["state", "saga_name"]:
+                            conditions.append(f"{key} = ${param_count}")
+                            params.append(value)
+                        else:
+                            # For other fields, use JSONB query
+                            conditions.append(f"state_data->'{key}' = ${param_count}")
+                            params.append(json.dumps(value))
+
+                    where_clause = " AND ".join(conditions)
+                    query = (
+                        f"SELECT saga_id FROM {self.table_name} WHERE {where_clause}"
+                    )
+                    rows = await conn.fetch(query, *params)
+
+                return [row["saga_id"] for row in rows]
+
+        except Exception as e:
+            logger.error(f"Failed to list sagas from database: {e}")
+            return []
+
+
+class StorageFactory:
+    """Factory for creating saga state storage instances."""
+
+    @staticmethod
+    def create_storage(storage_type: str, **kwargs) -> SagaStateStorage:
+        """Create a storage instance based on type.
+
+        Args:
+            storage_type: Type of storage ("memory", "redis", "database")
+            **kwargs: Additional arguments for storage initialization
+
+        Returns:
+            SagaStateStorage instance
+        """
+        if storage_type == "memory":
+            return InMemoryStateStorage()
+        elif storage_type == "redis":
+            redis_client = kwargs.get("redis_client")
+            if not redis_client:
+                raise ValueError("redis_client is required for Redis storage")
+            return RedisStateStorage(
+                redis_client, kwargs.get("key_prefix", "saga:state:")
+            )
+        elif storage_type == "database":
+            db_pool = kwargs.get("db_pool")
+            if not db_pool:
+                raise ValueError("db_pool is required for database storage")
+            return DatabaseStateStorage(
+                db_pool, kwargs.get("table_name", "saga_states")
+            )
+        else:
+            raise ValueError(f"Unknown storage type: {storage_type}")
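
For orientation, a minimal usage sketch of the storage factory introduced in this file. It exercises only the in-memory backend and the create_storage, save_state, load_state, and list_sagas signatures shown in the diff above; the example saga ID, state payload, and asyncio wrapper are illustrative assumptions rather than documented kailash API, and the "redis"/"database" types would additionally need a redis_client or db_pool keyword argument as enforced by StorageFactory.

    import asyncio

    from kailash.nodes.transaction.saga_state_storage import StorageFactory

    async def main():
        # "memory" needs no extra kwargs; "redis" requires redis_client,
        # "database" requires db_pool (see StorageFactory.create_storage above).
        storage = StorageFactory.create_storage("memory")

        # Persist a saga's state, then read it back.
        await storage.save_state(
            "saga-123", {"saga_name": "order_flow", "state": "running"}
        )
        print(await storage.load_state("saga-123"))

        # List only sagas whose stored fields match the filter criteria.
        print(await storage.list_sagas({"state": "running"}))

    asyncio.run(main())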