kailash 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. kailash/__init__.py +1 -1
  2. kailash/client/__init__.py +12 -0
  3. kailash/client/enhanced_client.py +306 -0
  4. kailash/core/actors/__init__.py +16 -0
  5. kailash/core/actors/connection_actor.py +566 -0
  6. kailash/core/actors/supervisor.py +364 -0
  7. kailash/edge/__init__.py +16 -0
  8. kailash/edge/compliance.py +834 -0
  9. kailash/edge/discovery.py +659 -0
  10. kailash/edge/location.py +582 -0
  11. kailash/gateway/__init__.py +33 -0
  12. kailash/gateway/api.py +289 -0
  13. kailash/gateway/enhanced_gateway.py +357 -0
  14. kailash/gateway/resource_resolver.py +217 -0
  15. kailash/gateway/security.py +227 -0
  16. kailash/middleware/auth/models.py +2 -2
  17. kailash/middleware/database/base_models.py +1 -7
  18. kailash/middleware/gateway/__init__.py +22 -0
  19. kailash/middleware/gateway/checkpoint_manager.py +398 -0
  20. kailash/middleware/gateway/deduplicator.py +382 -0
  21. kailash/middleware/gateway/durable_gateway.py +417 -0
  22. kailash/middleware/gateway/durable_request.py +498 -0
  23. kailash/middleware/gateway/event_store.py +459 -0
  24. kailash/nodes/admin/permission_check.py +817 -33
  25. kailash/nodes/admin/role_management.py +1242 -108
  26. kailash/nodes/admin/schema_manager.py +438 -0
  27. kailash/nodes/admin/user_management.py +1124 -1582
  28. kailash/nodes/code/__init__.py +8 -1
  29. kailash/nodes/code/async_python.py +1035 -0
  30. kailash/nodes/code/python.py +1 -0
  31. kailash/nodes/data/async_sql.py +9 -3
  32. kailash/nodes/data/sql.py +20 -11
  33. kailash/nodes/data/workflow_connection_pool.py +643 -0
  34. kailash/nodes/rag/__init__.py +1 -4
  35. kailash/resources/__init__.py +40 -0
  36. kailash/resources/factory.py +533 -0
  37. kailash/resources/health.py +319 -0
  38. kailash/resources/reference.py +288 -0
  39. kailash/resources/registry.py +392 -0
  40. kailash/runtime/async_local.py +711 -302
  41. kailash/testing/__init__.py +34 -0
  42. kailash/testing/async_test_case.py +353 -0
  43. kailash/testing/async_utils.py +345 -0
  44. kailash/testing/fixtures.py +458 -0
  45. kailash/testing/mock_registry.py +495 -0
  46. kailash/workflow/__init__.py +8 -0
  47. kailash/workflow/async_builder.py +621 -0
  48. kailash/workflow/async_patterns.py +766 -0
  49. kailash/workflow/cyclic_runner.py +107 -16
  50. kailash/workflow/graph.py +7 -2
  51. kailash/workflow/resilience.py +11 -1
  52. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/METADATA +7 -4
  53. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/RECORD +57 -22
  54. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/WHEEL +0 -0
  55. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/entry_points.txt +0 -0
  56. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/licenses/LICENSE +0 -0
  57. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,458 @@
1
+ """Common test fixtures for async workflows."""
2
+
3
+ import asyncio
4
+ import json
5
+ import logging
6
+ import os
7
+ import shutil
8
+ import tempfile
9
+ from contextlib import asynccontextmanager
10
+ from dataclasses import dataclass
11
+ from typing import Any, Dict, List, Optional, Union
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+ # Optional dependencies
16
+ try:
17
+ import aiofiles
18
+
19
+ HAS_AIOFILES = True
20
+ except ImportError:
21
+ HAS_AIOFILES = False
22
+ logger.warning("aiofiles not available for async file operations")
23
+
24
+ # Check if Docker is available
25
+ try:
26
+ import docker
27
+
28
+ # Verify it's the correct docker-py client
29
+ if hasattr(docker, "from_env"):
30
+ HAS_DOCKER = True
31
+ else:
32
+ HAS_DOCKER = False
33
+ docker = None
34
+ logger.warning("docker module found but not docker-py client")
35
+ except ImportError:
36
+ HAS_DOCKER = False
37
+ docker = None
38
+ logger.warning("Docker not available for test fixtures")
39
+
40
+
41
@dataclass
class DatabaseFixture:
    """Handle to a throwaway test database and its backing Docker container.

    Produced by ``AsyncWorkflowFixtures.create_test_database``; call
    :meth:`cleanup` when the test is done to tear the container down.
    """

    container: Any  # docker-py container object (None when not containerized)
    connection_string: str
    host: str
    port: int
    database: str
    user: str
    password: str

    async def cleanup(self):
        """Stop and remove the backing container, best-effort."""
        if not (self.container and HAS_DOCKER):
            return
        try:
            self.container.stop()
            self.container.remove()
        except Exception as e:
            # Best-effort teardown: log and continue so test cleanup
            # never masks the real test failure.
            logger.error(f"Failed to cleanup database container: {e}")
61
+
62
+
63
@dataclass
class TestHttpServer:
    """Handle to a spawned HTTP server process used during a test.

    Call :meth:`cleanup` to terminate the server once the test finishes.
    """

    host: str
    port: int
    url: str
    process: Any  # spawned server process (supports terminate()/await wait())

    async def cleanup(self):
        """Terminate the server process and wait for it to exit, best-effort."""
        if not self.process:
            return
        try:
            self.process.terminate()
            await self.process.wait()
        except Exception as e:
            # Teardown should never raise into the test body.
            logger.error(f"Failed to cleanup HTTP server: {e}")
80
+
81
+
82
class AsyncWorkflowFixtures:
    """Common test fixtures for async workflows.

    All members are static; the class is a namespace for fixture factories:
    temp directories, Dockerized databases, test files, and mock HTTP/cache
    objects.
    """

    @staticmethod
    @asynccontextmanager
    async def temp_directory():
        """Yield a fresh temporary directory path, removed on exit.

        Removal uses ``ignore_errors=True`` so a busy/locked file inside the
        directory never fails the test teardown.
        """
        temp_dir = tempfile.mkdtemp()
        try:
            yield temp_dir
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)

    @staticmethod
    async def create_test_database(
        engine: str = "postgresql",
        tag: str = "13",
        database: str = "test",
        user: str = "test",
        password: str = "test",
        port: Optional[int] = None,
    ) -> DatabaseFixture:
        """Start a disposable database in Docker and return a fixture handle.

        Args:
            engine: ``"postgresql"`` or ``"mysql"``.
            tag: Docker image tag for the engine.
            database: Database name to create.
            user: Database user.
            password: Database password (also used as MySQL root password).
            port: Host port to bind; when None, Docker picks a free port.

        Returns:
            A :class:`DatabaseFixture`; caller is responsible for ``cleanup()``.

        Raises:
            RuntimeError: If docker-py (or asyncpg, for PostgreSQL) is missing.
            TimeoutError: If PostgreSQL does not accept connections in ~30s.
            ValueError: For an unsupported ``engine``.
        """
        if not HAS_DOCKER:
            raise RuntimeError("Docker not available for test database")

        client = docker.from_env()

        if engine == "postgresql":
            # Start PostgreSQL container
            container = client.containers.run(
                f"postgres:{tag}",
                environment={
                    "POSTGRES_DB": database,
                    "POSTGRES_USER": user,
                    "POSTGRES_PASSWORD": password,
                },
                # {"5432/tcp": None} asks Docker for a random free host port.
                ports={"5432/tcp": port} if port else {"5432/tcp": None},
                detach=True,
                # remove=False so we can inspect/stop it explicitly in cleanup.
                remove=False,
            )

            # Get assigned port (reload() refreshes container.ports after start)
            container.reload()
            actual_port = int(container.ports["5432/tcp"][0]["HostPort"])

            # Wait for database to be ready; asyncpg is imported lazily so the
            # module works without it when PostgreSQL fixtures aren't used.
            try:
                import asyncpg
            except ImportError:
                # Tear the container down before surfacing the error.
                container.stop()
                container.remove()
                raise RuntimeError("asyncpg required for PostgreSQL testing")

            conn_string = (
                f"postgresql://{user}:{password}@localhost:{actual_port}/{database}"
            )

            # Wait up to 30 seconds for database to be ready
            for i in range(30):
                try:
                    conn = await asyncpg.connect(conn_string)
                    await conn.close()
                    break
                except Exception:
                    if i == 29:  # Last attempt
                        container.stop()
                        container.remove()
                        raise TimeoutError("Database did not start in time")
                    await asyncio.sleep(1)

            return DatabaseFixture(
                container=container,
                connection_string=conn_string,
                host="localhost",
                port=actual_port,
                database=database,
                user=user,
                password=password,
            )

        elif engine == "mysql":
            # Start MySQL container
            container = client.containers.run(
                f"mysql:{tag}",
                environment={
                    "MYSQL_ROOT_PASSWORD": password,
                    "MYSQL_DATABASE": database,
                    "MYSQL_USER": user,
                    "MYSQL_PASSWORD": password,
                },
                ports={"3306/tcp": port} if port else {"3306/tcp": None},
                detach=True,
                remove=False,
            )

            # Get assigned port
            container.reload()
            actual_port = int(container.ports["3306/tcp"][0]["HostPort"])

            conn_string = (
                f"mysql://{user}:{password}@localhost:{actual_port}/{database}"
            )

            # Wait for MySQL to be ready (takes longer than PostgreSQL)
            # NOTE(review): unlike the PostgreSQL path this is a fixed sleep,
            # not a readiness probe — may be flaky on slow hosts; confirm.
            await asyncio.sleep(10)

            return DatabaseFixture(
                container=container,
                connection_string=conn_string,
                host="localhost",
                port=actual_port,
                database=database,
                user=user,
                password=password,
            )

        else:
            raise ValueError(f"Unsupported database engine: {engine}")

    @staticmethod
    async def create_test_files(directory: str, files: Dict[str, Union[str, Dict]]):
        """Create test files under ``directory``.

        Args:
            directory: Root directory for the files.
            files: Mapping of relative path -> content; dict values are
                serialized as pretty-printed JSON, strings are written as-is.

        Intermediate directories are created as needed. Uses aiofiles when
        available, otherwise falls back to blocking file I/O.
        """
        for path, content in files.items():
            full_path = os.path.join(directory, path)
            os.makedirs(os.path.dirname(full_path), exist_ok=True)

            if HAS_AIOFILES:
                async with aiofiles.open(full_path, "w") as f:
                    if isinstance(content, dict):
                        await f.write(json.dumps(content, indent=2))
                    else:
                        await f.write(content)
            else:
                # Fallback to sync file operations
                with open(full_path, "w") as f:
                    if isinstance(content, dict):
                        f.write(json.dumps(content, indent=2))
                    else:
                        f.write(content)

    @staticmethod
    def create_mock_http_client() -> "MockHttpClient":
        """Create mock HTTP client for testing."""
        return MockHttpClient()

    @staticmethod
    async def create_test_cache() -> "MockCache":
        """Create mock cache for testing.

        Async only for API symmetry with the other factories; it performs
        no awaiting itself.
        """
        return MockCache()

    @staticmethod
    @asynccontextmanager
    async def mock_time(start_time: Optional[float] = None, speed: float = 1.0):
        """Mock time for testing time-dependent code.

        Patches ``time.time`` so it reports ``start_time`` plus real elapsed
        time scaled by ``speed``, and yields the replacement clock callable.
        Only ``time.time`` is patched; the asyncio event-loop clock is not.
        """
        import time as time_module

        if start_time is None:
            start_time = time_module.time()

        real_time = time_module.time
        mock_start = start_time
        real_start = real_time()

        # Shadows the enclosing static method's name, but only locally.
        def mock_time():
            elapsed = (real_time() - real_start) * speed
            return mock_start + elapsed

        # Store original
        original_time = time_module.time
        # NOTE(review): captured but never used or restored — loop time is
        # intentionally left unpatched (see note below); dead variable.
        original_loop_time = asyncio.get_event_loop().time

        # Patch time
        time_module.time = mock_time
        # Note: Patching event loop time is tricky and may not work in all cases

        try:
            yield mock_time
        finally:
            # Restore
            time_module.time = original_time
263
+
264
+
265
@dataclass
class HttpCall:
    """Immutable-ish record of one HTTP request made through the mock client."""

    method: str  # upper-cased HTTP verb (normalized by MockHttpClient.request)
    url: str  # exact URL the caller requested
    kwargs: dict  # extra keyword arguments passed to the request
272
+
273
+
274
class MockResponse:
    """Canned HTTP response returned by :class:`MockHttpClient`.

    Mirrors the small subset of an aiohttp-style response used in tests:
    async ``json()``/``text()`` accessors plus ``raise_for_status()``.
    """

    def __init__(self, data: Any, status: int = 200, headers: Dict = None):
        self._data = data  # raw payload: either a str or a JSON-able object
        self.status = status
        self.headers = headers or {}

    async def json(self):
        """Return the payload decoded as JSON (parsing it when stored as str)."""
        return json.loads(self._data) if isinstance(self._data, str) else self._data

    async def text(self):
        """Return the payload as text (serializing non-str payloads to JSON)."""
        return self._data if isinstance(self._data, str) else json.dumps(self._data)

    def raise_for_status(self):
        """Raise a generic Exception for any 4xx/5xx status code."""
        if self.status >= 400:
            raise Exception(f"HTTP {self.status}")
298
+
299
+
300
class MockHttpClient:
    """Mock HTTP client for testing.

    Canned responses are registered per ``"METHOD:url"`` key; every request is
    recorded as an :class:`HttpCall` so tests can assert on traffic afterwards.
    Unmatched requests get the configurable default response (404 by default).
    """

    def __init__(self):
        self._responses: Dict[str, Any] = {}
        self._calls: List[HttpCall] = []
        self._default_status = 404
        self._default_response = {"error": "Not found"}

    def add_response(
        self,
        method: str,
        url: str,
        response: Any,
        status: int = 200,
        headers: Dict[str, str] = None,
    ):
        """Register a canned response for one method/url pair."""
        self._responses[f"{method.upper()}:{url}"] = {
            "response": response,
            "status": status,
            "headers": headers or {},
        }

    def add_responses(self, responses: Dict[str, Any]):
        """Register several responses; keys without a ``METHOD:`` prefix default to GET."""
        for key, value in responses.items():
            method, sep, url = key.partition(":")
            if sep:
                self.add_response(method, url, value)
            else:
                # Default to GET
                self.add_response("GET", key, value)

    def set_default_response(self, response: Any, status: int = 200):
        """Set default response for unmatched requests."""
        self._default_response = response
        self._default_status = status

    async def request(self, method: str, url: str, **kwargs) -> MockResponse:
        """Record the call, then replay the matching canned (or default) response."""
        verb = method.upper()
        self._calls.append(HttpCall(verb, url, kwargs))

        entry = self._responses.get(f"{verb}:{url}")
        if entry is None:
            # Default response
            return MockResponse(self._default_response, self._default_status)
        return MockResponse(entry["response"], entry["status"], entry["headers"])

    # Convenience methods
    async def get(self, url: str, **kwargs):
        return await self.request("GET", url, **kwargs)

    async def post(self, url: str, **kwargs):
        return await self.request("POST", url, **kwargs)

    async def put(self, url: str, **kwargs):
        return await self.request("PUT", url, **kwargs)

    async def delete(self, url: str, **kwargs):
        return await self.request("DELETE", url, **kwargs)

    def get_calls(self, method: str = None, url: str = None) -> List[HttpCall]:
        """Return recorded calls, optionally filtered by method and/or url."""
        selected = self._calls
        if method:
            verb = method.upper()
            selected = [c for c in selected if c.method == verb]
        if url:
            selected = [c for c in selected if c.url == url]
        return selected

    def assert_called(self, method: str, url: str, times: int = None):
        """Assert the endpoint was called; with ``times``, exactly that many times."""
        calls = self.get_calls(method, url)
        if times is None:
            assert len(calls) > 0, f"{method} {url} was not called"
        else:
            assert (
                len(calls) == times
            ), f"{method} {url} called {len(calls)} times, expected {times}"

    def reset(self):
        """Forget all recorded calls (registered responses are kept)."""
        self._calls.clear()
392
+
393
+
394
class MockCache:
    """In-memory async cache double with Redis-flavored TTL support.

    Records every call (method name, args, kwargs) so tests can assert on
    cache traffic. Expiry is checked lazily on ``get`` using the event-loop
    clock.
    """

    def __init__(self):
        self._data: Dict[str, Any] = {}
        self._expiry: Dict[str, float] = {}
        self._calls: List[tuple[str, tuple, dict]] = []

    async def get(self, key: str) -> Optional[Any]:
        """Return the cached value, evicting it first if its TTL has passed."""
        self._calls.append(("get", (key,), {}))

        deadline = self._expiry.get(key)
        if deadline is not None and asyncio.get_event_loop().time() > deadline:
            # Lazily evict the expired entry.
            del self._data[key]
            del self._expiry[key]
            return None

        return self._data.get(key)

    async def set(self, key: str, value: Any, ttl: int = None):
        """Store a value; an optional ``ttl`` (seconds) schedules expiry."""
        self._calls.append(("set", (key, value), {"ttl": ttl}))
        self._data[key] = value
        if ttl:
            self._expiry[key] = asyncio.get_event_loop().time() + ttl

    async def setex(self, key: str, ttl: int, value: Any):
        """Set with expiration (Redis style)."""
        await self.set(key, value, ttl)

    async def delete(self, key: str):
        """Remove a key (and any pending expiry); missing keys are a no-op."""
        self._calls.append(("delete", (key,), {}))
        self._data.pop(key, None)
        self._expiry.pop(key, None)

    async def expire(self, key: str, ttl: int):
        """Attach a TTL to an existing key; ignored when the key is absent."""
        self._calls.append(("expire", (key, ttl), {}))
        if key in self._data:
            self._expiry[key] = asyncio.get_event_loop().time() + ttl

    async def clear(self):
        """Drop every entry and all pending expiries."""
        self._calls.append(("clear", (), {}))
        self._data.clear()
        self._expiry.clear()

    def get_calls(self, method: str = None) -> List[tuple[str, tuple, dict]]:
        """Return recorded calls, optionally filtered by method name."""
        if not method:
            return self._calls.copy()
        return [c for c in self._calls if c[0] == method]

    def assert_called(self, method: str, times: int = None):
        """Assert a cache method was called; with ``times``, exactly that often."""
        calls = self.get_calls(method)
        if times is None:
            assert len(calls) > 0, f"Cache.{method} was not called"
        else:
            assert (
                len(calls) == times
            ), f"Cache.{method} called {len(calls)} times, expected {times}"