kailash 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. kailash/__init__.py +1 -1
  2. kailash/client/__init__.py +12 -0
  3. kailash/client/enhanced_client.py +306 -0
  4. kailash/core/actors/__init__.py +16 -0
  5. kailash/core/actors/connection_actor.py +566 -0
  6. kailash/core/actors/supervisor.py +364 -0
  7. kailash/edge/__init__.py +16 -0
  8. kailash/edge/compliance.py +834 -0
  9. kailash/edge/discovery.py +659 -0
  10. kailash/edge/location.py +582 -0
  11. kailash/gateway/__init__.py +33 -0
  12. kailash/gateway/api.py +289 -0
  13. kailash/gateway/enhanced_gateway.py +357 -0
  14. kailash/gateway/resource_resolver.py +217 -0
  15. kailash/gateway/security.py +227 -0
  16. kailash/middleware/auth/models.py +2 -2
  17. kailash/middleware/database/base_models.py +1 -7
  18. kailash/middleware/gateway/__init__.py +22 -0
  19. kailash/middleware/gateway/checkpoint_manager.py +398 -0
  20. kailash/middleware/gateway/deduplicator.py +382 -0
  21. kailash/middleware/gateway/durable_gateway.py +417 -0
  22. kailash/middleware/gateway/durable_request.py +498 -0
  23. kailash/middleware/gateway/event_store.py +459 -0
  24. kailash/nodes/admin/permission_check.py +817 -33
  25. kailash/nodes/admin/role_management.py +1242 -108
  26. kailash/nodes/admin/schema_manager.py +438 -0
  27. kailash/nodes/admin/user_management.py +1124 -1582
  28. kailash/nodes/code/__init__.py +8 -1
  29. kailash/nodes/code/async_python.py +1035 -0
  30. kailash/nodes/code/python.py +1 -0
  31. kailash/nodes/data/async_sql.py +9 -3
  32. kailash/nodes/data/sql.py +20 -11
  33. kailash/nodes/data/workflow_connection_pool.py +643 -0
  34. kailash/nodes/rag/__init__.py +1 -4
  35. kailash/resources/__init__.py +40 -0
  36. kailash/resources/factory.py +533 -0
  37. kailash/resources/health.py +319 -0
  38. kailash/resources/reference.py +288 -0
  39. kailash/resources/registry.py +392 -0
  40. kailash/runtime/async_local.py +711 -302
  41. kailash/testing/__init__.py +34 -0
  42. kailash/testing/async_test_case.py +353 -0
  43. kailash/testing/async_utils.py +345 -0
  44. kailash/testing/fixtures.py +458 -0
  45. kailash/testing/mock_registry.py +495 -0
  46. kailash/workflow/__init__.py +8 -0
  47. kailash/workflow/async_builder.py +621 -0
  48. kailash/workflow/async_patterns.py +766 -0
  49. kailash/workflow/cyclic_runner.py +107 -16
  50. kailash/workflow/graph.py +7 -2
  51. kailash/workflow/resilience.py +11 -1
  52. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/METADATA +7 -4
  53. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/RECORD +57 -22
  54. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/WHEEL +0 -0
  55. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/entry_points.txt +0 -0
  56. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/licenses/LICENSE +0 -0
  57. {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,459 @@
1
+ """Event store for request audit trail and event sourcing.
2
+
3
+ This module provides:
4
+ - Append-only event log
5
+ - Event replay capability
6
+ - Event projections
7
+ - Audit trail for compliance
8
+ """
9
+
10
+ import asyncio
11
+ import json
12
+ import logging
13
+ import time
14
+ import uuid
15
+ from dataclasses import dataclass, field
16
+ from datetime import datetime
17
+ from enum import Enum
18
+ from typing import Any, AsyncIterator, Callable, Dict, List, Optional
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+
23
class EventType(Enum):
    """Standard event types for request lifecycle."""

    # Request lifecycle: creation through terminal/resumption states.
    REQUEST_CREATED = "request.created"
    REQUEST_VALIDATED = "request.validated"
    REQUEST_STARTED = "request.started"
    REQUEST_CHECKPOINTED = "request.checkpointed"
    REQUEST_COMPLETED = "request.completed"
    REQUEST_FAILED = "request.failed"
    REQUEST_CANCELLED = "request.cancelled"
    REQUEST_RESUMED = "request.resumed"
    REQUEST_RETRIED = "request.retried"

    # Workflow execution events, including per-node progress.
    WORKFLOW_CREATED = "workflow.created"
    WORKFLOW_STARTED = "workflow.started"
    WORKFLOW_NODE_STARTED = "workflow.node.started"
    WORKFLOW_NODE_COMPLETED = "workflow.node.completed"
    WORKFLOW_NODE_FAILED = "workflow.node.failed"
    WORKFLOW_COMPLETED = "workflow.completed"
    WORKFLOW_FAILED = "workflow.failed"

    # Request deduplication outcomes.
    DEDUPLICATION_HIT = "deduplication.hit"
    DEDUPLICATION_MISS = "deduplication.miss"

    # Error tracking events.
    ERROR_OCCURRED = "error.occurred"
    ERROR_HANDLED = "error.handled"
49
+
50
+
51
@dataclass
class RequestEvent:
    """Immutable event in the request lifecycle.

    Events are identified by a random ``event_id`` and ordered within a
    request by ``sequence_number``. ``to_dict``/``from_dict`` provide a
    lossless round trip for storage backends.
    """

    event_id: str = field(default_factory=lambda: f"evt_{uuid.uuid4().hex[:12]}")
    event_type: EventType = EventType.REQUEST_CREATED
    request_id: str = ""
    # NOTE(review): naive UTC timestamp (datetime.utcnow) — matches the
    # rest of this module, but is timezone-unaware.
    timestamp: datetime = field(default_factory=datetime.utcnow)
    sequence_number: int = 0
    data: Dict[str, Any] = field(default_factory=dict)
    metadata: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this event into a storage-friendly dictionary."""
        payload: Dict[str, Any] = {
            "event_id": self.event_id,
            "event_type": self.event_type.value,
            "request_id": self.request_id,
            "timestamp": self.timestamp.isoformat(),
            "sequence_number": self.sequence_number,
            "data": self.data,
            "metadata": self.metadata,
        }
        return payload

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "RequestEvent":
        """Rebuild an event from the dictionary produced by ``to_dict``."""
        kwargs = dict(
            event_id=data["event_id"],
            event_type=EventType(data["event_type"]),
            request_id=data["request_id"],
            timestamp=datetime.fromisoformat(data["timestamp"]),
            sequence_number=data["sequence_number"],
            data=data["data"],
            # Older records may lack metadata; default to empty.
            metadata=data.get("metadata", {}),
        )
        return cls(**kwargs)
87
+
88
+
89
class EventStore:
    """Append-only event store with replay capability.

    Events are buffered in memory and moved to the in-memory stream (and
    the optional storage backend) either when the buffer reaches
    ``batch_size`` or on each tick of the background flush loop.

    NOTE: the constructor starts a background task with
    ``asyncio.create_task`` and must therefore be called from within a
    running event loop. Call :meth:`close` to stop the loop and flush
    remaining events.
    """

    def __init__(
        self,
        storage_backend: Optional[Any] = None,
        batch_size: int = 100,
        flush_interval_seconds: float = 1.0,
    ):
        """Initialize event store.

        Args:
            storage_backend: Optional durable backend. It is expected to
                expose async ``append(key, events)`` and ``get(key)``
                methods (see ``_store_events`` / ``_load_from_storage``).
            batch_size: Buffer size that triggers an eager flush.
            flush_interval_seconds: Period of the background flush loop.
        """
        self.storage_backend = storage_backend
        self.batch_size = batch_size
        self.flush_interval = flush_interval_seconds

        # In-memory buffer of not-yet-flushed events.
        self._buffer: List[RequestEvent] = []
        self._buffer_lock = asyncio.Lock()

        # Flushed events, in flush order.
        self._event_stream: List[RequestEvent] = []
        self._stream_lock = asyncio.Lock()

        # Projections: name -> current state, name -> handler.
        self._projections: Dict[str, Any] = {}
        self._projection_handlers: Dict[str, Callable] = {}

        # Next sequence number per request_id.
        self._sequences: Dict[str, int] = {}

        # Metrics
        self.event_count = 0
        self.flush_count = 0

        # Start flush task
        self._flush_task = asyncio.create_task(self._flush_loop())

    async def append(
        self,
        event_type: EventType,
        request_id: str,
        data: Dict[str, Any],
        metadata: Optional[Dict[str, Any]] = None,
    ) -> RequestEvent:
        """Append an event to the store and apply projections to it.

        Returns:
            The created :class:`RequestEvent` (with its sequence number).
        """
        async with self._buffer_lock:
            # Get next sequence number for this request.
            sequence = self._sequences.get(request_id, 0)
            self._sequences[request_id] = sequence + 1

            # Create event
            event = RequestEvent(
                event_type=event_type,
                request_id=request_id,
                sequence_number=sequence,
                data=data,
                metadata=metadata or {},
            )

            # Add to buffer
            self._buffer.append(event)
            self.event_count += 1

            # Decide inside the lock, flush outside it.
            should_flush = len(self._buffer) >= self.batch_size

        # BUGFIX: flush must happen after releasing _buffer_lock.
        # _flush_buffer() acquires the same asyncio.Lock, which is not
        # reentrant, so awaiting it while holding the lock would deadlock
        # the first time the buffer reached batch_size.
        if should_flush:
            await self._flush_buffer()

        # Apply projections
        await self._apply_projections(event)

        logger.debug(
            f"Appended event {event.event_type.value} for request {request_id} "
            f"(seq: {sequence})"
        )

        return event

    async def get_events(
        self,
        request_id: str,
        start_sequence: int = 0,
        end_sequence: Optional[int] = None,
        event_types: Optional[List[EventType]] = None,
    ) -> List[RequestEvent]:
        """Get events for a request, filtered by sequence range and type.

        Falls back to the storage backend only when the in-memory stream
        has no matching events.
        """
        # Ensure buffer is flushed so recent events are visible.
        await self._flush_buffer()

        events = []

        # Get from in-memory stream
        async with self._stream_lock:
            for event in self._event_stream:
                if event.request_id != request_id:
                    continue

                if event.sequence_number < start_sequence:
                    continue

                if end_sequence is not None and event.sequence_number > end_sequence:
                    continue

                if event_types and event.event_type not in event_types:
                    continue

                events.append(event)

        # Get from storage if available
        if self.storage_backend and not events:
            stored_events = await self._load_from_storage(
                request_id,
                start_sequence,
                end_sequence,
            )
            events.extend(stored_events)

        # Sort by sequence
        events.sort(key=lambda e: e.sequence_number)

        return events

    async def replay(
        self,
        request_id: str,
        handler: Callable[[RequestEvent], Any],
        start_sequence: int = 0,
        end_sequence: Optional[int] = None,
    ) -> None:
        """Replay events for a request through *handler* in sequence order.

        The handler may be sync or async; async handlers are awaited.
        """
        events = await self.get_events(
            request_id,
            start_sequence,
            end_sequence,
        )

        for event in events:
            if asyncio.iscoroutinefunction(handler):
                await handler(event)
            else:
                handler(event)

    async def stream_events(
        self,
        request_id: Optional[str] = None,
        event_types: Optional[List[EventType]] = None,
        follow: bool = False,
    ) -> AsyncIterator[RequestEvent]:
        """Stream events as they occur.

        With ``follow=False`` this yields currently flushed events once;
        with ``follow=True`` it polls for new events indefinitely.
        NOTE(review): only flushed events are visible here — events still
        in the buffer appear after the next flush.
        """
        last_index = 0

        while True:
            # Get new events since the last poll.
            async with self._stream_lock:
                events = self._event_stream[last_index:]
                last_index = len(self._event_stream)

            # Filter and yield
            for event in events:
                if request_id and event.request_id != request_id:
                    continue

                if event_types and event.event_type not in event_types:
                    continue

                yield event

            if not follow:
                break

            # Wait for new events
            await asyncio.sleep(0.1)

    def register_projection(
        self,
        name: str,
        handler: Callable[[RequestEvent, Dict[str, Any]], Any],
        initial_state: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Register a projection handler.

        The handler receives ``(event, state)`` and may return a new
        state; returning ``None`` keeps the current state object.
        """
        self._projection_handlers[name] = handler
        self._projections[name] = initial_state or {}

        logger.info(f"Registered projection: {name}")

    def get_projection(self, name: str) -> Optional[Dict[str, Any]]:
        """Get current projection state, or None if not registered."""
        return self._projections.get(name)

    async def _apply_projections(self, event: RequestEvent) -> None:
        """Apply all registered projections to an event.

        A failing projection is logged and skipped; it never blocks the
        append path or other projections.
        """
        for name, handler in self._projection_handlers.items():
            try:
                state = self._projections[name]

                if asyncio.iscoroutinefunction(handler):
                    new_state = await handler(event, state)
                else:
                    new_state = handler(event, state)

                if new_state is not None:
                    self._projections[name] = new_state

            except Exception as e:
                logger.error(
                    f"Projection {name} failed for event {event.event_id}: {e}"
                )

    async def _flush_buffer(self) -> None:
        """Flush the event buffer into the stream (and backend, if any)."""
        async with self._buffer_lock:
            if not self._buffer:
                return

            events_to_flush = self._buffer.copy()
            self._buffer.clear()

        # Add to in-memory stream
        async with self._stream_lock:
            self._event_stream.extend(events_to_flush)

        # Store if backend available
        if self.storage_backend:
            await self._store_events(events_to_flush)

        self.flush_count += 1
        logger.debug(f"Flushed {len(events_to_flush)} events")

    async def _flush_loop(self) -> None:
        """Periodically flush the buffer until cancelled."""
        while True:
            try:
                await asyncio.sleep(self.flush_interval)
                await self._flush_buffer()
            except asyncio.CancelledError:
                # Final flush before shutdown
                await self._flush_buffer()
                break
            except Exception as e:
                logger.error(f"Flush error: {e}")

    async def _store_events(self, events: List[RequestEvent]) -> None:
        """Store events in the backend, grouped per request.

        Storage failures are logged, not raised — persistence is
        best-effort on this path.
        """
        try:
            # Group by request ID for efficient storage
            by_request = {}
            for event in events:
                if event.request_id not in by_request:
                    by_request[event.request_id] = []
                by_request[event.request_id].append(event.to_dict())

            # Store each request's events
            for request_id, request_events in by_request.items():
                key = f"events:{request_id}"
                await self.storage_backend.append(key, request_events)

        except Exception as e:
            logger.error(f"Failed to store events: {e}")

    async def _load_from_storage(
        self,
        request_id: str,
        start_sequence: int,
        end_sequence: Optional[int],
    ) -> List[RequestEvent]:
        """Load events from storage; returns [] on error or no data."""
        try:
            key = f"events:{request_id}"
            stored = await self.storage_backend.get(key)

            if not stored:
                return []

            events = []
            for event_dict in stored:
                event = RequestEvent.from_dict(event_dict)

                if event.sequence_number < start_sequence:
                    continue

                if end_sequence is not None and event.sequence_number > end_sequence:
                    continue

                events.append(event)

            return events

        except Exception as e:
            logger.error(f"Failed to load events for {request_id}: {e}")
            return []

    def get_stats(self) -> Dict[str, Any]:
        """Get event store statistics."""
        return {
            "event_count": self.event_count,
            "flush_count": self.flush_count,
            "buffer_size": len(self._buffer),
            "stream_size": len(self._event_stream),
            "active_projections": list(self._projection_handlers.keys()),
            "request_count": len(self._sequences),
        }

    async def close(self) -> None:
        """Close event store; the cancelled flush loop does a final flush."""
        self._flush_task.cancel()
        try:
            await self._flush_task
        except asyncio.CancelledError:
            pass
396
+
397
+
398
+ # Example projection handlers
399
def request_state_projection(
    event: RequestEvent, state: Dict[str, Any]
) -> Dict[str, Any]:
    """Track current state of all requests.

    Maintains, per request_id: the current lifecycle state, creation and
    last-update timestamps, and a count of events seen.
    """
    entry = state.setdefault(
        event.request_id,
        {
            "current_state": "initialized",
            "created_at": event.timestamp,
            "updated_at": event.timestamp,
            "event_count": 0,
        },
    )

    entry["event_count"] += 1
    entry["updated_at"] = event.timestamp

    # Only these event types move the request to a new lifecycle state;
    # all other events leave current_state untouched.
    transitions = {
        EventType.REQUEST_STARTED: "executing",
        EventType.REQUEST_COMPLETED: "completed",
        EventType.REQUEST_FAILED: "failed",
        EventType.REQUEST_CANCELLED: "cancelled",
    }
    next_state = transitions.get(event.event_type)
    if next_state is not None:
        entry["current_state"] = next_state

    return state
428
+
429
+
430
def performance_metrics_projection(
    event: RequestEvent, state: Dict[str, Any]
) -> Dict[str, Any]:
    """Track performance metrics across all requests.

    Counts requests by outcome, accumulates reported durations of
    completed requests, and counts checkpoints.
    """
    # Lazily initialize the metrics state on first event.
    if "total_requests" not in state:
        state.update(
            {
                "total_requests": 0,
                "completed_requests": 0,
                "failed_requests": 0,
                "cancelled_requests": 0,
                "total_duration_ms": 0,
                "checkpoint_count": 0,
            }
        )

    # BUGFIX: previously total_requests was incremented on every event,
    # so it counted events rather than requests (inconsistent with the
    # per-outcome counters below). Count each request exactly once, at
    # creation.
    if event.event_type == EventType.REQUEST_CREATED:
        state["total_requests"] += 1
    elif event.event_type == EventType.REQUEST_COMPLETED:
        state["completed_requests"] += 1
        # Duration is optional in the event payload.
        if "duration_ms" in event.data:
            state["total_duration_ms"] += event.data["duration_ms"]
    elif event.event_type == EventType.REQUEST_FAILED:
        state["failed_requests"] += 1
    elif event.event_type == EventType.REQUEST_CANCELLED:
        state["cancelled_requests"] += 1
    elif event.event_type == EventType.REQUEST_CHECKPOINTED:
        state["checkpoint_count"] += 1

    return state