kailash 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +293 -12
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.1.dist-info/RECORD +0 -136
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,470 @@
|
|
1
|
+
"""
|
2
|
+
Event system for Kailash Middleware
|
3
|
+
|
4
|
+
Provides standardized event types and emission for real-time communication
|
5
|
+
between agent systems and frontend UIs. Supports multiple transport layers
|
6
|
+
and efficient event batching.
|
7
|
+
"""
|
8
|
+
|
9
|
+
import asyncio
|
10
|
+
import json
|
11
|
+
import logging
|
12
|
+
import time
|
13
|
+
import uuid
|
14
|
+
from dataclasses import asdict, dataclass
|
15
|
+
from datetime import datetime, timezone
|
16
|
+
from enum import Enum
|
17
|
+
from typing import Any, AsyncGenerator, Callable, Dict, List, Optional, Union
|
18
|
+
|
19
|
+
logger = logging.getLogger(__name__)
|
20
|
+
|
21
|
+
|
22
|
+
class EventType(str, Enum):
    """Standard event types for agent-UI communication.

    Members subclass ``str``, so each member compares equal to — and
    JSON-serializes as — its dotted-string value, e.g.
    ``EventType.WORKFLOW_STARTED == "workflow.started"``.
    """

    # Workflow Events (workflow lifecycle)
    WORKFLOW_CREATED = "workflow.created"
    WORKFLOW_STARTED = "workflow.started"
    WORKFLOW_PROGRESS = "workflow.progress"
    WORKFLOW_COMPLETED = "workflow.completed"
    WORKFLOW_FAILED = "workflow.failed"
    WORKFLOW_CANCELLED = "workflow.cancelled"

    # Node Events (individual node execution)
    NODE_STARTED = "node.started"
    NODE_PROGRESS = "node.progress"
    NODE_COMPLETED = "node.completed"
    NODE_FAILED = "node.failed"
    NODE_SKIPPED = "node.skipped"

    # UI Events (user interaction required from the frontend)
    UI_INPUT_REQUIRED = "ui.input_required"
    UI_APPROVAL_REQUIRED = "ui.approval_required"
    UI_CHOICE_REQUIRED = "ui.choice_required"
    UI_CONFIRMATION_REQUIRED = "ui.confirmation_required"

    # System Events (system state)
    SYSTEM_STATUS = "system.status"
    SYSTEM_ERROR = "system.error"
    SYSTEM_WARNING = "system.warning"

    # Data Events (data flow)
    DATA_UPDATED = "data.updated"
    DATA_VALIDATED = "data.validated"
    DATA_ERROR = "data.error"
|
55
|
+
|
56
|
+
|
57
|
+
class EventPriority(str, Enum):
    """Event priority levels for processing order.

    Members subclass ``str`` and serialize as their plain string values.
    """

    CRITICAL = "critical"  # System errors, failures
    HIGH = "high"  # User input required, approvals
    NORMAL = "normal"  # Progress updates, completions
    LOW = "low"  # Status updates, informational
|
64
|
+
|
65
|
+
|
66
|
+
@dataclass
class BaseEvent:
    """Base event structure for all middleware events.

    ``id`` and ``timestamp`` are declared required but are auto-filled in
    ``__post_init__`` when passed falsy values, so callers may supply
    ``id=""``/``timestamp=None`` and rely on the generated defaults.
    """

    id: str  # unique event id; auto-generated (uuid4) when falsy
    type: EventType
    timestamp: datetime  # auto-filled with aware UTC "now" when falsy
    priority: EventPriority = EventPriority.NORMAL
    source: Optional[str] = None  # emitting component identifier
    target: Optional[str] = None  # intended recipient identifier
    session_id: Optional[str] = None
    user_id: Optional[str] = None
    metadata: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__

    def __post_init__(self):
        """Fill in defaults for id, timestamp and metadata."""
        if not self.id:
            self.id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.now(timezone.utc)
        if self.metadata is None:
            self.metadata = {}

    def to_dict(self) -> Dict[str, Any]:
        """Convert event to dictionary for serialization.

        The ``timestamp`` field is re-encoded as an ISO-8601 string so the
        result is JSON-serializable.
        """
        data = asdict(self)
        data["timestamp"] = self.timestamp.isoformat()
        return data

    def to_json(self) -> str:
        """Convert event to JSON string."""
        return json.dumps(self.to_dict())
|
97
|
+
|
98
|
+
|
99
|
+
@dataclass
class WorkflowEvent(BaseEvent):
    """Events related to workflow execution.

    Extends ``BaseEvent`` with workflow/run identifiers, optional progress
    information, a free-form ``data`` payload and an ``error`` message for
    failure events.
    """

    workflow_id: Optional[str] = None
    workflow_name: Optional[str] = None
    execution_id: Optional[str] = None  # identifies one run of the workflow
    progress_percent: Optional[float] = None  # presumably 0-100 — confirm with emitters
    current_node: Optional[str] = None  # node executing when a progress event fires
    data: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__
    error: Optional[str] = None  # populated for WORKFLOW_FAILED events

    def __post_init__(self):
        """Apply BaseEvent defaults, then normalize ``data`` to a dict."""
        super().__post_init__()
        if self.data is None:
            self.data = {}
|
115
|
+
|
116
|
+
|
117
|
+
@dataclass
class NodeEvent(BaseEvent):
    """Events related to individual node execution.

    Extends ``BaseEvent`` with node identity, its input/output payloads and
    timing/error details.
    """

    workflow_id: Optional[str] = None
    node_id: Optional[str] = None
    node_type: Optional[str] = None
    node_name: Optional[str] = None
    inputs: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__
    outputs: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__
    execution_time_ms: Optional[float] = None
    error: Optional[str] = None  # populated for NODE_FAILED events

    def __post_init__(self):
        """Apply BaseEvent defaults, then normalize inputs/outputs to dicts."""
        super().__post_init__()
        if self.inputs is None:
            self.inputs = {}
        if self.outputs is None:
            self.outputs = {}
|
136
|
+
|
137
|
+
|
138
|
+
@dataclass
class UIEvent(BaseEvent):
    """Events requiring user interface interaction.

    Extends ``BaseEvent`` with the prompt/options/schema a frontend needs to
    render an interaction and an optional response timeout.
    """

    interaction_type: Optional[str] = None  # input, approval, choice, confirmation
    prompt: Optional[str] = None  # human-readable question shown to the user
    options: Optional[List[Dict[str, Any]]] = None  # normalized to [] in __post_init__
    form_schema: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__
    timeout_ms: Optional[int] = None  # how long the UI should wait for a response
    response_required: bool = True
    context: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__

    def __post_init__(self):
        """Apply BaseEvent defaults, then normalize container fields."""
        super().__post_init__()
        if self.options is None:
            self.options = []
        if self.form_schema is None:
            self.form_schema = {}
        if self.context is None:
            self.context = {}
|
158
|
+
|
159
|
+
|
160
|
+
class EventFilter:
    """Matches events against optional subscription criteria.

    Every criterion left empty/None acts as a wildcard; an event matches
    only when it satisfies all configured criteria.
    """

    def __init__(
        self,
        event_types: List[EventType] = None,
        priorities: List[EventPriority] = None,
        source: str = None,
        target: str = None,
        session_id: str = None,
        user_id: str = None,
    ):
        # Empty/None collections mean "accept any type/priority".
        self.event_types = event_types or []
        self.priorities = priorities or []
        self.source = source
        self.target = target
        self.session_id = session_id
        self.user_id = user_id

    def matches(self, event: BaseEvent) -> bool:
        """Return True when *event* satisfies every configured criterion."""
        # Membership criteria: only enforced when a non-empty list was given.
        in_types = not self.event_types or event.type in self.event_types
        in_priorities = not self.priorities or event.priority in self.priorities
        if not (in_types and in_priorities):
            return False

        # Scalar criteria: a falsy expected value disables that check
        # (mirrors the original truthiness-based behavior).
        expected_vs_actual = (
            (self.source, event.source),
            (self.target, event.target),
            (self.session_id, event.session_id),
            (self.user_id, event.user_id),
        )
        return all(
            not expected or expected == actual
            for expected, actual in expected_vs_actual
        )
|
194
|
+
|
195
|
+
|
196
|
+
class EventBatch:
    """Accumulates events for bulk transmission.

    Deprecated: this should be replaced with BatchProcessorNode from the
    SDK for better performance; it is retained only for backward
    compatibility and will be removed in a future release.
    """

    def __init__(self, max_size: int = 100, max_age_ms: int = 1000):
        self.max_size = max_size
        self.max_age_ms = max_age_ms
        self.events: List[BaseEvent] = []
        self.created_at = time.time() * 1000  # epoch millis at batch creation

        # TODO: Replace with BatchProcessorNode for production use
        logger.warning(
            "EventBatch is deprecated. Use BatchProcessorNode from SDK for better performance."
        )

    def add_event(self, event: BaseEvent) -> bool:
        """Append *event*; return True once the batch is due for a flush."""
        self.events.append(event)

        # Flush when the batch is full or has aged past its deadline.
        is_full = len(self.events) >= self.max_size
        is_stale = (time.time() * 1000) - self.created_at >= self.max_age_ms
        return is_full or is_stale

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the batch, tagging it with a fresh batch_id."""
        serialized = [evt.to_dict() for evt in self.events]
        return {
            "batch_id": str(uuid.uuid4()),
            "event_count": len(serialized),
            "created_at": self.created_at,
            "events": serialized,
        }
|
237
|
+
|
238
|
+
|
239
|
+
class EventStream:
    """
    Manages event streaming with multiple subscribers and filtering.

    Subscribers register a (optionally filtered) sync or async callback and
    receive every matching event passed to :meth:`emit`.  A bounded in-memory
    history of recent events is kept for inspection via
    :meth:`get_recent_events`.

    Enhanced with SDK nodes for better performance:
    - Uses CacheNode for event history management (planned)
    - Uses AsyncQueueNode for event buffering (planned)
    - Uses MetricsCollectorNode for performance tracking (planned)
    """

    def __init__(self, enable_batching: bool = True, batch_size: int = 10):
        """Create an event stream.

        Args:
            enable_batching: Accepted for API compatibility.  Batching is not
                currently implemented -- events are always delivered one at a
                time.  (The previous ``_deliver_event`` branched on this flag,
                but both branches were identical.)
            batch_size: Reserved for future per-subscriber batching.
        """
        self.enable_batching = enable_batching
        self.batch_size = batch_size
        # subscriber_id -> {"filter", "callback", "active", "subscribed_at", "events_received"}
        self.subscribers: Dict[str, Dict] = {}
        self.event_history: List[BaseEvent] = []
        self.max_history = 1000  # history is trimmed to this many most-recent events
        self._lock = asyncio.Lock()

        # Performance counters (reported by get_stats()).
        self.events_emitted = 0
        self.events_delivered = 0
        self.start_time = time.time()

        # Initialize SDK nodes for optimization.
        self._init_sdk_nodes()

    def _init_sdk_nodes(self):
        """Initialize SDK nodes for performance optimization."""
        # Import SDK nodes for event management.
        from ...nodes.enterprise import BatchProcessorNode
        from ...nodes.transform import DataTransformer

        # Batch processor for efficient event batching.
        self.batch_processor = BatchProcessorNode(name="event_batch_processor")

        # Data transformer for event serialization.
        self.data_transformer = DataTransformer(name="event_transformer")

        # TODO: Add CacheNode when available for event history
        # TODO: Add AsyncQueueNode when available for event buffering
        # TODO: Add MetricsCollectorNode when available for performance tracking

    async def subscribe(
        self,
        subscriber_id: str,
        callback: Callable[[Union[BaseEvent, EventBatch]], None],
        event_filter: EventFilter = None,
    ) -> str:
        """Subscribe to the event stream with optional filtering.

        Args:
            subscriber_id: Caller-chosen identifier; re-using an id replaces
                the existing subscription.
            callback: Sync or async callable invoked with each matching event.
            event_filter: Criteria limiting which events are delivered;
                ``None`` means "all events".

        Returns:
            The registered ``subscriber_id``.
        """
        async with self._lock:
            self.subscribers[subscriber_id] = {
                "filter": event_filter or EventFilter(),
                "callback": callback,
                "active": True,
                "subscribed_at": time.time(),
                "events_received": 0,
            }

        logger.info(f"Subscriber {subscriber_id} subscribed to event stream")
        return subscriber_id

    async def unsubscribe(self, subscriber_id: str):
        """Remove a subscriber from the stream.  Unknown ids are ignored."""
        async with self._lock:
            if subscriber_id in self.subscribers:
                # The entry is removed outright; the previous implementation
                # also set "active" to False first, which was a dead write.
                del self.subscribers[subscriber_id]
                logger.info(f"Subscriber {subscriber_id} unsubscribed")

    async def emit(self, event: BaseEvent):
        """Emit event to all matching subscribers.

        Appends the event to the bounded history, then delivers it to every
        active subscriber whose filter matches.  A subscriber whose callback
        raises is logged and deactivated rather than aborting delivery to the
        remaining subscribers.

        NOTE(review): delivery runs while ``self._lock`` is held, so a
        callback that calls ``subscribe``/``unsubscribe``/``emit`` on this
        stream will deadlock -- confirm callers avoid re-entrancy.
        """
        async with self._lock:
            # Record in bounded history (drop oldest when over max_history).
            self.event_history.append(event)
            if len(self.event_history) > self.max_history:
                self.event_history.pop(0)

            self.events_emitted += 1

            # Snapshot the items so deactivating a subscriber mid-loop is safe.
            for subscriber_id, subscriber in list(self.subscribers.items()):
                if not subscriber["active"]:
                    continue

                if subscriber["filter"].matches(event):
                    try:
                        await self._deliver_event(subscriber, event)
                        subscriber["events_received"] += 1
                        self.events_delivered += 1
                    except Exception as e:
                        logger.error(f"Error delivering event to {subscriber_id}: {e}")
                        subscriber["active"] = False

    async def _deliver_event(self, subscriber: Dict, event: BaseEvent):
        """Invoke the subscriber's callback (sync or async) with *event*.

        The previous implementation branched on ``self.enable_batching`` here,
        but both branches were byte-identical; the dead branch has been
        removed.  Real per-subscriber batching (accumulating into an
        ``EventBatch``) remains future work.
        """
        callback = subscriber["callback"]
        if asyncio.iscoroutinefunction(callback):
            await callback(event)
        else:
            callback(event)

    async def emit_workflow_started(
        self,
        workflow_id: str,
        workflow_name: str,
        execution_id: str = None,
        user_id: str = None,
        session_id: str = None,
    ):
        """Convenience method for workflow started events.

        Generates an ``execution_id`` when none is supplied.
        """
        event = WorkflowEvent(
            id=str(uuid.uuid4()),
            type=EventType.WORKFLOW_STARTED,
            timestamp=datetime.now(timezone.utc),
            priority=EventPriority.NORMAL,
            workflow_id=workflow_id,
            workflow_name=workflow_name,
            execution_id=execution_id or str(uuid.uuid4()),
            user_id=user_id,
            session_id=session_id,
        )
        await self.emit(event)

    async def emit_workflow_progress(
        self,
        workflow_id: str,
        execution_id: str,
        progress_percent: float,
        current_node: str = None,
        data: Dict[str, Any] = None,
    ):
        """Convenience method for workflow progress events."""
        event = WorkflowEvent(
            id=str(uuid.uuid4()),
            type=EventType.WORKFLOW_PROGRESS,
            timestamp=datetime.now(timezone.utc),
            priority=EventPriority.NORMAL,
            workflow_id=workflow_id,
            execution_id=execution_id,
            progress_percent=progress_percent,
            current_node=current_node,
            data=data or {},
        )
        await self.emit(event)

    async def emit_node_completed(
        self,
        workflow_id: str,
        node_id: str,
        node_type: str,
        outputs: Dict[str, Any] = None,
        execution_time_ms: float = None,
    ):
        """Convenience method for node completion events."""
        event = NodeEvent(
            id=str(uuid.uuid4()),
            type=EventType.NODE_COMPLETED,
            timestamp=datetime.now(timezone.utc),
            priority=EventPriority.NORMAL,
            workflow_id=workflow_id,
            node_id=node_id,
            node_type=node_type,
            outputs=outputs or {},
            execution_time_ms=execution_time_ms,
        )
        await self.emit(event)

    async def emit_ui_input_required(
        self,
        prompt: str,
        form_schema: Dict[str, Any],
        session_id: str,
        user_id: str = None,
        timeout_ms: int = 30000,
    ):
        """Convenience method for UI input required events (HIGH priority)."""
        event = UIEvent(
            id=str(uuid.uuid4()),
            type=EventType.UI_INPUT_REQUIRED,
            timestamp=datetime.now(timezone.utc),
            priority=EventPriority.HIGH,
            interaction_type="input",
            prompt=prompt,
            form_schema=form_schema,
            timeout_ms=timeout_ms,
            session_id=session_id,
            user_id=user_id,
        )
        await self.emit(event)

    def get_stats(self) -> Dict[str, Any]:
        """Get event stream statistics (counters, rates, subscriber counts)."""
        uptime = time.time() - self.start_time
        return {
            "uptime_seconds": uptime,
            "events_emitted": self.events_emitted,
            "events_delivered": self.events_delivered,
            "active_subscribers": len(
                [s for s in self.subscribers.values() if s["active"]]
            ),
            "total_subscribers": len(self.subscribers),
            "events_per_second": self.events_emitted / uptime if uptime > 0 else 0,
            "delivery_rate": (
                self.events_delivered / self.events_emitted
                if self.events_emitted > 0
                else 0
            ),
            "history_size": len(self.event_history),
        }

    async def get_recent_events(
        self, count: int = 100, event_filter: EventFilter = None
    ) -> List[BaseEvent]:
        """Get recent events with optional filtering.

        Args:
            count: Maximum number of most-recent events to return; a falsy
                value returns the full history.
            event_filter: Optional filter applied after slicing.

        Returns:
            A new list -- callers may mutate it without corrupting the
            stream's internal history (the previous implementation leaked the
            internal list when ``count`` was falsy).
        """
        async with self._lock:
            events = (
                self.event_history[-count:] if count else list(self.event_history)
            )

            if event_filter:
                events = [e for e in events if event_filter.matches(e)]

            return events
|