kailash 0.3.2__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +27 -3
- kailash/nodes/admin/__init__.py +42 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1523 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +248 -40
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +436 -5
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/ai/vision_utils.py +148 -0
- kailash/nodes/alerts/__init__.py +26 -0
- kailash/nodes/alerts/base.py +234 -0
- kailash/nodes/alerts/discord.py +499 -0
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +283 -10
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +103 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +133 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/security.py +1 -1
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/METADATA +256 -20
- kailash-0.4.1.dist-info/RECORD +227 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.2.dist-info/RECORD +0 -136
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/WHEEL +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/top_level.txt +0 -0
kailash/middleware/communication/realtime.py (new file)
@@ -0,0 +1,710 @@
"""
Real-time Communication Middleware for Kailash

Provides WebSocket, Server-Sent Events (SSE), and Webhook support for
real-time agent-frontend communication with sub-200ms latency.
"""

import asyncio
import json
import logging
import time
import uuid
from datetime import datetime, timezone
from typing import Any, AsyncGenerator, Dict, List, Optional, Set, Union
from urllib.parse import parse_qs

from fastapi import Request, Response, WebSocket, WebSocketDisconnect
from fastapi.responses import StreamingResponse

from ...nodes.api import HTTPRequestNode
from ...nodes.security import CredentialManagerNode
from ...nodes.transform import DataTransformer
from ..core.agent_ui import AgentUIMiddleware
from .events import BaseEvent, EventFilter, EventPriority, EventStream, EventType

logger = logging.getLogger(__name__)


class ConnectionManager:
    """Manages WebSocket connections with authentication and filtering."""

    def __init__(self):
        self.connections: Dict[str, Dict] = {}  # connection_id -> connection_info
        self.session_connections: Dict[str, Set[str]] = (
            {}
        )  # session_id -> set of connection_ids
        self.user_connections: Dict[str, Set[str]] = (
            {}
        )  # user_id -> set of connection_ids

    async def connect(
        self,
        websocket: WebSocket,
        connection_id: str,
        session_id: str = None,
        user_id: str = None,
        event_filter: EventFilter = None,
    ):
        """Accept and register a WebSocket connection."""
        await websocket.accept()

        self.connections[connection_id] = {
            "websocket": websocket,
            "session_id": session_id,
            "user_id": user_id,
            "event_filter": event_filter or EventFilter(),
            "connected_at": datetime.now(timezone.utc),
            "messages_sent": 0,
            "last_ping": time.time(),
        }

        # Track by session and user
        if session_id:
            if session_id not in self.session_connections:
                self.session_connections[session_id] = set()
            self.session_connections[session_id].add(connection_id)

        if user_id:
            if user_id not in self.user_connections:
                self.user_connections[user_id] = set()
            self.user_connections[user_id].add(connection_id)

        logger.info(
            f"WebSocket connection {connection_id} established for session {session_id}"
        )

    async def disconnect(self, connection_id: str):
        """Disconnect and cleanup a WebSocket connection."""
        if connection_id not in self.connections:
            return

        connection = self.connections[connection_id]
        session_id = connection["session_id"]
        user_id = connection["user_id"]

        # Remove from tracking
        if session_id and session_id in self.session_connections:
            self.session_connections[session_id].discard(connection_id)
            if not self.session_connections[session_id]:
                del self.session_connections[session_id]

        if user_id and user_id in self.user_connections:
            self.user_connections[user_id].discard(connection_id)
            if not self.user_connections[user_id]:
                del self.user_connections[user_id]

        # Close WebSocket
        try:
            await connection["websocket"].close()
        except:
            pass

        del self.connections[connection_id]
        logger.info(f"WebSocket connection {connection_id} disconnected")

    async def send_to_connection(self, connection_id: str, message: Dict[str, Any]):
        """Send message to a specific connection."""
        if connection_id not in self.connections:
            return False

        connection = self.connections[connection_id]
        try:
            await connection["websocket"].send_text(json.dumps(message))
            connection["messages_sent"] += 1
            return True
        except Exception as e:
            logger.error(f"Error sending to connection {connection_id}: {e}")
            await self.disconnect(connection_id)
            return False

    async def send_to_session(self, session_id: str, message: Dict[str, Any]):
        """Send message to all connections in a session."""
        if session_id not in self.session_connections:
            return 0

        sent_count = 0
        for connection_id in list(self.session_connections[session_id]):
            if await self.send_to_connection(connection_id, message):
                sent_count += 1

        return sent_count

    async def send_to_user(self, user_id: str, message: Dict[str, Any]):
        """Send message to all connections for a user."""
        if user_id not in self.user_connections:
            return 0

        sent_count = 0
        for connection_id in list(self.user_connections[user_id]):
            if await self.send_to_connection(connection_id, message):
                sent_count += 1

        return sent_count

    async def broadcast(
        self, message: Dict[str, Any], event_filter: EventFilter = None
    ):
        """Broadcast message to all matching connections."""
        sent_count = 0
        for connection_id, connection in list(self.connections.items()):
            # Apply filtering if provided
            if event_filter:
                if (
                    event_filter.session_id
                    and connection["session_id"] != event_filter.session_id
                ):
                    continue
                if (
                    event_filter.user_id
                    and connection["user_id"] != event_filter.user_id
                ):
                    continue

            if await self.send_to_connection(connection_id, message):
                sent_count += 1

        return sent_count

    def get_stats(self) -> Dict[str, Any]:
        """Get connection statistics."""
        total_messages = sum(
            conn["messages_sent"] for conn in self.connections.values()
        )
        return {
            "total_connections": len(self.connections),
            "active_sessions": len(self.session_connections),
            "active_users": len(self.user_connections),
            "total_messages_sent": total_messages,
        }
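
ConnectionManager keeps per-session and per-user indices alongside the flat connection table. A minimal sketch of driving it directly (the manager instance, session id, and message shape are assumptions, not from the package; it reuses the module's own imports):

async def notify_session(manager: ConnectionManager, session_id: str) -> int:
    """Push a one-off notification to every connection of one session."""
    note = {
        "event": {"type": "notification", "data": {"text": "Job finished"}},
        "timestamp": time.time(),
    }
    # send_to_session uses the session index; broadcast(note,
    # EventFilter(session_id=session_id)) would reach the same sockets by
    # filtering the full connection table instead.
    return await manager.send_to_session(session_id, note)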

class SSEManager:
    """Manages Server-Sent Events streams."""

    def __init__(self):
        self.streams: Dict[str, Dict] = {}  # stream_id -> stream_info
        self.session_streams: Dict[str, Set[str]] = (
            {}
        )  # session_id -> set of stream_ids

    def create_stream(
        self,
        stream_id: str,
        session_id: str = None,
        user_id: str = None,
        event_filter: EventFilter = None,
    ) -> AsyncGenerator[str, None]:
        """Create a new SSE stream."""

        async def event_generator():
            # Store stream info
            self.streams[stream_id] = {
                "session_id": session_id,
                "user_id": user_id,
                "event_filter": event_filter or EventFilter(),
                "created_at": datetime.now(timezone.utc),
                "events_sent": 0,
                "active": True,
            }

            # Track by session
            if session_id:
                if session_id not in self.session_streams:
                    self.session_streams[session_id] = set()
                self.session_streams[session_id].add(stream_id)

            try:
                # Send initial connection event
                yield f"data: {json.dumps({'type': 'connected', 'stream_id': stream_id})}\n\n"

                # Keep connection alive and wait for events
                while self.streams.get(stream_id, {}).get("active", False):
                    # Send heartbeat every 30 seconds
                    yield f"data: {json.dumps({'type': 'heartbeat', 'timestamp': time.time()})}\n\n"
                    await asyncio.sleep(30)

            except Exception as e:
                logger.error(f"SSE stream {stream_id} error: {e}")
            finally:
                # Cleanup
                await self.close_stream(stream_id)

        return event_generator()

    async def send_to_stream(self, stream_id: str, message: Dict[str, Any]):
        """Send message to a specific SSE stream."""
        if stream_id not in self.streams:
            return False

        # In a real implementation, this would queue the message for the generator
        # For now, we'll track that the message was sent
        self.streams[stream_id]["events_sent"] += 1
        return True

    async def send_to_session_streams(self, session_id: str, message: Dict[str, Any]):
        """Send message to all SSE streams in a session."""
        if session_id not in self.session_streams:
            return 0

        sent_count = 0
        for stream_id in list(self.session_streams[session_id]):
            if await self.send_to_stream(stream_id, message):
                sent_count += 1

        return sent_count

    async def close_stream(self, stream_id: str):
        """Close and cleanup an SSE stream."""
        if stream_id not in self.streams:
            return

        stream = self.streams[stream_id]
        stream["active"] = False

        session_id = stream["session_id"]
        if session_id and session_id in self.session_streams:
            self.session_streams[session_id].discard(stream_id)
            if not self.session_streams[session_id]:
                del self.session_streams[session_id]

        del self.streams[stream_id]
        logger.info(f"SSE stream {stream_id} closed")
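
Note that SSEManager.send_to_stream only increments a counter; its comments say a real implementation would queue messages for the generator. A sketch of one way that queuing could look, assuming an asyncio.Queue per stream (this is not the package's implementation):

import asyncio
import json


class QueuedSSEStream:
    """Hypothetical per-stream queue: producers push, the generator drains."""

    def __init__(self) -> None:
        self.queue: "asyncio.Queue[dict]" = asyncio.Queue()
        self.active = True

    async def push(self, message: dict) -> None:
        # Called from send_to_stream-style code to hand a message to the generator.
        await self.queue.put(message)

    async def events(self):
        # Async generator suitable for StreamingResponse(media_type="text/event-stream").
        while self.active:
            try:
                # Wait up to 30s for a queued message, then fall back to a heartbeat.
                message = await asyncio.wait_for(self.queue.get(), timeout=30)
                yield f"data: {json.dumps(message)}\n\n"
            except asyncio.TimeoutError:
                yield f"data: {json.dumps({'type': 'heartbeat'})}\n\n"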

class WebhookManager:
    """Manages webhook delivery for events using SDK nodes."""

    def __init__(self, max_retries: int = 3, timeout_seconds: int = 10):
        self.webhooks: Dict[str, Dict] = {}  # webhook_id -> webhook_config
        self.max_retries = max_retries
        self.timeout_seconds = timeout_seconds
        self.delivery_stats = {
            "total_attempts": 0,
            "successful_deliveries": 0,
            "failed_deliveries": 0,
        }

        # Use HTTPRequestNode instead of httpx
        self.http_node = HTTPRequestNode(
            name="webhook_delivery",
            retry_count=max_retries,
            timeout=timeout_seconds,
            headers={"User-Agent": "Kailash-Middleware/2.0"},
        )

        # Use CredentialManagerNode for webhook authentication
        self.credential_node = CredentialManagerNode(
            name="webhook_auth",
            credential_name="webhook_secrets",
            credential_type="custom",
        )

    def register_webhook(
        self,
        webhook_id: str,
        url: str,
        secret: str = None,
        event_filter: EventFilter = None,
        headers: Dict[str, str] = None,
    ):
        """Register a webhook endpoint."""
        self.webhooks[webhook_id] = {
            "url": url,
            "secret": secret,
            "event_filter": event_filter or EventFilter(),
            "headers": headers or {},
            "created_at": datetime.now(timezone.utc),
            "deliveries": 0,
            "failures": 0,
            "active": True,
        }
        logger.info(f"Registered webhook {webhook_id} -> {url}")

    def unregister_webhook(self, webhook_id: str):
        """Unregister a webhook endpoint."""
        if webhook_id in self.webhooks:
            del self.webhooks[webhook_id]
            logger.info(f"Unregistered webhook {webhook_id}")

    async def deliver_event(self, event: BaseEvent):
        """Deliver event to all matching webhooks."""
        delivery_results = []

        for webhook_id, webhook in self.webhooks.items():
            if not webhook["active"]:
                continue

            # Check if event matches filter
            if webhook["event_filter"].matches(event):
                result = await self._deliver_to_webhook(webhook_id, webhook, event)
                delivery_results.append(result)

        return delivery_results

    async def _deliver_to_webhook(
        self, webhook_id: str, webhook: Dict[str, Any], event: BaseEvent
    ) -> Dict[str, Any]:
        """Deliver event to a specific webhook using SDK HTTPRequestNode."""
        url = webhook["url"]
        delivery_id = str(uuid.uuid4())

        # Prepare headers
        headers = {
            **webhook.get("headers", {}),
            "X-Kailash-Webhook-Id": webhook_id,
            "X-Kailash-Delivery-Id": delivery_id,
        }

        # Add signature if secret is provided
        if webhook["secret"]:
            # In production, this would include HMAC signature
            headers["X-Kailash-Signature"] = f"sha256={webhook['secret'][:8]}..."

        # Prepare payload
        payload = {
            "webhook_id": webhook_id,
            "event": event.to_dict(),
            "delivery_id": delivery_id,
            "timestamp": time.time(),
        }

        # Log delivery attempt
        logger.info(
            f"Webhook delivery attempt: {webhook_id} -> {url} (event: {event.type.value})"
        )

        self.delivery_stats["total_attempts"] += 1

        try:
            # Use HTTPRequestNode for delivery (it handles retries internally)
            response = self.http_node.run(
                url=url, method="POST", json_data=payload, headers=headers
            )

            status_code = response.get("status_code", 0)

            if status_code < 400:
                webhook["deliveries"] += 1
                self.delivery_stats["successful_deliveries"] += 1

                # Log successful delivery
                logger.info(
                    f"Webhook delivery successful: {webhook_id} (status: {status_code})"
                )

                return {
                    "webhook_id": webhook_id,
                    "success": True,
                    "status_code": status_code,
                    "delivery_id": delivery_id,
                }
            else:
                # Log HTTP error
                logger.warning(
                    f"Webhook delivery HTTP error: {webhook_id} (status: {status_code})"
                )

                webhook["failures"] += 1
                self.delivery_stats["failed_deliveries"] += 1

                return {
                    "webhook_id": webhook_id,
                    "success": False,
                    "status_code": status_code,
                    "error": f"HTTP {status_code}",
                }

        except Exception as e:
            # Log delivery failure
            logger.error(f"Webhook delivery failed: {webhook_id} - {str(e)}")

            webhook["failures"] += 1
            self.delivery_stats["failed_deliveries"] += 1

            return {
                "webhook_id": webhook_id,
                "success": False,
                "error": str(e),
                "delivery_id": delivery_id,
            }
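
As shipped, _deliver_to_webhook puts a truncated secret in X-Kailash-Signature and its comment defers real HMAC signing to production. A sketch of HMAC-SHA256 signing with the standard library, reusing the header's sha256= prefix (the exact canonicalization is an assumption):

import hashlib
import hmac
import json


def sign_payload(secret: str, payload: dict) -> str:
    """Return a sha256=<hexdigest> signature over the JSON-encoded payload."""
    body = json.dumps(payload, sort_keys=True).encode("utf-8")
    digest = hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()
    return f"sha256={digest}"

# e.g. headers["X-Kailash-Signature"] = sign_payload(webhook["secret"], payload)
# A receiver would recompute the digest and compare with hmac.compare_digest().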

class RealtimeMiddleware:
    """
    Real-time communication middleware supporting multiple transport layers.

    Provides:
    - WebSocket connections for bidirectional real-time communication
    - Server-Sent Events (SSE) for unidirectional event streaming
    - Webhook delivery for external integrations
    - Sub-200ms latency optimization
    - Event filtering and routing
    """

    def __init__(
        self,
        agent_ui_middleware: AgentUIMiddleware,
        enable_websockets: bool = True,
        enable_sse: bool = True,
        enable_webhooks: bool = True,
        latency_target_ms: int = 200,
    ):
        self.agent_ui = agent_ui_middleware
        self.enable_websockets = enable_websockets
        self.enable_sse = enable_sse
        self.enable_webhooks = enable_webhooks
        self.latency_target_ms = latency_target_ms

        # Transport managers
        self.connection_manager = ConnectionManager() if enable_websockets else None
        self.sse_manager = SSEManager() if enable_sse else None
        self.webhook_manager = WebhookManager() if enable_webhooks else None

        # Performance tracking
        self.start_time = time.time()
        self.events_processed = 0
        self.latency_samples = []

        # Event subscription will be set up during initialize()
        self._event_subscription_task = None

    async def initialize(self):
        """Initialize the middleware and set up event subscriptions."""
        # Set up event subscription in async context
        self._event_subscription_task = asyncio.create_task(self._subscribe_to_events())
        logger.info("RealtimeMiddleware initialized with event subscriptions")

    def _setup_event_subscription(self):
        """Subscribe to events from the agent UI middleware."""
        # Deprecated - use initialize() instead
        logger.warning(
            "_setup_event_subscription called directly - use initialize() instead"
        )

    async def _subscribe_to_events(self):
        """Subscribe to and process events from agent UI."""

        async def event_handler(event: BaseEvent):
            start_time = time.time()

            try:
                await self._process_event(event)
                self.events_processed += 1

                # Track latency
                latency_ms = (time.time() - start_time) * 1000
                self.latency_samples.append(latency_ms)

                # Keep only recent samples
                if len(self.latency_samples) > 1000:
                    self.latency_samples = self.latency_samples[-500:]

                # Log if latency exceeds target
                if latency_ms > self.latency_target_ms:
                    logger.warning(
                        f"Event processing latency {latency_ms:.1f}ms exceeds target {self.latency_target_ms}ms"
                    )

            except Exception as e:
                logger.error(f"Error processing event {event.id}: {e}")

        await self.agent_ui.event_stream.subscribe("realtime_middleware", event_handler)

    async def _process_event(self, event: BaseEvent):
        """Process and route event to appropriate transport layers."""
        message = {"event": event.to_dict(), "timestamp": time.time()}

        # Route to WebSocket connections
        if self.enable_websockets and self.connection_manager:
            await self.connection_manager.broadcast(message)

        # Route to SSE streams (would need to implement message queuing)
        if self.enable_sse and self.sse_manager:
            # In a real implementation, this would queue messages for active streams
            pass

        # Route to webhooks
        if self.enable_webhooks and self.webhook_manager:
            await self.webhook_manager.deliver_event(event)

    # WebSocket Methods
    async def handle_websocket(
        self,
        websocket: WebSocket,
        session_id: str = None,
        user_id: str = None,
        event_types: List[str] = None,
    ):
        """Handle WebSocket connection lifecycle."""
        if not self.enable_websockets:
            await websocket.close(code=1000)
            return

        connection_id = str(uuid.uuid4())

        # Parse event filter
        event_filter = EventFilter(
            event_types=[EventType(t) for t in event_types] if event_types else None,
            session_id=session_id,
            user_id=user_id,
        )

        try:
            await self.connection_manager.connect(
                websocket, connection_id, session_id, user_id, event_filter
            )

            # Handle incoming messages
            while True:
                try:
                    data = await websocket.receive_text()
                    message = json.loads(data)
                    await self._handle_websocket_message(connection_id, message)

                except WebSocketDisconnect:
                    break
                except json.JSONDecodeError:
                    await websocket.send_text(
                        json.dumps({"error": "Invalid JSON format"})
                    )
                except Exception as e:
                    logger.error(f"WebSocket message error: {e}")
                    await websocket.send_text(json.dumps({"error": str(e)}))

        finally:
            await self.connection_manager.disconnect(connection_id)

    async def _handle_websocket_message(
        self, connection_id: str, message: Dict[str, Any]
    ):
        """Handle incoming WebSocket messages."""
        message_type = message.get("type")

        if message_type == "ping":
            # Respond to ping
            connection = self.connection_manager.connections.get(connection_id)
            if connection:
                connection["last_ping"] = time.time()
                await self.connection_manager.send_to_connection(
                    connection_id, {"type": "pong", "timestamp": time.time()}
                )

        elif message_type == "execute_workflow":
            # Handle workflow execution request
            session_id = message.get("session_id")
            workflow_id = message.get("workflow_id")
            inputs = message.get("inputs", {})

            try:
                execution_id = await self.agent_ui.execute_workflow(
                    session_id, workflow_id, inputs
                )
                await self.connection_manager.send_to_connection(
                    connection_id,
                    {"type": "execution_started", "execution_id": execution_id},
                )
            except Exception as e:
                await self.connection_manager.send_to_connection(
                    connection_id, {"type": "error", "error": str(e)}
                )

        elif message_type == "subscribe_events":
            # Update event filter for this connection
            # Implementation would update the connection's event filter
            pass

    # SSE Methods
    def create_sse_stream(
        self,
        request: Request,
        session_id: str = None,
        user_id: str = None,
        event_types: List[str] = None,
    ) -> StreamingResponse:
        """Create Server-Sent Events stream."""
        if not self.enable_sse:
            return Response("SSE not enabled", status_code=501)

        stream_id = str(uuid.uuid4())

        # Parse event filter
        event_filter = EventFilter(
            event_types=[EventType(t) for t in event_types] if event_types else None,
            session_id=session_id,
            user_id=user_id,
        )

        generator = self.sse_manager.create_stream(
            stream_id, session_id, user_id, event_filter
        )

        return StreamingResponse(
            generator,
            media_type="text/event-stream",
            headers={
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
                "X-Stream-ID": stream_id,
            },
        )

    # Webhook Methods
    def register_webhook(
        self,
        webhook_id: str,
        url: str,
        secret: str = None,
        event_types: List[str] = None,
        session_id: str = None,
        headers: Dict[str, str] = None,
    ):
        """Register webhook endpoint."""
        if not self.enable_webhooks:
            raise ValueError("Webhooks not enabled")

        event_filter = EventFilter(
            event_types=[EventType(t) for t in event_types] if event_types else None,
            session_id=session_id,
        )

        self.webhook_manager.register_webhook(
            webhook_id, url, secret, event_filter, headers
        )

    def unregister_webhook(self, webhook_id: str):
        """Unregister webhook endpoint."""
        if self.enable_webhooks and self.webhook_manager:
            self.webhook_manager.unregister_webhook(webhook_id)

    # Statistics and Monitoring
    def get_stats(self) -> Dict[str, Any]:
        """Get real-time middleware statistics."""
        stats = {
            "uptime_seconds": time.time() - self.start_time,
            "events_processed": self.events_processed,
            "latency_target_ms": self.latency_target_ms,
            "enabled_transports": {
                "websockets": self.enable_websockets,
                "sse": self.enable_sse,
                "webhooks": self.enable_webhooks,
            },
        }

        # Add latency statistics
        if self.latency_samples:
            stats["latency_stats"] = {
                "avg_ms": sum(self.latency_samples) / len(self.latency_samples),
                "max_ms": max(self.latency_samples),
                "min_ms": min(self.latency_samples),
                "samples": len(self.latency_samples),
            }

        # Add transport-specific stats
        if self.connection_manager:
            stats["websocket_stats"] = self.connection_manager.get_stats()

        if self.webhook_manager:
            stats["webhook_stats"] = self.webhook_manager.delivery_stats

        return stats
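
A wiring sketch for the module as a whole, assuming AgentUIMiddleware can be constructed with defaults and using illustrative route paths; the import paths mirror the file layout shown in this diff:

from fastapi import FastAPI, Request, WebSocket

from kailash.middleware.communication.realtime import RealtimeMiddleware
from kailash.middleware.core.agent_ui import AgentUIMiddleware

app = FastAPI()
agent_ui = AgentUIMiddleware()          # assumption: default construction
realtime = RealtimeMiddleware(agent_ui)


@app.on_event("startup")
async def startup() -> None:
    # Starts the background subscription to the agent UI event stream.
    await realtime.initialize()


@app.websocket("/ws")
async def ws_endpoint(websocket: WebSocket, session_id: str = None):
    # Bidirectional channel; handles ping/execute_workflow messages internally.
    await realtime.handle_websocket(websocket, session_id=session_id)


@app.get("/events")
async def sse_endpoint(request: Request, session_id: str = None):
    # Unidirectional event stream (text/event-stream).
    return realtime.create_sse_stream(request, session_id=session_id)


@app.get("/stats")
async def stats():
    return realtime.get_stats()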