kailash 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/access_control.py +64 -46
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/api/workflow_api.py +34 -3
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +57 -18
- kailash/middleware/communication/api_gateway.py +23 -3
- kailash/middleware/communication/realtime.py +83 -0
- kailash/middleware/core/agent_ui.py +1 -1
- kailash/middleware/gateway/storage_backends.py +393 -0
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/cli/__init__.py +5 -0
- kailash/nexus/cli/__main__.py +6 -0
- kailash/nexus/cli/main.py +176 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +8 -5
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base.py +29 -5
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/code/python.py +50 -6
- kailash/nodes/data/async_sql.py +90 -0
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/security/behavior_analysis.py +414 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/access_controlled.py +9 -7
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/runtime/runner.py +6 -4
- kailash/runtime/testing.py +1 -1
- kailash/security.py +22 -3
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +522 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +293 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +382 -15
- kailash/workflow/cyclic_runner.py +102 -10
- kailash/workflow/validation.py +144 -8
- kailash/workflow/visualization.py +99 -27
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
- kailash/workflow/builder_improvements.py +0 -207
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,423 @@
|
|
1
|
+
"""Session management for cross-channel communication."""
|
2
|
+
|
3
|
+
import asyncio
|
4
|
+
import logging
|
5
|
+
import time
|
6
|
+
import uuid
|
7
|
+
from dataclasses import dataclass, field
|
8
|
+
from enum import Enum
|
9
|
+
from typing import Any, Dict, List, Optional, Set
|
10
|
+
|
11
|
+
logger = logging.getLogger(__name__)
|
12
|
+
|
13
|
+
|
14
|
+
class SessionStatus(Enum):
    """Lifecycle states a cross-channel session can be in."""

    # Session is in use and receiving activity.
    ACTIVE = "active"
    # Session exists but has seen no recent activity.
    IDLE = "idle"
    # Session passed its expiry time and awaits cleanup.
    EXPIRED = "expired"
    # Session was explicitly ended and removed from the manager.
    TERMINATED = "terminated"
|
21
|
+
|
22
|
+
|
23
|
+
@dataclass
class CrossChannelSession:
    """A user session that can span multiple Nexus channels.

    Tracks which channels participate in the session, per-channel context,
    data shared across channels, and a bounded event history.
    """

    session_id: str
    user_id: Optional[str] = None
    created_at: float = field(default_factory=time.time)
    last_activity: float = field(default_factory=time.time)
    expires_at: Optional[float] = None
    status: SessionStatus = SessionStatus.ACTIVE

    # Channel tracking: participating channel names plus their contexts.
    active_channels: Set[str] = field(default_factory=set)
    channel_contexts: Dict[str, Dict[str, Any]] = field(default_factory=dict)

    # Data visible to every channel, and per-workflow state.
    shared_data: Dict[str, Any] = field(default_factory=dict)
    workflow_states: Dict[str, Any] = field(default_factory=dict)

    # Bounded event log; oldest entries are dropped past max_history_size.
    event_history: List[Dict[str, Any]] = field(default_factory=list)
    max_history_size: int = 1000

    def touch(self) -> None:
        """Record activity now and revive an IDLE session back to ACTIVE."""
        self.last_activity = time.time()
        if self.status == SessionStatus.IDLE:
            self.status = SessionStatus.ACTIVE

    def add_channel(
        self, channel_name: str, initial_context: Optional[Dict[str, Any]] = None
    ) -> None:
        """Attach a channel to this session.

        Args:
            channel_name: Name of the channel to add
            initial_context: Initial context data for the channel
        """
        self.active_channels.add(channel_name)
        # Store a copy so later caller-side mutations don't leak in.
        self.channel_contexts[channel_name] = (
            dict(initial_context) if initial_context else {}
        )
        self.touch()
        logger.debug(f"Added channel {channel_name} to session {self.session_id}")

    def remove_channel(self, channel_name: str) -> None:
        """Detach a channel and drop its context.

        Args:
            channel_name: Name of the channel to remove
        """
        self.active_channels.discard(channel_name)
        self.channel_contexts.pop(channel_name, None)
        logger.debug(f"Removed channel {channel_name} from session {self.session_id}")

    def update_channel_context(
        self, channel_name: str, context_updates: Dict[str, Any]
    ) -> None:
        """Merge updates into a channel's context, creating it if needed.

        Args:
            channel_name: Name of the channel
            context_updates: Context updates to apply
        """
        self.channel_contexts.setdefault(channel_name, {}).update(context_updates)
        self.touch()

    def get_channel_context(self, channel_name: str) -> Dict[str, Any]:
        """Return a copy of a channel's context ({} when unknown).

        Args:
            channel_name: Name of the channel

        Returns:
            Channel context data
        """
        return dict(self.channel_contexts.get(channel_name, {}))

    def set_shared_data(self, key: str, value: Any) -> None:
        """Store a value visible to every channel in the session.

        Args:
            key: Data key
            value: Data value
        """
        self.shared_data[key] = value
        self.touch()

    def get_shared_data(self, key: str, default: Any = None) -> Any:
        """Fetch a cross-channel shared value.

        Args:
            key: Data key
            default: Default value if key not found

        Returns:
            Shared data value
        """
        return self.shared_data.get(key, default)

    def add_event(self, event: Dict[str, Any]) -> None:
        """Append an event record, trimming history to max_history_size.

        The event's own keys override the auto-filled timestamp/session_id.

        Args:
            event: Event data to add
        """
        record = {"timestamp": time.time(), "session_id": self.session_id}
        record.update(event)
        self.event_history.append(record)

        # Keep only the most recent max_history_size entries.
        if len(self.event_history) > self.max_history_size:
            self.event_history = self.event_history[-self.max_history_size :]

        self.touch()

    def is_expired(self, timeout: int = 3600) -> bool:
        """Report whether the session has expired.

        An explicit expires_at wins; otherwise expiry is inactivity-based.

        Args:
            timeout: Session timeout in seconds

        Returns:
            True if session has expired
        """
        if self.expires_at:
            return time.time() > self.expires_at
        return (time.time() - self.last_activity) > timeout

    def extend_expiry(self, additional_seconds: int = 3600) -> None:
        """Push the expiry deadline further into the future.

        Args:
            additional_seconds: Additional seconds to extend
        """
        base = self.expires_at if self.expires_at else time.time()
        self.expires_at = base + additional_seconds
        self.touch()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the session to a plain dictionary."""
        return {
            "session_id": self.session_id,
            "user_id": self.user_id,
            "created_at": self.created_at,
            "last_activity": self.last_activity,
            "expires_at": self.expires_at,
            "status": self.status.value,
            "active_channels": list(self.active_channels),
            "channel_contexts": self.channel_contexts,
            "shared_data": self.shared_data,
            "workflow_states": self.workflow_states,
            "event_count": len(self.event_history),
        }
|
188
|
+
|
189
|
+
|
190
|
+
class SessionManager:
    """Manages cross-channel sessions for the Nexus framework.

    Holds sessions in an in-memory dict and runs an asyncio background
    task that periodically evicts expired sessions.
    """

    def __init__(self, default_timeout: int = 3600, cleanup_interval: int = 300):
        """Initialize session manager.

        Args:
            default_timeout: Default session timeout in seconds
            cleanup_interval: Interval for cleanup task in seconds
        """
        self.default_timeout = default_timeout
        self.cleanup_interval = cleanup_interval
        self._sessions: Dict[str, CrossChannelSession] = {}
        self._cleanup_task: Optional[asyncio.Task] = None
        self._running = False

    async def start(self) -> None:
        """Start the background cleanup task (idempotent)."""
        if self._running:
            return

        self._running = True
        self._cleanup_task = asyncio.create_task(self._cleanup_loop())
        logger.info("Session manager started")

    async def stop(self) -> None:
        """Stop the session manager and cancel the cleanup task."""
        self._running = False

        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                # Expected: we cancelled the task ourselves.
                pass

        logger.info("Session manager stopped")

    def create_session(
        self,
        user_id: Optional[str] = None,
        session_id: Optional[str] = None,
        timeout: Optional[int] = None,
    ) -> CrossChannelSession:
        """Create a new session.

        Args:
            user_id: Optional user ID for the session
            session_id: Optional custom session ID (UUID generated if omitted)
            timeout: Optional custom timeout in seconds

        Returns:
            New CrossChannelSession instance

        Raises:
            ValueError: If a session with the given ID already exists
        """
        if session_id is None:
            session_id = str(uuid.uuid4())

        if session_id in self._sessions:
            raise ValueError(f"Session {session_id} already exists")

        session = CrossChannelSession(session_id=session_id, user_id=user_id)

        if timeout:
            # Bug fix: this previously assigned to a non-existent
            # ``extends_at`` attribute, so custom timeouts were silently
            # ignored and the session never received an explicit expiry.
            session.expires_at = time.time() + timeout

        self._sessions[session_id] = session
        logger.info(f"Created session {session_id} for user {user_id}")

        return session

    def get_session(self, session_id: str) -> Optional[CrossChannelSession]:
        """Get an existing session, evicting it first if it has expired.

        Args:
            session_id: Session ID to retrieve

        Returns:
            CrossChannelSession if found and alive, None otherwise
        """
        session = self._sessions.get(session_id)

        if session and session.is_expired(self.default_timeout):
            # Lazy eviction: expired sessions are terminated on access.
            self.terminate_session(session_id)
            return None

        return session

    def get_or_create_session(
        self, session_id: str, user_id: Optional[str] = None
    ) -> CrossChannelSession:
        """Get existing session or create a new one with the given ID.

        Args:
            session_id: Session ID
            user_id: Optional user ID for new sessions

        Returns:
            CrossChannelSession instance
        """
        session = self.get_session(session_id)
        if session:
            return session

        return self.create_session(user_id=user_id, session_id=session_id)

    def terminate_session(self, session_id: str) -> bool:
        """Terminate and remove a session.

        Args:
            session_id: Session ID to terminate

        Returns:
            True if session was terminated, False if not found
        """
        session = self._sessions.pop(session_id, None)
        if session:
            session.status = SessionStatus.TERMINATED
            logger.info(f"Terminated session {session_id}")
            return True
        return False

    def list_sessions(
        self, user_id: Optional[str] = None, status: Optional[SessionStatus] = None
    ) -> List[CrossChannelSession]:
        """List sessions with optional filtering.

        Args:
            user_id: Filter by user ID
            status: Filter by status

        Returns:
            List of matching sessions
        """
        return [
            session
            for session in self._sessions.values()
            if (not user_id or session.user_id == user_id)
            and (not status or session.status == status)
        ]

    def get_channel_sessions(self, channel_name: str) -> List[CrossChannelSession]:
        """Get all sessions active on a specific channel.

        Args:
            channel_name: Name of the channel

        Returns:
            List of sessions active on the channel
        """
        return [
            session
            for session in self._sessions.values()
            if channel_name in session.active_channels
        ]

    async def broadcast_to_channel(
        self, channel_name: str, event: Dict[str, Any]
    ) -> int:
        """Record a broadcast event on every session active on a channel.

        Args:
            channel_name: Target channel name
            event: Event data to broadcast

        Returns:
            Number of sessions that received the event
        """
        sessions = self.get_channel_sessions(channel_name)

        for session in sessions:
            session.add_event(
                {"type": "broadcast", "channel": channel_name, "data": event}
            )

        logger.debug(
            f"Broadcasted event to {len(sessions)} sessions on channel {channel_name}"
        )
        return len(sessions)

    async def _cleanup_loop(self) -> None:
        """Background task: periodically evict expired sessions."""
        while self._running:
            try:
                await asyncio.sleep(self.cleanup_interval)
                await self._cleanup_expired_sessions()
            except asyncio.CancelledError:
                break
            except Exception as e:
                # Keep the loop alive on unexpected errors.
                logger.error(f"Error in session cleanup: {e}")

    async def _cleanup_expired_sessions(self) -> None:
        """Terminate every session that has passed its expiry."""
        expired_sessions = [
            session_id
            for session_id, session in self._sessions.items()
            if session.is_expired(self.default_timeout)
        ]

        for session_id in expired_sessions:
            self.terminate_session(session_id)

        if expired_sessions:
            logger.info(f"Cleaned up {len(expired_sessions)} expired sessions")

    def get_stats(self) -> Dict[str, Any]:
        """Get session manager statistics.

        Returns:
            Dictionary with session statistics
        """
        active_sessions = sum(
            1 for s in self._sessions.values() if s.status == SessionStatus.ACTIVE
        )
        idle_sessions = sum(
            1 for s in self._sessions.values() if s.status == SessionStatus.IDLE
        )

        channel_usage: Dict[str, int] = {}
        for session in self._sessions.values():
            for channel in session.active_channels:
                channel_usage[channel] = channel_usage.get(channel, 0) + 1

        return {
            "total_sessions": len(self._sessions),
            "active_sessions": active_sessions,
            "idle_sessions": idle_sessions,
            "channel_usage": channel_usage,
            "default_timeout": self.default_timeout,
            "cleanup_interval": self.cleanup_interval,
        }
|
kailash/mcp_server/discovery.py
CHANGED
@@ -486,7 +486,7 @@ class FileBasedDiscovery(DiscoveryBackend):
|
|
486
486
|
raise
|
487
487
|
|
488
488
|
|
489
|
-
class NetworkDiscovery:
|
489
|
+
class NetworkDiscovery(asyncio.DatagramProtocol):
|
490
490
|
"""Network-based discovery using UDP broadcast/multicast."""
|
491
491
|
|
492
492
|
DISCOVERY_PORT = 8765
|
@@ -535,6 +535,40 @@ class NetworkDiscovery:
|
|
535
535
|
self._transport = None
|
536
536
|
self._protocol = None
|
537
537
|
|
538
|
+
# AsyncIO DatagramProtocol methods
|
539
|
+
def connection_made(self, transport):
    """asyncio protocol hook: called once the UDP socket is ready."""
    # Keep the transport so announcements can be sent later.
    self._transport = transport
    logger.info(f"Network discovery protocol connected on port {self.port}")
|
543
|
+
|
544
|
+
def datagram_received(self, data, addr):
    """asyncio protocol hook: parse and dispatch an incoming datagram.

    Decodes the payload as JSON and schedules _handle_discovery_message.
    Malformed payloads are logged and dropped rather than raised.
    """
    try:
        message = json.loads(data.decode())
        try:
            # Schedule on the running loop when one is active.
            asyncio.get_running_loop()
            asyncio.create_task(self._handle_discovery_message(message, addr))
        except RuntimeError:
            # NOTE(review): datagram_received is normally invoked from inside
            # a running loop, so this synchronous fallback should rarely (if
            # ever) trigger — confirm before relying on it.
            asyncio.run(self._handle_discovery_message(message, addr))
    except json.JSONDecodeError:
        logger.warning(f"Invalid JSON received from {addr}")
    except Exception as e:
        logger.error(f"Error handling datagram from {addr}: {e}")
|
559
|
+
|
560
|
+
def error_received(self, exc):
    """asyncio protocol hook: log transport-level send/receive errors."""
    logger.error(f"Network discovery protocol error: {exc}")
|
563
|
+
|
564
|
+
def connection_lost(self, exc):
    """asyncio protocol hook: mark discovery stopped when the socket closes.

    Logs an error when the loss was caused by an exception, an info line
    for a clean close, and clears the running flag either way.
    """
    if exc:
        logger.error(f"Network discovery connection lost: {exc}")
    else:
        logger.info("Network discovery connection closed")
    self.running = False
|
571
|
+
|
538
572
|
async def start_discovery_listener(self):
|
539
573
|
"""Start listening for server announcements."""
|
540
574
|
await self.start()
|
@@ -590,6 +624,27 @@ class NetworkDiscovery:
|
|
590
624
|
except (json.JSONDecodeError, KeyError) as e:
|
591
625
|
logger.debug(f"Invalid announcement from {addr[0]}: {e}")
|
592
626
|
|
627
|
+
async def _is_port_open(self, host: str, port: int, timeout: float = 1.0) -> bool:
|
628
|
+
"""Check if a port is open on a host.
|
629
|
+
|
630
|
+
Args:
|
631
|
+
host: Host to check
|
632
|
+
port: Port to check
|
633
|
+
timeout: Connection timeout
|
634
|
+
|
635
|
+
Returns:
|
636
|
+
True if port is open, False otherwise
|
637
|
+
"""
|
638
|
+
try:
|
639
|
+
# Create socket connection with timeout
|
640
|
+
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
641
|
+
sock.settimeout(timeout)
|
642
|
+
result = sock.connect_ex((host, port))
|
643
|
+
sock.close()
|
644
|
+
return result == 0
|
645
|
+
except (OSError, socket.error, socket.timeout):
|
646
|
+
return False
|
647
|
+
|
593
648
|
async def scan_network(
|
594
649
|
self, network: str = "192.168.1.0/24", timeout: float = 5.0
|
595
650
|
) -> List[ServerInfo]:
|
@@ -756,22 +811,6 @@ class NetworkDiscovery:
|
|
756
811
|
else:
|
757
812
|
logger.debug(f"Unknown message type: {msg_type}")
|
758
813
|
|
759
|
-
def datagram_received(self, data: bytes, addr: tuple):
|
760
|
-
"""Handle received datagram (part of asyncio protocol)."""
|
761
|
-
try:
|
762
|
-
message = json.loads(data.decode())
|
763
|
-
# Try to get current event loop
|
764
|
-
try:
|
765
|
-
loop = asyncio.get_running_loop()
|
766
|
-
asyncio.create_task(self._handle_discovery_message(message, addr))
|
767
|
-
except RuntimeError:
|
768
|
-
# No event loop, run synchronously
|
769
|
-
asyncio.run(self._handle_discovery_message(message, addr))
|
770
|
-
except json.JSONDecodeError:
|
771
|
-
logger.warning(f"Invalid JSON received from {addr}")
|
772
|
-
except Exception as e:
|
773
|
-
logger.error(f"Error handling datagram from {addr}: {e}")
|
774
|
-
|
775
814
|
|
776
815
|
class ServiceRegistry:
|
777
816
|
"""Main service registry coordinating multiple discovery backends."""
|
@@ -967,7 +1006,7 @@ class ServiceRegistry:
|
|
967
1006
|
async def stop_health_monitoring(self):
|
968
1007
|
"""Stop health monitoring (async version)."""
|
969
1008
|
if self.health_checker:
|
970
|
-
self.health_checker.stop()
|
1009
|
+
await self.health_checker.stop()
|
971
1010
|
|
972
1011
|
async def get_best_server_for_capability(
|
973
1012
|
self, capability: str
|
@@ -331,9 +331,23 @@ class APIGateway:
|
|
331
331
|
def _setup_session_routes(self):
|
332
332
|
"""Setup session management routes."""
|
333
333
|
|
334
|
+
# Create auth dependency
|
335
|
+
async def get_optional_current_user():
|
336
|
+
"""Optional auth dependency - returns None if auth is disabled."""
|
337
|
+
if self.enable_auth and self.auth_manager:
|
338
|
+
# Use auth manager's dependency if available
|
339
|
+
try:
|
340
|
+
# This would normally use the auth manager's get_current_user_dependency
|
341
|
+
# For now, return None to avoid complex auth setup
|
342
|
+
return None
|
343
|
+
except:
|
344
|
+
return None
|
345
|
+
return None
|
346
|
+
|
334
347
|
@self.app.post("/api/sessions", response_model=SessionResponse)
|
335
348
|
async def create_session(
|
336
|
-
request: SessionCreateRequest,
|
349
|
+
request: SessionCreateRequest,
|
350
|
+
current_user: Dict[str, Any] = Depends(get_optional_current_user),
|
337
351
|
):
|
338
352
|
"""Create a new session for a frontend client."""
|
339
353
|
try:
|
@@ -583,7 +597,13 @@ class APIGateway:
|
|
583
597
|
"""Get schemas for available node types."""
|
584
598
|
try:
|
585
599
|
# Get all registered nodes
|
586
|
-
|
600
|
+
# NodeRegistry doesn't have get_all_nodes, need to use _nodes directly
|
601
|
+
available_nodes = {}
|
602
|
+
if hasattr(self.node_registry, "_nodes"):
|
603
|
+
available_nodes = self.node_registry._nodes.copy()
|
604
|
+
else:
|
605
|
+
# Fallback - return empty dict
|
606
|
+
available_nodes = {}
|
587
607
|
|
588
608
|
# Filter by requested types if specified
|
589
609
|
if request.node_types:
|
@@ -611,7 +631,7 @@ class APIGateway:
|
|
611
631
|
@self.app.get("/api/schemas/nodes/{node_type}")
|
612
632
|
async def get_node_schema(node_type: str):
|
613
633
|
"""Get schema for a specific node type."""
|
614
|
-
node_class = self.node_registry.
|
634
|
+
node_class = self.node_registry.get(node_type)
|
615
635
|
if not node_class:
|
616
636
|
raise HTTPException(status_code=404, detail="Node type not found")
|
617
637
|
|
@@ -177,6 +177,89 @@ class ConnectionManager:
|
|
177
177
|
"active_users": len(self.user_connections),
|
178
178
|
"total_messages_sent": total_messages,
|
179
179
|
}
|
180
|
+
|
181
|
+
def filter_events(self, events: List[BaseEvent], event_filter: EventFilter = None) -> List[BaseEvent]:
    """Return only the events that satisfy *event_filter*.

    Events lacking a filtered attribute (session_id / user_id) pass that
    particular check. With no filter, the input list is returned as-is.
    """
    if not event_filter:
        return events

    def _matches(evt) -> bool:
        # Session filter: only applied when the event carries a session_id.
        if event_filter.session_id and hasattr(evt, "session_id"):
            if evt.session_id != event_filter.session_id:
                return False
        # User filter: only applied when the event carries a user_id.
        if event_filter.user_id and hasattr(evt, "user_id"):
            if evt.user_id != event_filter.user_id:
                return False
        # Event-type allowlist.
        if event_filter.event_types and evt.event_type not in event_filter.event_types:
            return False
        return True

    return [evt for evt in events if _matches(evt)]
|
205
|
+
|
206
|
+
def set_event_filter(self, connection_id: str, event_filter: EventFilter):
    """Attach an event filter to a connection; no-op for unknown IDs."""
    conn = self.connections.get(connection_id)
    if conn is not None:
        conn["event_filter"] = event_filter
|
210
|
+
|
211
|
+
def get_event_filter(self, connection_id: str) -> Optional[EventFilter]:
    """Return the event filter for a connection, or None when absent/unknown."""
    conn = self.connections.get(connection_id)
    if conn is None:
        return None
    return conn.get("event_filter")
|
216
|
+
|
217
|
+
# Alias methods for compatibility
|
218
|
+
def event_filter(self, events: List[BaseEvent], filter_criteria: EventFilter = None) -> List[BaseEvent]:
    """Backwards-compatible alias that delegates to filter_events."""
    return self.filter_events(events, filter_criteria)
|
221
|
+
|
222
|
+
async def on_event(self, event: BaseEvent):
    """Entry point for incoming events; delegates to handle_event."""
    await self.handle_event(event)
|
225
|
+
|
226
|
+
async def handle_event(self, event: BaseEvent):
    """Route an event into the broadcast pipeline via process_event."""
    await self.process_event(event)
|
229
|
+
|
230
|
+
async def process_event(self, event: BaseEvent):
    """Serialize *event* into a message and broadcast it to matching connections.

    A connection receives the event unless its stored filter rejects the
    event's session, user, or type.
    """
    # Enum-like event types expose .value; anything else is stringified.
    kind = (
        event.event_type.value
        if hasattr(event.event_type, "value")
        else str(event.event_type)
    )
    if hasattr(event, "timestamp"):
        stamp = event.timestamp.isoformat()
    else:
        stamp = datetime.now(timezone.utc).isoformat()

    message = {
        "type": "event",
        "event_type": kind,
        "data": event.data,
        "timestamp": stamp,
        "session_id": getattr(event, "session_id", None),
        "user_id": getattr(event, "user_id", None),
    }

    for connection_id, connection in self.connections.items():
        event_filter = connection.get("event_filter")

        if event_filter:
            # Session filter: connection's session must match.
            if (
                event_filter.session_id
                and connection["session_id"] != event_filter.session_id
            ):
                continue
            # User filter: connection's user must match.
            if (
                event_filter.user_id
                and connection["user_id"] != event_filter.user_id
            ):
                continue
            # Event-type allowlist.
            if getattr(event_filter, "event_types", None):
                if event.event_type not in event_filter.event_types:
                    continue

        await self.send_to_connection(connection_id, message)
|
180
263
|
|
181
264
|
|
182
265
|
class SSEManager:
|
@@ -944,7 +944,7 @@ class AgentUIMiddleware:
|
|
944
944
|
async def get_available_nodes(self) -> List[Dict[str, Any]]:
|
945
945
|
"""Get all available node types with their schemas."""
|
946
946
|
nodes = []
|
947
|
-
for node_name, node_class in self.node_registry.
|
947
|
+
for node_name, node_class in self.node_registry._nodes.items():
|
948
948
|
# Get node schema (would be implemented in schema.py)
|
949
949
|
schema = await self._get_node_schema(node_class)
|
950
950
|
nodes.append(
|