kailash 0.6.6__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/security.py +16 -1
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +466 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +290 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +291 -12
- kailash/workflow/validation.py +144 -8
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/METADATA +1 -1
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/RECORD +63 -25
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,423 @@
|
|
1
|
+
"""Session management for cross-channel communication."""
|
2
|
+
|
3
|
+
import asyncio
|
4
|
+
import logging
|
5
|
+
import time
|
6
|
+
import uuid
|
7
|
+
from dataclasses import dataclass, field
|
8
|
+
from enum import Enum
|
9
|
+
from typing import Any, Dict, List, Optional, Set
|
10
|
+
|
11
|
+
logger = logging.getLogger(__name__)
|
12
|
+
|
13
|
+
|
14
|
+
class SessionStatus(Enum):
    """Lifecycle states a cross-channel session can move through."""

    ACTIVE = "active"  # session has recent activity
    IDLE = "idle"  # quiet, but not yet past its timeout
    EXPIRED = "expired"  # passed its timeout or explicit expiry
    TERMINATED = "terminated"  # explicitly ended by the manager
|
21
|
+
|
22
|
+
|
23
|
+
@dataclass
class CrossChannelSession:
    """A single user session that can be shared by several channels.

    Tracks which channels are attached, per-channel context dicts, data
    shared across channels, and a bounded event history.
    """

    session_id: str
    user_id: Optional[str] = None
    created_at: float = field(default_factory=time.time)
    last_activity: float = field(default_factory=time.time)
    expires_at: Optional[float] = None
    status: SessionStatus = SessionStatus.ACTIVE

    # Channel tracking
    active_channels: Set[str] = field(default_factory=set)
    channel_contexts: Dict[str, Dict[str, Any]] = field(default_factory=dict)

    # Session data
    shared_data: Dict[str, Any] = field(default_factory=dict)
    workflow_states: Dict[str, Any] = field(default_factory=dict)

    # Event tracking
    event_history: List[Dict[str, Any]] = field(default_factory=list)
    max_history_size: int = 1000

    def touch(self) -> None:
        """Record activity now; an IDLE session wakes back to ACTIVE."""
        self.last_activity = time.time()
        if self.status is SessionStatus.IDLE:
            self.status = SessionStatus.ACTIVE

    def add_channel(
        self, channel_name: str, initial_context: Optional[Dict[str, Any]] = None
    ) -> None:
        """Attach a channel to this session.

        Args:
            channel_name: Name of the channel to add
            initial_context: Initial context data for the channel
        """
        self.active_channels.add(channel_name)
        # Store a copy so later caller-side mutation cannot leak in.
        self.channel_contexts[channel_name] = (
            dict(initial_context) if initial_context else {}
        )
        self.touch()
        logger.debug(f"Added channel {channel_name} to session {self.session_id}")

    def remove_channel(self, channel_name: str) -> None:
        """Detach a channel and drop its per-channel context.

        Args:
            channel_name: Name of the channel to remove
        """
        self.active_channels.discard(channel_name)
        self.channel_contexts.pop(channel_name, None)
        logger.debug(f"Removed channel {channel_name} from session {self.session_id}")

    def update_channel_context(
        self, channel_name: str, context_updates: Dict[str, Any]
    ) -> None:
        """Merge updates into a channel's context, creating it if absent.

        Args:
            channel_name: Name of the channel
            context_updates: Context updates to apply
        """
        self.channel_contexts.setdefault(channel_name, {}).update(context_updates)
        self.touch()

    def get_channel_context(self, channel_name: str) -> Dict[str, Any]:
        """Return a shallow copy of one channel's context.

        Args:
            channel_name: Name of the channel

        Returns:
            Channel context data (empty dict for unknown channels)
        """
        return dict(self.channel_contexts.get(channel_name, {}))

    def set_shared_data(self, key: str, value: Any) -> None:
        """Store a value visible to every channel in the session.

        Args:
            key: Data key
            value: Data value
        """
        self.shared_data[key] = value
        self.touch()

    def get_shared_data(self, key: str, default: Any = None) -> Any:
        """Fetch a cross-channel shared value.

        Args:
            key: Data key
            default: Default value if key not found

        Returns:
            Shared data value
        """
        return self.shared_data.get(key, default)

    def add_event(self, event: Dict[str, Any]) -> None:
        """Append an event record to the history, trimming to the cap.

        Args:
            event: Event data to add
        """
        # Event keys may deliberately override timestamp/session_id.
        record: Dict[str, Any] = {
            "timestamp": time.time(),
            "session_id": self.session_id,
        }
        record.update(event)
        self.event_history.append(record)

        # Keep only the newest max_history_size records.
        overflow = len(self.event_history) - self.max_history_size
        if overflow > 0:
            del self.event_history[:overflow]

        self.touch()

    def is_expired(self, timeout: int = 3600) -> bool:
        """Report whether this session should be considered dead.

        Args:
            timeout: Inactivity timeout in seconds, used only when no
                explicit expires_at is set

        Returns:
            True if session has expired
        """
        now = time.time()
        if self.expires_at:
            return now > self.expires_at
        return (now - self.last_activity) > timeout

    def extend_expiry(self, additional_seconds: int = 3600) -> None:
        """Push the expiry forward; when unset, start counting from now.

        Args:
            additional_seconds: Additional seconds to extend
        """
        base = self.expires_at if self.expires_at else time.time()
        self.expires_at = base + additional_seconds
        self.touch()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the session; event history is reduced to a count."""
        return {
            "session_id": self.session_id,
            "user_id": self.user_id,
            "created_at": self.created_at,
            "last_activity": self.last_activity,
            "expires_at": self.expires_at,
            "status": self.status.value,
            "active_channels": list(self.active_channels),
            "channel_contexts": self.channel_contexts,
            "shared_data": self.shared_data,
            "workflow_states": self.workflow_states,
            "event_count": len(self.event_history),
        }
|
188
|
+
|
189
|
+
|
190
|
+
class SessionManager:
    """Manages cross-channel sessions for the Nexus framework.

    Holds sessions in an in-memory dict and evicts expired ones both
    lazily (on get_session) and via a background asyncio cleanup task
    started with start() and cancelled with stop().
    """

    def __init__(self, default_timeout: int = 3600, cleanup_interval: int = 300):
        """Initialize session manager.

        Args:
            default_timeout: Default session timeout in seconds
            cleanup_interval: Interval for cleanup task in seconds
        """
        self.default_timeout = default_timeout
        self.cleanup_interval = cleanup_interval
        self._sessions: Dict[str, CrossChannelSession] = {}
        self._cleanup_task: Optional[asyncio.Task] = None
        self._running = False

    async def start(self) -> None:
        """Start the session manager and its cleanup task (idempotent)."""
        if self._running:
            return

        self._running = True
        self._cleanup_task = asyncio.create_task(self._cleanup_loop())
        logger.info("Session manager started")

    async def stop(self) -> None:
        """Stop the session manager, cancelling the cleanup task."""
        self._running = False

        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                # Expected: we cancelled the task ourselves just above.
                pass

        logger.info("Session manager stopped")

    def create_session(
        self,
        user_id: Optional[str] = None,
        session_id: Optional[str] = None,
        timeout: Optional[int] = None,
    ) -> CrossChannelSession:
        """Create a new session.

        Args:
            user_id: Optional user ID for the session
            session_id: Optional custom session ID (a UUID4 is generated
                when omitted)
            timeout: Optional custom timeout in seconds for this session

        Returns:
            New CrossChannelSession instance

        Raises:
            ValueError: If a session with session_id already exists
        """
        if session_id is None:
            session_id = str(uuid.uuid4())

        if session_id in self._sessions:
            raise ValueError(f"Session {session_id} already exists")

        session = CrossChannelSession(session_id=session_id, user_id=user_id)

        if timeout:
            # BUG FIX: this previously assigned to a nonexistent
            # ``extends_at`` attribute, so a custom timeout was silently
            # ignored. Setting ``expires_at`` makes is_expired() and the
            # cleanup loop actually honor the requested timeout.
            session.expires_at = time.time() + timeout

        self._sessions[session_id] = session
        logger.info(f"Created session {session_id} for user {user_id}")

        return session

    def get_session(self, session_id: str) -> Optional[CrossChannelSession]:
        """Get an existing session.

        Args:
            session_id: Session ID to retrieve

        Returns:
            CrossChannelSession if found and not expired, None otherwise
        """
        session = self._sessions.get(session_id)

        # Lazy expiry: evict on access instead of waiting for the
        # background cleanup loop to notice.
        if session and session.is_expired(self.default_timeout):
            self.terminate_session(session_id)
            return None

        return session

    def get_or_create_session(
        self, session_id: str, user_id: Optional[str] = None
    ) -> CrossChannelSession:
        """Get existing session or create a new one under the same ID.

        Args:
            session_id: Session ID
            user_id: Optional user ID for new sessions

        Returns:
            CrossChannelSession instance
        """
        session = self.get_session(session_id)
        if session:
            return session

        return self.create_session(user_id=user_id, session_id=session_id)

    def terminate_session(self, session_id: str) -> bool:
        """Terminate and remove a session.

        Args:
            session_id: Session ID to terminate

        Returns:
            True if session was terminated, False if not found
        """
        session = self._sessions.pop(session_id, None)
        if session:
            # Mark TERMINATED so callers still holding a reference see it.
            session.status = SessionStatus.TERMINATED
            logger.info(f"Terminated session {session_id}")
            return True
        return False

    def list_sessions(
        self, user_id: Optional[str] = None, status: Optional[SessionStatus] = None
    ) -> List[CrossChannelSession]:
        """List sessions with optional filtering.

        Args:
            user_id: Filter by user ID (ignored when falsy)
            status: Filter by status (ignored when None)

        Returns:
            List of matching sessions
        """
        return [
            session
            for session in self._sessions.values()
            if not (user_id and session.user_id != user_id)
            and not (status and session.status != status)
        ]

    def get_channel_sessions(self, channel_name: str) -> List[CrossChannelSession]:
        """Get all sessions active on a specific channel.

        Args:
            channel_name: Name of the channel

        Returns:
            List of sessions active on the channel
        """
        return [
            session
            for session in self._sessions.values()
            if channel_name in session.active_channels
        ]

    async def broadcast_to_channel(
        self, channel_name: str, event: Dict[str, Any]
    ) -> int:
        """Broadcast an event to all sessions on a channel.

        Args:
            channel_name: Target channel name
            event: Event data to broadcast

        Returns:
            Number of sessions that received the event
        """
        sessions = self.get_channel_sessions(channel_name)

        for session in sessions:
            session.add_event(
                {"type": "broadcast", "channel": channel_name, "data": event}
            )

        logger.debug(
            f"Broadcasted event to {len(sessions)} sessions on channel {channel_name}"
        )
        return len(sessions)

    async def _cleanup_loop(self) -> None:
        """Background task that periodically evicts expired sessions."""
        while self._running:
            try:
                await asyncio.sleep(self.cleanup_interval)
                await self._cleanup_expired_sessions()
            except asyncio.CancelledError:
                break
            except Exception as e:
                # Keep the loop alive: a single failed sweep must not kill
                # session cleanup for the life of the process.
                logger.error(f"Error in session cleanup: {e}")

    async def _cleanup_expired_sessions(self) -> None:
        """Terminate every session past its timeout/expiry."""
        # Collect IDs first: terminate_session mutates self._sessions.
        expired_sessions = [
            session_id
            for session_id, session in self._sessions.items()
            if session.is_expired(self.default_timeout)
        ]

        for session_id in expired_sessions:
            self.terminate_session(session_id)

        if expired_sessions:
            logger.info(f"Cleaned up {len(expired_sessions)} expired sessions")

    def get_stats(self) -> Dict[str, Any]:
        """Get session manager statistics.

        Returns:
            Dictionary with session statistics
        """
        active_sessions = sum(
            1 for s in self._sessions.values() if s.status == SessionStatus.ACTIVE
        )
        idle_sessions = sum(
            1 for s in self._sessions.values() if s.status == SessionStatus.IDLE
        )

        # Count how many sessions each channel currently serves.
        channel_usage: Dict[str, int] = {}
        for session in self._sessions.values():
            for channel in session.active_channels:
                channel_usage[channel] = channel_usage.get(channel, 0) + 1

        return {
            "total_sessions": len(self._sessions),
            "active_sessions": active_sessions,
            "idle_sessions": idle_sessions,
            "channel_usage": channel_usage,
            "default_timeout": self.default_timeout,
            "cleanup_interval": self.cleanup_interval,
        }
|
kailash/mcp_server/discovery.py
CHANGED
@@ -967,7 +967,7 @@ class ServiceRegistry:
|
|
967
967
|
async def stop_health_monitoring(self):
    """Stop health monitoring (async version)."""
    # health_checker may be None when monitoring was never started.
    if self.health_checker:
        # stop() is a coroutine here; the 0.7.0 diff adds the missing
        # await (the 0.6.6 call created the coroutine without running it).
        await self.health_checker.stop()
|
971
971
|
|
972
972
|
async def get_best_server_for_capability(
|
973
973
|
self, capability: str
|
@@ -199,9 +199,12 @@ class MiddlewareMCPServer:
|
|
199
199
|
# Tool Registration Workflow
|
200
200
|
self.tool_register_workflow = WorkflowBuilder()
|
201
201
|
|
202
|
-
|
203
|
-
|
204
|
-
|
202
|
+
# Use proper WorkflowBuilder syntax with string class names
|
203
|
+
self.tool_register_workflow.add_node(
|
204
|
+
"PythonCodeNode",
|
205
|
+
"validate_tool",
|
206
|
+
{
|
207
|
+
"code": """
|
205
208
|
# Validate tool registration using Kailash patterns
|
206
209
|
tool_data = input_data.get('tool_data', {})
|
207
210
|
|
@@ -220,12 +223,15 @@ else:
|
|
220
223
|
'tool_data': tool_data,
|
221
224
|
'validation_passed': True
|
222
225
|
}
|
223
|
-
"""
|
226
|
+
"""
|
227
|
+
},
|
224
228
|
)
|
225
229
|
|
226
|
-
|
227
|
-
|
228
|
-
|
230
|
+
self.tool_register_workflow.add_node(
|
231
|
+
"PythonCodeNode",
|
232
|
+
"register_tool",
|
233
|
+
{
|
234
|
+
"code": """
|
229
235
|
# Register tool using Kailash patterns
|
230
236
|
validation_result = input_data.get('validation_result', {})
|
231
237
|
|
@@ -243,11 +249,10 @@ else:
|
|
243
249
|
'tool_registered': True,
|
244
250
|
'registration_time': datetime.now().isoformat()
|
245
251
|
}
|
246
|
-
"""
|
252
|
+
"""
|
253
|
+
},
|
247
254
|
)
|
248
255
|
|
249
|
-
self.tool_register_workflow.add_node(validator)
|
250
|
-
self.tool_register_workflow.add_node(register_handler)
|
251
256
|
self.tool_register_workflow.add_connection(
|
252
257
|
"validate_tool", "result", "register_tool", "validation_result"
|
253
258
|
)
|
@@ -255,9 +260,11 @@ else:
|
|
255
260
|
# Tool Execution Workflow
|
256
261
|
self.tool_execute_workflow = WorkflowBuilder()
|
257
262
|
|
258
|
-
|
259
|
-
|
260
|
-
|
263
|
+
self.tool_execute_workflow.add_node(
|
264
|
+
"PythonCodeNode",
|
265
|
+
"execute_tool",
|
266
|
+
{
|
267
|
+
"code": """
|
261
268
|
# Execute MCP tool using Kailash patterns
|
262
269
|
tool_name = input_data.get('tool_name')
|
263
270
|
tool_args = input_data.get('arguments', {})
|
@@ -272,11 +279,10 @@ execution_result = {
|
|
272
279
|
}
|
273
280
|
|
274
281
|
result = {'execution_result': execution_result}
|
275
|
-
"""
|
282
|
+
"""
|
283
|
+
},
|
276
284
|
)
|
277
285
|
|
278
|
-
self.tool_execute_workflow.add_node(executor)
|
279
|
-
|
280
286
|
async def register_tool(
|
281
287
|
self,
|
282
288
|
name: str,
|
@@ -0,0 +1,21 @@
|
|
1
|
+
"""Kailash Nexus - Multi-channel workflow orchestration framework."""
|
2
|
+
|
3
|
+
from .factory import (
|
4
|
+
create_api_nexus,
|
5
|
+
create_cli_nexus,
|
6
|
+
create_development_nexus,
|
7
|
+
create_mcp_nexus,
|
8
|
+
create_nexus,
|
9
|
+
create_production_nexus,
|
10
|
+
)
|
11
|
+
from .gateway import NexusGateway
|
12
|
+
|
13
|
+
__all__ = [
|
14
|
+
"NexusGateway",
|
15
|
+
"create_nexus",
|
16
|
+
"create_api_nexus",
|
17
|
+
"create_cli_nexus",
|
18
|
+
"create_mcp_nexus",
|
19
|
+
"create_development_nexus",
|
20
|
+
"create_production_nexus",
|
21
|
+
]
|