kailash 0.6.5__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. kailash/__init__.py +35 -4
  2. kailash/adapters/__init__.py +5 -0
  3. kailash/adapters/mcp_platform_adapter.py +273 -0
  4. kailash/channels/__init__.py +21 -0
  5. kailash/channels/api_channel.py +409 -0
  6. kailash/channels/base.py +271 -0
  7. kailash/channels/cli_channel.py +661 -0
  8. kailash/channels/event_router.py +496 -0
  9. kailash/channels/mcp_channel.py +648 -0
  10. kailash/channels/session.py +423 -0
  11. kailash/mcp_server/discovery.py +1 -1
  12. kailash/middleware/core/agent_ui.py +5 -0
  13. kailash/middleware/mcp/enhanced_server.py +22 -16
  14. kailash/nexus/__init__.py +21 -0
  15. kailash/nexus/factory.py +413 -0
  16. kailash/nexus/gateway.py +545 -0
  17. kailash/nodes/__init__.py +2 -0
  18. kailash/nodes/ai/iterative_llm_agent.py +988 -17
  19. kailash/nodes/ai/llm_agent.py +29 -9
  20. kailash/nodes/api/__init__.py +2 -2
  21. kailash/nodes/api/monitoring.py +1 -1
  22. kailash/nodes/base_async.py +54 -14
  23. kailash/nodes/code/async_python.py +1 -1
  24. kailash/nodes/data/bulk_operations.py +939 -0
  25. kailash/nodes/data/query_builder.py +373 -0
  26. kailash/nodes/data/query_cache.py +512 -0
  27. kailash/nodes/monitoring/__init__.py +10 -0
  28. kailash/nodes/monitoring/deadlock_detector.py +964 -0
  29. kailash/nodes/monitoring/performance_anomaly.py +1078 -0
  30. kailash/nodes/monitoring/race_condition_detector.py +1151 -0
  31. kailash/nodes/monitoring/transaction_metrics.py +790 -0
  32. kailash/nodes/monitoring/transaction_monitor.py +931 -0
  33. kailash/nodes/system/__init__.py +17 -0
  34. kailash/nodes/system/command_parser.py +820 -0
  35. kailash/nodes/transaction/__init__.py +48 -0
  36. kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
  37. kailash/nodes/transaction/saga_coordinator.py +652 -0
  38. kailash/nodes/transaction/saga_state_storage.py +411 -0
  39. kailash/nodes/transaction/saga_step.py +467 -0
  40. kailash/nodes/transaction/transaction_context.py +756 -0
  41. kailash/nodes/transaction/two_phase_commit.py +978 -0
  42. kailash/nodes/transform/processors.py +17 -1
  43. kailash/nodes/validation/__init__.py +21 -0
  44. kailash/nodes/validation/test_executor.py +532 -0
  45. kailash/nodes/validation/validation_nodes.py +447 -0
  46. kailash/resources/factory.py +1 -1
  47. kailash/runtime/async_local.py +84 -21
  48. kailash/runtime/local.py +21 -2
  49. kailash/runtime/parameter_injector.py +187 -31
  50. kailash/security.py +16 -1
  51. kailash/servers/__init__.py +32 -0
  52. kailash/servers/durable_workflow_server.py +430 -0
  53. kailash/servers/enterprise_workflow_server.py +466 -0
  54. kailash/servers/gateway.py +183 -0
  55. kailash/servers/workflow_server.py +290 -0
  56. kailash/utils/data_validation.py +192 -0
  57. kailash/workflow/builder.py +291 -12
  58. kailash/workflow/validation.py +144 -8
  59. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/METADATA +1 -1
  60. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/RECORD +64 -26
  61. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/WHEEL +0 -0
  62. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/entry_points.txt +0 -0
  63. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/licenses/LICENSE +0 -0
  64. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,409 @@
1
+ """API Channel implementation using EnterpriseWorkflowServer."""
2
+
3
import asyncio
import json
import logging
from typing import Any, Dict, Optional

import uvicorn
from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.middleware.cors import CORSMiddleware

from ..servers import EnterpriseWorkflowServer
from ..workflow import Workflow
from .base import (
    Channel,
    ChannelConfig,
    ChannelEvent,
    ChannelResponse,
    ChannelStatus,
    ChannelType,
)
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
class APIChannel(Channel):
    """HTTP API channel implementation using EnterpriseWorkflowServer.

    This channel provides RESTful API access to workflows and session management
    through the existing EnterpriseWorkflowServer infrastructure.
    """

    def __init__(
        self,
        config: ChannelConfig,
        workflow_server: Optional[EnterpriseWorkflowServer] = None,
    ):
        """Initialize API channel.

        Args:
            config: Channel configuration
            workflow_server: Optional existing workflow server, will create one if not provided
        """
        super().__init__(config)

        # Reuse the caller's server when provided so several channels can
        # share one FastAPI app; otherwise build a dedicated one from config.
        if workflow_server:
            self.workflow_server = workflow_server
        else:
            self.workflow_server = self._create_workflow_server()

        self.app: FastAPI = self.workflow_server.app
        self._server: Optional[uvicorn.Server] = None
        self._server_task: Optional[asyncio.Task] = None

        # Add channel-specific endpoints
        self._setup_channel_endpoints()

        logger.info(
            f"Initialized API channel {self.name} on {config.host}:{config.port}"
        )

    def _create_workflow_server(self) -> EnterpriseWorkflowServer:
        """Create a new workflow server with channel configuration.

        Server title/description, CORS origins, and enterprise feature flags
        are all read from ``config.extra_config`` with sensible defaults.
        """
        server_title = self.config.extra_config.get("title", f"{self.name} API Server")
        server_description = self.config.extra_config.get(
            "description", f"API server for {self.name} channel"
        )

        # CORS configuration (defaults to allow-all; tighten via extra_config)
        cors_origins = self.config.extra_config.get("cors_origins", ["*"])

        return EnterpriseWorkflowServer(
            title=server_title,
            description=server_description,
            cors_origins=cors_origins,
            enable_durability=self.config.extra_config.get("enable_durability", True),
            enable_resource_management=self.config.extra_config.get(
                "enable_resource_management", True
            ),
            enable_async_execution=self.config.extra_config.get(
                "enable_async_execution", True
            ),
            enable_health_checks=self.config.extra_config.get(
                "enable_health_checks", True
            ),
        )

    def _setup_channel_endpoints(self) -> None:
        """Add channel-specific endpoints to the FastAPI app."""

        @self.app.get("/channel/info")
        async def get_channel_info():
            """Get information about this API channel."""
            return {
                "channel_name": self.name,
                "channel_type": self.channel_type.value,
                "status": self.status.value,
                "config": {
                    "host": self.config.host,
                    "port": self.config.port,
                    "enable_sessions": self.config.enable_sessions,
                    "enable_auth": self.config.enable_auth,
                    "enable_event_routing": self.config.enable_event_routing,
                },
            }

        @self.app.post("/channel/events")
        async def emit_channel_event(request: Request):
            """Emit an event through this channel."""
            try:
                data = await request.json()

                event = ChannelEvent(
                    event_id=data.get(
                        "event_id", f"api_{asyncio.get_running_loop().time()}"
                    ),
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type=data.get("event_type", "api_event"),
                    payload=data.get("payload", {}),
                    session_id=data.get("session_id"),
                    metadata=data.get("metadata", {}),
                )

                await self.emit_event(event)

                return {"status": "success", "event_id": event.event_id}

            except Exception as e:
                logger.error(f"Error emitting channel event: {e}")
                raise HTTPException(status_code=400, detail=str(e))

        @self.app.get("/channel/status")
        async def get_channel_status():
            """Get detailed channel status."""
            return await self.get_status()

        @self.app.get("/channel/health")
        async def get_channel_health():
            """Get channel health check."""
            health = await self.health_check()
            status_code = 200 if health["healthy"] else 503
            # json.dumps, not str(): str(dict) produces Python repr (single
            # quotes, True/False), which is not valid JSON despite the
            # application/json media type.
            return Response(
                content=json.dumps(health),
                status_code=status_code,
                media_type="application/json",
            )

    async def start(self) -> None:
        """Start the API channel server.

        Launches uvicorn in a background task and emits a ``channel_started``
        event once the channel is considered running.
        """
        if self.status == ChannelStatus.RUNNING:
            logger.warning(f"API channel {self.name} is already running")
            return

        try:
            self.status = ChannelStatus.STARTING
            self._setup_event_queue()

            # Configure uvicorn server
            config = uvicorn.Config(
                app=self.app,
                host=self.config.host,
                port=self.config.port or 8000,
                log_level="info" if logger.isEnabledFor(logging.INFO) else "warning",
                access_log=False,  # We'll handle our own logging
            )

            self._server = uvicorn.Server(config)

            # Start server in background task
            self._server_task = asyncio.create_task(self._server.serve())

            # Wait a moment for server to start.
            # NOTE(review): a fixed sleep is a heuristic; consider polling
            # self._server.started for a deterministic readiness signal.
            await asyncio.sleep(0.1)

            self.status = ChannelStatus.RUNNING

            # Emit startup event
            await self.emit_event(
                ChannelEvent(
                    event_id=f"api_startup_{asyncio.get_running_loop().time()}",
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type="channel_started",
                    payload={"host": self.config.host, "port": self.config.port},
                )
            )

            logger.info(
                f"API channel {self.name} started on {self.config.host}:{self.config.port}"
            )

        except Exception as e:
            self.status = ChannelStatus.ERROR
            logger.error(f"Failed to start API channel {self.name}: {e}")
            raise

    async def stop(self) -> None:
        """Stop the API channel server.

        Emits a ``channel_stopping`` event, signals uvicorn to exit, cancels
        the server task, and releases channel resources.
        """
        if self.status == ChannelStatus.STOPPED:
            return

        try:
            self.status = ChannelStatus.STOPPING

            # Emit shutdown event
            await self.emit_event(
                ChannelEvent(
                    event_id=f"api_shutdown_{asyncio.get_running_loop().time()}",
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type="channel_stopping",
                    payload={},
                )
            )

            # Stop the uvicorn server
            if self._server:
                self._server.should_exit = True

            # Cancel the server task
            if self._server_task and not self._server_task.done():
                self._server_task.cancel()
                try:
                    await self._server_task
                except asyncio.CancelledError:
                    pass

            await self._cleanup()
            self.status = ChannelStatus.STOPPED

            logger.info(f"API channel {self.name} stopped")

        except Exception as e:
            self.status = ChannelStatus.ERROR
            logger.error(f"Error stopping API channel {self.name}: {e}")
            raise

    async def handle_request(self, request: Dict[str, Any]) -> ChannelResponse:
        """Handle a request through the API channel.

        Args:
            request: Request data with workflow execution parameters
                (expects "workflow_name", optional "inputs" and "session_id")

        Returns:
            ChannelResponse with execution results
        """
        try:
            workflow_name = request.get("workflow_name")
            if not workflow_name:
                return ChannelResponse(success=False, error="workflow_name is required")

            # Check if workflow exists
            if workflow_name not in self.workflow_server.workflows:
                return ChannelResponse(
                    success=False, error=f"Workflow '{workflow_name}' not found"
                )

            # Execute workflow through server's runtime
            workflow_registration = self.workflow_server.workflows[workflow_name]
            inputs = request.get("inputs", {})

            # Emit request event
            await self.emit_event(
                ChannelEvent(
                    event_id=f"api_request_{asyncio.get_running_loop().time()}",
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type="workflow_request",
                    payload={"workflow_name": workflow_name, "inputs": inputs},
                    session_id=request.get("session_id"),
                )
            )

            # Execute workflow
            if workflow_registration.type == "embedded":
                workflow = workflow_registration.workflow
                # NOTE(review): runtime.execute appears synchronous and will
                # block the event loop for long workflows — confirm whether
                # it should be offloaded to a thread.
                results, run_id = self.workflow_server.runtime.execute(
                    workflow, parameters=inputs
                )
            else:
                # Handle proxied workflows
                return ChannelResponse(
                    success=False,
                    error="Proxied workflows not yet supported in APIChannel",
                )

            # Emit completion event
            await self.emit_event(
                ChannelEvent(
                    event_id=f"api_completion_{asyncio.get_running_loop().time()}",
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type="workflow_completed",
                    payload={
                        "workflow_name": workflow_name,
                        "run_id": run_id,
                        "success": True,
                    },
                    session_id=request.get("session_id"),
                )
            )

            return ChannelResponse(
                success=True,
                data={
                    "results": results,
                    "run_id": run_id,
                    "workflow_name": workflow_name,
                },
                metadata={"channel": self.name, "type": "api"},
            )

        except Exception as e:
            logger.error(f"Error handling API request: {e}")

            # Emit error event
            await self.emit_event(
                ChannelEvent(
                    event_id=f"api_error_{asyncio.get_running_loop().time()}",
                    channel_name=self.name,
                    channel_type=self.channel_type,
                    event_type="workflow_error",
                    payload={"error": str(e), "request": request},
                    session_id=request.get("session_id"),
                )
            )

            return ChannelResponse(
                success=False,
                error=str(e),
                metadata={"channel": self.name, "type": "api"},
            )

    def register_workflow(
        self,
        name: str,
        workflow: Workflow,
        description: Optional[str] = None,
        tags: Optional[list] = None,
    ) -> None:
        """Register a workflow with this API channel.

        Args:
            name: Workflow name
            workflow: Workflow instance
            description: Optional description
            tags: Optional tags
        """
        self.workflow_server.register_workflow(
            name=name, workflow=workflow, description=description, tags=tags
        )
        logger.info(f"Registered workflow '{name}' with API channel {self.name}")

    def proxy_workflow(
        self,
        name: str,
        proxy_url: str,
        health_check: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[list] = None,
    ) -> None:
        """Register a proxied workflow with this API channel.

        Args:
            name: Workflow name
            proxy_url: URL to proxy requests to
            health_check: Optional health check endpoint
            description: Optional description
            tags: Optional tags
        """
        self.workflow_server.proxy_workflow(
            name=name,
            proxy_url=proxy_url,
            health_check=health_check,
            description=description,
            tags=tags,
        )
        logger.info(
            f"Registered proxied workflow '{name}' with API channel {self.name}"
        )

    async def health_check(self) -> Dict[str, Any]:
        """Perform comprehensive health check.

        Extends the base channel health check with API-specific checks
        (server task alive, workflows registered) and enterprise feature info.
        """
        base_health = await super().health_check()

        # Add API-specific health checks
        api_checks = {
            "server_running": self._server is not None
            and not (self._server_task and self._server_task.done()),
            "workflows_registered": len(self.workflow_server.workflows) > 0,
            "enterprise_features": {
                "durability": self.workflow_server.enable_durability,
                "resource_management": self.workflow_server.enable_resource_management,
                "async_execution": self.workflow_server.enable_async_execution,
                "health_checks": self.workflow_server.enable_health_checks,
            },
        }

        # NOTE: "enterprise_features" is a non-empty dict and thus always
        # truthy — only the boolean checks actually gate overall health.
        all_healthy = base_health["healthy"] and all(api_checks.values())

        return {
            **base_health,
            "healthy": all_healthy,
            "checks": {**base_health["checks"], **api_checks},
            "workflows": list(self.workflow_server.workflows.keys()),
            "enterprise_info": api_checks["enterprise_features"],
        }
@@ -0,0 +1,271 @@
1
+ """Base channel abstractions for the Nexus framework."""
2
+
3
+ import asyncio
4
+ import logging
5
+ from abc import ABC, abstractmethod
6
+ from dataclasses import dataclass, field
7
+ from enum import Enum
8
+ from typing import Any, Callable, Dict, List, Optional, Union
9
+
10
+ logger = logging.getLogger(__name__)
11
+
12
+
13
class ChannelType(Enum):
    """Enumeration of the channel protocols supported by the framework."""

    API = "api"
    CLI = "cli"
    MCP = "mcp"
19
+
20
+
21
class ChannelStatus(Enum):
    """Lifecycle states a channel moves through from creation to shutdown."""

    INITIALIZED = "initialized"
    STARTING = "starting"
    RUNNING = "running"
    STOPPING = "stopping"
    STOPPED = "stopped"
    ERROR = "error"
30
+
31
+
32
@dataclass
class ChannelConfig:
    """Configuration for a channel.

    Groups identity, network, security, session, and event-routing settings;
    anything protocol-specific goes in ``extra_config``.
    """

    # Identity and network
    name: str
    channel_type: ChannelType
    enabled: bool = True
    host: str = "localhost"
    port: Optional[int] = None

    # Security settings
    enable_auth: bool = False
    auth_config: Dict[str, Any] = field(default_factory=dict)

    # Session management
    enable_sessions: bool = True
    session_timeout: int = 3600  # seconds (1 hour default)

    # Event handling
    enable_event_routing: bool = True
    event_buffer_size: int = 1000  # max queued events before drops

    # Channel-specific configuration
    extra_config: Dict[str, Any] = field(default_factory=dict)
56
+
57
+
58
@dataclass
class ChannelEvent:
    """Represents an event in a channel.

    Carries the originating channel's identity, an event type tag, and an
    arbitrary payload, plus optional session/timestamp/metadata context.
    """

    event_id: str
    channel_name: str
    channel_type: ChannelType
    event_type: str
    payload: Dict[str, Any]
    session_id: Optional[str] = None
    timestamp: Optional[float] = None  # left None unless a producer sets it
    metadata: Dict[str, Any] = field(default_factory=dict)
70
+
71
+
72
@dataclass
class ChannelResponse:
    """Response from a channel operation.

    ``success`` indicates the outcome; ``data`` holds the result on success
    and ``error`` a human-readable message on failure.
    """

    success: bool
    data: Any = None
    error: Optional[str] = None
    metadata: Dict[str, Any] = field(default_factory=dict)
80
+
81
+
82
class Channel(ABC):
    """Abstract base class for all channel implementations.

    Channels provide a unified interface for different communication protocols
    (HTTP API, CLI, MCP) in the Nexus framework.
    """

    def __init__(self, config: ChannelConfig):
        """Initialize the channel.

        Args:
            config: Channel configuration
        """
        self.config = config
        self.status = ChannelStatus.INITIALIZED
        # event_type -> list of sync or async handler callables
        self._event_handlers: Dict[str, List[Callable]] = {}
        # Created lazily by _setup_event_queue() when event routing is on
        self._event_queue: Optional[asyncio.Queue] = None
        self._running_task: Optional[asyncio.Task] = None

        logger.info(f"Initialized {config.channel_type.value} channel: {config.name}")

    @property
    def name(self) -> str:
        """Get channel name."""
        return self.config.name

    @property
    def channel_type(self) -> ChannelType:
        """Get channel type."""
        return self.config.channel_type

    @property
    def is_running(self) -> bool:
        """Check if channel is running."""
        return self.status == ChannelStatus.RUNNING

    @abstractmethod
    async def start(self) -> None:
        """Start the channel.

        This method should:
        1. Initialize channel-specific resources
        2. Start listening for requests
        3. Set status to RUNNING
        """

    @abstractmethod
    async def stop(self) -> None:
        """Stop the channel.

        This method should:
        1. Stop accepting new requests
        2. Clean up resources
        3. Set status to STOPPED
        """

    @abstractmethod
    async def handle_request(self, request: Dict[str, Any]) -> ChannelResponse:
        """Handle a request from this channel.

        Args:
            request: Channel-specific request data

        Returns:
            ChannelResponse with the result
        """

    async def emit_event(self, event: ChannelEvent) -> None:
        """Emit an event from this channel.

        When the event buffer is full the event is dropped with a warning
        rather than blocking the emitter.

        Args:
            event: Event to emit
        """
        if not self.config.enable_event_routing:
            return

        # Add to event queue for routing
        if self._event_queue:
            try:
                # BUGFIX: `await queue.put()` never raises QueueFull (it
                # blocks until space frees up), so the drop-on-full branch
                # below was unreachable and a full buffer could stall the
                # emitter indefinitely. put_nowait makes the intended
                # drop-on-full semantics real.
                self._event_queue.put_nowait(event)
                logger.debug(f"Emitted event {event.event_id} from channel {self.name}")
            except asyncio.QueueFull:
                logger.warning(f"Event queue full, dropping event {event.event_id}")

    def add_event_handler(
        self, event_type: str, handler: Callable[[ChannelEvent], None]
    ) -> None:
        """Add an event handler for specific event types.

        Args:
            event_type: Type of event to handle
            handler: Callable to handle the event
        """
        if event_type not in self._event_handlers:
            self._event_handlers[event_type] = []
        self._event_handlers[event_type].append(handler)
        logger.debug(f"Added event handler for {event_type} on channel {self.name}")

    async def handle_event(self, event: ChannelEvent) -> None:
        """Handle an incoming event.

        Dispatches to registered handlers; handler exceptions are logged and
        do not stop dispatch to the remaining handlers.

        Args:
            event: Event to handle
        """
        handlers = self._event_handlers.get(event.event_type, [])
        for handler in handlers:
            try:
                if asyncio.iscoroutinefunction(handler):
                    await handler(event)
                else:
                    handler(event)
            except Exception as e:
                logger.error(f"Error in event handler for {event.event_type}: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Get channel status information.

        Returns:
            Dictionary with channel status details
        """
        return {
            "name": self.name,
            "type": self.channel_type.value,
            "status": self.status.value,
            "enabled": self.config.enabled,
            "host": self.config.host,
            "port": self.config.port,
            "event_handlers": len(self._event_handlers),
            "queue_size": self._event_queue.qsize() if self._event_queue else 0,
        }

    async def health_check(self) -> Dict[str, Any]:
        """Perform a health check on the channel.

        Returns:
            Health check results
        """
        try:
            # Base health check - can be overridden by subclasses
            is_healthy = self.status in [
                ChannelStatus.RUNNING,
                ChannelStatus.INITIALIZED,
            ]

            return {
                "healthy": is_healthy,
                "status": self.status.value,
                "message": (
                    "OK" if is_healthy else f"Channel status: {self.status.value}"
                ),
                "checks": {
                    "status": is_healthy,
                    "event_queue": self._event_queue is not None,
                    "enabled": self.config.enabled,
                },
            }
        except Exception as e:
            return {
                "healthy": False,
                "status": "error",
                "message": str(e),
                "checks": {},
            }

    def _setup_event_queue(self) -> None:
        """Set up the event queue for this channel."""
        if self.config.enable_event_routing:
            self._event_queue = asyncio.Queue(maxsize=self.config.event_buffer_size)

    async def _cleanup(self) -> None:
        """Clean up channel resources.

        Cancels the running task (if any) and drains the event queue so no
        stale events survive a restart.
        """
        if self._running_task and not self._running_task.done():
            self._running_task.cancel()
            try:
                await self._running_task
            except asyncio.CancelledError:
                pass

        if self._event_queue:
            # Clear any remaining events
            while not self._event_queue.empty():
                try:
                    self._event_queue.get_nowait()
                except asyncio.QueueEmpty:
                    break

        logger.info(f"Cleaned up channel {self.name}")