kailash-0.6.6-py3-none-any.whl → kailash-0.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/access_control.py +64 -46
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/api/workflow_api.py +34 -3
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +57 -18
- kailash/middleware/communication/api_gateway.py +23 -3
- kailash/middleware/communication/realtime.py +83 -0
- kailash/middleware/core/agent_ui.py +1 -1
- kailash/middleware/gateway/storage_backends.py +393 -0
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/cli/__init__.py +5 -0
- kailash/nexus/cli/__main__.py +6 -0
- kailash/nexus/cli/main.py +176 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +8 -5
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base.py +29 -5
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/code/python.py +50 -6
- kailash/nodes/data/async_sql.py +90 -0
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/security/behavior_analysis.py +414 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/access_controlled.py +9 -7
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/runtime/runner.py +6 -4
- kailash/runtime/testing.py +1 -1
- kailash/security.py +22 -3
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +522 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +293 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +382 -15
- kailash/workflow/cyclic_runner.py +102 -10
- kailash/workflow/validation.py +144 -8
- kailash/workflow/visualization.py +99 -27
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
- kailash/workflow/builder_improvements.py +0 -207
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ b/kailash/servers/gateway.py
@@ -0,0 +1,183 @@
+"""Gateway creation utilities with enterprise defaults.
+
+This module provides the main create_gateway function that creates
+production-ready servers with enterprise features enabled by default.
+"""
+
+import logging
+from typing import Any, List, Optional
+
+from ..gateway.security import SecretManager
+from ..resources.registry import ResourceRegistry
+from .durable_workflow_server import DurableWorkflowServer
+from .enterprise_workflow_server import EnterpriseWorkflowServer
+from .workflow_server import WorkflowServer
+
+logger = logging.getLogger(__name__)
+
+
+def create_gateway(
+    title: str = "Kailash Enterprise Gateway",
+    description: str = "Production-ready workflow server with enterprise features",
+    version: str = "1.0.0",
+    # Server type selection
+    server_type: str = "enterprise",  # "enterprise", "durable", "basic"
+    # Basic configuration
+    max_workers: int = 20,
+    cors_origins: Optional[List[str]] = None,
+    # Enterprise features (enabled by default)
+    enable_durability: bool = True,
+    enable_resource_management: bool = True,
+    enable_async_execution: bool = True,
+    enable_health_checks: bool = True,
+    # Enterprise components
+    resource_registry: Optional[ResourceRegistry] = None,
+    secret_manager: Optional[SecretManager] = None,
+    # Backward compatibility
+    **kwargs,
+) -> EnterpriseWorkflowServer:
+    """Create a production-ready workflow server.
+
+    By default, creates an EnterpriseWorkflowServer with all enterprise
+    features enabled. This is the recommended configuration for production
+    deployments.
+
+    Args:
+        title: Server title for documentation
+        description: Server description
+        version: Server version
+        server_type: Type of server to create ("enterprise", "durable", "basic")
+        max_workers: Maximum thread pool workers (default: 20 for enterprise)
+        cors_origins: Allowed CORS origins
+        enable_durability: Enable request durability features
+        enable_resource_management: Enable resource registry
+        enable_async_execution: Enable async workflow execution
+        enable_health_checks: Enable comprehensive health checks
+        resource_registry: Optional ResourceRegistry instance
+        secret_manager: Optional SecretManager instance
+        **kwargs: Additional arguments passed to server constructor
+
+    Returns:
+        Configured workflow server instance
+
+    Examples:
+        >>> # Enterprise server with all features (recommended)
+        >>> gateway = create_gateway()
+
+        >>> # Enterprise server with custom configuration
+        >>> gateway = create_gateway(
+        ...     title="My Application",
+        ...     cors_origins=["http://localhost:3000"],
+        ...     max_workers=50
+        ... )
+
+        >>> # Durable server without full enterprise features
+        >>> gateway = create_gateway(
+        ...     server_type="durable",
+        ...     enable_resource_management=False
+        ... )
+
+        >>> # Basic server for development
+        >>> gateway = create_gateway(
+        ...     server_type="basic",
+        ...     enable_durability=False
+        ... )
+    """
+    # Log server creation
+    logger.info(f"Creating {server_type} workflow server: {title}")
+
+    # Common configuration
+    common_config = {
+        "title": title,
+        "description": description,
+        "version": version,
+        "max_workers": max_workers,
+        "cors_origins": cors_origins,
+        **kwargs,
+    }
+
+    # Create server based on type
+    if server_type == "enterprise":
+        server = EnterpriseWorkflowServer(
+            enable_durability=enable_durability,
+            enable_resource_management=enable_resource_management,
+            enable_async_execution=enable_async_execution,
+            enable_health_checks=enable_health_checks,
+            resource_registry=resource_registry,
+            secret_manager=secret_manager,
+            **common_config,
+        )
+
+    elif server_type == "durable":
+        server = DurableWorkflowServer(
+            enable_durability=enable_durability, **common_config
+        )
+
+    elif server_type == "basic":
+        server = WorkflowServer(**common_config)
+
+    else:
+        raise ValueError(f"Unknown server type: {server_type}")
+
+    logger.info(
+        f"Created {type(server).__name__} with features: durability={enable_durability}, "
+        f"resources={enable_resource_management}, async={enable_async_execution}"
+    )
+
+    return server
+
+
+def create_enterprise_gateway(**kwargs) -> EnterpriseWorkflowServer:
+    """Create enterprise workflow server (explicit enterprise features).
+
+    This is an alias for create_gateway(server_type="enterprise") that makes
+    it explicit that enterprise features are desired.
+    """
+    return create_gateway(server_type="enterprise", **kwargs)
+
+
+def create_durable_gateway(**kwargs) -> DurableWorkflowServer:
+    """Create durable workflow server without full enterprise features.
+
+    This creates a server with durability features but without resource
+    management and other enterprise capabilities.
+    """
+    return create_gateway(server_type="durable", **kwargs)
+
+
+def create_basic_gateway(**kwargs) -> WorkflowServer:
+    """Create basic workflow server for development/testing.
+
+    This creates a minimal server without durability or enterprise features.
+    Suitable for development and testing scenarios.
+    """
+    return create_gateway(server_type="basic", **kwargs)
+
+
+# Backward compatibility - maintain the existing create_gateway signature
+# but issue deprecation warning for old usage patterns
+def create_gateway_legacy(agent_ui_middleware=None, auth_manager=None, **kwargs):
+    """Legacy create_gateway function for backward compatibility.
+
+    This function maintains compatibility with the old APIGateway-based
+    create_gateway function. New code should use the new create_gateway()
+    function which creates EnterpriseWorkflowServer by default.
+    """
+    import warnings
+
+    warnings.warn(
+        "Legacy create_gateway usage detected. Consider migrating to the new "
+        "create_gateway() function which creates EnterpriseWorkflowServer by default. "
+        "See migration guide for details.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    # For now, delegate to the old APIGateway implementation
+    from ..middleware.communication.api_gateway import (
+        create_gateway as old_create_gateway,
+    )
+
+    return old_create_gateway(
+        agent_ui_middleware=agent_ui_middleware, auth_manager=auth_manager, **kwargs
+    )
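For orientation, a minimal usage sketch of the factory functions added above. The import path matches the new file; that the enterprise server inherits run() from WorkflowServer (defined in the next file) is an assumption, not confirmed by this diff.

    from kailash.servers.gateway import create_basic_gateway, create_gateway

    # Enterprise server with the defaults shown above (durability, resource
    # management, health checks all enabled)
    gateway = create_gateway(
        title="Orders Gateway",
        cors_origins=["http://localhost:3000"],
    )

    # Stripped-down variant for local development
    dev_server = create_basic_gateway(title="Orders Gateway (dev)")

    # run() is defined on WorkflowServer below; we assume the enterprise
    # subclass inherits it unchanged.
    gateway.run(host="127.0.0.1", port=8000)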
--- /dev/null
+++ b/kailash/servers/workflow_server.py
@@ -0,0 +1,293 @@
+"""Basic workflow server implementation.
+
+This module provides WorkflowServer - a renamed and improved version of
+WorkflowAPIGateway with clearer naming and better organization.
+"""
+
+import logging
+from concurrent.futures import ThreadPoolExecutor
+from contextlib import asynccontextmanager
+from typing import Any
+
+from fastapi import FastAPI, WebSocket
+from fastapi.middleware.cors import CORSMiddleware
+from pydantic import BaseModel, Field
+
+from ..api.workflow_api import WorkflowAPI
+from ..workflow import Workflow
+
+logger = logging.getLogger(__name__)
+
+
+class WorkflowRegistration(BaseModel):
+    """Registration details for a workflow."""
+
+    model_config = {"arbitrary_types_allowed": True}
+
+    name: str
+    type: str = Field(description="embedded or proxied")
+    workflow: Workflow | None = None
+    proxy_url: str | None = None
+    health_check: str | None = None
+    description: str | None = None
+    version: str = "1.0.0"
+    tags: list[str] = Field(default_factory=list)
+
+
+class WorkflowServer:
+    """Basic workflow server for hosting multiple Kailash workflows.
+
+    This server provides:
+    - Multi-workflow hosting with dynamic registration
+    - REST API endpoints for workflow execution
+    - WebSocket support for real-time updates
+    - MCP server integration
+    - Health monitoring
+    - CORS support
+
+    This is the base server class. For production deployments, consider
+    using EnterpriseWorkflowServer which includes durability, security,
+    and monitoring features.
+
+    Attributes:
+        app: FastAPI application instance
+        workflows: Registry of all registered workflows
+        executor: Thread pool for synchronous execution
+        mcp_servers: Registry of MCP servers
+    """
+
+    def __init__(
+        self,
+        title: str = "Kailash Workflow Server",
+        description: str = "Multi-workflow hosting server",
+        version: str = "1.0.0",
+        max_workers: int = 10,
+        cors_origins: list[str] = None,
+        **kwargs,
+    ):
+        """Initialize the workflow server.
+
+        Args:
+            title: Server title for documentation
+            description: Server description
+            version: Server version
+            max_workers: Maximum thread pool workers
+            cors_origins: Allowed CORS origins
+        """
+        self.workflows: dict[str, WorkflowRegistration] = {}
+        self.mcp_servers: dict[str, Any] = {}
+        self.executor = ThreadPoolExecutor(max_workers=max_workers)
+
+        # Create FastAPI app with lifespan
+        @asynccontextmanager
+        async def lifespan(app: FastAPI):
+            # Startup
+            logger.info(f"Starting {title} v{version}")
+            yield
+            # Shutdown
+            logger.info("Shutting down workflow server")
+            self.executor.shutdown(wait=True)
+
+        self.app = FastAPI(
+            title=title, description=description, version=version, lifespan=lifespan
+        )
+
+        # Add CORS middleware
+        if cors_origins:
+            self.app.add_middleware(
+                CORSMiddleware,
+                allow_origins=cors_origins,
+                allow_credentials=True,
+                allow_methods=["*"],
+                allow_headers=["*"],
+            )
+
+        # Register root endpoints
+        self._register_root_endpoints()
+
+    def _register_root_endpoints(self):
+        """Register server-level endpoints."""
+
+        @self.app.get("/")
+        async def root():
+            """Server information."""
+            return {
+                "name": self.app.title,
+                "version": self.app.version,
+                "workflows": list(self.workflows.keys()),
+                "mcp_servers": list(self.mcp_servers.keys()),
+                "type": "workflow_server",
+            }
+
+        @self.app.get("/workflows")
+        async def list_workflows():
+            """List all registered workflows."""
+            return {
+                name: {
+                    "type": reg.type,
+                    "description": reg.description,
+                    "version": reg.version,
+                    "tags": reg.tags,
+                    "endpoints": self._get_workflow_endpoints(name),
+                }
+                for name, reg in self.workflows.items()
+            }
+
+        @self.app.get("/health")
+        async def health_check():
+            """Server health check."""
+            health_status = {
+                "status": "healthy",
+                "server_type": "workflow_server",
+                "workflows": {},
+                "mcp_servers": {},
+            }
+
+            # Check workflow health
+            for name, reg in self.workflows.items():
+                if reg.type == "embedded":
+                    health_status["workflows"][name] = "healthy"
+                else:
+                    # TODO: Implement proxy health check
+                    health_status["workflows"][name] = "unknown"
+
+            # Check MCP server health
+            for name, server in self.mcp_servers.items():
+                # TODO: Implement MCP health check
+                health_status["mcp_servers"][name] = "unknown"
+
+            return health_status
+
+        # Note: Metrics and authentication endpoints are provided by EnterpriseWorkflowServer
+        # Basic WorkflowServer focuses on core workflow functionality
+
+        @self.app.websocket("/ws")
+        async def websocket_endpoint(websocket: WebSocket):
+            """WebSocket for real-time updates."""
+            await websocket.accept()
+            try:
+                while True:
+                    # Basic WebSocket echo - subclasses can override
+                    data = await websocket.receive_text()
+                    await websocket.send_text(f"Echo: {data}")
+            except Exception as e:
+                logger.error(f"WebSocket error: {e}")
+            finally:
+                await websocket.close()
+
+    def register_workflow(
+        self,
+        name: str,
+        workflow: Workflow,
+        description: str = None,
+        tags: list[str] = None,
+    ):
+        """Register a workflow with the server.
+
+        Args:
+            name: Unique workflow identifier
+            workflow: Workflow instance to register
+            description: Optional workflow description
+            tags: Optional tags for categorization
+        """
+        if name in self.workflows:
+            raise ValueError(f"Workflow '{name}' already registered")
+
+        # Create workflow registration
+        registration = WorkflowRegistration(
+            name=name,
+            type="embedded",
+            workflow=workflow,
+            description=description or f"Workflow: {name}",
+            tags=tags or [],
+        )
+
+        self.workflows[name] = registration
+
+        # Create workflow API wrapper
+        workflow_api = WorkflowAPI(workflow)
+
+        # Register workflow endpoints with prefix
+        prefix = f"/workflows/{name}"
+        self.app.mount(prefix, workflow_api.app)
+
+        logger.info(f"Registered workflow '{name}' at {prefix}")
+
+    def register_mcp_server(self, name: str, mcp_server: Any):
+        """Register an MCP server with the workflow server.
+
+        Args:
+            name: Unique MCP server identifier
+            mcp_server: MCP server instance
+        """
+        if name in self.mcp_servers:
+            raise ValueError(f"MCP server '{name}' already registered")
+
+        self.mcp_servers[name] = mcp_server
+
+        # Mount MCP server endpoints
+        mcp_prefix = f"/mcp/{name}"
+        # TODO: Implement MCP mounting logic
+
+        logger.info(f"Registered MCP server '{name}' at {mcp_prefix}")
+
+    def proxy_workflow(
+        self,
+        name: str,
+        proxy_url: str,
+        health_check: str = "/health",
+        description: str = None,
+        tags: list[str] = None,
+    ):
+        """Register a proxied workflow running on another server.
+
+        Args:
+            name: Unique workflow identifier
+            proxy_url: Base URL of the proxied workflow
+            health_check: Health check endpoint path
+            description: Optional workflow description
+            tags: Optional tags for categorization
+        """
+        if name in self.workflows:
+            raise ValueError(f"Workflow '{name}' already registered")
+
+        # Create proxied workflow registration
+        registration = WorkflowRegistration(
+            name=name,
+            type="proxied",
+            proxy_url=proxy_url,
+            health_check=health_check,
+            description=description or f"Proxied workflow: {name}",
+            tags=tags or [],
+        )
+
+        self.workflows[name] = registration
+
+        # TODO: Implement proxy endpoint creation
+        logger.info(f"Registered proxied workflow '{name}' -> {proxy_url}")
+
+    def _get_workflow_endpoints(self, name: str) -> list[str]:
+        """Get available endpoints for a workflow."""
+        base = f"/workflows/{name}"
+        return [
+            f"{base}/execute",
+            f"{base}/status",
+            f"{base}/schema",
+            f"{base}/docs",
+        ]
+
+    def run(self, host: str = "0.0.0.0", port: int = 8000, **kwargs):
+        """Run the workflow server.
+
+        Args:
+            host: Host address to bind to
+            port: Port to listen on
+            **kwargs: Additional arguments passed to uvicorn
+        """
+        import uvicorn
+
+        uvicorn.run(self.app, host=host, port=port, **kwargs)
+
+    def execute(self, **kwargs):
+        """Execute the server (alias for run)."""
+        self.run(**kwargs)
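A usage sketch against the WorkflowServer API above. The proxy URL is hypothetical; embedded registration would instead pass a kailash Workflow instance to register_workflow().

    from kailash.servers.workflow_server import WorkflowServer

    server = WorkflowServer(
        title="Demo Server",
        cors_origins=["http://localhost:3000"],
        max_workers=4,
    )

    # Proxied registration needs no local Workflow object.
    server.proxy_workflow(
        "remote-etl",
        proxy_url="http://workflows.internal:9000",  # hypothetical upstream
        description="ETL workflow hosted elsewhere",
        tags=["etl", "remote"],
    )

    # GET / and GET /workflows now report the registration; GET /health marks
    # proxied workflows "unknown" until the proxy health-check TODO lands.
    server.run(host="127.0.0.1", port=8000)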
--- /dev/null
+++ b/kailash/utils/data_validation.py
@@ -0,0 +1,192 @@
+"""Data validation and type consistency utilities for workflow execution."""
+
+import logging
+from typing import Any, Dict, List, Union
+
+logger = logging.getLogger(__name__)
+
+
+class DataTypeValidator:
+    """Validates and fixes data type inconsistencies in workflow execution."""
+
+    @staticmethod
+    def validate_node_output(node_id: str, output: Dict[str, Any]) -> Dict[str, Any]:
+        """Validate and fix node output to ensure consistent data types.
+
+        Args:
+            node_id: ID of the node producing the output
+            output: Raw output from the node
+
+        Returns:
+            Validated and potentially fixed output
+        """
+        if not isinstance(output, dict):
+            logger.warning(
+                f"Node '{node_id}' output should be a dict, got {type(output)}. Wrapping in result key."
+            )
+            return {"result": output}
+
+        validated_output = {}
+
+        for key, value in output.items():
+            validated_value = DataTypeValidator._validate_value(node_id, key, value)
+            validated_output[key] = validated_value
+
+        return validated_output
+
+    @staticmethod
+    def _validate_value(node_id: str, key: str, value: Any) -> Any:
+        """Validate a single value and fix common type issues.
+
+        Args:
+            node_id: ID of the node producing the value
+            key: Key name for the value
+            value: The value to validate
+
+        Returns:
+            Validated value
+        """
+        # Common bug: Dictionary gets converted to list of keys
+        if isinstance(value, list) and key == "result":
+            # Check if this looks like dict keys
+            if all(isinstance(item, str) for item in value):
+                logger.warning(
+                    f"Node '{node_id}' output '{key}' appears to be dict keys converted to list: {value}. "
+                    "This is a known bug in some node implementations."
+                )
+                # We can't recover the original dict, so wrap the list properly
+                return value
+
+        # Ensure string data is not accidentally indexed as dict
+        if isinstance(value, str):
+            return value
+
+        # Validate dict structure
+        if isinstance(value, dict):
+            # Recursively validate nested dicts
+            validated_dict = {}
+            for subkey, subvalue in value.items():
+                validated_dict[subkey] = DataTypeValidator._validate_value(
+                    node_id, f"{key}.{subkey}", subvalue
+                )
+            return validated_dict
+
+        # Validate list structure
+        if isinstance(value, list):
+            # Ensure list elements are consistently typed
+            if len(value) > 0:
+                first_type = type(value[0])
+                inconsistent_types = [
+                    i for i, item in enumerate(value) if type(item) is not first_type
+                ]
+                if inconsistent_types:
+                    logger.warning(
+                        f"Node '{node_id}' output '{key}' has inconsistent list element types. "
+                        f"First type: {first_type}, inconsistent indices: {inconsistent_types[:5]}"
+                    )
+
+        return value
+
+    @staticmethod
+    def validate_node_input(node_id: str, inputs: Dict[str, Any]) -> Dict[str, Any]:
+        """Validate node inputs before execution.
+
+        Args:
+            node_id: ID of the node receiving the inputs
+            inputs: Input parameters for the node
+
+        Returns:
+            Validated inputs
+        """
+        if not isinstance(inputs, dict):
+            logger.error(f"Node '{node_id}' inputs must be a dict, got {type(inputs)}")
+            return {}
+
+        validated_inputs = {}
+
+        for key, value in inputs.items():
+            # Handle common data mapping issues
+            if key == "data" and isinstance(value, list):
+                # Check if this is the dict-to-keys bug
+                if all(isinstance(item, str) for item in value):
+                    logger.warning(
+                        f"Node '{node_id}' received list of strings for 'data' parameter: {value}. "
+                        "This may be due to a dict-to-keys conversion bug in upstream node."
+                    )
+
+            validated_inputs[key] = value
+
+        return validated_inputs
+
+    @staticmethod
+    def fix_string_indexing_error(data: Any, error_context: str = "") -> Any:
+        """Fix common 'string indices must be integers' errors.
+
+        Args:
+            data: Data that caused the error
+            error_context: Context information about the error
+
+        Returns:
+            Fixed data or None if unfixable
+        """
+        if isinstance(data, str):
+            # Truncate long string values before logging
+            preview = f"'{data[:100]}...'" if len(data) > 100 else f"'{data}'"
+            logger.warning(
+                "Attempting to index string as dict"
+                f"{' in ' + error_context if error_context else ''}. "
+                f"String value: {preview}"
+            )
+            return None
+
+        if isinstance(data, list) and all(isinstance(item, str) for item in data):
+            logger.warning(
+                f"Data appears to be list of dict keys{' in ' + error_context if error_context else ''}. "
+                f"Keys: {data}. Cannot recover original dict structure."
+            )
+            return None
+
+        return data
+
+    @staticmethod
+    def create_error_recovery_wrapper(
+        original_data: Any, fallback_data: Any = None
+    ) -> Dict[str, Any]:
+        """Create a recovery wrapper for problematic data.
+
+        Args:
+            original_data: The problematic data
+            fallback_data: Fallback data to use if original is unusable
+
+        Returns:
+            Recovery wrapper dict
+        """
+        return {
+            "data": fallback_data if fallback_data is not None else {},
+            "original_data": original_data,
+            "data_type_error": True,
+            "error_message": f"Data type conversion error. Original type: {type(original_data)}",
+        }
+
+
+def validate_workflow_data_flow(workflow_results: Dict[str, Any]) -> Dict[str, Any]:
+    """Validate entire workflow result data flow for consistency.
+
+    Args:
+        workflow_results: Results from workflow execution
+
+    Returns:
+        Validated workflow results
+    """
+    validated_results = {}
+
+    for node_id, result in workflow_results.items():
+        try:
+            validated_result = DataTypeValidator.validate_node_output(node_id, result)
+            validated_results[node_id] = validated_result
+        except Exception as e:
+            logger.error(f"Data validation failed for node '{node_id}': {e}")
+            validated_results[node_id] = (
+                DataTypeValidator.create_error_recovery_wrapper(result)
+            )
+
+    return validated_results