kailash 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/client/__init__.py +12 -0
- kailash/client/enhanced_client.py +306 -0
- kailash/core/actors/__init__.py +16 -0
- kailash/core/actors/connection_actor.py +566 -0
- kailash/core/actors/supervisor.py +364 -0
- kailash/edge/__init__.py +16 -0
- kailash/edge/compliance.py +834 -0
- kailash/edge/discovery.py +659 -0
- kailash/edge/location.py +582 -0
- kailash/gateway/__init__.py +33 -0
- kailash/gateway/api.py +289 -0
- kailash/gateway/enhanced_gateway.py +357 -0
- kailash/gateway/resource_resolver.py +217 -0
- kailash/gateway/security.py +227 -0
- kailash/middleware/auth/models.py +2 -2
- kailash/middleware/database/base_models.py +1 -7
- kailash/middleware/gateway/__init__.py +22 -0
- kailash/middleware/gateway/checkpoint_manager.py +398 -0
- kailash/middleware/gateway/deduplicator.py +382 -0
- kailash/middleware/gateway/durable_gateway.py +417 -0
- kailash/middleware/gateway/durable_request.py +498 -0
- kailash/middleware/gateway/event_store.py +459 -0
- kailash/nodes/admin/permission_check.py +817 -33
- kailash/nodes/admin/role_management.py +1242 -108
- kailash/nodes/admin/schema_manager.py +438 -0
- kailash/nodes/admin/user_management.py +1124 -1582
- kailash/nodes/code/__init__.py +8 -1
- kailash/nodes/code/async_python.py +1035 -0
- kailash/nodes/code/python.py +1 -0
- kailash/nodes/data/async_sql.py +9 -3
- kailash/nodes/data/sql.py +20 -11
- kailash/nodes/data/workflow_connection_pool.py +643 -0
- kailash/nodes/rag/__init__.py +1 -4
- kailash/resources/__init__.py +40 -0
- kailash/resources/factory.py +533 -0
- kailash/resources/health.py +319 -0
- kailash/resources/reference.py +288 -0
- kailash/resources/registry.py +392 -0
- kailash/runtime/async_local.py +711 -302
- kailash/testing/__init__.py +34 -0
- kailash/testing/async_test_case.py +353 -0
- kailash/testing/async_utils.py +345 -0
- kailash/testing/fixtures.py +458 -0
- kailash/testing/mock_registry.py +495 -0
- kailash/workflow/__init__.py +8 -0
- kailash/workflow/async_builder.py +621 -0
- kailash/workflow/async_patterns.py +766 -0
- kailash/workflow/cyclic_runner.py +107 -16
- kailash/workflow/graph.py +7 -2
- kailash/workflow/resilience.py +11 -1
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/METADATA +7 -4
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/RECORD +57 -22
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/WHEEL +0 -0
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.5.0.dist-info → kailash-0.6.0.dist-info}/top_level.txt +0 -0
kailash/gateway/api.py
ADDED
@@ -0,0 +1,289 @@
|
|
1
|
+
"""FastAPI integration for Enhanced Gateway.
|
2
|
+
|
3
|
+
This module provides REST API endpoints for the enhanced gateway
|
4
|
+
with resource management and async workflow support.
|
5
|
+
"""
|
6
|
+
|
7
|
+
import asyncio
|
8
|
+
import logging
|
9
|
+
from datetime import datetime
|
10
|
+
from typing import Any, Dict, List, Optional, Union
|
11
|
+
|
12
|
+
from fastapi import APIRouter, BackgroundTasks, Depends, FastAPI, HTTPException
|
13
|
+
from fastapi.responses import JSONResponse
|
14
|
+
from pydantic import BaseModel, Field
|
15
|
+
|
16
|
+
from ..resources.registry import ResourceRegistry
|
17
|
+
from .enhanced_gateway import (
|
18
|
+
EnhancedDurableAPIGateway,
|
19
|
+
ResourceReference,
|
20
|
+
WorkflowRequest,
|
21
|
+
WorkflowResponse,
|
22
|
+
)
|
23
|
+
from .security import SecretManager
|
24
|
+
|
25
|
+
logger = logging.getLogger(__name__)
|
26
|
+
|
27
|
+
|
28
|
+
# Pydantic models for API
class ResourceReferenceModel(BaseModel):
    """Model for resource reference in API.

    Mirrors the gateway-internal ``ResourceReference``: a resource is
    described by a type string, a free-form config dict, and an optional
    pointer to a separately stored credentials secret.
    """

    # e.g. "database", "http_client", "cache" (per the field description).
    type: str = Field(
        ..., description="Resource type (database, http_client, cache, etc.)"
    )
    # Resource-specific configuration (host/port/etc. for a database).
    config: Dict[str, Any] = Field(..., description="Resource configuration")
    # Name of a credentials secret resolved server-side — presumably by
    # SecretManager; confirm against gateway/security.py.
    credentials_ref: Optional[str] = Field(
        None, description="Reference to credentials secret"
    )

    class Config:
        # NOTE(review): `schema_extra` is the Pydantic v1 config key;
        # Pydantic v2 renamed it to `json_schema_extra` — confirm the
        # pinned pydantic version.
        schema_extra = {
            "example": {
                "type": "database",
                "config": {"host": "localhost", "port": 5432, "database": "myapp"},
                "credentials_ref": "db_credentials",
            }
        }
|
48
|
+
|
49
|
+
|
50
|
+
class WorkflowRequestModel(BaseModel):
    """API model for workflow requests.

    Resources may be given either as an ``@name`` string referring to a
    resource already registered with the gateway, or as an inline
    ``ResourceReferenceModel`` definition.
    """

    # Arbitrary workflow input parameters, forwarded as-is.
    inputs: Dict[str, Any] = Field(..., description="Workflow input parameters")
    # Optional map: resource name -> "@registered_name" or inline definition.
    resources: Optional[Dict[str, Union[str, ResourceReferenceModel]]] = Field(
        None,
        description="Resource references (@name for registered, or inline definition)",
    )
    # Extra variables made available to the execution context.
    context: Optional[Dict[str, Any]] = Field(
        None, description="Additional context variables"
    )

    class Config:
        # NOTE(review): Pydantic v1 key; v2 uses `json_schema_extra`.
        schema_extra = {
            "example": {
                "inputs": {"user_id": 123, "action": "process"},
                "resources": {
                    "db": "@main_database",
                    "api": {
                        "type": "http_client",
                        "config": {"base_url": "https://api.example.com"},
                        "credentials_ref": "api_key_secret",
                    },
                },
                "context": {"environment": "production", "trace_id": "abc123"},
            }
        }
|
77
|
+
|
78
|
+
|
79
|
+
class WorkflowResponseModel(BaseModel):
    """API model for workflow responses.

    Field-for-field mirror of the internal ``WorkflowResponse`` dataclass;
    populated from ``WorkflowResponse.to_dict()``.
    """

    request_id: str
    workflow_id: str
    # One of: pending, running, completed, failed (set by the gateway).
    status: str
    # Workflow output when status == "completed".
    result: Optional[Any] = None
    # Error message when status == "failed".
    error: Optional[str] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    # Wall-clock duration in seconds (completed_at - started_at).
    execution_time: Optional[float] = None
|
90
|
+
|
91
|
+
|
92
|
+
class WorkflowRegistrationModel(BaseModel):
    """Model for workflow registration.

    ``workflow_definition`` is a dict with "nodes" and "connections" lists,
    consumed by the /register endpoint to drive a WorkflowBuilder.
    """

    workflow_definition: Dict[str, Any] = Field(..., description="Workflow definition")
    # Names of resources the workflow needs at execution time.
    required_resources: Optional[List[str]] = Field(
        None, description="Required resource names"
    )
    description: Optional[str] = Field(None, description="Workflow description")
|
100
|
+
|
101
|
+
|
102
|
+
# Create router: all endpoints live under /api/v1 and are tagged "workflows".
router = APIRouter(prefix="/api/v1", tags=["workflows"])

# Process-wide singleton gateway, lazily created by get_gateway() or set
# eagerly by create_gateway_app().
_gateway_instance: Optional[EnhancedDurableAPIGateway] = None
|
107
|
+
|
108
|
+
|
109
|
+
async def get_gateway() -> EnhancedDurableAPIGateway:
    """Get or create the process-wide gateway instance.

    FastAPI dependency: lazily instantiates a default
    ``EnhancedDurableAPIGateway`` the first time it is needed and returns
    the same instance thereafter.
    """
    global _gateway_instance
    # Compare against None explicitly: `if not _gateway_instance` would
    # re-create the singleton whenever the gateway object happened to be
    # falsy (e.g. if the class ever grows __len__/__bool__).
    if _gateway_instance is None:
        _gateway_instance = EnhancedDurableAPIGateway()
    return _gateway_instance
|
115
|
+
|
116
|
+
|
117
|
+
# API Endpoints
@router.post("/workflows/{workflow_id}/execute", response_model=WorkflowResponseModel)
async def execute_workflow(
    workflow_id: str,
    request: WorkflowRequestModel,
    background_tasks: BackgroundTasks,
    gateway: EnhancedDurableAPIGateway = Depends(get_gateway),
):
    """Execute a workflow with resource support.

    Translates the API-level request into the internal ``WorkflowRequest``
    (converting inline resource definitions to ``ResourceReference``) and
    delegates to the gateway. Any execution failure becomes a 500.

    Note: ``background_tasks`` is injected by FastAPI but not used here.
    """

    def _to_internal(ref):
        """Map one API resource ref to its internal representation."""
        # "@name" strings pass through untouched; inline definitions
        # (model or raw dict) become ResourceReference objects.
        if isinstance(ref, str):
            return ref
        if isinstance(ref, ResourceReferenceModel):
            return ResourceReference(
                type=ref.type,
                config=ref.config,
                credentials_ref=ref.credentials_ref,
            )
        if isinstance(ref, dict):
            return ResourceReference(**ref)
        return None  # unknown shape: silently skipped, matching prior behavior

    resources = {}
    for name, ref in (request.resources or {}).items():
        internal = _to_internal(ref)
        if internal is not None:
            resources[name] = internal

    workflow_request = WorkflowRequest(
        inputs=request.inputs, resources=resources, context=request.context or {}
    )

    try:
        response = await gateway.execute_workflow(workflow_id, workflow_request)
        return WorkflowResponseModel(**response.to_dict())
    except Exception as e:
        logger.error(f"Workflow execution failed: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
152
|
+
|
153
|
+
|
154
|
+
@router.get(
    "/workflows/{workflow_id}/status/{request_id}", response_model=WorkflowResponseModel
)
async def get_workflow_status(
    workflow_id: str,
    request_id: str,
    gateway: EnhancedDurableAPIGateway = Depends(get_gateway),
):
    """Get status of a workflow execution.

    404 if the request id is unknown (gateway raises ValueError);
    400 if the request belongs to a different workflow than the URL claims.
    """
    try:
        response = await gateway.get_workflow_status(request_id)
        # Guard against a request id being queried under the wrong workflow.
        if response.workflow_id != workflow_id:
            # HTTPException is not a ValueError, so it escapes the handler
            # below and reaches the client as a 400.
            raise HTTPException(
                status_code=400,
                detail=f"Request {request_id} is for workflow {response.workflow_id}, not {workflow_id}",
            )
        return WorkflowResponseModel(**response.to_dict())
    except ValueError as e:
        # Gateway signals "request not found" with ValueError.
        raise HTTPException(status_code=404, detail=str(e))
|
173
|
+
|
174
|
+
|
175
|
+
@router.get("/workflows")
async def list_workflows(gateway: EnhancedDurableAPIGateway = Depends(get_gateway)):
    """List all registered workflows.

    Thin delegation: returns the gateway's workflow-metadata mapping as-is.
    """
    return gateway.list_workflows()
|
179
|
+
|
180
|
+
|
181
|
+
@router.get("/workflows/{workflow_id}")
async def get_workflow_details(
    workflow_id: str, gateway: EnhancedDurableAPIGateway = Depends(get_gateway)
):
    """Get details of a specific workflow.

    Looks the workflow up in the gateway's listing; unknown ids map to 404.
    """
    catalog = gateway.list_workflows()
    try:
        # EAFP: a single lookup instead of membership test + index.
        return catalog[workflow_id]
    except KeyError:
        raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found")
|
190
|
+
|
191
|
+
|
192
|
+
@router.post("/workflows/{workflow_id}/register")
async def register_workflow(
    workflow_id: str,
    registration: WorkflowRegistrationModel,
    gateway: EnhancedDurableAPIGateway = Depends(get_gateway),
):
    """Register a new workflow.

    Builds a Workflow from the declarative definition (nodes + connections)
    and registers it with the gateway. Any failure — bad definition or
    gateway error — is reported as a 400 with the exception message.
    """
    try:
        # Build workflow from definition
        # Local import — presumably to avoid a circular import between the
        # gateway and workflow packages; confirm before hoisting.
        from ..workflow import WorkflowBuilder

        builder = WorkflowBuilder()

        # Parse workflow definition
        for node in registration.workflow_definition.get("nodes", []):
            builder.add_node(node["type"], node["id"], node.get("config", {}))

        for conn in registration.workflow_definition.get("connections", []):
            # NOTE(review): argument order here is
            # (from_node, from_output, to_node, to_input) — verify against
            # WorkflowBuilder.add_connection's signature.
            builder.add_connection(
                conn["from_node"],
                conn.get("from_output"),
                conn["to_node"],
                conn.get("to_input"),
            )

        workflow = builder.build()
        # The URL path segment, not the definition, is the canonical name.
        workflow.name = workflow_id

        # Register with gateway
        gateway.register_workflow(
            workflow_id,
            workflow,
            required_resources=registration.required_resources,
            description=registration.description,
        )

        return {
            "status": "registered",
            "workflow_id": workflow_id,
            "message": f"Workflow {workflow_id} registered successfully",
        }
    except Exception as e:
        # Broad catch: all registration failures surface as client errors.
        logger.error(f"Workflow registration failed: {e}", exc_info=True)
        raise HTTPException(status_code=400, detail=str(e))
|
236
|
+
|
237
|
+
|
238
|
+
@router.get("/health")
async def health_check(gateway: EnhancedDurableAPIGateway = Depends(get_gateway)):
    """Check gateway and resource health.

    Delegates to the gateway's async health_check, which also probes each
    registered resource.
    """
    return await gateway.health_check()
|
242
|
+
|
243
|
+
|
244
|
+
@router.get("/resources")
async def list_resources(gateway: EnhancedDurableAPIGateway = Depends(get_gateway)):
    """List available resources.

    Returns the names known to the gateway's ResourceRegistry.
    """
    return gateway.resource_registry.list_resources()
|
248
|
+
|
249
|
+
|
250
|
+
# Create FastAPI app
def create_gateway_app(
    resource_registry: Optional[ResourceRegistry] = None,
    secret_manager: Optional[SecretManager] = None,
    title: str = "Kailash Enhanced Gateway",
    description: str = "API Gateway for async workflows with resource management",
    version: str = "1.0.0",
) -> FastAPI:
    """Create FastAPI app for gateway.

    Replaces the module-level singleton with a gateway built from the given
    registry/secret manager, mounts the v1 router, and wires startup /
    shutdown hooks (shutdown cleans up the resource registry).

    Note: calling this swaps the singleton for the whole process — any
    previously obtained gateway is abandoned without cleanup.
    """
    app = FastAPI(title=title, description=description, version=version)

    # Set up gateway instance
    global _gateway_instance
    _gateway_instance = EnhancedDurableAPIGateway(
        resource_registry=resource_registry,
        secret_manager=secret_manager,
        title=title,
        description=description,
        version=version,
    )

    # Include router
    app.include_router(router)

    # Startup event
    # NOTE(review): @app.on_event is deprecated in current FastAPI in
    # favor of lifespan handlers — consider migrating.
    @app.on_event("startup")
    async def startup_event():
        """Initialize gateway on startup."""
        logger.info("Enhanced Gateway starting up...")
        # Could load workflows from storage here

    # Shutdown event
    @app.on_event("shutdown")
    async def shutdown_event():
        """Cleanup on shutdown."""
        logger.info("Enhanced Gateway shutting down...")
        if _gateway_instance and _gateway_instance.resource_registry:
            await _gateway_instance.resource_registry.cleanup()

    return app
|
@@ -0,0 +1,357 @@
|
|
1
|
+
"""Enhanced Gateway with resource management and async workflow support.
|
2
|
+
|
3
|
+
This module provides an enhanced version of the DurableAPIGateway that adds:
|
4
|
+
- Resource reference resolution for non-serializable objects
|
5
|
+
- Integration with ResourceRegistry for shared resources
|
6
|
+
- Secret management for credentials
|
7
|
+
- Async workflow execution support
|
8
|
+
- Health checks for resources
|
9
|
+
"""
|
10
|
+
|
11
|
+
import asyncio
|
12
|
+
import json
|
13
|
+
import logging
|
14
|
+
import uuid
|
15
|
+
from dataclasses import dataclass, field
|
16
|
+
from datetime import UTC, datetime
|
17
|
+
from typing import Any, Dict, List, Optional, Set, Union
|
18
|
+
|
19
|
+
from ..middleware.gateway.durable_gateway import DurableAPIGateway
|
20
|
+
from ..resources.registry import ResourceRegistry
|
21
|
+
from ..runtime.async_local import AsyncLocalRuntime, ExecutionContext
|
22
|
+
from ..workflow import Workflow
|
23
|
+
from .resource_resolver import ResourceReference, ResourceResolver
|
24
|
+
from .security import SecretManager
|
25
|
+
|
26
|
+
logger = logging.getLogger(__name__)
|
27
|
+
|
28
|
+
|
29
|
+
class WorkflowNotFoundError(Exception):
    """Raised when a workflow id is not registered with the gateway."""
|
33
|
+
|
34
|
+
|
35
|
+
@dataclass
class WorkflowRequest:
    """Enhanced workflow request with resource support.

    Fields:
        request_id: Unique id for this request (auto-generated UUID4).
        inputs: Workflow input parameters.
        resources: Map of resource name -> "@registered_name" string or an
            inline ResourceReference.
        context: Extra context variables for execution.
        timestamp: Creation time, timezone-aware UTC.
    """

    request_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    inputs: Dict[str, Any] = field(default_factory=dict)
    resources: Dict[str, Union[str, ResourceReference]] = field(default_factory=dict)
    context: Dict[str, Any] = field(default_factory=dict)
    # Fix: datetime.utcnow() is naive and deprecated since Python 3.12; use
    # the timezone-aware datetime.now(UTC), consistent with the rest of this
    # module (WorkflowResponse timestamps, health_check).
    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))

    def to_dict(self) -> Dict[str, Any]:
        """Convert to JSON-serializable dict."""
        return {
            "request_id": self.request_id,
            "inputs": self.inputs,
            "resources": {
                # "@name" strings serialize as-is; inline references via
                # their own to_dict().
                k: v if isinstance(v, str) else v.to_dict()
                for k, v in self.resources.items()
            },
            "context": self.context,
            "timestamp": self.timestamp.isoformat(),
        }
|
57
|
+
|
58
|
+
|
59
|
+
@dataclass
class WorkflowResponse:
    """Response from workflow execution.

    Carries identity (request/workflow ids), lifecycle status, the result
    or error, and timing information.
    """

    request_id: str
    workflow_id: str
    status: str  # pending, running, completed, failed
    result: Optional[Any] = None
    error: Optional[str] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    execution_time: Optional[float] = None

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict view of this response."""

        def _iso(ts: Optional[datetime]) -> Optional[str]:
            # Timestamps serialize to ISO-8601; absent ones stay None.
            return None if ts is None else ts.isoformat()

        payload: Dict[str, Any] = {
            "request_id": self.request_id,
            "workflow_id": self.workflow_id,
            "status": self.status,
            "result": self.result,
            "error": self.error,
            "started_at": _iso(self.started_at),
            "completed_at": _iso(self.completed_at),
            "execution_time": self.execution_time,
        }
        return payload
|
86
|
+
|
87
|
+
|
88
|
+
class EnhancedDurableAPIGateway(DurableAPIGateway):
    """Gateway with resource management and async workflow support.

    Extends DurableAPIGateway with a ResourceRegistry for shared,
    non-serializable resources, a SecretManager for credentials, an
    AsyncLocalRuntime for execution, and per-request bookkeeping with
    delayed cleanup.
    """

    def __init__(
        self,
        resource_registry: ResourceRegistry = None,
        secret_manager: SecretManager = None,
        **kwargs,
    ):
        # Remaining kwargs (title, description, version, ...) go to the
        # parent DurableAPIGateway.
        super().__init__(**kwargs)
        self.resource_registry = resource_registry or ResourceRegistry()
        self.secret_manager = secret_manager or SecretManager()
        # workflow_id -> set of required resource names.
        self._workflow_resources: Dict[str, Set[str]] = {}
        self._resource_resolver = ResourceResolver(
            self.resource_registry, self.secret_manager
        )
        # Runtime shares the registry so nodes can look up resources.
        self._runtime = AsyncLocalRuntime(resource_registry=self.resource_registry)
        # request_id -> in-flight/recent WorkflowResponse.
        self._active_requests: Dict[str, WorkflowResponse] = {}
        # Deferred-cleanup tasks; cancelled in shutdown().
        self._cleanup_tasks: List[asyncio.Task] = []

    def register_workflow(
        self,
        workflow_id: str,
        workflow: Workflow,
        required_resources: List[str] = None,
        description: str = None,
    ):
        """Register workflow with resource requirements.

        Resource requirements come from both the explicit argument and the
        workflow's own metadata ("required_resources"); the two are merged.
        """
        # Use parent's register_workflow method
        super().register_workflow(workflow_id, workflow)

        # Track resource requirements
        if required_resources:
            self._workflow_resources[workflow_id] = set(required_resources)

        # Extract requirements from workflow metadata
        if hasattr(workflow, "metadata"):
            declared_resources = workflow.metadata.get("required_resources", [])
            if workflow_id not in self._workflow_resources:
                self._workflow_resources[workflow_id] = set()
            self._workflow_resources[workflow_id].update(declared_resources)

        # Store workflow description
        if description and hasattr(workflow, "metadata"):
            workflow.metadata["description"] = description

    async def execute_workflow(
        self, workflow_id: str, request: WorkflowRequest
    ) -> WorkflowResponse:
        """Execute workflow with resource injection.

        Never raises for execution errors: failures are captured on the
        returned WorkflowResponse (status="failed", error message set).
        A cleanup task is always scheduled to expire the request record.
        """
        # Create response object
        response = WorkflowResponse(
            request_id=request.request_id,
            workflow_id=workflow_id,
            status="pending",
            started_at=datetime.now(UTC),
        )

        # Store active request so status can be polled while running.
        self._active_requests[request.request_id] = response

        try:
            # Validate workflow exists
            if workflow_id not in self.workflows:
                raise WorkflowNotFoundError(f"Workflow {workflow_id} not found")

            workflow_reg = self.workflows[workflow_id]
            workflow = workflow_reg.workflow

            # Update status
            response.status = "running"

            # Prepare execution context
            context = await self._prepare_execution_context(workflow_id, request)

            # Execute with resource injection
            result = await self._execute_with_resources(
                workflow, request.inputs, context
            )

            # Update response; dict results are unwrapped to their
            # "results" entry when present.
            response.status = "completed"
            response.result = (
                result.get("results", result) if isinstance(result, dict) else result
            )
            response.completed_at = datetime.now(UTC)
            response.execution_time = (
                response.completed_at - response.started_at
            ).total_seconds()

        except Exception as e:
            # Handle error: record failure on the response instead of raising.
            response.status = "failed"
            response.error = str(e)
            response.completed_at = datetime.now(UTC)
            response.execution_time = (
                response.completed_at - response.started_at
            ).total_seconds()

            # Log error
            logger.error(f"Workflow {workflow_id} failed: {e}", exc_info=True)

        finally:
            # Clean up active request after a delay (default 1h) so callers
            # can still poll the final status for a while.
            cleanup_task = asyncio.create_task(
                self._cleanup_request(request.request_id)
            )
            self._cleanup_tasks.append(cleanup_task)

        return response

    async def _prepare_execution_context(
        self, workflow_id: str, request: WorkflowRequest
    ) -> ExecutionContext:
        """Prepare execution context with resources.

        Resolves request-supplied resource references, validates "@name"
        references and the workflow's declared requirements against the
        registry, and copies request.context into context variables.
        """
        context = ExecutionContext()
        context.resource_registry = self.resource_registry

        # Add request context variables
        for key, value in request.context.items():
            context.set_variable(key, value)

        # Handle resource references in request
        if request.resources:
            for name, ref in request.resources.items():
                if isinstance(ref, ResourceReference):
                    # Resolve resource reference
                    resource = await self._resource_resolver.resolve(ref)

                    # Register the resource under the expected name.
                    # Wrapper factory hands back the already-created
                    # resource; defined per-iteration so each closure
                    # captures its own resource.
                    class ExistingResourceFactory:
                        def __init__(self, resource):
                            self._resource = resource

                        async def create(self):
                            return self._resource

                    self.resource_registry.register_factory(
                        name, ExistingResourceFactory(resource)
                    )

                elif isinstance(ref, str) and ref.startswith("@"):
                    # Reference to registered resource
                    resource_name = ref[1:]  # Remove @ prefix
                    # Ensure resource exists
                    if not self.resource_registry.has_factory(resource_name):
                        raise ValueError(f"Resource '{resource_name}' not registered")
                    # Resource will be fetched on demand

                elif isinstance(ref, dict) and "type" in ref:
                    # Inline resource reference
                    resource_ref = ResourceReference(**ref)
                    resource = await self._resource_resolver.resolve(resource_ref)
                    # NOTE(review): unlike the ResourceReference branch
                    # above, the resolved resource is never registered under
                    # `name`, so the workflow cannot reach it — suspected
                    # bug; confirm intended behavior.

        # Add required resources to context: fail fast if any declared
        # requirement is missing from the registry.
        required = self._workflow_resources.get(workflow_id, set())
        for resource_name in required:
            if not self.resource_registry.has_factory(resource_name):
                raise ValueError(f"Required resource '{resource_name}' not available")

        return context

    async def _execute_with_resources(
        self, workflow: Workflow, inputs: Dict[str, Any], context: ExecutionContext
    ) -> Any:
        """Execute workflow with resources.

        Thin delegation to the async runtime.
        """
        # Use async runtime for execution
        result = await self._runtime.execute_workflow_async(workflow, inputs, context)
        return result

    async def _cleanup_request(self, request_id: str, delay: int = 3600):
        """Clean up request after delay.

        Sleeps `delay` seconds, then drops the request record. Swallows
        cancellation so shutdown() can cancel these tasks quietly.
        """
        try:
            await asyncio.sleep(delay)
            if request_id in self._active_requests:
                del self._active_requests[request_id]
        except asyncio.CancelledError:
            # Task was cancelled during shutdown
            pass

    async def get_workflow_status(self, request_id: str) -> WorkflowResponse:
        """Get status of workflow execution.

        Raises ValueError for unknown (or already-expired) request ids;
        the API layer maps this to a 404.
        """
        if request_id in self._active_requests:
            return self._active_requests[request_id]

        # Could check persistent storage here
        raise ValueError(f"Request {request_id} not found")

    def list_workflows(self) -> Dict[str, Dict[str, Any]]:
        """List all registered workflows with metadata."""
        workflows = {}

        for workflow_id, workflow_reg in self.workflows.items():
            workflow = workflow_reg.workflow
            metadata = getattr(workflow, "metadata", {})
            workflows[workflow_id] = {
                "name": workflow.name,
                # Workflow metadata wins over the registration description.
                "description": metadata.get(
                    "description", workflow_reg.description or ""
                ),
                "required_resources": list(
                    self._workflow_resources.get(workflow_id, [])
                ),
                "async_workflow": metadata.get("async_workflow", False),
                "node_count": len(workflow.nodes),
                "type": workflow_reg.type,
                "version": workflow_reg.version,
                "tags": workflow_reg.tags,
            }

        return workflows

    async def shutdown(self):
        """Shutdown the gateway and cleanup resources.

        Cancels deferred-cleanup tasks, drops request records, cleans up
        the runtime if it supports it, then closes the parent gateway.
        """
        # Cancel all cleanup tasks
        for task in self._cleanup_tasks:
            if not task.done():
                task.cancel()

        # Wait for all tasks to complete; return_exceptions=True so a
        # CancelledError from one task doesn't abort the gather.
        if self._cleanup_tasks:
            await asyncio.gather(*self._cleanup_tasks, return_exceptions=True)

        # Clear the task list
        self._cleanup_tasks.clear()

        # Clear active requests
        self._active_requests.clear()

        # Cleanup runtime if it has a cleanup method
        if hasattr(self._runtime, "cleanup"):
            await self._runtime.cleanup()

        # Call parent's close method to cleanup middleware components
        if hasattr(super(), "close"):
            await super().close()

    async def health_check(self) -> Dict[str, Any]:
        """Perform health check on gateway and resources.

        Returns overall status ("healthy" or "degraded"), counts, and a
        per-resource status map. A resource failure downgrades overall
        status but never raises.
        """
        health = {
            "status": "healthy",
            "timestamp": datetime.now(UTC).isoformat(),
            "workflows": len(self.workflows),
            "active_requests": len(self._active_requests),
            "resources": {},
        }

        # Check resource health
        for resource_name in self.resource_registry.list_resources():
            try:
                resource = await self.resource_registry.get_resource(resource_name)
                # Try to get health check
                # NOTE(review): reaches into the registry's private
                # _factories dict — consider a public accessor.
                factory = self.resource_registry._factories.get(resource_name)
                if (
                    factory
                    and hasattr(factory, "health_check")
                    and factory.health_check
                ):
                    is_healthy = await factory.health_check(resource)
                    health["resources"][resource_name] = (
                        "healthy" if is_healthy else "unhealthy"
                    )
                else:
                    # No health check available: assume healthy since the
                    # resource could at least be fetched.
                    health["resources"][resource_name] = "healthy"
            except Exception as e:
                health["resources"][resource_name] = f"unhealthy: {str(e)}"
                health["status"] = "degraded"

        return health
|