kailash 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/api/__init__.py +11 -1
- kailash/api/gateway.py +394 -0
- kailash/api/mcp_integration.py +478 -0
- kailash/api/workflow_api.py +29 -13
- kailash/nodes/ai/__init__.py +40 -4
- kailash/nodes/ai/a2a.py +1143 -0
- kailash/nodes/ai/agents.py +120 -6
- kailash/nodes/ai/ai_providers.py +224 -30
- kailash/nodes/ai/embedding_generator.py +34 -38
- kailash/nodes/ai/intelligent_agent_orchestrator.py +2114 -0
- kailash/nodes/ai/llm_agent.py +351 -356
- kailash/nodes/ai/self_organizing.py +1624 -0
- kailash/nodes/api/http.py +106 -25
- kailash/nodes/api/rest.py +116 -21
- kailash/nodes/base.py +60 -64
- kailash/nodes/code/python.py +61 -42
- kailash/nodes/data/__init__.py +10 -10
- kailash/nodes/data/readers.py +117 -66
- kailash/nodes/data/retrieval.py +1 -1
- kailash/nodes/data/sharepoint_graph.py +23 -25
- kailash/nodes/data/sql.py +24 -26
- kailash/nodes/data/writers.py +41 -44
- kailash/nodes/logic/__init__.py +9 -3
- kailash/nodes/logic/async_operations.py +60 -21
- kailash/nodes/logic/operations.py +43 -22
- kailash/nodes/logic/workflow.py +26 -18
- kailash/nodes/mcp/client.py +29 -33
- kailash/nodes/transform/__init__.py +8 -1
- kailash/nodes/transform/formatters.py +1 -1
- kailash/nodes/transform/processors.py +119 -4
- kailash/tracking/metrics_collector.py +6 -7
- kailash/utils/export.py +2 -2
- kailash/utils/templates.py +16 -16
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/METADATA +293 -29
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/RECORD +40 -35
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/WHEEL +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/entry_points.txt +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/top_level.txt +0 -0
kailash/__init__.py
CHANGED
kailash/api/__init__.py
CHANGED
@@ -2,6 +2,16 @@
|
|
2
2
|
Kailash API module for exposing workflows as REST APIs.
|
3
3
|
"""
|
4
4
|
|
5
|
+
# Public API surface: re-export the gateway, MCP integration, and
# workflow-serving entry points so callers can import them directly
# from ``kailash.api``.
from .gateway import WorkflowAPIGateway, WorkflowOrchestrator
from .mcp_integration import MCPIntegration, MCPToolNode
from .workflow_api import HierarchicalRAGAPI, WorkflowAPI, create_workflow_api

# Explicit export list keeps ``from kailash.api import *`` predictable.
__all__ = [
    "WorkflowAPI",
    "HierarchicalRAGAPI",
    "create_workflow_api",
    "WorkflowAPIGateway",
    "WorkflowOrchestrator",
    "MCPIntegration",
    "MCPToolNode",
]
|
kailash/api/gateway.py
ADDED
@@ -0,0 +1,394 @@
|
|
1
|
+
"""Multi-workflow API gateway for managing multiple Kailash workflows.
|
2
|
+
|
3
|
+
This module provides a unified API server that can host multiple workflows
|
4
|
+
with dynamic routing, MCP integration, and centralized management.
|
5
|
+
|
6
|
+
Design Philosophy:
|
7
|
+
The gateway acts as a single entry point for all workflow executions,
|
8
|
+
providing unified authentication, monitoring, and resource management.
|
9
|
+
It supports both embedded workflows (running in-process) and proxied
|
10
|
+
workflows (running in separate processes).
|
11
|
+
|
12
|
+
Example:
|
13
|
+
>>> # Basic usage with multiple workflows
|
14
|
+
>>> from kailash.api import WorkflowAPIGateway
|
15
|
+
>>> from kailash.workflow import Workflow
|
16
|
+
|
17
|
+
>>> # Create workflows
|
18
|
+
>>> sales_workflow = Workflow("sales_pipeline")
|
19
|
+
>>> analytics_workflow = Workflow("analytics_pipeline")
|
20
|
+
|
21
|
+
>>> # Create gateway
|
22
|
+
>>> gateway = WorkflowAPIGateway(
|
23
|
+
... title="Company API Gateway",
|
24
|
+
... description="Unified API for all workflows"
|
25
|
+
... )
|
26
|
+
|
27
|
+
>>> # Register workflows
|
28
|
+
>>> gateway.register_workflow("sales", sales_workflow)
|
29
|
+
>>> gateway.register_workflow("analytics", analytics_workflow)
|
30
|
+
|
31
|
+
>>> # Start server
|
32
|
+
>>> gateway.run(port=8000) # doctest: +SKIP
|
33
|
+
|
34
|
+
>>> # With MCP integration
|
35
|
+
>>> from kailash.api.mcp_integration import MCPIntegration
|
36
|
+
|
37
|
+
>>> # Add MCP server
|
38
|
+
>>> mcp = MCPIntegration("tools_server")
|
39
|
+
>>> gateway.register_mcp_server("tools", mcp)
|
40
|
+
|
41
|
+
>>> # With proxied workflows
|
42
|
+
>>> # Proxy to external workflow service
|
43
|
+
>>> gateway.proxy_workflow(
|
44
|
+
... "ml_pipeline",
|
45
|
+
... "http://ml-service:8080",
|
46
|
+
... health_check="/health"
|
47
|
+
... )
|
48
|
+
"""
|
49
|
+
|
50
|
+
import logging
|
51
|
+
from concurrent.futures import ThreadPoolExecutor
|
52
|
+
from contextlib import asynccontextmanager
|
53
|
+
from typing import Any, Dict, List, Optional
|
54
|
+
|
55
|
+
from fastapi import FastAPI, WebSocket
|
56
|
+
from fastapi.middleware.cors import CORSMiddleware
|
57
|
+
from pydantic import BaseModel, Field
|
58
|
+
|
59
|
+
from ..workflow import Workflow
|
60
|
+
from .workflow_api import WorkflowAPI
|
61
|
+
|
62
|
+
logger = logging.getLogger(__name__)
|
63
|
+
|
64
|
+
|
65
|
+
class WorkflowRegistration(BaseModel):
    """Registration details for a workflow."""

    # Required so the non-pydantic ``Workflow`` type can be stored as a field.
    model_config = {"arbitrary_types_allowed": True}

    # Unique identifier the gateway routes by (also the URL prefix).
    name: str
    type: str = Field(description="embedded or proxied")
    # Set only for embedded workflows (run in-process by the gateway).
    workflow: Optional[Workflow] = None
    # Set only for proxied workflows (requests forwarded to this URL).
    proxy_url: Optional[str] = None
    # Health-check endpoint path on the proxied service.
    health_check: Optional[str] = None
    description: Optional[str] = None
    version: str = "1.0.0"
    tags: List[str] = Field(default_factory=list)
|
78
|
+
|
79
|
+
|
80
|
+
class WorkflowAPIGateway:
    """Unified API gateway for multiple Kailash workflows.

    This gateway provides:
    - Dynamic workflow registration
    - Unified routing with prefix-based paths
    - MCP server integration
    - Health monitoring
    - Resource management
    - WebSocket support for real-time updates

    Attributes:
        app: FastAPI application instance
        workflows: Registry of all registered workflows
        executor: Thread pool for synchronous execution
        mcp_servers: Registry of MCP servers
    """

    def __init__(
        self,
        title: str = "Kailash Workflow Gateway",
        description: str = "Unified API for Kailash workflows",
        version: str = "1.0.0",
        max_workers: int = 10,
        cors_origins: Optional[List[str]] = None,
    ):
        """Initialize the API gateway.

        Args:
            title: API title for documentation
            description: API description
            version: API version
            max_workers: Maximum thread pool workers
            cors_origins: Allowed CORS origins; the CORS middleware is only
                installed when a non-empty list is provided
        """
        self.workflows: Dict[str, WorkflowRegistration] = {}
        self.mcp_servers: Dict[str, Any] = {}
        self.executor = ThreadPoolExecutor(max_workers=max_workers)

        # Create FastAPI app with a lifespan hook so the thread pool is
        # shut down cleanly when the server stops.
        @asynccontextmanager
        async def lifespan(app: FastAPI):
            # Startup
            logger.info(f"Starting {title} v{version}")
            yield
            # Shutdown
            logger.info("Shutting down gateway")
            self.executor.shutdown(wait=True)

        self.app = FastAPI(
            title=title, description=description, version=version, lifespan=lifespan
        )

        # Add CORS middleware
        if cors_origins:
            self.app.add_middleware(
                CORSMiddleware,
                allow_origins=cors_origins,
                allow_credentials=True,
                allow_methods=["*"],
                allow_headers=["*"],
            )

        # Register gateway-level endpoints (info, listing, health, WebSocket).
        self._register_root_endpoints()

    def _register_root_endpoints(self):
        """Register gateway-level endpoints."""

        @self.app.get("/")
        async def root():
            """Gateway information."""
            return {
                "name": self.app.title,
                "version": self.app.version,
                "workflows": list(self.workflows.keys()),
                "mcp_servers": list(self.mcp_servers.keys()),
            }

        @self.app.get("/workflows")
        async def list_workflows():
            """List all registered workflows."""
            return {
                name: {
                    "type": reg.type,
                    "description": reg.description,
                    "version": reg.version,
                    "tags": reg.tags,
                    "endpoints": self._get_workflow_endpoints(name),
                }
                for name, reg in self.workflows.items()
            }

        @self.app.get("/health")
        async def health_check():
            """Gateway health check."""
            health_status = {"status": "healthy", "workflows": {}, "mcp_servers": {}}

            # Check workflow health
            for name, reg in self.workflows.items():
                if reg.type == "embedded":
                    health_status["workflows"][name] = "healthy"
                else:
                    # TODO: Implement proxy health check
                    health_status["workflows"][name] = "unknown"

            # Check MCP server health. Iterate keys only — the server
            # objects are not consulted until a real check is implemented.
            for name in self.mcp_servers:
                # TODO: Implement MCP health check
                health_status["mcp_servers"][name] = "unknown"

            return health_status

        @self.app.websocket("/ws")
        async def websocket_endpoint(websocket: WebSocket):
            """WebSocket for real-time updates."""
            await websocket.accept()
            try:
                while True:
                    data = await websocket.receive_json()
                    # Handle WebSocket messages
                    if data.get("type") == "subscribe":
                        # TODO: Implement subscription logic for the
                        # workflow named in data.get("workflow")
                        await websocket.send_json(
                            {"type": "ack", "message": "Message received"}
                        )
            except Exception as e:
                logger.error(f"WebSocket error: {e}")
            finally:
                # Close defensively: the socket may already be closed by
                # the client or by the error that ended the receive loop,
                # in which case close() raises RuntimeError.
                try:
                    await websocket.close()
                except RuntimeError:
                    pass

    def register_workflow(
        self,
        name: str,
        workflow: Workflow,
        description: Optional[str] = None,
        version: str = "1.0.0",
        tags: Optional[List[str]] = None,
        **kwargs,
    ):
        """Register an embedded workflow.

        Args:
            name: Unique workflow identifier
            workflow: Workflow instance
            description: Workflow description
            version: Workflow version
            tags: Workflow tags for organization
            **kwargs: Additional WorkflowAPI parameters

        Raises:
            ValueError: If a workflow with this name is already registered.
        """
        if name in self.workflows:
            raise ValueError(f"Workflow '{name}' already registered")

        # Create WorkflowAPI wrapper. Forward **kwargs so the documented
        # "additional WorkflowAPI parameters" actually reach WorkflowAPI
        # (previously they were silently dropped).
        workflow_api = WorkflowAPI(
            workflow=workflow,
            app_name=f"{name} Workflow API",
            version=version,
            description=description,
            **kwargs,
        )

        # Mount the workflow app as a sub-application under /{name}.
        self.app.mount(f"/{name}", workflow_api.app)

        # Register workflow
        self.workflows[name] = WorkflowRegistration(
            name=name,
            type="embedded",
            workflow=workflow,
            description=description or workflow.name,
            version=version,
            tags=tags or [],
        )

        logger.info(f"Registered embedded workflow: {name}")

    def proxy_workflow(
        self,
        name: str,
        proxy_url: str,
        health_check: str = "/health",
        description: Optional[str] = None,
        version: str = "1.0.0",
        tags: Optional[List[str]] = None,
    ):
        """Register a proxied workflow.

        Args:
            name: Unique workflow identifier
            proxy_url: URL of the workflow service
            health_check: Health check endpoint path
            description: Workflow description
            version: Workflow version
            tags: Workflow tags

        Raises:
            ValueError: If a workflow with this name is already registered.
        """
        if name in self.workflows:
            raise ValueError(f"Workflow '{name}' already registered")

        # TODO: Implement proxy routing
        # This would use httpx or similar to forward requests

        self.workflows[name] = WorkflowRegistration(
            name=name,
            type="proxied",
            proxy_url=proxy_url,
            health_check=health_check,
            description=description,
            version=version,
            tags=tags or [],
        )

        logger.info(f"Registered proxied workflow: {name} -> {proxy_url}")

    def register_mcp_server(self, name: str, mcp_server: Any):
        """Register an MCP server.

        Args:
            name: Unique MCP server identifier
            mcp_server: MCP server instance

        Raises:
            ValueError: If an MCP server with this name is already registered.
        """
        if name in self.mcp_servers:
            raise ValueError(f"MCP server '{name}' already registered")

        self.mcp_servers[name] = mcp_server

        # TODO: Integrate MCP tools with workflows
        logger.info(f"Registered MCP server: {name}")

    def _get_workflow_endpoints(self, name: str) -> List[str]:
        """Get endpoints for a workflow.

        Returns an empty list when the workflow is unknown.
        """
        reg = self.workflows.get(name)
        if not reg:
            return []

        base_endpoints = [
            f"/{name}/execute",
            f"/{name}/workflow/info",
            f"/{name}/health",
        ]

        # Only embedded workflows expose interactive docs via the
        # mounted sub-application.
        if reg.type == "embedded":
            base_endpoints.append(f"/{name}/docs")

        return base_endpoints

    def run(
        self, host: str = "0.0.0.0", port: int = 8000, reload: bool = False, **kwargs
    ):
        """Run the gateway server.

        Args:
            host: Host to bind to
            port: Port to bind to
            reload: Enable auto-reload
            **kwargs: Additional uvicorn parameters
        """
        # Imported lazily so the gateway can be constructed (e.g. for
        # testing) without uvicorn installed.
        import uvicorn

        uvicorn.run(self.app, host=host, port=port, reload=reload, **kwargs)
|
340
|
+
|
341
|
+
|
342
|
+
class WorkflowOrchestrator:
    """Advanced orchestrator for complex workflow scenarios.

    Provides:
    - Workflow chaining and dependencies
    - Conditional routing between workflows
    - Parallel workflow execution
    - Transaction management
    - Event-driven triggers
    """

    def __init__(self, gateway: WorkflowAPIGateway):
        """Initialize orchestrator with a gateway."""
        self.gateway = gateway
        # Chain name -> ordered list of workflow names to run.
        self.chains: Dict[str, List[str]] = {}
        # Workflow name -> prerequisite workflows (not yet consulted).
        self.dependencies: Dict[str, List[str]] = {}

    def create_chain(self, name: str, workflow_sequence: List[str]):
        """Create a workflow chain.

        Args:
            name: Chain identifier
            workflow_sequence: Ordered list of workflow names

        Raises:
            ValueError: If any workflow in the sequence is not registered
                with the gateway.
        """
        registered = self.gateway.workflows
        # Every step must already be known to the gateway.
        for step in workflow_sequence:
            if step not in registered:
                raise ValueError(f"Workflow '{step}' not registered")

        self.chains[name] = workflow_sequence

    async def execute_chain(
        self, chain_name: str, initial_input: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Execute a workflow chain.

        Args:
            chain_name: Chain to execute
            initial_input: Input for first workflow

        Returns:
            Final output from the chain

        Raises:
            ValueError: If the chain name is unknown.
        """
        if chain_name not in self.chains:
            raise ValueError(f"Chain '{chain_name}' not found")

        result = initial_input
        # TODO: Implement execution logic — run each workflow with the
        # previous step's output and feed the result forward. Currently a
        # stub: the loop performs no work and the input is returned as-is.
        for _workflow_name in self.chains[chain_name]:
            pass

        return result
|