kailash 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +20 -20
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +44 -47
- kailash/nodes/base.py +21 -23
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +66 -57
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +10 -10
- kailash/nodes/data/file_discovery.py +28 -31
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +10 -10
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +17 -19
- kailash/runtime/docker.py +20 -22
- kailash/runtime/local.py +16 -16
- kailash/runtime/parallel.py +23 -23
- kailash/runtime/parallel_cyclic.py +27 -27
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +20 -20
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +2 -3
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +16 -16
- kailash/visualization/reports.py +25 -27
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +30 -32
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +44 -44
- kailash/workflow/graph.py +40 -40
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +22 -22
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/METADATA +53 -5
- kailash-0.3.2.dist-info/RECORD +136 -0
- kailash-0.3.0.dist-info/RECORD +0 -130
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/WHEEL +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/top_level.txt +0 -0
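Most hunks in the per-file diffs below modernize type annotations on the new side: built-in generics (dict[str, Any], list[str]), PEP 604 optionals (Workflow | None), Callable imported from collections.abc, and timezone-aware timestamps via datetime.now(UTC). A minimal before/after sketch of that idiom, with hypothetical names rather than code from the package:

# Hypothetical illustration of the annotation style the 0.3.2 side adopts
# (PEP 585 built-in generics, PEP 604 unions); datetime.UTC needs Python 3.11+.
from collections.abc import Callable
from datetime import UTC, datetime
from typing import Any

# Older typing-module style:
#     from typing import Any, Callable, Dict, List, Optional
#     def update(updates: Dict[str, Any], updated_by: Optional[str] = None) -> List[str]: ...

# Modernized style:
def update(updates: dict[str, Any], updated_by: str | None = None) -> list[str]:
    """Stamp the update with a timezone-aware timestamp and return the changed keys."""
    if updated_by:
        updates["updated_by"] = updated_by
    updates["updated_at"] = datetime.now(UTC)
    return list(updates)


def on_change(callback: Callable[[dict[str, Any]], None]) -> None:
    """Register a change hook; Callable now comes from collections.abc."""
    callback({"changed_at": datetime.now(UTC)})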
kailash/api/database.py
CHANGED
@@ -9,9 +9,9 @@ This module provides:
 
 import uuid
 from contextlib import contextmanager
-from datetime import …
+from datetime import UTC, datetime
 from pathlib import Path
-from typing import Any…
+from typing import Any
 
 from sqlalchemy import (
     JSON,
@@ -383,7 +383,7 @@ class WorkflowRepository:
         tenant_id: str,
         name: str,
         description: str,
-        definition: …
+        definition: dict[str, Any],
         created_by: str = None,
     ) -> Workflow:
         """Create a new workflow"""
@@ -403,7 +403,7 @@ class WorkflowRepository:
         return workflow
 
     def update(
-        self, workflow_id: str, updates: …
+        self, workflow_id: str, updates: dict[str, Any], updated_by: str = None
     ) -> Workflow:
         """Update a workflow"""
         workflow = self.session.query(Workflow).filter_by(id=workflow_id).first()
@@ -433,7 +433,7 @@ class WorkflowRepository:
         self,
         workflow_id: str,
         version: int,
-        definition: …
+        definition: dict[str, Any],
         change_message: str,
         created_by: str = None,
     ):
@@ -448,11 +448,11 @@ class WorkflowRepository:
         self.session.add(version_record)
         self.session.commit()
 
-    def get(self, workflow_id: str) -> …
+    def get(self, workflow_id: str) -> Workflow | None:
         """Get a workflow by ID"""
         return self.session.query(Workflow).filter_by(id=workflow_id).first()
 
-    def list(self, tenant_id: str, limit: int = 100, offset: int = 0) -> …
+    def list(self, tenant_id: str, limit: int = 100, offset: int = 0) -> list[Workflow]:
         """List workflows for a tenant"""
         return (
             self.session.query(Workflow)
@@ -477,14 +477,14 @@ class CustomNodeRepository:
     def __init__(self, session: Session):
         self.session = session
 
-    def create(self, tenant_id: str, node_data: …
+    def create(self, tenant_id: str, node_data: dict[str, Any]) -> CustomNode:
         """Create a custom node"""
         node = CustomNode(tenant_id=tenant_id, **node_data)
         self.session.add(node)
         self.session.commit()
         return node
 
-    def update(self, node_id: str, updates: …
+    def update(self, node_id: str, updates: dict[str, Any]) -> CustomNode:
         """Update a custom node"""
         node = self.session.query(CustomNode).filter_by(id=node_id).first()
         if not node:
@@ -497,7 +497,7 @@ class CustomNodeRepository:
         self.session.commit()
         return node
 
-    def list(self, tenant_id: str) -> …
+    def list(self, tenant_id: str) -> list[CustomNode]:
         """List custom nodes for a tenant"""
         return (
             self.session.query(CustomNode)
@@ -506,7 +506,7 @@ class CustomNodeRepository:
             .all()
         )
 
-    def get(self, node_id: str) -> …
+    def get(self, node_id: str) -> CustomNode | None:
         """Get a custom node by ID"""
         return self.session.query(CustomNode).filter_by(id=node_id).first()
 
@@ -525,7 +525,7 @@ class ExecutionRepository:
         self.session = session
 
     def create(
-        self, workflow_id: str, tenant_id: str, parameters: …
+        self, workflow_id: str, tenant_id: str, parameters: dict[str, Any] = None
    ) -> WorkflowExecution:
         """Create an execution record"""
         execution = WorkflowExecution(
@@ -533,7 +533,7 @@ class ExecutionRepository:
             tenant_id=tenant_id,
             status="pending",
             parameters=parameters,
-            started_at=datetime.now(…
+            started_at=datetime.now(UTC),
         )
         self.session.add(execution)
         self.session.commit()
@@ -543,7 +543,7 @@ class ExecutionRepository:
         self,
         execution_id: str,
         status: str,
-        result: …
+        result: dict[str, Any] = None,
         error: str = None,
     ):
         """Update execution status"""
@@ -560,7 +560,7 @@ class ExecutionRepository:
             execution.error = error
 
         if status in ["completed", "failed"]:
-            execution.completed_at = datetime.now(…
+            execution.completed_at = datetime.now(UTC)
             if execution.started_at:
                 execution.execution_time_ms = int(
                     (execution.completed_at - execution.started_at).total_seconds()
@@ -569,13 +569,13 @@ class ExecutionRepository:
 
         self.session.commit()
 
-    def get(self, execution_id: str) -> …
+    def get(self, execution_id: str) -> WorkflowExecution | None:
         """Get execution by ID"""
         return self.session.query(WorkflowExecution).filter_by(id=execution_id).first()
 
     def list_for_workflow(
         self, workflow_id: str, limit: int = 50
-    ) -> …
+    ) -> list[WorkflowExecution]:
         """List executions for a workflow"""
         return (
             self.session.query(WorkflowExecution)
kailash/api/gateway.py
CHANGED
@@ -50,7 +50,7 @@ Example:
 import logging
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import asynccontextmanager
-from typing import Any…
+from typing import Any
 
 from fastapi import FastAPI, WebSocket
 from fastapi.middleware.cors import CORSMiddleware
@@ -69,12 +69,12 @@ class WorkflowRegistration(BaseModel):
 
     name: str
     type: str = Field(description="embedded or proxied")
-    workflow: …
-    proxy_url: …
-    health_check: …
-    description: …
+    workflow: Workflow | None = None
+    proxy_url: str | None = None
+    health_check: str | None = None
+    description: str | None = None
     version: str = "1.0.0"
-    tags: …
+    tags: list[str] = Field(default_factory=list)
 
 
 class WorkflowAPIGateway:
@@ -101,7 +101,7 @@ class WorkflowAPIGateway:
         description: str = "Unified API for Kailash workflows",
         version: str = "1.0.0",
         max_workers: int = 10,
-        cors_origins: …
+        cors_origins: list[str] = None,
     ):
         """Initialize the API gateway.
 
@@ -112,8 +112,8 @@ class WorkflowAPIGateway:
            max_workers: Maximum thread pool workers
            cors_origins: Allowed CORS origins
        """
-        self.workflows: …
-        self.mcp_servers: …
+        self.workflows: dict[str, WorkflowRegistration] = {}
+        self.mcp_servers: dict[str, Any] = {}
         self.executor = ThreadPoolExecutor(max_workers=max_workers)
 
         # Create FastAPI app with lifespan
@@ -213,9 +213,9 @@ class WorkflowAPIGateway:
         self,
         name: str,
         workflow: Workflow,
-        description: …
+        description: str | None = None,
         version: str = "1.0.0",
-        tags: …
+        tags: list[str] = None,
         **kwargs,
     ):
         """Register an embedded workflow.
@@ -259,9 +259,9 @@ class WorkflowAPIGateway:
         name: str,
         proxy_url: str,
         health_check: str = "/health",
-        description: …
+        description: str | None = None,
         version: str = "1.0.0",
-        tags: …
+        tags: list[str] = None,
     ):
         """Register a proxied workflow.
 
@@ -306,7 +306,7 @@ class WorkflowAPIGateway:
         # TODO: Integrate MCP tools with workflows
         logger.info(f"Registered MCP server: {name}")
 
-    def _get_workflow_endpoints(self, name: str) -> …
+    def _get_workflow_endpoints(self, name: str) -> list[str]:
         """Get endpoints for a workflow."""
         reg = self.workflows.get(name)
         if not reg:
@@ -353,10 +353,10 @@ class WorkflowOrchestrator:
     def __init__(self, gateway: WorkflowAPIGateway):
         """Initialize orchestrator with a gateway."""
         self.gateway = gateway
-        self.chains: …
-        self.dependencies: …
+        self.chains: dict[str, list[str]] = {}
+        self.dependencies: dict[str, list[str]] = {}
 
-    def create_chain(self, name: str, workflow_sequence: …
+    def create_chain(self, name: str, workflow_sequence: list[str]):
         """Create a workflow chain.
 
         Args:
@@ -371,8 +371,8 @@ class WorkflowOrchestrator:
         self.chains[name] = workflow_sequence
 
     async def execute_chain(
-        self, chain_name: str, initial_input: …
-    ) -> …
+        self, chain_name: str, initial_input: dict[str, Any]
+    ) -> dict[str, Any]:
         """Execute a workflow chain.
 
         Args:
kailash/api/mcp_integration.py
CHANGED
@@ -28,7 +28,8 @@ Example:
 
 import asyncio
 import logging
-from …
+from collections.abc import Callable
+from typing import Any
 
 from pydantic import BaseModel, Field
 
@@ -42,10 +43,10 @@ class MCPTool(BaseModel):
 
     name: str
     description: str
-    parameters: …
-    function: …
-    async_function: …
-    metadata: …
+    parameters: dict[str, Any] = Field(default_factory=dict)
+    function: Callable | None = None
+    async_function: Callable | None = None
+    metadata: dict[str, Any] = Field(default_factory=dict)
 
 
 class MCPResource(BaseModel):
@@ -55,7 +56,7 @@ class MCPResource(BaseModel):
     uri: str
     description: str
     mime_type: str = "text/plain"
-    metadata: …
+    metadata: dict[str, Any] = Field(default_factory=dict)
 
 
 class MCPIntegration:
@@ -74,7 +75,7 @@ class MCPIntegration:
     """
 
     def __init__(
-        self, name: str, description: str = "", capabilities: …
+        self, name: str, description: str = "", capabilities: list[str] = None
     ):
         """Initialize MCP integration.
 
@@ -86,16 +87,16 @@ class MCPIntegration:
         self.name = name
         self.description = description
         self.capabilities = capabilities or ["tools", "resources"]
-        self.tools: …
-        self.resources: …
-        self._context: …
+        self.tools: dict[str, MCPTool] = {}
+        self.resources: dict[str, MCPResource] = {}
+        self._context: dict[str, Any] = {}
 
     def add_tool(
         self,
         name: str,
-        function: …
+        function: Callable | Callable[..., asyncio.Future],
         description: str = "",
-        parameters: …
+        parameters: dict[str, Any] = None,
     ):
         """Add a tool to the MCP server.
 
@@ -134,7 +135,7 @@ class MCPIntegration:
         self.resources[name] = resource
         logger.info(f"Added resource '{name}' to MCP server '{self.name}'")
 
-    async def execute_tool(self, tool_name: str, parameters: …
+    async def execute_tool(self, tool_name: str, parameters: dict[str, Any]) -> Any:
         """Execute a tool asynchronously.
 
         Args:
@@ -173,7 +174,7 @@ class MCPIntegration:
         else:
             raise ValueError(f"Tool '{tool_name}' has no implementation")
 
-    def execute_tool_sync(self, tool_name: str, parameters: …
+    def execute_tool_sync(self, tool_name: str, parameters: dict[str, Any]) -> Any:
         """Execute a tool synchronously.
 
         Args:
@@ -200,7 +201,7 @@ class MCPIntegration:
         else:
             raise ValueError(f"Tool '{tool_name}' requires async execution")
 
-    def get_resource(self, resource_name: str) -> …
+    def get_resource(self, resource_name: str) -> MCPResource | None:
         """Get a resource by name.
 
         Args:
@@ -231,7 +232,7 @@ class MCPIntegration:
         """
         return self._context.get(key)
 
-    def list_tools(self) -> …
+    def list_tools(self) -> list[dict[str, Any]]:
         """List all available tools.
 
         Returns:
@@ -246,7 +247,7 @@ class MCPIntegration:
             for tool in self.tools.values()
         ]
 
-    def list_resources(self) -> …
+    def list_resources(self) -> list[dict[str, Any]]:
         """List all available resources.
 
         Returns:
@@ -262,7 +263,7 @@ class MCPIntegration:
             for resource in self.resources.values()
         ]
 
-    def to_mcp_protocol(self) -> …
+    def to_mcp_protocol(self) -> dict[str, Any]:
         """Convert to MCP protocol format.
 
         Returns:
@@ -298,7 +299,7 @@ class MCPToolNode(AsyncNode):
     """
 
     def __init__(
-        self, mcp_server: str, tool_name: str, parameter_mapping: …
+        self, mcp_server: str, tool_name: str, parameter_mapping: dict[str, str] = None
     ):
         """Initialize MCP tool node.
 
@@ -311,13 +312,13 @@ class MCPToolNode(AsyncNode):
         self.mcp_server = mcp_server
         self.tool_name = tool_name
         self.parameter_mapping = parameter_mapping or {}
-        self._mcp_integration: …
+        self._mcp_integration: MCPIntegration | None = None
 
     def set_mcp_integration(self, mcp: MCPIntegration):
         """Set the MCP integration instance."""
         self._mcp_integration = mcp
 
-    def get_parameters(self) -> …
+    def get_parameters(self) -> dict[str, Any]:
         """Get node parameters.
 
         Returns:
@@ -326,7 +327,7 @@ class MCPToolNode(AsyncNode):
         # For MCP tools, parameters are dynamic based on the tool
         return {}
 
-    def validate_inputs(self, **kwargs) -> …
+    def validate_inputs(self, **kwargs) -> dict[str, Any]:
         """Validate runtime inputs.
 
         For MCPToolNode, we accept any inputs since the parameters
@@ -373,7 +374,7 @@ class MCPToolNode(AsyncNode):
             return {"result": result}
         return result
 
-    async def async_run(self, **kwargs) -> …
+    async def async_run(self, **kwargs) -> dict[str, Any]:
         """Run the node asynchronously.
 
         Args: