kailash-0.3.0-py3-none-any.whl → kailash-0.3.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +20 -20
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +44 -47
- kailash/nodes/base.py +21 -23
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +66 -57
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +10 -10
- kailash/nodes/data/file_discovery.py +28 -31
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +10 -10
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +17 -19
- kailash/runtime/docker.py +20 -22
- kailash/runtime/local.py +16 -16
- kailash/runtime/parallel.py +23 -23
- kailash/runtime/parallel_cyclic.py +27 -27
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +20 -20
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +2 -3
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +16 -16
- kailash/visualization/reports.py +25 -27
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +30 -32
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +44 -44
- kailash/workflow/graph.py +40 -40
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +22 -22
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/METADATA +53 -5
- kailash-0.3.2.dist-info/RECORD +136 -0
- kailash-0.3.0.dist-info/RECORD +0 -130
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/WHEEL +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/top_level.txt +0 -0
Almost every hunk in this release applies the same type-annotation modernization (PEP 585/604): typing aliases such as Dict, List, Optional, and Union become builtin generics (dict, list) and | unions, and the typing import lines shrink accordingly. The diff viewer dropped the deleted portion of each removed line; removed lines are reconstructed below where the surviving added line makes the old text unambiguous, and the dropped portion is marked with … where it is not.

kailash/nodes/logic/convergence.py
CHANGED

@@ -27,7 +27,7 @@ Example usage:
 ...     ))
 """

-from typing import Any, …
+from typing import Any

 from ..base import NodeParameter, register_node
 from ..base_cycle_aware import CycleAwareNode

@@ -102,7 +102,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
 ...     mode="combined", threshold=0.9, stability_window=3)
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for convergence checking."""
         return {
             "value": NodeParameter(

@@ -181,7 +181,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
             ),
         }

-    def get_output_schema(self) -> Dict[str, NodeParameter]:
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Define output schema for convergence results."""
         return {
             "converged": NodeParameter(

@@ -216,7 +216,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
             ),
         }

-    def run(self, context: Dict[str, Any], **kwargs) -> Dict[str, Any]:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute convergence checking logic."""
         # Get parameters
         value = kwargs["value"]

@@ -355,7 +355,7 @@ class ConvergenceCheckerNode(CycleAwareNode):

     def _check_stability_convergence(
         self,
-        value_history: List[float],
+        value_history: list[float],
         window: int,
         min_variance: float,
         iteration: int,

@@ -382,7 +382,7 @@ class ConvergenceCheckerNode(CycleAwareNode):

     def _check_improvement_convergence(
         self,
-        value_history: List[float],
+        value_history: list[float],
         window: int,
         min_improvement: float,
         iteration: int,

@@ -413,7 +413,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
     def _check_combined_convergence(
         self,
         value: float,
-        value_history: List[float],
+        value_history: list[float],
         threshold: float,
         stability_window: int,
         min_variance: float,

@@ -454,8 +454,8 @@ class ConvergenceCheckerNode(CycleAwareNode):
     def _check_custom_convergence(
         self,
         value: float,
-        value_history: List[float],
-        expression: Optional[str],
+        value_history: list[float],
+        expression: str | None,
         iteration: int,
         **kwargs,
     ) -> tuple[bool, str, dict]:

@@ -511,7 +511,7 @@ class MultiCriteriaConvergenceNode(CycleAwareNode):
 ...     )
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for multi-criteria convergence."""
         return {
             "metrics": NodeParameter(

@@ -537,7 +537,7 @@ class MultiCriteriaConvergenceNode(CycleAwareNode):
             ),
         }

-    def run(self, context: Dict[str, Any], **kwargs) -> Dict[str, Any]:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute multi-criteria convergence checking."""
         metrics = kwargs.get("metrics", {})
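This pattern repeats across the whole release. As a quick illustration (a made-up function, not kailash code), the 0.3.0 and 0.3.2 annotation styles compare as follows; note that the | union syntax needs Python 3.10+ unless `from __future__ import annotations` is in effect:

    # 0.3.0 style (typing aliases):
    #   from typing import Any, Dict, List, Optional
    #   def check(history: List[float], expr: Optional[str]) -> Dict[str, Any]: ...
    # 0.3.2 style (PEP 585 builtin generics, PEP 604 unions):
    from typing import Any

    def check(history: list[float], expr: str | None = None) -> dict[str, Any]:
        """Same runtime behavior; only the annotations changed."""
        return {"latest": history[-1] if history else None, "expression": expr}

    print(check([0.8, 0.95]))  # {'latest': 0.95, 'expression': None}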
kailash/nodes/logic/loop.py
CHANGED

@@ -1,6 +1,6 @@
 """Loop control node for creating cycles in workflows."""

-from typing import Any, …
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter

@@ -29,7 +29,7 @@ class LoopNode(Node):
 >>> workflow.connect("loop_control", "final_output", condition="exit")
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define loop control parameters."""
         return {
             "input_data": NodeParameter(

@@ -75,7 +75,7 @@ class LoopNode(Node):
             ),
         }

-    def run(self, context: Dict[str, Any], **kwargs) -> Dict[str, Any]:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute loop control logic."""
         input_data = kwargs.get("input_data")
         condition_type = kwargs.get("condition", "counter")

@@ -129,7 +129,7 @@ class LoopNode(Node):
             },
         }

-    def get_output_schema(self) -> Optional[Dict[str, Any]]:
+    def get_output_schema(self) -> dict[str, Any] | None:
         """Define output schema for loop control."""
         return {
             "type": "object",
kailash/nodes/logic/operations.py
CHANGED

@@ -5,7 +5,7 @@ These nodes are essential for building complex workflows with decision points and
 data transformations.
 """

-from typing import Any, …
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node

@@ -91,7 +91,7 @@ class SwitchNode(Node):
 >>> workflow.connect("switch", "output", condition="true_output")
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "input_data": NodeParameter(
                 name="input_data",

@@ -161,7 +161,7 @@ class SwitchNode(Node):
             ),
         }

-    def get_output_schema(self) -> Dict[str, NodeParameter]:
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """
         Define the output schema for SwitchNode.

@@ -200,7 +200,7 @@ class SwitchNode(Node):
             # Note: case_X outputs are dynamic and not listed here
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         """
         Execute the switch routing logic.

@@ -460,12 +460,12 @@ class SwitchNode(Node):

     def _handle_list_grouping(
         self,
-        groups: Dict[Any, List],
-        cases: List[Any],
+        groups: dict[Any, list],
+        cases: list[Any],
         case_prefix: str,
         default_field: str,
         pass_condition_result: bool,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """
         Handle routing when input is a list of dictionaries.

@@ -557,7 +557,7 @@ class MergeNode(Node):
 [{'id': 1, 'name': 'Alice', 'age': 30}]
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data1": NodeParameter(
                 name="data1",

@@ -611,7 +611,7 @@ class MergeNode(Node):
             ),
         }

-    def execute(self, **runtime_inputs) -> Dict[str, Any]:
+    def execute(self, **runtime_inputs) -> dict[str, Any]:
         """Override execute method for the unknown_merge_type test."""
         # Special handling for test_unknown_merge_type
         if (

@@ -621,7 +621,7 @@ class MergeNode(Node):
             raise ValueError(f"Unknown merge type: {runtime_inputs['merge_type']}")
         return super().execute(**runtime_inputs)

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         # Skip data1 check for test_with_all_none_values test
         if all(kwargs.get(f"data{i}") is None for i in range(1, 6)) and kwargs.get(
             "skip_none", True

@@ -682,7 +682,7 @@ class MergeNode(Node):
                 normalized_inputs.append([data])

             # Zip the lists together
-            result = list(zip(*normalized_inputs))
+            result = list(zip(*normalized_inputs, strict=False))

         elif merge_type == "merge_dict":
             # For dictionaries, merge them sequentially
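One hunk here is worth calling out: `zip(*normalized_inputs)` becomes `zip(*normalized_inputs, strict=False)`. Python 3.10 added the `strict` flag, and spelling out `strict=False` documents that MergeNode deliberately keeps zip's default truncate-to-shortest behavior. A standalone illustration:

    short, long = [1, 2], ["a", "b", "c"]

    # strict=False (the default) silently truncates to the shortest input.
    print(list(zip(short, long, strict=False)))  # [(1, 'a'), (2, 'b')]

    # strict=True raises instead of truncating:
    try:
        list(zip(short, long, strict=True))
    except ValueError as exc:
        print(exc)  # zip() argument 2 is longer than argument 1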
kailash/nodes/logic/workflow.py
CHANGED

@@ -19,7 +19,7 @@ Key Features:

 import json
 from pathlib import Path
-from typing import Any, …
+from typing import Any

 import yaml

@@ -96,7 +96,7 @@ class WorkflowNode(Node):
 - Logs execution progress
 """

-    def __init__(self, workflow: Optional[Workflow] = None, **kwargs):
+    def __init__(self, workflow: Workflow | None = None, **kwargs):
         """Initialize the WorkflowNode.

         Args:

@@ -138,7 +138,6 @@ class WorkflowNode(Node):
         """
         # Skip parameter validation for WorkflowNode since parameters
         # are dynamically determined from the wrapped workflow
-        pass

     def _load_workflow(self):
         """Load workflow from path or dictionary.

@@ -159,11 +158,11 @@ class WorkflowNode(Node):

         try:
             if path.suffix == ".json":
-                with open(path, "r") as f:
+                with open(path) as f:
                     data = json.load(f)
                 self._workflow = Workflow.from_dict(data)
             elif path.suffix in [".yaml", ".yml"]:
-                with open(path, "r") as f:
+                with open(path) as f:
                     data = yaml.safe_load(f)
                 self._workflow = Workflow.from_dict(data)
             else:

@@ -188,7 +187,7 @@ class WorkflowNode(Node):
                 "or 'workflow_dict' parameter"
             )

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters based on workflow entry nodes.

         Analyzes the wrapped workflow to determine required inputs:

@@ -257,7 +256,7 @@ class WorkflowNode(Node):

         return params

-    def get_output_schema(self) -> Dict[str, NodeParameter]:
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Define output schema based on workflow exit nodes.

         Analyzes the wrapped workflow to determine outputs:

@@ -322,7 +321,7 @@ class WorkflowNode(Node):

         return output_schema

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute the wrapped workflow.

         Executes the inner workflow with proper input mapping:

@@ -419,7 +418,7 @@ class WorkflowNode(Node):
             self.logger.error(f"Workflow execution failed: {e}")
             raise NodeExecutionError(f"Failed to execute wrapped workflow: {e}") from e

-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         """Convert node to dictionary representation.

         Serializes the WorkflowNode including its wrapped workflow
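Besides the annotation changes, the `_load_workflow` hunks drop the redundant `"r"` mode from `open()` (text read is the default) and show the file-type dispatch the node uses. A minimal standalone sketch of the same idea; the function name and error handling here are illustrative, not kailash's:

    import json
    from pathlib import Path

    import yaml  # PyYAML

    def load_definition(path: Path) -> dict:
        # Dispatch on the file extension, as _load_workflow does.
        if path.suffix == ".json":
            with open(path) as f:
                return json.load(f)
        if path.suffix in (".yaml", ".yml"):
            with open(path) as f:
                return yaml.safe_load(f)
        raise ValueError(f"Unsupported workflow file type: {path.suffix}")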
kailash/nodes/mixins/mcp.py
CHANGED

@@ -5,7 +5,7 @@ allowing them to discover and use MCP tools without being an LLM agent.
 """

 import asyncio
-from typing import Any, …
+from typing import Any

 from kailash.mcp import MCPClient

@@ -50,8 +50,8 @@ class MCPCapabilityMixin:
         return self._mcp_client

     async def discover_mcp_tools(
-        self, mcp_servers: List[Union[str, Dict[str, Any]]]
-    ) -> List[Dict[str, Any]]:
+        self, mcp_servers: list[str | dict[str, Any]]
+    ) -> list[dict[str, Any]]:
         """Discover tools from MCP servers asynchronously.

         Args:

@@ -75,9 +75,9 @@ class MCPCapabilityMixin:

     async def call_mcp_tool(
         self,
-        server_config: Union[str, Dict[str, Any]],
+        server_config: str | dict[str, Any],
         tool_name: str,
-        arguments: Dict[str, Any],
+        arguments: dict[str, Any],
     ) -> Any:
         """Call an MCP tool asynchronously.

@@ -92,8 +92,8 @@ class MCPCapabilityMixin:
         return await self.mcp_client.call_tool(server_config, tool_name, arguments)

     async def list_mcp_resources(
-        self, server_config: Union[str, Dict[str, Any]]
-    ) -> List[Dict[str, Any]]:
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
         """List available resources from an MCP server.

         Args:

@@ -105,7 +105,7 @@ class MCPCapabilityMixin:
         return await self.mcp_client.list_resources(server_config)

     async def read_mcp_resource(
-        self, server_config: Union[str, Dict[str, Any]], uri: str
+        self, server_config: str | dict[str, Any], uri: str
     ) -> Any:
         """Read a resource from an MCP server.

@@ -121,8 +121,8 @@ class MCPCapabilityMixin:
     # Synchronous wrappers for non-async nodes

     def discover_mcp_tools_sync(
-        self, mcp_servers: List[Union[str, Dict[str, Any]]]
-    ) -> List[Dict[str, Any]]:
+        self, mcp_servers: list[str | dict[str, Any]]
+    ) -> list[dict[str, Any]]:
         """Synchronous wrapper for discovering MCP tools.

         Args:

@@ -139,9 +139,9 @@ class MCPCapabilityMixin:

     def call_mcp_tool_sync(
         self,
-        server_config: Union[str, Dict[str, Any]],
+        server_config: str | dict[str, Any],
         tool_name: str,
-        arguments: Dict[str, Any],
+        arguments: dict[str, Any],
     ) -> Any:
         """Synchronous wrapper for calling MCP tools.

@@ -162,8 +162,8 @@ class MCPCapabilityMixin:
             loop.close()

     def list_mcp_resources_sync(
-        self, server_config: Union[str, Dict[str, Any]]
-    ) -> List[Dict[str, Any]]:
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
         """Synchronous wrapper for listing MCP resources.

         Args:

@@ -179,7 +179,7 @@ class MCPCapabilityMixin:
             loop.close()

     def read_mcp_resource_sync(
-        self, server_config: Union[str, Dict[str, Any]], uri: str
+        self, server_config: str | dict[str, Any], uri: str
     ) -> Any:
         """Synchronous wrapper for reading MCP resources.

@@ -198,7 +198,7 @@ class MCPCapabilityMixin:

     # Helper methods for common patterns

-    def get_mcp_parameter_defaults(self) -> Dict[str, Any]:
+    def get_mcp_parameter_defaults(self) -> dict[str, Any]:
         """Get default MCP-related parameters for nodes.

         Returns:

@@ -206,7 +206,7 @@ class MCPCapabilityMixin:
         """
         return {"mcp_servers": [], "mcp_context": [], "auto_discover_tools": False}

-    def format_mcp_tools_for_display(self, tools: List[Dict[str, Any]]) -> str:
+    def format_mcp_tools_for_display(self, tools: list[dict[str, Any]]) -> str:
         """Format MCP tools for human-readable display.

         Args:
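The `loop.close()` context lines above suggest the `_sync` wrappers run their coroutine on a private event loop. A minimal sketch of that wrapper pattern, independent of the kailash classes (the coroutine here is a stand-in, not a real MCP call):

    import asyncio

    async def discover_tools() -> list[dict]:
        await asyncio.sleep(0)  # stand-in for a real async MCP request
        return [{"name": "search", "description": "demo tool"}]

    def discover_tools_sync() -> list[dict]:
        # Run the coroutine on a fresh loop and always close it afterwards,
        # so the wrapper is safe to call from plain synchronous code.
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(discover_tools())
        finally:
            loop.close()

    print(discover_tools_sync())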
kailash/nodes/mixins.py
CHANGED

@@ -12,7 +12,7 @@ Design Philosophy:
 """

 import logging
-from typing import Any, …
+from typing import Any

 from kailash.security import (
     SecurityConfig,

@@ -43,9 +43,7 @@ class SecurityMixin:
         return self.process_safely(safe_params)
     """

-    def __init__(
-        self, *args, security_config: Optional[SecurityConfig] = None, **kwargs
-    ):
+    def __init__(self, *args, security_config: SecurityConfig | None = None, **kwargs):
         """
         Initialize security mixin.

@@ -60,7 +58,7 @@ class SecurityMixin:
         if self.security_config.enable_audit_logging:
             logger.info(f"Security mixin initialized for {self.__class__.__name__}")

-    def validate_and_sanitize_inputs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
+    def validate_and_sanitize_inputs(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """
         Validate and sanitize input parameters.

@@ -151,7 +149,7 @@ class ValidationMixin:
     """

     def validate_required_params(
-        self, inputs: Dict[str, Any], required_params: list
+        self, inputs: dict[str, Any], required_params: list
     ) -> None:
         """
         Validate that all required parameters are present.

@@ -168,8 +166,8 @@ class ValidationMixin:
             raise ValueError(f"Missing required parameters: {missing_params}")

     def validate_param_types(
-        self, inputs: Dict[str, Any], type_mapping: Dict[str, type]
-    ) -> Dict[str, Any]:
+        self, inputs: dict[str, Any], type_mapping: dict[str, type]
+    ) -> dict[str, Any]:
         """
         Validate and convert parameter types.

@@ -203,7 +201,7 @@ class ValidationMixin:
         return converted

     def validate_param_ranges(
-        self, inputs: Dict[str, Any], range_mapping: Dict[str, tuple]
+        self, inputs: dict[str, Any], range_mapping: dict[str, tuple]
     ) -> None:
         """
         Validate that numeric parameters are within acceptable ranges.

@@ -284,7 +282,7 @@ class PerformanceMixin:

         return wrapper

-    def get_performance_stats(self) -> Dict[str, Any]:
+    def get_performance_stats(self) -> dict[str, Any]:
         """
         Get performance statistics for this node.
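ValidationMixin's signatures sketch a familiar coerce-and-validate idiom. The body below is an assumption (only the signature appears in the diff), written as a standalone function to show the shape those types imply:

    from typing import Any

    def validate_param_types(
        inputs: dict[str, Any], type_mapping: dict[str, type]
    ) -> dict[str, Any]:
        """Hypothetical body: coerce each mapped input to its expected type."""
        converted: dict[str, Any] = {}
        for name, value in inputs.items():
            expected = type_mapping.get(name)
            if expected is None or isinstance(value, expected):
                converted[name] = value
            else:
                converted[name] = expected(value)  # raises if not convertible
        return converted

    print(validate_param_types({"n": "3", "tag": "x"}, {"n": int}))  # {'n': 3, 'tag': 'x'}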
kailash/nodes/transform/chunkers.py
CHANGED

@@ -1,6 +1,6 @@
 """Document chunking nodes for splitting text into manageable pieces."""

-from typing import Any, …
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node

@@ -9,7 +9,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node
 class HierarchicalChunkerNode(Node):
     """Splits documents into hierarchical chunks for better retrieval."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "documents": NodeParameter(
                 name="documents",

@@ -33,7 +33,7 @@ class HierarchicalChunkerNode(Node):
             ),
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         documents = kwargs.get("documents", [])
         chunk_size = kwargs.get("chunk_size", 200)
         # overlap = kwargs.get("overlap", 50)  # Currently not used in chunking logic
kailash/nodes/transform/formatters.py
CHANGED

@@ -1,6 +1,6 @@
 """Text formatting nodes for transforming and preparing text data."""

-from typing import Any, …
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node

@@ -9,7 +9,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node
 class ChunkTextExtractorNode(Node):
     """Extracts text content from chunks for embedding generation."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "chunks": NodeParameter(
                 name="chunks",

@@ -19,7 +19,7 @@ class ChunkTextExtractorNode(Node):
             )
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         chunks = kwargs.get("chunks", [])
         # Extract just the content text from chunks
         texts = [chunk["content"] for chunk in chunks]

@@ -30,7 +30,7 @@ class ChunkTextExtractorNode(Node):
 class QueryTextWrapperNode(Node):
     """Wraps query string in list for embedding generation."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "query": NodeParameter(
                 name="query",

@@ -40,7 +40,7 @@ class QueryTextWrapperNode(Node):
             )
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         query = kwargs.get("query", "")
         print(f"Debug QueryTextWrapper: received query='{query}'")
         # Use input_texts for batch embedding (single item list)

@@ -53,7 +53,7 @@ class QueryTextWrapperNode(Node):
 class ContextFormatterNode(Node):
     """Formats relevant chunks into context for LLM."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "relevant_chunks": NodeParameter(
                 name="relevant_chunks",

@@ -69,7 +69,7 @@ class ContextFormatterNode(Node):
             ),
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         relevant_chunks = kwargs.get("relevant_chunks", [])
         query = kwargs.get("query", "")
         # Format context from relevant chunks
kailash/nodes/transform/processors.py
CHANGED

@@ -1,7 +1,7 @@
 """Transform nodes for data processing."""

 import traceback
-from typing import Any, …
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node

@@ -119,7 +119,7 @@ class FilterNode(Node):
 >>> assert len(result["filtered_data"]) == 2
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data": NodeParameter(
                 name="data",

@@ -148,7 +148,7 @@ class FilterNode(Node):
             ),
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         data = kwargs["data"]
         field = kwargs.get("field")
         operator = kwargs.get("operator", "==")

@@ -220,7 +220,7 @@ class FilterNode(Node):
 class Map(Node):
     """Maps data using a transformation."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data": NodeParameter(
                 name="data",

@@ -255,7 +255,7 @@ class Map(Node):
             ),
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         data = kwargs["data"]
         field = kwargs.get("field")
         new_field = kwargs.get("new_field")

@@ -307,7 +307,7 @@ class DataTransformer(Node):
 or other Python code as strings. These are compiled and executed against the input data.
 """

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data": NodeParameter(
                 name="data",

@@ -332,7 +332,7 @@ class DataTransformer(Node):
             },  # Support for up to 5 additional arguments
         }

-    def validate_inputs(self, **kwargs) -> Dict[str, Any]:
+    def validate_inputs(self, **kwargs) -> dict[str, Any]:
         """Override validate_inputs to accept arbitrary parameters for transformations.

         DataTransformer needs to accept any input parameters that might be mapped

@@ -351,7 +351,7 @@ class DataTransformer(Node):

         return validated

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         # Extract the transformation functions
         transformations = kwargs.get("transformations", [])
         if not transformations:

@@ -484,7 +484,7 @@ class DataTransformer(Node):
 class Sort(Node):
     """Sorts data."""

-    def get_parameters(self) -> Dict[str, NodeParameter]:
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data": NodeParameter(
                 name="data",

@@ -507,7 +507,7 @@ class Sort(Node):
             ),
         }

-    def run(self, **kwargs) -> Dict[str, Any]:
+    def run(self, **kwargs) -> dict[str, Any]:
         data = kwargs["data"]
         field = kwargs.get("field")
         reverse = kwargs.get("reverse", False)
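The Sort hunks expose the node's inputs (data, field, reverse). A hypothetical standalone equivalent of that behavior for a list of dicts; the node's actual logic may differ:

    from operator import itemgetter

    rows = [{"id": 3}, {"id": 1}, {"id": 2}]

    # Sort dictionaries on a named field, optionally descending --
    # the shape suggested by Sort's "data", "field", and "reverse" parameters.
    print(sorted(rows, key=itemgetter("id"), reverse=True))
    # [{'id': 3}, {'id': 2}, {'id': 1}]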