kailash 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/__init__.py +5 -0
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +463 -0
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +819 -0
- kailash/nodes/base.py +24 -26
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +56 -55
- kailash/nodes/data/__init__.py +6 -0
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +297 -0
- kailash/nodes/data/file_discovery.py +598 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +11 -11
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +18 -20
- kailash/runtime/docker.py +24 -26
- kailash/runtime/local.py +55 -31
- kailash/runtime/parallel.py +25 -25
- kailash/runtime/parallel_cyclic.py +29 -29
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +22 -22
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +8 -9
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +32 -19
- kailash/visualization/reports.py +30 -28
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +38 -33
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +45 -45
- kailash/workflow/graph.py +57 -45
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +32 -24
- kailash-0.3.1.dist-info/METADATA +476 -0
- kailash-0.3.1.dist-info/RECORD +136 -0
- kailash-0.2.2.dist-info/METADATA +0 -121
- kailash-0.2.2.dist-info/RECORD +0 -126
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
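Two things stand out in the file list: the new modules (kailash/mcp/utils/, mcp/server_enhanced.py, nodes/api/monitoring.py and security.py, nodes/data/event_generation.py and file_discovery.py) and the near-symmetric +N/-N counts on almost every existing file. The per-file diffs below show what drives the latter: type annotations are modernized from typing-module aliases to PEP 585 builtin generics and PEP 604 unions, which require Python 3.10+. A minimal sketch of the pattern; the 0.2.2 spellings are assumed from convention, since this extract truncates most removed lines:

from typing import Any

# Assumed 0.2.2 style (the removed lines are truncated in this extract):
#   from typing import Any, Dict, List, Optional, Union
#   def lookup(servers: List[Union[str, Dict[str, Any]]]) -> Optional[Dict[str, Any]]: ...

# 0.3.1 style, as it appears throughout the added lines below:
def lookup(servers: list[str | dict[str, Any]]) -> dict[str, Any] | None:
    """PEP 585 builtin generics and PEP 604 `X | Y` unions: Python 3.10+."""
    return next((s for s in servers if isinstance(s, dict)), None)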
kailash/nodes/mixins/mcp.py
CHANGED
@@ -5,7 +5,7 @@ allowing them to discover and use MCP tools without being an LLM agent.
 """
 
 import asyncio
-from typing import Any
+from typing import Any
 
 from kailash.mcp import MCPClient
 
@@ -50,8 +50,8 @@ class MCPCapabilityMixin:
         return self._mcp_client
 
     async def discover_mcp_tools(
-        self, mcp_servers:
-    ) ->
+        self, mcp_servers: list[str | dict[str, Any]]
+    ) -> list[dict[str, Any]]:
         """Discover tools from MCP servers asynchronously.
 
         Args:
@@ -75,9 +75,9 @@ class MCPCapabilityMixin:
 
     async def call_mcp_tool(
         self,
-        server_config:
+        server_config: str | dict[str, Any],
         tool_name: str,
-        arguments:
+        arguments: dict[str, Any],
     ) -> Any:
         """Call an MCP tool asynchronously.
 
@@ -92,8 +92,8 @@ class MCPCapabilityMixin:
         return await self.mcp_client.call_tool(server_config, tool_name, arguments)
 
     async def list_mcp_resources(
-        self, server_config:
-    ) ->
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
         """List available resources from an MCP server.
 
         Args:
@@ -105,7 +105,7 @@ class MCPCapabilityMixin:
         return await self.mcp_client.list_resources(server_config)
 
     async def read_mcp_resource(
-        self, server_config:
+        self, server_config: str | dict[str, Any], uri: str
     ) -> Any:
         """Read a resource from an MCP server.
 
@@ -121,8 +121,8 @@ class MCPCapabilityMixin:
    # Synchronous wrappers for non-async nodes
 
    def discover_mcp_tools_sync(
-        self, mcp_servers:
-    ) ->
+        self, mcp_servers: list[str | dict[str, Any]]
+    ) -> list[dict[str, Any]]:
        """Synchronous wrapper for discovering MCP tools.
 
        Args:
@@ -139,9 +139,9 @@ class MCPCapabilityMixin:
 
    def call_mcp_tool_sync(
        self,
-        server_config:
+        server_config: str | dict[str, Any],
        tool_name: str,
-        arguments:
+        arguments: dict[str, Any],
    ) -> Any:
        """Synchronous wrapper for calling MCP tools.
 
@@ -162,8 +162,8 @@ class MCPCapabilityMixin:
            loop.close()
 
    def list_mcp_resources_sync(
-        self, server_config:
-    ) ->
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
        """Synchronous wrapper for listing MCP resources.
 
        Args:
@@ -179,7 +179,7 @@ class MCPCapabilityMixin:
            loop.close()
 
    def read_mcp_resource_sync(
-        self, server_config:
+        self, server_config: str | dict[str, Any], uri: str
    ) -> Any:
        """Synchronous wrapper for reading MCP resources.
 
@@ -198,7 +198,7 @@ class MCPCapabilityMixin:
 
    # Helper methods for common patterns
 
-    def get_mcp_parameter_defaults(self) ->
+    def get_mcp_parameter_defaults(self) -> dict[str, Any]:
        """Get default MCP-related parameters for nodes.
 
        Returns:
@@ -206,7 +206,7 @@ class MCPCapabilityMixin:
        """
        return {"mcp_servers": [], "mcp_context": [], "auto_discover_tools": False}
 
-    def format_mcp_tools_for_display(self, tools:
+    def format_mcp_tools_for_display(self, tools: list[dict[str, Any]]) -> str:
        """Format MCP tools for human-readable display.
 
        Args:
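The net effect on MCPCapabilityMixin: every server argument is now `str | dict[str, Any]`, and the discovery/list helpers return `list[dict[str, Any]]`. A usage sketch under those signatures; the host class, server address, and tool name are hypothetical, and only the mixin methods come from the hunks above:

from typing import Any

from kailash.nodes.mixins.mcp import MCPCapabilityMixin

class InventoryNode(MCPCapabilityMixin):  # hypothetical host class
    pass

node = InventoryNode()
# Server configs may be plain URLs or dicts, per the new union type.
servers: list[str | dict[str, Any]] = ["http://localhost:8080"]  # hypothetical server
tools = node.discover_mcp_tools_sync(servers)     # -> list[dict[str, Any]]
print(node.format_mcp_tools_for_display(tools))   # -> str
result = node.call_mcp_tool_sync(
    servers[0],
    "lookup_item",    # hypothetical tool name
    {"sku": "A-42"},  # arguments: dict[str, Any]
)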
kailash/nodes/mixins.py
CHANGED
@@ -12,7 +12,7 @@ Design Philosophy:
 """
 
 import logging
-from typing import Any
+from typing import Any
 
 from kailash.security import (
     SecurityConfig,
@@ -43,9 +43,7 @@ class SecurityMixin:
            return self.process_safely(safe_params)
    """
 
-    def __init__(
-        self, *args, security_config: Optional[SecurityConfig] = None, **kwargs
-    ):
+    def __init__(self, *args, security_config: SecurityConfig | None = None, **kwargs):
        """
        Initialize security mixin.
 
@@ -60,7 +58,7 @@ class SecurityMixin:
        if self.security_config.enable_audit_logging:
            logger.info(f"Security mixin initialized for {self.__class__.__name__}")
 
-    def validate_and_sanitize_inputs(self, inputs:
+    def validate_and_sanitize_inputs(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """
        Validate and sanitize input parameters.
 
@@ -151,7 +149,7 @@ class ValidationMixin:
    """
 
    def validate_required_params(
-        self, inputs:
+        self, inputs: dict[str, Any], required_params: list
    ) -> None:
        """
        Validate that all required parameters are present.
@@ -168,8 +166,8 @@ class ValidationMixin:
            raise ValueError(f"Missing required parameters: {missing_params}")
 
    def validate_param_types(
-        self, inputs:
-    ) ->
+        self, inputs: dict[str, Any], type_mapping: dict[str, type]
+    ) -> dict[str, Any]:
        """
        Validate and convert parameter types.
 
@@ -203,7 +201,7 @@ class ValidationMixin:
        return converted
 
    def validate_param_ranges(
-        self, inputs:
+        self, inputs: dict[str, Any], range_mapping: dict[str, tuple]
    ) -> None:
        """
        Validate that numeric parameters are within acceptable ranges.
@@ -284,7 +282,7 @@ class PerformanceMixin:
 
        return wrapper
 
-    def get_performance_stats(self) ->
+    def get_performance_stats(self) -> dict[str, Any]:
        """
        Get performance statistics for this node.
 
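SecurityMixin's constructor collapses to one line with the `SecurityConfig | None` default, and the validation helpers now declare plain `dict[str, Any]` in and out. A sketch of how the mixins compose; the node class and parameter names are hypothetical, only the mixin methods come from the hunks above:

from typing import Any

from kailash.nodes.mixins import SecurityMixin, ValidationMixin
from kailash.security import SecurityConfig

class SafeNode(SecurityMixin, ValidationMixin):  # hypothetical composition
    def process(self, **params: Any) -> dict[str, Any]:
        safe = self.validate_and_sanitize_inputs(params)  # dict[str, Any] -> dict[str, Any]
        self.validate_required_params(safe, ["path"])     # raises ValueError when missing
        return safe

node = SafeNode(security_config=SecurityConfig())  # or omit it: the default is now None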
kailash/nodes/transform/chunkers.py
CHANGED
@@ -1,6 +1,6 @@
 """Document chunking nodes for splitting text into manageable pieces."""
 
-from typing import Any
+from typing import Any
 
 from kailash.nodes.base import Node, NodeParameter, register_node
 
@@ -9,7 +9,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node
 class HierarchicalChunkerNode(Node):
    """Splits documents into hierarchical chunks for better retrieval."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "documents": NodeParameter(
                name="documents",
@@ -33,7 +33,7 @@ class HierarchicalChunkerNode(Node):
            ),
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        documents = kwargs.get("documents", [])
        chunk_size = kwargs.get("chunk_size", 200)
        # overlap = kwargs.get("overlap", 50)  # Currently not used in chunking logic
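HierarchicalChunkerNode keeps the standard node contract: get_parameters() declares typed inputs, run(**kwargs) returns a plain dict. A usage sketch; the document shape and the output key are assumptions (the diff only shows the documents/chunk_size/overlap parameters):

from kailash.nodes.transform.chunkers import HierarchicalChunkerNode

chunker = HierarchicalChunkerNode()
result = chunker.run(
    documents=[{"id": "doc-1", "content": "a long passage of text ..."}],  # assumed shape
    chunk_size=200,
)
chunks = result.get("chunks", [])  # output key name is an assumption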
kailash/nodes/transform/formatters.py
CHANGED
@@ -1,6 +1,6 @@
 """Text formatting nodes for transforming and preparing text data."""
 
-from typing import Any
+from typing import Any
 
 from kailash.nodes.base import Node, NodeParameter, register_node
 
@@ -9,7 +9,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node
 class ChunkTextExtractorNode(Node):
    """Extracts text content from chunks for embedding generation."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "chunks": NodeParameter(
                name="chunks",
@@ -19,7 +19,7 @@ class ChunkTextExtractorNode(Node):
            )
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        chunks = kwargs.get("chunks", [])
        # Extract just the content text from chunks
        texts = [chunk["content"] for chunk in chunks]
@@ -30,7 +30,7 @@
 class QueryTextWrapperNode(Node):
    """Wraps query string in list for embedding generation."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "query": NodeParameter(
                name="query",
@@ -40,7 +40,7 @@
            )
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        query = kwargs.get("query", "")
        print(f"Debug QueryTextWrapper: received query='{query}'")
        # Use input_texts for batch embedding (single item list)
@@ -53,7 +53,7 @@
 class ContextFormatterNode(Node):
    """Formats relevant chunks into context for LLM."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "relevant_chunks": NodeParameter(
                name="relevant_chunks",
@@ -69,7 +69,7 @@
            ),
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        relevant_chunks = kwargs.get("relevant_chunks", [])
        query = kwargs.get("query", "")
        # Format context from relevant chunks
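These three nodes are the glue of a retrieval pipeline: extract chunk text for embedding, wrap the query for batch embedding, then format the retrieved chunks into LLM context. A sketch chaining them directly rather than through a workflow; output key names beyond what the hunks show are assumptions:

from kailash.nodes.transform.formatters import (
    ChunkTextExtractorNode,
    ContextFormatterNode,
    QueryTextWrapperNode,
)

chunks = [{"content": "Kailash is a workflow SDK."}]  # shape implied by chunk["content"] above
texts = ChunkTextExtractorNode().run(chunks=chunks)             # pulls out the content strings
wrapped = QueryTextWrapperNode().run(query="What is Kailash?")  # single-item input_texts list
context = ContextFormatterNode().run(relevant_chunks=chunks, query="What is Kailash?")
# each run() returns dict[str, Any]; the exact output keys are assumptions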
kailash/nodes/transform/processors.py
CHANGED
@@ -1,7 +1,7 @@
 """Transform nodes for data processing."""
 
 import traceback
-from typing import Any
+from typing import Any
 
 from kailash.nodes.base import Node, NodeParameter, register_node
 
@@ -119,7 +119,7 @@ class FilterNode(Node):
    >>> assert len(result["filtered_data"]) == 2
    """
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "data": NodeParameter(
                name="data",
@@ -148,7 +148,7 @@
            ),
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        data = kwargs["data"]
        field = kwargs.get("field")
        operator = kwargs.get("operator", "==")
@@ -220,7 +220,7 @@
 class Map(Node):
    """Maps data using a transformation."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "data": NodeParameter(
                name="data",
@@ -255,7 +255,7 @@
            ),
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        data = kwargs["data"]
        field = kwargs.get("field")
        new_field = kwargs.get("new_field")
@@ -307,11 +307,11 @@ class DataTransformer(Node):
    or other Python code as strings. These are compiled and executed against the input data.
    """
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "data": NodeParameter(
                name="data",
-                type=
+                type=Any,
                required=False,
                description="Primary input data to transform",
            ),
@@ -332,7 +332,7 @@
            },  # Support for up to 5 additional arguments
        }
 
-    def validate_inputs(self, **kwargs) ->
+    def validate_inputs(self, **kwargs) -> dict[str, Any]:
        """Override validate_inputs to accept arbitrary parameters for transformations.
 
        DataTransformer needs to accept any input parameters that might be mapped
@@ -351,7 +351,7 @@
 
        return validated
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        # Extract the transformation functions
        transformations = kwargs.get("transformations", [])
        if not transformations:
@@ -484,7 +484,7 @@
 class Sort(Node):
    """Sorts data."""
 
-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
        return {
            "data": NodeParameter(
                name="data",
@@ -507,7 +507,7 @@
            ),
        }
 
-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
        data = kwargs["data"]
        field = kwargs.get("field")
        reverse = kwargs.get("reverse", False)
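DataTransformer is the notable one here: its data parameter is explicitly type=Any, and validate_inputs is overridden so arbitrary mapped parameters pass through to the compiled transformation strings. A sketch; FilterNode's value parameter and the DataTransformer output key are assumptions beyond the docstring's result["filtered_data"]:

from kailash.nodes.transform.processors import DataTransformer, FilterNode

dt = DataTransformer()
doubled = dt.run(
    data=[1, 2, 3, 4],
    transformations=["lambda x: x * 2"],  # code strings, compiled and executed per the docstring
)

flt = FilterNode()
result = flt.run(data=[{"v": 1}, {"v": 5}], field="v", operator=">", value=2)  # `value` assumed
assert len(result["filtered_data"]) == 1  # output key taken from the class docstring above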
kailash/runtime/access_controlled.py
CHANGED
@@ -27,7 +27,7 @@ Example with access control (opt-in):
 """
 
 import logging
-from typing import Any
+from typing import Any
 
 from kailash.access_control import (
     AccessControlManager,
@@ -108,7 +108,7 @@ class AccessControlledRuntime:
    """
 
    def __init__(
-        self, user_context: UserContext, base_runtime:
+        self, user_context: UserContext, base_runtime: LocalRuntime | None = None
    ):
        """
        Initialize access-controlled runtime.
@@ -123,11 +123,11 @@
 
        # Track skipped nodes for alternative routing
        self._skipped_nodes: set[str] = set()
-        self._node_outputs:
+        self._node_outputs: dict[str, Any] = {}
 
    def execute(
-        self, workflow: Workflow, parameters:
-    ) ->
+        self, workflow: Workflow, parameters: dict[str, Any] | None = None
+    ) -> tuple[Any, str]:
        """
        Execute workflow with access control.
 
@@ -272,7 +272,7 @@
        return wrapper
 
    @staticmethod
-    def _mask_fields(data:
+    def _mask_fields(data: dict[str, Any], fields: list[str]) -> dict[str, Any]:
        """Mask sensitive fields in data"""
        masked = data.copy()
        for field in fields:
@@ -281,8 +281,8 @@
        return masked
 
    def _handle_conditional_routing(
-        self, node_id: str, true_path:
-    ) ->
+        self, node_id: str, true_path: list[str], false_path: list[str]
+    ) -> list[str]:
        """
        Determine which path to take based on permissions.
 
@@ -345,14 +345,14 @@ class AccessControlConfig:
    """
 
    def __init__(self):
-        self.rules:
+        self.rules: list[PermissionRule] = []
 
    def add_workflow_permission(
        self,
        workflow_id: str,
        permission: WorkflowPermission,
-        user_id:
-        role:
+        user_id: str | None = None,
+        role: str | None = None,
        effect: PermissionEffect = PermissionEffect.ALLOW,
    ):
        """Add a workflow-level permission rule"""
@@ -372,11 +372,11 @@
        workflow_id: str,
        node_id: str,
        permission: NodePermission,
-        user_id:
-        role:
+        user_id: str | None = None,
+        role: str | None = None,
        effect: PermissionEffect = PermissionEffect.ALLOW,
-        masked_fields:
-        redirect_node:
+        masked_fields: list[str] | None = None,
+        redirect_node: str | None = None,
    ):
        """Add a node-level permission rule"""
        rule = PermissionRule(
@@ -406,9 +406,9 @@
 def execute_with_access_control(
    workflow: Workflow,
    user_context: UserContext,
-    parameters:
-    access_config:
-) ->
+    parameters: dict[str, Any] | None = None,
+    access_config: AccessControlConfig | None = None,
+) -> tuple[Any, str]:
    """
    Convenience function to execute a workflow with access control.
 
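The access-control surface now reads: execute() and execute_with_access_control() return tuple[Any, str] (results plus a run ID), and the rule builders take `str | None` user/role filters. A sketch of the opt-in flow; the enum member and UserContext fields are assumptions, the rest follows the hunks above:

from typing import Any

from kailash.access_control import UserContext, WorkflowPermission
from kailash.runtime.access_controlled import (
    AccessControlConfig,
    execute_with_access_control,
)

def run_as_analyst(workflow) -> tuple[Any, str]:
    config = AccessControlConfig()
    config.add_workflow_permission(
        workflow_id="reporting",                # hypothetical workflow id
        permission=WorkflowPermission.EXECUTE,  # assumed enum member
        role="analyst",                         # effect defaults to PermissionEffect.ALLOW
    )
    # New return annotation: tuple[Any, str] (results, run ID)
    return execute_with_access_control(
        workflow=workflow,
        user_context=UserContext(user_id="analyst-1"),  # assumed constructor fields
        parameters={"report": {"period": "Q1"}},        # hypothetical parameters
        access_config=config,
    )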
kailash/runtime/async_local.py
CHANGED
@@ -6,8 +6,8 @@ database queries, or LLM interactions.
 """
 
 import logging
-from datetime import
-from typing import Any
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 
@@ -60,9 +60,9 @@ class AsyncLocalRuntime:
    async def execute(
        self,
        workflow: Workflow,
-        task_manager:
-        parameters:
-    ) ->
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
        """Execute a workflow asynchronously.
 
        Args:
@@ -144,10 +144,10 @@
    async def _execute_workflow(
        self,
        workflow: Workflow,
-        task_manager:
-        run_id:
-        parameters:
-    ) ->
+        task_manager: TaskManager | None,
+        run_id: str | None,
+        parameters: dict[str, dict[str, Any]],
+    ) -> dict[str, Any]:
        """Execute the workflow nodes asynchronously.
 
        Args:
@@ -195,7 +195,7 @@
                    run_id=run_id,
                    node_id=node_id,
                    node_type=node_instance.__class__.__name__,
-                    started_at=datetime.now(
+                    started_at=datetime.now(UTC),
                )
            except Exception as e:
                self.logger.warning(
@@ -220,18 +220,16 @@
                    task.update_status(TaskStatus.RUNNING)
 
                # Execute node - check if it supports async execution
-                start_time = datetime.now(
+                start_time = datetime.now(UTC)
 
                if isinstance(node_instance, AsyncNode):
                    # Use async execution
                    outputs = await node_instance.execute_async(**inputs)
                else:
                    # Fall back to synchronous execution
-                    outputs = node_instance.
+                    outputs = node_instance.run(**inputs)
 
-                execution_time = (
-                    datetime.now(timezone.utc) - start_time
-                ).total_seconds()
+                execution_time = (datetime.now(UTC) - start_time).total_seconds()
 
                # Store outputs
                node_outputs[node_id] = outputs
@@ -245,7 +243,7 @@
                    task.update_status(
                        TaskStatus.COMPLETED,
                        result=outputs,
-                        ended_at=datetime.now(
+                        ended_at=datetime.now(UTC),
                        metadata={"execution_time": execution_time},
                    )
 
@@ -262,7 +260,7 @@
                    task.update_status(
                        TaskStatus.FAILED,
                        error=str(e),
-                        ended_at=datetime.now(
+                        ended_at=datetime.now(UTC),
                    )
 
                # Determine if we should continue or stop
@@ -287,9 +285,9 @@
        workflow: Workflow,
        node_id: str,
        node_instance: Any,
-        node_outputs:
-        parameters:
-    ) ->
+        node_outputs: dict[str, dict[str, Any]],
+        parameters: dict[str, Any],
+    ) -> dict[str, Any]:
        """Prepare inputs for a node execution.
 
        Args:
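Two changes matter to callers here: timestamps standardize on the UTC alias added in Python 3.11 (datetime.now(UTC) replaces datetime.now(timezone.utc); the two are equivalent), and non-async nodes are now dispatched through node_instance.run(**inputs). A sketch of driving the runtime under the new execute signature; the constructor options and per-node parameters are assumptions:

from datetime import UTC, timezone

assert UTC is timezone.utc  # the alias the diff adopts; requires Python 3.11+

from kailash.runtime.async_local import AsyncLocalRuntime

async def run(workflow) -> None:
    runtime = AsyncLocalRuntime()  # constructor options are not shown in this diff
    # execute(...) -> tuple[dict[str, Any], str | None]: node outputs plus an optional run ID
    results, run_id = await runtime.execute(
        workflow,
        parameters={"reader": {"path": "data.csv"}},  # hypothetical per-node parameters
    )
    print(run_id, sorted(results))

# asyncio.run(run(workflow)) once a Workflow instance has been built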
|