kailash 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +3 -3
- kailash/api/custom_nodes_secure.py +3 -3
- kailash/api/gateway.py +1 -1
- kailash/api/studio.py +2 -3
- kailash/api/workflow_api.py +3 -4
- kailash/core/resilience/bulkhead.py +460 -0
- kailash/core/resilience/circuit_breaker.py +92 -10
- kailash/edge/discovery.py +86 -0
- kailash/mcp_server/__init__.py +334 -0
- kailash/mcp_server/advanced_features.py +1022 -0
- kailash/{mcp → mcp_server}/ai_registry_server.py +29 -4
- kailash/mcp_server/auth.py +789 -0
- kailash/mcp_server/client.py +712 -0
- kailash/mcp_server/discovery.py +1593 -0
- kailash/mcp_server/errors.py +673 -0
- kailash/mcp_server/oauth.py +1727 -0
- kailash/mcp_server/protocol.py +1126 -0
- kailash/mcp_server/registry_integration.py +587 -0
- kailash/mcp_server/server.py +1747 -0
- kailash/{mcp → mcp_server}/servers/ai_registry.py +2 -2
- kailash/mcp_server/transports.py +1169 -0
- kailash/mcp_server/utils/cache.py +510 -0
- kailash/middleware/auth/auth_manager.py +3 -3
- kailash/middleware/communication/api_gateway.py +2 -9
- kailash/middleware/communication/realtime.py +1 -1
- kailash/middleware/mcp/client_integration.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +2 -2
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/admin/audit_log.py +6 -6
- kailash/nodes/admin/permission_check.py +8 -8
- kailash/nodes/admin/role_management.py +32 -28
- kailash/nodes/admin/schema.sql +6 -1
- kailash/nodes/admin/schema_manager.py +13 -13
- kailash/nodes/admin/security_event.py +16 -20
- kailash/nodes/admin/tenant_isolation.py +3 -3
- kailash/nodes/admin/transaction_utils.py +3 -3
- kailash/nodes/admin/user_management.py +21 -22
- kailash/nodes/ai/a2a.py +11 -11
- kailash/nodes/ai/ai_providers.py +9 -12
- kailash/nodes/ai/embedding_generator.py +13 -14
- kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
- kailash/nodes/ai/iterative_llm_agent.py +3 -3
- kailash/nodes/ai/llm_agent.py +213 -36
- kailash/nodes/ai/self_organizing.py +2 -2
- kailash/nodes/alerts/discord.py +4 -4
- kailash/nodes/api/graphql.py +6 -6
- kailash/nodes/api/http.py +12 -17
- kailash/nodes/api/rate_limiting.py +4 -4
- kailash/nodes/api/rest.py +15 -15
- kailash/nodes/auth/mfa.py +3 -4
- kailash/nodes/auth/risk_assessment.py +2 -2
- kailash/nodes/auth/session_management.py +5 -5
- kailash/nodes/auth/sso.py +143 -0
- kailash/nodes/base.py +6 -2
- kailash/nodes/base_async.py +16 -2
- kailash/nodes/base_with_acl.py +2 -2
- kailash/nodes/cache/__init__.py +9 -0
- kailash/nodes/cache/cache.py +1172 -0
- kailash/nodes/cache/cache_invalidation.py +870 -0
- kailash/nodes/cache/redis_pool_manager.py +595 -0
- kailash/nodes/code/async_python.py +2 -1
- kailash/nodes/code/python.py +196 -35
- kailash/nodes/compliance/data_retention.py +6 -6
- kailash/nodes/compliance/gdpr.py +5 -5
- kailash/nodes/data/__init__.py +10 -0
- kailash/nodes/data/optimistic_locking.py +906 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/redis.py +349 -0
- kailash/nodes/data/sql.py +314 -3
- kailash/nodes/data/streaming.py +21 -0
- kailash/nodes/enterprise/__init__.py +8 -0
- kailash/nodes/enterprise/audit_logger.py +285 -0
- kailash/nodes/enterprise/batch_processor.py +22 -3
- kailash/nodes/enterprise/data_lineage.py +1 -1
- kailash/nodes/enterprise/mcp_executor.py +205 -0
- kailash/nodes/enterprise/service_discovery.py +150 -0
- kailash/nodes/enterprise/tenant_assignment.py +108 -0
- kailash/nodes/logic/async_operations.py +2 -2
- kailash/nodes/logic/convergence.py +1 -1
- kailash/nodes/logic/operations.py +1 -1
- kailash/nodes/monitoring/__init__.py +11 -1
- kailash/nodes/monitoring/health_check.py +456 -0
- kailash/nodes/monitoring/log_processor.py +817 -0
- kailash/nodes/monitoring/metrics_collector.py +627 -0
- kailash/nodes/monitoring/performance_benchmark.py +137 -11
- kailash/nodes/rag/advanced.py +7 -7
- kailash/nodes/rag/agentic.py +49 -2
- kailash/nodes/rag/conversational.py +3 -3
- kailash/nodes/rag/evaluation.py +3 -3
- kailash/nodes/rag/federated.py +3 -3
- kailash/nodes/rag/graph.py +3 -3
- kailash/nodes/rag/multimodal.py +3 -3
- kailash/nodes/rag/optimized.py +5 -5
- kailash/nodes/rag/privacy.py +3 -3
- kailash/nodes/rag/query_processing.py +6 -6
- kailash/nodes/rag/realtime.py +1 -1
- kailash/nodes/rag/registry.py +2 -6
- kailash/nodes/rag/router.py +1 -1
- kailash/nodes/rag/similarity.py +7 -7
- kailash/nodes/rag/strategies.py +4 -4
- kailash/nodes/security/abac_evaluator.py +6 -6
- kailash/nodes/security/behavior_analysis.py +5 -6
- kailash/nodes/security/credential_manager.py +1 -1
- kailash/nodes/security/rotating_credentials.py +11 -11
- kailash/nodes/security/threat_detection.py +8 -8
- kailash/nodes/testing/credential_testing.py +2 -2
- kailash/nodes/transform/processors.py +5 -5
- kailash/runtime/local.py +162 -14
- kailash/runtime/parameter_injection.py +425 -0
- kailash/runtime/parameter_injector.py +657 -0
- kailash/runtime/testing.py +2 -2
- kailash/testing/fixtures.py +2 -2
- kailash/workflow/builder.py +99 -18
- kailash/workflow/builder_improvements.py +207 -0
- kailash/workflow/input_handling.py +170 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/METADATA +21 -8
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/RECORD +126 -101
- kailash/mcp/__init__.py +0 -53
- kailash/mcp/client.py +0 -445
- kailash/mcp/server.py +0 -292
- kailash/mcp/server_enhanced.py +0 -449
- kailash/mcp/utils/cache.py +0 -267
- /kailash/{mcp → mcp_server}/client_new.py +0 -0
- /kailash/{mcp → mcp_server}/utils/__init__.py +0 -0
- /kailash/{mcp → mcp_server}/utils/config.py +0 -0
- /kailash/{mcp → mcp_server}/utils/formatters.py +0 -0
- /kailash/{mcp → mcp_server}/utils/metrics.py +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
kailash/workflow/builder.py
CHANGED
@@ -217,6 +217,58 @@ class WorkflowBuilder:
         self._metadata.update(kwargs)
         return self
 
+    def add_workflow_inputs(
+        self, input_node_id: str, input_mappings: dict
+    ) -> "WorkflowBuilder":
+        """
+        Map workflow-level inputs to a specific node's parameters.
+
+        Args:
+            input_node_id: The node that should receive workflow inputs
+            input_mappings: Dict mapping workflow input names to node parameter names
+
+        Returns:
+            Self for chaining
+        """
+        if input_node_id not in self.nodes:
+            raise WorkflowValidationError(f"Node '{input_node_id}' not found")
+
+        # Store input mappings in metadata
+        if "_workflow_inputs" not in self._metadata:
+            self._metadata["_workflow_inputs"] = {}
+        self._metadata["_workflow_inputs"][input_node_id] = input_mappings
+        return self
+
+    def update_node(self, node_id: str, config_updates: dict[str, Any]) -> "WorkflowBuilder":
+        """
+        Update the configuration of an existing node.
+
+        This is essential for enterprise scenarios like:
+        - Dynamic environment-specific configuration
+        - Runtime parameter injection
+        - Security context updates
+        - A/B testing and feature flags
+
+        Args:
+            node_id: ID of the node to update
+            config_updates: Dictionary of configuration updates to apply
+
+        Returns:
+            Self for chaining
+
+        Raises:
+            WorkflowValidationError: If node doesn't exist
+        """
+        if node_id not in self.nodes:
+            raise WorkflowValidationError(f"Node '{node_id}' not found in workflow")
+
+        # Deep merge the configuration updates
+        if "config" not in self.nodes[node_id]:
+            self.nodes[node_id]["config"] = {}
+
+        self.nodes[node_id]["config"].update(config_updates)
+        return self
+
     def build(self, workflow_id: str | None = None, **kwargs) -> Workflow:
         """
         Build and return a Workflow instance.
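The two methods added above are small additive builder APIs. A minimal usage sketch follows; the node type, node ID, and parameter names are illustrative assumptions, not values taken from this diff.

```python
from kailash.workflow.builder import WorkflowBuilder

builder = WorkflowBuilder()

# Register an entry node (type and config values are hypothetical examples).
builder.add_node("PythonCodeNode", "loader", {"code": "result = {'rows': []}"})

# Declare that the workflow-level input "source_path" should land on this
# node's "path" parameter; the mapping is stored in the builder's metadata.
builder.add_workflow_inputs("loader", {"source_path": "path"})

# Patch the node's configuration later, e.g. per environment, before building.
builder.update_node("loader", {"timeout": 30})

workflow = builder.build()
```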
@@ -338,32 +390,61 @@ class WorkflowBuilder:
             if key not in ["nodes", "connections"]:
                 builder._metadata[key] = value
 
-        # Add nodes
-
-
-
-
-
-
-
-
-
-
-
+        # Add nodes - handle both dict and list formats
+        nodes_config = config.get("nodes", [])
+
+        if isinstance(nodes_config, dict):
+            # Dict format: {node_id: {type: "...", parameters: {...}}}
+            for node_id, node_config in nodes_config.items():
+                node_type = node_config.get("type")
+                node_params = node_config.get("parameters", node_config.get("config", {}))
+
+                if not node_type:
+                    raise WorkflowValidationError(
+                        f"Node type is required for node '{node_id}'"
+                    )
+
+                builder.add_node(node_type, node_id, node_params)
+        else:
+            # List format: [{id: "...", type: "...", config: {...}}]
+            for node_config in nodes_config:
+                node_id = node_config.get("id")
+                node_type = node_config.get("type")
+                node_params = node_config.get("config", {})
+
+                if not node_id:
+                    raise WorkflowValidationError("Node ID is required")
+                if not node_type:
+                    raise WorkflowValidationError(
+                        f"Node type is required for node '{node_id}'"
+                    )
 
-
+                builder.add_node(node_type, node_id, node_params)
 
-        # Add connections
+        # Add connections - handle both full and simple formats
         for conn in config.get("connections", []):
+            # Try full format first: {from_node, from_output, to_node, to_input}
             from_node = conn.get("from_node")
             from_output = conn.get("from_output")
-            to_node = conn.get("to_node")
+            to_node = conn.get("to_node")
             to_input = conn.get("to_input")
-
-
+
+            # Handle simple format: {from, to} with default outputs/inputs
+            if not from_node:
+                from_node = conn.get("from")
+                from_output = conn.get("from_output", "result")  # Default output
+            if not to_node:
+                to_node = conn.get("to")
+                to_input = conn.get("to_input", "input")  # Default input
+
+            if not all([from_node, to_node]):
                 raise WorkflowValidationError(
-                    f"Invalid connection: missing
+                    f"Invalid connection: missing from_node and to_node. Connection data: {conn}"
                 )
+
+            # Use defaults if not specified
+            from_output = from_output or "result"
+            to_input = to_input or "input"
 
             builder.add_connection(from_node, from_output, to_node, to_input)
 
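The rewritten loading logic above accepts two equivalent configuration shapes. Both are sketched below with hypothetical node types and values; the name of the enclosing classmethod is not visible in this hunk.

```python
# Dict-keyed nodes plus the simple {from, to} connection form; omitted ports
# fall back to from_output="result" and to_input="input".
config_dict_style = {
    "nodes": {
        "reader": {"type": "PythonCodeNode", "parameters": {"code": "result = 1"}},
        "writer": {"type": "PythonCodeNode", "parameters": {"code": "result = 2"}},
    },
    "connections": [{"from": "reader", "to": "writer"}],
}

# List-of-dicts nodes plus the fully specified connection form.
config_list_style = {
    "nodes": [
        {"id": "reader", "type": "PythonCodeNode", "config": {"code": "result = 1"}},
        {"id": "writer", "type": "PythonCodeNode", "config": {"code": "result = 2"}},
    ],
    "connections": [
        {
            "from_node": "reader",
            "from_output": "result",
            "to_node": "writer",
            "to_input": "input",
        }
    ],
}
```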
kailash/workflow/builder_improvements.py
ADDED
@@ -0,0 +1,207 @@
+"""
+WorkflowBuilder Improvements for Parameter Passing
+
+This module contains improvements to the WorkflowBuilder to handle
+parameter passing to nodes without incoming connections.
+"""
+
+from typing import Any, Dict, List, Optional
+
+from kailash.workflow.builder import WorkflowBuilder
+from kailash.workflow.graph import Workflow
+
+
+class ImprovedWorkflowBuilder(WorkflowBuilder):
+    """Enhanced WorkflowBuilder that automatically handles parameter passing"""
+
+    def __init__(self):
+        super().__init__()
+        self.workflow_parameters: Dict[str, Any] = {}
+        self.parameter_mappings: Dict[str, Dict[str, str]] = {}
+
+    def set_workflow_parameters(self, **parameters) -> "ImprovedWorkflowBuilder":
+        """
+        Set default parameters that will be passed to all nodes.
+
+        Args:
+            **parameters: Key-value pairs of workflow-level parameters
+
+        Returns:
+            Self for chaining
+        """
+        self.workflow_parameters.update(parameters)
+        return self
+
+    def add_parameter_mapping(
+        self, node_id: str, mappings: Dict[str, str]
+    ) -> "ImprovedWorkflowBuilder":
+        """
+        Add parameter mappings for a specific node.
+
+        Args:
+            node_id: Node to configure
+            mappings: Dict mapping workflow param names to node param names
+
+        Returns:
+            Self for chaining
+        """
+        if node_id not in self.parameter_mappings:
+            self.parameter_mappings[node_id] = {}
+        self.parameter_mappings[node_id].update(mappings)
+        return self
+
+    def add_input_connection(
+        self, to_node: str, to_input: str, from_workflow_param: str
+    ) -> "ImprovedWorkflowBuilder":
+        """
+        Connect a workflow parameter directly to a node input.
+
+        Args:
+            to_node: Target node ID
+            to_input: Input parameter name on the node
+            from_workflow_param: Workflow parameter name
+
+        Returns:
+            Self for chaining
+        """
+        # Add a special connection type for workflow inputs
+        connection = {
+            "from_node": "__workflow_input__",
+            "from_output": from_workflow_param,
+            "to_node": to_node,
+            "to_input": to_input,
+            "is_workflow_input": True,
+        }
+        self.connections.append(connection)
+        return self
+
+    def build(self, workflow_id: str | None = None, **kwargs) -> Workflow:
+        """
+        Build the workflow with automatic parameter injection.
+
+        Returns:
+            Enhanced Workflow instance
+        """
+        # First, build the base workflow
+        workflow = super().build(workflow_id, **kwargs)
+
+        # Find nodes without incoming connections
+        nodes_with_inputs = set()
+        for conn in self.connections:
+            if not conn.get("is_workflow_input"):
+                nodes_with_inputs.add(conn["to_node"])
+
+        nodes_without_inputs = set(self.nodes.keys()) - nodes_with_inputs
+
+        # For each node without inputs, check if it needs workflow parameters
+        for node_id in nodes_without_inputs:
+            node = self.nodes[node_id]
+            node_instance = workflow.get_node(node_id)
+
+            if hasattr(node_instance, "get_parameters"):
+                params = node_instance.get_parameters()
+
+                # Check which required parameters are missing from config
+                for param_name, param_def in params.items():
+                    if param_def.required and param_name not in node["config"]:
+                        # Check if this parameter should come from workflow parameters
+                        if param_name in self.workflow_parameters:
+                            # Add to node config
+                            node["config"][param_name] = self.workflow_parameters[
+                                param_name
+                            ]
+                        elif node_id in self.parameter_mappings:
+                            # Check parameter mappings
+                            mapping = self.parameter_mappings[node_id]
+                            if param_name in mapping:
+                                workflow_param = mapping[param_name]
+                                if workflow_param in self.workflow_parameters:
+                                    node["config"][param_name] = (
+                                        self.workflow_parameters[workflow_param]
+                                    )
+
+        # Store workflow parameters in metadata for runtime reference
+        workflow._metadata["workflow_parameters"] = self.workflow_parameters
+        workflow._metadata["parameter_mappings"] = self.parameter_mappings
+
+        return workflow
+
+
+def create_user_login_workflow_improved(config: Dict[str, Any]) -> Workflow:
+    """
+    Example of creating a login workflow with proper parameter handling.
+    """
+    workflow = ImprovedWorkflowBuilder()
+
+    # Set workflow-level parameters that will be shared
+    workflow.set_workflow_parameters(
+        tenant_id="default", database_config=config["database_config"]
+    )
+
+    # Add user fetcher node
+    workflow.add_node(
+        "UserManagementNode",
+        "user_fetcher",
+        {
+            "operation": "get_user",
+            "identifier": "$.email",
+            "identifier_type": "email",
+            # tenant_id and database_config will be auto-injected
+        },
+    )
+
+    # Map workflow inputs to the first node
+    workflow.add_input_connection("user_fetcher", "email", "email")
+    workflow.add_input_connection("user_fetcher", "password", "password")
+
+    # Add other nodes...
+    workflow.add_node(
+        "PythonCodeNode",
+        "password_verifier",
+        {"code": "# Password verification code here"},
+    )
+
+    # Connect nodes
+    workflow.add_connection("user_fetcher", "result", "password_verifier", "input")
+
+    return workflow.build(name="user_login_improved")
+
+
+# Alternative approach: Fix in the existing WorkflowBuilder
+def patch_workflow_builder():
+    """
+    Monkey patch the existing WorkflowBuilder to handle parameters better.
+    """
+    original_build = WorkflowBuilder.build
+
+    def enhanced_build(self, workflow_id: str | None = None, **kwargs) -> Workflow:
+        # Build the workflow normally
+        workflow = original_build(self, workflow_id, **kwargs)
+
+        # Enhanced parameter handling
+        # Find nodes without incoming connections and inject common parameters
+        nodes_with_inputs = set()
+        for edge in workflow._graph.edges():
+            nodes_with_inputs.add(edge[1])  # target node
+
+        # Get all nodes
+        all_nodes = set(workflow._graph.nodes())
+        nodes_without_inputs = all_nodes - nodes_with_inputs
+
+        # Common parameters that should be injected
+        common_params = {
+            "tenant_id": "default",
+            "database_config": kwargs.get("database_config", {}),
+        }
+
+        for node_id in nodes_without_inputs:
+            if node_id in workflow._nodes:
+                node_instance = workflow._nodes[node_id]
+                # Update node config with common parameters if not already set
+                for param, value in common_params.items():
+                    if param not in node_instance.config:
+                        node_instance.config[param] = value
+
+        return workflow
+
+    WorkflowBuilder.build = enhanced_build
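Because the new module ships both a subclass and a monkey-patch variant, here is a short sketch of the patch-based path; the node type, parameters, and connection string are illustrative assumptions rather than anything confirmed by this diff.

```python
from kailash.workflow.builder import WorkflowBuilder
from kailash.workflow.builder_improvements import patch_workflow_builder

# Swap in the enhanced build() once at application start-up.
patch_workflow_builder()

builder = WorkflowBuilder()
# Hypothetical entry node that requires tenant_id and database_config.
builder.add_node("UserManagementNode", "user_fetcher", {"operation": "get_user"})

# The patched build() injects tenant_id (default "default") and the
# database_config keyword into entry nodes that do not already define them.
workflow = builder.build(
    database_config={"connection_string": "postgresql://localhost/app"}
)
```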
kailash/workflow/input_handling.py
ADDED
@@ -0,0 +1,170 @@
+"""
+Workflow Input Handling Enhancement
+
+This module provides a solution for properly passing workflow-level parameters
+to nodes, especially those without incoming connections.
+"""
+
+import logging
+from typing import Any, Dict, List, Optional, Set
+
+from kailash.workflow.graph import Workflow
+
+logger = logging.getLogger(__name__)
+
+
+class WorkflowInputHandler:
+    """Handles workflow input parameter distribution to nodes"""
+
+    @staticmethod
+    def inject_workflow_parameters(
+        workflow: Workflow, parameters: Dict[str, Any]
+    ) -> None:
+        """
+        Inject workflow parameters into nodes that need them.
+
+        This method identifies nodes without incoming connections and ensures
+        they receive necessary parameters from the workflow input.
+
+        Args:
+            workflow: The workflow to process
+            parameters: Parameters passed to the workflow
+        """
+        # Find nodes without incoming connections
+        nodes_with_inputs = set()
+        for edge in workflow._graph.edges():
+            nodes_with_inputs.add(edge[1])  # target node
+
+        all_nodes = set(workflow._graph.nodes())
+        entry_nodes = all_nodes - nodes_with_inputs
+
+        logger.debug(f"Found entry nodes without inputs: {entry_nodes}")
+
+        # Process each entry node
+        for node_id in entry_nodes:
+            if node_id not in workflow._nodes:
+                continue
+
+            node_instance = workflow._nodes[node_id]
+            node_type = type(node_instance).__name__
+
+            logger.debug(f"Processing entry node '{node_id}' of type {node_type}")
+
+            # Get node's required parameters
+            if hasattr(node_instance, "get_parameters"):
+                node_params = node_instance.get_parameters()
+
+                for param_name, param_def in node_params.items():
+                    if param_def.required:
+                        # Check if parameter is already in node config
+                        if param_name not in node_instance.config:
+                            # Try to find it in workflow parameters
+                            if param_name in parameters:
+                                logger.info(
+                                    f"Injecting parameter '{param_name}' "
+                                    f"into node '{node_id}'"
+                                )
+                                node_instance.config[param_name] = parameters[
+                                    param_name
+                                ]
+                            # Special handling for common parameters
+                            elif (
+                                param_name == "tenant_id"
+                                and "tenant_id" not in parameters
+                            ):
+                                # Use default tenant if not specified
+                                node_instance.config[param_name] = "default"
+                                logger.info(
+                                    f"Using default tenant_id for node '{node_id}'"
+                                )
+                            elif param_name == "database_config":
+                                # Try to find database config in various places
+                                db_config = (
+                                    parameters.get("database_config")
+                                    or parameters.get("db_config")
+                                    or workflow._metadata.get("database_config")
+                                )
+                                if db_config:
+                                    node_instance.config[param_name] = db_config
+                                    logger.info(
+                                        f"Injecting database_config into node '{node_id}'"
+                                    )
+
+    @staticmethod
+    def create_input_mappings(
+        workflow: Workflow, mappings: Dict[str, Dict[str, str]]
+    ) -> None:
+        """
+        Create explicit mappings from workflow inputs to node parameters.
+
+        Args:
+            workflow: The workflow to configure
+            mappings: Dict of node_id -> {workflow_param: node_param} mappings
+        """
+        for node_id, param_mappings in mappings.items():
+            if node_id not in workflow._nodes:
+                logger.warning(f"Node '{node_id}' not found in workflow")
+                continue
+
+            node_instance = workflow._nodes[node_id]
+
+            # Store mappings in node metadata for runtime use
+            if "_input_mappings" not in node_instance.config:
+                node_instance.config["_input_mappings"] = {}
+
+            node_instance.config["_input_mappings"].update(param_mappings)
+            logger.info(
+                f"Created input mappings for node '{node_id}': {param_mappings}"
+            )
+
+
+def enhance_workflow_execution(original_execute):
+    """
+    Decorator to enhance workflow execution with parameter injection.
+
+    This wraps the workflow execution to ensure parameters are properly
+    distributed to nodes before execution begins.
+    """
+
+    def enhanced_execute(self, parameters: Dict[str, Any] = None, **kwargs):
+        # Inject parameters before execution
+        if parameters:
+            WorkflowInputHandler.inject_workflow_parameters(self, parameters)
+
+        # Call original execution
+        return original_execute(self, parameters, **kwargs)
+
+    return enhanced_execute
+
+
+# Example usage for fixing the login workflow
+def fix_login_workflow(workflow: Workflow, config: Dict[str, Any]) -> None:
+    """
+    Fix the login workflow to properly handle parameters.
+
+    Args:
+        workflow: The login workflow to fix
+        config: Application configuration
+    """
+    # Define input mappings for the user_fetcher node
+    mappings = {
+        "user_fetcher": {
+            "email": "identifier",  # Map workflow 'email' to node 'identifier'
+            "tenant_id": "tenant_id",
+            "database_config": "database_config",
+        }
+    }
+
+    # Apply mappings
+    WorkflowInputHandler.create_input_mappings(workflow, mappings)
+
+    # Set default values for common parameters
+    if "user_fetcher" in workflow._nodes:
+        node = workflow._nodes["user_fetcher"]
+        if "tenant_id" not in node.config:
+            node.config["tenant_id"] = "default"
+        if "database_config" not in node.config:
+            node.config["database_config"] = {
+                "connection_string": config.get("DATABASE_URL"),
+                "database_type": "postgresql",
+            }
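A minimal sketch of driving these helpers directly, assuming a previously built `workflow` object and illustrative parameter names:

```python
from kailash.workflow.input_handling import WorkflowInputHandler

# Push workflow-level values into entry nodes (nodes with no incoming edges)
# whose required parameters are not yet present in their config.
runtime_params = {"email": "user@example.com", "tenant_id": "acme"}
WorkflowInputHandler.inject_workflow_parameters(workflow, runtime_params)

# Or record explicit workflow-input -> node-parameter mappings up front.
WorkflowInputHandler.create_input_mappings(
    workflow, {"user_fetcher": {"email": "identifier"}}
)
```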
{kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kailash
-Version: 0.6.2
+Version: 0.6.4
 Summary: Python SDK for the Kailash container-node architecture
 Home-page: https://github.com/integrum/kailash-python-sdk
 Author: Integrum
@@ -65,6 +65,19 @@ Requires-Dist: qrcode>=8.2
 Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: bcrypt>=4.3.0
 Requires-Dist: plotly>=6.2.0
+Requires-Dist: redis[asyncio]>=6.2.0
+Requires-Dist: faker>=37.4.0
+Requires-Dist: structlog>=25.4.0
+Requires-Dist: authlib>=1.6.0
+Requires-Dist: slowapi>=0.1.9
+Requires-Dist: limits>=5.4.0
+Requires-Dist: prometheus-client>=0.22.1
+Requires-Dist: opentelemetry-api>=1.34.1
+Requires-Dist: opentelemetry-sdk>=1.34.1
+Requires-Dist: passlib>=1.7.4
+Requires-Dist: pyotp>=2.9.0
+Requires-Dist: opentelemetry-instrumentation-fastapi>=0.55b1
+Requires-Dist: seaborn>=0.13.2
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0; extra == "dev"
 Requires-Dist: pytest-cov>=3.0; extra == "dev"
@@ -99,16 +112,16 @@ Dynamic: requires-python
 
 ---
 
-## 🔥 Latest Release: v0.6.
+## 🔥 Latest Release: v0.6.3 (July 5, 2025)
 
-**
+**Comprehensive MCP Platform & Documentation Improvements**
 
--
--
--
--
+- 🚀 **MCP Testing**: 407 tests with 100% pass rate across 8 MCP components
+- 📚 **Documentation**: Fixed 200+ code examples, all now execute correctly
+- 🏢 **Enterprise**: Complete MCP platform integration with production workflows
+- 🔧 **Platform**: Resolved namespace collision (kailash.mcp → kailash.mcp_server)
 
-[Full Changelog](changelogs/releases/v0.6.
+[Full Changelog](changelogs/releases/v0.6.3-2025-07-05.md) | [Previous Release](changelogs/releases/v0.6.1-2025-01-26.md)
 
 ## ✨ Highlights
 