kailash 0.6.3__py3-none-any.whl → 0.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +3 -3
- kailash/api/custom_nodes_secure.py +3 -3
- kailash/api/gateway.py +1 -1
- kailash/api/studio.py +1 -1
- kailash/api/workflow_api.py +2 -2
- kailash/core/resilience/bulkhead.py +475 -0
- kailash/core/resilience/circuit_breaker.py +92 -10
- kailash/core/resilience/health_monitor.py +578 -0
- kailash/edge/discovery.py +86 -0
- kailash/mcp_server/__init__.py +309 -33
- kailash/mcp_server/advanced_features.py +1022 -0
- kailash/mcp_server/ai_registry_server.py +27 -2
- kailash/mcp_server/auth.py +789 -0
- kailash/mcp_server/client.py +645 -378
- kailash/mcp_server/discovery.py +1593 -0
- kailash/mcp_server/errors.py +673 -0
- kailash/mcp_server/oauth.py +1727 -0
- kailash/mcp_server/protocol.py +1126 -0
- kailash/mcp_server/registry_integration.py +587 -0
- kailash/mcp_server/server.py +1228 -96
- kailash/mcp_server/transports.py +1169 -0
- kailash/mcp_server/utils/__init__.py +6 -1
- kailash/mcp_server/utils/cache.py +250 -7
- kailash/middleware/auth/auth_manager.py +3 -3
- kailash/middleware/communication/api_gateway.py +1 -1
- kailash/middleware/communication/realtime.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +1 -1
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/admin/audit_log.py +6 -6
- kailash/nodes/admin/permission_check.py +8 -8
- kailash/nodes/admin/role_management.py +32 -28
- kailash/nodes/admin/schema.sql +6 -1
- kailash/nodes/admin/schema_manager.py +13 -13
- kailash/nodes/admin/security_event.py +15 -15
- kailash/nodes/admin/tenant_isolation.py +3 -3
- kailash/nodes/admin/transaction_utils.py +3 -3
- kailash/nodes/admin/user_management.py +21 -21
- kailash/nodes/ai/a2a.py +11 -11
- kailash/nodes/ai/ai_providers.py +9 -12
- kailash/nodes/ai/embedding_generator.py +13 -14
- kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
- kailash/nodes/ai/iterative_llm_agent.py +2 -2
- kailash/nodes/ai/llm_agent.py +210 -33
- kailash/nodes/ai/self_organizing.py +2 -2
- kailash/nodes/alerts/discord.py +4 -4
- kailash/nodes/api/graphql.py +6 -6
- kailash/nodes/api/http.py +10 -10
- kailash/nodes/api/rate_limiting.py +4 -4
- kailash/nodes/api/rest.py +15 -15
- kailash/nodes/auth/mfa.py +3 -3
- kailash/nodes/auth/risk_assessment.py +2 -2
- kailash/nodes/auth/session_management.py +5 -5
- kailash/nodes/auth/sso.py +143 -0
- kailash/nodes/base.py +8 -2
- kailash/nodes/base_async.py +16 -2
- kailash/nodes/base_with_acl.py +2 -2
- kailash/nodes/cache/__init__.py +9 -0
- kailash/nodes/cache/cache.py +1172 -0
- kailash/nodes/cache/cache_invalidation.py +874 -0
- kailash/nodes/cache/redis_pool_manager.py +595 -0
- kailash/nodes/code/async_python.py +2 -1
- kailash/nodes/code/python.py +194 -30
- kailash/nodes/compliance/data_retention.py +6 -6
- kailash/nodes/compliance/gdpr.py +5 -5
- kailash/nodes/data/__init__.py +10 -0
- kailash/nodes/data/async_sql.py +1956 -129
- kailash/nodes/data/optimistic_locking.py +906 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/redis.py +378 -0
- kailash/nodes/data/sql.py +314 -3
- kailash/nodes/data/streaming.py +21 -0
- kailash/nodes/enterprise/__init__.py +8 -0
- kailash/nodes/enterprise/audit_logger.py +285 -0
- kailash/nodes/enterprise/batch_processor.py +22 -3
- kailash/nodes/enterprise/data_lineage.py +1 -1
- kailash/nodes/enterprise/mcp_executor.py +205 -0
- kailash/nodes/enterprise/service_discovery.py +150 -0
- kailash/nodes/enterprise/tenant_assignment.py +108 -0
- kailash/nodes/logic/async_operations.py +2 -2
- kailash/nodes/logic/convergence.py +1 -1
- kailash/nodes/logic/operations.py +1 -1
- kailash/nodes/monitoring/__init__.py +11 -1
- kailash/nodes/monitoring/health_check.py +456 -0
- kailash/nodes/monitoring/log_processor.py +817 -0
- kailash/nodes/monitoring/metrics_collector.py +627 -0
- kailash/nodes/monitoring/performance_benchmark.py +137 -11
- kailash/nodes/rag/advanced.py +7 -7
- kailash/nodes/rag/agentic.py +49 -2
- kailash/nodes/rag/conversational.py +3 -3
- kailash/nodes/rag/evaluation.py +3 -3
- kailash/nodes/rag/federated.py +3 -3
- kailash/nodes/rag/graph.py +3 -3
- kailash/nodes/rag/multimodal.py +3 -3
- kailash/nodes/rag/optimized.py +5 -5
- kailash/nodes/rag/privacy.py +3 -3
- kailash/nodes/rag/query_processing.py +6 -6
- kailash/nodes/rag/realtime.py +1 -1
- kailash/nodes/rag/registry.py +1 -1
- kailash/nodes/rag/router.py +1 -1
- kailash/nodes/rag/similarity.py +7 -7
- kailash/nodes/rag/strategies.py +4 -4
- kailash/nodes/security/abac_evaluator.py +6 -6
- kailash/nodes/security/behavior_analysis.py +5 -5
- kailash/nodes/security/credential_manager.py +1 -1
- kailash/nodes/security/rotating_credentials.py +11 -11
- kailash/nodes/security/threat_detection.py +8 -8
- kailash/nodes/testing/credential_testing.py +2 -2
- kailash/nodes/transform/processors.py +5 -5
- kailash/runtime/local.py +163 -9
- kailash/runtime/parameter_injection.py +425 -0
- kailash/runtime/parameter_injector.py +657 -0
- kailash/runtime/testing.py +2 -2
- kailash/testing/fixtures.py +2 -2
- kailash/workflow/builder.py +99 -14
- kailash/workflow/builder_improvements.py +207 -0
- kailash/workflow/input_handling.py +170 -0
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/METADATA +22 -9
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/RECORD +122 -95
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/WHEEL +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,657 @@
|
|
1
|
+
"""Simple parameter injection framework for enterprise nodes.
|
2
|
+
|
3
|
+
This module provides a simpler approach to handling runtime parameter injection
|
4
|
+
for enterprise nodes that require connection configuration.
|
5
|
+
"""
|
6
|
+
|
7
|
+
import logging
|
8
|
+
from typing import Any, Dict, Optional
|
9
|
+
|
10
|
+
from kailash.nodes.base import Node
|
11
|
+
|
12
|
+
logger = logging.getLogger(__name__)
|
13
|
+
|
14
|
+
|
15
|
+
class DeferredConfigNode(Node):
    """Wrapper node whose real implementation is created lazily.

    Construction of the wrapped node class is postponed until enough
    configuration (initial + runtime) has accumulated, letting enterprise
    nodes receive connection settings at execution time instead of at
    workflow-build time.
    """

    def __init__(self, node_class, **initial_config):
        """Store the target class and defer its construction.

        Args:
            node_class: The actual node class to instantiate later.
            **initial_config: Configuration known at wrap time.
        """
        # These attributes must exist before Node.__init__ runs, because
        # the base class may call get_parameters() during its own setup.
        self._node_class = node_class
        self._initial_config = initial_config
        self._runtime_config = {}
        self._actual_node = None
        self._is_initialized = False

        wrapper_name = initial_config.get("name", f"deferred_{node_class.__name__}")
        base_config = dict(initial_config)
        base_config.pop("name", None)  # avoid passing name twice to Node
        super().__init__(name=wrapper_name, **base_config)

    def set_runtime_config(self, **config):
        """Merge runtime configuration into the pending config."""
        self._runtime_config.update(config)
        logger.debug(
            f"Set runtime config for {self._node_class.__name__}: {list(config.keys())}"
        )

    def get_effective_config(self):
        """Return the initial config overlaid with runtime config."""
        return {**self._initial_config, **self._runtime_config}

    def _initialize_if_needed(self):
        """Build the wrapped node once sufficient config is available."""
        if self._is_initialized or not self._has_required_config():
            return
        merged = self.get_effective_config()
        try:
            self._actual_node = self._node_class(**merged)
            self._is_initialized = True
            logger.info(
                f"Initialized {self._node_class.__name__} with runtime config"
            )
        except Exception as e:
            # Best-effort: a later call may supply the missing pieces.
            logger.warning(f"Failed to initialize {self._node_class.__name__}: {e}")

    def _has_required_config(self):
        """Heuristic check that enough config exists to build the node."""
        merged = self.get_effective_config()
        class_name = self._node_class.__name__

        if "OAuth2" in class_name:
            # OAuth2 nodes minimally need an endpoint and a client id.
            return "token_url" in merged and "client_id" in merged

        if "SQL" in class_name:
            # SQL nodes minimally need a database target and a query.
            db_ready = "connection_string" in merged or "database" in merged
            return db_ready and "query" in merged

        # Any other node type is assumed ready.
        return True

    def get_parameters(self):
        """Delegate to the wrapped node, or fall back to type defaults."""
        node = self._actual_node
        return node.get_parameters() if node else self._get_default_parameters()

    def _get_default_parameters(self):
        """Default parameter definitions used before the node exists."""
        from kailash.nodes.base import NodeParameter

        class_name = self._node_class.__name__

        if "OAuth2" in class_name:
            return {
                "token_url": NodeParameter(
                    name="token_url",
                    type=str,
                    required=True,
                    description="OAuth token endpoint URL",
                ),
                "client_id": NodeParameter(
                    name="client_id",
                    type=str,
                    required=True,
                    description="OAuth client ID",
                ),
                "client_secret": NodeParameter(
                    name="client_secret",
                    type=str,
                    required=False,
                    description="OAuth client secret",
                ),
                "grant_type": NodeParameter(
                    name="grant_type",
                    type=str,
                    required=False,
                    default="client_credentials",
                    description="OAuth grant type",
                ),
            }

        if "SQL" in class_name:
            return {
                "database_type": NodeParameter(
                    name="database_type",
                    type=str,
                    required=False,
                    default="postgresql",
                    description="Database type",
                ),
                "host": NodeParameter(
                    name="host", type=str, required=False, description="Database host"
                ),
                "database": NodeParameter(
                    name="database",
                    type=str,
                    required=False,
                    description="Database name",
                ),
                "user": NodeParameter(
                    name="user", type=str, required=False, description="Database user"
                ),
                "password": NodeParameter(
                    name="password",
                    type=str,
                    required=False,
                    description="Database password",
                ),
                "query": NodeParameter(
                    name="query",
                    type=str,
                    required=True,
                    description="SQL query to execute",
                ),
            }

        return {}

    def validate_inputs(self, **kwargs):
        """Capture config params from inputs, then delegate if possible."""
        runtime_params = self._extract_config_params(kwargs)
        if runtime_params:
            self.set_runtime_config(**runtime_params)

        # Opportunistically build the wrapped node with what we have.
        self._initialize_if_needed()

        actual = self._actual_node
        if actual and hasattr(actual, "validate_inputs"):
            return actual.validate_inputs(**kwargs)

        # No wrapped node yet: accept inputs unchanged.
        return kwargs

    def _extract_config_params(self, inputs):
        """Pick out the keys of *inputs* that are configuration, not data."""
        config_keys = frozenset(
            (
                # OAuth2 parameters
                "token_url",
                "client_id",
                "client_secret",
                "grant_type",
                "scope",
                "username",
                "password",
                "refresh_token",
                # SQL parameters
                "database_type",
                "connection_string",
                "host",
                "port",
                "database",
                "user",
                "pool_size",
                "max_pool_size",
                "timeout",
            )
        )
        return {key: inputs[key] for key in inputs if key in config_keys}

    def _ensure_ready(self, kwargs):
        """Apply config found in *kwargs* and require the wrapped node.

        Raises:
            RuntimeError: If the wrapped node still cannot be built.
        """
        runtime_params = self._extract_config_params(kwargs)
        if runtime_params:
            self.set_runtime_config(**runtime_params)

        self._initialize_if_needed()

        if not self._actual_node:
            raise RuntimeError(
                f"Cannot execute {self._node_class.__name__} - missing required configuration. "
                f"Provided config: {list(self.get_effective_config().keys())}"
            )

    def run(self, **kwargs):
        """Execute the wrapped node synchronously."""
        self._ensure_ready(kwargs)
        # execute() is the lowest common denominator across node types.
        return self._actual_node.execute(**kwargs)

    async def async_run(self, **kwargs):
        """Execute the wrapped node, preferring its async entry point."""
        self._ensure_ready(kwargs)
        if hasattr(self._actual_node, "async_run"):
            return await self._actual_node.async_run(**kwargs)
        return self._actual_node.execute(**kwargs)
251
|
+
|
252
|
+
def create_deferred_oauth2(**kwargs):
    """Wrap OAuth2Node in a deferred node that accepts runtime configuration.

    Args:
        **kwargs: Initial configuration parameters

    Returns:
        DeferredConfigNode wrapping OAuth2Node
    """
    from kailash.nodes.api.auth import OAuth2Node

    deferred = DeferredConfigNode(OAuth2Node, **kwargs)
    return deferred
265
|
+
|
266
|
+
def create_deferred_sql(**kwargs):
    """Wrap AsyncSQLDatabaseNode in a deferred node for runtime configuration.

    Args:
        **kwargs: Initial configuration parameters

    Returns:
        DeferredConfigNode wrapping AsyncSQLDatabaseNode
    """
    from kailash.nodes.data.async_sql import AsyncSQLDatabaseNode

    deferred = DeferredConfigNode(AsyncSQLDatabaseNode, **kwargs)
    return deferred
279
|
+
|
280
|
+
def create_deferred_node(node_class, **kwargs):
    """Wrap any enterprise node class in a deferred-configuration node.

    Args:
        node_class: The node class to wrap
        **kwargs: Initial configuration parameters

    Returns:
        DeferredConfigNode wrapping the specified node class
    """
    return DeferredConfigNode(node_class, **kwargs)
|
292
|
+
|
293
|
+
class WorkflowParameterInjector:
|
294
|
+
"""Workflow-level parameter injection for enterprise nodes."""
|
295
|
+
|
296
|
+
def __init__(self, workflow, debug=False):
|
297
|
+
"""Initialize the workflow parameter injector.
|
298
|
+
|
299
|
+
Args:
|
300
|
+
workflow: The workflow to inject parameters into
|
301
|
+
debug: Enable debug logging
|
302
|
+
"""
|
303
|
+
self.workflow = workflow
|
304
|
+
self.debug = debug
|
305
|
+
self.logger = logging.getLogger(__name__)
|
306
|
+
|
307
|
+
def inject_parameters(self, workflow_params: Dict[str, Any]) -> None:
|
308
|
+
"""Inject workflow-level parameters into deferred configuration nodes.
|
309
|
+
|
310
|
+
Args:
|
311
|
+
workflow_params: Dictionary of workflow-level parameters
|
312
|
+
"""
|
313
|
+
if self.debug:
|
314
|
+
self.logger.debug(
|
315
|
+
f"Injecting workflow parameters: {list(workflow_params.keys())}"
|
316
|
+
)
|
317
|
+
|
318
|
+
# For now, this is a placeholder implementation
|
319
|
+
# In a full implementation, this would traverse the workflow
|
320
|
+
# and inject parameters into any DeferredConfigNode instances
|
321
|
+
pass
|
322
|
+
|
323
|
+
def transform_workflow_parameters(
|
324
|
+
self, parameters: Dict[str, Any]
|
325
|
+
) -> Dict[str, Dict[str, Any]]:
|
326
|
+
"""Transform workflow parameters for injection.
|
327
|
+
|
328
|
+
Args:
|
329
|
+
parameters: Dictionary of workflow parameters
|
330
|
+
|
331
|
+
Returns:
|
332
|
+
Transformed parameters dictionary in node-specific format
|
333
|
+
"""
|
334
|
+
if not parameters:
|
335
|
+
return {}
|
336
|
+
|
337
|
+
transformed = {}
|
338
|
+
|
339
|
+
# Handle explicit workflow input mappings first
|
340
|
+
if hasattr(self.workflow, "metadata") and self.workflow.metadata:
|
341
|
+
workflow_inputs = self.workflow.metadata.get("_workflow_inputs", {})
|
342
|
+
for node_id, input_mappings in workflow_inputs.items():
|
343
|
+
node_params = {}
|
344
|
+
for workflow_param, node_param in input_mappings.items():
|
345
|
+
# Handle dot notation for nested parameter access
|
346
|
+
value = self._get_nested_parameter(parameters, workflow_param)
|
347
|
+
if value is not None:
|
348
|
+
node_params[node_param] = value
|
349
|
+
if self.debug:
|
350
|
+
self.logger.debug(
|
351
|
+
f"Mapping workflow input {workflow_param} -> {node_param} for node {node_id} (value: {value})"
|
352
|
+
)
|
353
|
+
|
354
|
+
if node_params:
|
355
|
+
transformed[node_id] = node_params
|
356
|
+
|
357
|
+
# ENTERPRISE ENHANCEMENT: Get ALL nodes for parameter injection, not just entry nodes
|
358
|
+
# Real enterprise workflows need parameters available throughout the execution graph
|
359
|
+
all_nodes = self._get_all_nodes()
|
360
|
+
|
361
|
+
if self.debug:
|
362
|
+
self.logger.debug(
|
363
|
+
f"Found nodes for parameter injection: {list(all_nodes.keys())}, "
|
364
|
+
f"injecting parameters: {list(parameters.keys())}"
|
365
|
+
)
|
366
|
+
|
367
|
+
# Distribute workflow parameters to ALL nodes that can accept them
|
368
|
+
for node_id, node_instance in all_nodes.items():
|
369
|
+
# Skip nodes that already have explicit mappings
|
370
|
+
if node_id in transformed:
|
371
|
+
continue
|
372
|
+
|
373
|
+
node_params = {}
|
374
|
+
node_param_defs = node_instance.get_parameters()
|
375
|
+
|
376
|
+
for param_name, value in parameters.items():
|
377
|
+
# Check if this parameter is needed by this node and get the mapped parameter name
|
378
|
+
mapped_param_name = self._get_mapped_parameter_name(
|
379
|
+
param_name, value, node_param_defs, node_instance
|
380
|
+
)
|
381
|
+
if mapped_param_name:
|
382
|
+
node_params[mapped_param_name] = value
|
383
|
+
if self.debug:
|
384
|
+
self.logger.debug(
|
385
|
+
f"Injecting {param_name} -> {mapped_param_name} into node {node_id}"
|
386
|
+
)
|
387
|
+
|
388
|
+
if node_params:
|
389
|
+
transformed[node_id] = node_params
|
390
|
+
|
391
|
+
return transformed
|
392
|
+
|
393
|
+
def _get_nested_parameter(self, parameters: Dict[str, Any], path: str) -> Any:
|
394
|
+
"""Get a nested parameter value using dot notation.
|
395
|
+
|
396
|
+
Args:
|
397
|
+
parameters: Parameters dictionary
|
398
|
+
path: Dot-separated path (e.g., "data.user_id")
|
399
|
+
|
400
|
+
Returns:
|
401
|
+
Value at the specified path or None if not found
|
402
|
+
"""
|
403
|
+
if "." not in path:
|
404
|
+
# Simple parameter lookup
|
405
|
+
return parameters.get(path)
|
406
|
+
|
407
|
+
# Handle nested parameter access
|
408
|
+
parts = path.split(".")
|
409
|
+
current = parameters
|
410
|
+
|
411
|
+
for part in parts:
|
412
|
+
if isinstance(current, dict) and part in current:
|
413
|
+
current = current[part]
|
414
|
+
else:
|
415
|
+
return None
|
416
|
+
|
417
|
+
return current
|
418
|
+
|
419
|
+
def validate_parameters(self, parameters: Dict[str, Any]) -> list[str]:
|
420
|
+
"""Validate workflow parameters.
|
421
|
+
|
422
|
+
Args:
|
423
|
+
parameters: Dictionary of workflow parameters
|
424
|
+
|
425
|
+
Returns:
|
426
|
+
List of warning messages if validation issues found
|
427
|
+
"""
|
428
|
+
warnings = []
|
429
|
+
|
430
|
+
if not parameters:
|
431
|
+
return warnings
|
432
|
+
|
433
|
+
# ENTERPRISE ENHANCEMENT: Check ALL nodes for parameter usage, not just entry nodes
|
434
|
+
all_nodes = self._get_all_nodes()
|
435
|
+
|
436
|
+
# Check if any workflow parameters don't match any node parameters
|
437
|
+
used_params = set()
|
438
|
+
for node_id, node_instance in all_nodes.items():
|
439
|
+
node_param_defs = node_instance.get_parameters()
|
440
|
+
for param_name in parameters.keys():
|
441
|
+
if self._get_mapped_parameter_name(
|
442
|
+
param_name, parameters[param_name], node_param_defs, node_instance
|
443
|
+
):
|
444
|
+
used_params.add(param_name)
|
445
|
+
|
446
|
+
unused_params = set(parameters.keys()) - used_params
|
447
|
+
if unused_params:
|
448
|
+
warnings.append(f"Unused workflow parameters: {list(unused_params)}")
|
449
|
+
|
450
|
+
return warnings
|
451
|
+
|
452
|
+
def _get_entry_nodes(self) -> Dict[str, Any]:
|
453
|
+
"""Get entry nodes (nodes with no incoming connections).
|
454
|
+
|
455
|
+
Returns:
|
456
|
+
Dictionary of entry node IDs to node instances
|
457
|
+
"""
|
458
|
+
entry_nodes = {}
|
459
|
+
|
460
|
+
for node_id in self.workflow.nodes.keys():
|
461
|
+
# Check if this node has any incoming connections
|
462
|
+
has_incoming = False
|
463
|
+
for connection in self.workflow.connections:
|
464
|
+
if connection.target_node == node_id:
|
465
|
+
has_incoming = True
|
466
|
+
break
|
467
|
+
|
468
|
+
if not has_incoming:
|
469
|
+
# Get the actual node instance, not the metadata
|
470
|
+
entry_nodes[node_id] = self.workflow._node_instances[node_id]
|
471
|
+
|
472
|
+
return entry_nodes
|
473
|
+
|
474
|
+
def _get_all_nodes(self) -> Dict[str, Any]:
|
475
|
+
"""Get all nodes in the workflow for enterprise parameter injection.
|
476
|
+
|
477
|
+
ENTERPRISE CAPABILITY: Unlike _get_entry_nodes(), this method returns ALL nodes
|
478
|
+
in the workflow that can potentially accept enterprise parameters. This enables
|
479
|
+
true enterprise-grade parameter flow throughout complex workflows.
|
480
|
+
|
481
|
+
Returns:
|
482
|
+
Dictionary of all node IDs to node instances
|
483
|
+
"""
|
484
|
+
all_nodes = {}
|
485
|
+
|
486
|
+
for node_id in self.workflow.nodes.keys():
|
487
|
+
# Get the actual node instance, not the metadata
|
488
|
+
if (
|
489
|
+
hasattr(self.workflow, "_node_instances")
|
490
|
+
and node_id in self.workflow._node_instances
|
491
|
+
):
|
492
|
+
all_nodes[node_id] = self.workflow._node_instances[node_id]
|
493
|
+
|
494
|
+
return all_nodes
|
495
|
+
|
496
|
+
def _should_inject_parameter(
|
497
|
+
self, param_name: str, param_value: Any, node_param_defs: Dict[str, Any]
|
498
|
+
) -> bool:
|
499
|
+
"""Check if a parameter should be injected into a node.
|
500
|
+
|
501
|
+
Args:
|
502
|
+
param_name: Name of the parameter
|
503
|
+
param_value: Value of the parameter
|
504
|
+
node_param_defs: Node parameter definitions
|
505
|
+
|
506
|
+
Returns:
|
507
|
+
True if parameter should be injected
|
508
|
+
"""
|
509
|
+
# Direct parameter name match
|
510
|
+
if param_name in node_param_defs:
|
511
|
+
return True
|
512
|
+
|
513
|
+
# Check for workflow alias matches
|
514
|
+
for param_def in node_param_defs.values():
|
515
|
+
if (
|
516
|
+
hasattr(param_def, "workflow_alias")
|
517
|
+
and param_def.workflow_alias == param_name
|
518
|
+
):
|
519
|
+
return True
|
520
|
+
|
521
|
+
# Check for auto_map_from matches
|
522
|
+
if hasattr(param_def, "auto_map_from") and param_def.auto_map_from:
|
523
|
+
if param_name in param_def.auto_map_from:
|
524
|
+
return True
|
525
|
+
|
526
|
+
# Check for auto_map_primary matches
|
527
|
+
if hasattr(param_def, "auto_map_primary") and param_def.auto_map_primary:
|
528
|
+
# Primary parameters get first available workflow parameter
|
529
|
+
# This is a simplified implementation - could be more sophisticated
|
530
|
+
return True
|
531
|
+
|
532
|
+
return False
|
533
|
+
|
534
|
+
def _get_mapped_parameter_name(
|
535
|
+
self,
|
536
|
+
param_name: str,
|
537
|
+
param_value: Any,
|
538
|
+
node_param_defs: Dict[str, Any],
|
539
|
+
node_instance=None,
|
540
|
+
) -> str | None:
|
541
|
+
"""Get the mapped parameter name for injection.
|
542
|
+
|
543
|
+
ENTERPRISE ENHANCEMENT: Enhanced to detect and inject parameters into
|
544
|
+
PythonCodeNode functions that accept **kwargs for enterprise parameter injection.
|
545
|
+
|
546
|
+
Args:
|
547
|
+
param_name: Name of the workflow parameter
|
548
|
+
param_value: Value of the parameter
|
549
|
+
node_param_defs: Node parameter definitions
|
550
|
+
|
551
|
+
Returns:
|
552
|
+
The node parameter name to inject to, or the original param_name
|
553
|
+
if the node accepts **kwargs parameters
|
554
|
+
"""
|
555
|
+
# Direct parameter name match
|
556
|
+
if param_name in node_param_defs:
|
557
|
+
return param_name
|
558
|
+
|
559
|
+
# Check for workflow alias matches
|
560
|
+
for node_param_name, param_def in node_param_defs.items():
|
561
|
+
if (
|
562
|
+
hasattr(param_def, "workflow_alias")
|
563
|
+
and param_def.workflow_alias == param_name
|
564
|
+
):
|
565
|
+
return node_param_name
|
566
|
+
|
567
|
+
# Check for auto_map_from matches
|
568
|
+
if hasattr(param_def, "auto_map_from") and param_def.auto_map_from:
|
569
|
+
if param_name in param_def.auto_map_from:
|
570
|
+
return node_param_name
|
571
|
+
|
572
|
+
# Check for auto_map_primary matches
|
573
|
+
if hasattr(param_def, "auto_map_primary") and param_def.auto_map_primary:
|
574
|
+
# Primary parameters get first available workflow parameter
|
575
|
+
# This is a simplified implementation - could be more sophisticated
|
576
|
+
return node_param_name
|
577
|
+
|
578
|
+
# ENTERPRISE FEATURE: Check if this specific node accepts **kwargs
|
579
|
+
# This enables enterprise parameter injection into arbitrary functions
|
580
|
+
if node_instance and self._node_accepts_kwargs(node_instance):
|
581
|
+
# PythonCodeNode with **kwargs can accept any workflow parameter
|
582
|
+
if self.debug:
|
583
|
+
self.logger.debug(
|
584
|
+
f"Injecting workflow parameter '{param_name}' into **kwargs function"
|
585
|
+
)
|
586
|
+
return param_name
|
587
|
+
|
588
|
+
return None
|
589
|
+
|
590
|
+
def _node_accepts_kwargs(self, node_instance) -> bool:
|
591
|
+
"""Check if a node can accept arbitrary keyword arguments.
|
592
|
+
|
593
|
+
ENTERPRISE CAPABILITY: Detects PythonCodeNode instances that have
|
594
|
+
functions with **kwargs parameters, enabling enterprise parameter injection.
|
595
|
+
|
596
|
+
Args:
|
597
|
+
node_instance: The node instance to check
|
598
|
+
|
599
|
+
Returns:
|
600
|
+
True if the node can accept arbitrary parameters via **kwargs
|
601
|
+
"""
|
602
|
+
# Check if this is a PythonCodeNode with a function that accepts **kwargs
|
603
|
+
if (
|
604
|
+
hasattr(node_instance, "__class__")
|
605
|
+
and "PythonCode" in node_instance.__class__.__name__
|
606
|
+
):
|
607
|
+
# For PythonCodeNode created from functions
|
608
|
+
if hasattr(node_instance, "wrapper") and node_instance.wrapper:
|
609
|
+
if hasattr(node_instance.wrapper, "accepts_var_keyword"):
|
610
|
+
return node_instance.wrapper.accepts_var_keyword()
|
611
|
+
|
612
|
+
# For PythonCodeNode with inline code - always accepts parameters
|
613
|
+
if hasattr(node_instance, "code") and node_instance.code:
|
614
|
+
return True
|
615
|
+
|
616
|
+
# For function-based nodes, check the function signature
|
617
|
+
if hasattr(node_instance, "function") and node_instance.function:
|
618
|
+
import inspect
|
619
|
+
|
620
|
+
try:
|
621
|
+
sig = inspect.signature(node_instance.function)
|
622
|
+
return any(
|
623
|
+
param.kind == inspect.Parameter.VAR_KEYWORD
|
624
|
+
for param in sig.parameters.values()
|
625
|
+
)
|
626
|
+
except (ValueError, TypeError):
|
627
|
+
pass
|
628
|
+
|
629
|
+
return False
|
630
|
+
|
631
|
+
def configure_deferred_node(self, node_id: str, **config) -> None:
|
632
|
+
"""Configure a deferred node with runtime parameters.
|
633
|
+
|
634
|
+
Args:
|
635
|
+
node_id: ID of the deferred node to configure
|
636
|
+
**config: Configuration parameters to apply
|
637
|
+
"""
|
638
|
+
if (
|
639
|
+
not hasattr(self.workflow, "_node_instances")
|
640
|
+
or node_id not in self.workflow._node_instances
|
641
|
+
):
|
642
|
+
raise ValueError(f"Node '{node_id}' not found in workflow")
|
643
|
+
|
644
|
+
node_instance = self.workflow._node_instances[node_id]
|
645
|
+
|
646
|
+
# Check if this is a deferred configuration node
|
647
|
+
if hasattr(node_instance, "set_runtime_config"):
|
648
|
+
node_instance.set_runtime_config(**config)
|
649
|
+
# Force initialization now that we have runtime config
|
650
|
+
if hasattr(node_instance, "_initialize_if_needed"):
|
651
|
+
node_instance._initialize_if_needed()
|
652
|
+
if self.debug:
|
653
|
+
self.logger.debug(
|
654
|
+
f"Configured deferred node '{node_id}' with parameters: {list(config.keys())}"
|
655
|
+
)
|
656
|
+
else:
|
657
|
+
raise ValueError(f"Node '{node_id}' is not a deferred configuration node")
|
kailash/runtime/testing.py
CHANGED
@@ -309,7 +309,7 @@ class SecurityTestHelper:
|
|
309
309
|
|
310
310
|
for scenario in scenarios:
|
311
311
|
try:
|
312
|
-
result = tester.
|
312
|
+
result = tester.execute(
|
313
313
|
credential_type=credential_type,
|
314
314
|
scenario=scenario,
|
315
315
|
mock_data=(
|
@@ -442,7 +442,7 @@ class NodeTestHelper:
|
|
442
442
|
except (NodeValidationError, WorkflowExecutionError):
|
443
443
|
return {}
|
444
444
|
else:
|
445
|
-
result = node.
|
445
|
+
result = node.execute(**inputs)
|
446
446
|
|
447
447
|
# Check expected output keys
|
448
448
|
for key in expected_keys:
|
kailash/testing/fixtures.py
CHANGED
@@ -109,7 +109,7 @@ class AsyncWorkflowFixtures:
|
|
109
109
|
|
110
110
|
if engine == "postgresql":
|
111
111
|
# Start PostgreSQL container
|
112
|
-
container = client.containers.
|
112
|
+
container = client.containers.execute(
|
113
113
|
f"postgres:{tag}",
|
114
114
|
environment={
|
115
115
|
"POSTGRES_DB": database,
|
@@ -162,7 +162,7 @@ class AsyncWorkflowFixtures:
|
|
162
162
|
|
163
163
|
elif engine == "mysql":
|
164
164
|
# Start MySQL container
|
165
|
-
container = client.containers.
|
165
|
+
container = client.containers.execute(
|
166
166
|
f"mysql:{tag}",
|
167
167
|
environment={
|
168
168
|
"MYSQL_ROOT_PASSWORD": password,
|