kailash 0.8.1__py3-none-any.whl → 0.8.4__py3-none-any.whl

@@ -103,6 +103,7 @@ ALLOWED_MODULES = {
     "glob",  # For file pattern matching
     "xml",  # For XML processing
     "uuid",  # For generating unique identifiers (safe, no I/O)
+    "hashlib",  # For cryptographic hashing (safe for common use cases)
 }
 
 
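The allow-list above governs which modules sandboxed PythonCodeNode code may import, so with this change a node body can hash data without leaving the sandbox. A minimal sketch, assuming the WorkflowBuilder import path and the add_node() call shape shown in the builder examples later in this diff:

    # Hedged sketch: using the newly whitelisted hashlib module inside a
    # PythonCodeNode body. The builder import path is an assumption.
    from kailash.workflow.builder import WorkflowBuilder

    workflow = WorkflowBuilder()
    workflow.add_node(
        "PythonCodeNode",
        "hasher",
        {
            # "hashlib" is now in ALLOWED_MODULES, so this import succeeds in the sandbox.
            "code": "import hashlib\nresult = hashlib.sha256(data.encode()).hexdigest()",
        },
    )
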
kailash/runtime/local.py CHANGED
@@ -43,6 +43,7 @@ import networkx as nx
 
 from kailash.nodes import Node
 from kailash.runtime.parameter_injector import WorkflowParameterInjector
+from kailash.runtime.secret_provider import EnvironmentSecretProvider, SecretProvider
 from kailash.sdk_exceptions import (
     RuntimeExecutionError,
     WorkflowExecutionError,
@@ -84,6 +85,7 @@ class LocalRuntime:
         enable_security: bool = False,
         enable_audit: bool = False,
         resource_limits: Optional[dict[str, Any]] = None,
+        secret_provider: Optional[Any] = None,
     ):
         """Initialize the unified runtime.
 
@@ -97,12 +99,14 @@ class LocalRuntime:
             enable_security: Whether to enable security features.
             enable_audit: Whether to enable audit logging.
             resource_limits: Resource limits (memory_mb, cpu_cores, etc.).
+            secret_provider: Optional secret provider for runtime secret injection.
         """
         self.debug = debug
         self.enable_cycles = enable_cycles
         self.enable_async = enable_async
         self.max_concurrency = max_concurrency
         self.user_context = user_context
+        self.secret_provider = secret_provider
         self.enable_monitoring = enable_monitoring
         self.enable_security = enable_security
         self.enable_audit = enable_audit
@@ -132,6 +136,22 @@ class LocalRuntime:
             "user_context": user_context,
         }
 
+    def _extract_secret_requirements(self, workflow: "Workflow") -> list:
+        """Extract secret requirements from workflow nodes.
+
+        Args:
+            workflow: Workflow to analyze
+
+        Returns:
+            List of secret requirements
+        """
+        requirements = []
+        for node_id, node in workflow.nodes.items():
+            if hasattr(node, "get_secret_requirements"):
+                node_requirements = node.get_secret_requirements()
+                requirements.extend(node_requirements)
+        return requirements
+
     def execute(
         self,
         workflow: Workflow,
@@ -1057,6 +1077,52 @@ class LocalRuntime:
         for warning in warnings:
             self.logger.warning(f"Parameter validation: {warning}")
 
+        # Inject secrets into the processed parameters
+        if self.secret_provider:
+            # Get secret requirements from workflow nodes
+            requirements = self._extract_secret_requirements(workflow)
+            if requirements:
+                # Fetch secrets from provider
+                secrets = self.secret_provider.get_secrets(requirements)
+
+                # Inject secrets into workflow-level parameters
+                if secrets:
+                    # If we have workflow-level parameters, add secrets to them
+                    if workflow_level_params:
+                        workflow_level_params.update(secrets)
+
+                        # Re-inject workflow parameters with secrets
+                        injector = WorkflowParameterInjector(workflow, debug=self.debug)
+                        injected_params = injector.transform_workflow_parameters(
+                            workflow_level_params
+                        )
+
+                        # Merge secret-enhanced parameters
+                        for node_id, node_params in injected_params.items():
+                            if node_id not in result:
+                                result[node_id] = {}
+                            for param_name, param_value in node_params.items():
+                                if param_name not in result[node_id]:
+                                    result[node_id][param_name] = param_value
+                    else:
+                        # Create workflow-level parameters from secrets only
+                        injector = WorkflowParameterInjector(workflow, debug=self.debug)
+                        injected_params = injector.transform_workflow_parameters(
+                            secrets
+                        )
+
+                        # Merge secret parameters
+                        for node_id, node_params in injected_params.items():
+                            if node_id not in result:
+                                result[node_id] = {}
+                            for param_name, param_value in node_params.items():
+                                if param_name not in result[node_id]:
+                                    result[node_id][param_name] = param_value
+
+                    # Ensure result is not None if we added secrets
+                    if result is None:
+                        result = {}
+
         return result if result else None
 
     def _separate_parameter_formats(
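With this hook in place, LocalRuntime can resolve secrets at execution time instead of reading them from node parameters or ad-hoc environment lookups inside node code. A minimal sketch of the wiring, assuming at least one node in the workflow exposes get_secret_requirements(); the variable name follows the KAILASH_SECRET_ convention of the provider module added below:

    # Hedged sketch: runtime-side secret injection via the environment provider.
    import os

    from kailash.runtime.local import LocalRuntime
    from kailash.runtime.secret_provider import EnvironmentSecretProvider

    # Normally set outside the process (shell, container, orchestrator).
    os.environ["KAILASH_SECRET_JWT_SIGNING_KEY"] = "dev-only-example-value"

    runtime = LocalRuntime(secret_provider=EnvironmentSecretProvider())
    # During execute(), nodes exposing get_secret_requirements() have their
    # requirements resolved by the provider and merged into node parameters,
    # e.g. secret "jwt-signing-key" -> parameter "secret_key".
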
kailash/runtime/secret_provider.py ADDED
@@ -0,0 +1,293 @@
+"""Runtime secret management interface and providers.
+
+This module provides the SecretProvider interface and implementations for
+injecting secrets at runtime, eliminating the need to embed secrets in
+environment variables or workflow parameters.
+"""
+
+import json
+import logging
+import os
+from abc import ABC, abstractmethod
+from typing import Any, Dict, List, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class SecretRequirement:
+    """Metadata for a required secret."""
+
+    def __init__(
+        self,
+        name: str,
+        parameter_name: str,
+        version: Optional[str] = None,
+        optional: bool = False,
+    ):
+        """Initialize secret requirement.
+
+        Args:
+            name: Secret name in the provider (e.g., "jwt-signing-key")
+            parameter_name: Parameter name in the node (e.g., "secret_key")
+            version: Optional version identifier
+            optional: Whether this secret is optional
+        """
+        self.name = name
+        self.parameter_name = parameter_name
+        self.version = version
+        self.optional = optional
+
+
+class SecretProvider(ABC):
+    """Base interface for secret providers."""
+
+    @abstractmethod
+    def get_secret(self, name: str, version: Optional[str] = None) -> str:
+        """Fetch a secret by name and optional version.
+
+        Args:
+            name: Secret name
+            version: Optional version identifier
+
+        Returns:
+            Secret value as string
+
+        Raises:
+            SecretNotFoundError: If secret doesn't exist
+            SecretProviderError: If provider operation fails
+        """
+        pass
+
+    @abstractmethod
+    def list_secrets(self) -> List[str]:
+        """List available secrets.
+
+        Returns:
+            List of secret names
+        """
+        pass
+
+    def get_secrets(self, requirements: List[SecretRequirement]) -> Dict[str, str]:
+        """Fetch multiple secrets based on requirements.
+
+        Args:
+            requirements: List of secret requirements
+
+        Returns:
+            Dictionary mapping parameter names to secret values
+        """
+        secrets = {}
+        for req in requirements:
+            try:
+                secret_value = self.get_secret(req.name, req.version)
+                secrets[req.parameter_name] = secret_value
+            except Exception as e:
+                if req.optional:
+                    logger.warning(f"Optional secret {req.name} not found: {e}")
+                    continue
+                else:
+                    raise
+        return secrets
+
+
+class EnvironmentSecretProvider(SecretProvider):
+    """Secret provider that fetches secrets from environment variables.
+
+    This provider maintains backward compatibility by reading secrets from
+    environment variables, but provides a secure interface for runtime injection.
+    """
+
+    def __init__(self, prefix: str = "KAILASH_SECRET_"):
+        """Initialize environment secret provider.
+
+        Args:
+            prefix: Prefix for environment variables containing secrets
+        """
+        self.prefix = prefix
+
+    def get_secret(self, name: str, version: Optional[str] = None) -> str:
+        """Get secret from environment variable.
+
+        Args:
+            name: Secret name (will be prefixed and uppercased)
+            version: Ignored for environment provider
+
+        Returns:
+            Secret value from environment
+
+        Raises:
+            SecretNotFoundError: If environment variable not found
+        """
+        # Convert name to environment variable format
+        env_name = f"{self.prefix}{name.upper().replace('-', '_')}"
+
+        secret_value = os.environ.get(env_name)
+        if secret_value is None:
+            # Try without prefix for backward compatibility
+            secret_value = os.environ.get(name.upper().replace("-", "_"))
+
+        if secret_value is None:
+            raise SecretNotFoundError(
+                f"Secret '{name}' not found in environment variables"
+            )
+
+        return secret_value
+
+    def list_secrets(self) -> List[str]:
+        """List all secrets available in environment.
+
+        Returns:
+            List of secret names (without prefix)
+        """
+        secrets = []
+        for key in os.environ:
+            if key.startswith(self.prefix):
+                # Remove prefix and convert back to secret name format
+                secret_name = key[len(self.prefix) :].lower().replace("_", "-")
+                secrets.append(secret_name)
+        return secrets
+
+
+class VaultSecretProvider(SecretProvider):
+    """Secret provider for HashiCorp Vault.
+
+    This provider integrates with HashiCorp Vault for enterprise secret management.
+    """
+
+    def __init__(self, vault_url: str, vault_token: str, mount_path: str = "secret"):
+        """Initialize Vault secret provider.
+
+        Args:
+            vault_url: Vault server URL
+            vault_token: Vault authentication token
+            mount_path: Vault mount path for secrets
+        """
+        self.vault_url = vault_url
+        self.vault_token = vault_token
+        self.mount_path = mount_path
+        self._client = None
+
+    @property
+    def client(self):
+        """Lazy initialization of Vault client."""
+        if self._client is None:
+            try:
+                import hvac
+
+                self._client = hvac.Client(url=self.vault_url, token=self.vault_token)
+            except ImportError:
+                raise RuntimeError(
+                    "hvac library not installed. Install with: pip install hvac"
+                )
+        return self._client
+
+    def get_secret(self, name: str, version: Optional[str] = None) -> str:
+        """Get secret from Vault.
+
+        Args:
+            name: Secret path in Vault
+            version: Optional version (for KV v2)
+
+        Returns:
+            Secret value
+        """
+        try:
+            # Try KV v2 first
+            response = self.client.secrets.kv.v2.read_secret_version(
+                path=name, version=version, mount_point=self.mount_path
+            )
+            return response["data"]["data"]["value"]
+        except Exception:
+            # Fall back to KV v1
+            response = self.client.secrets.kv.v1.read_secret(
+                path=name, mount_point=self.mount_path
+            )
+            return response["data"]["value"]
+
+    def list_secrets(self) -> List[str]:
+        """List all secrets in Vault.
+
+        Returns:
+            List of secret paths
+        """
+        try:
+            response = self.client.secrets.kv.v2.list_secrets(
+                path="", mount_point=self.mount_path
+            )
+            return response["data"]["keys"]
+        except Exception:
+            # Fall back to KV v1
+            response = self.client.secrets.kv.v1.list_secrets(
+                path="", mount_point=self.mount_path
+            )
+            return response["data"]["keys"]
+
+
+class AWSSecretProvider(SecretProvider):
+    """Secret provider for AWS Secrets Manager.
+
+    This provider integrates with AWS Secrets Manager for cloud-native secret management.
+    """
+
+    def __init__(self, region_name: str = "us-east-1"):
+        """Initialize AWS secret provider.
+
+        Args:
+            region_name: AWS region
+        """
+        self.region_name = region_name
+        self._client = None
+
+    @property
+    def client(self):
+        """Lazy initialization of AWS client."""
+        if self._client is None:
+            try:
+                import boto3
+
+                self._client = boto3.client(
+                    "secretsmanager", region_name=self.region_name
+                )
+            except ImportError:
+                raise RuntimeError(
+                    "boto3 library not installed. Install with: pip install boto3"
+                )
+        return self._client
+
+    def get_secret(self, name: str, version: Optional[str] = None) -> str:
+        """Get secret from AWS Secrets Manager.
+
+        Args:
+            name: Secret name in AWS
+            version: Optional version ID
+
+        Returns:
+            Secret value
+        """
+        kwargs = {"SecretId": name}
+        if version:
+            kwargs["VersionId"] = version
+
+        response = self.client.get_secret_value(**kwargs)
+        return response["SecretString"]
+
+    def list_secrets(self) -> List[str]:
+        """List all secrets in AWS Secrets Manager.
+
+        Returns:
+            List of secret names
+        """
+        response = self.client.list_secrets()
+        return [secret["Name"] for secret in response["SecretList"]]
+
+
+class SecretNotFoundError(Exception):
+    """Raised when a secret cannot be found."""
+
+    pass
+
+
+class SecretProviderError(Exception):
+    """Raised when a secret provider operation fails."""
+
+    pass
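The providers can also be exercised on their own, outside a workflow. A short sketch against the requirement/get_secrets() contract defined above (the secret names are illustrative):

    # Hedged sketch: resolving requirements directly with the environment provider.
    import os

    from kailash.runtime.secret_provider import (
        EnvironmentSecretProvider,
        SecretRequirement,
    )

    os.environ["KAILASH_SECRET_JWT_SIGNING_KEY"] = "dev-only-example-value"

    provider = EnvironmentSecretProvider()
    requirements = [
        SecretRequirement("jwt-signing-key", parameter_name="secret_key"),
        # Optional requirements that cannot be resolved are logged and skipped.
        SecretRequirement("slack-webhook", parameter_name="webhook_url", optional=True),
    ]

    secrets = provider.get_secrets(requirements)
    # -> {"secret_key": "dev-only-example-value"}

VaultSecretProvider and AWSSecretProvider expose the same get_secret()/list_secrets() interface, importing hvac and boto3 lazily so neither dependency is required unless that provider is used.
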
@@ -380,18 +380,89 @@ class WorkflowBuilder:
             WorkflowValidationError: If nodes don't exist
             ConnectionError: If connection is invalid
         """
+        # Enhanced error messages with helpful suggestions
         if from_node not in self.nodes:
-            raise WorkflowValidationError(
-                f"Source node '{from_node}' not found in workflow"
-            )
+            available_nodes = list(self.nodes.keys())
+            similar_nodes = [
+                n
+                for n in available_nodes
+                if from_node.lower() in n.lower() or n.lower() in from_node.lower()
+            ]
+
+            error_msg = f"Source node '{from_node}' not found in workflow."
+            if available_nodes:
+                error_msg += f"\nAvailable nodes: {available_nodes}"
+            if similar_nodes:
+                error_msg += f"\nDid you mean: {similar_nodes}?"
+            error_msg += "\n\nTip: Use workflow.add_node() to create nodes before connecting them."
+            error_msg += f"\nExample: workflow.add_node('CSVReaderNode', '{from_node}', {{'file_path': 'data.csv'}})"
+
+            raise WorkflowValidationError(error_msg)
+
         if to_node not in self.nodes:
-            raise WorkflowValidationError(
-                f"Target node '{to_node}' not found in workflow"
+            available_nodes = list(self.nodes.keys())
+            similar_nodes = [
+                n
+                for n in available_nodes
+                if to_node.lower() in n.lower() or n.lower() in to_node.lower()
+            ]
+
+            error_msg = f"Target node '{to_node}' not found in workflow."
+            if available_nodes:
+                error_msg += f"\nAvailable nodes: {available_nodes}"
+            if similar_nodes:
+                error_msg += f"\nDid you mean: {similar_nodes}?"
+            error_msg += "\n\nTip: Use workflow.add_node() to create nodes before connecting them."
+            error_msg += f"\nExample: workflow.add_node('PythonCodeNode', '{to_node}', {{'code': 'result = data'}})"
+
+            raise WorkflowValidationError(error_msg)
+
+        # Self-connection check with helpful message
+        if from_node == to_node:
+            raise ConnectionError(
+                f"Cannot connect node '{from_node}' to itself.\n"
+                f"Tip: Consider using intermediate nodes or different port names.\n"
+                f"Example: Create a separate processing node between input and output."
             )
 
-        # Self-connection check
-        if from_node == to_node:
-            raise ConnectionError(f"Cannot connect node '{from_node}' to itself")
+        # REFINED: Enhanced duplicate connection detection
+        for existing_conn in self.connections:
+            if (
+                existing_conn["from_node"] == from_node
+                and existing_conn["from_output"] == from_output
+                and existing_conn["to_node"] == to_node
+                and existing_conn["to_input"] == to_input
+            ):
+                raise ConnectionError(
+                    f"Duplicate connection detected: {from_node}.{from_output} -> {to_node}.{to_input}\n"
+                    f"This connection already exists in the workflow.\n"
+                    f"Tip: Remove the duplicate add_connection() call or use different port names.\n"
+                    f"Current connections: {len(self.connections)} total"
+                )
+
+        # Enhanced port validation with suggestions
+        common_output_ports = [
+            "data",
+            "result",
+            "output",
+            "response",
+            "content",
+            "value",
+        ]
+        common_input_ports = ["data", "input", "input_data", "content", "value"]
+
+        # Log port usage patterns for debugging
+        if from_output not in common_output_ports:
+            logger.debug(
+                f"Using non-standard output port '{from_output}' on node '{from_node}'"
+            )
+            logger.debug(f"Common output ports: {common_output_ports}")
+
+        if to_input not in common_input_ports:
+            logger.debug(
+                f"Using non-standard input port '{to_input}' on node '{to_node}'"
+            )
+            logger.debug(f"Common input ports: {common_input_ports}")
 
         # Add connection to list
         connection = {
@@ -402,7 +473,15 @@ class WorkflowBuilder:
         }
         self.connections.append(connection)
 
-        logger.info(f"Connected '{from_node}.{from_output}' to '{to_node}.{to_input}'")
+        logger.info(f"Connected '{from_node}.{from_output}' -> '{to_node}.{to_input}'")
+
+        # Provide helpful tips for common connection patterns
+        if from_output == to_input == "data":
+            logger.debug("Using standard data flow connection pattern")
+        elif from_output in ["result", "output"] and to_input in ["data", "input"]:
+            logger.debug("Using result-to-input connection pattern")
+        else:
+            logger.debug(f"Using custom port mapping: {from_output} -> {to_input}")
         return self
 
     def connect(
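To illustrate the enriched validation, a small sketch; the WorkflowBuilder import path is an assumption, while the node types and add_connection() signature come from the hunks above:

    # Hedged sketch: the new connection diagnostics in practice.
    from kailash.workflow.builder import WorkflowBuilder

    workflow = WorkflowBuilder()
    workflow.add_node("CSVReaderNode", "reader", {"file_path": "data.csv"})

    try:
        # "writer" was never added, so this raises WorkflowValidationError with
        # the available nodes, near-matches, and an add_node() example.
        workflow.add_connection("reader", "data", "writer", "data")
    except Exception as exc:
        print(exc)

    # Repeating an identical connection raises ConnectionError ("Duplicate
    # connection detected"), and non-standard port names are logged at DEBUG level.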