kailash 0.1.5__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +2 -0
  21. kailash/nodes/ai/a2a.py +714 -67
  22. kailash/nodes/ai/intelligent_agent_orchestrator.py +31 -37
  23. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  24. kailash/nodes/ai/llm_agent.py +324 -1
  25. kailash/nodes/ai/self_organizing.py +5 -6
  26. kailash/nodes/base.py +15 -2
  27. kailash/nodes/base_async.py +45 -0
  28. kailash/nodes/base_cycle_aware.py +374 -0
  29. kailash/nodes/base_with_acl.py +338 -0
  30. kailash/nodes/code/python.py +135 -27
  31. kailash/nodes/data/readers.py +16 -6
  32. kailash/nodes/data/writers.py +16 -6
  33. kailash/nodes/logic/__init__.py +8 -0
  34. kailash/nodes/logic/convergence.py +642 -0
  35. kailash/nodes/logic/loop.py +153 -0
  36. kailash/nodes/logic/operations.py +187 -27
  37. kailash/nodes/mixins/__init__.py +11 -0
  38. kailash/nodes/mixins/mcp.py +228 -0
  39. kailash/nodes/mixins.py +387 -0
  40. kailash/runtime/__init__.py +2 -1
  41. kailash/runtime/access_controlled.py +458 -0
  42. kailash/runtime/local.py +106 -33
  43. kailash/runtime/parallel_cyclic.py +529 -0
  44. kailash/sdk_exceptions.py +90 -5
  45. kailash/security.py +845 -0
  46. kailash/tracking/manager.py +38 -15
  47. kailash/tracking/models.py +1 -1
  48. kailash/tracking/storage/filesystem.py +30 -2
  49. kailash/utils/__init__.py +8 -0
  50. kailash/workflow/__init__.py +18 -0
  51. kailash/workflow/convergence.py +270 -0
  52. kailash/workflow/cycle_analyzer.py +768 -0
  53. kailash/workflow/cycle_builder.py +573 -0
  54. kailash/workflow/cycle_config.py +709 -0
  55. kailash/workflow/cycle_debugger.py +760 -0
  56. kailash/workflow/cycle_exceptions.py +601 -0
  57. kailash/workflow/cycle_profiler.py +671 -0
  58. kailash/workflow/cycle_state.py +338 -0
  59. kailash/workflow/cyclic_runner.py +985 -0
  60. kailash/workflow/graph.py +500 -39
  61. kailash/workflow/migration.py +768 -0
  62. kailash/workflow/safety.py +365 -0
  63. kailash/workflow/templates.py +744 -0
  64. kailash/workflow/validation.py +693 -0
  65. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/METADATA +256 -12
  66. kailash-0.2.0.dist-info/RECORD +125 -0
  67. kailash/nodes/mcp/__init__.py +0 -11
  68. kailash/nodes/mcp/client.py +0 -554
  69. kailash/nodes/mcp/resource.py +0 -682
  70. kailash/nodes/mcp/server.py +0 -577
  71. kailash-0.1.5.dist-info/RECORD +0 -88
  72. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/WHEEL +0 -0
  73. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/entry_points.txt +0 -0
  74. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/licenses/LICENSE +0 -0
  75. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/top_level.txt +0 -0
kailash/nodes/mixins.py
@@ -0,0 +1,387 @@
+ """
+ Node mixins for the Kailash SDK.
+
+ This module provides mixins that add common functionality to nodes,
+ including security features, validation, and utility methods.
+
+ Design Philosophy:
+ - Composition over inheritance for optional features
+ - Security by default
+ - Minimal performance overhead
+ - Easy to integrate with existing nodes
+ """
+
+ import logging
+ from typing import Any, Dict, Optional
+
+ from kailash.security import (
+     SecurityConfig,
+     SecurityError,
+     get_security_config,
+     sanitize_input,
+     validate_node_parameters,
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ class SecurityMixin:
+     """
+     Mixin that adds security features to nodes.
+
+     This mixin provides:
+     - Input parameter validation and sanitization
+     - Security policy enforcement
+     - Audit logging for security events
+     - Protection against common attack vectors
+
+     Usage:
+         class MySecureNode(SecurityMixin, Node):
+             def run(self, **kwargs):
+                 # Input is automatically sanitized
+                 safe_params = self.validate_and_sanitize_inputs(kwargs)
+                 return self.process_safely(safe_params)
+     """
+
+     def __init__(
+         self, *args, security_config: Optional[SecurityConfig] = None, **kwargs
+     ):
+         """
+         Initialize security mixin.
+
+         Args:
+             security_config: Security configuration to use
+             *args: Arguments passed to parent class
+             **kwargs: Keyword arguments passed to parent class
+         """
+         super().__init__(*args, **kwargs)
+         self.security_config = security_config or get_security_config()
+
+         if self.security_config.enable_audit_logging:
+             logger.info(f"Security mixin initialized for {self.__class__.__name__}")
+
+     def validate_and_sanitize_inputs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
+         """
+         Validate and sanitize input parameters.
+
+         Args:
+             inputs: Dictionary of input parameters
+
+         Returns:
+             Dictionary of validated and sanitized parameters
+
+         Raises:
+             SecurityError: If validation fails
+         """
+         try:
+             # First validate using the security framework
+             validated_inputs = validate_node_parameters(inputs, self.security_config)
+
+             if self.security_config.enable_audit_logging:
+                 logger.debug(
+                     f"Inputs validated for {self.__class__.__name__}: {list(validated_inputs.keys())}"
+                 )
+
+             return validated_inputs
+
+         except SecurityError as e:
+             if self.security_config.enable_audit_logging:
+                 logger.error(
+                     f"Security validation failed for {self.__class__.__name__}: {e}"
+                 )
+             raise
+         except Exception as e:
+             if self.security_config.enable_audit_logging:
+                 logger.error(
+                     f"Unexpected validation error for {self.__class__.__name__}: {e}"
+                 )
+             raise SecurityError(f"Input validation failed: {e}")
+
+     def sanitize_single_input(self, value: Any, max_length: int = 10000) -> Any:
+         """
+         Sanitize a single input value.
+
+         Args:
+             value: Value to sanitize
+             max_length: Maximum string length
+
+         Returns:
+             Sanitized value
+         """
+         return sanitize_input(value, max_length, config=self.security_config)
+
+     def is_security_enabled(self) -> bool:
+         """Check if security features are enabled."""
+         return (
+             self.security_config.enable_path_validation
+             or self.security_config.enable_command_validation
+             or hasattr(self, "security_config")
+         )
+
+     def log_security_event(self, event: str, level: str = "INFO") -> None:
+         """
+         Log a security-related event.
+
+         Args:
+             event: Description of the security event
+             level: Log level (INFO, WARNING, ERROR)
+         """
+         if not self.security_config.enable_audit_logging:
+             return
+
+         log_msg = f"Security event in {self.__class__.__name__}: {event}"
+
+         if level.upper() == "ERROR":
+             logger.error(log_msg)
+         elif level.upper() == "WARNING":
+             logger.warning(log_msg)
+         else:
+             logger.info(log_msg)
+
+
+ class ValidationMixin:
+     """
+     Mixin that adds enhanced input validation to nodes.
+
+     This mixin provides:
+     - Type checking and conversion
+     - Range and constraint validation
+     - Custom validation rules
+     - Detailed error reporting
+     """
+
+     def validate_required_params(
+         self, inputs: Dict[str, Any], required_params: list
+     ) -> None:
+         """
+         Validate that all required parameters are present.
+
+         Args:
+             inputs: Input parameters
+             required_params: List of required parameter names
+
+         Raises:
+             ValueError: If required parameters are missing
+         """
+         missing_params = [param for param in required_params if param not in inputs]
+         if missing_params:
+             raise ValueError(f"Missing required parameters: {missing_params}")
+
+     def validate_param_types(
+         self, inputs: Dict[str, Any], type_mapping: Dict[str, type]
+     ) -> Dict[str, Any]:
+         """
+         Validate and convert parameter types.
+
+         Args:
+             inputs: Input parameters
+             type_mapping: Dictionary mapping parameter names to expected types
+
+         Returns:
+             Dictionary with converted types
+
+         Raises:
+             TypeError: If type conversion fails
+         """
+         converted = {}
+
+         for param_name, value in inputs.items():
+             if param_name in type_mapping:
+                 expected_type = type_mapping[param_name]
+                 try:
+                     if isinstance(value, expected_type):
+                         converted[param_name] = value
+                     else:
+                         converted[param_name] = expected_type(value)
+                 except (ValueError, TypeError) as e:
+                     raise TypeError(
+                         f"Cannot convert {param_name} to {expected_type.__name__}: {e}"
+                     )
+             else:
+                 converted[param_name] = value
+
+         return converted
+
+     def validate_param_ranges(
+         self, inputs: Dict[str, Any], range_mapping: Dict[str, tuple]
+     ) -> None:
+         """
+         Validate that numeric parameters are within acceptable ranges.
+
+         Args:
+             inputs: Input parameters
+             range_mapping: Dictionary mapping parameter names to (min, max) tuples
+
+         Raises:
+             ValueError: If parameters are out of range
+         """
+         for param_name, (min_val, max_val) in range_mapping.items():
+             if param_name in inputs:
+                 value = inputs[param_name]
+                 if isinstance(value, (int, float)):
+                     if value < min_val or value > max_val:
+                         raise ValueError(
+                             f"{param_name} must be between {min_val} and {max_val}, got {value}"
+                         )
+
+
+ class PerformanceMixin:
+     """
+     Mixin that adds performance monitoring to nodes.
+
+     This mixin provides:
+     - Execution time tracking
+     - Memory usage monitoring
+     - Performance metrics collection
+     - Optimization hints
+     """
+
+     def __init__(self, *args, **kwargs):
+         """Initialize performance mixin."""
+         super().__init__(*args, **kwargs)
+         self.execution_times = []
+         self.memory_usage = []
+         self.performance_enabled = True
+
+     def track_performance(self, func):
+         """
+         Decorator to track performance of node methods.
+
+         Args:
+             func: Function to wrap
+
+         Returns:
+             Wrapped function with performance tracking
+         """
+         import time
+         import tracemalloc
+         from functools import wraps
+
+         @wraps(func)
+         def wrapper(*args, **kwargs):
+             if not self.performance_enabled:
+                 return func(*args, **kwargs)
+
+             # Start tracking
+             start_time = time.time()
+             tracemalloc.start()
+
+             try:
+                 result = func(*args, **kwargs)
+                 return result
+             finally:
+                 # Record metrics
+                 execution_time = time.time() - start_time
+                 current, peak = tracemalloc.get_traced_memory()
+                 tracemalloc.stop()
+
+                 self.execution_times.append(execution_time)
+                 self.memory_usage.append(peak)
+
+                 if len(self.execution_times) > 100:  # Keep last 100 measurements
+                     self.execution_times = self.execution_times[-100:]
+                     self.memory_usage = self.memory_usage[-100:]
+
+         return wrapper
+
+     def get_performance_stats(self) -> Dict[str, Any]:
+         """
+         Get performance statistics for this node.
+
+         Returns:
+             Dictionary containing performance metrics
+         """
+         if not self.execution_times:
+             return {"status": "No performance data available"}
+
+         import statistics
+
+         return {
+             "executions": len(self.execution_times),
+             "avg_execution_time": statistics.mean(self.execution_times),
+             "min_execution_time": min(self.execution_times),
+             "max_execution_time": max(self.execution_times),
+             "avg_memory_usage": (
+                 statistics.mean(self.memory_usage) if self.memory_usage else 0
+             ),
+             "peak_memory_usage": max(self.memory_usage) if self.memory_usage else 0,
+         }
+
+     def reset_performance_stats(self) -> None:
+         """Reset performance statistics."""
+         self.execution_times.clear()
+         self.memory_usage.clear()
+
+
+ class LoggingMixin:
+     """
+     Mixin that adds enhanced logging capabilities to nodes.
+
+     This mixin provides:
+     - Structured logging with context
+     - Log level management
+     - Performance logging
+     - Debug information
+     """
+
+     def __init__(self, *args, log_level: str = "INFO", **kwargs):
+         """
+         Initialize logging mixin.
+
+         Args:
+             log_level: Default log level for this node
+             *args: Arguments passed to parent class
+             **kwargs: Keyword arguments passed to parent class
+         """
+         super().__init__(*args, **kwargs)
+         self.logger = logging.getLogger(
+             f"{self.__class__.__module__}.{self.__class__.__name__}"
+         )
+         self.logger.setLevel(getattr(logging, log_level.upper()))
+         self.log_context = {"node_class": self.__class__.__name__}
+
+     def log_with_context(self, level: str, message: str, **context) -> None:
+         """
+         Log a message with additional context.
+
+         Args:
+             level: Log level
+             message: Log message
+             **context: Additional context to include
+         """
+         full_context = {**self.log_context, **context}
+         context_str = " | ".join(f"{k}={v}" for k, v in full_context.items())
+         full_message = f"{message} | {context_str}"
+
+         log_func = getattr(self.logger, level.lower())
+         log_func(full_message)
+
+     def log_node_execution(self, operation: str, **context) -> None:
+         """
+         Log node execution information.
+
+         Args:
+             operation: Type of operation being performed
+             **context: Additional context
+         """
+         self.log_with_context("INFO", f"Node operation: {operation}", **context)
+
+     def log_error_with_traceback(
+         self, error: Exception, operation: str = "unknown"
+     ) -> None:
+         """
+         Log an error with full traceback information.
+
+         Args:
+             error: Exception that occurred
+             operation: Operation that failed
+         """
+         import traceback
+
+         self.log_with_context(
+             "ERROR",
+             f"Operation failed: {operation}",
+             error_type=type(error).__name__,
+             error_message=str(error),
+             traceback=traceback.format_exc(),
+         )
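Note: the new mixins rely on cooperative super().__init__() calls, so they are meant to sit before the node base class in the inheritance list, as the SecurityMixin docstring's usage example shows. The sketch below follows that documented pattern; it assumes the Node base class from kailash.nodes.base accepts this composition and a run(**kwargs) entry point, and the node body itself (SecureEchoNode, its "text" parameter) is purely illustrative and not part of the package.

from kailash.nodes.base import Node
from kailash.nodes.mixins import LoggingMixin, SecurityMixin, ValidationMixin

class SecureEchoNode(SecurityMixin, ValidationMixin, LoggingMixin, Node):
    """Hypothetical node layering the 0.2.0 mixins onto the Node base."""

    def run(self, **kwargs):
        # Sanitize all inputs first, then enforce presence and type rules.
        params = self.validate_and_sanitize_inputs(kwargs)
        self.validate_required_params(params, ["text"])
        params = self.validate_param_types(params, {"text": str})
        self.log_node_execution("echo", input_keys=list(params.keys()))
        return {"echo": params["text"]}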
kailash/runtime/__init__.py
@@ -1,6 +1,7 @@
  """Runtime engines for the Kailash SDK."""

  from kailash.runtime.local import LocalRuntime
+ from kailash.runtime.parallel_cyclic import ParallelCyclicRuntime
  from kailash.runtime.runner import WorkflowRunner

- __all__ = ["LocalRuntime", "WorkflowRunner"]
+ __all__ = ["LocalRuntime", "ParallelCyclicRuntime", "WorkflowRunner"]
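The only change here is re-exporting the new runtime from the package, so after this release it is importable alongside the existing entries. Its constructor and execution API live in kailash/runtime/parallel_cyclic.py and are not shown in this diff, so the snippet below only demonstrates the import path this change enables.

# Both names resolve from the package root after 0.2.0; how
# ParallelCyclicRuntime is configured and invoked is defined in
# parallel_cyclic.py and is not covered by this hunk.
from kailash.runtime import LocalRuntime, ParallelCyclicRuntime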