kailash 0.1.5__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +2 -0
  21. kailash/nodes/ai/a2a.py +714 -67
  22. kailash/nodes/ai/intelligent_agent_orchestrator.py +31 -37
  23. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  24. kailash/nodes/ai/llm_agent.py +324 -1
  25. kailash/nodes/ai/self_organizing.py +5 -6
  26. kailash/nodes/base.py +15 -2
  27. kailash/nodes/base_async.py +45 -0
  28. kailash/nodes/base_cycle_aware.py +374 -0
  29. kailash/nodes/base_with_acl.py +338 -0
  30. kailash/nodes/code/python.py +135 -27
  31. kailash/nodes/data/readers.py +16 -6
  32. kailash/nodes/data/writers.py +16 -6
  33. kailash/nodes/logic/__init__.py +8 -0
  34. kailash/nodes/logic/convergence.py +642 -0
  35. kailash/nodes/logic/loop.py +153 -0
  36. kailash/nodes/logic/operations.py +187 -27
  37. kailash/nodes/mixins/__init__.py +11 -0
  38. kailash/nodes/mixins/mcp.py +228 -0
  39. kailash/nodes/mixins.py +387 -0
  40. kailash/runtime/__init__.py +2 -1
  41. kailash/runtime/access_controlled.py +458 -0
  42. kailash/runtime/local.py +106 -33
  43. kailash/runtime/parallel_cyclic.py +529 -0
  44. kailash/sdk_exceptions.py +90 -5
  45. kailash/security.py +845 -0
  46. kailash/tracking/manager.py +38 -15
  47. kailash/tracking/models.py +1 -1
  48. kailash/tracking/storage/filesystem.py +30 -2
  49. kailash/utils/__init__.py +8 -0
  50. kailash/workflow/__init__.py +18 -0
  51. kailash/workflow/convergence.py +270 -0
  52. kailash/workflow/cycle_analyzer.py +768 -0
  53. kailash/workflow/cycle_builder.py +573 -0
  54. kailash/workflow/cycle_config.py +709 -0
  55. kailash/workflow/cycle_debugger.py +760 -0
  56. kailash/workflow/cycle_exceptions.py +601 -0
  57. kailash/workflow/cycle_profiler.py +671 -0
  58. kailash/workflow/cycle_state.py +338 -0
  59. kailash/workflow/cyclic_runner.py +985 -0
  60. kailash/workflow/graph.py +500 -39
  61. kailash/workflow/migration.py +768 -0
  62. kailash/workflow/safety.py +365 -0
  63. kailash/workflow/templates.py +744 -0
  64. kailash/workflow/validation.py +693 -0
  65. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/METADATA +256 -12
  66. kailash-0.2.0.dist-info/RECORD +125 -0
  67. kailash/nodes/mcp/__init__.py +0 -11
  68. kailash/nodes/mcp/client.py +0 -554
  69. kailash/nodes/mcp/resource.py +0 -682
  70. kailash/nodes/mcp/server.py +0 -577
  71. kailash-0.1.5.dist-info/RECORD +0 -88
  72. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/WHEEL +0 -0
  73. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/entry_points.txt +0 -0
  74. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/licenses/LICENSE +0 -0
  75. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/top_level.txt +0 -0
kailash/tracking/manager.py CHANGED
@@ -1,7 +1,7 @@
  """Task manager for workflow execution tracking."""
 
  import logging
- from datetime import datetime, timedelta
+ from datetime import datetime, timedelta, timezone
  from typing import Any, Dict, List, Optional
 
  from kailash.sdk_exceptions import StorageException, TaskException, TaskStateError
@@ -678,11 +678,28 @@ class TaskManager:
                  )
              else:
                  # Fallback for MockStorage
-                 return [
-                     t
-                     for t in self.storage.get_all_tasks()
-                     if t.created_at >= start_time and t.created_at <= end_time
-                 ]
+                 tasks = []
+                 for t in self.storage.get_all_tasks():
+                     # Ensure timezone-aware comparison
+                     task_created_at = t.created_at
+                     if task_created_at and task_created_at.tzinfo is None:
+                         task_created_at = task_created_at.replace(tzinfo=timezone.utc)
+
+                     start_aware = start_time
+                     if start_aware.tzinfo is None:
+                         start_aware = start_aware.replace(tzinfo=timezone.utc)
+
+                     end_aware = end_time
+                     if end_aware.tzinfo is None:
+                         end_aware = end_aware.replace(tzinfo=timezone.utc)
+
+                     if (
+                         task_created_at
+                         and task_created_at >= start_aware
+                         and task_created_at <= end_aware
+                     ):
+                         tasks.append(t)
+                 return tasks
          except Exception as e:
              raise StorageException(f"Failed to query tasks by timerange: {e}") from e
 
@@ -734,18 +751,24 @@ class TaskManager:
          except Exception as e:
              raise StorageException(f"Failed to get tasks for cleanup: {e}") from e
 
-         cutoff = datetime.now() - timedelta(days=days)
+         cutoff = datetime.now(timezone.utc) - timedelta(days=days)
          deleted = 0
 
          for task in tasks:
-             if task.created_at and task.created_at < cutoff:
-                 try:
-                     self.delete_task(task.task_id)
-                     deleted += 1
-                 except Exception as e:
-                     self.logger.warning(
-                         f"Failed to delete old task {task.task_id}: {e}"
-                     )
+             if task.created_at:
+                 # Ensure timezone-aware comparison
+                 task_created_at = task.created_at
+                 if task_created_at.tzinfo is None:
+                     task_created_at = task_created_at.replace(tzinfo=timezone.utc)
+
+                 if task_created_at < cutoff:
+                     try:
+                         self.delete_task(task.task_id)
+                         deleted += 1
+                     except Exception as e:
+                         self.logger.warning(
+                             f"Failed to delete old task {task.task_id}: {e}"
+                         )
 
          return deleted
 
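The timezone normalization added above addresses a concrete failure mode: Python refuses to compare an offset-naive datetime with an offset-aware one. A minimal standalone sketch of the problem and of the "assume UTC" fix used in the fallback path (values here are illustrative):

    from datetime import datetime, timezone

    naive = datetime(2024, 1, 1, 12, 0)                        # e.g. a timestamp stored without tzinfo
    aware = datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)   # e.g. a query bound built with timezone.utc

    # naive <= aware would raise:
    # TypeError: can't compare offset-naive and offset-aware datetimes
    normalized = naive.replace(tzinfo=timezone.utc)            # treat the stored value as UTC
    print(normalized <= aware)                                 # True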
kailash/tracking/models.py CHANGED
@@ -91,7 +91,7 @@ class TaskRun(BaseModel):
      completed_at: Optional[datetime] = (
          None  # Alias for ended_at for backward compatibility
      )
-     created_at: datetime = Field(default_factory=datetime.utcnow)
+     created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
      result: Optional[Dict[str, Any]] = None
      error: Optional[str] = None
      metadata: Dict[str, Any] = Field(default_factory=dict)
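The new default_factory produces timezone-aware timestamps: datetime.utcnow() returns a naive datetime (and is deprecated as of Python 3.12), while datetime.now(timezone.utc) attaches tzinfo. A quick illustration:

    from datetime import datetime, timezone

    print(datetime.utcnow().tzinfo)            # None  (naive; deprecated since Python 3.12)
    print(datetime.now(timezone.utc).tzinfo)   # UTC   (aware)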
kailash/tracking/storage/filesystem.py CHANGED
@@ -83,7 +83,22 @@ class FileSystemStorage(StorageBackend):
                  continue
 
          # Sort by started_at (newest first)
-         runs.sort(key=lambda r: r.started_at, reverse=True)
+         # Handle potential timezone-naive datetime objects
+         def safe_datetime_key(run):
+             if run.started_at:
+                 # Ensure datetime is timezone-aware
+                 if run.started_at.tzinfo is None:
+                     # Assume UTC for naive datetimes
+                     from datetime import timezone
+
+                     return run.started_at.replace(tzinfo=timezone.utc)
+                 return run.started_at
+             # Return a very old date for runs without started_at
+             from datetime import datetime, timezone
+
+             return datetime.min.replace(tzinfo=timezone.utc)
+
+         runs.sort(key=safe_datetime_key, reverse=True)
          return runs
 
      def save_task(self, task: TaskRun) -> None:
@@ -225,7 +240,20 @@ class FileSystemStorage(StorageBackend):
                  continue
 
          # Sort by started_at
-         tasks.sort(key=lambda t: t.started_at or t.task_id)
+         # Handle potential timezone-naive datetime objects
+         def safe_task_sort_key(task):
+             if task.started_at:
+                 # Ensure datetime is timezone-aware
+                 if task.started_at.tzinfo is None:
+                     # Assume UTC for naive datetimes
+                     from datetime import timezone
+
+                     return task.started_at.replace(tzinfo=timezone.utc)
+                 return task.started_at
+             # Use task_id as fallback for tasks without started_at
+             return task.task_id
+
+         tasks.sort(key=safe_task_sort_key)
          return tasks
 
      def clear(self) -> None:
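Both sort keys apply the same normalization as the task manager: treat any naive started_at as UTC so that sorted() never compares naive and aware values. The pattern in isolation (names here are illustrative, not SDK API):

    from datetime import datetime, timezone

    def as_utc(dt):
        # Treat naive datetimes as UTC so mixed lists sort cleanly
        return dt if dt.tzinfo is not None else dt.replace(tzinfo=timezone.utc)

    stamps = [
        datetime(2024, 1, 2, 9, 0),                       # naive
        datetime(2024, 1, 1, 9, 0, tzinfo=timezone.utc),  # aware
    ]
    print(sorted(stamps, key=as_utc, reverse=True))       # newest first, no TypeError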
kailash/utils/__init__.py CHANGED
@@ -0,0 +1,8 @@
+ """Internal utilities for the Kailash SDK.
+
+ This package contains internal implementation utilities that are not part of the public API.
+ Users should not directly import from this package.
+ """
+
+ # Internal utilities - not part of public API
+ __all__ = []
kailash/workflow/__init__.py CHANGED
@@ -1,6 +1,15 @@
  """Workflow system for the Kailash SDK."""
 
  from kailash.workflow.builder import WorkflowBuilder
+ from kailash.workflow.cycle_analyzer import CycleAnalyzer
+ from kailash.workflow.cycle_builder import CycleBuilder
+ from kailash.workflow.cycle_config import CycleConfig, CycleTemplates
+ from kailash.workflow.cycle_debugger import (
+     CycleDebugger,
+     CycleExecutionTrace,
+     CycleIteration,
+ )
+ from kailash.workflow.cycle_profiler import CycleProfiler, PerformanceMetrics
  from kailash.workflow.graph import Connection, NodeInstance, Workflow
  from kailash.workflow.mermaid_visualizer import MermaidVisualizer
  from kailash.workflow.visualization import WorkflowVisualizer
@@ -12,4 +21,13 @@ __all__ = [
      "WorkflowVisualizer",
      "MermaidVisualizer",
      "WorkflowBuilder",
+     "CycleBuilder",
+     "CycleConfig",
+     "CycleTemplates",
+     "CycleDebugger",
+     "CycleExecutionTrace",
+     "CycleIteration",
+     "CycleProfiler",
+     "PerformanceMetrics",
+     "CycleAnalyzer",
  ]
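With these re-exports, the cycle tooling is importable directly from kailash.workflow rather than from the individual cycle_* submodules listed above. For example (import only; constructor signatures are not shown in this diff):

    from kailash.workflow import (
        CycleAnalyzer,
        CycleBuilder,
        CycleConfig,
        CycleDebugger,
        CycleProfiler,
    )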
kailash/workflow/convergence.py CHANGED
@@ -0,0 +1,270 @@
+ """Convergence condition system for cycle termination in workflows."""
+
+ import logging
+ from abc import ABC, abstractmethod
+ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
+
+ if TYPE_CHECKING:
+     from kailash.workflow.cycle_state import CycleState
+
+ logger = logging.getLogger(__name__)
+
+
+ class ConvergenceCondition(ABC):
+     """Base class for cycle convergence conditions."""
+
+     @abstractmethod
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Evaluate if cycle should terminate.
+
+         Args:
+             results: Current iteration results from nodes
+             cycle_state: Current cycle state with history
+
+         Returns:
+             True if cycle should terminate, False to continue
+         """
+         raise NotImplementedError
+
+     def describe(self) -> str:
+         """Describe the convergence condition for logging."""
+         return self.__class__.__name__
+
+
+ class ExpressionCondition(ConvergenceCondition):
+     """Expression-based convergence condition.
+
+     Examples:
+         - "quality_score > 0.9"
+         - "iteration >= 10"
+         - "abs(loss_improvement) < 0.001"
+     """
+
+     def __init__(self, expression: str):
+         """Initialize with expression string.
+
+         Args:
+             expression: Python expression to evaluate
+         """
+         self.expression = expression
+
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Evaluate expression with results and cycle state context."""
+         # Create evaluation context
+         context = {
+             "results": results,
+             "iteration": cycle_state.iteration,
+             "history": cycle_state.history,
+             "elapsed_time": cycle_state.elapsed_time,
+             # Add common math functions
+             "abs": abs,
+             "min": min,
+             "max": max,
+             "sum": sum,
+             "len": len,
+             "all": all,
+             "any": any,
+         }
+
+         # Add all result values to top-level context for easier access
+         for node_id, node_result in results.items():
+             if isinstance(node_id, str) and node_id.isidentifier():
+                 context[node_id] = node_result
+
+             # Also extract scalar values from node results for convenience
+             if isinstance(node_result, dict):
+                 # Check if this is a PythonCodeNode result with 'result' key
+                 if "result" in node_result and isinstance(
+                     node_result["result"], dict
+                 ):
+                     # Extract from nested result
+                     for key, value in node_result["result"].items():
+                         if isinstance(key, str) and key.isidentifier():
+                             if isinstance(value, (int, float, str, bool)):
+                                 context[key] = value
+                 else:
+                     # Extract from top level
+                     for key, value in node_result.items():
+                         if isinstance(key, str) and key.isidentifier():
+                             # Only add scalar values to avoid conflicts
+                             if isinstance(value, (int, float, str, bool)):
+                                 context[key] = value
+
+         try:
+             # Safe evaluation with restricted builtins
+             logger.debug(f"Evaluating expression: {self.expression}")
+             logger.debug(f"Context variables: {list(context.keys())}")
+             logger.debug(
+                 f"should_continue value: {context.get('should_continue', 'NOT FOUND')}"
+             )
+             result = eval(self.expression, {"__builtins__": {}}, context)
+             logger.debug(f"Expression result: {result} -> {bool(result)}")
+             return bool(result)
+         except Exception as e:
+             logger.warning(
+                 f"Expression evaluation failed: {e}. Expression: {self.expression}"
+             )
+             # On error, terminate cycle for safety
+             return True
+
+     def describe(self) -> str:
+         """Describe the expression condition."""
+         return f"ExpressionCondition: {self.expression}"
+
+
+ class CallbackCondition(ConvergenceCondition):
+     """Callback-based convergence condition for complex logic."""
+
+     def __init__(
+         self,
+         callback: Callable[[Dict[str, Any], "CycleState"], bool],
+         name: Optional[str] = None,
+     ):
+         """Initialize with callback function.
+
+         Args:
+             callback: Function that takes (results, cycle_state) and returns bool
+             name: Optional name for the callback
+         """
+         self.callback = callback
+         self.name = name or callback.__name__
+
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Evaluate callback with results and cycle state."""
+         try:
+             return self.callback(results, cycle_state)
+         except Exception as e:
+             logger.warning(f"Callback evaluation failed: {e}. Callback: {self.name}")
+             # On error, terminate cycle for safety
+             return True
+
+     def describe(self) -> str:
+         """Describe the callback condition."""
+         return f"CallbackCondition: {self.name}"
+
+
+ class MaxIterationsCondition(ConvergenceCondition):
+     """Simple iteration limit condition."""
+
+     def __init__(self, max_iterations: int):
+         """Initialize with maximum iteration count.
+
+         Args:
+             max_iterations: Maximum number of iterations allowed
+         """
+         self.max_iterations = max_iterations
+
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Check if maximum iterations reached."""
+         return cycle_state.iteration >= self.max_iterations
+
+     def describe(self) -> str:
+         """Describe the iteration limit."""
+         return f"MaxIterationsCondition: {self.max_iterations}"
+
+
+ class CompoundCondition(ConvergenceCondition):
+     """Combine multiple conditions with AND/OR logic."""
+
+     def __init__(self, conditions: List[ConvergenceCondition], operator: str = "OR"):
+         """Initialize with list of conditions.
+
+         Args:
+             conditions: List of convergence conditions
+             operator: "AND" or "OR" to combine conditions
+         """
+         self.conditions = conditions
+         self.operator = operator.upper()
+         if self.operator not in ["AND", "OR"]:
+             raise ValueError("Operator must be 'AND' or 'OR'")
+
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Evaluate all conditions with specified operator."""
+         evaluations = [cond.evaluate(results, cycle_state) for cond in self.conditions]
+
+         if self.operator == "AND":
+             return all(evaluations)
+         else:  # OR
+             return any(evaluations)
+
+     def describe(self) -> str:
+         """Describe the compound condition."""
+         conditions_desc = [cond.describe() for cond in self.conditions]
+         return f"CompoundCondition({self.operator}): [{', '.join(conditions_desc)}]"
+
+
+ class AdaptiveCondition(ConvergenceCondition):
+     """Adaptive convergence that changes based on iteration progress."""
+
+     def __init__(self, stages: List[tuple[int, ConvergenceCondition]]):
+         """Initialize with stages of conditions.
+
+         Args:
+             stages: List of (iteration_threshold, condition) tuples
+                 Conditions are applied when iteration >= threshold
+         """
+         self.stages = sorted(stages, key=lambda x: x[0])
+
+     def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+         """Evaluate condition based on current iteration stage."""
+         current_iteration = cycle_state.iteration
+
+         # Find the appropriate condition for current iteration
+         active_condition = None
+         for threshold, condition in reversed(self.stages):
+             if current_iteration >= threshold:
+                 active_condition = condition
+                 break
+
+         if active_condition:
+             return active_condition.evaluate(results, cycle_state)
+
+         # No condition applies yet, continue
+         return False
+
+     def describe(self) -> str:
+         """Describe the adaptive condition."""
+         stages_desc = [(t, c.describe()) for t, c in self.stages]
+         return f"AdaptiveCondition: {stages_desc}"
+
+
+ def create_convergence_condition(
+     spec: Union[str, int, Callable, Dict]
+ ) -> ConvergenceCondition:
+     """Factory function to create convergence conditions from various specs.
+
+     Args:
+         spec: Can be:
+             - str: Expression condition
+             - int: Max iterations condition
+             - Callable: Callback condition
+             - Dict: Complex condition specification
+
+     Returns:
+         ConvergenceCondition instance
+     """
+     if isinstance(spec, str):
+         return ExpressionCondition(spec)
+     elif isinstance(spec, int):
+         return MaxIterationsCondition(spec)
+     elif callable(spec):
+         return CallbackCondition(spec)
+     elif isinstance(spec, dict):
+         cond_type = spec.get("type", "expression")
+
+         if cond_type == "expression":
+             return ExpressionCondition(spec["expression"])
+         elif cond_type == "max_iterations":
+             return MaxIterationsCondition(spec["max_iterations"])
+         elif cond_type == "callback":
+             return CallbackCondition(spec["callback"], spec.get("name"))
+         elif cond_type == "compound":
+             conditions = [create_convergence_condition(c) for c in spec["conditions"]]
+             return CompoundCondition(conditions, spec.get("operator", "OR"))
+         elif cond_type == "adaptive":
+             stages = [(t, create_convergence_condition(c)) for t, c in spec["stages"]]
+             return AdaptiveCondition(stages)
+         else:
+             raise ValueError(f"Unknown condition type: {cond_type}")
+     else:
+         raise ValueError(f"Invalid convergence condition spec: {spec}")