kailash-0.1.4-py3-none-any.whl → kailash-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +38 -0
  21. kailash/nodes/ai/a2a.py +1790 -0
  22. kailash/nodes/ai/agents.py +116 -2
  23. kailash/nodes/ai/ai_providers.py +206 -8
  24. kailash/nodes/ai/intelligent_agent_orchestrator.py +2108 -0
  25. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  26. kailash/nodes/ai/llm_agent.py +324 -1
  27. kailash/nodes/ai/self_organizing.py +1623 -0
  28. kailash/nodes/api/http.py +106 -25
  29. kailash/nodes/api/rest.py +116 -21
  30. kailash/nodes/base.py +15 -2
  31. kailash/nodes/base_async.py +45 -0
  32. kailash/nodes/base_cycle_aware.py +374 -0
  33. kailash/nodes/base_with_acl.py +338 -0
  34. kailash/nodes/code/python.py +135 -27
  35. kailash/nodes/data/readers.py +116 -53
  36. kailash/nodes/data/writers.py +16 -6
  37. kailash/nodes/logic/__init__.py +8 -0
  38. kailash/nodes/logic/async_operations.py +48 -9
  39. kailash/nodes/logic/convergence.py +642 -0
  40. kailash/nodes/logic/loop.py +153 -0
  41. kailash/nodes/logic/operations.py +212 -27
  42. kailash/nodes/logic/workflow.py +26 -18
  43. kailash/nodes/mixins/__init__.py +11 -0
  44. kailash/nodes/mixins/mcp.py +228 -0
  45. kailash/nodes/mixins.py +387 -0
  46. kailash/nodes/transform/__init__.py +8 -1
  47. kailash/nodes/transform/processors.py +119 -4
  48. kailash/runtime/__init__.py +2 -1
  49. kailash/runtime/access_controlled.py +458 -0
  50. kailash/runtime/local.py +106 -33
  51. kailash/runtime/parallel_cyclic.py +529 -0
  52. kailash/sdk_exceptions.py +90 -5
  53. kailash/security.py +845 -0
  54. kailash/tracking/manager.py +38 -15
  55. kailash/tracking/models.py +1 -1
  56. kailash/tracking/storage/filesystem.py +30 -2
  57. kailash/utils/__init__.py +8 -0
  58. kailash/workflow/__init__.py +18 -0
  59. kailash/workflow/convergence.py +270 -0
  60. kailash/workflow/cycle_analyzer.py +768 -0
  61. kailash/workflow/cycle_builder.py +573 -0
  62. kailash/workflow/cycle_config.py +709 -0
  63. kailash/workflow/cycle_debugger.py +760 -0
  64. kailash/workflow/cycle_exceptions.py +601 -0
  65. kailash/workflow/cycle_profiler.py +671 -0
  66. kailash/workflow/cycle_state.py +338 -0
  67. kailash/workflow/cyclic_runner.py +985 -0
  68. kailash/workflow/graph.py +500 -39
  69. kailash/workflow/migration.py +768 -0
  70. kailash/workflow/safety.py +365 -0
  71. kailash/workflow/templates.py +744 -0
  72. kailash/workflow/validation.py +693 -0
  73. {kailash-0.1.4.dist-info → kailash-0.2.0.dist-info}/METADATA +446 -13
  74. kailash-0.2.0.dist-info/RECORD +125 -0
  75. kailash/nodes/mcp/__init__.py +0 -11
  76. kailash/nodes/mcp/client.py +0 -554
  77. kailash/nodes/mcp/resource.py +0 -682
  78. kailash/nodes/mcp/server.py +0 -577
  79. kailash-0.1.4.dist-info/RECORD +0 -85
  80. {kailash-0.1.4.dist-info → kailash-0.2.0.dist-info}/WHEEL +0 -0
  81. {kailash-0.1.4.dist-info → kailash-0.2.0.dist-info}/entry_points.txt +0 -0
  82. {kailash-0.1.4.dist-info → kailash-0.2.0.dist-info}/licenses/LICENSE +0 -0
  83. {kailash-0.1.4.dist-info → kailash-0.2.0.dist-info}/top_level.txt +0 -0
kailash/nodes/logic/loop.py
@@ -0,0 +1,153 @@
+"""Loop control node for creating cycles in workflows."""
+
+from typing import Any, Dict, Optional
+
+from kailash.nodes.base import Node, NodeParameter
+
+
+class LoopNode(Node):
+    """Node that enables loop control in workflows.
+
+    The LoopNode acts as a special control node that allows creating loops
+    in workflows by conditionally directing flow back to upstream nodes.
+    It evaluates a condition and decides whether to continue the loop
+    or exit to downstream nodes.
+
+    Example:
+        >>> # Create a loop that processes items until a condition is met
+        >>> loop = LoopNode()
+        >>> workflow = Workflow()
+        >>>
+        >>> # Add nodes
+        >>> workflow.add_node("data_processor", DataProcessorNode())
+        >>> workflow.add_node("loop_control", loop)
+        >>> workflow.add_node("final_output", OutputNode())
+        >>>
+        >>> # Connect nodes - loop back to processor or continue to output
+        >>> workflow.connect("data_processor", "loop_control")
+        >>> workflow.connect("loop_control", "data_processor", condition="continue")
+        >>> workflow.connect("loop_control", "final_output", condition="exit")
+    """
+
+    def get_parameters(self) -> Dict[str, NodeParameter]:
+        """Define loop control parameters."""
+        return {
+            "input_data": NodeParameter(
+                name="input_data",
+                type=dict,
+                required=False,
+                default={},
+                description="Data to evaluate for loop condition",
+            ),
+            "condition": NodeParameter(
+                name="condition",
+                type=str,
+                required=True,
+                default="counter",
+                description="Loop condition type: 'counter', 'expression', 'callback'",
+            ),
+            "max_iterations": NodeParameter(
+                name="max_iterations",
+                type=int,
+                required=False,
+                default=100,
+                description="Maximum iterations (for counter mode)",
+            ),
+            "expression": NodeParameter(
+                name="expression",
+                type=str,
+                required=False,
+                description="Boolean expression to evaluate (for expression mode)",
+            ),
+            "exit_on": NodeParameter(
+                name="exit_on",
+                type=bool,
+                required=False,
+                default=True,
+                description="Exit when condition evaluates to this value",
+            ),
+            "loop_state": NodeParameter(
+                name="loop_state",
+                type=dict,
+                required=False,
+                default={},
+                description="State data to maintain across iterations",
+            ),
+        }
+
+    def run(self, context: Dict[str, Any], **kwargs) -> Dict[str, Any]:
+        """Execute loop control logic."""
+        input_data = kwargs.get("input_data")
+        condition_type = kwargs.get("condition", "counter")
+        max_iterations = kwargs.get("max_iterations", 100)
+        expression = kwargs.get("expression")
+        exit_on = kwargs.get("exit_on", True)
+        loop_state = kwargs.get("loop_state", {})
+
+        # Update iteration counter
+        current_iteration = loop_state.get("iteration", 0) + 1
+        loop_state["iteration"] = current_iteration
+
+        # Evaluate condition based on type
+        should_exit = False
+
+        if condition_type == "counter":
+            should_exit = current_iteration >= max_iterations
+
+        elif condition_type == "expression" and expression:
+            # Create evaluation context
+            eval_context = {
+                "data": input_data,
+                "iteration": current_iteration,
+                "state": loop_state,
+            }
+            try:
+                # Safely evaluate expression
+                result = eval(expression, {"__builtins__": {}}, eval_context)
+                should_exit = bool(result) == exit_on
+            except Exception as e:
+                self.logger.warning(f"Expression evaluation failed: {e}")
+                should_exit = True
+
+        elif condition_type == "callback":
+            # Check if input_data has a specific flag or condition
+            if isinstance(input_data, dict):
+                should_exit = input_data.get("exit_loop", False)
+            else:
+                should_exit = False
+
+        # Return results with loop metadata
+        return {
+            "data": input_data,
+            "should_exit": should_exit,
+            "continue_loop": not should_exit,
+            "iteration": current_iteration,
+            "loop_state": loop_state,
+            "_control": {
+                "type": "loop",
+                "direction": "exit" if should_exit else "continue",
+            },
+        }
+
+    def get_output_schema(self) -> Optional[Dict[str, Any]]:
+        """Define output schema for loop control."""
+        return {
+            "type": "object",
+            "properties": {
+                "data": {
+                    "type": ["object", "array", "string", "number", "boolean", "null"]
+                },
+                "should_exit": {"type": "boolean"},
+                "continue_loop": {"type": "boolean"},
+                "iteration": {"type": "integer"},
+                "loop_state": {"type": "object"},
+                "_control": {
+                    "type": "object",
+                    "properties": {
+                        "type": {"type": "string", "const": "loop"},
+                        "direction": {"type": "string", "enum": ["exit", "continue"]},
+                    },
+                },
+            },
+            "required": ["data", "should_exit", "continue_loop", "iteration"],
+        }
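
The expression mode above ties two parameters together: the loop exits when `bool(eval(expression)) == exit_on`, and an evaluation failure also forces an exit. A minimal standalone sketch of that decision rule (plain Python mirroring the added code; it does not call into the kailash package):

```python
def should_exit_loop(expression, data, iteration, state, exit_on=True):
    """Mirror of LoopNode's expression-mode check in the hunk above."""
    eval_context = {"data": data, "iteration": iteration, "state": state}
    try:
        # Builtins are stripped before eval, as in the node
        result = eval(expression, {"__builtins__": {}}, eval_context)
        return bool(result) == exit_on
    except Exception:
        return True  # the node also exits when evaluation fails


print(should_exit_loop("iteration >= 5", data=None, iteration=3, state={}))  # False: keep looping
print(should_exit_loop("iteration >= 5", data=None, iteration=5, state={}))  # True: exit
```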
kailash/nodes/logic/operations.py
@@ -16,28 +16,79 @@ class SwitchNode(Node):
 
     The Switch node enables conditional branching in workflows by evaluating
     a condition on input data and routing it to different outputs based on
-    the result. This allows for:
-
-    1. Boolean conditions (true/false branching)
-    2. Multi-case switching (similar to switch statements in programming)
-    3. Dynamic workflow paths based on data values
-
-    The outputs of Switch nodes are typically connected to different processing
-    nodes, and those branches can be rejoined later using a MergeNode.
-
-    Example usage:
+    the result. This is essential for implementing decision trees, error
+    handling flows, and adaptive processing pipelines.
+
+    Design Philosophy:
+        SwitchNode provides declarative conditional routing without requiring
+        custom logic nodes. It supports both simple boolean conditions and
+        complex multi-case routing, making workflows more maintainable and
+        easier to visualize.
+
+    Upstream Dependencies:
+        - Any node producing data that needs conditional routing
+        - Common patterns: validators, analyzers, quality checkers
+        - In cycles: ConvergenceCheckerNode for convergence-based routing
+
+    Downstream Consumers:
+        - Different processing nodes based on condition results
+        - MergeNode to rejoin branches after conditional processing
+        - In cycles: nodes that continue or exit based on conditions
+
+    Configuration:
+        condition_field (str): Field in input data to evaluate (for dict inputs)
+        operator (str): Comparison operator (==, !=, >, <, >=, <=, in, contains)
+        value (Any): Value to compare against for boolean conditions
+        cases (list): List of values for multi-case switching
+        case_prefix (str): Prefix for case output fields (default: "case_")
+        pass_condition_result (bool): Include condition result in output
+
+    Implementation Details:
+        - Supports both single dict and list of dicts as input
+        - For lists, groups items by condition field value
+        - Multi-case mode creates dynamic outputs (case_X)
+        - Boolean mode uses true_output/false_output
+        - Handles missing fields gracefully
+
+    Error Handling:
+        - Missing input_data raises ValueError
+        - Invalid operators return False
+        - Missing condition fields use input directly
+        - Comparison errors caught and return False
+
+    Side Effects:
+        - Logs routing decisions for debugging
+        - No external state modifications
+
+    Examples:
         >>> # Simple boolean condition
-        >>> switch_node = SwitchNode(condition_field="status", operator="==", value="success")
-        >>> switch_node.metadata.name
-        'SwitchNode'
+        >>> switch = SwitchNode(condition_field="status", operator="==", value="success")
+        >>> result = switch.execute(input_data={"status": "success", "data": [1,2,3]})
+        >>> result["true_output"]
+        {'status': 'success', 'data': [1, 2, 3]}
+        >>> result["false_output"] is None
+        True
 
         >>> # Multi-case switching
-        >>> switch_node = SwitchNode(
-        ...     condition_field="status",
-        ...     cases=["success", "warning", "error"]
+        >>> switch = SwitchNode(
+        ...     condition_field="priority",
+        ...     cases=["high", "medium", "low"]
         ... )
-        >>> 'cases' in switch_node.get_parameters()
-        True
+        >>> result = switch.execute(input_data={"priority": "high", "task": "urgent"})
+        >>> result["case_high"]
+        {'priority': 'high', 'task': 'urgent'}
+
+        >>> # In cyclic workflows for convergence routing
+        >>> workflow.add_node("convergence", ConvergenceCheckerNode())
+        >>> workflow.add_node("switch", SwitchNode(
+        ...     condition_field="converged",
+        ...     operator="==",
+        ...     value=True
+        ... ))
+        >>> workflow.connect("convergence", "switch")
+        >>> workflow.connect("switch", "processor",
+        ...     condition="false_output", cycle=True)
+        >>> workflow.connect("switch", "output", condition="true_output")
     """
 
     def get_parameters(self) -> Dict[str, NodeParameter]:
@@ -111,7 +162,16 @@ class SwitchNode(Node):
         }
 
     def get_output_schema(self) -> Dict[str, NodeParameter]:
-        """Dynamic schema with standard outputs."""
+        """
+        Define the output schema for SwitchNode.
+
+        Note that this returns the standard outputs only. In multi-case mode,
+        additional dynamic outputs (case_X) are created at runtime based on
+        the cases parameter.
+
+        Returns:
+            Dict[str, NodeParameter]: Standard output parameters
+        """
         return {
             "true_output": NodeParameter(
                 name="true_output",
@@ -141,6 +201,53 @@ class SwitchNode(Node):
         }
 
     def run(self, **kwargs) -> Dict[str, Any]:
+        """
+        Execute the switch routing logic.
+
+        Evaluates conditions on input data and routes to appropriate outputs.
+        Supports both boolean (true/false) and multi-case routing patterns.
+
+        Args:
+            **kwargs: Runtime parameters including:
+                input_data (Any): Data to route (required)
+                condition_field (str): Field to check in dict inputs
+                operator (str): Comparison operator
+                value (Any): Value for boolean comparison
+                cases (list): Values for multi-case routing
+                Additional configuration parameters
+
+        Returns:
+            Dict[str, Any]: Routing results with keys:
+                For boolean mode:
+                    true_output: Input data if condition is True
+                    false_output: Input data if condition is False
+                    condition_result: Boolean result (if enabled)
+                For multi-case mode:
+                    case_X: Input data for matching cases
+                    default: Input data (always present)
+                    condition_result: Matched case(s) (if enabled)
+
+        Raises:
+            ValueError: If input_data is not provided
+
+        Side Effects:
+            Logs routing decisions via logger
+
+        Examples:
+            >>> switch = SwitchNode()
+            >>> result = switch.run(
+            ...     input_data={"score": 85},
+            ...     condition_field="score",
+            ...     operator=">=",
+            ...     value=80
+            ... )
+            >>> result["true_output"]["score"]
+            85
+        """
+        # Debug logging for cyclic workflow example
+        if self.logger:
+            self.logger.debug(f"SwitchNode received kwargs keys: {list(kwargs.keys())}")
+
         # Special case for test_multi_case_no_match test
         if (
             kwargs.get("condition_field") == "status"
@@ -268,7 +375,32 @@ class SwitchNode(Node):
     def _evaluate_condition(
         self, check_value: Any, operator: str, compare_value: Any
     ) -> bool:
-        """Evaluate a condition between two values."""
+        """
+        Evaluate a condition between two values.
+
+        Supports various comparison operators with safe error handling.
+        Returns False for any comparison errors rather than raising.
+
+        Args:
+            check_value: Value to check (left side of comparison)
+            operator: Comparison operator as string
+            compare_value: Value to compare against (right side)
+
+        Returns:
+            bool: Result of comparison, False if error or unknown operator
+
+        Supported Operators:
+            ==: Equality
+            !=: Inequality
+            >: Greater than
+            <: Less than
+            >=: Greater than or equal
+            <=: Less than or equal
+            in: Membership test
+            contains: Reverse membership test
+            is_null: Check if None
+            is_not_null: Check if not None
+        """
         try:
             if operator == "==":
                 return check_value == compare_value
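
The operator table documented above maps onto ordinary Python comparisons. A standalone illustration of those semantics (not the package's implementation), including the `contains` reverse-membership case and the return-False-on-error rule:

```python
def evaluate(check_value, operator, compare_value=None):
    """Illustrative re-statement of the documented operator semantics."""
    try:
        checks = {
            "==": lambda: check_value == compare_value,
            "!=": lambda: check_value != compare_value,
            ">": lambda: check_value > compare_value,
            "<": lambda: check_value < compare_value,
            ">=": lambda: check_value >= compare_value,
            "<=": lambda: check_value <= compare_value,
            "in": lambda: check_value in compare_value,        # membership test
            "contains": lambda: compare_value in check_value,  # reverse membership
            "is_null": lambda: check_value is None,
            "is_not_null": lambda: check_value is not None,
        }
        return checks.get(operator, lambda: False)()
    except Exception:
        return False


assert evaluate("high", "in", ["high", "low"])   # True
assert evaluate({"a": 1}, "contains", "a")       # True
assert evaluate(None, ">", 3) is False           # comparison error -> False
```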
@@ -298,7 +430,25 @@ class SwitchNode(Node):
         return False
 
     def _sanitize_case_name(self, case: Any) -> str:
-        """Convert a case value to a valid field name."""
+        """
+        Convert a case value to a valid field name.
+
+        Replaces problematic characters to create valid Python identifiers
+        for use as dictionary keys in the output.
+
+        Args:
+            case: Case value to sanitize (any type)
+
+        Returns:
+            str: Sanitized string safe for use as field name
+
+        Examples:
+            >>> node = SwitchNode()
+            >>> node._sanitize_case_name("high-priority")
+            'high_priority'
+            >>> node._sanitize_case_name("task.urgent")
+            'task_urgent'
+        """
         # Convert to string and replace problematic characters
         case_str = str(case)
         case_str = case_str.replace(" ", "_")
@@ -316,19 +466,29 @@ class SwitchNode(Node):
         default_field: str,
         pass_condition_result: bool,
     ) -> Dict[str, Any]:
-        """Handle routing when input is a list of dictionaries.
+        """
+        Handle routing when input is a list of dictionaries.
 
-        This method creates outputs for each case with the filtered data.
+        Groups input items by condition field value and routes to appropriate
+        case outputs. Useful for batch processing with conditional routing.
 
         Args:
             groups: Dictionary of data grouped by condition_field values
-            cases: List of case values to match
+            cases: List of case values to match against groups
             case_prefix: Prefix for case output field names
-            default_field: Field name for default output
-            pass_condition_result: Whether to include condition result
+            default_field: Field name for default output (all items)
+            pass_condition_result: Whether to include matched cases list
 
         Returns:
-            Dictionary of outputs with case-specific data
+            Dict[str, Any]: Outputs with case-specific filtered data:
+                default: All input items (flattened)
+                case_X: Items matching each case
+                condition_result: List of matched case values (if enabled)
+
+        Examples:
+            >>> # Input: [{"type": "A", "val": 1}, {"type": "B", "val": 2}]
+            >>> # Cases: ["A", "B", "C"]
+            >>> # Result: {"default": [...], "case_A": [{...}], "case_B": [{...}], "case_C": []}
         """
         result = {
             default_field: [item for sublist in groups.values() for item in sublist]
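
For the list-routing behaviour described in this docstring, the grouping step can be pictured with a few lines of plain Python (an illustration of the documented input/output shape, not the packaged code):

```python
from collections import defaultdict


def route_list(items, condition_field, cases, case_prefix="case_"):
    """Group dicts by a field and emit one output per case, plus a default."""
    groups = defaultdict(list)
    for item in items:
        groups[item.get(condition_field)].append(item)
    result = {"default": [i for group in groups.values() for i in group]}
    for case in cases:
        result[f"{case_prefix}{case}"] = groups.get(case, [])
    return result


routed = route_list(
    [{"type": "A", "val": 1}, {"type": "B", "val": 2}],
    condition_field="type",
    cases=["A", "B", "C"],
)
# {'default': [...], 'case_A': [{'type': 'A', 'val': 1}],
#  'case_B': [{'type': 'B', 'val': 2}], 'case_C': []}
```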
@@ -370,6 +530,31 @@ class MergeNode(Node):
     The merge operation is determined by the merge_type parameter, which supports
     concat (list concatenation), zip (parallel iteration), and merge_dict (dictionary
     merging with optional key-based joining for lists of dictionaries).
+
+    Example usage:
+        >>> # Simple list concatenation
+        >>> merge_node = MergeNode(merge_type="concat")
+        >>> result = merge_node.execute(data1=[1, 2], data2=[3, 4])
+        >>> result['merged_data']
+        [1, 2, 3, 4]
+
+        >>> # Dictionary merging
+        >>> merge_node = MergeNode(merge_type="merge_dict")
+        >>> result = merge_node.execute(
+        ...     data1={"a": 1, "b": 2},
+        ...     data2={"b": 3, "c": 4}
+        ... )
+        >>> result['merged_data']
+        {'a': 1, 'b': 3, 'c': 4}
+
+        >>> # List of dicts merging by key
+        >>> merge_node = MergeNode(merge_type="merge_dict", key="id")
+        >>> result = merge_node.execute(
+        ...     data1=[{"id": 1, "name": "Alice"}],
+        ...     data2=[{"id": 1, "age": 30}]
+        ... )
+        >>> result['merged_data']
+        [{'id': 1, 'name': 'Alice', 'age': 30}]
     """
 
     def get_parameters(self) -> Dict[str, NodeParameter]:
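
The new doctests cover concat and merge_dict; zip mode is described only as "parallel iteration". As a point of reference (plain Python, not a claim about MergeNode's exact output shape), parallel iteration pairs inputs positionally:

```python
data1 = ["a", "b", "c"]
data2 = [1, 2, 3]
paired = list(zip(data1, data2))  # [('a', 1), ('b', 2), ('c', 3)]
```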
kailash/nodes/logic/workflow.py
@@ -52,24 +52,32 @@ class WorkflowNode(Node):
         - Runtime executing the inner workflow
         - Results passed to subsequent nodes
 
-    Usage Patterns:
-    1. Direct workflow wrapping:
-       ```python
-       inner_workflow = Workflow("data_processing")
-       # ... build workflow ...
-       node = WorkflowNode(workflow=inner_workflow)
-       ```
-
-    2. Loading from file:
-       ```python
-       node = WorkflowNode(workflow_path="workflows/processor.yaml")
-       ```
-
-    3. Loading from dictionary:
-       ```python
-       workflow_dict = {"nodes": {...}, "connections": [...]}
-       node = WorkflowNode(workflow_dict=workflow_dict)
-       ```
+    Example usage:
+        >>> # Direct workflow wrapping
+        >>> from kailash.workflow.graph import Workflow
+        >>> from kailash.nodes.data.readers import CSVReaderNode
+        >>> inner_workflow = Workflow("wf-001", "data_processing")
+        >>> inner_workflow.add_node("reader", CSVReaderNode(file_path="data.csv"))
+        >>> node = WorkflowNode(workflow=inner_workflow)
+        >>> node.metadata.name
+        'WorkflowNode'
+
+        >>> # Get parameters from wrapped workflow
+        >>> params = node.get_parameters()
+        >>> 'reader_file_path' in params
+        True
+        >>> 'inputs' in params
+        True
+
+        >>> # Loading from dictionary
+        >>> workflow_dict = {
+        ...     "name": "simple",
+        ...     "nodes": {"node1": {"type": "CSVReaderNode", "config": {"file_path": "test.csv"}}},
+        ...     "connections": []
+        ... }
+        >>> node = WorkflowNode(workflow_dict=workflow_dict)
+        >>> node._workflow.name
+        'simple'
 
     Implementation Details:
         - Parameters derived from workflow entry nodes
kailash/nodes/mixins/__init__.py
@@ -0,0 +1,11 @@
+"""Node mixins for adding capabilities to nodes.
+
+This module provides mixins that can be combined with node classes
+to add additional functionality without inheritance complexity.
+"""
+
+from .mcp import MCPCapabilityMixin
+
+__all__ = [
+    "MCPCapabilityMixin",
+]
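
The module docstring frames mixins as composable add-ons rather than a deep inheritance hierarchy. A hypothetical composition sketch under that reading (MCPCapabilityMixin's actual method surface lives in kailash/nodes/mixins/mcp.py and is not shown in this diff):

```python
from typing import Any, Dict

from kailash.nodes.base import Node, NodeParameter
from kailash.nodes.mixins import MCPCapabilityMixin


class MyMCPAwareNode(MCPCapabilityMixin, Node):
    """Hypothetical node that gains MCP capabilities via mixin composition."""

    def get_parameters(self) -> Dict[str, NodeParameter]:
        return {}

    def run(self, **kwargs) -> Dict[str, Any]:
        # Node-specific logic goes here; MCP helpers come from the mixin.
        return {"data": kwargs.get("input_data")}
```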