soe-ai 0.2.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. soe/__init__.py +50 -0
  2. soe/broker.py +168 -0
  3. soe/builtin_tools/__init__.py +51 -0
  4. soe/builtin_tools/soe_add_signal.py +82 -0
  5. soe/builtin_tools/soe_call_tool.py +111 -0
  6. soe/builtin_tools/soe_copy_context.py +80 -0
  7. soe/builtin_tools/soe_explore_docs.py +290 -0
  8. soe/builtin_tools/soe_get_available_tools.py +42 -0
  9. soe/builtin_tools/soe_get_context.py +50 -0
  10. soe/builtin_tools/soe_get_context_schema.py +56 -0
  11. soe/builtin_tools/soe_get_identities.py +63 -0
  12. soe/builtin_tools/soe_get_workflows.py +63 -0
  13. soe/builtin_tools/soe_inject_context_schema_field.py +80 -0
  14. soe/builtin_tools/soe_inject_identity.py +64 -0
  15. soe/builtin_tools/soe_inject_node.py +86 -0
  16. soe/builtin_tools/soe_inject_workflow.py +105 -0
  17. soe/builtin_tools/soe_list_contexts.py +73 -0
  18. soe/builtin_tools/soe_remove_context_schema_field.py +61 -0
  19. soe/builtin_tools/soe_remove_identity.py +61 -0
  20. soe/builtin_tools/soe_remove_node.py +72 -0
  21. soe/builtin_tools/soe_remove_workflow.py +62 -0
  22. soe/builtin_tools/soe_update_context.py +54 -0
  23. soe/docs/_config.yml +10 -0
  24. soe/docs/advanced_patterns/guide_fanout_and_aggregations.md +318 -0
  25. soe/docs/advanced_patterns/guide_inheritance.md +435 -0
  26. soe/docs/advanced_patterns/hybrid_intelligence.md +237 -0
  27. soe/docs/advanced_patterns/index.md +49 -0
  28. soe/docs/advanced_patterns/operational.md +781 -0
  29. soe/docs/advanced_patterns/self_evolving_workflows.md +385 -0
  30. soe/docs/advanced_patterns/swarm_intelligence.md +211 -0
  31. soe/docs/builtins/context.md +164 -0
  32. soe/docs/builtins/context_schema.md +158 -0
  33. soe/docs/builtins/identity.md +139 -0
  34. soe/docs/builtins/soe_explore_docs.md +135 -0
  35. soe/docs/builtins/tools.md +164 -0
  36. soe/docs/builtins/workflows.md +199 -0
  37. soe/docs/guide_00_getting_started.md +341 -0
  38. soe/docs/guide_01_tool.md +206 -0
  39. soe/docs/guide_02_llm.md +143 -0
  40. soe/docs/guide_03_router.md +146 -0
  41. soe/docs/guide_04_patterns.md +475 -0
  42. soe/docs/guide_05_agent.md +159 -0
  43. soe/docs/guide_06_schema.md +397 -0
  44. soe/docs/guide_07_identity.md +540 -0
  45. soe/docs/guide_08_child.md +612 -0
  46. soe/docs/guide_09_ecosystem.md +690 -0
  47. soe/docs/guide_10_infrastructure.md +427 -0
  48. soe/docs/guide_11_builtins.md +126 -0
  49. soe/docs/index.md +104 -0
  50. soe/docs/primitives/backends.md +281 -0
  51. soe/docs/primitives/context.md +256 -0
  52. soe/docs/primitives/node_reference.md +259 -0
  53. soe/docs/primitives/primitives.md +331 -0
  54. soe/docs/primitives/signals.md +865 -0
  55. soe/docs_index.py +2 -0
  56. soe/init.py +165 -0
  57. soe/lib/__init__.py +0 -0
  58. soe/lib/child_context.py +46 -0
  59. soe/lib/context_fields.py +51 -0
  60. soe/lib/inheritance.py +172 -0
  61. soe/lib/jinja_render.py +113 -0
  62. soe/lib/operational.py +51 -0
  63. soe/lib/parent_sync.py +71 -0
  64. soe/lib/register_event.py +75 -0
  65. soe/lib/schema_validation.py +134 -0
  66. soe/lib/yaml_parser.py +14 -0
  67. soe/local_backends/__init__.py +18 -0
  68. soe/local_backends/factory.py +124 -0
  69. soe/local_backends/in_memory/context.py +38 -0
  70. soe/local_backends/in_memory/conversation_history.py +60 -0
  71. soe/local_backends/in_memory/identity.py +52 -0
  72. soe/local_backends/in_memory/schema.py +40 -0
  73. soe/local_backends/in_memory/telemetry.py +38 -0
  74. soe/local_backends/in_memory/workflow.py +33 -0
  75. soe/local_backends/storage/context.py +57 -0
  76. soe/local_backends/storage/conversation_history.py +82 -0
  77. soe/local_backends/storage/identity.py +118 -0
  78. soe/local_backends/storage/schema.py +96 -0
  79. soe/local_backends/storage/telemetry.py +72 -0
  80. soe/local_backends/storage/workflow.py +56 -0
  81. soe/nodes/__init__.py +13 -0
  82. soe/nodes/agent/__init__.py +10 -0
  83. soe/nodes/agent/factory.py +134 -0
  84. soe/nodes/agent/lib/loop_handlers.py +150 -0
  85. soe/nodes/agent/lib/loop_state.py +157 -0
  86. soe/nodes/agent/lib/prompts.py +65 -0
  87. soe/nodes/agent/lib/tools.py +35 -0
  88. soe/nodes/agent/stages/__init__.py +12 -0
  89. soe/nodes/agent/stages/parameter.py +37 -0
  90. soe/nodes/agent/stages/response.py +54 -0
  91. soe/nodes/agent/stages/router.py +37 -0
  92. soe/nodes/agent/state.py +111 -0
  93. soe/nodes/agent/types.py +66 -0
  94. soe/nodes/agent/validation/__init__.py +11 -0
  95. soe/nodes/agent/validation/config.py +95 -0
  96. soe/nodes/agent/validation/operational.py +24 -0
  97. soe/nodes/child/__init__.py +3 -0
  98. soe/nodes/child/factory.py +61 -0
  99. soe/nodes/child/state.py +59 -0
  100. soe/nodes/child/validation/__init__.py +11 -0
  101. soe/nodes/child/validation/config.py +126 -0
  102. soe/nodes/child/validation/operational.py +28 -0
  103. soe/nodes/lib/conditions.py +71 -0
  104. soe/nodes/lib/context.py +24 -0
  105. soe/nodes/lib/conversation_history.py +77 -0
  106. soe/nodes/lib/identity.py +64 -0
  107. soe/nodes/lib/llm_resolver.py +142 -0
  108. soe/nodes/lib/output.py +68 -0
  109. soe/nodes/lib/response_builder.py +91 -0
  110. soe/nodes/lib/signal_emission.py +79 -0
  111. soe/nodes/lib/signals.py +54 -0
  112. soe/nodes/lib/tools.py +100 -0
  113. soe/nodes/llm/__init__.py +7 -0
  114. soe/nodes/llm/factory.py +103 -0
  115. soe/nodes/llm/state.py +76 -0
  116. soe/nodes/llm/types.py +12 -0
  117. soe/nodes/llm/validation/__init__.py +11 -0
  118. soe/nodes/llm/validation/config.py +89 -0
  119. soe/nodes/llm/validation/operational.py +23 -0
  120. soe/nodes/router/__init__.py +3 -0
  121. soe/nodes/router/factory.py +37 -0
  122. soe/nodes/router/state.py +32 -0
  123. soe/nodes/router/validation/__init__.py +11 -0
  124. soe/nodes/router/validation/config.py +58 -0
  125. soe/nodes/router/validation/operational.py +16 -0
  126. soe/nodes/tool/factory.py +66 -0
  127. soe/nodes/tool/lib/__init__.py +11 -0
  128. soe/nodes/tool/lib/conditions.py +35 -0
  129. soe/nodes/tool/lib/failure.py +28 -0
  130. soe/nodes/tool/lib/parameters.py +67 -0
  131. soe/nodes/tool/state.py +66 -0
  132. soe/nodes/tool/types.py +27 -0
  133. soe/nodes/tool/validation/__init__.py +15 -0
  134. soe/nodes/tool/validation/config.py +132 -0
  135. soe/nodes/tool/validation/operational.py +16 -0
  136. soe/types.py +209 -0
  137. soe/validation/__init__.py +18 -0
  138. soe/validation/config.py +195 -0
  139. soe/validation/jinja.py +54 -0
  140. soe/validation/operational.py +110 -0
  141. soe_ai-0.2.0b1.dist-info/METADATA +262 -0
  142. soe_ai-0.2.0b1.dist-info/RECORD +145 -0
  143. soe_ai-0.2.0b1.dist-info/WHEEL +5 -0
  144. soe_ai-0.2.0b1.dist-info/licenses/LICENSE +21 -0
  145. soe_ai-0.2.0b1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,82 @@
1
+ """
2
+ Local file-based conversation history backend
3
+
4
+ This backend stores conversation history by identity, allowing agents
5
+ to persist their conversation state across different node executions.
6
+ """
7
+
8
+ import json
9
+ from pathlib import Path
10
+ from typing import List, Dict, Any
11
+
12
+
13
class LocalConversationHistoryBackend:
    """File-based conversation history storage backend.

    Stores one JSON file per identity under ``storage_dir`` so agents can
    persist conversation state across different node executions.
    """

    def __init__(self, storage_dir: str = "./orchestration_data/conversations"):
        """
        Initialize local conversation history backend.

        Args:
            storage_dir: Directory to store conversation history files
        """
        self.storage_dir = Path(storage_dir)
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def _history_file(self, identity: str) -> Path:
        """Return the JSON file path backing an identity's history."""
        return self.storage_dir / f"{identity}.json"

    def get_conversation_history(self, identity: str) -> List[Dict[str, Any]]:
        """
        Get conversation history for an identity.

        Args:
            identity: Unique identity identifier for the agent

        Returns:
            List of conversation entries, empty if not found
        """
        history_file = self._history_file(identity)

        if not history_file.exists():
            return []

        # Explicit UTF-8: files are written as UTF-8 below, so reads must not
        # depend on the platform's default locale encoding.
        with open(history_file, "r", encoding="utf-8") as f:
            return json.load(f)

    def append_to_conversation_history(
        self, identity: str, entry: Dict[str, Any]
    ) -> None:
        """
        Append a single entry to the conversation history.

        Args:
            identity: Unique identity identifier for the agent
            entry: Conversation entry to append
        """
        # Read-modify-write of the whole file; not safe for concurrent
        # writers, which matches the original single-process design.
        history = self.get_conversation_history(identity)
        history.append(entry)
        self.save_conversation_history(identity, history)

    def save_conversation_history(
        self, identity: str, history: List[Dict[str, Any]]
    ) -> None:
        """
        Save full conversation history for an identity.

        Args:
            identity: Unique identity identifier for the agent
            history: Full conversation history list to save
        """
        history_file = self._history_file(identity)

        # default=str stringifies non-JSON-serializable values (e.g.
        # datetimes) rather than raising, preserving original behavior.
        with open(history_file, "w", encoding="utf-8") as f:
            json.dump(history, f, indent=2, default=str)

    def delete_conversation_history(self, identity: str) -> None:
        """Delete conversation history for an identity."""
        history_file = self._history_file(identity)
        if history_file.exists():
            history_file.unlink()

    def cleanup_all(self) -> None:
        """Delete all conversation history files. Used for test cleanup."""
        for history_file in self.storage_dir.glob("*.json"):
            history_file.unlink()
@@ -0,0 +1,118 @@
1
+ """
2
+ Identity backend for workflow identity definitions.
3
+
4
+ Stores and retrieves identities that define the participating personas/roles
5
+ in a workflow. These are used as initial system prompts for conversation history.
6
+ Keyed by execution_id (main_execution_id) so children can access parent's identities.
7
+ Dumb storage only - no validation logic.
8
+
9
+ Identity format is simple: identity_name -> system_prompt (string)
10
+ Example:
11
+ assistant: "You are a helpful assistant."
12
+ coding_expert: "You are an expert programmer."
13
+ """
14
+
15
+ from typing import Dict, Optional
16
+ import os
17
+ import yaml
18
+
19
+ from ...lib.yaml_parser import parse_yaml
20
+
21
+
22
class LocalIdentityBackend:
    """
    Local file-based identity storage backend.

    Persists identity definitions (identity_name -> system_prompt) as one
    YAML file per execution_id, with an in-memory cache in front of disk.
    """

    def __init__(self, storage_dir: str = "./orchestration_data/identities"):
        self.storage_dir = storage_dir
        os.makedirs(storage_dir, exist_ok=True)
        self._cache: Dict[str, Dict[str, str]] = {}

    def _get_identity_path(self, execution_id: str) -> str:
        """Return the YAML file path backing this execution's identities."""
        return os.path.join(self.storage_dir, f"{execution_id}.yaml")

    def save_identities(self, execution_id: str, identities: Dict[str, str]) -> None:
        """
        Persist identity definitions for an execution and refresh the cache.

        Args:
            execution_id: Execution ID (typically main_execution_id)
            identities: Identity definitions (identity_name -> system_prompt)
        """
        with open(self._get_identity_path(execution_id), "w", encoding="utf-8") as fh:
            yaml.dump(identities, fh, default_flow_style=False)
        self._cache[execution_id] = identities

    def get_identities(self, execution_id: str) -> Optional[Dict[str, str]]:
        """
        Return identity definitions for an execution, or None when absent.

        Cache is consulted first; a disk hit is cached for later calls.
        """
        if execution_id in self._cache:
            return self._cache[execution_id]

        file_path = self._get_identity_path(execution_id)
        if not os.path.exists(file_path):
            return None

        with open(file_path, "r", encoding="utf-8") as fh:
            loaded = parse_yaml(fh.read())
        self._cache[execution_id] = loaded
        return loaded

    def get_identity(self, execution_id: str, identity_name: str) -> Optional[str]:
        """
        Return a single identity's system prompt, or None when missing.
        """
        identities = self.get_identities(execution_id)
        if not identities:
            return None
        return identities.get(identity_name)

    def delete_identities(self, execution_id: str) -> bool:
        """
        Remove an execution's identity definitions from cache and disk.

        Returns:
            True if anything was removed, False otherwise.
        """
        removed = False

        if execution_id in self._cache:
            self._cache.pop(execution_id)
            removed = True

        file_path = self._get_identity_path(execution_id)
        if os.path.exists(file_path):
            os.remove(file_path)
            removed = True

        return removed

    def cleanup_all(self) -> None:
        """Drop the cache and delete every stored ``.yaml`` file."""
        self._cache.clear()
        if not os.path.exists(self.storage_dir):
            return
        for filename in os.listdir(self.storage_dir):
            if filename.endswith(".yaml"):
                os.remove(os.path.join(self.storage_dir, filename))
@@ -0,0 +1,96 @@
1
+ """
2
+ Context schema backend for workflow context field validation.
3
+
4
+ Stores and retrieves schemas that define the structure of workflow context fields.
5
+ Keyed by execution_id (main_execution_id) so children can access parent's schemas.
6
+ Dumb storage only - no validation logic (see soe/lib/schema_validation.py).
7
+ """
8
+
9
+ from typing import Dict, Any, Optional
10
+ import os
11
+ import yaml
12
+
13
+ from ...lib.yaml_parser import parse_yaml
14
+
15
+
16
class LocalContextSchemaBackend:
    """
    Local file-based context schema storage backend.

    Stores context schemas as YAML files keyed by execution_id, with an
    in-memory cache in front of disk. Dumb storage only - validation logic
    lives in soe/lib/schema_validation.py.
    """

    def __init__(self, storage_dir: str = "./orchestration_data/schemas"):
        self.storage_dir = storage_dir
        os.makedirs(storage_dir, exist_ok=True)
        self._cache: Dict[str, Dict[str, Any]] = {}

    def _get_schema_path(self, execution_id: str) -> str:
        """Get file path for an execution's context schema."""
        return os.path.join(self.storage_dir, f"{execution_id}.yaml")

    def save_context_schema(self, execution_id: str, schema: Dict[str, Any]) -> None:
        """
        Save a context schema for an execution.

        Args:
            execution_id: Execution ID (typically main_execution_id)
            schema: Schema definition dict
        """
        path = self._get_schema_path(execution_id)
        with open(path, "w", encoding="utf-8") as f:
            yaml.dump(schema, f, default_flow_style=False)
        self._cache[execution_id] = schema

    def get_context_schema(self, execution_id: str) -> Optional[Dict[str, Any]]:
        """
        Get context schema for an execution.

        Args:
            execution_id: Execution ID (typically main_execution_id)

        Returns:
            Schema dict or None if not found
        """
        # Check cache first
        if execution_id in self._cache:
            return self._cache[execution_id]

        # Load from file; a disk hit is cached for subsequent lookups
        path = self._get_schema_path(execution_id)
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as f:
                schema = parse_yaml(f.read())
            self._cache[execution_id] = schema
            return schema

        return None

    def delete_context_schema(self, execution_id: str) -> bool:
        """
        Delete an execution's context schema.

        Args:
            execution_id: Execution ID (typically main_execution_id)

        Returns:
            True if deleted, False if not found
        """
        path = self._get_schema_path(execution_id)
        if os.path.exists(path):
            os.remove(path)
            self._cache.pop(execution_id, None)
            return True
        return False

    def cleanup_all(self) -> None:
        """
        Remove all stored context schemas.

        Deletes only this backend's ``*.yaml`` files (matching the cleanup
        behavior of the sibling identity/telemetry/workflow backends) rather
        than removing the whole directory tree, so unrelated files placed in
        storage_dir survive. Useful for test cleanup.
        """
        self._cache.clear()
        if os.path.exists(self.storage_dir):
            for filename in os.listdir(self.storage_dir):
                if filename.endswith(".yaml"):
                    os.remove(os.path.join(self.storage_dir, filename))
@@ -0,0 +1,72 @@
1
+ """
2
+ Local file-based telemetry backend - dumb storage only.
3
+ """
4
+
5
+ import json
6
+ from pathlib import Path
7
+ from typing import Dict, Any
8
+ from ...types import EventTypes
9
+
10
+
11
class LocalTelemetryBackend:
    """File-based telemetry storage backend (dumb storage only).

    Events are appended as one JSON object per line (JSONL), one file per
    execution_id.
    """

    def __init__(self, storage_dir: str = "./orchestration_data/telemetry"):
        """
        Initialize local telemetry backend.

        Args:
            storage_dir: Directory to store telemetry files
        """
        self.storage_dir = Path(storage_dir)
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def log_event(self, execution_id: str, event_type: str, **event_data) -> None:
        """
        Log an event for an execution ID.

        Args:
            execution_id: Unique execution identifier
            event_type: Type of event (use EventTypes constants)
            **event_data: Additional event-specific data (caller provides timestamp)
        """
        event = {
            "event_type": event_type,
            **event_data,
        }

        telemetry_file = self.storage_dir / f"{execution_id}.jsonl"

        # Explicit UTF-8 so the JSONL files are portable across platforms
        # regardless of the locale's default encoding.
        with open(telemetry_file, "a", encoding="utf-8") as f:
            f.write(json.dumps(event) + "\n")

    def get_events(self, execution_id: str) -> list[Dict[str, Any]]:
        """
        Get all events for execution ID.

        Args:
            execution_id: Unique execution identifier

        Returns:
            List of event dictionaries, empty if not found
        """
        telemetry_file = self.storage_dir / f"{execution_id}.jsonl"

        if not telemetry_file.exists():
            return []

        events = []
        with open(telemetry_file, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if line:
                    try:
                        events.append(json.loads(line))
                    except json.JSONDecodeError:
                        # Deliberately skip corrupt lines (e.g. a partial
                        # write from an interrupted append) rather than
                        # failing the whole read.
                        continue
        return events

    def cleanup_all(self) -> None:
        """Delete all telemetry files. Used for test cleanup."""
        for telemetry_file in self.storage_dir.glob("*.jsonl"):
            telemetry_file.unlink()
@@ -0,0 +1,56 @@
1
+ """Local file-based workflow backend - dumb storage only."""
2
+
3
+ import json
4
+ from pathlib import Path
5
+ from typing import Dict, Any
6
+
7
+
8
class LocalWorkflowBackend:
    """File-based workflow storage backend.

    Keeps two artifacts per execution: the workflows registry
    (``<id>_workflows.json``) and the current workflow name
    (``<id>_current.txt``).
    """

    def __init__(self, storage_dir: str = "./orchestration_data/workflows"):
        self.storage_dir = Path(storage_dir)
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def save_workflows_registry(
        self, execution_id: str, workflows_registry: Dict[str, Any]
    ) -> None:
        """Save workflows registry for execution ID."""
        target = self.storage_dir / f"{execution_id}_workflows.json"
        target.write_text(json.dumps(workflows_registry, indent=2, default=str))

    def get_workflows_registry(self, execution_id: str) -> Dict[str, Any]:
        """Get workflows registry for execution ID."""
        target = self.storage_dir / f"{execution_id}_workflows.json"
        if not target.exists():
            return {}
        return json.loads(target.read_text())

    def save_current_workflow_name(self, execution_id: str, workflow_name: str) -> None:
        """Save current workflow name for execution ID."""
        marker = self.storage_dir / f"{execution_id}_current.txt"
        marker.write_text(workflow_name)

    def get_current_workflow_name(self, execution_id: str) -> str:
        """Get current workflow name for execution ID."""
        marker = self.storage_dir / f"{execution_id}_current.txt"
        return marker.read_text().strip() if marker.exists() else ""

    def cleanup_all(self) -> None:
        """Delete all workflow files. Used for test cleanup."""
        stale = list(self.storage_dir.glob("*_workflows.json"))
        stale += list(self.storage_dir.glob("*_current.txt"))
        for path in stale:
            path.unlink()
soe/nodes/__init__.py ADDED
@@ -0,0 +1,13 @@
1
+ """
2
+ Orchestration nodes - different types of workflow nodes
3
+ """
4
+
5
+ from .agent.types import AgentRequest, AgentResponse
6
+ from .tool.types import ToolNodeConfigurationError, ToolParameterError
7
+
8
# Public API re-exported at the soe.nodes package level.
__all__ = [
    "AgentRequest",
    "AgentResponse",
    "ToolNodeConfigurationError",
    "ToolParameterError",
]
@@ -0,0 +1,10 @@
1
+ """
2
+ Agent node - AI reasoning with tool execution capabilities
3
+ """
4
+
5
+ from .types import AgentRequest, AgentResponse
6
+
7
# Public API re-exported at the agent-node package level.
__all__ = [
    "AgentRequest",
    "AgentResponse",
]
@@ -0,0 +1,134 @@
1
+ """
2
+ Agent node factory
3
+
4
+ Orchestrates the multi-stage loop (Router -> Response/Parameter -> Execution)
5
+ using the AgentLoopState for internal tracking.
6
+ """
7
+
8
+ from typing import Dict, Any, Callable, List
9
+ from .state import get_operational_state, prepare_agent_context
10
+ from .lib.loop_state import AgentLoopState
11
+ from .lib.tools import load_tools_and_build_signatures
12
+ from .lib.loop_handlers import handle_finish_action, handle_tool_call_action
13
+ from .stages import execute_router_stage
14
+ from ..lib.signal_emission import emit_completion_signals, handle_llm_failure
15
+ from ...types import BroadcastSignalsCaller, AgentNodeCaller, EventTypes
16
+ from ...lib.register_event import register_event
17
+ from ..lib.context import save_output_to_context
18
+ from ...validation.operational import validate_operational
19
+ from .validation import validate_node_config
20
+
21
+
22
def create_agent_node_caller(
    backends,
    tools: List[Dict[str, Any]],
    call_llm: Callable,
    broadcast_signals_caller: BroadcastSignalsCaller,
) -> AgentNodeCaller:
    """
    Create agent node caller with pre-loaded dependencies.

    Returns a closure that runs the multi-stage agent loop
    (Router -> Response/Parameter -> Execution) for one node execution.

    Args:
        backends: Backend services
        tools: List of tool configs, each with {"function": callable, "max_retries": int, "failure_signal": str}
        call_llm: LLM caller function
        broadcast_signals_caller: Signal broadcaster
    """
    # Index tool configs by their callable's __name__ for O(1) lookup when
    # the router selects a tool. Tools with identical function names would
    # silently overwrite each other here.
    tools_registry: Dict[str, Dict[str, Any]] = {
        tool_config["function"].__name__: tool_config for tool_config in tools
    }

    def execute_agent_node(execution_id: str, node_config: Dict[str, Any]) -> None:
        """Run the agent loop for one node until finish, or fail after max retries."""
        validate_operational(execution_id, backends)
        validate_node_config(node_config)

        operational_state = get_operational_state(execution_id, node_config, backends)

        register_event(backends, execution_id, EventTypes.LLM_CALL, {"stage": "router"})

        # Loop-local tracking: retry budget, accumulated tool responses, errors.
        loop_state = AgentLoopState.create(
            history_key=operational_state.history_key,
            backends=backends,
            max_retries=operational_state.max_retries
        )

        agent_context = prepare_agent_context(execution_id, node_config, backends, loop_state.tool_responses)
        tools_signature = load_tools_and_build_signatures(
            agent_context.tool_names, tools_registry, execution_id, backends
        )

        register_event(
            backends=backends,
            execution_id=execution_id,
            event_type=EventTypes.AGENT_TOOLS_LOADED,
            data={
                "node_name": node_config.get("name", "unknown"),
                "agent_tools": agent_context.tool_names,
                "registry_tools": list(tools_registry.keys()),
            }
        )

        # Each iteration asks the router stage what to do next; the loop ends
        # either by an explicit "finish" (early return) or when the retry
        # budget is exhausted (falls through to handle_llm_failure below).
        while loop_state.can_retry():
            router_response = execute_router_stage(
                call_llm=call_llm,
                agent_context=agent_context,
                loop_state=loop_state,
                tools_signature=tools_signature,
                config=node_config,
                max_retries=operational_state.max_retries,
            )

            if router_response.action == "finish":
                final_response = handle_finish_action(
                    call_llm=call_llm,
                    agent_context=agent_context,
                    loop_state=loop_state,
                    node_config=node_config,
                    backends=backends,
                    operational_state=operational_state,
                )

                save_output_to_context(execution_id, operational_state.output_field, final_response.output, backends)

                # NOTE(review): the output is persisted via save_output_to_context
                # above AND appended to the in-memory operational_state.context
                # copy here — presumably to keep the local snapshot in sync for
                # signal emission below; confirm this is not a double-write.
                if operational_state.output_field:
                    if operational_state.output_field in operational_state.context:
                        operational_state.context[operational_state.output_field].append(final_response.output)
                    else:
                        operational_state.context[operational_state.output_field] = [final_response.output]

                emit_completion_signals(
                    selected_signal=final_response.selected_signal,
                    node_config=node_config,
                    operational_state=operational_state,
                    broadcast_signals_caller=broadcast_signals_caller,
                    execution_id=execution_id,
                )
                return

            elif router_response.action == "call_tool":
                # Tool call feeds its result back into loop_state, then the
                # loop re-enters the router stage for the next decision.
                # Any other action value falls through and re-loops silently.
                handle_tool_call_action(
                    call_llm=call_llm,
                    tool_name=router_response.tool_name,
                    tools_registry=tools_registry,
                    agent_context=agent_context,
                    loop_state=loop_state,
                    node_config=node_config,
                    operational_state=operational_state,
                    backends=backends,
                    execution_id=execution_id,
                )

        # Retry budget exhausted without a "finish" — report the failure,
        # including the most recent recorded error when available.
        error_msg = f"Agent execution exceeded max retries ({loop_state.max_retries})."
        if loop_state.errors:
            error_msg += f" Last error: {loop_state.errors[-1]}"

        handle_llm_failure(
            failure_signal=operational_state.llm_failure_signal,
            error_message=error_msg,
            node_type="agent",
            execution_id=execution_id,
            backends=backends,
            broadcast_signals_caller=broadcast_signals_caller,
        )

    return execute_agent_node