soe-ai 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- soe/broker.py +4 -5
- soe/builtin_tools/__init__.py +39 -0
- soe/builtin_tools/soe_add_signal.py +82 -0
- soe/builtin_tools/soe_call_tool.py +111 -0
- soe/builtin_tools/soe_copy_context.py +80 -0
- soe/builtin_tools/soe_explore_docs.py +290 -0
- soe/builtin_tools/soe_get_available_tools.py +42 -0
- soe/builtin_tools/soe_get_context.py +50 -0
- soe/builtin_tools/soe_get_workflows.py +63 -0
- soe/builtin_tools/soe_inject_node.py +86 -0
- soe/builtin_tools/soe_inject_workflow.py +105 -0
- soe/builtin_tools/soe_list_contexts.py +73 -0
- soe/builtin_tools/soe_remove_node.py +72 -0
- soe/builtin_tools/soe_remove_workflow.py +62 -0
- soe/builtin_tools/soe_update_context.py +54 -0
- soe/docs/_config.yml +10 -0
- soe/docs/advanced_patterns/guide_fanout_and_aggregations.md +318 -0
- soe/docs/advanced_patterns/guide_inheritance.md +435 -0
- soe/docs/advanced_patterns/hybrid_intelligence.md +237 -0
- soe/docs/advanced_patterns/index.md +49 -0
- soe/docs/advanced_patterns/operational.md +781 -0
- soe/docs/advanced_patterns/self_evolving_workflows.md +385 -0
- soe/docs/advanced_patterns/swarm_intelligence.md +211 -0
- soe/docs/builtins/context.md +164 -0
- soe/docs/builtins/explore_docs.md +135 -0
- soe/docs/builtins/tools.md +164 -0
- soe/docs/builtins/workflows.md +199 -0
- soe/docs/guide_00_getting_started.md +341 -0
- soe/docs/guide_01_tool.md +206 -0
- soe/docs/guide_02_llm.md +143 -0
- soe/docs/guide_03_router.md +146 -0
- soe/docs/guide_04_patterns.md +475 -0
- soe/docs/guide_05_agent.md +159 -0
- soe/docs/guide_06_schema.md +397 -0
- soe/docs/guide_07_identity.md +540 -0
- soe/docs/guide_08_child.md +612 -0
- soe/docs/guide_09_ecosystem.md +690 -0
- soe/docs/guide_10_infrastructure.md +427 -0
- soe/docs/guide_11_builtins.md +118 -0
- soe/docs/index.md +104 -0
- soe/docs/primitives/backends.md +281 -0
- soe/docs/primitives/context.md +256 -0
- soe/docs/primitives/node_reference.md +259 -0
- soe/docs/primitives/primitives.md +331 -0
- soe/docs/primitives/signals.md +865 -0
- soe/docs_index.py +1 -1
- soe/init.py +2 -2
- soe/lib/__init__.py +0 -0
- soe/lib/child_context.py +46 -0
- soe/lib/context_fields.py +51 -0
- soe/lib/inheritance.py +172 -0
- soe/lib/jinja_render.py +113 -0
- soe/lib/operational.py +51 -0
- soe/lib/parent_sync.py +71 -0
- soe/lib/register_event.py +75 -0
- soe/lib/schema_validation.py +134 -0
- soe/lib/yaml_parser.py +14 -0
- soe/local_backends/__init__.py +18 -0
- soe/local_backends/factory.py +124 -0
- soe/local_backends/in_memory/context.py +38 -0
- soe/local_backends/in_memory/conversation_history.py +60 -0
- soe/local_backends/in_memory/identity.py +52 -0
- soe/local_backends/in_memory/schema.py +40 -0
- soe/local_backends/in_memory/telemetry.py +38 -0
- soe/local_backends/in_memory/workflow.py +33 -0
- soe/local_backends/storage/context.py +57 -0
- soe/local_backends/storage/conversation_history.py +82 -0
- soe/local_backends/storage/identity.py +118 -0
- soe/local_backends/storage/schema.py +96 -0
- soe/local_backends/storage/telemetry.py +72 -0
- soe/local_backends/storage/workflow.py +56 -0
- soe/nodes/__init__.py +13 -0
- soe/nodes/agent/__init__.py +10 -0
- soe/nodes/agent/factory.py +134 -0
- soe/nodes/agent/lib/loop_handlers.py +150 -0
- soe/nodes/agent/lib/loop_state.py +157 -0
- soe/nodes/agent/lib/prompts.py +65 -0
- soe/nodes/agent/lib/tools.py +35 -0
- soe/nodes/agent/stages/__init__.py +12 -0
- soe/nodes/agent/stages/parameter.py +37 -0
- soe/nodes/agent/stages/response.py +54 -0
- soe/nodes/agent/stages/router.py +37 -0
- soe/nodes/agent/state.py +111 -0
- soe/nodes/agent/types.py +66 -0
- soe/nodes/agent/validation/__init__.py +11 -0
- soe/nodes/agent/validation/config.py +95 -0
- soe/nodes/agent/validation/operational.py +24 -0
- soe/nodes/child/__init__.py +3 -0
- soe/nodes/child/factory.py +61 -0
- soe/nodes/child/state.py +59 -0
- soe/nodes/child/validation/__init__.py +11 -0
- soe/nodes/child/validation/config.py +126 -0
- soe/nodes/child/validation/operational.py +28 -0
- soe/nodes/lib/conditions.py +71 -0
- soe/nodes/lib/context.py +24 -0
- soe/nodes/lib/conversation_history.py +77 -0
- soe/nodes/lib/identity.py +64 -0
- soe/nodes/lib/llm_resolver.py +142 -0
- soe/nodes/lib/output.py +68 -0
- soe/nodes/lib/response_builder.py +91 -0
- soe/nodes/lib/signal_emission.py +79 -0
- soe/nodes/lib/signals.py +54 -0
- soe/nodes/lib/tools.py +100 -0
- soe/nodes/llm/__init__.py +7 -0
- soe/nodes/llm/factory.py +103 -0
- soe/nodes/llm/state.py +76 -0
- soe/nodes/llm/types.py +12 -0
- soe/nodes/llm/validation/__init__.py +11 -0
- soe/nodes/llm/validation/config.py +89 -0
- soe/nodes/llm/validation/operational.py +23 -0
- soe/nodes/router/__init__.py +3 -0
- soe/nodes/router/factory.py +37 -0
- soe/nodes/router/state.py +32 -0
- soe/nodes/router/validation/__init__.py +11 -0
- soe/nodes/router/validation/config.py +58 -0
- soe/nodes/router/validation/operational.py +16 -0
- soe/nodes/tool/factory.py +66 -0
- soe/nodes/tool/lib/__init__.py +11 -0
- soe/nodes/tool/lib/conditions.py +35 -0
- soe/nodes/tool/lib/failure.py +28 -0
- soe/nodes/tool/lib/parameters.py +67 -0
- soe/nodes/tool/state.py +66 -0
- soe/nodes/tool/types.py +27 -0
- soe/nodes/tool/validation/__init__.py +15 -0
- soe/nodes/tool/validation/config.py +132 -0
- soe/nodes/tool/validation/operational.py +16 -0
- soe/types.py +40 -28
- soe/validation/__init__.py +18 -0
- soe/validation/config.py +195 -0
- soe/validation/jinja.py +54 -0
- soe/validation/operational.py +110 -0
- {soe_ai-0.1.0.dist-info → soe_ai-0.1.2.dist-info}/METADATA +72 -9
- soe_ai-0.1.2.dist-info/RECORD +137 -0
- {soe_ai-0.1.0.dist-info → soe_ai-0.1.2.dist-info}/WHEEL +1 -1
- soe/validation.py +0 -8
- soe_ai-0.1.0.dist-info/RECORD +0 -11
- {soe_ai-0.1.0.dist-info → soe_ai-0.1.2.dist-info}/licenses/LICENSE +0 -0
- {soe_ai-0.1.0.dist-info → soe_ai-0.1.2.dist-info}/top_level.txt +0 -0
soe/nodes/agent/types.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Agent node models and data structures
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, List, Literal, Optional
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class AgentRequest(BaseModel):
    """Request sent to agent - includes everything needed to build a complete prompt"""

    agent_id: str
    prompt: str
    agent_config: Dict[str, Any]
    # default_factory instead of a literal `{}` default: makes the fresh-dict-per-
    # instance behavior explicit and satisfies mutable-default linters.
    tool_responses: Dict[str, Any] = Field(default_factory=dict)
|
|
17
|
+
|
|
18
|
+
class AgentResponse(BaseModel):
    """Validated agent response with mutually exclusive types:
    - Tool response: Only tool_calls + minimal context
    - Signal response: Only emitted_signals + context (THE END)
    - Clarification response: Only needs_clarification + message
    """

    output: Any = None

    # default_factory instead of literal `{}`/`[]` defaults: explicit
    # per-instance containers, and no mutable-default lint warnings.
    tool_calls: Dict[str, Any] = Field(default_factory=dict)
    emitted_signals: List[str] = Field(default_factory=list)
    needs_clarification: bool = False
    clarification_message: str = ""
|
+
|
|
33
|
+
class RouterInput(BaseModel):
    """Input model for the Router stage prompt."""
    instructions: str = Field(description="State-specific instructions for the router")
    # Task the agent is working on, rendered into the router prompt.
    task_description: str
    # Current execution context, pre-rendered as a string.
    context: str
    # Rendered listing of the tools the router may choose from.
    available_tools: str
    # Prior turns as text; empty when no conversation history is configured.
    conversation_history: str = ""
|
41
|
+
|
|
42
|
+
class RouterResponse(BaseModel):
    """Output model for the Router stage."""
    # The router either invokes a tool next or finishes the loop.
    action: Literal["call_tool", "finish"]
    tool_name: Optional[str] = Field(None, description="Name of the tool to call. Required if action is 'call_tool'.")
47
|
+
|
|
48
|
+
class ParameterInput(BaseModel):
    """Input for Parameter stage prompt."""
    task_description: str
    # Current execution context, pre-rendered as a string.
    context: str
    # Tool chosen by the Router stage; parameters are generated for this tool.
    tool_name: str
    # Prior turns as text; empty when no conversation history is configured.
    conversation_history: str = ""
|
55
|
+
|
|
56
|
+
class ResponseStageInput(BaseModel):
    """Input model for the Response stage prompt."""
    task_description: str
    # Current execution context, pre-rendered as a string.
    context: str
    # Prior turns as text; empty when no conversation history is configured.
    conversation_history: str = ""
|
62
|
+
|
|
63
|
+
class FinalResponse(BaseModel):
    """Standardized output from the Response stage."""
    # Arbitrary payload produced by the agent (shape is agent-defined; required).
    output: Any
    # Signal the agent chose to emit; None means no explicit selection.
    selected_signal: Optional[str] = None
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Agent node validation.
|
|
3
|
+
|
|
4
|
+
- config.py: Config validation at orchestration start
|
|
5
|
+
- operational.py: Runtime validation before execution (fail-fast)
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from .config import validate_node_config
|
|
9
|
+
from .operational import validate_agent_node_runtime
|
|
10
|
+
|
|
11
|
+
__all__ = ["validate_node_config", "validate_agent_node_runtime"]
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Agent node configuration validation.
|
|
3
|
+
|
|
4
|
+
Called once at orchestration start, not during node execution.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import Dict, Any, List
|
|
8
|
+
from ....types import WorkflowValidationError
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def validate_node_config(node_config: Dict[str, Any]) -> None:
    """
    Validate agent node configuration exhaustively.
    Called once at orchestration start, not during node execution.

    Args:
        node_config: Raw node configuration mapping from the workflow definition.

    Raises:
        WorkflowValidationError: If configuration is invalid
    """
    # --- Required fields -------------------------------------------------
    event_triggers = node_config.get("event_triggers")
    if not event_triggers:
        raise WorkflowValidationError(
            "'event_triggers' is required - specify which signals activate this agent"
        )
    if not isinstance(event_triggers, list):
        raise WorkflowValidationError(
            "'event_triggers' must be a list, e.g., [\"START\", \"RETRY\"]"
        )

    if not node_config.get("prompt"):
        raise WorkflowValidationError(
            "'prompt' is required - provide the agent's task description or instructions"
        )

    # --- Legacy option: rejected outright so stale configs fail loudly ---
    if node_config.get("input_fields") is not None:
        raise WorkflowValidationError(
            "'input_fields' is no longer supported for Agent nodes. "
            "Use Jinja syntax in prompts instead: {{ context.field_name }}"
        )

    # --- Optional fields, type-checked when present ----------------------
    output_field = node_config.get("output_field")
    if output_field is not None:
        if not isinstance(output_field, str):
            raise WorkflowValidationError(
                "'output_field' must be a string - the context field name to store the agent's output"
            )
        if output_field == "__operational__":
            raise WorkflowValidationError(
                "'output_field' cannot be '__operational__' - this is a reserved system field"
            )

    # Zero is valid (the check is `< 0`), so the message says "non-negative";
    # the previous "positive" wording contradicted the accepted range.
    retries = node_config.get("retries")
    if retries is not None:
        if not isinstance(retries, int) or retries < 0:
            raise WorkflowValidationError(
                "'retries' must be a non-negative integer (default is 3)"
            )

    # Each emission needs a 'signal_name'; 'condition', when present, must be
    # a jinja string (evaluated at runtime to decide whether the signal fires).
    event_emissions = node_config.get("event_emissions")
    if event_emissions is not None:
        if not isinstance(event_emissions, list):
            raise WorkflowValidationError(
                "'event_emissions' must be a list of signal definitions"
            )
        for i, emission in enumerate(event_emissions):
            if not isinstance(emission, dict):
                raise WorkflowValidationError(
                    f"Each event_emission must be an object with 'signal_name', got invalid item at position {i + 1}"
                )
            if not emission.get("signal_name"):
                raise WorkflowValidationError(
                    f"Event emission at position {i + 1} is missing 'signal_name'"
                )
            condition = emission.get("condition")
            if condition is not None and not isinstance(condition, str):
                raise WorkflowValidationError(
                    f"Event emission at position {i + 1} has invalid 'condition' - must be a jinja string"
                )

    tools = node_config.get("tools")
    if tools is not None and not isinstance(tools, list):
        raise WorkflowValidationError(
            "'tools' must be a list of tool names available to the agent"
        )

    identity = node_config.get("identity")
    if identity is not None and not isinstance(identity, str):
        raise WorkflowValidationError(
            "'identity' must be a string - used to persist conversation history across executions"
        )

    llm_failure_signal = node_config.get("llm_failure_signal")
    if llm_failure_signal is not None and not isinstance(llm_failure_signal, str):
        raise WorkflowValidationError(
            "'llm_failure_signal' must be a string - the signal to emit when LLM call fails"
        )
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Agent node operational validation.
|
|
2
|
+
|
|
3
|
+
Calls shared operational validation + Agent-specific backend validation.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, Any
|
|
7
|
+
from ....types import Backends
|
|
8
|
+
from ....validation.operational import validate_operational, OperationalValidationError
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def validate_agent_node_runtime(
    execution_id: str,
    backends: Backends,
) -> Dict[str, Any]:
    """Validate runtime state for Agent node.

    Runs the shared operational checks, then probes the workflow backend
    (agents read both the current workflow name and the registry).

    Returns:
        The execution context produced by the shared operational validation.

    Raises:
        OperationalValidationError: If the workflow backend is unreachable.
    """
    # Shared checks first: context exists and carries operational metadata.
    context = validate_operational(execution_id, backends)

    # Agent-specific fail-fast probe of the workflow backend.
    workflow_backend = backends.workflow
    try:
        workflow_backend.get_current_workflow_name(execution_id)
        workflow_backend.get_workflows_registry(execution_id)
    except Exception as e:
        raise OperationalValidationError(f"Cannot access workflow backend: {e}")

    return context
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Child node factory
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import copy
|
|
6
|
+
import time
|
|
7
|
+
from typing import Dict, Any
|
|
8
|
+
|
|
9
|
+
from ...validation.operational import validate_operational
|
|
10
|
+
from .validation import validate_node_config
|
|
11
|
+
from .state import get_operational_state
|
|
12
|
+
from ...lib.register_event import register_event
|
|
13
|
+
from ...types import ChildNodeCaller, OrchestrateCaller, EventTypes
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def create_child_node_caller(
    backends,
    orchestrate_caller: OrchestrateCaller,
) -> ChildNodeCaller:
    """Create child node caller with pre-loaded dependencies.

    Returns a closure that, given an execution id and node config, starts one
    child orchestration - or one per fan-out item when fan-out is configured.
    """

    def execute_child_node(id: str, node_config: Dict[str, Any]) -> None:
        # Fail fast: runtime state and config are validated before any spawn.
        validate_operational(id, backends)
        validate_node_config(node_config)

        # Gather everything the child needs (registry copy, initial context,
        # fan-out items) in one state object.
        state = get_operational_state(id, node_config, backends)

        register_event(
            backends, id, EventTypes.NODE_EXECUTION,
            {
                "node_type": "child",
                "child_workflow": state.child_workflow_name,
                "parent_id": id,
            }
        )

        if state.fan_out_items and state.child_input_field:
            # Fan-out: spawn one child per item, each with its own deep-copied
            # context so children cannot observe each other's mutations.
            for i, item in enumerate(state.fan_out_items):
                child_context = copy.deepcopy(state.child_initial_context)
                child_context[state.child_input_field] = item

                # Throttle spawns; the first child starts immediately.
                if i > 0 and state.spawn_interval > 0:
                    time.sleep(state.spawn_interval)

                orchestrate_caller(
                    config=state.workflows_registry,
                    initial_workflow_name=state.child_workflow_name,
                    initial_signals=state.child_initial_signals,
                    initial_context=child_context,
                    backends=backends,
                )
        else:
            # Single child: start it with the prepared initial context as-is.
            orchestrate_caller(
                config=state.workflows_registry,
                initial_workflow_name=state.child_workflow_name,
                initial_signals=state.child_initial_signals,
                initial_context=state.child_initial_context,
                backends=backends,
            )

    return execute_child_node
soe/nodes/child/state.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Child node state retrieval."""
|
|
2
|
+
|
|
3
|
+
import copy
|
|
4
|
+
from typing import Any, Dict, List, Optional
|
|
5
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
6
|
+
|
|
7
|
+
from ...types import Backends
|
|
8
|
+
from ...lib.context_fields import get_accumulated
|
|
9
|
+
from ...lib.child_context import prepare_child_context
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ChildOperationalState(BaseModel):
    """All data needed for child node execution."""
    # Context values may be arbitrary Python objects, not just pydantic types.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    context: Dict[str, Any]  # raw parent context (including __operational__)
    main_execution_id: str  # root execution id, stable across sub-orchestrations
    child_workflow_name: str  # workflow to start as the child
    child_initial_signals: List[str]  # signals the child starts with
    child_initial_context: Dict[str, Any]  # prepared initial context for the child
    workflows_registry: Dict[str, Any]  # deep-copied registry passed to the child
    fan_out_items: List[Any] = Field(default_factory=list)  # one child is spawned per item
    child_input_field: Optional[str] = None  # child context field that receives each item
    spawn_interval: float = 0.0  # seconds to sleep between fan-out spawns
|
+
|
|
27
|
+
def get_operational_state(
    execution_id: str,
    node_config: Dict[str, Any],
    backends: Backends,
) -> ChildOperationalState:
    """Retrieve all state needed for child node execution.

    Args:
        execution_id: Current (parent) execution id.
        node_config: Child node configuration (already validated).
        backends: Backend services for context and workflow access.

    Returns:
        A ChildOperationalState bundling context, registry, and fan-out data.
    """
    context = backends.context.get_context(execution_id)
    # Assumes '__operational__' with 'main_execution_id' is always present here;
    # operational validation runs before this in the factory.
    operational = context["__operational__"]
    main_execution_id = operational["main_execution_id"]

    # Build the initial context the child workflow starts with.
    child_initial_context = prepare_child_context(
        parent_context=context,
        node_config=node_config,
        parent_execution_id=execution_id,
        main_execution_id=main_execution_id,
    )

    # Deep copy so child orchestrations cannot mutate the shared registry.
    workflows_registry = copy.deepcopy(backends.workflow.get_workflows_registry(execution_id))

    # Fan-out: collect the accumulated items for the configured field, if any.
    fan_out_field = node_config.get("fan_out_field")
    fan_out_items = get_accumulated(context, fan_out_field) if fan_out_field else []

    return ChildOperationalState(
        context=context,
        main_execution_id=main_execution_id,
        child_workflow_name=node_config["child_workflow_name"],
        child_initial_signals=node_config["child_initial_signals"],
        child_initial_context=child_initial_context,
        workflows_registry=workflows_registry,
        fan_out_items=fan_out_items,
        child_input_field=node_config.get("child_input_field"),
        spawn_interval=node_config.get("spawn_interval", 0.0),
    )
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Child node validation.
|
|
3
|
+
|
|
4
|
+
- config.py: Config validation at orchestration start
|
|
5
|
+
- operational.py: Runtime validation before execution (fail-fast)
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from .config import validate_node_config
|
|
9
|
+
from .operational import validate_child_node_runtime
|
|
10
|
+
|
|
11
|
+
__all__ = ["validate_node_config", "validate_child_node_runtime"]
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Child node configuration validation.
|
|
3
|
+
|
|
4
|
+
Called once at orchestration start, not during node execution.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import Dict, Any
|
|
8
|
+
from ....types import WorkflowValidationError
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def validate_node_config(node_config: Dict[str, Any]) -> None:
    """
    Validate sub-orchestration node configuration exhaustively.
    Called once at orchestration start, not during node execution.

    Raises:
        WorkflowValidationError: If configuration is invalid
    """
    # --- Required fields -------------------------------------------------
    child_workflow_name = node_config.get("child_workflow_name")
    if not child_workflow_name:
        raise WorkflowValidationError(
            "'child_workflow_name' is required - specify which workflow to start as a child"
        )
    if not isinstance(child_workflow_name, str):
        raise WorkflowValidationError(
            "'child_workflow_name' must be a string"
        )

    child_initial_signals = node_config.get("child_initial_signals")
    if not child_initial_signals:
        raise WorkflowValidationError(
            "'child_initial_signals' is required - specify which signals to start the child workflow with"
        )
    if not isinstance(child_initial_signals, list):
        raise WorkflowValidationError(
            "'child_initial_signals' must be a list of signal names"
        )

    event_triggers = node_config.get("event_triggers")
    if not event_triggers:
        raise WorkflowValidationError(
            "'event_triggers' is required - specify which signals trigger the start of the child workflow"
        )
    if not isinstance(event_triggers, list):
        raise WorkflowValidationError(
            "'event_triggers' must be a list of signal names, e.g., [\"START_CHILD\"]"
        )

    # --- Optional child -> parent propagation ----------------------------
    signals_to_parent = node_config.get("signals_to_parent")
    if signals_to_parent is not None:
        if not isinstance(signals_to_parent, list):
            raise WorkflowValidationError(
                "'signals_to_parent' must be a list of signal names that should propagate from child to parent"
            )
        for signal in signals_to_parent:
            if not isinstance(signal, str):
                raise WorkflowValidationError(
                    f"All items in 'signals_to_parent' must be strings. Found: {type(signal).__name__}"
                )

    context_updates_to_parent = node_config.get("context_updates_to_parent")
    if context_updates_to_parent is not None:
        if not isinstance(context_updates_to_parent, list):
            raise WorkflowValidationError(
                "'context_updates_to_parent' must be a list of context key names that should propagate from child to parent"
            )
        for key in context_updates_to_parent:
            if not isinstance(key, str):
                raise WorkflowValidationError(
                    f"All items in 'context_updates_to_parent' must be strings. Found: {type(key).__name__}"
                )

    input_fields = node_config.get("input_fields")
    if input_fields is not None and not isinstance(input_fields, list):
        raise WorkflowValidationError(
            "'input_fields' must be a list of context field names to pass to the child workflow"
        )

    # --- Fan-out: spawning one child per item of a context field ---------
    fan_out_field = node_config.get("fan_out_field")
    if fan_out_field is not None:
        if not isinstance(fan_out_field, str):
            raise WorkflowValidationError(
                "'fan_out_field' must be a string (the context field to iterate over)"
            )
        # 'child_input_field' is only required when fan-out is configured.
        child_input_field = node_config.get("child_input_field")
        if not child_input_field:
            raise WorkflowValidationError(
                "'child_input_field' is required when 'fan_out_field' is set - "
                "specify which field in child context receives each item"
            )
        if not isinstance(child_input_field, str):
            raise WorkflowValidationError(
                "'child_input_field' must be a string"
            )

    spawn_interval = node_config.get("spawn_interval")
    if spawn_interval is not None:
        if not isinstance(spawn_interval, (int, float)):
            raise WorkflowValidationError(
                "'spawn_interval' must be a number (seconds to sleep between spawns)"
            )
        if spawn_interval < 0:
            raise WorkflowValidationError(
                "'spawn_interval' must be non-negative"
            )

    # --- Signal emissions: each needs a 'signal_name'; optional jinja
    #     'condition' is evaluated at runtime ------------------------------
    event_emissions = node_config.get("event_emissions")
    if event_emissions is not None:
        if not isinstance(event_emissions, list):
            raise WorkflowValidationError(
                "'event_emissions' must be a list of signal definitions"
            )
        for i, emission in enumerate(event_emissions):
            if not isinstance(emission, dict):
                raise WorkflowValidationError(
                    f"Each event_emission must be an object with 'signal_name', got invalid item at position {i + 1}"
                )
            if not emission.get("signal_name"):
                raise WorkflowValidationError(
                    f"Event emission at position {i + 1} is missing 'signal_name'"
                )
            condition = emission.get("condition")
            if condition is not None and not isinstance(condition, str):
                raise WorkflowValidationError(
                    f"Event emission at position {i + 1} has invalid 'condition' - must be a jinja string"
                )
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"""Child node operational validation.
|
|
2
|
+
|
|
3
|
+
Calls shared operational validation + Child-specific backend validation.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, Any
|
|
7
|
+
from ....types import Backends
|
|
8
|
+
from ....validation.operational import validate_operational, OperationalValidationError
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def validate_child_node_runtime(
    execution_id: str,
    backends: Backends,
) -> Dict[str, Any]:
    """Validate runtime state for Child node.

    Runs the shared operational checks, then verifies that the workflow
    registry is reachable and non-empty (the child orchestration is started
    from it).

    Returns:
        The execution context produced by the shared operational validation.

    Raises:
        OperationalValidationError: If the registry is unreachable or empty.
    """
    context = validate_operational(execution_id, backends)

    # Child-specific: the registry must exist to start a child workflow.
    try:
        registry = backends.workflow.get_workflows_registry(execution_id)
    except Exception as e:
        raise OperationalValidationError(f"Cannot access workflow backend: {e}")

    if not registry:
        raise OperationalValidationError(
            f"No workflows_registry found for execution_id '{execution_id}'"
        )

    return context
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Shared condition evaluation for nodes that emit signals.
|
|
3
|
+
Used by Router, LLM, Agent, and Tool nodes.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import re
|
|
7
|
+
from typing import Dict, List, Any
|
|
8
|
+
from jinja2 import Environment
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _create_accumulated_filter(full_context: Dict[str, Any]):
|
|
12
|
+
"""Create an accumulated filter that returns full history for a field."""
|
|
13
|
+
def accumulated_filter(value):
|
|
14
|
+
"""
|
|
15
|
+
Return the full accumulated history list for a context field.
|
|
16
|
+
|
|
17
|
+
If history has exactly one entry and it's a list, returns that list
|
|
18
|
+
(common case: initial context passed a list as value).
|
|
19
|
+
Otherwise returns the history entries.
|
|
20
|
+
"""
|
|
21
|
+
for key, hist_list in full_context.items():
|
|
22
|
+
if key.startswith("__"):
|
|
23
|
+
continue
|
|
24
|
+
if isinstance(hist_list, list) and hist_list and hist_list[-1] == value:
|
|
25
|
+
if len(hist_list) == 1 and isinstance(hist_list[0], list):
|
|
26
|
+
return hist_list[0]
|
|
27
|
+
return hist_list
|
|
28
|
+
return [value] if value is not None else []
|
|
29
|
+
|
|
30
|
+
return accumulated_filter
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def evaluate_conditions(
    event_emissions: List[Dict[str, Any]],
    render_context: Dict[str, Any],
    full_context: Dict[str, Any] = None,
) -> List[str]:
    """
    Evaluate jinja conditions and return signals that pass.

    Args:
        event_emissions: List of emission configs with signal_name and optional condition
        render_context: Variables for jinja (e.g., {"context": ctx} or {"result": res, "context": ctx})
        full_context: The raw context with history lists (for accumulated filter)

    Returns:
        List of signal names that passed their conditions (or had no condition)
    """
    # Fresh environment per call so the 'accumulated' filter closes over
    # THIS call's context rather than a stale one.
    jinja_env = Environment()

    if full_context:
        jinja_env.filters["accumulated"] = _create_accumulated_filter(full_context)

    filtered_signals = []

    for emission in event_emissions:
        signal_name = emission.get("signal_name")
        condition = emission.get("condition", "")

        # No condition, or no jinja expression present -> signal always emits.
        # NOTE(review): the regex is not compiled with re.DOTALL, so a
        # "{{ ... }}" expression spanning multiple lines is treated as
        # "no expression" and emits unconditionally - confirm whether
        # multi-line conditions are expected.
        if not condition or not re.search(r"\{\{.*\}\}", condition):
            filtered_signals.append(signal_name)
            continue

        try:
            result = jinja_env.from_string(condition).render(**render_context)
            # Rendered output is a string: common falsy spellings mean "do not emit".
            if result and result.strip().lower() not in ["false", "0", "none", ""]:
                filtered_signals.append(signal_name)
        except Exception:
            # Best-effort by design: a failing condition simply means the
            # signal is not emitted; it must not abort the node.
            pass

    return filtered_signals
soe/nodes/lib/context.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Context output utilities shared across nodes."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, Optional
|
|
4
|
+
from ...lib.parent_sync import sync_context_to_parent
|
|
5
|
+
from ...lib.context_fields import set_field, get_field
|
|
6
|
+
from ...types import Backends
|
|
7
|
+
|
|
8
|
+
__all__ = ["set_field", "get_field", "save_output_to_context"]
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def save_output_to_context(
    execution_id: str,
    output_field: Optional[str],
    output_value: Any,
    backends: Backends,
) -> None:
    """Persist a node's output into its context field, then sync to the parent.

    A missing/empty target field or a None value is a no-op: there is nothing
    to store, and parent sync is skipped entirely.
    """
    # Guard clause: no target field configured, or the node produced nothing.
    if not output_field:
        return
    if output_value is None:
        return

    ctx = backends.context.get_context(execution_id)
    set_field(ctx, output_field, output_value)
    backends.context.save_context(execution_id, ctx)
    # Propagate just this field to the parent context when configured.
    sync_context_to_parent(ctx, [output_field], backends)
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Shared conversation history utilities for LLM and Agent nodes.
|
|
3
|
+
|
|
4
|
+
This module handles conversation history retrieval, formatting, and saving
|
|
5
|
+
for nodes that support identity-based conversation persistence.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, Any, List, Optional, Tuple
|
|
9
|
+
from ...types import Backends
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def get_conversation_history(
    execution_id: str,
    identity: Optional[str],
    backends: Backends,
) -> Tuple[Optional[str], List[Dict[str, Any]]]:
    """
    Get conversation history and history key for a node with identity.

    Identity enables conversation history. The key is main_execution_id,
    ensuring history persists across sub-orchestration boundaries.

    When the history is empty and an identity backend is configured,
    the identity's system prompt is injected as the first message.
    Both identity and context_schema are keyed by main_execution_id.

    Args:
        execution_id: Current execution ID
        identity: Identity key for conversation history
        backends: Backend services

    Returns:
        Tuple of (history_key, conversation_history list)
        history_key is None if no identity or no conversation backend
    """
    # History is disabled entirely without an identity or a history backend.
    if not identity or not backends.conversation_history:
        return (None, [])

    # Key by the MAIN execution id so child orchestrations share the thread;
    # fall back to the current id when operational metadata is absent.
    context = backends.context.get_context(execution_id)
    main_id = context.get("__operational__", {}).get("main_execution_id", execution_id)
    history = backends.conversation_history.get_conversation_history(main_id)

    # First turn for this identity: seed the history with its system prompt
    # (if one is registered) and persist the seeded history immediately.
    if not history and backends.identity:
        identities = backends.identity.get_identities(main_id)
        if identities and identity in identities:
            system_prompt = identities[identity]
            if system_prompt:
                history = [{"role": "system", "content": system_prompt}]
                backends.conversation_history.save_conversation_history(main_id, history)

    return (main_id, history)
|
53
|
+
|
|
54
|
+
def format_conversation_history(conversation_history: List[Dict[str, Any]]) -> str:
    """Render conversation history as newline-separated '[role]: content' lines.

    Messages missing a role render as '[unknown]'; missing content renders
    as an empty string. An empty history yields an empty string.
    """
    if not conversation_history:
        return ""

    rendered_lines = []
    for message in conversation_history:
        role = message.get('role', 'unknown')
        content = message.get('content', '')
        rendered_lines.append(f"[{role}]: {content}")
    return "\n".join(rendered_lines)
|
63
|
+
|
|
64
|
+
def save_conversation_turn(
    history_key: Optional[str],
    conversation_history: List[Dict[str, Any]],
    user_content: str,
    assistant_content: str,
    backends: Backends,
) -> None:
    """Append one user/assistant exchange to history and persist it.

    Mutates `conversation_history` in place, then saves the whole list
    under `history_key`. Silently does nothing when history is disabled
    (no key or no conversation backend).
    """
    # History disabled: nothing to record.
    if not history_key:
        return
    if not backends.conversation_history:
        return

    # Assistant content may be any type (e.g. structured output) - store as text.
    conversation_history.extend(
        (
            {"role": "user", "content": user_content},
            {"role": "assistant", "content": str(assistant_content)},
        )
    )
    backends.conversation_history.save_conversation_history(history_key, conversation_history)