soe-ai 0.2.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- soe/__init__.py +50 -0
- soe/broker.py +168 -0
- soe/builtin_tools/__init__.py +51 -0
- soe/builtin_tools/soe_add_signal.py +82 -0
- soe/builtin_tools/soe_call_tool.py +111 -0
- soe/builtin_tools/soe_copy_context.py +80 -0
- soe/builtin_tools/soe_explore_docs.py +290 -0
- soe/builtin_tools/soe_get_available_tools.py +42 -0
- soe/builtin_tools/soe_get_context.py +50 -0
- soe/builtin_tools/soe_get_context_schema.py +56 -0
- soe/builtin_tools/soe_get_identities.py +63 -0
- soe/builtin_tools/soe_get_workflows.py +63 -0
- soe/builtin_tools/soe_inject_context_schema_field.py +80 -0
- soe/builtin_tools/soe_inject_identity.py +64 -0
- soe/builtin_tools/soe_inject_node.py +86 -0
- soe/builtin_tools/soe_inject_workflow.py +105 -0
- soe/builtin_tools/soe_list_contexts.py +73 -0
- soe/builtin_tools/soe_remove_context_schema_field.py +61 -0
- soe/builtin_tools/soe_remove_identity.py +61 -0
- soe/builtin_tools/soe_remove_node.py +72 -0
- soe/builtin_tools/soe_remove_workflow.py +62 -0
- soe/builtin_tools/soe_update_context.py +54 -0
- soe/docs/_config.yml +10 -0
- soe/docs/advanced_patterns/guide_fanout_and_aggregations.md +318 -0
- soe/docs/advanced_patterns/guide_inheritance.md +435 -0
- soe/docs/advanced_patterns/hybrid_intelligence.md +237 -0
- soe/docs/advanced_patterns/index.md +49 -0
- soe/docs/advanced_patterns/operational.md +781 -0
- soe/docs/advanced_patterns/self_evolving_workflows.md +385 -0
- soe/docs/advanced_patterns/swarm_intelligence.md +211 -0
- soe/docs/builtins/context.md +164 -0
- soe/docs/builtins/context_schema.md +158 -0
- soe/docs/builtins/identity.md +139 -0
- soe/docs/builtins/soe_explore_docs.md +135 -0
- soe/docs/builtins/tools.md +164 -0
- soe/docs/builtins/workflows.md +199 -0
- soe/docs/guide_00_getting_started.md +341 -0
- soe/docs/guide_01_tool.md +206 -0
- soe/docs/guide_02_llm.md +143 -0
- soe/docs/guide_03_router.md +146 -0
- soe/docs/guide_04_patterns.md +475 -0
- soe/docs/guide_05_agent.md +159 -0
- soe/docs/guide_06_schema.md +397 -0
- soe/docs/guide_07_identity.md +540 -0
- soe/docs/guide_08_child.md +612 -0
- soe/docs/guide_09_ecosystem.md +690 -0
- soe/docs/guide_10_infrastructure.md +427 -0
- soe/docs/guide_11_builtins.md +126 -0
- soe/docs/index.md +104 -0
- soe/docs/primitives/backends.md +281 -0
- soe/docs/primitives/context.md +256 -0
- soe/docs/primitives/node_reference.md +259 -0
- soe/docs/primitives/primitives.md +331 -0
- soe/docs/primitives/signals.md +865 -0
- soe/docs_index.py +2 -0
- soe/init.py +165 -0
- soe/lib/__init__.py +0 -0
- soe/lib/child_context.py +46 -0
- soe/lib/context_fields.py +51 -0
- soe/lib/inheritance.py +172 -0
- soe/lib/jinja_render.py +113 -0
- soe/lib/operational.py +51 -0
- soe/lib/parent_sync.py +71 -0
- soe/lib/register_event.py +75 -0
- soe/lib/schema_validation.py +134 -0
- soe/lib/yaml_parser.py +14 -0
- soe/local_backends/__init__.py +18 -0
- soe/local_backends/factory.py +124 -0
- soe/local_backends/in_memory/context.py +38 -0
- soe/local_backends/in_memory/conversation_history.py +60 -0
- soe/local_backends/in_memory/identity.py +52 -0
- soe/local_backends/in_memory/schema.py +40 -0
- soe/local_backends/in_memory/telemetry.py +38 -0
- soe/local_backends/in_memory/workflow.py +33 -0
- soe/local_backends/storage/context.py +57 -0
- soe/local_backends/storage/conversation_history.py +82 -0
- soe/local_backends/storage/identity.py +118 -0
- soe/local_backends/storage/schema.py +96 -0
- soe/local_backends/storage/telemetry.py +72 -0
- soe/local_backends/storage/workflow.py +56 -0
- soe/nodes/__init__.py +13 -0
- soe/nodes/agent/__init__.py +10 -0
- soe/nodes/agent/factory.py +134 -0
- soe/nodes/agent/lib/loop_handlers.py +150 -0
- soe/nodes/agent/lib/loop_state.py +157 -0
- soe/nodes/agent/lib/prompts.py +65 -0
- soe/nodes/agent/lib/tools.py +35 -0
- soe/nodes/agent/stages/__init__.py +12 -0
- soe/nodes/agent/stages/parameter.py +37 -0
- soe/nodes/agent/stages/response.py +54 -0
- soe/nodes/agent/stages/router.py +37 -0
- soe/nodes/agent/state.py +111 -0
- soe/nodes/agent/types.py +66 -0
- soe/nodes/agent/validation/__init__.py +11 -0
- soe/nodes/agent/validation/config.py +95 -0
- soe/nodes/agent/validation/operational.py +24 -0
- soe/nodes/child/__init__.py +3 -0
- soe/nodes/child/factory.py +61 -0
- soe/nodes/child/state.py +59 -0
- soe/nodes/child/validation/__init__.py +11 -0
- soe/nodes/child/validation/config.py +126 -0
- soe/nodes/child/validation/operational.py +28 -0
- soe/nodes/lib/conditions.py +71 -0
- soe/nodes/lib/context.py +24 -0
- soe/nodes/lib/conversation_history.py +77 -0
- soe/nodes/lib/identity.py +64 -0
- soe/nodes/lib/llm_resolver.py +142 -0
- soe/nodes/lib/output.py +68 -0
- soe/nodes/lib/response_builder.py +91 -0
- soe/nodes/lib/signal_emission.py +79 -0
- soe/nodes/lib/signals.py +54 -0
- soe/nodes/lib/tools.py +100 -0
- soe/nodes/llm/__init__.py +7 -0
- soe/nodes/llm/factory.py +103 -0
- soe/nodes/llm/state.py +76 -0
- soe/nodes/llm/types.py +12 -0
- soe/nodes/llm/validation/__init__.py +11 -0
- soe/nodes/llm/validation/config.py +89 -0
- soe/nodes/llm/validation/operational.py +23 -0
- soe/nodes/router/__init__.py +3 -0
- soe/nodes/router/factory.py +37 -0
- soe/nodes/router/state.py +32 -0
- soe/nodes/router/validation/__init__.py +11 -0
- soe/nodes/router/validation/config.py +58 -0
- soe/nodes/router/validation/operational.py +16 -0
- soe/nodes/tool/factory.py +66 -0
- soe/nodes/tool/lib/__init__.py +11 -0
- soe/nodes/tool/lib/conditions.py +35 -0
- soe/nodes/tool/lib/failure.py +28 -0
- soe/nodes/tool/lib/parameters.py +67 -0
- soe/nodes/tool/state.py +66 -0
- soe/nodes/tool/types.py +27 -0
- soe/nodes/tool/validation/__init__.py +15 -0
- soe/nodes/tool/validation/config.py +132 -0
- soe/nodes/tool/validation/operational.py +16 -0
- soe/types.py +209 -0
- soe/validation/__init__.py +18 -0
- soe/validation/config.py +195 -0
- soe/validation/jinja.py +54 -0
- soe/validation/operational.py +110 -0
- soe_ai-0.2.0b1.dist-info/METADATA +262 -0
- soe_ai-0.2.0b1.dist-info/RECORD +145 -0
- soe_ai-0.2.0b1.dist-info/WHEEL +5 -0
- soe_ai-0.2.0b1.dist-info/licenses/LICENSE +21 -0
- soe_ai-0.2.0b1.dist-info/top_level.txt +1 -0
soe/init.py
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Convenience initialization for SOE.
|
|
3
|
+
|
|
4
|
+
This module provides easy setup functions for common use cases.
|
|
5
|
+
Use these to quickly get started without manually wiring nodes and backends.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import copy
|
|
9
|
+
from typing import Callable, Dict, Any, Tuple, Optional, List, Union
|
|
10
|
+
from .broker import broadcast_signals, orchestrate
|
|
11
|
+
from .nodes.router.factory import create_router_node_caller
|
|
12
|
+
from .nodes.llm.factory import create_llm_node_caller
|
|
13
|
+
from .nodes.agent.factory import create_agent_node_caller
|
|
14
|
+
from .nodes.tool.factory import create_tool_node_caller
|
|
15
|
+
from .nodes.child.factory import create_child_node_caller
|
|
16
|
+
from .lib.yaml_parser import parse_yaml
|
|
17
|
+
from .types import CallLlm, Backends, NodeCaller, BroadcastSignalsCaller
|
|
18
|
+
from .local_backends import create_in_memory_backends, create_local_backends
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def create_all_nodes(
    backends: Backends,
    call_llm: Optional[CallLlm] = None,
    tools_registry: Optional[Dict[str, Callable]] = None,
) -> Tuple[Dict[str, NodeCaller], BroadcastSignalsCaller]:
    """
    Create all node types with automatic wiring.

    This is the recommended way to set up nodes for orchestration.
    Returns both the nodes dictionary and the broadcast_signals_caller.

    Args:
        backends: Backend services (use create_in_memory_backends or create_local_backends)
        call_llm: Optional LLM caller function for LLM/Agent nodes
        tools_registry: Optional dict mapping tool name -> callable for Tool/Agent nodes

    Returns:
        Tuple of (nodes dict, broadcast_signals_caller function)

    Example:
        backends = create_in_memory_backends()
        nodes, broadcast = create_all_nodes(backends, call_llm=my_llm, tools_registry=my_tools)

        execution_id = orchestrate(
            config=workflow_yaml,
            initial_workflow_name="my_workflow",
            initial_signals=["START"],
            initial_context={"user_input": "Hello"},
            backends=backends,
            broadcast_signals_caller=broadcast,
        )
    """
    nodes = {}

    # Closure over the (still-empty) nodes dict: node factories below need
    # this broadcaster before all nodes exist, and late binding means it
    # sees every node registered by the time it is actually invoked.
    def broadcast_signals_caller(id: str, signals: List[str]) -> None:
        broadcast_signals(id, signals, nodes, backends)

    # Router nodes need only the backends plus the broadcaster.
    nodes["router"] = create_router_node_caller(backends, broadcast_signals_caller)

    if call_llm is not None:
        nodes["llm"] = create_llm_node_caller(backends, call_llm, broadcast_signals_caller)

        # Agent nodes require an LLM, so they are only created inside this
        # branch; tools are an optional extra wrapped with retry metadata.
        tools_list = []
        if tools_registry:
            tools_list = [{"function": func, "max_retries": 0} for func in tools_registry.values()]
        nodes["agent"] = create_agent_node_caller(backends, tools_list, call_llm, broadcast_signals_caller)

    # Tool nodes only need the registry (an empty dict still creates the node).
    if tools_registry is not None:
        nodes["tool"] = create_tool_node_caller(backends, tools_registry, broadcast_signals_caller)

    def orchestrate_caller(
        config: Union[str, Dict[str, Any]],
        initial_workflow_name: str,
        initial_signals: List[str],
        initial_context: Dict[str, Any],
        backends: Backends,
    ) -> str:
        """Start a child workflow execution."""
        # Accept either raw YAML text or an already-parsed dict; deep-copy
        # the dict so the child cannot mutate the caller's config.
        if isinstance(config, str):
            parsed_config = parse_yaml(config)
        else:
            parsed_config = copy.deepcopy(config)

        # Child broadcasts reuse this factory's nodes dict, so child
        # workflows run on the same node set as the parent.
        def child_broadcast(execution_id: str, signals: List[str]) -> None:
            broadcast_signals(execution_id, signals, nodes, backends)

        return orchestrate(
            config=parsed_config,
            initial_workflow_name=initial_workflow_name,
            initial_signals=initial_signals,
            initial_context=initial_context,
            backends=backends,
            broadcast_signals_caller=child_broadcast,
        )

    nodes["child"] = create_child_node_caller(backends, orchestrate_caller)

    return nodes, broadcast_signals_caller
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def setup_orchestration(
    call_llm: Optional[CallLlm] = None,
    tools_registry: Optional[Dict[str, Callable]] = None,
    use_local_storage: bool = False,
    storage_dir: str = "./orchestration_data",
) -> Tuple[Backends, Callable]:
    """
    One-line setup for SOE orchestration.

    Builds the backend services (in-memory by default, file-based when
    requested) and wires up every node type via create_all_nodes.

    Args:
        call_llm: Optional LLM caller function for LLM/Agent nodes
        tools_registry: Optional dict mapping tool name -> callable
        use_local_storage: If True, use file-based storage. If False, use in-memory.
        storage_dir: Directory for local storage (only used if use_local_storage=True)

    Returns:
        Tuple of (backends, broadcast_signals_caller)

    Example:
        # Minimal setup (router-only workflows)
        backends, broadcast = setup_orchestration()

        # Full setup with LLM and tools
        backends, broadcast = setup_orchestration(
            call_llm=my_llm_function,
            tools_registry={"search": search_fn, "calculate": calc_fn},
        )

        execution_id = orchestrate(
            config=workflow_yaml,
            initial_workflow_name="my_workflow",
            initial_signals=["START"],
            initial_context={},
            backends=backends,
            broadcast_signals_caller=broadcast,
        )
    """
    if use_local_storage:
        # Each backend gets its own subdirectory under storage_dir.
        subdirs = {
            "context_storage_dir": "contexts",
            "workflow_storage_dir": "workflows",
            "telemetry_storage_dir": "telemetry",
            "conversation_history_storage_dir": "conversations",
            "context_schema_storage_dir": "schemas",
            "identity_storage_dir": "identities",
        }
        backends = create_local_backends(
            **{arg: f"{storage_dir}/{name}" for arg, name in subdirs.items()}
        )
    else:
        backends = create_in_memory_backends()

    # The nodes dict itself is not needed here; only the broadcaster is
    # returned for use with orchestrate().
    _, broadcast = create_all_nodes(
        backends=backends,
        call_llm=call_llm,
        tools_registry=tools_registry,
    )

    return backends, broadcast
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
# Public API of this module; star-imports expose only these helpers.
__all__ = [
    "create_all_nodes",
    "setup_orchestration",
]
|
soe/lib/__init__.py
ADDED
|
File without changes
|
soe/lib/child_context.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Child context preparation utilities.
|
|
3
|
+
|
|
4
|
+
Prepares initial context for child workflows with parent metadata.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import copy
|
|
8
|
+
from typing import Any, Dict
|
|
9
|
+
|
|
10
|
+
from .context_fields import get_field
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
PARENT_INFO_KEY = "__parent__"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def prepare_child_context(
    parent_context: Dict[str, Any],
    node_config: Dict[str, Any],
    parent_execution_id: str,
    main_execution_id: str,
) -> Dict[str, Any]:
    """
    Build the initial context dict for a spawned child workflow.

    The configured input_fields are copied from the parent (current value
    only, not the full history; orchestrate() re-wraps them), and the
    __parent__ metadata block is attached so the child can route signals
    and context updates back to its parent.
    """
    # Current value per requested field, deep-copied so the child cannot
    # mutate the parent's data.
    child_context: Dict[str, Any] = {
        name: copy.deepcopy(get_field(parent_context, name))
        for name in node_config.get("input_fields", [])
        if name in parent_context
    }

    # Metadata enabling child-to-parent communication.
    child_context[PARENT_INFO_KEY] = {
        "parent_execution_id": parent_execution_id,
        "signals_to_parent": node_config.get("signals_to_parent", []),
        "context_updates_to_parent": node_config.get("context_updates_to_parent", []),
        "main_execution_id": main_execution_id,
    }

    return child_context
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Context field utilities for history-aware field storage.
|
|
3
|
+
|
|
4
|
+
Fields are stored as lists to maintain update history.
|
|
5
|
+
Reading always returns the last (most recent) value.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Any, Dict, List
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def set_field(context: Dict[str, Any], field: str, value: Any) -> None:
    """Record *value* for *field*, preserving prior values as history.

    Internal fields ("__"-prefixed) are stored raw and overwritten;
    regular fields accumulate into a list, newest last.
    """
    if field.startswith("__"):
        # Internal bookkeeping fields do not keep history.
        context[field] = value
    else:
        context.setdefault(field, []).append(value)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_field(context: Dict[str, Any], field: str) -> Any:
    """Get the current value of a context field.

    Regular fields are stored as history lists; the most recent (last)
    entry is returned. Internal fields ("__"-prefixed) are stored raw
    and returned as-is.

    Returns:
        The latest value, or None when the field is missing or its
        history list is empty.
    """
    if field.startswith("__"):
        return context.get(field)

    history = context.get(field)
    # Guard against both a missing field and an empty history list
    # (the previous implementation raised IndexError on []).
    if not history:
        return None

    return history[-1]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def get_accumulated(context: Dict[str, Any], field: str) -> List[Any]:
    """Return the full update history for *field* as a new list.

    Special case: a history containing exactly one entry that is itself
    a list is unwrapped and returned (covers the common fan-out pattern
    where the initial context supplied a list as the field value).
    Missing fields yield an empty list.
    """
    if field not in context:
        return []

    history = context[field]

    # Single list entry means the caller passed a list as the value;
    # treat that inner list as the accumulated data.
    source = (
        history[0]
        if len(history) == 1 and isinstance(history[0], list)
        else history
    )
    return list(source)
|
soe/lib/inheritance.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Configuration and context inheritance utilities.
|
|
3
|
+
|
|
4
|
+
Handles:
|
|
5
|
+
- Extracting and saving config sections (workflows, identity, schema) from parsed config
|
|
6
|
+
- Inheriting config from existing executions
|
|
7
|
+
- Inheriting context from existing executions
|
|
8
|
+
|
|
9
|
+
Used for workflow initialization and chaining.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import copy
|
|
13
|
+
from typing import Dict, Any, Optional
|
|
14
|
+
|
|
15
|
+
from ..types import Backends
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def save_config_sections(
    execution_id: str,
    backends: Backends,
    identities: Optional[Dict[str, str]] = None,
    context_schema: Optional[Dict[str, Any]] = None,
) -> None:
    """Persist identities and context_schema under an execution ID.

    Shared by both config extraction (new executions) and config
    inheritance (chained executions). Each section is written only when
    both the data and the matching backend are present; anything missing
    is silently skipped.

    Args:
        execution_id: Target execution ID
        backends: Backend services
        identities: Identity definitions to save (optional)
        context_schema: Context schema to save (optional)
    """
    if identities and backends.identity:
        backends.identity.save_identities(execution_id, identities)

    if context_schema and backends.context_schema:
        backends.context_schema.save_context_schema(execution_id, context_schema)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def extract_and_save_config_sections(
    parsed_config: Dict[str, Any],
    execution_id: str,
    backends: Backends,
) -> Dict[str, Any]:
    """
    Split a parsed config into its sections and persist the non-workflow
    parts, returning the workflows registry.

    Combined configs carry a 'workflows' key plus optional 'identities'
    and 'context_schema' sections; the latter two are saved to their
    backends. A config without 'workflows' is treated as the legacy
    layout where the whole dict IS the workflows registry.

    Args:
        parsed_config: Parsed config dictionary
        execution_id: Execution ID to save sections under
        backends: Backend services

    Returns:
        The workflows registry portion of the config
    """
    # Legacy/simple layout: nothing to extract, the config is the registry.
    if "workflows" not in parsed_config:
        return parsed_config

    save_config_sections(
        execution_id=execution_id,
        backends=backends,
        identities=parsed_config.get("identities"),
        context_schema=parsed_config.get("context_schema"),
    )

    return parsed_config["workflows"]
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def inherit_config(
    source_execution_id: str,
    target_execution_id: str,
    backends: Backends,
) -> Dict[str, Any]:
    """
    Copy a source execution's configuration onto a target execution.

    Duplicates the workflows registry, and — where the corresponding
    backends exist — the identities and context schema, all under the
    target execution ID.

    Args:
        source_execution_id: Execution ID to inherit from
        target_execution_id: Execution ID to inherit to
        backends: Backend services

    Returns:
        The workflows registry (for validation and use)

    Raises:
        ValueError: If source execution has no workflows registry
    """
    registry = backends.workflow.get_workflows_registry(source_execution_id)
    if not registry:
        raise ValueError(
            f"Cannot inherit config from execution '{source_execution_id}': "
            "no workflows registry found"
        )

    backends.workflow.save_workflows_registry(target_execution_id, registry)

    # Optional sections: only readable when the backend is configured.
    identities = (
        backends.identity.get_identities(source_execution_id)
        if backends.identity
        else None
    )
    schema = (
        backends.context_schema.get_context_schema(source_execution_id)
        if backends.context_schema
        else None
    )

    save_config_sections(
        execution_id=target_execution_id,
        backends=backends,
        identities=identities,
        context_schema=schema,
    )

    return registry
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def inherit_context(
    source_execution_id: str,
    backends: Backends,
) -> Dict[str, Any]:
    """
    Deep-copy a source execution's context for reuse in a new run.

    Internal bookkeeping fields are dropped — __operational__ (execution
    tracking state) and __parent__ (parent workflow metadata) — since
    both are rebuilt for the new execution.

    Args:
        source_execution_id: Execution ID to inherit context from
        backends: Backend services

    Returns:
        Context dictionary ready for new execution (without internal fields)

    Raises:
        ValueError: If source execution has no context
    """
    source_context = backends.context.get_context(source_execution_id)
    if not source_context:
        raise ValueError(
            f"Cannot inherit context from execution '{source_execution_id}': "
            "no context found"
        )

    excluded = ("__operational__", "__parent__")
    inherited: Dict[str, Any] = {}
    for key, value in source_context.items():
        if key in excluded:
            continue
        inherited[key] = copy.deepcopy(value)

    return inherited
|
soe/lib/jinja_render.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Jinja template rendering utilities for prompt processing.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
from typing import Dict, Any, Set, List, Tuple
|
|
7
|
+
|
|
8
|
+
from jinja2 import Environment, BaseLoader, TemplateSyntaxError
|
|
9
|
+
|
|
10
|
+
from .context_fields import get_field
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _create_accumulated_filter(full_context: Dict[str, Any]):
    """Create an `accumulated` Jinja filter bound to *full_context*.

    The returned filter lets templates recover the full history list of
    a field even though Jinja only hands filters the field's current
    (unwrapped) value.
    """
    def accumulated_filter(value):
        """
        Return the full accumulated history list for a context field.

        Usage in templates:
            {{ context.field | accumulated }} - returns full list
            {{ context.field | accumulated | length }} - count of items
            {{ context.field | accumulated | join(', ') }} - join all items

        If history has exactly one entry and it's a list, returns that list
        (common case: initial context passed a list as value).
        """
        # The filter only receives the field's current value, so the owning
        # field is recovered by scanning full_context for a history list
        # whose last entry equals that value.
        # NOTE(review): this match is heuristic — if two fields happen to
        # share an equal latest value, the first match in dict order wins;
        # confirm that is acceptable for template authors.
        for key, hist_list in full_context.items():
            if key.startswith("__"):
                continue
            if isinstance(hist_list, list) and hist_list and hist_list[-1] == value:
                # If history has exactly one entry and it's a list, return that list
                if len(hist_list) == 1 and isinstance(hist_list[0], list):
                    return hist_list[0]
                return hist_list
        # Fallback: no owning field found — treat the value itself as the
        # whole history (empty for None).
        return [value] if value is not None else []

    return accumulated_filter
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _extract_context_variables(template: str) -> Set[str]:
|
|
43
|
+
"""Extract variable names from a Jinja template."""
|
|
44
|
+
if not template:
|
|
45
|
+
return set()
|
|
46
|
+
|
|
47
|
+
variables = set()
|
|
48
|
+
|
|
49
|
+
dot_pattern = r'\{\{[^}]*context\.([a-zA-Z_][a-zA-Z0-9_]*)'
|
|
50
|
+
for match in re.finditer(dot_pattern, template):
|
|
51
|
+
variables.add(match.group(1))
|
|
52
|
+
|
|
53
|
+
bracket_pattern = r"\{\{[^}]*context\[['\"]([a-zA-Z_][a-zA-Z0-9_]*)['\"]"
|
|
54
|
+
for match in re.finditer(bracket_pattern, template):
|
|
55
|
+
variables.add(match.group(1))
|
|
56
|
+
|
|
57
|
+
return variables
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def get_context_for_prompt(
    full_context: Dict[str, Any],
    template: str
) -> Tuple[Dict[str, Any], List[str]]:
    """Build the minimal context a template needs, plus any warnings.

    Each field referenced by the template is looked up in full_context;
    missing fields produce a warning and are omitted, while None/empty
    values are included but also flagged.
    """
    filtered: Dict[str, Any] = {}
    warnings: List[str] = []

    for field in _extract_context_variables(template):
        if field not in full_context:
            warnings.append(f"Context field '{field}' referenced in prompt but not found in context")
            continue

        value = get_field(full_context, field)
        if value is None:
            warnings.append(f"Context field '{field}' is None")
        elif value == "":
            warnings.append(f"Context field '{field}' is empty string")
        filtered[field] = value

    return filtered, warnings
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def render_prompt(prompt: str, context: Dict[str, Any]) -> Tuple[str, List[str]]:
    """Render *prompt* as a Jinja template against *context*.

    Returns (rendered_text, warnings). Prompts without Jinja syntax are
    returned untouched; template errors never raise — the original
    prompt comes back with the error recorded as a warning.
    """
    # Fast path: empty prompt or no Jinja markers at all.
    if not prompt or ("{{" not in prompt and "{%" not in prompt):
        return prompt, []

    _, warnings = get_context_for_prompt(context, prompt)

    # Templates see current values for regular fields and raw values for
    # internal ("__"-prefixed) fields.
    unwrapped = {
        key: value if key.startswith("__") else get_field(context, key)
        for key, value in context.items()
    }

    try:
        env = Environment(loader=BaseLoader())
        # Register custom filters
        env.filters["accumulated"] = _create_accumulated_filter(context)
        rendered = env.from_string(prompt).render(context=unwrapped)
    except TemplateSyntaxError as e:
        warnings.append(f"Jinja syntax error: {e}")
        return prompt, warnings
    except Exception as e:
        warnings.append(f"Template rendering error: {e}")
        return prompt, warnings

    return rendered, warnings
|
soe/lib/operational.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Operational context initialization.
|
|
3
|
+
|
|
4
|
+
This module handles initialization of the operational state structure.
|
|
5
|
+
Runtime updates are handled by register_event.py.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, Any
|
|
9
|
+
|
|
10
|
+
PARENT_INFO_KEY = "__parent__"
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def wrap_context_fields(context: Dict[str, Any]) -> Dict[str, Any]:
    """Wrap each regular field value in a single-item history list.

    Internal fields ("__"-prefixed) pass through untouched. A context
    carrying __parent__ metadata arrived from a parent workflow and is
    already wrapped, so it is returned unchanged.
    """
    if PARENT_INFO_KEY in context:
        return context

    wrapped: Dict[str, Any] = {}
    for key, value in context.items():
        wrapped[key] = value if key.startswith("__") else [value]
    return wrapped
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def add_operational_state(
    execution_id: str,
    context: Dict[str, Any],
) -> Dict[str, Any]:
    """Return a new context dict with __operational__ tracking state.

    A context that already has __operational__ is returned unchanged.
    Child workflows inherit the main execution ID from their __parent__
    metadata; otherwise this execution is the main one.
    """
    if "__operational__" in context:
        return context

    parent_info = context.get(PARENT_INFO_KEY, {})
    inherited_main_id = parent_info.get("main_execution_id")
    main_id = inherited_main_id if inherited_main_id else execution_id

    # Fresh counters for this execution.
    operational = {
        "signals": [],
        "nodes": {},
        "llm_calls": 0,
        "tool_calls": 0,
        "errors": 0,
        "main_execution_id": main_id
    }
    return {**context, "__operational__": operational}
|
soe/lib/parent_sync.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Parent sync utilities for sub-orchestration.
|
|
3
|
+
|
|
4
|
+
These functions check if a signal or context update should be propagated
|
|
5
|
+
to the parent workflow based on the injected __parent__ metadata.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, Any, Tuple, Optional, List
|
|
9
|
+
from ..types import Backends
|
|
10
|
+
|
|
11
|
+
PARENT_INFO_KEY = "__parent__"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def get_signals_for_parent(
    signals: List[str], context: Dict[str, Any]
) -> Tuple[Optional[str], List[str]]:
    """Filter *signals* down to those configured to propagate upward.

    Returns (parent_execution_id, matching_signals); (None, []) when the
    context carries no __parent__ metadata.
    """
    parent_info = context.get(PARENT_INFO_KEY)
    if not parent_info:
        return (None, [])

    allowed = set(parent_info.get("signals_to_parent", []))
    forwarded = [signal for signal in signals if signal in allowed]

    return (parent_info.get("parent_execution_id"), forwarded)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _check_parent_context_sync(
    key: str, context: Dict[str, Any]
) -> Tuple[Optional[str], bool]:
    """Report whether context field *key* should be synced to the parent.

    Returns (parent_execution_id, should_sync); (None, False) when no
    __parent__ metadata is present.
    """
    parent_info = context.get(PARENT_INFO_KEY)
    if not parent_info:
        return (None, False)

    parent_id = parent_info.get("parent_execution_id")
    should_sync = key in parent_info.get("context_updates_to_parent", [])

    return (parent_id, should_sync)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def sync_context_to_parent(
    context: Dict[str, Any],
    updated_keys: List[str],
    backends: Backends,
) -> None:
    """Sync updated context keys to the parent workflow if configured.

    For each key in updated_keys that is configured for parent sync:
    - If parent doesn't have the key, copy the full list from child
    - If parent already has the key, extend with new items from child

    After saving the parent, recurses with the parent's context so the
    key keeps propagating up the ancestor chain until a context without
    __parent__ metadata ends the recursion.
    """
    for key in updated_keys:
        # __parent__ metadata decides both the target execution and
        # whether this particular key may propagate.
        parent_id, should_sync = _check_parent_context_sync(key, context)
        if should_sync and parent_id:
            parent_context = backends.context.get_context(parent_id)
            child_history = context[key]

            if key in parent_context:
                # Append new items from child to parent's existing list.
                # NOTE(review): this extends with the child's FULL history
                # each call — syncing the same key twice would duplicate
                # entries in the parent; confirm callers sync each key at
                # most once per update.
                parent_context[key].extend(child_history)
            else:
                # Initialize parent with child's history.
                # NOTE(review): assigns the same list object (no copy), so
                # parent and child share it in memory — presumably safe
                # because the parent is persisted immediately; verify.
                parent_context[key] = child_history

            backends.context.save_context(parent_id, parent_context)
            # Continue up the chain: grandparent sync uses the parent's
            # own __parent__ metadata (if any).
            sync_context_to_parent(parent_context, [key], backends)
|