soe-ai 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- soe_ai-0.1.0/LICENSE +21 -0
- soe_ai-0.1.0/PKG-INFO +199 -0
- soe_ai-0.1.0/README.md +163 -0
- soe_ai-0.1.0/pyproject.toml +76 -0
- soe_ai-0.1.0/setup.cfg +4 -0
- soe_ai-0.1.0/soe/__init__.py +50 -0
- soe_ai-0.1.0/soe/broker.py +169 -0
- soe_ai-0.1.0/soe/docs_index.py +2 -0
- soe_ai-0.1.0/soe/init.py +165 -0
- soe_ai-0.1.0/soe/types.py +197 -0
- soe_ai-0.1.0/soe/validation.py +8 -0
- soe_ai-0.1.0/soe_ai.egg-info/PKG-INFO +199 -0
- soe_ai-0.1.0/soe_ai.egg-info/SOURCES.txt +16 -0
- soe_ai-0.1.0/soe_ai.egg-info/dependency_links.txt +1 -0
- soe_ai-0.1.0/soe_ai.egg-info/requires.txt +14 -0
- soe_ai-0.1.0/soe_ai.egg-info/top_level.txt +1 -0
- soe_ai-0.1.0/tests/test_local_storage_backends.py +365 -0
- soe_ai-0.1.0/tests/test_validation_errors.py +1066 -0
soe_ai-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Pedro Garcia
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
soe_ai-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: soe-ai
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Signal-driven Orchestration Engine - Agent orchestration with event-driven workflow engine
|
|
5
|
+
Author-email: Pedro Garcia <pedro@example.com>
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/pgarcia14180/soe
|
|
8
|
+
Project-URL: Documentation, https://github.com/pgarcia14180/soe/tree/master/docs
|
|
9
|
+
Project-URL: Repository, https://github.com/pgarcia14180/soe
|
|
10
|
+
Project-URL: Issues, https://github.com/pgarcia14180/soe/issues
|
|
11
|
+
Keywords: orchestration,agent,workflow,automation
|
|
12
|
+
Classifier: Development Status :: 3 - Alpha
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Requires-Python: >=3.8
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
License-File: LICENSE
|
|
23
|
+
Requires-Dist: pyyaml>=6.0
|
|
24
|
+
Requires-Dist: pydantic>=2.0.0
|
|
25
|
+
Requires-Dist: jinja2>=3.0.0
|
|
26
|
+
Provides-Extra: dev
|
|
27
|
+
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
28
|
+
Requires-Dist: pytest-cov>=4.0; extra == "dev"
|
|
29
|
+
Requires-Dist: black>=23.0; extra == "dev"
|
|
30
|
+
Requires-Dist: mypy>=1.0; extra == "dev"
|
|
31
|
+
Requires-Dist: ruff>=0.1.0; extra == "dev"
|
|
32
|
+
Provides-Extra: integration
|
|
33
|
+
Requires-Dist: openai>=1.0.0; extra == "integration"
|
|
34
|
+
Requires-Dist: requests>=2.28.0; extra == "integration"
|
|
35
|
+
Dynamic: license-file
|
|
36
|
+
|
|
37
|
+
# SOE — Signal-driven Orchestration Engine
|
|
38
|
+
|
|
39
|
+
**A protocol for orchestrating AI workflows through signals.**
|
|
40
|
+
|
|
41
|
+
---
|
|
42
|
+
|
|
43
|
+
## What SOE Is
|
|
44
|
+
|
|
45
|
+
SOE is an orchestration engine where nodes communicate through **signals** rather than direct function calls. You define workflows in YAML, and the engine handles execution.
|
|
46
|
+
|
|
47
|
+
| Approach | How It Works | Trade-off |
|
|
48
|
+
|----------|--------------|-----------|
|
|
49
|
+
| Chain-based | `Step A → B → C → D` | Simple but rigid |
|
|
50
|
+
| SOE Signal-based | `[SIGNAL] → all listeners respond` | Flexible, requires understanding signals |
|
|
51
|
+
|
|
52
|
+
**The C++ analogy**: Like C++ gives you control over memory and execution (compared to higher-level languages), SOE gives you control over orchestration primitives. You decide how state is stored, how LLMs are called, and how signals are broadcast. This requires more setup but means no vendor lock-in and full observability.
|
|
53
|
+
|
|
54
|
+
---
|
|
55
|
+
|
|
56
|
+
## What SOE Does
|
|
57
|
+
|
|
58
|
+
SOE orchestrates workflows through **signals**. Nodes don't call each other—they emit signals that other nodes listen for.
|
|
59
|
+
|
|
60
|
+
```yaml
|
|
61
|
+
example_workflow:
|
|
62
|
+
ValidateInput:
|
|
63
|
+
node_type: router
|
|
64
|
+
event_triggers: [START]
|
|
65
|
+
event_emissions:
|
|
66
|
+
- signal_name: VALID
|
|
67
|
+
condition: "{{ context.data is defined }}"
|
|
68
|
+
- signal_name: INVALID
|
|
69
|
+
|
|
70
|
+
ProcessData:
|
|
71
|
+
node_type: llm
|
|
72
|
+
event_triggers: [VALID]
|
|
73
|
+
prompt: "Process this: {{ context.data }}"
|
|
74
|
+
output_field: result
|
|
75
|
+
event_emissions:
|
|
76
|
+
- signal_name: DONE
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
**That's the entire workflow definition.** No SDK, no decorators, no base classes.
|
|
80
|
+
|
|
81
|
+
---
|
|
82
|
+
|
|
83
|
+
## Why SOE
|
|
84
|
+
|
|
85
|
+
### 1. Infrastructure-Agnostic
|
|
86
|
+
SOE defines **protocols**, not implementations. Swap PostgreSQL for DynamoDB. Replace OpenAI with a local LLM. Deploy to Lambda, Kubernetes, or a single Python script. Your workflow YAML stays the same.
|
|
87
|
+
|
|
88
|
+
### 2. Context-Driven with Jinja
|
|
89
|
+
All workflow state flows through **context**—a shared dictionary accessible via Jinja2 templates. This means:
|
|
90
|
+
- Conditions like `{{ context.user_validated }}` are readable and debuggable
|
|
91
|
+
- LLM prompts can interpolate any context field
|
|
92
|
+
- No hidden state—everything is inspectable
|
|
93
|
+
|
|
94
|
+
### 3. Deterministic + Agentic
|
|
95
|
+
Mix hard-coded logic with LLM-driven behavior in the same workflow. Router nodes are pure conditionals. Agent nodes can call tools. Use what you need.
|
|
96
|
+
|
|
97
|
+
### 4. Portable
|
|
98
|
+
Workflows are YAML. Run them locally, in CI, in production. Extract them, version them, share them.
|
|
99
|
+
|
|
100
|
+
---
|
|
101
|
+
|
|
102
|
+
## Installation
|
|
103
|
+
|
|
104
|
+
```bash
|
|
105
|
+
# With uv (recommended)
|
|
106
|
+
uv add soe-ai
|
|
107
|
+
|
|
108
|
+
# With pip
|
|
109
|
+
pip install soe-ai
|
|
110
|
+
|
|
111
|
+
# From source
|
|
112
|
+
git clone https://github.com/pgarcia14180/soe.git
|
|
113
|
+
cd soe && uv sync
|
|
114
|
+
```
|
|
115
|
+
|
|
116
|
+
---
|
|
117
|
+
|
|
118
|
+
## Quick Start
|
|
119
|
+
|
|
120
|
+
```python
|
|
121
|
+
from soe import orchestrate, create_all_nodes
|
|
122
|
+
from soe.local_backends import create_local_backends
|
|
123
|
+
|
|
124
|
+
# Your workflow (can also be loaded from file or database)
|
|
125
|
+
workflow = """
|
|
126
|
+
example_workflow:
|
|
127
|
+
Start:
|
|
128
|
+
node_type: router
|
|
129
|
+
event_triggers: [START]
|
|
130
|
+
event_emissions:
|
|
131
|
+
- signal_name: DONE
|
|
132
|
+
"""
|
|
133
|
+
|
|
134
|
+
# Create backends (storage for context, workflows, etc.)
|
|
135
|
+
backends = create_local_backends("./data")
|
|
136
|
+
|
|
137
|
+
# Create all node handlers
|
|
138
|
+
nodes, broadcast = create_all_nodes(backends)
|
|
139
|
+
|
|
140
|
+
# Run the workflow
|
|
141
|
+
execution_id = orchestrate(
|
|
142
|
+
config=workflow,
|
|
143
|
+
initial_workflow_name="example_workflow",
|
|
144
|
+
initial_signals=["START"],
|
|
145
|
+
initial_context={"user": "alice"},
|
|
146
|
+
backends=backends,
|
|
147
|
+
broadcast_signals_caller=broadcast,
|
|
148
|
+
)
|
|
149
|
+
# When orchestrate() returns, the workflow is complete
|
|
150
|
+
```
|
|
151
|
+
|
|
152
|
+
**For product managers and less technical users**: The Quick Start above is all you need to run a workflow. The `config` parameter accepts YAML defining your workflow structure. The `initial_context` is where you pass input data (like user IDs, requests, etc.).
|
|
153
|
+
|
|
154
|
+
---
|
|
155
|
+
|
|
156
|
+
## Documentation
|
|
157
|
+
|
|
158
|
+
| Audience | Start Here |
|
|
159
|
+
|----------|------------|
|
|
160
|
+
| **Builders** (workflow authors) | [Documentation](docs/index.md) — Step-by-step chapters |
|
|
161
|
+
| **Engineers** (infrastructure) | [ARCHITECTURE.md](ai_docs/ARCHITECTURE.md) — Design philosophy |
|
|
162
|
+
| **Researchers** (advanced patterns) | [Advanced Patterns](docs/advanced_patterns/index.md) — Swarm, hybrid, self-evolving |
|
|
163
|
+
|
|
164
|
+
---
|
|
165
|
+
|
|
166
|
+
## Node Types
|
|
167
|
+
|
|
168
|
+
| Node | Purpose |
|
|
169
|
+
|------|---------|
|
|
170
|
+
| `router` | Conditional signal emission (no LLM) |
|
|
171
|
+
| `llm` | Single LLM call with output |
|
|
172
|
+
| `agent` | Multi-turn LLM with tool access |
|
|
173
|
+
| `tool` | Execute Python functions |
|
|
174
|
+
| `child` | Spawn sub-workflows |
|
|
175
|
+
|
|
176
|
+
---
|
|
177
|
+
|
|
178
|
+
## Backend Protocols
|
|
179
|
+
|
|
180
|
+
Implement these to plug SOE into your infrastructure:
|
|
181
|
+
|
|
182
|
+
| Protocol | Purpose |
|
|
183
|
+
|----------|---------|
|
|
184
|
+
| `ContextBackend` | Workflow state storage |
|
|
185
|
+
| `WorkflowBackend` | Workflow definitions |
|
|
186
|
+
| `ContextSchemaBackend` | Output validation (optional) |
|
|
187
|
+
| `IdentityBackend` | LLM system prompts (optional) |
|
|
188
|
+
| `ConversationHistoryBackend` | Agent memory (optional) |
|
|
189
|
+
| `TelemetryBackend` | Observability (optional) |
|
|
190
|
+
|
|
191
|
+
**Recommendation**: Use the same database for context, workflows, identities, and context_schema—just separate tables. The backend methods handle table creation.
|
|
192
|
+
|
|
193
|
+
See [Infrastructure Guide](docs/guide_10_infrastructure.md) for PostgreSQL, DynamoDB, and Lambda examples.
|
|
194
|
+
|
|
195
|
+
---
|
|
196
|
+
|
|
197
|
+
## License
|
|
198
|
+
|
|
199
|
+
MIT
|
soe_ai-0.1.0/README.md
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
# SOE — Signal-driven Orchestration Engine
|
|
2
|
+
|
|
3
|
+
**A protocol for orchestrating AI workflows through signals.**
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
## What SOE Is
|
|
8
|
+
|
|
9
|
+
SOE is an orchestration engine where nodes communicate through **signals** rather than direct function calls. You define workflows in YAML, and the engine handles execution.
|
|
10
|
+
|
|
11
|
+
| Approach | How It Works | Trade-off |
|
|
12
|
+
|----------|--------------|-----------|
|
|
13
|
+
| Chain-based | `Step A → B → C → D` | Simple but rigid |
|
|
14
|
+
| SOE Signal-based | `[SIGNAL] → all listeners respond` | Flexible, requires understanding signals |
|
|
15
|
+
|
|
16
|
+
**The C++ analogy**: Like C++ gives you control over memory and execution (compared to higher-level languages), SOE gives you control over orchestration primitives. You decide how state is stored, how LLMs are called, and how signals are broadcast. This requires more setup but means no vendor lock-in and full observability.
|
|
17
|
+
|
|
18
|
+
---
|
|
19
|
+
|
|
20
|
+
## What SOE Does
|
|
21
|
+
|
|
22
|
+
SOE orchestrates workflows through **signals**. Nodes don't call each other—they emit signals that other nodes listen for.
|
|
23
|
+
|
|
24
|
+
```yaml
|
|
25
|
+
example_workflow:
|
|
26
|
+
ValidateInput:
|
|
27
|
+
node_type: router
|
|
28
|
+
event_triggers: [START]
|
|
29
|
+
event_emissions:
|
|
30
|
+
- signal_name: VALID
|
|
31
|
+
condition: "{{ context.data is defined }}"
|
|
32
|
+
- signal_name: INVALID
|
|
33
|
+
|
|
34
|
+
ProcessData:
|
|
35
|
+
node_type: llm
|
|
36
|
+
event_triggers: [VALID]
|
|
37
|
+
prompt: "Process this: {{ context.data }}"
|
|
38
|
+
output_field: result
|
|
39
|
+
event_emissions:
|
|
40
|
+
- signal_name: DONE
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
**That's the entire workflow definition.** No SDK, no decorators, no base classes.
|
|
44
|
+
|
|
45
|
+
---
|
|
46
|
+
|
|
47
|
+
## Why SOE
|
|
48
|
+
|
|
49
|
+
### 1. Infrastructure-Agnostic
|
|
50
|
+
SOE defines **protocols**, not implementations. Swap PostgreSQL for DynamoDB. Replace OpenAI with a local LLM. Deploy to Lambda, Kubernetes, or a single Python script. Your workflow YAML stays the same.
|
|
51
|
+
|
|
52
|
+
### 2. Context-Driven with Jinja
|
|
53
|
+
All workflow state flows through **context**—a shared dictionary accessible via Jinja2 templates. This means:
|
|
54
|
+
- Conditions like `{{ context.user_validated }}` are readable and debuggable
|
|
55
|
+
- LLM prompts can interpolate any context field
|
|
56
|
+
- No hidden state—everything is inspectable
|
|
57
|
+
|
|
58
|
+
### 3. Deterministic + Agentic
|
|
59
|
+
Mix hard-coded logic with LLM-driven behavior in the same workflow. Router nodes are pure conditionals. Agent nodes can call tools. Use what you need.
|
|
60
|
+
|
|
61
|
+
### 4. Portable
|
|
62
|
+
Workflows are YAML. Run them locally, in CI, in production. Extract them, version them, share them.
|
|
63
|
+
|
|
64
|
+
---
|
|
65
|
+
|
|
66
|
+
## Installation
|
|
67
|
+
|
|
68
|
+
```bash
|
|
69
|
+
# With uv (recommended)
|
|
70
|
+
uv add soe-ai
|
|
71
|
+
|
|
72
|
+
# With pip
|
|
73
|
+
pip install soe-ai
|
|
74
|
+
|
|
75
|
+
# From source
|
|
76
|
+
git clone https://github.com/pgarcia14180/soe.git
|
|
77
|
+
cd soe && uv sync
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
---
|
|
81
|
+
|
|
82
|
+
## Quick Start
|
|
83
|
+
|
|
84
|
+
```python
|
|
85
|
+
from soe import orchestrate, create_all_nodes
|
|
86
|
+
from soe.local_backends import create_local_backends
|
|
87
|
+
|
|
88
|
+
# Your workflow (can also be loaded from file or database)
|
|
89
|
+
workflow = """
|
|
90
|
+
example_workflow:
|
|
91
|
+
Start:
|
|
92
|
+
node_type: router
|
|
93
|
+
event_triggers: [START]
|
|
94
|
+
event_emissions:
|
|
95
|
+
- signal_name: DONE
|
|
96
|
+
"""
|
|
97
|
+
|
|
98
|
+
# Create backends (storage for context, workflows, etc.)
|
|
99
|
+
backends = create_local_backends("./data")
|
|
100
|
+
|
|
101
|
+
# Create all node handlers
|
|
102
|
+
nodes, broadcast = create_all_nodes(backends)
|
|
103
|
+
|
|
104
|
+
# Run the workflow
|
|
105
|
+
execution_id = orchestrate(
|
|
106
|
+
config=workflow,
|
|
107
|
+
initial_workflow_name="example_workflow",
|
|
108
|
+
initial_signals=["START"],
|
|
109
|
+
initial_context={"user": "alice"},
|
|
110
|
+
backends=backends,
|
|
111
|
+
broadcast_signals_caller=broadcast,
|
|
112
|
+
)
|
|
113
|
+
# When orchestrate() returns, the workflow is complete
|
|
114
|
+
```
|
|
115
|
+
|
|
116
|
+
**For product managers and less technical users**: The Quick Start above is all you need to run a workflow. The `config` parameter accepts YAML defining your workflow structure. The `initial_context` is where you pass input data (like user IDs, requests, etc.).
|
|
117
|
+
|
|
118
|
+
---
|
|
119
|
+
|
|
120
|
+
## Documentation
|
|
121
|
+
|
|
122
|
+
| Audience | Start Here |
|
|
123
|
+
|----------|------------|
|
|
124
|
+
| **Builders** (workflow authors) | [Documentation](docs/index.md) — Step-by-step chapters |
|
|
125
|
+
| **Engineers** (infrastructure) | [ARCHITECTURE.md](ai_docs/ARCHITECTURE.md) — Design philosophy |
|
|
126
|
+
| **Researchers** (advanced patterns) | [Advanced Patterns](docs/advanced_patterns/index.md) — Swarm, hybrid, self-evolving |
|
|
127
|
+
|
|
128
|
+
---
|
|
129
|
+
|
|
130
|
+
## Node Types
|
|
131
|
+
|
|
132
|
+
| Node | Purpose |
|
|
133
|
+
|------|---------|
|
|
134
|
+
| `router` | Conditional signal emission (no LLM) |
|
|
135
|
+
| `llm` | Single LLM call with output |
|
|
136
|
+
| `agent` | Multi-turn LLM with tool access |
|
|
137
|
+
| `tool` | Execute Python functions |
|
|
138
|
+
| `child` | Spawn sub-workflows |
|
|
139
|
+
|
|
140
|
+
---
|
|
141
|
+
|
|
142
|
+
## Backend Protocols
|
|
143
|
+
|
|
144
|
+
Implement these to plug SOE into your infrastructure:
|
|
145
|
+
|
|
146
|
+
| Protocol | Purpose |
|
|
147
|
+
|----------|---------|
|
|
148
|
+
| `ContextBackend` | Workflow state storage |
|
|
149
|
+
| `WorkflowBackend` | Workflow definitions |
|
|
150
|
+
| `ContextSchemaBackend` | Output validation (optional) |
|
|
151
|
+
| `IdentityBackend` | LLM system prompts (optional) |
|
|
152
|
+
| `ConversationHistoryBackend` | Agent memory (optional) |
|
|
153
|
+
| `TelemetryBackend` | Observability (optional) |
|
|
154
|
+
|
|
155
|
+
**Recommendation**: Use the same database for context, workflows, identities, and context_schema—just separate tables. The backend methods handle table creation.
|
|
156
|
+
|
|
157
|
+
See [Infrastructure Guide](docs/guide_10_infrastructure.md) for PostgreSQL, DynamoDB, and Lambda examples.
|
|
158
|
+
|
|
159
|
+
---
|
|
160
|
+
|
|
161
|
+
## License
|
|
162
|
+
|
|
163
|
+
MIT
|
|
soe_ai-0.1.0/pyproject.toml
ADDED
@@ -0,0 +1,76 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "soe-ai"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Signal-driven Orchestration Engine - Agent orchestration with event-driven workflow engine"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.8"
|
|
11
|
+
license = "MIT"
|
|
12
|
+
authors = [
|
|
13
|
+
{name = "Pedro Garcia", email = "pedro@example.com"}
|
|
14
|
+
]
|
|
15
|
+
keywords = ["orchestration", "agent", "workflow", "automation"]
|
|
16
|
+
classifiers = [
|
|
17
|
+
"Development Status :: 3 - Alpha",
|
|
18
|
+
"Intended Audience :: Developers",
|
|
19
|
+
"Programming Language :: Python :: 3",
|
|
20
|
+
"Programming Language :: Python :: 3.8",
|
|
21
|
+
"Programming Language :: Python :: 3.9",
|
|
22
|
+
"Programming Language :: Python :: 3.10",
|
|
23
|
+
"Programming Language :: Python :: 3.11",
|
|
24
|
+
"Programming Language :: Python :: 3.12",
|
|
25
|
+
]
|
|
26
|
+
|
|
27
|
+
dependencies = [
|
|
28
|
+
"pyyaml>=6.0",
|
|
29
|
+
"pydantic>=2.0.0",
|
|
30
|
+
"jinja2>=3.0.0",
|
|
31
|
+
]
|
|
32
|
+
|
|
33
|
+
[project.optional-dependencies]
|
|
34
|
+
dev = [
|
|
35
|
+
"pytest>=7.0",
|
|
36
|
+
"pytest-cov>=4.0",
|
|
37
|
+
"black>=23.0",
|
|
38
|
+
"mypy>=1.0",
|
|
39
|
+
"ruff>=0.1.0",
|
|
40
|
+
]
|
|
41
|
+
integration = [
|
|
42
|
+
"openai>=1.0.0",
|
|
43
|
+
"requests>=2.28.0",
|
|
44
|
+
]
|
|
45
|
+
|
|
46
|
+
[project.urls]
|
|
47
|
+
Homepage = "https://github.com/pgarcia14180/soe"
|
|
48
|
+
Documentation = "https://github.com/pgarcia14180/soe/tree/master/docs"
|
|
49
|
+
Repository = "https://github.com/pgarcia14180/soe"
|
|
50
|
+
Issues = "https://github.com/pgarcia14180/soe/issues"
|
|
51
|
+
|
|
52
|
+
[tool.setuptools]
|
|
53
|
+
packages = ["soe"]
|
|
54
|
+
|
|
55
|
+
[tool.setuptools.package-data]
|
|
56
|
+
soe = ["py.typed"]
|
|
57
|
+
|
|
58
|
+
[tool.black]
|
|
59
|
+
line-length = 88
|
|
60
|
+
target-version = ['py38', 'py39', 'py310', 'py311']
|
|
61
|
+
|
|
62
|
+
[tool.ruff]
|
|
63
|
+
line-length = 88
|
|
64
|
+
target-version = "py38"
|
|
65
|
+
|
|
66
|
+
[tool.mypy]
|
|
67
|
+
python_version = "3.8"
|
|
68
|
+
warn_return_any = true
|
|
69
|
+
warn_unused_configs = true
|
|
70
|
+
disallow_untyped_defs = false
|
|
71
|
+
|
|
72
|
+
[tool.uv.workspace]
|
|
73
|
+
members = [
|
|
74
|
+
"usage_demo",
|
|
75
|
+
".",
|
|
76
|
+
]
|
soe_ai-0.1.0/soe/__init__.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Orchestration Engine - MVP
|
|
3
|
+
Agent orchestration with event-driven workflow engine
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from .broker import orchestrate, broadcast_signals
|
|
7
|
+
from .nodes import (
|
|
8
|
+
AgentRequest,
|
|
9
|
+
AgentResponse,
|
|
10
|
+
ToolNodeConfigurationError,
|
|
11
|
+
ToolParameterError,
|
|
12
|
+
)
|
|
13
|
+
from .types import (
|
|
14
|
+
# Backend protocols - for building custom backends
|
|
15
|
+
Backends,
|
|
16
|
+
ContextBackend,
|
|
17
|
+
WorkflowBackend,
|
|
18
|
+
TelemetryBackend,
|
|
19
|
+
ConversationHistoryBackend,
|
|
20
|
+
ContextSchemaBackend,
|
|
21
|
+
IdentityBackend,
|
|
22
|
+
# LLM protocol - for integrating custom LLM providers
|
|
23
|
+
CallLlm,
|
|
24
|
+
)
|
|
25
|
+
from .init import create_all_nodes, setup_orchestration
|
|
26
|
+
|
|
27
|
+
__all__ = [
|
|
28
|
+
# Core functions
|
|
29
|
+
"orchestrate",
|
|
30
|
+
"broadcast_signals",
|
|
31
|
+
# Easy setup
|
|
32
|
+
"create_all_nodes",
|
|
33
|
+
"setup_orchestration",
|
|
34
|
+
# Agent types
|
|
35
|
+
"AgentRequest",
|
|
36
|
+
"AgentResponse",
|
|
37
|
+
# Tool errors
|
|
38
|
+
"ToolNodeConfigurationError",
|
|
39
|
+
"ToolParameterError",
|
|
40
|
+
# Backend protocols
|
|
41
|
+
"Backends",
|
|
42
|
+
"ContextBackend",
|
|
43
|
+
"WorkflowBackend",
|
|
44
|
+
"TelemetryBackend",
|
|
45
|
+
"ConversationHistoryBackend",
|
|
46
|
+
"ContextSchemaBackend",
|
|
47
|
+
"IdentityBackend",
|
|
48
|
+
# LLM protocol
|
|
49
|
+
"CallLlm",
|
|
50
|
+
]
|
|
soe_ai-0.1.0/soe/broker.py
ADDED
@@ -0,0 +1,169 @@
|
|
|
1
|
+
from uuid import uuid4
|
|
2
|
+
from typing import Dict, List, Any, Union, Callable, Optional
|
|
3
|
+
from .types import Backends, BroadcastSignalsCaller
|
|
4
|
+
from .local_backends import EventTypes
|
|
5
|
+
from .lib.register_event import register_event
|
|
6
|
+
from .lib.yaml_parser import parse_yaml
|
|
7
|
+
from .lib.operational import add_operational_state
|
|
8
|
+
from .lib.context_fields import set_field
|
|
9
|
+
from .lib.parent_sync import get_signals_for_parent
|
|
10
|
+
from .lib.inheritance import (
|
|
11
|
+
inherit_config,
|
|
12
|
+
inherit_context,
|
|
13
|
+
extract_and_save_config_sections,
|
|
14
|
+
)
|
|
15
|
+
from .validation import validate_config, validate_operational, validate_orchestrate_params
|
|
16
|
+
from .types import WorkflowValidationError
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def orchestrate(
|
|
20
|
+
config: Optional[Union[str, Dict[str, Any]]],
|
|
21
|
+
initial_workflow_name: str,
|
|
22
|
+
initial_signals: List[str],
|
|
23
|
+
initial_context: Dict[str, Any],
|
|
24
|
+
backends: Backends,
|
|
25
|
+
broadcast_signals_caller: BroadcastSignalsCaller,
|
|
26
|
+
inherit_config_from_id: Optional[str] = None,
|
|
27
|
+
inherit_context_from_id: Optional[str] = None,
|
|
28
|
+
) -> str:
|
|
29
|
+
"""
|
|
30
|
+
Initialize orchestration with config and trigger initial signals.
|
|
31
|
+
|
|
32
|
+
Config can be either:
|
|
33
|
+
|
|
34
|
+
1. Workflows only (legacy format):
|
|
35
|
+
config = {
|
|
36
|
+
"workflow_name": {
|
|
37
|
+
"node_name": {...}
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
2. Combined config with workflows, context_schema, and identities:
|
|
42
|
+
config = {
|
|
43
|
+
"workflows": {...},
|
|
44
|
+
"context_schema": {...}, # optional
|
|
45
|
+
"identities": {...} # optional
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
3. Inherited config (config=None, inherit_config_from_id provided):
|
|
49
|
+
Inherits workflows, identities, and context_schema from an existing
|
|
50
|
+
execution. Useful for workflow chaining and continuation.
|
|
51
|
+
|
|
52
|
+
Inheritance options:
|
|
53
|
+
|
|
54
|
+
- inherit_config_from_id: Copy workflows, identities, and context_schema
|
|
55
|
+
from the specified execution ID. When provided, config is optional.
|
|
56
|
+
|
|
57
|
+
- inherit_context_from_id: Copy context from the specified execution ID,
|
|
58
|
+
but ALWAYS reset __operational__ state. Useful for continuing work
|
|
59
|
+
with existing context data.
|
|
60
|
+
|
|
61
|
+
When context_schema and identities are present (either from config or
|
|
62
|
+
inherited), they are automatically saved to their respective backends,
|
|
63
|
+
keyed by the main execution ID so children can access them.
|
|
64
|
+
"""
|
|
65
|
+
validate_orchestrate_params(initial_workflow_name, initial_signals)
|
|
66
|
+
|
|
67
|
+
if config is None and inherit_config_from_id is None:
|
|
68
|
+
raise WorkflowValidationError(
|
|
69
|
+
"Either 'config' or 'inherit_config_from_id' must be provided"
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
id = str(uuid4())
|
|
73
|
+
|
|
74
|
+
if inherit_config_from_id:
|
|
75
|
+
register_event(
|
|
76
|
+
backends, id, EventTypes.CONFIG_INHERITANCE_START,
|
|
77
|
+
{"source_execution_id": inherit_config_from_id}
|
|
78
|
+
)
|
|
79
|
+
parsed_registry = inherit_config(inherit_config_from_id, id, backends)
|
|
80
|
+
if config:
|
|
81
|
+
validate_config(config)
|
|
82
|
+
parsed_config = parse_yaml(config)
|
|
83
|
+
parsed_registry = extract_and_save_config_sections(parsed_config, id, backends)
|
|
84
|
+
else:
|
|
85
|
+
validate_config(config)
|
|
86
|
+
parsed_config = parse_yaml(config)
|
|
87
|
+
parsed_registry = extract_and_save_config_sections(parsed_config, id, backends)
|
|
88
|
+
|
|
89
|
+
register_event(
|
|
90
|
+
backends, id, EventTypes.ORCHESTRATION_START,
|
|
91
|
+
{"workflow_name": initial_workflow_name}
|
|
92
|
+
)
|
|
93
|
+
|
|
94
|
+
backends.workflow.save_workflows_registry(id, parsed_registry)
|
|
95
|
+
|
|
96
|
+
if initial_workflow_name not in parsed_registry:
|
|
97
|
+
available = list(parsed_registry.keys())
|
|
98
|
+
raise WorkflowValidationError(
|
|
99
|
+
f"Workflow '{initial_workflow_name}' not found in config. "
|
|
100
|
+
f"Available workflows: {available}"
|
|
101
|
+
)
|
|
102
|
+
|
|
103
|
+
backends.workflow.save_current_workflow_name(id, initial_workflow_name)
|
|
104
|
+
|
|
105
|
+
if inherit_context_from_id:
|
|
106
|
+
register_event(
|
|
107
|
+
backends, id, EventTypes.CONTEXT_INHERITANCE_START,
|
|
108
|
+
)
|
|
109
|
+
context = inherit_context(inherit_context_from_id, backends)
|
|
110
|
+
if initial_context:
|
|
111
|
+
register_event(
|
|
112
|
+
backends, id, EventTypes.CONTEXT_MERGE,
|
|
113
|
+
{"fields": list(initial_context.keys())}
|
|
114
|
+
)
|
|
115
|
+
for field, value in initial_context.items():
|
|
116
|
+
set_field(context, field, value)
|
|
117
|
+
else:
|
|
118
|
+
context = {
|
|
119
|
+
k: [v] if not k.startswith("__") else v
|
|
120
|
+
for k, v in initial_context.items()
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
context = add_operational_state(id, context)
|
|
124
|
+
backends.context.save_context(id, context)
|
|
125
|
+
|
|
126
|
+
broadcast_signals_caller(id, initial_signals)
|
|
127
|
+
|
|
128
|
+
return id
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def broadcast_signals(
    id: str,
    signals: List[str],
    nodes: Dict[str, Callable[[str, Dict[str, Any]], None]],
    backends: Backends,
) -> None:
    """Dispatch *signals* to every node in the current workflow that listens for them.

    After local dispatch, any signals flagged for parent synchronization are
    recursively broadcast into the parent execution.
    """
    # Validate the execution's operational state before dispatching anything.
    validate_operational(id, backends)

    register_event(backends, id, EventTypes.SIGNALS_BROADCAST, {"signals": signals})

    registry = backends.workflow.soe_get_workflows_registry(id)
    current_workflow_name = backends.workflow.get_current_workflow_name(id)
    active_workflow = registry.get(current_workflow_name, {})

    incoming = set(signals)
    for name, cfg in active_workflow.items():
        # Skip nodes whose triggers don't intersect the incoming signals.
        if not incoming & set(cfg.get("event_triggers", [])):
            continue

        kind = cfg["node_type"]
        executor = nodes[kind]

        register_event(
            backends, id, EventTypes.NODE_EXECUTION,
            {"node_name": name, "node_type": kind}
        )

        # The node's own name is stamped into its config before execution.
        cfg["name"] = name
        executor(id, cfg)

    # Re-read the context: node execution above may have modified it.
    latest_context = backends.context.get_context(id)
    parent_id, signals_to_sync = get_signals_for_parent(signals, latest_context)

    if parent_id and signals_to_sync:
        register_event(
            backends, id, EventTypes.SIGNALS_TO_PARENT,
            {"signals": signals_to_sync, "parent_id": parent_id}
        )
        broadcast_signals(parent_id, signals_to_sync, nodes, backends)
|