soe-ai 0.2.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- soe/__init__.py +50 -0
- soe/broker.py +168 -0
- soe/builtin_tools/__init__.py +51 -0
- soe/builtin_tools/soe_add_signal.py +82 -0
- soe/builtin_tools/soe_call_tool.py +111 -0
- soe/builtin_tools/soe_copy_context.py +80 -0
- soe/builtin_tools/soe_explore_docs.py +290 -0
- soe/builtin_tools/soe_get_available_tools.py +42 -0
- soe/builtin_tools/soe_get_context.py +50 -0
- soe/builtin_tools/soe_get_context_schema.py +56 -0
- soe/builtin_tools/soe_get_identities.py +63 -0
- soe/builtin_tools/soe_get_workflows.py +63 -0
- soe/builtin_tools/soe_inject_context_schema_field.py +80 -0
- soe/builtin_tools/soe_inject_identity.py +64 -0
- soe/builtin_tools/soe_inject_node.py +86 -0
- soe/builtin_tools/soe_inject_workflow.py +105 -0
- soe/builtin_tools/soe_list_contexts.py +73 -0
- soe/builtin_tools/soe_remove_context_schema_field.py +61 -0
- soe/builtin_tools/soe_remove_identity.py +61 -0
- soe/builtin_tools/soe_remove_node.py +72 -0
- soe/builtin_tools/soe_remove_workflow.py +62 -0
- soe/builtin_tools/soe_update_context.py +54 -0
- soe/docs/_config.yml +10 -0
- soe/docs/advanced_patterns/guide_fanout_and_aggregations.md +318 -0
- soe/docs/advanced_patterns/guide_inheritance.md +435 -0
- soe/docs/advanced_patterns/hybrid_intelligence.md +237 -0
- soe/docs/advanced_patterns/index.md +49 -0
- soe/docs/advanced_patterns/operational.md +781 -0
- soe/docs/advanced_patterns/self_evolving_workflows.md +385 -0
- soe/docs/advanced_patterns/swarm_intelligence.md +211 -0
- soe/docs/builtins/context.md +164 -0
- soe/docs/builtins/context_schema.md +158 -0
- soe/docs/builtins/identity.md +139 -0
- soe/docs/builtins/soe_explore_docs.md +135 -0
- soe/docs/builtins/tools.md +164 -0
- soe/docs/builtins/workflows.md +199 -0
- soe/docs/guide_00_getting_started.md +341 -0
- soe/docs/guide_01_tool.md +206 -0
- soe/docs/guide_02_llm.md +143 -0
- soe/docs/guide_03_router.md +146 -0
- soe/docs/guide_04_patterns.md +475 -0
- soe/docs/guide_05_agent.md +159 -0
- soe/docs/guide_06_schema.md +397 -0
- soe/docs/guide_07_identity.md +540 -0
- soe/docs/guide_08_child.md +612 -0
- soe/docs/guide_09_ecosystem.md +690 -0
- soe/docs/guide_10_infrastructure.md +427 -0
- soe/docs/guide_11_builtins.md +126 -0
- soe/docs/index.md +104 -0
- soe/docs/primitives/backends.md +281 -0
- soe/docs/primitives/context.md +256 -0
- soe/docs/primitives/node_reference.md +259 -0
- soe/docs/primitives/primitives.md +331 -0
- soe/docs/primitives/signals.md +865 -0
- soe/docs_index.py +2 -0
- soe/init.py +165 -0
- soe/lib/__init__.py +0 -0
- soe/lib/child_context.py +46 -0
- soe/lib/context_fields.py +51 -0
- soe/lib/inheritance.py +172 -0
- soe/lib/jinja_render.py +113 -0
- soe/lib/operational.py +51 -0
- soe/lib/parent_sync.py +71 -0
- soe/lib/register_event.py +75 -0
- soe/lib/schema_validation.py +134 -0
- soe/lib/yaml_parser.py +14 -0
- soe/local_backends/__init__.py +18 -0
- soe/local_backends/factory.py +124 -0
- soe/local_backends/in_memory/context.py +38 -0
- soe/local_backends/in_memory/conversation_history.py +60 -0
- soe/local_backends/in_memory/identity.py +52 -0
- soe/local_backends/in_memory/schema.py +40 -0
- soe/local_backends/in_memory/telemetry.py +38 -0
- soe/local_backends/in_memory/workflow.py +33 -0
- soe/local_backends/storage/context.py +57 -0
- soe/local_backends/storage/conversation_history.py +82 -0
- soe/local_backends/storage/identity.py +118 -0
- soe/local_backends/storage/schema.py +96 -0
- soe/local_backends/storage/telemetry.py +72 -0
- soe/local_backends/storage/workflow.py +56 -0
- soe/nodes/__init__.py +13 -0
- soe/nodes/agent/__init__.py +10 -0
- soe/nodes/agent/factory.py +134 -0
- soe/nodes/agent/lib/loop_handlers.py +150 -0
- soe/nodes/agent/lib/loop_state.py +157 -0
- soe/nodes/agent/lib/prompts.py +65 -0
- soe/nodes/agent/lib/tools.py +35 -0
- soe/nodes/agent/stages/__init__.py +12 -0
- soe/nodes/agent/stages/parameter.py +37 -0
- soe/nodes/agent/stages/response.py +54 -0
- soe/nodes/agent/stages/router.py +37 -0
- soe/nodes/agent/state.py +111 -0
- soe/nodes/agent/types.py +66 -0
- soe/nodes/agent/validation/__init__.py +11 -0
- soe/nodes/agent/validation/config.py +95 -0
- soe/nodes/agent/validation/operational.py +24 -0
- soe/nodes/child/__init__.py +3 -0
- soe/nodes/child/factory.py +61 -0
- soe/nodes/child/state.py +59 -0
- soe/nodes/child/validation/__init__.py +11 -0
- soe/nodes/child/validation/config.py +126 -0
- soe/nodes/child/validation/operational.py +28 -0
- soe/nodes/lib/conditions.py +71 -0
- soe/nodes/lib/context.py +24 -0
- soe/nodes/lib/conversation_history.py +77 -0
- soe/nodes/lib/identity.py +64 -0
- soe/nodes/lib/llm_resolver.py +142 -0
- soe/nodes/lib/output.py +68 -0
- soe/nodes/lib/response_builder.py +91 -0
- soe/nodes/lib/signal_emission.py +79 -0
- soe/nodes/lib/signals.py +54 -0
- soe/nodes/lib/tools.py +100 -0
- soe/nodes/llm/__init__.py +7 -0
- soe/nodes/llm/factory.py +103 -0
- soe/nodes/llm/state.py +76 -0
- soe/nodes/llm/types.py +12 -0
- soe/nodes/llm/validation/__init__.py +11 -0
- soe/nodes/llm/validation/config.py +89 -0
- soe/nodes/llm/validation/operational.py +23 -0
- soe/nodes/router/__init__.py +3 -0
- soe/nodes/router/factory.py +37 -0
- soe/nodes/router/state.py +32 -0
- soe/nodes/router/validation/__init__.py +11 -0
- soe/nodes/router/validation/config.py +58 -0
- soe/nodes/router/validation/operational.py +16 -0
- soe/nodes/tool/factory.py +66 -0
- soe/nodes/tool/lib/__init__.py +11 -0
- soe/nodes/tool/lib/conditions.py +35 -0
- soe/nodes/tool/lib/failure.py +28 -0
- soe/nodes/tool/lib/parameters.py +67 -0
- soe/nodes/tool/state.py +66 -0
- soe/nodes/tool/types.py +27 -0
- soe/nodes/tool/validation/__init__.py +15 -0
- soe/nodes/tool/validation/config.py +132 -0
- soe/nodes/tool/validation/operational.py +16 -0
- soe/types.py +209 -0
- soe/validation/__init__.py +18 -0
- soe/validation/config.py +195 -0
- soe/validation/jinja.py +54 -0
- soe/validation/operational.py +110 -0
- soe_ai-0.2.0b1.dist-info/METADATA +262 -0
- soe_ai-0.2.0b1.dist-info/RECORD +145 -0
- soe_ai-0.2.0b1.dist-info/WHEEL +5 -0
- soe_ai-0.2.0b1.dist-info/licenses/LICENSE +21 -0
- soe_ai-0.2.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Unified event registration for telemetry and operational state.
|
|
3
|
+
|
|
4
|
+
Records events to telemetry backend and updates operational context state
|
|
5
|
+
(signals, nodes, llm_calls, tool_calls, errors).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from datetime import datetime, timezone
from typing import Any, Dict, Optional

from ..types import Backends, EventTypes
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def register_event(
    backends: Backends,
    execution_id: str,
    event_type: str,
    data: Optional[Dict[str, Any]] = None
) -> None:
    """
    Log telemetry and update operational state based on event type.

    Records the event to the telemetry backend (when configured), then
    mutates the execution's ``__operational__`` context section according
    to the event type: broadcast signals are appended, per-node execution
    counts and the llm/tool-call/error counters are incremented.

    Args:
        backends: Backend services (``context`` required, ``telemetry`` optional)
        execution_id: The execution ID whose context is updated
        event_type: Event type from EventTypes enum
        data: Event-specific data (merged into the telemetry record and
            consulted for the operational update)
    """
    data = data or {}

    # Log to telemetry if available
    if backends.telemetry is not None:
        backends.telemetry.log_event(
            execution_id,
            event_type,
            # Timezone-aware clock (datetime.utcnow() is deprecated);
            # normalized back to the original "...Z" suffix format.
            timestamp=datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
            context=data
        )

    # Update operational state based on event type. Every branch is a
    # read-modify-write of the execution context, centralized in
    # _apply_operational to avoid repeating the load/save boilerplate.

    if event_type == EventTypes.SIGNALS_BROADCAST:
        def mutate(operational: Dict[str, Any]) -> None:
            operational["signals"].extend(data.get("signals", []))
        _apply_operational(backends, execution_id, mutate)

    elif event_type == EventTypes.NODE_EXECUTION:
        node_name = data.get("node_name")
        if node_name:
            def mutate(operational: Dict[str, Any]) -> None:
                nodes = operational["nodes"]
                nodes[node_name] = nodes.get(node_name, 0) + 1
            _apply_operational(backends, execution_id, mutate)

    elif event_type == EventTypes.LLM_CALL:
        _apply_operational(backends, execution_id, _make_counter("llm_calls"))

    elif event_type == EventTypes.NODE_ERROR:
        _apply_operational(backends, execution_id, _make_counter("errors"))

    elif event_type in (EventTypes.TOOL_CALL, EventTypes.AGENT_TOOL_CALL):
        _apply_operational(backends, execution_id, _make_counter("tool_calls"))


def _make_counter(key: str):
    """Return a mutator that increments operational[key] by one."""
    def mutate(operational: Dict[str, Any]) -> None:
        operational[key] += 1
    return mutate


def _apply_operational(backends: Backends, execution_id: str, mutate) -> None:
    """Load the context, mutate its __operational__ section, and save it back."""
    context = backends.context.get_context(execution_id)
    mutate(context["__operational__"])
    backends.context.save_context(execution_id, context)
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Schema validation utilities for Pydantic model generation.
|
|
3
|
+
|
|
4
|
+
Converts schema definitions to Pydantic models for runtime validation.
|
|
5
|
+
Logic extracted from backends to centralize in lib.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, Any, Optional, Type, List
|
|
9
|
+
from pydantic import BaseModel, create_model, Field, RootModel
|
|
10
|
+
import json
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Type mapping from schema types to Python types
|
|
14
|
+
TYPE_MAPPING = {
|
|
15
|
+
"string": str,
|
|
16
|
+
"text": str,
|
|
17
|
+
"str": str,
|
|
18
|
+
"integer": int,
|
|
19
|
+
"int": int,
|
|
20
|
+
"number": float,
|
|
21
|
+
"float": float,
|
|
22
|
+
"boolean": bool,
|
|
23
|
+
"bool": bool,
|
|
24
|
+
"list": list,
|
|
25
|
+
"array": list,
|
|
26
|
+
"dict": dict,
|
|
27
|
+
"object": dict,
|
|
28
|
+
"any": Any,
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _schema_field_to_type(field_def: Any, model_name: str) -> Any:
|
|
33
|
+
"""Resolve a schema field definition to a Python type (supports nested properties/items)."""
|
|
34
|
+
if isinstance(field_def, str):
|
|
35
|
+
field_def = {"type": field_def}
|
|
36
|
+
|
|
37
|
+
if not isinstance(field_def, dict):
|
|
38
|
+
return Any
|
|
39
|
+
|
|
40
|
+
type_name = field_def.get("type", "any").lower()
|
|
41
|
+
|
|
42
|
+
if type_name in ("object", "dict"):
|
|
43
|
+
properties = field_def.get("properties")
|
|
44
|
+
if isinstance(properties, dict) and properties:
|
|
45
|
+
nested_model = schema_to_pydantic(properties, model_name=f"{model_name}Nested")
|
|
46
|
+
return nested_model
|
|
47
|
+
return dict
|
|
48
|
+
|
|
49
|
+
if type_name in ("list", "array"):
|
|
50
|
+
items = field_def.get("items")
|
|
51
|
+
if items:
|
|
52
|
+
if isinstance(items, str):
|
|
53
|
+
items = {"type": items}
|
|
54
|
+
item_type = _schema_field_to_type(items, model_name=f"{model_name}Item")
|
|
55
|
+
return List[item_type]
|
|
56
|
+
return list
|
|
57
|
+
|
|
58
|
+
return TYPE_MAPPING.get(type_name, Any)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def schema_to_root_model(
    field_def: Dict[str, Any],
    model_name: str = "RootModel"
) -> Type[BaseModel]:
    """Convert a single field schema into a RootModel for flat output validation."""
    # Normalize the bare-string shorthand before resolving the type.
    normalized = {"type": field_def} if isinstance(field_def, str) else field_def
    resolved_type = _schema_field_to_type(normalized, model_name=model_name)

    class _GeneratedRoot(RootModel[resolved_type]):
        pass

    # Expose the caller-chosen name on the generated class.
    _GeneratedRoot.__name__ = model_name
    return _GeneratedRoot
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def schema_to_pydantic(
    schema: Dict[str, Any],
    model_name: str = "DynamicModel"
) -> Type[BaseModel]:
    """
    Convert a schema dict to a Pydantic model.

    Schema format:
        {
            "field_name": {
                "type": "string",
                "description": "Field description",  # optional
                "required": true,                    # optional, default true
                "default": null                      # optional
            }
        }

    Args:
        schema: Dict mapping field names to field definitions
        model_name: Name for the generated Pydantic model

    Returns:
        Dynamically created Pydantic model class
    """
    field_specs: Dict[str, Any] = {}

    for name, definition in schema.items():
        # A bare type-name string is shorthand for {"type": <name>}.
        if isinstance(definition, str):
            definition = {"type": definition}

        # Resolve to a Python type (handles nested properties/items).
        py_type = _schema_field_to_type(
            definition, model_name=f"{model_name}{name.title()}"
        )

        description = definition.get("description", "")
        is_required = definition.get("required", True)
        default_value = definition.get("default", None)

        if is_required and default_value is None:
            # Required field with no default value.
            if description:
                field_specs[name] = (py_type, Field(..., description=description))
            else:
                field_specs[name] = (py_type, ...)
        elif description:
            # Optional field (or one with a default) carrying a description.
            field_specs[name] = (
                Optional[py_type],
                Field(default=default_value, description=description),
            )
        else:
            # Optional field (or one with a default), no description.
            field_specs[name] = (Optional[py_type], default_value)

    # Allow extra fields to be preserved (important for nested partial schemas)
    return create_model(model_name, **field_specs, __config__={"extra": "allow"})
|
soe/lib/yaml_parser.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""YAML parsing utilities."""
|
|
2
|
+
|
|
3
|
+
import yaml
|
|
4
|
+
from typing import Dict, Any, Union
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def parse_yaml(data: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
    """Parse a YAML string to a dict, or return a dict argument as-is.

    Args:
        data: Raw YAML text, or an already-parsed mapping.

    Returns:
        The parsed value (``yaml.safe_load`` result for strings; note that
        safe_load may return None/scalars for non-mapping YAML input).

    Raises:
        ValueError: If the string is not valid YAML; the underlying
            ``yaml.YAMLError`` is chained as the cause.
    """
    if isinstance(data, str):
        try:
            return yaml.safe_load(data)
        except yaml.YAMLError as e:
            # Chain the parser error so tracebacks show the root cause.
            raise ValueError(f"Invalid YAML configuration: {e}") from e
    return data
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Local file-based backends for orchestration testing
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from .storage.context import LocalContextBackend
|
|
6
|
+
from .storage.workflow import LocalWorkflowBackend
|
|
7
|
+
from .storage.telemetry import LocalTelemetryBackend
|
|
8
|
+
from .storage.conversation_history import LocalConversationHistoryBackend
|
|
9
|
+
from .factory import create_local_backends, create_in_memory_backends
|
|
10
|
+
|
|
11
|
+
__all__ = [
|
|
12
|
+
"LocalContextBackend",
|
|
13
|
+
"LocalWorkflowBackend",
|
|
14
|
+
"LocalTelemetryBackend",
|
|
15
|
+
"LocalConversationHistoryBackend",
|
|
16
|
+
"create_local_backends",
|
|
17
|
+
"create_in_memory_backends",
|
|
18
|
+
]
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Factory for creating local backends
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Optional, Any
|
|
6
|
+
from .storage.context import LocalContextBackend
|
|
7
|
+
from .storage.workflow import LocalWorkflowBackend
|
|
8
|
+
from .storage.telemetry import LocalTelemetryBackend
|
|
9
|
+
from .storage.conversation_history import LocalConversationHistoryBackend
|
|
10
|
+
from .storage.schema import LocalContextSchemaBackend
|
|
11
|
+
from .storage.identity import LocalIdentityBackend
|
|
12
|
+
from .in_memory.context import InMemoryContextBackend
|
|
13
|
+
from .in_memory.workflow import InMemoryWorkflowBackend
|
|
14
|
+
from .in_memory.telemetry import InMemoryTelemetryBackend
|
|
15
|
+
from .in_memory.conversation_history import InMemoryConversationHistoryBackend
|
|
16
|
+
from .in_memory.schema import InMemoryContextSchemaBackend
|
|
17
|
+
from .in_memory.identity import InMemoryIdentityBackend
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class LocalBackends:
    """Container bundling the backend services for a local deployment."""

    def __init__(
        self,
        context_backend: Any,
        workflow_backend: Any,
        telemetry_backend: Optional[Any] = None,
        conversation_history_backend: Optional[Any] = None,
        context_schema_backend: Optional[Any] = None,
        identity_backend: Optional[Any] = None,
    ):
        # Required backends.
        self.context = context_backend
        self.workflow = workflow_backend
        # Optional backends; None means the capability is disabled.
        self.telemetry = telemetry_backend
        self.conversation_history = conversation_history_backend
        self.context_schema = context_schema_backend
        self.identity = identity_backend

    def cleanup_all(self) -> None:
        """
        Cleanup all backend data

        This method is useful for test cleanup to remove all stored data
        """
        self.context.cleanup_all()
        self.workflow.cleanup_all()
        # Optional backends are cleaned only when configured.
        for optional_backend in (
            self.telemetry,
            self.conversation_history,
            self.context_schema,
            self.identity,
        ):
            if optional_backend:
                optional_backend.cleanup_all()
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def create_local_backends(
    context_storage_dir: str = "./orchestration_data/contexts",
    workflow_storage_dir: str = "./orchestration_data/workflows",
    telemetry_storage_dir: Optional[str] = None,
    conversation_history_storage_dir: Optional[str] = "./orchestration_data/conversations",
    context_schema_storage_dir: Optional[str] = "./orchestration_data/schemas",
    identity_storage_dir: Optional[str] = "./orchestration_data/identities",
) -> LocalBackends:
    """
    Create local file-based backends for testing and development

    Args:
        context_storage_dir: Directory for context storage
        workflow_storage_dir: Directory for workflow storage
        telemetry_storage_dir: Directory for telemetry storage (optional)
        conversation_history_storage_dir: Directory for conversation history storage (optional)
        context_schema_storage_dir: Directory for context schema storage (optional)
        identity_storage_dir: Directory for identity storage (optional)

    Returns:
        LocalBackends instance with context, workflow, and optional backends
    """
    # Optional backends are instantiated only when a storage dir was given.
    return LocalBackends(
        LocalContextBackend(context_storage_dir),
        LocalWorkflowBackend(workflow_storage_dir),
        LocalTelemetryBackend(telemetry_storage_dir)
        if telemetry_storage_dir else None,
        LocalConversationHistoryBackend(conversation_history_storage_dir)
        if conversation_history_storage_dir else None,
        LocalContextSchemaBackend(context_schema_storage_dir)
        if context_schema_storage_dir else None,
        LocalIdentityBackend(identity_storage_dir)
        if identity_storage_dir else None,
    )
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def create_in_memory_backends() -> LocalBackends:
    """
    Create in-memory backends for testing

    Returns:
        LocalBackends instance with in-memory backends
    """
    # Every backend is enabled; all state lives in process memory only.
    backend_classes = {
        "context_backend": InMemoryContextBackend,
        "workflow_backend": InMemoryWorkflowBackend,
        "telemetry_backend": InMemoryTelemetryBackend,
        "conversation_history_backend": InMemoryConversationHistoryBackend,
        "context_schema_backend": InMemoryContextSchemaBackend,
        "identity_backend": InMemoryIdentityBackend,
    }
    return LocalBackends(**{kwarg: cls() for kwarg, cls in backend_classes.items()})
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""
|
|
2
|
+
In-memory context backend
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Dict, Any
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class InMemoryContextBackend:
    """Dict-backed context storage keyed by execution ID."""

    def __init__(self):
        # execution_id -> context dict; process-local only.
        self._storage: Dict[str, Dict[str, Any]] = {}

    def get_context(self, execution_id: str) -> Dict[str, Any]:
        """Return the stored context for *execution_id*, or {} when unknown."""
        return self._storage.get(execution_id, {})

    def save_context(self, execution_id: str, context: Dict[str, Any]) -> None:
        """Store a shallow copy of *context* under *execution_id*.

        The copy decouples the saved top-level mapping from later
        caller-side mutation of the dict that was passed in.
        """
        self._storage[execution_id] = dict(context)

    def cleanup_all(self) -> None:
        """Drop all stored contexts."""
        self._storage.clear()
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"""
|
|
2
|
+
In-memory conversation history backend
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Dict, Any, List
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class InMemoryConversationHistoryBackend:
    """Dict-backed conversation history keyed by identity."""

    def __init__(self):
        # identity -> ordered list of message entries.
        self._history: Dict[str, List[Dict[str, Any]]] = {}

    def get_conversation_history(self, identity: str) -> List[Dict[str, Any]]:
        """Return the message list for *identity* ([] when unknown)."""
        return self._history.get(identity, [])

    def append_to_conversation_history(self, identity: str, entry: Dict[str, Any]) -> None:
        """Append one message *entry* to the identity's history."""
        self._history.setdefault(identity, []).append(entry)

    def save_conversation_history(self, identity: str, history: List[Dict[str, Any]]) -> None:
        """Replace the identity's full history (stored by reference)."""
        self._history[identity] = history

    def delete_conversation_history(self, identity: str) -> None:
        """Remove the identity's history; no-op when absent."""
        self._history.pop(identity, None)

    def cleanup_all(self) -> None:
        """Drop all histories."""
        self._history.clear()
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"""In-memory identity backend - dumb storage only.
|
|
2
|
+
|
|
3
|
+
Keyed by execution_id (main_execution_id) so children can access parent's identities.
|
|
4
|
+
|
|
5
|
+
Identity format is simple: identity_name -> system_prompt (string)
|
|
6
|
+
Example:
|
|
7
|
+
assistant: "You are a helpful assistant."
|
|
8
|
+
coding_expert: "You are an expert programmer."
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from typing import Dict, Optional
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class InMemoryIdentityBackend:
    """In-memory identity backend.

    Stores identity definitions per execution. Identities define the
    participating personas/roles and are used as initial system prompts.
    Format: identity_name -> system_prompt (string), keyed by execution_id
    (main_execution_id) so children can access the parent's identities.
    """

    def __init__(self):
        # execution_id -> {identity_name: system_prompt}
        self._identities: Dict[str, Dict[str, str]] = {}

    def save_identities(self, execution_id: str, identities: Dict[str, str]) -> None:
        """Save identity definitions for execution."""
        self._identities[execution_id] = identities

    def get_identities(self, execution_id: str) -> Optional[Dict[str, str]]:
        """Get identity definitions for execution (None when absent)."""
        return self._identities.get(execution_id)

    def get_identity(self, execution_id: str, identity_name: str) -> Optional[str]:
        """Get a specific identity's system prompt (None when absent)."""
        known = self.get_identities(execution_id)
        if not known:
            return None
        return known.get(identity_name)

    def delete_identities(self, execution_id: str) -> bool:
        """Delete identity definitions for execution.

        Returns:
            True if deleted, False if not found
        """
        try:
            del self._identities[execution_id]
        except KeyError:
            return False
        return True

    def cleanup_all(self) -> None:
        """Cleanup all data."""
        self._identities.clear()
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"""In-memory context schema backend - dumb storage only.
|
|
2
|
+
|
|
3
|
+
Keyed by execution_id (main_execution_id) so children can access parent's schemas.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, Any, Optional
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class InMemoryContextSchemaBackend:
    """In-memory context schema backend.

    Keyed by execution_id (main_execution_id) so children can access the
    parent's schemas.
    """

    def __init__(self):
        # execution_id -> schema definition dict.
        self._schemas: Dict[str, Dict[str, Any]] = {}

    def save_context_schema(self, execution_id: str, schema: Dict[str, Any]) -> None:
        """Save context schema for execution."""
        self._schemas[execution_id] = schema

    def get_context_schema(self, execution_id: str) -> Optional[Dict[str, Any]]:
        """Get context schema for execution (None when absent)."""
        return self._schemas.get(execution_id)

    def delete_context_schema(self, execution_id: str) -> bool:
        """
        Delete context schema for execution.

        Args:
            execution_id: Execution ID (typically main_execution_id)

        Returns:
            True if deleted, False if not found
        """
        try:
            del self._schemas[execution_id]
        except KeyError:
            return False
        return True

    def cleanup_all(self) -> None:
        """Cleanup all data"""
        self._schemas.clear()
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""
|
|
2
|
+
In-memory telemetry backend - dumb storage only.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Dict, Any, List
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class InMemoryTelemetryBackend:
    """In-memory telemetry event log keyed by execution ID."""

    def __init__(self):
        # execution_id -> ordered list of event records.
        self._events: Dict[str, List[Dict[str, Any]]] = {}

    def log_event(self, execution_id: str, event_type: str, **event_data) -> None:
        """
        Log telemetry event

        Args:
            execution_id: Execution ID
            event_type: Type of event
            **event_data: Additional event data (merged into the record)
        """
        record = {"event_type": event_type, **event_data}
        self._events.setdefault(execution_id, []).append(record)

    def get_events(self, execution_id: str) -> List[Dict[str, Any]]:
        """Return all events logged for *execution_id* ([] when none)."""
        return self._events.get(execution_id, [])

    def cleanup_all(self) -> None:
        """Drop all recorded events."""
        self._events.clear()
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
"""In-memory workflow backend - dumb storage only."""
|
|
2
|
+
|
|
3
|
+
from typing import Dict, Any
|
|
4
|
+
import copy
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class InMemoryWorkflowBackend:
    """In-memory workflow storage backend.

    Registries are deep-copied on both save and read so callers can never
    mutate the stored state through shared references.
    """

    def __init__(self):
        # execution_id -> deep-copied workflows registry snapshot.
        self._registries: Dict[str, Any] = {}
        # execution_id -> name of the currently active workflow.
        self._current_workflows: Dict[str, str] = {}

    def save_workflows_registry(self, id: str, workflows: Dict[str, Any]) -> None:
        """Store a deep-copied snapshot of *workflows* under *id*."""
        self._registries[id] = copy.deepcopy(workflows)

    def get_workflows_registry(self, id: str) -> Any:
        """Return a deep copy of the registry for *id* (None when absent)."""
        return copy.deepcopy(self._registries.get(id))

    def save_current_workflow_name(self, id: str, name: str) -> None:
        """Remember *name* as the current workflow for *id*."""
        self._current_workflows[id] = name

    def get_current_workflow_name(self, id: str) -> str:
        """Return the current workflow name for *id* ("" when unset)."""
        return self._current_workflows.get(id, "")

    def cleanup_all(self) -> None:
        """Drop all registries and current-workflow markers."""
        self._registries.clear()
        self._current_workflows.clear()
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Local file-based context backend
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Dict, Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class LocalContextBackend:
    """File-based context storage backend.

    Each execution's context is persisted as
    ``<storage_dir>/<execution_id>.json``.
    """

    def __init__(self, storage_dir: str = "./orchestration_data/contexts"):
        """
        Initialize local context backend

        Args:
            storage_dir: Directory to store context files
        """
        self.storage_dir = Path(storage_dir)
        # Ensure the directory exists up front so reads/writes never race on it.
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def _path_for(self, execution_id: str) -> Path:
        """Return the JSON file path backing *execution_id*."""
        return self.storage_dir / f"{execution_id}.json"

    def get_context(self, execution_id: str) -> Dict[str, Any]:
        """
        Get context for execution ID

        Args:
            execution_id: Unique execution identifier

        Returns:
            Context dictionary, empty if not found
        """
        context_path = self._path_for(execution_id)
        if not context_path.exists():
            return {}
        return json.loads(context_path.read_text())

    def save_context(self, execution_id: str, context: Dict[str, Any]) -> None:
        """
        Save context for execution ID

        Args:
            execution_id: Unique execution identifier
            context: Context dictionary to save

        Note: non-JSON-serializable values are stringified via ``default=str``.
        """
        self._path_for(execution_id).write_text(
            json.dumps(context, indent=2, default=str)
        )

    def cleanup_all(self) -> None:
        """Delete all context files. Used for test cleanup."""
        for stored_file in self.storage_dir.glob("*.json"):
            stored_file.unlink()
|