soe-ai 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. soe/builtin_tools/__init__.py +39 -0
  2. soe/builtin_tools/soe_add_signal.py +82 -0
  3. soe/builtin_tools/soe_call_tool.py +111 -0
  4. soe/builtin_tools/soe_copy_context.py +80 -0
  5. soe/builtin_tools/soe_explore_docs.py +290 -0
  6. soe/builtin_tools/soe_get_available_tools.py +42 -0
  7. soe/builtin_tools/soe_get_context.py +50 -0
  8. soe/builtin_tools/soe_get_workflows.py +63 -0
  9. soe/builtin_tools/soe_inject_node.py +86 -0
  10. soe/builtin_tools/soe_inject_workflow.py +105 -0
  11. soe/builtin_tools/soe_list_contexts.py +73 -0
  12. soe/builtin_tools/soe_remove_node.py +72 -0
  13. soe/builtin_tools/soe_remove_workflow.py +62 -0
  14. soe/builtin_tools/soe_update_context.py +54 -0
  15. soe/docs/_config.yml +10 -0
  16. soe/docs/advanced_patterns/guide_fanout_and_aggregations.md +318 -0
  17. soe/docs/advanced_patterns/guide_inheritance.md +435 -0
  18. soe/docs/advanced_patterns/hybrid_intelligence.md +237 -0
  19. soe/docs/advanced_patterns/index.md +49 -0
  20. soe/docs/advanced_patterns/operational.md +781 -0
  21. soe/docs/advanced_patterns/self_evolving_workflows.md +385 -0
  22. soe/docs/advanced_patterns/swarm_intelligence.md +211 -0
  23. soe/docs/builtins/context.md +164 -0
  24. soe/docs/builtins/explore_docs.md +135 -0
  25. soe/docs/builtins/tools.md +164 -0
  26. soe/docs/builtins/workflows.md +199 -0
  27. soe/docs/guide_00_getting_started.md +341 -0
  28. soe/docs/guide_01_tool.md +206 -0
  29. soe/docs/guide_02_llm.md +143 -0
  30. soe/docs/guide_03_router.md +146 -0
  31. soe/docs/guide_04_patterns.md +475 -0
  32. soe/docs/guide_05_agent.md +159 -0
  33. soe/docs/guide_06_schema.md +397 -0
  34. soe/docs/guide_07_identity.md +540 -0
  35. soe/docs/guide_08_child.md +612 -0
  36. soe/docs/guide_09_ecosystem.md +690 -0
  37. soe/docs/guide_10_infrastructure.md +427 -0
  38. soe/docs/guide_11_builtins.md +118 -0
  39. soe/docs/index.md +104 -0
  40. soe/docs/primitives/backends.md +281 -0
  41. soe/docs/primitives/context.md +256 -0
  42. soe/docs/primitives/node_reference.md +259 -0
  43. soe/docs/primitives/primitives.md +331 -0
  44. soe/docs/primitives/signals.md +865 -0
  45. soe/docs_index.py +1 -1
  46. soe/lib/__init__.py +0 -0
  47. soe/lib/child_context.py +46 -0
  48. soe/lib/context_fields.py +51 -0
  49. soe/lib/inheritance.py +172 -0
  50. soe/lib/jinja_render.py +113 -0
  51. soe/lib/operational.py +51 -0
  52. soe/lib/parent_sync.py +71 -0
  53. soe/lib/register_event.py +75 -0
  54. soe/lib/schema_validation.py +134 -0
  55. soe/lib/yaml_parser.py +14 -0
  56. soe/local_backends/__init__.py +18 -0
  57. soe/local_backends/factory.py +124 -0
  58. soe/local_backends/in_memory/context.py +38 -0
  59. soe/local_backends/in_memory/conversation_history.py +60 -0
  60. soe/local_backends/in_memory/identity.py +52 -0
  61. soe/local_backends/in_memory/schema.py +40 -0
  62. soe/local_backends/in_memory/telemetry.py +38 -0
  63. soe/local_backends/in_memory/workflow.py +33 -0
  64. soe/local_backends/storage/context.py +57 -0
  65. soe/local_backends/storage/conversation_history.py +82 -0
  66. soe/local_backends/storage/identity.py +118 -0
  67. soe/local_backends/storage/schema.py +96 -0
  68. soe/local_backends/storage/telemetry.py +72 -0
  69. soe/local_backends/storage/workflow.py +56 -0
  70. soe/nodes/__init__.py +13 -0
  71. soe/nodes/agent/__init__.py +10 -0
  72. soe/nodes/agent/factory.py +134 -0
  73. soe/nodes/agent/lib/loop_handlers.py +150 -0
  74. soe/nodes/agent/lib/loop_state.py +157 -0
  75. soe/nodes/agent/lib/prompts.py +65 -0
  76. soe/nodes/agent/lib/tools.py +35 -0
  77. soe/nodes/agent/stages/__init__.py +12 -0
  78. soe/nodes/agent/stages/parameter.py +37 -0
  79. soe/nodes/agent/stages/response.py +54 -0
  80. soe/nodes/agent/stages/router.py +37 -0
  81. soe/nodes/agent/state.py +111 -0
  82. soe/nodes/agent/types.py +66 -0
  83. soe/nodes/agent/validation/__init__.py +11 -0
  84. soe/nodes/agent/validation/config.py +95 -0
  85. soe/nodes/agent/validation/operational.py +24 -0
  86. soe/nodes/child/__init__.py +3 -0
  87. soe/nodes/child/factory.py +61 -0
  88. soe/nodes/child/state.py +59 -0
  89. soe/nodes/child/validation/__init__.py +11 -0
  90. soe/nodes/child/validation/config.py +126 -0
  91. soe/nodes/child/validation/operational.py +28 -0
  92. soe/nodes/lib/conditions.py +71 -0
  93. soe/nodes/lib/context.py +24 -0
  94. soe/nodes/lib/conversation_history.py +77 -0
  95. soe/nodes/lib/identity.py +64 -0
  96. soe/nodes/lib/llm_resolver.py +142 -0
  97. soe/nodes/lib/output.py +68 -0
  98. soe/nodes/lib/response_builder.py +91 -0
  99. soe/nodes/lib/signal_emission.py +79 -0
  100. soe/nodes/lib/signals.py +54 -0
  101. soe/nodes/lib/tools.py +100 -0
  102. soe/nodes/llm/__init__.py +7 -0
  103. soe/nodes/llm/factory.py +103 -0
  104. soe/nodes/llm/state.py +76 -0
  105. soe/nodes/llm/types.py +12 -0
  106. soe/nodes/llm/validation/__init__.py +11 -0
  107. soe/nodes/llm/validation/config.py +89 -0
  108. soe/nodes/llm/validation/operational.py +23 -0
  109. soe/nodes/router/__init__.py +3 -0
  110. soe/nodes/router/factory.py +37 -0
  111. soe/nodes/router/state.py +32 -0
  112. soe/nodes/router/validation/__init__.py +11 -0
  113. soe/nodes/router/validation/config.py +58 -0
  114. soe/nodes/router/validation/operational.py +16 -0
  115. soe/nodes/tool/factory.py +66 -0
  116. soe/nodes/tool/lib/__init__.py +11 -0
  117. soe/nodes/tool/lib/conditions.py +35 -0
  118. soe/nodes/tool/lib/failure.py +28 -0
  119. soe/nodes/tool/lib/parameters.py +67 -0
  120. soe/nodes/tool/state.py +66 -0
  121. soe/nodes/tool/types.py +27 -0
  122. soe/nodes/tool/validation/__init__.py +15 -0
  123. soe/nodes/tool/validation/config.py +132 -0
  124. soe/nodes/tool/validation/operational.py +16 -0
  125. soe/validation/__init__.py +18 -0
  126. soe/validation/config.py +195 -0
  127. soe/validation/jinja.py +54 -0
  128. soe/validation/operational.py +110 -0
  129. {soe_ai-0.1.1.dist-info → soe_ai-0.1.2.dist-info}/METADATA +4 -4
  130. soe_ai-0.1.2.dist-info/RECORD +137 -0
  131. {soe_ai-0.1.1.dist-info → soe_ai-0.1.2.dist-info}/WHEEL +1 -1
  132. soe_ai-0.1.1.dist-info/RECORD +0 -10
  133. {soe_ai-0.1.1.dist-info → soe_ai-0.1.2.dist-info}/licenses/LICENSE +0 -0
  134. {soe_ai-0.1.1.dist-info → soe_ai-0.1.2.dist-info}/top_level.txt +0 -0
soe/lib/schema_validation.py ADDED
@@ -0,0 +1,134 @@
+"""
+Schema validation utilities for Pydantic model generation.
+
+Converts schema definitions to Pydantic models for runtime validation.
+Logic extracted from backends to centralize in lib.
+"""
+
+from typing import Dict, Any, Optional, Type, List
+from pydantic import BaseModel, create_model, Field, RootModel
+import json
+
+
+# Type mapping from schema types to Python types
+TYPE_MAPPING = {
+    "string": str,
+    "text": str,
+    "str": str,
+    "integer": int,
+    "int": int,
+    "number": float,
+    "float": float,
+    "boolean": bool,
+    "bool": bool,
+    "list": list,
+    "array": list,
+    "dict": dict,
+    "object": dict,
+    "any": Any,
+}
+
+
+def _schema_field_to_type(field_def: Any, model_name: str) -> Any:
+    """Resolve a schema field definition to a Python type (supports nested properties/items)."""
+    if isinstance(field_def, str):
+        field_def = {"type": field_def}
+
+    if not isinstance(field_def, dict):
+        return Any
+
+    type_name = field_def.get("type", "any").lower()
+
+    if type_name in ("object", "dict"):
+        properties = field_def.get("properties")
+        if isinstance(properties, dict) and properties:
+            nested_model = schema_to_pydantic(properties, model_name=f"{model_name}Nested")
+            return nested_model
+        return dict
+
+    if type_name in ("list", "array"):
+        items = field_def.get("items")
+        if items:
+            if isinstance(items, str):
+                items = {"type": items}
+            item_type = _schema_field_to_type(items, model_name=f"{model_name}Item")
+            return List[item_type]
+        return list
+
+    return TYPE_MAPPING.get(type_name, Any)
+
+
+def schema_to_root_model(
+    field_def: Dict[str, Any],
+    model_name: str = "RootModel"
+) -> Type[BaseModel]:
+    """Convert a single field schema into a RootModel for flat output validation."""
+    if isinstance(field_def, str):
+        field_def = {"type": field_def}
+
+    root_type = _schema_field_to_type(field_def, model_name=model_name)
+
+    class DynamicRoot(RootModel[root_type]):
+        pass
+
+    DynamicRoot.__name__ = model_name
+    return DynamicRoot
+
+
+def schema_to_pydantic(
+    schema: Dict[str, Any],
+    model_name: str = "DynamicModel"
+) -> Type[BaseModel]:
+    """
+    Convert a schema dict to a Pydantic model.
+
+    Schema format:
+        {
+            "field_name": {
+                "type": "string",
+                "description": "Field description",  # optional
+                "required": true,                     # optional, default true
+                "default": null                       # optional
+            }
+        }
+
+    Args:
+        schema: Dict mapping field names to field definitions
+        model_name: Name for the generated Pydantic model
+
+    Returns:
+        Dynamically created Pydantic model class
+    """
+    fields = {}
+
+    for field_name, field_def in schema.items():
+        # Handle simple type definition (just the type name as string)
+        if isinstance(field_def, str):
+            field_def = {"type": field_def}
+
+        # Get Python type (supports nested properties/items)
+        python_type = _schema_field_to_type(field_def, model_name=f"{model_name}{field_name.title()}")
+
+        # Get field metadata
+        description = field_def.get("description", "")
+        required = field_def.get("required", True)
+        default = field_def.get("default", None)
+
+        # Build field tuple
+        if required and default is None:
+            # Required field with no default
+            if description:
+                field_info = (python_type, Field(..., description=description))
+            else:
+                field_info = (python_type, ...)
+        else:
+            # Optional field or has default
+            if description:
+                field_info = (Optional[python_type], Field(default=default, description=description))
+            else:
+                field_info = (Optional[python_type], default)
+
+        fields[field_name] = field_info
+
+    # Allow extra fields to be preserved (important for nested partial schemas)
+    return create_model(model_name, **fields, __config__={"extra": "allow"})
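As a quick illustration of the schema format documented in schema_to_pydantic's docstring, a minimal usage sketch follows; the import path soe.lib.schema_validation is assumed from the file location in this diff, and the model/field names are illustrative only.

# Hypothetical usage sketch; import path assumed from soe/lib/schema_validation.py.
from soe.lib.schema_validation import schema_to_pydantic

# Schema dict in the format documented above.
schema = {
    "name": {"type": "string", "description": "User name", "required": True},
    "age": {"type": "integer", "required": False, "default": 0},
    "tags": {"type": "list", "items": "string"},
}

UserModel = schema_to_pydantic(schema, model_name="UserModel")

# Validate a payload; the extra field is preserved because the model allows extras.
user = UserModel(name="Ada", tags=["admin"], note="kept as an extra field")
print(user.model_dump())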
soe/lib/yaml_parser.py ADDED
@@ -0,0 +1,14 @@
+"""YAML parsing utilities."""
+
+import yaml
+from typing import Dict, Any, Union
+
+
+def parse_yaml(data: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
+    """Parse YAML string to dict, or return dict as-is."""
+    if isinstance(data, str):
+        try:
+            return yaml.safe_load(data)
+        except yaml.YAMLError as e:
+            raise ValueError(f"Invalid YAML configuration: {e}")
+    return data
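A short usage sketch, assuming the module is importable as soe.lib.yaml_parser (the config keys are illustrative):

# Hypothetical usage; import path assumed from soe/lib/yaml_parser.py.
from soe.lib.yaml_parser import parse_yaml

config = parse_yaml("model: gpt-4\ntemperature: 0.2\n")  # YAML string -> dict
passthrough = parse_yaml({"model": "gpt-4"})             # dicts are returned unchanged
print(config["temperature"], passthrough["model"])       # 0.2 gpt-4
# Malformed YAML raises ValueError("Invalid YAML configuration: ...")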
soe/local_backends/__init__.py ADDED
@@ -0,0 +1,18 @@
+"""
+Local file-based backends for orchestration testing
+"""
+
+from .storage.context import LocalContextBackend
+from .storage.workflow import LocalWorkflowBackend
+from .storage.telemetry import LocalTelemetryBackend
+from .storage.conversation_history import LocalConversationHistoryBackend
+from .factory import create_local_backends, create_in_memory_backends
+
+__all__ = [
+    "LocalContextBackend",
+    "LocalWorkflowBackend",
+    "LocalTelemetryBackend",
+    "LocalConversationHistoryBackend",
+    "create_local_backends",
+    "create_in_memory_backends",
+]
soe/local_backends/factory.py ADDED
@@ -0,0 +1,124 @@
+"""
+Factory for creating local backends
+"""
+
+from typing import Optional, Any
+from .storage.context import LocalContextBackend
+from .storage.workflow import LocalWorkflowBackend
+from .storage.telemetry import LocalTelemetryBackend
+from .storage.conversation_history import LocalConversationHistoryBackend
+from .storage.schema import LocalContextSchemaBackend
+from .storage.identity import LocalIdentityBackend
+from .in_memory.context import InMemoryContextBackend
+from .in_memory.workflow import InMemoryWorkflowBackend
+from .in_memory.telemetry import InMemoryTelemetryBackend
+from .in_memory.conversation_history import InMemoryConversationHistoryBackend
+from .in_memory.schema import InMemoryContextSchemaBackend
+from .in_memory.identity import InMemoryIdentityBackend
+
+
+class LocalBackends:
+    """Container for local backends"""
+
+    def __init__(
+        self,
+        context_backend: Any,
+        workflow_backend: Any,
+        telemetry_backend: Optional[Any] = None,
+        conversation_history_backend: Optional[Any] = None,
+        context_schema_backend: Optional[Any] = None,
+        identity_backend: Optional[Any] = None,
+    ):
+        self.context = context_backend
+        self.workflow = workflow_backend
+        self.telemetry = telemetry_backend
+        self.conversation_history = conversation_history_backend
+        self.context_schema = context_schema_backend
+        self.identity = identity_backend
+
+    def cleanup_all(self) -> None:
+        """
+        Cleanup all backend data
+
+        This method is useful for test cleanup to remove all stored data
+        """
+        self.context.cleanup_all()
+        self.workflow.cleanup_all()
+        if self.telemetry:
+            self.telemetry.cleanup_all()
+        if self.conversation_history:
+            self.conversation_history.cleanup_all()
+        if self.context_schema:
+            self.context_schema.cleanup_all()
+        if self.identity:
+            self.identity.cleanup_all()
+
+
+def create_local_backends(
+    context_storage_dir: str = "./orchestration_data/contexts",
+    workflow_storage_dir: str = "./orchestration_data/workflows",
+    telemetry_storage_dir: Optional[str] = None,
+    conversation_history_storage_dir: Optional[str] = "./orchestration_data/conversations",
+    context_schema_storage_dir: Optional[str] = "./orchestration_data/schemas",
+    identity_storage_dir: Optional[str] = "./orchestration_data/identities",
+) -> LocalBackends:
+    """
+    Create local file-based backends for testing and development
+
+    Args:
+        context_storage_dir: Directory for context storage
+        workflow_storage_dir: Directory for workflow storage
+        telemetry_storage_dir: Directory for telemetry storage (optional)
+        conversation_history_storage_dir: Directory for conversation history storage (optional)
+        context_schema_storage_dir: Directory for context schema storage (optional)
+        identity_storage_dir: Directory for identity storage (optional)
+
+    Returns:
+        LocalBackends instance with context, workflow, and optional backends
+    """
+    context_backend = LocalContextBackend(context_storage_dir)
+    workflow_backend = LocalWorkflowBackend(workflow_storage_dir)
+
+    telemetry_backend = None
+    if telemetry_storage_dir:
+        telemetry_backend = LocalTelemetryBackend(telemetry_storage_dir)
+
+    conversation_history_backend = None
+    if conversation_history_storage_dir:
+        conversation_history_backend = LocalConversationHistoryBackend(
+            conversation_history_storage_dir
+        )
+
+    context_schema_backend = None
+    if context_schema_storage_dir:
+        context_schema_backend = LocalContextSchemaBackend(context_schema_storage_dir)
+
+    identity_backend = None
+    if identity_storage_dir:
+        identity_backend = LocalIdentityBackend(identity_storage_dir)
+
+    return LocalBackends(
+        context_backend,
+        workflow_backend,
+        telemetry_backend,
+        conversation_history_backend,
+        context_schema_backend,
+        identity_backend,
+    )
+
+
+def create_in_memory_backends() -> LocalBackends:
+    """
+    Create in-memory backends for testing
+
+    Returns:
+        LocalBackends instance with in-memory backends
+    """
+    return LocalBackends(
+        context_backend=InMemoryContextBackend(),
+        workflow_backend=InMemoryWorkflowBackend(),
+        telemetry_backend=InMemoryTelemetryBackend(),
+        conversation_history_backend=InMemoryConversationHistoryBackend(),
+        context_schema_backend=InMemoryContextSchemaBackend(),
+        identity_backend=InMemoryIdentityBackend(),
+    )
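A minimal sketch of how these factories might be used in a test, assuming the imports re-exported by soe/local_backends/__init__.py above; the directory paths and execution IDs are illustrative.

# Hypothetical test sketch; imports assumed from the package __init__.py shown above.
from soe.local_backends import create_local_backends, create_in_memory_backends

# File-based backends; storage dirs not overridden fall back to ./orchestration_data/...
backends = create_local_backends(
    context_storage_dir="./tmp_data/contexts",
    workflow_storage_dir="./tmp_data/workflows",
)
backends.context.save_context("exec-1", {"step": 1})
assert backends.context.get_context("exec-1") == {"step": 1}

# Purely in-memory variant: nothing is written to disk.
mem = create_in_memory_backends()
mem.workflow.save_current_workflow_name("exec-1", "main")
assert mem.workflow.get_current_workflow_name("exec-1") == "main"
mem.cleanup_all()  # wipes every in-memory backend at once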
soe/local_backends/in_memory/context.py ADDED
@@ -0,0 +1,38 @@
+"""
+In-memory context backend
+"""
+
+from typing import Dict, Any
+
+
+class InMemoryContextBackend:
+    """In-memory context storage backend"""
+
+    def __init__(self):
+        self._storage: Dict[str, Dict[str, Any]] = {}
+
+    def get_context(self, execution_id: str) -> Dict[str, Any]:
+        """
+        Get context for execution ID
+
+        Args:
+            execution_id: Unique execution identifier
+
+        Returns:
+            Context dictionary, empty if not found
+        """
+        return self._storage.get(execution_id, {})
+
+    def save_context(self, execution_id: str, context: Dict[str, Any]) -> None:
+        """
+        Save context for execution ID
+
+        Args:
+            execution_id: Unique execution identifier
+            context: Context dictionary to save
+        """
+        self._storage[execution_id] = context.copy()
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data"""
+        self._storage.clear()
soe/local_backends/in_memory/conversation_history.py ADDED
@@ -0,0 +1,60 @@
+"""
+In-memory conversation history backend
+"""
+
+from typing import Dict, Any, List
+
+
+class InMemoryConversationHistoryBackend:
+    """In-memory conversation history backend"""
+
+    def __init__(self):
+        self._history: Dict[str, List[Dict[str, Any]]] = {}
+
+    def get_conversation_history(self, identity: str) -> List[Dict[str, Any]]:
+        """
+        Get conversation history for identity
+
+        Args:
+            identity: User/Agent identity
+
+        Returns:
+            List of conversation messages
+        """
+        return self._history.get(identity, [])
+
+    def append_to_conversation_history(self, identity: str, entry: Dict[str, Any]) -> None:
+        """
+        Append entry to conversation history
+
+        Args:
+            identity: User/Agent identity
+            entry: Message entry to append
+        """
+        if identity not in self._history:
+            self._history[identity] = []
+        self._history[identity].append(entry)
+
+    def save_conversation_history(self, identity: str, history: List[Dict[str, Any]]) -> None:
+        """
+        Save full conversation history
+
+        Args:
+            identity: User/Agent identity
+            history: Full history list
+        """
+        self._history[identity] = history
+
+    def delete_conversation_history(self, identity: str) -> None:
+        """
+        Delete conversation history for identity
+
+        Args:
+            identity: User/Agent identity
+        """
+        if identity in self._history:
+            del self._history[identity]
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data"""
+        self._history.clear()
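A brief sketch; note that, unlike the execution-keyed backends, history is keyed by identity. The import path and message entries are assumptions for illustration.

# Hypothetical usage; import path assumed from the in_memory package layout.
from soe.local_backends.in_memory.conversation_history import (
    InMemoryConversationHistoryBackend,
)

history = InMemoryConversationHistoryBackend()
history.append_to_conversation_history("assistant", {"role": "user", "content": "hi"})
history.append_to_conversation_history("assistant", {"role": "assistant", "content": "hello"})

print(len(history.get_conversation_history("assistant")))  # 2
history.delete_conversation_history("assistant")
print(history.get_conversation_history("assistant"))       # []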
soe/local_backends/in_memory/identity.py ADDED
@@ -0,0 +1,52 @@
+"""In-memory identity backend - dumb storage only.
+
+Keyed by execution_id (main_execution_id) so children can access parent's identities.
+
+Identity format is simple: identity_name -> system_prompt (string)
+Example:
+    assistant: "You are a helpful assistant."
+    coding_expert: "You are an expert programmer."
+"""
+
+from typing import Dict, Optional
+
+
+class InMemoryIdentityBackend:
+    """In-memory identity backend.
+
+    Stores identity definitions per execution. Identities define the
+    participating personas/roles and are used as initial system prompts.
+    """
+
+    def __init__(self):
+        self._identities: Dict[str, Dict[str, str]] = {}
+
+    def save_identities(self, execution_id: str, identities: Dict[str, str]) -> None:
+        """Save identity definitions for execution."""
+        self._identities[execution_id] = identities
+
+    def get_identities(self, execution_id: str) -> Optional[Dict[str, str]]:
+        """Get identity definitions for execution."""
+        return self._identities.get(execution_id)
+
+    def get_identity(self, execution_id: str, identity_name: str) -> Optional[str]:
+        """Get a specific identity's system prompt."""
+        identities = self.get_identities(execution_id)
+        if identities and identity_name in identities:
+            return identities[identity_name]
+        return None
+
+    def delete_identities(self, execution_id: str) -> bool:
+        """Delete identity definitions for execution.
+
+        Returns:
+            True if deleted, False if not found
+        """
+        if execution_id in self._identities:
+            del self._identities[execution_id]
+            return True
+        return False
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data."""
+        self._identities.clear()
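A small sketch of the name-to-system-prompt format described in the module docstring; the import path and execution ID are assumptions for illustration.

# Hypothetical usage; import path assumed from the in_memory package layout.
from soe.local_backends.in_memory.identity import InMemoryIdentityBackend

identities = InMemoryIdentityBackend()
identities.save_identities("exec-1", {
    "assistant": "You are a helpful assistant.",
    "coding_expert": "You are an expert programmer.",
})

print(identities.get_identity("exec-1", "assistant"))  # "You are a helpful assistant."
print(identities.get_identity("exec-1", "reviewer"))   # None (not defined)
print(identities.delete_identities("exec-1"))          # True; False on a second call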
soe/local_backends/in_memory/schema.py ADDED
@@ -0,0 +1,40 @@
+"""In-memory context schema backend - dumb storage only.
+
+Keyed by execution_id (main_execution_id) so children can access parent's schemas.
+"""
+
+from typing import Dict, Any, Optional
+
+
+class InMemoryContextSchemaBackend:
+    """In-memory context schema backend."""
+
+    def __init__(self):
+        self._schemas: Dict[str, Dict[str, Any]] = {}
+
+    def save_context_schema(self, execution_id: str, schema: Dict[str, Any]) -> None:
+        """Save context schema for execution."""
+        self._schemas[execution_id] = schema
+
+    def get_context_schema(self, execution_id: str) -> Optional[Dict[str, Any]]:
+        """Get context schema for execution."""
+        return self._schemas.get(execution_id)
+
+    def delete_context_schema(self, execution_id: str) -> bool:
+        """
+        Delete context schema for execution.
+
+        Args:
+            execution_id: Execution ID (typically main_execution_id)
+
+        Returns:
+            True if deleted, False if not found
+        """
+        if execution_id in self._schemas:
+            del self._schemas[execution_id]
+            return True
+        return False
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data"""
+        self._schemas.clear()
soe/local_backends/in_memory/telemetry.py ADDED
@@ -0,0 +1,38 @@
+"""
+In-memory telemetry backend - dumb storage only.
+"""
+
+from typing import Dict, Any, List
+
+
+class InMemoryTelemetryBackend:
+    """In-memory telemetry backend"""
+
+    def __init__(self):
+        self._events: Dict[str, List[Dict[str, Any]]] = {}
+
+    def log_event(self, execution_id: str, event_type: str, **event_data) -> None:
+        """
+        Log telemetry event
+
+        Args:
+            execution_id: Execution ID
+            event_type: Type of event
+            **event_data: Additional event data
+        """
+        if execution_id not in self._events:
+            self._events[execution_id] = []
+
+        event = {
+            "event_type": event_type,
+            **event_data
+        }
+        self._events[execution_id].append(event)
+
+    def get_events(self, execution_id: str) -> List[Dict[str, Any]]:
+        """Get all events for an execution ID"""
+        return self._events.get(execution_id, [])
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data"""
+        self._events.clear()
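A short sketch showing how arbitrary keyword arguments are folded into the stored event dict; the import path and event names are assumptions for illustration.

# Hypothetical usage; import path assumed from the in_memory package layout.
from soe.local_backends.in_memory.telemetry import InMemoryTelemetryBackend

telemetry = InMemoryTelemetryBackend()
telemetry.log_event("exec-1", "node_started", node="router", attempt=1)
telemetry.log_event("exec-1", "node_finished", node="router", status="ok")

for event in telemetry.get_events("exec-1"):
    print(event["event_type"], event)  # kwargs appear as keys alongside event_type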
soe/local_backends/in_memory/workflow.py ADDED
@@ -0,0 +1,33 @@
+"""In-memory workflow backend - dumb storage only."""
+
+from typing import Dict, Any
+import copy
+
+
+class InMemoryWorkflowBackend:
+    """In-memory workflow storage backend."""
+
+    def __init__(self):
+        self._registries: Dict[str, Any] = {}
+        self._current_workflows: Dict[str, str] = {}
+
+    def save_workflows_registry(self, id: str, workflows: Dict[str, Any]) -> None:
+        """Save workflows registry for execution ID."""
+        self._registries[id] = copy.deepcopy(workflows)
+
+    def get_workflows_registry(self, id: str) -> Any:
+        """Get workflows registry for execution ID."""
+        return copy.deepcopy(self._registries.get(id))
+
+    def save_current_workflow_name(self, id: str, name: str) -> None:
+        """Save current workflow name for execution ID."""
+        self._current_workflows[id] = name
+
+    def get_current_workflow_name(self, id: str) -> str:
+        """Get current workflow name for execution ID."""
+        return self._current_workflows.get(id, "")
+
+    def cleanup_all(self) -> None:
+        """Cleanup all data."""
+        self._registries.clear()
+        self._current_workflows.clear()
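One design detail worth noting: the registry is deep-copied on both save and get, so callers cannot mutate stored state by accident. A minimal sketch (import path and registry contents are assumptions for illustration):

# Hypothetical sketch; import path assumed from the in_memory package layout.
from soe.local_backends.in_memory.workflow import InMemoryWorkflowBackend

backend = InMemoryWorkflowBackend()
registry = {"main": {"nodes": ["router"]}}
backend.save_workflows_registry("exec-1", registry)

# Mutating either the original or the returned copy leaves the stored registry intact.
registry["main"]["nodes"].append("agent")
copy_back = backend.get_workflows_registry("exec-1")
copy_back["main"]["nodes"].append("tool")
print(backend.get_workflows_registry("exec-1"))  # {'main': {'nodes': ['router']}}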
soe/local_backends/storage/context.py ADDED
@@ -0,0 +1,57 @@
+"""
+Local file-based context backend
+"""
+
+import json
+from pathlib import Path
+from typing import Dict, Any
+
+
+class LocalContextBackend:
+    """File-based context storage backend"""
+
+    def __init__(self, storage_dir: str = "./orchestration_data/contexts"):
+        """
+        Initialize local context backend
+
+        Args:
+            storage_dir: Directory to store context files
+        """
+        self.storage_dir = Path(storage_dir)
+        self.storage_dir.mkdir(parents=True, exist_ok=True)
+
+    def get_context(self, execution_id: str) -> Dict[str, Any]:
+        """
+        Get context for execution ID
+
+        Args:
+            execution_id: Unique execution identifier
+
+        Returns:
+            Context dictionary, empty if not found
+        """
+        context_file = self.storage_dir / f"{execution_id}.json"
+
+        if not context_file.exists():
+            return {}
+
+        with open(context_file, "r") as f:
+            return json.load(f)
+
+    def save_context(self, execution_id: str, context: Dict[str, Any]) -> None:
+        """
+        Save context for execution ID
+
+        Args:
+            execution_id: Unique execution identifier
+            context: Context dictionary to save
+        """
+        context_file = self.storage_dir / f"{execution_id}.json"
+
+        with open(context_file, "w") as f:
+            json.dump(context, f, indent=2, default=str)
+
+    def cleanup_all(self) -> None:
+        """Delete all context files. Used for test cleanup."""
+        for context_file in self.storage_dir.glob("*.json"):
+            context_file.unlink()
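A short sketch of the file-based round trip; because json.dump is called with default=str, non-JSON values such as datetimes are stringified on save. The import path and directory are assumptions for illustration.

# Hypothetical usage; import path assumed from soe/local_backends/storage/context.py.
from soe.local_backends.storage.context import LocalContextBackend

backend = LocalContextBackend("./tmp_contexts")
backend.save_context("exec-1", {"user": "ada", "step": 2})

# Each execution is stored as ./tmp_contexts/<execution_id>.json
print(backend.get_context("exec-1"))   # {'user': 'ada', 'step': 2}
print(backend.get_context("missing"))  # {} for unknown execution IDs
backend.cleanup_all()                  # removes all *.json context files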