soe-ai 0.1.0 (soe_ai-0.1.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
soe/init.py ADDED
@@ -0,0 +1,165 @@
+ """
+ Convenience initialization for SOE.
+
+ This module provides easy setup functions for common use cases.
+ Use these to quickly get started without manually wiring nodes and backends.
+ """
+
+ import copy
+ from typing import Callable, Dict, Any, Tuple, Optional, List, Union
+ from .broker import broadcast_signals, orchestrate
+ from .nodes.router.factory import create_router_node_caller
+ from .nodes.llm.factory import create_llm_node_caller
+ from .nodes.agent.factory import create_agent_node_caller
+ from .nodes.tool.factory import create_tool_node_caller
+ from .nodes.child.factory import create_child_node_caller
+ from .lib.yaml_parser import parse_yaml
+ from .types import CallLlm, Backends
+ from .local_backends import create_in_memory_backends, create_local_backends
+
+
+ def create_all_nodes(
+     backends: Backends,
+     call_llm: Optional[CallLlm] = None,
+     tools_registry: Optional[Dict[str, Callable]] = None,
+ ) -> Tuple[Dict[str, Callable], Callable]:
+     """
+     Create all node types with automatic wiring.
+
+     This is the recommended way to set up nodes for orchestration.
+     Returns both the nodes dictionary and the broadcast_signals_caller.
+
+     Args:
+         backends: Backend services (use create_in_memory_backends or create_local_backends)
+         call_llm: Optional LLM caller function for LLM/Agent nodes
+         tools_registry: Optional dict mapping tool name -> callable for Tool/Agent nodes
+
+     Returns:
+         Tuple of (nodes dict, broadcast_signals_caller function)
+
+     Example:
+         backends = create_in_memory_backends()
+         nodes, broadcast = create_all_nodes(backends, call_llm=my_llm, tools_registry=my_tools)
+
+         execution_id = orchestrate(
+             config=workflow_yaml,
+             initial_workflow_name="my_workflow",
+             initial_signals=["START"],
+             initial_context={"user_input": "Hello"},
+             backends=backends,
+             broadcast_signals_caller=broadcast,
+         )
+     """
+     nodes = {}
+
+     def broadcast_signals_caller(id: str, signals: List[str]):
+         broadcast_signals(id, signals, nodes, backends)
+
+     nodes["router"] = create_router_node_caller(backends, broadcast_signals_caller)
+
+     if call_llm is not None:
+         nodes["llm"] = create_llm_node_caller(backends, call_llm, broadcast_signals_caller)
+
+     tools_list = []
+     if tools_registry:
+         tools_list = [{"function": func, "max_retries": 0} for func in tools_registry.values()]
+         nodes["agent"] = create_agent_node_caller(backends, tools_list, call_llm, broadcast_signals_caller)
+
+     if tools_registry is not None:
+         nodes["tool"] = create_tool_node_caller(backends, tools_registry, broadcast_signals_caller)
+
+     def orchestrate_caller(
+         config: Union[str, Dict[str, Any]],
+         initial_workflow_name: str,
+         initial_signals: List[str],
+         initial_context: Dict[str, Any],
+         backends: Backends,
+     ) -> str:
+         """Start a child workflow execution."""
+         if isinstance(config, str):
+             parsed_config = parse_yaml(config)
+         else:
+             parsed_config = copy.deepcopy(config)
+
+         def child_broadcast(execution_id: str, signals: List[str]):
+             broadcast_signals(execution_id, signals, nodes, backends)
+
+         return orchestrate(
+             config=parsed_config,
+             initial_workflow_name=initial_workflow_name,
+             initial_signals=initial_signals,
+             initial_context=initial_context,
+             backends=backends,
+             broadcast_signals_caller=child_broadcast,
+         )
+
+     nodes["child"] = create_child_node_caller(backends, orchestrate_caller)
+
+     return nodes, broadcast_signals_caller
+
+
+ def setup_orchestration(
+     call_llm: Optional[CallLlm] = None,
+     tools_registry: Optional[Dict[str, Callable]] = None,
+     use_local_storage: bool = False,
+     storage_dir: str = "./orchestration_data",
+ ) -> Tuple[Backends, Callable]:
+     """
+     One-line setup for SOE orchestration.
+
+     Creates backends and nodes with automatic wiring.
+     Returns the backends and broadcast_signals_caller ready to use.
+
+     Args:
+         call_llm: Optional LLM caller function for LLM/Agent nodes
+         tools_registry: Optional dict mapping tool name -> callable
+         use_local_storage: If True, use file-based storage. If False, use in-memory.
+         storage_dir: Directory for local storage (only used if use_local_storage=True)
+
+     Returns:
+         Tuple of (backends, broadcast_signals_caller)
+
+     Example:
+         # Minimal setup (router-only workflows)
+         backends, broadcast = setup_orchestration()
+
+         # Full setup with LLM and tools
+         backends, broadcast = setup_orchestration(
+             call_llm=my_llm_function,
+             tools_registry={"search": search_fn, "calculate": calc_fn},
+         )
+
+         execution_id = orchestrate(
+             config=workflow_yaml,
+             initial_workflow_name="my_workflow",
+             initial_signals=["START"],
+             initial_context={},
+             backends=backends,
+             broadcast_signals_caller=broadcast,
+         )
+     """
+     if use_local_storage:
+         backends = create_local_backends(
+             context_storage_dir=f"{storage_dir}/contexts",
+             workflow_storage_dir=f"{storage_dir}/workflows",
+             telemetry_storage_dir=f"{storage_dir}/telemetry",
+             conversation_history_storage_dir=f"{storage_dir}/conversations",
+             context_schema_storage_dir=f"{storage_dir}/schemas",
+             identity_storage_dir=f"{storage_dir}/identities",
+         )
+     else:
+         backends = create_in_memory_backends()
+
+     _, broadcast_signals_caller = create_all_nodes(
+         backends=backends,
+         call_llm=call_llm,
+         tools_registry=tools_registry,
+     )
+
+     return backends, broadcast_signals_caller
+
+
+ __all__ = [
+     "create_all_nodes",
+     "setup_orchestration",
+ ]
soe/types.py ADDED
@@ -0,0 +1,197 @@
+ """
+ Types for orchestration system
+ """
+
+ from __future__ import annotations
+
+ from typing import Protocol, Optional, Any, Dict, List
+
+
+ class TelemetryBackend(Protocol):
+     """Protocol for telemetry backend"""
+
+     def log_event(self, execution_id: str, event_type: str, **event_data) -> None:
+         ...
+
+
+ class ContextBackend(Protocol):
+     """Protocol for context backend"""
+
+     def save_context(self, id: str, context: dict) -> None:
+         ...
+
+     def get_context(self, id: str) -> dict:
+         ...
+
+
+ class WorkflowBackend(Protocol):
+     """Protocol for workflow backend"""
+
+     def save_workflows_registry(self, id: str, workflows: Dict[str, Any]) -> None:
+         ...
+
+     def soe_get_workflows_registry(self, id: str) -> Any:
+         ...
+
+     def save_current_workflow_name(self, id: str, name: str) -> None:
+         ...
+
+     def get_current_workflow_name(self, id: str) -> str:
+         ...
+
+
+ class OrchestrateCaller(Protocol):
+     """Protocol for orchestrate caller function"""
+
+     def __call__(self, execution_id: str, signals: List[str]) -> None:
+         ...
+
+
+ class BroadcastSignalsCaller(Protocol):
+     """Protocol for broadcast signals caller function"""
+
+     def __call__(self, execution_id: str, signals: List[str]) -> None:
+         ...
+
+
+ class RouterNodeCaller(Protocol):
+     """Protocol for router node caller function"""
+
+     def __call__(self, execution_id: str, node_config: Dict[str, Any]) -> None:
+         ...
+
+
+ class AgentNodeCaller(Protocol):
+     """Protocol for agent node caller function"""
+
+     def __call__(self, execution_id: str, node_config: Dict[str, Any]) -> None:
+         ...
+
+
+ class ToolNodeCaller(Protocol):
+     """Protocol for tool node caller function"""
+
+     def __call__(self, execution_id: str, node_config: Dict[str, Any]) -> None:
+         ...
+
+
+ class ChildNodeCaller(Protocol):
+     """Protocol for child node caller function"""
+
+     def __call__(self, execution_id: str, node_config: Dict[str, Any]) -> None:
+         ...
+
+
+ class OrchestrateCaller(Protocol):
+     """Protocol for starting child workflows (wrapper around orchestrate).
+
+     Supports optional inheritance parameters:
+     - inherit_config_from_id: Inherit workflows, identities, schema from existing execution
+     - inherit_context_from_id: Inherit context from existing execution (resets operational)
+     """
+
+     def __call__(
+         self,
+         config: Any,
+         initial_workflow_name: str,
+         initial_signals: List[str],
+         initial_context: Dict[str, Any],
+         backends: "Backends",
+         inherit_config_from_id: Optional[str] = None,
+         inherit_context_from_id: Optional[str] = None,
+     ) -> str:
+         ...
+
+
+ class CallLlm(Protocol):
+     """Protocol for LLM caller function"""
+
+     def __call__(self, prompt: str, config: Dict[str, Any]) -> str:
+         ...
+
+
+ class ConversationHistoryBackend(Protocol):
+     """Protocol for conversation history backend"""
+
+     def get_conversation_history(self, identity: str) -> List[Dict[str, Any]]:
+         ...
+
+     def append_to_conversation_history(self, identity: str, entry: Dict[str, Any]) -> None:
+         ...
+
+     def save_conversation_history(self, identity: str, history: List[Dict[str, Any]]) -> None:
+         ...
+
+     def delete_conversation_history(self, identity: str) -> None:
+         ...
+
+
+ class ContextSchemaBackend(Protocol):
+     """Protocol for context schema backend - stores workflow context field schemas.
+
+     Context schemas define the structure and types of context fields used in workflows.
+     These are keyed by execution_id (main_execution_id) so children can access parent's schemas.
+     """
+
+     def save_context_schema(self, execution_id: str, schema: Dict[str, Any]) -> None:
+         """Save context schema for an execution."""
+         ...
+
+     def get_context_schema(self, execution_id: str) -> Optional[Dict[str, Any]]:
+         """Get context schema for an execution."""
+         ...
+
+     def delete_context_schema(self, execution_id: str) -> bool:
+         """Delete context schema for an execution."""
+         ...
+
+
+ class IdentityBackend(Protocol):
+     """Protocol for identity backend - stores workflow identity definitions.
+
+     Identities define the participating personas/roles in a workflow.
+     These are used as the initial system prompt for conversation history.
+     Keyed by execution_id (main_execution_id) so children can access parent's identities.
+
+     Identity format is simple: identity_name -> system_prompt (string)
+     Example:
+         assistant: "You are a helpful assistant."
+         coding_expert: "You are an expert programmer."
+     """
+
+     def save_identities(self, execution_id: str, identities: Dict[str, str]) -> None:
+         """Save identity definitions for an execution."""
+         ...
+
+     def get_identities(self, execution_id: str) -> Optional[Dict[str, str]]:
+         """Get all identity definitions for an execution."""
+         ...
+
+     def get_identity(self, execution_id: str, identity_name: str) -> Optional[str]:
+         """Get a specific identity's system prompt."""
+         ...
+
+     def delete_identities(self, execution_id: str) -> bool:
+         """Delete identity definitions for an execution."""
+         ...
+
+
+ class Backends(Protocol):
+     """Protocol for orchestration backends"""
+
+     context: ContextBackend
+     workflow: WorkflowBackend
+     telemetry: Optional[TelemetryBackend]
+     conversation_history: Optional[ConversationHistoryBackend]
+     context_schema: Optional[ContextSchemaBackend]
+     identity: Optional[IdentityBackend]
+
+
+ class SoeError(Exception):
+     """Base class for all SOE exceptions"""
+     pass
+
+
+ class WorkflowValidationError(SoeError):
+     """Raised when workflow or node configuration is invalid (static/startup)"""
+     pass
soe/validation.py ADDED
@@ -0,0 +1,8 @@
+ """
+ DEPRECATED: This module has been moved to soe/validation/ folder.
+ This file is kept temporarily but will be removed.
+
+ Import from soe.validation instead:
+     from soe.validation import validate_config, validate_operational
+ """
+ # This file should be deleted - Python will import from soe/validation/ folder instead
soe_ai-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,199 @@
+ Metadata-Version: 2.4
+ Name: soe-ai
+ Version: 0.1.0
+ Summary: Signal-driven Orchestration Engine - Agent orchestration with event-driven workflow engine
+ Author-email: Pedro Garcia <pedro@example.com>
+ License-Expression: MIT
+ Project-URL: Homepage, https://github.com/pgarcia14180/soe
+ Project-URL: Documentation, https://github.com/pgarcia14180/soe/tree/master/docs
+ Project-URL: Repository, https://github.com/pgarcia14180/soe
+ Project-URL: Issues, https://github.com/pgarcia14180/soe/issues
+ Keywords: orchestration,agent,workflow,automation
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: pyyaml>=6.0
+ Requires-Dist: pydantic>=2.0.0
+ Requires-Dist: jinja2>=3.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0; extra == "dev"
+ Requires-Dist: black>=23.0; extra == "dev"
+ Requires-Dist: mypy>=1.0; extra == "dev"
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
+ Provides-Extra: integration
+ Requires-Dist: openai>=1.0.0; extra == "integration"
+ Requires-Dist: requests>=2.28.0; extra == "integration"
+ Dynamic: license-file
+
+ # SOE — Signal-driven Orchestration Engine
+
+ **A protocol for orchestrating AI workflows through signals.**
+
+ ---
+
+ ## What SOE Is
+
+ SOE is an orchestration engine where nodes communicate through **signals** rather than direct function calls. You define workflows in YAML, and the engine handles execution.
+
+ | Approach | How It Works | Trade-off |
+ |----------|--------------|-----------|
+ | Chain-based | `Step A → B → C → D` | Simple but rigid |
+ | SOE Signal-based | `[SIGNAL] → all listeners respond` | Flexible, requires understanding signals |
+
+ **The C++ analogy**: Just as C++ gives you control over memory and execution (compared to higher-level languages), SOE gives you control over orchestration primitives. You decide how state is stored, how LLMs are called, and how signals are broadcast. This requires more setup, but it means no vendor lock-in and full observability.
+
+ ---
+
+ ## What SOE Does
+
+ SOE orchestrates workflows through **signals**. Nodes don't call each other—they emit signals that other nodes listen for.
+
+ ```yaml
+ example_workflow:
+   ValidateInput:
+     node_type: router
+     event_triggers: [START]
+     event_emissions:
+       - signal_name: VALID
+         condition: "{{ context.data is defined }}"
+       - signal_name: INVALID
+
+   ProcessData:
+     node_type: llm
+     event_triggers: [VALID]
+     prompt: "Process this: {{ context.data }}"
+     output_field: result
+     event_emissions:
+       - signal_name: DONE
+ ```
+
+ **That's the entire workflow definition.** No SDK, no decorators, no base classes.
+
+ ---
+
+ ## Why SOE
+
+ ### 1. Infrastructure-Agnostic
+ SOE defines **protocols**, not implementations. Swap PostgreSQL for DynamoDB. Replace OpenAI with a local LLM. Deploy to Lambda, Kubernetes, or a single Python script. Your workflow YAML stays the same.
+
+ ### 2. Context-Driven with Jinja
+ All workflow state flows through **context**—a shared dictionary accessible via Jinja2 templates. This means:
+ - Conditions like `{{ context.user_validated }}` are readable and debuggable
+ - LLM prompts can interpolate any context field
+ - No hidden state—everything is inspectable
+
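+ As a rough illustration of the mechanism (not SOE's internal code), a condition is just a Jinja2 expression rendered against the context dict:
+
+ ```python
+ # Illustration only: how a Jinja2 condition over a context dict evaluates.
+ from jinja2 import Template
+
+ context = {"data": "hello", "user_validated": True}
+ condition = "{{ context.data is defined }}"
+
+ rendered = Template(condition).render(context=context)
+ print(rendered)  # "True" -> the VALID signal's condition holds
+ ```
+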
+ ### 3. Deterministic + Agentic
+ Mix hard-coded logic with LLM-driven behavior in the same workflow. Router nodes are pure conditionals. Agent nodes can call tools. Use what you need.
+
+ ### 4. Portable
+ Workflows are YAML. Run them locally, in CI, in production. Extract them, version them, share them.
+
+ ---
+
+ ## Installation
+
+ ```bash
+ # With uv (recommended)
+ uv add soe-ai
+
+ # With pip
+ pip install soe-ai
+
+ # From source
+ git clone https://github.com/pgarcia14180/soe.git
+ cd soe && uv sync
+ ```
+
+ ---
+
+ ## Quick Start
+
+ ```python
+ from soe import orchestrate, create_all_nodes
+ from soe.local_backends import create_local_backends
+
+ # Your workflow (can also be loaded from file or database)
+ workflow = """
+ example_workflow:
+   Start:
+     node_type: router
+     event_triggers: [START]
+     event_emissions:
+       - signal_name: DONE
+ """
+
+ # Create backends (storage for context, workflows, etc.)
+ backends = create_local_backends("./data")
+
+ # Create all node handlers
+ nodes, broadcast = create_all_nodes(backends)
+
+ # Run the workflow
+ execution_id = orchestrate(
+     config=workflow,
+     initial_workflow_name="example_workflow",
+     initial_signals=["START"],
+     initial_context={"user": "alice"},
+     backends=backends,
+     broadcast_signals_caller=broadcast,
+ )
+ # When orchestrate() returns, the workflow is complete
+ ```
+
+ **For product managers and less technical users**: The Quick Start above is all you need to run a workflow. The `config` parameter accepts YAML defining your workflow structure. The `initial_context` is where you pass input data (like user IDs, requests, etc.).
+
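+ Because all run state goes through the `ContextBackend` protocol, you can read the final context back once `orchestrate()` returns. A minimal sketch, assuming the same `backends` object and `execution_id` from the Quick Start:
+
+ ```python
+ # Sketch: inspect the final workflow state via the context backend.
+ final_context = backends.context.get_context(execution_id)
+ print(final_context)
+ ```
+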
+ ---
+
+ ## Documentation
+
+ | Audience | Start Here |
+ |----------|------------|
+ | **Builders** (workflow authors) | [Documentation](docs/index.md) — Step-by-step chapters |
+ | **Engineers** (infrastructure) | [ARCHITECTURE.md](ai_docs/ARCHITECTURE.md) — Design philosophy |
+ | **Researchers** (advanced patterns) | [Advanced Patterns](docs/advanced_patterns/index.md) — Swarm, hybrid, self-evolving |
+
+ ---
+
+ ## Node Types
+
+ | Node | Purpose |
+ |------|---------|
+ | `router` | Conditional signal emission (no LLM) |
+ | `llm` | Single LLM call with output |
+ | `agent` | Multi-turn LLM with tool access |
+ | `tool` | Execute Python functions |
+ | `child` | Spawn sub-workflows |
+
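+ The `llm` and `agent` nodes delegate the actual model call to a function you supply that matches the `CallLlm` protocol in `soe/types.py` (a prompt string and a config dict in, a string out). A minimal sketch, assuming the optional `openai` integration extra; the model name and config key here are illustrative assumptions, not part of SOE's API:
+
+ ```python
+ # Sketch of a CallLlm-compatible function; model name and config key are assumptions.
+ from typing import Any, Dict
+ from openai import OpenAI
+
+ client = OpenAI()  # reads OPENAI_API_KEY from the environment
+
+ def my_llm_function(prompt: str, config: Dict[str, Any]) -> str:
+     response = client.chat.completions.create(
+         model=config.get("model", "gpt-4o-mini"),
+         messages=[{"role": "user", "content": prompt}],
+     )
+     return response.choices[0].message.content or ""
+
+ # Wire it in:
+ # nodes, broadcast = create_all_nodes(backends, call_llm=my_llm_function)
+ ```
+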
+ ---
+
+ ## Backend Protocols
+
+ Implement these to plug SOE into your infrastructure:
+
+ | Protocol | Purpose |
+ |----------|---------|
+ | `ContextBackend` | Workflow state storage |
+ | `WorkflowBackend` | Workflow definitions |
+ | `ContextSchemaBackend` | Output validation (optional) |
+ | `IdentityBackend` | LLM system prompts (optional) |
+ | `ConversationHistoryBackend` | Agent memory (optional) |
+ | `TelemetryBackend` | Observability (optional) |
+
+ **Recommendation**: Use the same database for context, workflows, identities, and context_schema—just separate tables. The backend methods handle table creation.
+
+ See [Infrastructure Guide](docs/guide_10_infrastructure.md) for PostgreSQL, DynamoDB, and Lambda examples.
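+
+ For a sense of how small a backend can be, here is a sketch of an in-memory `ContextBackend` matching the protocol in `soe/types.py` (method names come from the published source; the storage strategy is up to you):
+
+ ```python
+ # Minimal in-memory ContextBackend; real deployments would back this with a database table.
+ from typing import Dict
+
+ class InMemoryContextBackend:
+     def __init__(self) -> None:
+         self._contexts: Dict[str, dict] = {}
+
+     def save_context(self, id: str, context: dict) -> None:
+         self._contexts[id] = dict(context)
+
+     def get_context(self, id: str) -> dict:
+         return self._contexts.get(id, {})
+ ```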
+
+ ---
+
+ ## License
+
+ MIT
soe_ai-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+ soe/__init__.py,sha256=4g0b-2C4wlr3Aq9eSmISxEMeWRCI4fim_I1v4nfW_Cs,1131
+ soe/broker.py,sha256=R5s5GvgmwgBQK9FyI-UAMVb7nf14tMdXpeEZ-t8d3rE,5987
+ soe/docs_index.py,sha256=mDnQ-LVyPgF25_8yCuD04j2PxiQCR_6Si_NMkfJyk-0,3055071
+ soe/init.py,sha256=Fzw7MDoEOlZLfCwH-kL0evUjc3WHmLEVOaeR2-dgwWs,5876
+ soe/types.py,sha256=xSmkRcKehYW2pesnzB0qCZHjpm_IoDTBbQcb1SwV-8o,5712
+ soe/validation.py,sha256=SeBfGaaGjUi73ejnHNqVLTv7LYtU_gFHX8OJ3iytJMo,318
+ soe_ai-0.1.0.dist-info/licenses/LICENSE,sha256=SvEn330zvSOu83Vi4lDCX2QHgr6bmSSXOV1YClLGk1Y,1069
+ soe_ai-0.1.0.dist-info/METADATA,sha256=bWjq5DC6W9NenF18lKAgVkK-xZE_9cYGYJOOCQCoHSM,6558
+ soe_ai-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ soe_ai-0.1.0.dist-info/top_level.txt,sha256=TUufXyVAQnzN6iT6zRb4dRHdfBSiWXuAL8EoyvugZuY,4
+ soe_ai-0.1.0.dist-info/RECORD,,
soe_ai-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
soe_ai-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Pedro Garcia
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
soe_ai-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ soe