pygeai-orchestration 0.1.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygeai_orchestration/__init__.py +99 -0
- pygeai_orchestration/cli/__init__.py +7 -0
- pygeai_orchestration/cli/__main__.py +11 -0
- pygeai_orchestration/cli/commands/__init__.py +13 -0
- pygeai_orchestration/cli/commands/base.py +192 -0
- pygeai_orchestration/cli/error_handler.py +123 -0
- pygeai_orchestration/cli/formatters.py +419 -0
- pygeai_orchestration/cli/geai_orch.py +270 -0
- pygeai_orchestration/cli/interactive.py +265 -0
- pygeai_orchestration/cli/texts/help.py +169 -0
- pygeai_orchestration/core/__init__.py +130 -0
- pygeai_orchestration/core/base/__init__.py +23 -0
- pygeai_orchestration/core/base/agent.py +121 -0
- pygeai_orchestration/core/base/geai_agent.py +144 -0
- pygeai_orchestration/core/base/geai_orchestrator.py +77 -0
- pygeai_orchestration/core/base/orchestrator.py +142 -0
- pygeai_orchestration/core/base/pattern.py +161 -0
- pygeai_orchestration/core/base/tool.py +149 -0
- pygeai_orchestration/core/common/__init__.py +18 -0
- pygeai_orchestration/core/common/context.py +140 -0
- pygeai_orchestration/core/common/memory.py +176 -0
- pygeai_orchestration/core/common/message.py +50 -0
- pygeai_orchestration/core/common/state.py +181 -0
- pygeai_orchestration/core/composition.py +190 -0
- pygeai_orchestration/core/config.py +356 -0
- pygeai_orchestration/core/exceptions.py +400 -0
- pygeai_orchestration/core/handlers.py +380 -0
- pygeai_orchestration/core/utils/__init__.py +37 -0
- pygeai_orchestration/core/utils/cache.py +138 -0
- pygeai_orchestration/core/utils/config.py +94 -0
- pygeai_orchestration/core/utils/logging.py +57 -0
- pygeai_orchestration/core/utils/metrics.py +184 -0
- pygeai_orchestration/core/utils/validators.py +140 -0
- pygeai_orchestration/dev/__init__.py +15 -0
- pygeai_orchestration/dev/debug.py +288 -0
- pygeai_orchestration/dev/templates.py +321 -0
- pygeai_orchestration/dev/testing.py +301 -0
- pygeai_orchestration/patterns/__init__.py +15 -0
- pygeai_orchestration/patterns/multi_agent.py +237 -0
- pygeai_orchestration/patterns/planning.py +219 -0
- pygeai_orchestration/patterns/react.py +221 -0
- pygeai_orchestration/patterns/reflection.py +134 -0
- pygeai_orchestration/patterns/tool_use.py +170 -0
- pygeai_orchestration/tests/__init__.py +1 -0
- pygeai_orchestration/tests/test_base_classes.py +187 -0
- pygeai_orchestration/tests/test_cache.py +184 -0
- pygeai_orchestration/tests/test_cli_formatters.py +232 -0
- pygeai_orchestration/tests/test_common.py +214 -0
- pygeai_orchestration/tests/test_composition.py +265 -0
- pygeai_orchestration/tests/test_config.py +301 -0
- pygeai_orchestration/tests/test_dev_utils.py +337 -0
- pygeai_orchestration/tests/test_exceptions.py +327 -0
- pygeai_orchestration/tests/test_handlers.py +307 -0
- pygeai_orchestration/tests/test_metrics.py +171 -0
- pygeai_orchestration/tests/test_patterns.py +165 -0
- pygeai_orchestration-0.1.0b2.dist-info/METADATA +290 -0
- pygeai_orchestration-0.1.0b2.dist-info/RECORD +61 -0
- pygeai_orchestration-0.1.0b2.dist-info/WHEEL +5 -0
- pygeai_orchestration-0.1.0b2.dist-info/entry_points.txt +2 -0
- pygeai_orchestration-0.1.0b2.dist-info/licenses/LICENSE +8 -0
- pygeai_orchestration-0.1.0b2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from typing import Optional
|
|
3
|
+
|
|
4
|
+
from pydantic import BaseModel, Field
|
|
5
|
+
|
|
6
|
+
from pygeai_orchestration.core.base.pattern import BasePattern, PatternResult
|
|
7
|
+
from pygeai_orchestration.core.exceptions import PatternExecutionError
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class CompositionMode(str, Enum):
    """Execution modes for composing multiple patterns in a pipeline."""

    SEQUENTIAL = "sequential"  # patterns run one after another, optionally chaining outputs
    PARALLEL = "parallel"      # patterns all run concurrently on the same initial input
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class CompositionConfig(BaseModel):
    """Settings that control how a pattern pipeline executes its patterns."""

    # Whether patterns run one-by-one (SEQUENTIAL) or concurrently (PARALLEL).
    mode: CompositionMode = Field(
        default=CompositionMode.SEQUENTIAL,
        description="Execution mode for composed patterns"
    )
    # Sequential mode only: abort on the first failing pattern instead of
    # recording an error result and continuing.
    stop_on_error: bool = Field(
        default=True,
        description="Stop execution if a pattern fails"
    )
    # Sequential mode only: feed each pattern's non-empty output into the next.
    pass_output: bool = Field(
        default=True,
        description="Pass output of previous pattern as input to next"
    )
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class PatternPipeline:
    """Composes multiple patterns and executes them sequentially or in parallel.

    Patterns are registered with :meth:`add_pattern` / :meth:`add_patterns`
    and run with :meth:`execute`. Per-pattern results from the most recent
    run are retained and available via :meth:`get_results`.
    """

    def __init__(self, config: Optional[CompositionConfig] = None):
        """Initialize the pipeline.

        Args:
            config: Composition settings; defaults to a fresh
                ``CompositionConfig`` (sequential mode, stop on error).
        """
        self.config = config or CompositionConfig()
        self._patterns: list[BasePattern] = []
        self._results: list[PatternResult] = []

    def add_pattern(self, pattern: BasePattern) -> "PatternPipeline":
        """Append a single pattern to the pipeline.

        Args:
            pattern: Pattern to add; must be a ``BasePattern`` instance.

        Returns:
            The pipeline itself, for fluent chaining.

        Raises:
            TypeError: If ``pattern`` is not a ``BasePattern``.
        """
        if not isinstance(pattern, BasePattern):
            raise TypeError(f"Expected BasePattern, got {type(pattern)}")
        self._patterns.append(pattern)
        return self

    def add_patterns(self, *patterns: BasePattern) -> "PatternPipeline":
        """Append several patterns at once; see :meth:`add_pattern`.

        Returns:
            The pipeline itself, for fluent chaining.
        """
        for pattern in patterns:
            self.add_pattern(pattern)
        return self

    async def execute(self, initial_input: str, **kwargs) -> PatternResult:
        """Run all registered patterns according to the configured mode.

        Args:
            initial_input: Input for the first pattern (sequential) or for
                every pattern (parallel).
            **kwargs: Extra keyword arguments forwarded to each pattern.

        Returns:
            Aggregated ``PatternResult`` for the whole pipeline.

        Raises:
            PatternExecutionError: If the pipeline is empty, the mode is
                unsupported, or execution fails.
        """
        if not self._patterns:
            raise PatternExecutionError("No patterns in pipeline")

        self._results.clear()

        if self.config.mode == CompositionMode.SEQUENTIAL:
            return await self._execute_sequential(initial_input, **kwargs)
        elif self.config.mode == CompositionMode.PARALLEL:
            return await self._execute_parallel(initial_input, **kwargs)
        else:
            raise PatternExecutionError(f"Unsupported composition mode: {self.config.mode}")

    def _pipeline_metadata(self) -> dict:
        """Build the per-pattern summary metadata shared by both execution modes."""
        return {
            "pipeline_results": [
                {
                    "index": i,
                    "success": r.success,
                    "result_length": len(r.result) if r.result else 0
                }
                for i, r in enumerate(self._results)
            ],
            "total_patterns": len(self._patterns),
        }

    async def _execute_sequential(self, initial_input: str, **kwargs) -> PatternResult:
        """Run patterns one after another, optionally chaining outputs."""
        current_input = initial_input
        last_result = None

        for i, pattern in enumerate(self._patterns):
            try:
                result = await pattern.execute(current_input, **kwargs)
                self._results.append(result)
                last_result = result

                # Feed this pattern's output into the next one when configured.
                if self.config.pass_output and result.result:
                    current_input = result.result

            except Exception as e:
                if self.config.stop_on_error:
                    # Chain the original exception so the root cause survives.
                    raise PatternExecutionError(
                        f"Pattern {i} ({type(pattern).__name__}) failed: {str(e)}"
                    ) from e

                # Best-effort mode: record the failure and keep going with
                # the unchanged current_input.
                error_result = PatternResult(
                    result="",
                    success=False,
                    error=str(e),
                    metadata={"pattern_index": i, "error": str(e)}
                )
                self._results.append(error_result)
                last_result = error_result

        if last_result is None:
            raise PatternExecutionError("No results produced")

        metadata = self._pipeline_metadata()
        metadata["successful_patterns"] = sum(1 for r in self._results if r.success)
        return PatternResult(
            result=last_result.result,
            success=all(r.success for r in self._results),
            metadata=metadata
        )

    async def _execute_parallel(self, initial_input: str, **kwargs) -> PatternResult:
        """Run every pattern concurrently on the same initial input."""
        import asyncio

        tasks = [pattern.execute(initial_input, **kwargs) for pattern in self._patterns]

        # return_exceptions=True converts per-task failures into returned
        # exception objects, so gather itself does not raise for them; the
        # previous try/except around this call was dead code.
        results = await asyncio.gather(*tasks, return_exceptions=True)

        self._results = []
        for i, result in enumerate(results):
            if isinstance(result, Exception):
                self._results.append(PatternResult(
                    result="",
                    success=False,
                    error=str(result),
                    metadata={"pattern_index": i, "error": str(result)}
                ))
            else:
                self._results.append(result)

        successful_results = [r for r in self._results if r.success]

        if not successful_results:
            raise PatternExecutionError("All patterns failed in parallel execution")

        # Parallel mode succeeds if at least one pattern succeeded; outputs
        # of all successful patterns are concatenated.
        combined_output = "\n\n".join(r.result for r in successful_results if r.result)

        metadata = self._pipeline_metadata()
        metadata["successful_patterns"] = len(successful_results)
        metadata["mode"] = "parallel"
        return PatternResult(
            result=combined_output,
            success=any(r.success for r in self._results),
            metadata=metadata
        )

    def get_results(self) -> list[PatternResult]:
        """Return a shallow copy of the per-pattern results from the last run."""
        return self._results.copy()

    def clear(self) -> None:
        """Remove all patterns and results, resetting the pipeline."""
        self._patterns.clear()
        self._results.clear()

    def __len__(self) -> int:
        """Number of patterns currently in the pipeline."""
        return len(self._patterns)
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class PatternComposer:
    """Convenience factory methods for building pattern pipelines."""

    @staticmethod
    def sequential(*patterns: BasePattern) -> PatternPipeline:
        """Build a pipeline that runs *patterns* one after another."""
        return PatternPipeline(
            CompositionConfig(mode=CompositionMode.SEQUENTIAL)
        ).add_patterns(*patterns)

    @staticmethod
    def parallel(*patterns: BasePattern) -> PatternPipeline:
        """Build a pipeline that runs *patterns* concurrently."""
        return PatternPipeline(
            CompositionConfig(mode=CompositionMode.PARALLEL)
        ).add_patterns(*patterns)

    @staticmethod
    def custom(
        mode: CompositionMode,
        stop_on_error: bool = True,
        pass_output: bool = True
    ) -> PatternPipeline:
        """Build an empty pipeline with fully custom composition settings."""
        settings = CompositionConfig(
            mode=mode,
            stop_on_error=stop_on_error,
            pass_output=pass_output
        )
        return PatternPipeline(settings)
|
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
"""Enhanced configuration system for PyGEAI Orchestration.
|
|
2
|
+
|
|
3
|
+
This module provides a comprehensive configuration management system with support for:
|
|
4
|
+
- Environment variables (``.env`` file loading is not implemented in this module)
|
|
5
|
+
- Configuration profiles (dev, prod, test)
|
|
6
|
+
- JSON configuration files (YAML is not currently supported)
|
|
7
|
+
- Validation and type safety
|
|
8
|
+
- Configuration merging and inheritance
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import os
|
|
12
|
+
import json
|
|
13
|
+
from enum import Enum
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Any, Dict, Optional, Union
|
|
16
|
+
from pydantic import BaseModel, Field, field_validator
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ConfigProfile(str, Enum):
    """Configuration profile types."""

    DEVELOPMENT = "development"  # default profile; used as-is, no overrides
    PRODUCTION = "production"    # get_profile_config applies stricter logging/longer timeouts
    TEST = "test"                # get_profile_config disables cache/metrics, shortens timeouts
    CUSTOM = "custom"            # user-managed; used as-is, no overrides
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class LogLevel(str, Enum):
    """Logging levels.

    Values mirror the standard :mod:`logging` level names so they can be
    passed straight through to logging configuration.
    """

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class CacheConfig(BaseModel):
    """Cache configuration settings."""

    enabled: bool = Field(default=True, description="Enable caching")
    max_size: int = Field(default=1000, description="Maximum cache size")
    ttl: int = Field(default=3600, description="Cache TTL in seconds")
    backend: str = Field(default="memory", description="Cache backend type")

    @field_validator("max_size")
    @classmethod
    def validate_max_size(cls, value: int) -> int:
        """Reject non-positive cache sizes."""
        if value > 0:
            return value
        raise ValueError("max_size must be positive")

    @field_validator("ttl")
    @classmethod
    def validate_ttl(cls, value: int) -> int:
        """Reject negative TTLs (zero is allowed)."""
        if value >= 0:
            return value
        raise ValueError("ttl must be non-negative")
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class MetricsConfig(BaseModel):
    """Metrics and monitoring configuration."""

    enabled: bool = Field(default=True, description="Enable metrics collection")
    export_interval: int = Field(default=60, description="Metrics export interval in seconds")
    export_format: str = Field(default="json", description="Metrics export format")
    include_labels: bool = Field(default=True, description="Include label dimensions")

    @field_validator("export_interval")
    @classmethod
    def validate_export_interval(cls, value: int) -> int:
        """Reject non-positive export intervals."""
        if value > 0:
            return value
        raise ValueError("export_interval must be positive")
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class ExecutionConfig(BaseModel):
    """Pattern execution configuration."""

    max_iterations: int = Field(default=10, description="Maximum pattern iterations")
    timeout: int = Field(default=300, description="Execution timeout in seconds")
    retry_attempts: int = Field(default=3, description="Number of retry attempts")
    retry_delay: float = Field(default=1.0, description="Retry delay in seconds")
    parallel_workers: int = Field(default=4, description="Number of parallel workers")

    @field_validator("max_iterations", "retry_attempts", "parallel_workers")
    @classmethod
    def validate_positive(cls, value: int) -> int:
        """Reject non-positive counts."""
        if value > 0:
            return value
        raise ValueError("Value must be positive")

    @field_validator("timeout")
    @classmethod
    def validate_timeout(cls, value: int) -> int:
        """Reject negative timeouts (zero is allowed)."""
        if value >= 0:
            return value
        raise ValueError("timeout must be non-negative")
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
class OrchestrationConfig(BaseModel):
    """Main orchestration configuration."""

    profile: ConfigProfile = Field(
        default=ConfigProfile.DEVELOPMENT,
        description="Configuration profile"
    )
    log_level: LogLevel = Field(default=LogLevel.INFO, description="Logging level")
    debug: bool = Field(default=False, description="Enable debug mode")
    cache: CacheConfig = Field(default_factory=CacheConfig, description="Cache settings")
    metrics: MetricsConfig = Field(default_factory=MetricsConfig, description="Metrics settings")
    execution: ExecutionConfig = Field(
        default_factory=ExecutionConfig,
        description="Execution settings"
    )
    custom: Dict[str, Any] = Field(default_factory=dict, description="Custom configuration")

    @staticmethod
    def _env_bool(raw: str) -> bool:
        """Interpret an environment-variable string as a boolean flag."""
        return raw.lower() in ("true", "1", "yes")

    @staticmethod
    def _env_int(name: str, raw: str) -> int:
        """Parse an environment-variable string as int, naming the variable on failure."""
        try:
            return int(raw)
        except ValueError as e:
            raise ValueError(
                f"Environment variable {name} must be an integer, got {raw!r}"
            ) from e

    @classmethod
    def from_env(cls, prefix: str = "PYGEAI_") -> "OrchestrationConfig":
        """Create configuration from environment variables.

        Recognized variables (with the default prefix): PYGEAI_PROFILE,
        PYGEAI_LOG_LEVEL, PYGEAI_DEBUG, PYGEAI_CACHE_ENABLED,
        PYGEAI_CACHE_MAX_SIZE, PYGEAI_METRICS_ENABLED,
        PYGEAI_MAX_ITERATIONS, PYGEAI_TIMEOUT. Unset or empty variables
        are ignored and fall back to model defaults.

        Args:
            prefix: Environment variable prefix

        Returns:
            OrchestrationConfig instance

        Raises:
            ValueError: If a numeric variable does not contain an integer.
        """
        config_data: Dict[str, Any] = {}

        profile = os.getenv(f"{prefix}PROFILE")
        if profile:
            config_data["profile"] = profile

        log_level = os.getenv(f"{prefix}LOG_LEVEL")
        if log_level:
            config_data["log_level"] = log_level

        debug = os.getenv(f"{prefix}DEBUG")
        if debug:
            config_data["debug"] = cls._env_bool(debug)

        # Nested sections are created on demand so absent variables leave the
        # corresponding sub-model entirely at its defaults.
        cache_enabled = os.getenv(f"{prefix}CACHE_ENABLED")
        if cache_enabled:
            config_data.setdefault("cache", {})["enabled"] = cls._env_bool(cache_enabled)

        cache_size = os.getenv(f"{prefix}CACHE_MAX_SIZE")
        if cache_size:
            config_data.setdefault("cache", {})["max_size"] = cls._env_int(
                f"{prefix}CACHE_MAX_SIZE", cache_size
            )

        metrics_enabled = os.getenv(f"{prefix}METRICS_ENABLED")
        if metrics_enabled:
            config_data.setdefault("metrics", {})["enabled"] = cls._env_bool(metrics_enabled)

        max_iterations = os.getenv(f"{prefix}MAX_ITERATIONS")
        if max_iterations:
            config_data.setdefault("execution", {})["max_iterations"] = cls._env_int(
                f"{prefix}MAX_ITERATIONS", max_iterations
            )

        timeout = os.getenv(f"{prefix}TIMEOUT")
        if timeout:
            config_data.setdefault("execution", {})["timeout"] = cls._env_int(
                f"{prefix}TIMEOUT", timeout
            )

        return cls(**config_data)

    @classmethod
    def from_file(cls, path: Union[str, Path]) -> "OrchestrationConfig":
        """Load configuration from JSON file.

        Args:
            path: Path to configuration file

        Returns:
            OrchestrationConfig instance

        Raises:
            FileNotFoundError: If file doesn't exist
            ValueError: If file format is invalid
        """
        file_path = Path(path)
        if not file_path.exists():
            raise FileNotFoundError(f"Configuration file not found: {path}")

        with open(file_path, "r") as f:
            data = json.load(f)

        return cls(**data)

    def to_file(self, path: Union[str, Path]) -> None:
        """Save configuration to JSON file.

        Parent directories are created as needed.

        Args:
            path: Path to save configuration
        """
        file_path = Path(path)
        file_path.parent.mkdir(parents=True, exist_ok=True)

        with open(file_path, "w") as f:
            # mode="json" guarantees JSON-serializable primitives (enums
            # become their string values) regardless of `custom` contents.
            json.dump(self.model_dump(mode="json"), f, indent=2)

    def merge(self, other: "OrchestrationConfig") -> "OrchestrationConfig":
        """Merge with another configuration.

        Values from ``other`` take precedence; nested dicts are merged
        recursively rather than replaced wholesale.

        Args:
            other: Configuration to merge

        Returns:
            New merged configuration
        """
        merged = self._deep_merge(self.model_dump(), other.model_dump())
        return OrchestrationConfig(**merged)

    @staticmethod
    def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
        """Deep merge two dictionaries.

        Args:
            base: Base dictionary
            override: Override dictionary

        Returns:
            Merged dictionary (``base`` is not mutated)
        """
        result = base.copy()

        for key, value in override.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                result[key] = OrchestrationConfig._deep_merge(result[key], value)
            else:
                result[key] = value

        return result

    def get_profile_config(self) -> "OrchestrationConfig":
        """Get profile-specific configuration.

        Returns:
            Configuration with profile-specific defaults; DEVELOPMENT and
            CUSTOM profiles are returned unchanged.
        """
        # Table of per-profile overrides; merged over the current values.
        profile_overrides: Dict[ConfigProfile, Dict[str, Any]] = {
            ConfigProfile.PRODUCTION: {
                "log_level": LogLevel.WARNING,
                "debug": False,
                "execution": {
                    "max_iterations": 20,
                    "timeout": 600,
                    "retry_attempts": 5
                }
            },
            ConfigProfile.TEST: {
                "log_level": LogLevel.DEBUG,
                "debug": True,
                "cache": {"enabled": False},
                "metrics": {"enabled": False},
                "execution": {
                    "max_iterations": 5,
                    "timeout": 10
                }
            }
        }

        overrides = profile_overrides.get(self.profile)
        if overrides is None:
            return self

        merged = self._deep_merge(self.model_dump(), overrides)
        return OrchestrationConfig(**merged)
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
class ConfigManager:
    """Configuration manager for global configuration access.

    Implemented as a process-wide singleton: every instantiation returns the
    same object, and the active configuration is stored on that shared
    instance. NOTE(review): instance creation is not synchronized — this
    presumably assumes single-threaded initialization; confirm before using
    from multiple threads.
    """

    # Shared singleton instance and its currently active configuration.
    _instance: Optional["ConfigManager"] = None
    _config: Optional[OrchestrationConfig] = None

    def __new__(cls) -> "ConfigManager":
        """Ensure singleton instance."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @classmethod
    def initialize(
        cls,
        config: Optional[OrchestrationConfig] = None,
        config_file: Optional[Union[str, Path]] = None,
        from_env: bool = True
    ) -> "ConfigManager":
        """Initialize configuration manager.

        Sources are tried in priority order: explicit ``config``, then
        ``config_file``, then environment variables, then built-in defaults.

        Args:
            config: Configuration instance
            config_file: Path to configuration file
            from_env: Load from environment variables

        Returns:
            ConfigManager instance
        """
        instance = cls()

        if config:
            # NOTE(review): unlike every other branch, an explicitly supplied
            # config is NOT passed through get_profile_config() — profile
            # overrides are skipped here; confirm this is intentional.
            instance._config = config
        elif config_file:
            instance._config = OrchestrationConfig.from_file(config_file)
            instance._config = instance._config.get_profile_config()
        elif from_env:
            instance._config = OrchestrationConfig.from_env()
            instance._config = instance._config.get_profile_config()
        else:
            instance._config = OrchestrationConfig()
            instance._config = instance._config.get_profile_config()

        return instance

    @classmethod
    def get_config(cls) -> OrchestrationConfig:
        """Get current configuration.

        If the manager has never been initialized, a default
        ``OrchestrationConfig`` is created lazily — this method never raises
        for missing configuration.

        Returns:
            Current configuration
        """
        instance = cls()
        if instance._config is None:
            instance._config = OrchestrationConfig()
        return instance._config

    @classmethod
    def set_config(cls, config: OrchestrationConfig) -> None:
        """Set configuration.

        Args:
            config: Configuration to set
        """
        instance = cls()
        instance._config = config

    @classmethod
    def reset(cls) -> None:
        """Reset configuration to default.

        Clears only the stored configuration; the singleton instance itself
        is kept, and the next get_config() recreates defaults lazily.
        """
        instance = cls()
        instance._config = None
|