agentflow-orchestrator 1.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentflow/__init__.py +68 -0
- agentflow/agents/__init__.py +1 -0
- agentflow/agents/base_agent.py +67 -0
- agentflow/agents/executor.py +307 -0
- agentflow/agents/planner.py +265 -0
- agentflow/agents/validator.py +228 -0
- agentflow/caching/__init__.py +21 -0
- agentflow/caching/backends.py +108 -0
- agentflow/caching/response_cache.py +238 -0
- agentflow/connectors/__init__.py +21 -0
- agentflow/connectors/aws/__init__.py +5 -0
- agentflow/connectors/aws/client.py +303 -0
- agentflow/connectors/azure/__init__.py +5 -0
- agentflow/connectors/azure/client.py +295 -0
- agentflow/connectors/base.py +138 -0
- agentflow/connectors/graphql/__init__.py +5 -0
- agentflow/connectors/graphql/client.py +256 -0
- agentflow/connectors/mulesoft/__init__.py +1 -0
- agentflow/connectors/mulesoft/client.py +369 -0
- agentflow/connectors/rest/__init__.py +19 -0
- agentflow/connectors/rest/auth.py +78 -0
- agentflow/connectors/rest/client.py +240 -0
- agentflow/core/__init__.py +1 -0
- agentflow/core/context.py +187 -0
- agentflow/core/cyclic_workflow.py +334 -0
- agentflow/core/orchestrator.py +239 -0
- agentflow/core/plan.py +218 -0
- agentflow/nlp/__init__.py +24 -0
- agentflow/nlp/hybrid_intent_parser.py +117 -0
- agentflow/nlp/intent_parser.py +391 -0
- agentflow/nlp/llm_intent_parser.py +204 -0
- agentflow/nlp/llm_provider.py +231 -0
- agentflow/observability/__init__.py +17 -0
- agentflow/observability/metrics.py +169 -0
- agentflow/observability/tracer.py +229 -0
- agentflow/resilience/__init__.py +30 -0
- agentflow/resilience/bulkhead.py +204 -0
- agentflow/resilience/circuit_breaker.py +224 -0
- agentflow/resilience/cooldown_strategy.py +228 -0
- agentflow/resilience/retry_policy.py +171 -0
- agentflow/routing/__init__.py +37 -0
- agentflow/routing/adaptive_weight_optimizer.py +335 -0
- agentflow/routing/budget_router.py +205 -0
- agentflow/routing/dynamic_router.py +289 -0
- agentflow/utils/__init__.py +1 -0
- agentflow_orchestrator-1.3.0.dist-info/METADATA +225 -0
- agentflow_orchestrator-1.3.0.dist-info/RECORD +56 -0
- agentflow_orchestrator-1.3.0.dist-info/WHEEL +5 -0
- agentflow_orchestrator-1.3.0.dist-info/licenses/LICENSE +19 -0
- agentflow_orchestrator-1.3.0.dist-info/top_level.txt +3 -0
- benchmarks/__init__.py +1 -0
- benchmarks/baseline_comparison.py +305 -0
- experiments/__init__.py +1 -0
- experiments/intent_corpus.py +252 -0
- experiments/parser_quality_benchmark.py +213 -0
- experiments/routing_weight_ablation.py +215 -0
agentflow/__init__.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AgentFlow - A Multi-Agent Framework for AI-Powered Enterprise API Orchestration.
|
|
3
|
+
|
|
4
|
+
A production-grade framework where autonomous AI agents dynamically orchestrate,
|
|
5
|
+
compose, and self-heal API workflows across enterprise integration platforms,
|
|
6
|
+
with first-class MuleSoft Anypoint support.
|
|
7
|
+
|
|
8
|
+
Author: Venkata Pavan Kumar Gummadi
|
|
9
|
+
License: Apache 2.0
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
__version__ = "1.3.0"
|
|
13
|
+
__author__ = "Venkata Pavan Kumar Gummadi"
|
|
14
|
+
|
|
15
|
+
from agentflow.agents.executor import ExecutorAgent
|
|
16
|
+
from agentflow.agents.planner import PlannerAgent
|
|
17
|
+
from agentflow.agents.validator import ValidatorAgent
|
|
18
|
+
from agentflow.caching.response_cache import ResponseCache
|
|
19
|
+
from agentflow.connectors.base import BaseConnector
|
|
20
|
+
from agentflow.connectors.graphql.client import GraphQLConnector
|
|
21
|
+
from agentflow.connectors.mulesoft.client import MuleSoftConnector
|
|
22
|
+
from agentflow.connectors.rest.client import RESTConnector
|
|
23
|
+
from agentflow.core.context import OrchestrationContext
|
|
24
|
+
from agentflow.core.orchestrator import AgentOrchestrator
|
|
25
|
+
from agentflow.core.plan import ExecutionPlan, PlanStep
|
|
26
|
+
from agentflow.nlp.hybrid_intent_parser import HybridIntentParser
|
|
27
|
+
from agentflow.nlp.intent_parser import IntentParser
|
|
28
|
+
from agentflow.nlp.llm_intent_parser import LLMIntentParser
|
|
29
|
+
from agentflow.nlp.llm_provider import (
|
|
30
|
+
CallableLLMProvider,
|
|
31
|
+
DeterministicMockProvider,
|
|
32
|
+
LLMProvider,
|
|
33
|
+
)
|
|
34
|
+
from agentflow.observability.metrics import MetricsCollector
|
|
35
|
+
from agentflow.observability.tracer import Tracer
|
|
36
|
+
from agentflow.resilience.bulkhead import Bulkhead, BulkheadRegistry
|
|
37
|
+
from agentflow.resilience.circuit_breaker import CircuitBreaker
|
|
38
|
+
from agentflow.routing.budget_router import BudgetMode, BudgetRouter
|
|
39
|
+
from agentflow.routing.dynamic_router import DynamicRouter
|
|
40
|
+
|
|
41
|
+
__all__ = [
|
|
42
|
+
"AgentOrchestrator",
|
|
43
|
+
"OrchestrationContext",
|
|
44
|
+
"ExecutionPlan",
|
|
45
|
+
"PlanStep",
|
|
46
|
+
"MuleSoftConnector",
|
|
47
|
+
"RESTConnector",
|
|
48
|
+
"GraphQLConnector",
|
|
49
|
+
"BaseConnector",
|
|
50
|
+
"PlannerAgent",
|
|
51
|
+
"ExecutorAgent",
|
|
52
|
+
"ValidatorAgent",
|
|
53
|
+
"DynamicRouter",
|
|
54
|
+
"BudgetRouter",
|
|
55
|
+
"BudgetMode",
|
|
56
|
+
"CircuitBreaker",
|
|
57
|
+
"Bulkhead",
|
|
58
|
+
"BulkheadRegistry",
|
|
59
|
+
"ResponseCache",
|
|
60
|
+
"Tracer",
|
|
61
|
+
"MetricsCollector",
|
|
62
|
+
"IntentParser",
|
|
63
|
+
"LLMIntentParser",
|
|
64
|
+
"HybridIntentParser",
|
|
65
|
+
"LLMProvider",
|
|
66
|
+
"DeterministicMockProvider",
|
|
67
|
+
"CallableLLMProvider",
|
|
68
|
+
]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Autonomous AI agents for API orchestration."""
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Base Agent — abstract foundation for all orchestration agents.
|
|
3
|
+
|
|
4
|
+
Agents are autonomous units that collaborate through the shared
|
|
5
|
+
OrchestrationContext. Each agent has a specific role in the
|
|
6
|
+
orchestration lifecycle and can communicate with other agents
|
|
7
|
+
via context events.
|
|
8
|
+
|
|
9
|
+
Author: Venkata Pavan Kumar Gummadi
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import logging
|
|
15
|
+
import uuid
|
|
16
|
+
from abc import ABC, abstractmethod
|
|
17
|
+
from typing import Any
|
|
18
|
+
|
|
19
|
+
from agentflow.core.context import EventType, OrchestrationContext
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class BaseAgent(ABC):
    """
    Abstract base for orchestration agents.

    Each agent has:
    - A unique identity for audit tracking
    - Access to the shared OrchestrationContext
    - Ability to emit events to the journal
    - A defined role in the orchestration pipeline
    """

    def __init__(
        self,
        agent_id: str | None = None,
        name: str = "BaseAgent",
        config: dict[str, Any] | None = None,
    ):
        # Auto-generate a short, human-readable id when the caller does not
        # supply one: lowercase agent name plus a 6-hex-char random suffix.
        if not agent_id:
            agent_id = "{}-{}".format(name.lower(), uuid.uuid4().hex[:6])
        self.agent_id = agent_id
        self.name = name
        self.config = config if config else {}
        # Per-agent child logger, namespaced under agentflow.agents.
        self._logger = logging.getLogger("agentflow.agents." + name)

    def emit_event(
        self,
        context: OrchestrationContext,
        event_type: EventType,
        message: str = "",
        step_id: str | None = None,
        payload: dict[str, Any] | None = None,
    ) -> None:
        """Record an event in the orchestration journal."""
        event_fields = {
            "event_type": event_type,
            "agent_id": self.agent_id,
            "step_id": step_id,
            "payload": payload if payload else {},
            "message": message,
        }
        context.record_event(**event_fields)
        # Mirror the event to the agent's own logger for local debugging.
        self._logger.debug("[%s] %s: %s", self.agent_id, event_type.value, message)

    @abstractmethod
    async def execute(self, context: OrchestrationContext, **kwargs: Any) -> Any:
        """Execute the agent's primary function."""
        ...
|
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Executor Agent — executes plans with parallel scheduling and resilience.
|
|
3
|
+
|
|
4
|
+
The ExecutorAgent traverses the ExecutionPlan DAG, running steps in
|
|
5
|
+
parallel where dependencies allow, with intelligent routing and
|
|
6
|
+
self-healing resilience (circuit breakers, retries, fallbacks).
|
|
7
|
+
|
|
8
|
+
Author: Venkata Pavan Kumar Gummadi
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import asyncio
|
|
14
|
+
import logging
|
|
15
|
+
from typing import Any
|
|
16
|
+
|
|
17
|
+
from agentflow.agents.base_agent import BaseAgent
|
|
18
|
+
from agentflow.connectors.base import APIResponse, BaseConnector
|
|
19
|
+
from agentflow.core.context import EventType, OrchestrationContext
|
|
20
|
+
from agentflow.core.plan import ExecutionPlan, PlanStep, StepStatus
|
|
21
|
+
from agentflow.resilience.circuit_breaker import CircuitBreaker
|
|
22
|
+
from agentflow.resilience.retry_policy import RetryPolicy
|
|
23
|
+
from agentflow.routing.dynamic_router import DynamicRouter
|
|
24
|
+
|
|
25
|
+
logger = logging.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ExecutorAgent(BaseAgent):
    """
    Executes plan steps with parallel scheduling and resilience.

    Execution strategy:
    1. Identify all ready steps (dependencies satisfied)
    2. Route each step to the optimal endpoint via DynamicRouter
    3. Execute ready steps in parallel (bounded concurrency)
    4. Apply circuit breaker and retry logic on failures
    5. Store results in context for downstream steps
    6. Repeat until all steps are terminal
    """

    def __init__(self, **kwargs: Any):
        super().__init__(name="ExecutorAgent", **kwargs)
        # One circuit breaker per connector id, created lazily by
        # _get_circuit_breaker; shared across all steps hitting that connector.
        self._circuit_breakers: dict[str, CircuitBreaker] = {}
        self._retry_policy = RetryPolicy()

    async def execute(self, context: OrchestrationContext, **kwargs: Any) -> dict[str, Any]:
        """Execute the plan from context.

        Required kwarg ``plan`` (an ExecutionPlan); optional ``connectors``
        (mapping of connector id -> BaseConnector) and ``router``.

        Raises:
            TypeError: if ``plan`` is missing or not an ExecutionPlan.
        """
        plan = kwargs.get("plan")
        if not isinstance(plan, ExecutionPlan):
            raise TypeError("ExecutorAgent.execute requires a 'plan' ExecutionPlan kwarg")
        connectors = kwargs.get("connectors", {})
        router = kwargs.get("router")
        return await self.execute_plan(plan, context, connectors, router)

    async def execute_plan(
        self,
        plan: ExecutionPlan,
        context: OrchestrationContext,
        connectors: dict[str, BaseConnector],
        router: DynamicRouter | None = None,
        max_parallel: int = 10,
    ) -> dict[str, Any]:
        """
        Execute all steps in the plan with maximum parallelism.

        Uses a work-stealing scheduler: continuously finds ready steps
        and executes them concurrently, bounded by max_parallel.

        Returns a mapping of step_id -> result for steps that completed.
        """
        outputs: dict[str, Any] = {}
        # Bounds how many steps run concurrently within one iteration.
        semaphore = asyncio.Semaphore(max_parallel)
        iteration = 0

        while not plan.is_complete:
            iteration += 1
            ready_steps = plan.get_ready_steps()

            if not ready_steps:
                # Deadlock detection: steps still PENDING while nothing is
                # runnable means their dependencies can never be satisfied.
                pending = [s for s in plan.steps if s.status == StepStatus.PENDING]
                if pending:
                    logger.error(
                        "Deadlock detected: %d pending steps with unmet dependencies",
                        len(pending),
                    )
                    for step in pending:
                        step.mark_failed("Deadlock: unresolvable dependencies")
                # Nothing runnable -> stop scheduling (deadlocked steps were
                # just failed above; otherwise the plan has drained).
                break

            logger.info(
                "Iteration %d: executing %d ready steps in parallel",
                iteration,
                len(ready_steps),
            )

            # Execute ready steps in parallel
            tasks = [
                self._execute_step_with_semaphore(
                    step=step,
                    context=context,
                    connectors=connectors,
                    router=router,
                    semaphore=semaphore,
                )
                for step in ready_steps
            ]

            # return_exceptions=True so one failing step cannot cancel its
            # siblings; exceptions are surfaced per-step below.
            results = await asyncio.gather(*tasks, return_exceptions=True)

            # Collect outputs
            for step, result in zip(ready_steps, results):
                if isinstance(result, Exception):
                    # NOTE(review): _execute_step already calls mark_failed
                    # before re-raising, so mark_failed may run twice here —
                    # confirm mark_failed is idempotent.
                    step.mark_failed(str(result))
                    self.emit_event(
                        context,
                        EventType.STEP_FAILED,
                        step_id=step.step_id,
                        message=f"Step {step.name} failed: {result}",
                    )
                elif step.status == StepStatus.COMPLETED:
                    outputs[step.step_id] = step.result
                    context.store_step_result(step.step_id, step.result)

        return outputs

    async def _execute_step_with_semaphore(
        self,
        step: PlanStep,
        context: OrchestrationContext,
        connectors: dict[str, BaseConnector],
        router: DynamicRouter | None,
        semaphore: asyncio.Semaphore,
    ) -> Any:
        """Execute a single step within the concurrency semaphore."""
        async with semaphore:
            return await self._execute_step(step, context, connectors, router)

    async def _execute_step(
        self,
        step: PlanStep,
        context: OrchestrationContext,
        connectors: dict[str, BaseConnector],
        router: DynamicRouter | None,
    ) -> Any:
        """
        Execute a single plan step with full resilience.

        Flow:
        1. Check condition (skip if false)
        2. Get circuit breaker for the target connector
        3. Route to optimal endpoint
        4. Execute with retry policy
        5. Apply output transformation

        Returns the (possibly transformed) step result, or None when the
        step was skipped or no connector could be resolved.
        """
        step.mark_running()
        self.emit_event(
            context,
            EventType.STEP_STARTED,
            step_id=step.step_id,
            message=f"Starting step: {step.name}",
        )

        # Check condition
        if step.condition:
            if not self._evaluate_condition(step.condition, context):
                step.mark_skipped(f"Condition not met: {step.condition}")
                return None

        # Resolve connector
        connector = connectors.get(step.connector_id)
        if not connector:
            # Try routing to find the best connector
            if router and connectors:
                connector = self._route_to_connector(step, list(connectors.values()), router)

            if not connector:
                # Resolution failed: the step fails quietly (returns None)
                # rather than raising, unlike execution errors below.
                step.mark_failed(f"No connector available for: {step.connector_id}")
                return None

        # Get or create circuit breaker
        cb = self._get_circuit_breaker(connector.connector_id)

        # Execute with circuit breaker and retry
        try:
            result = await self._execute_with_resilience(
                step=step,
                connector=connector,
                circuit_breaker=cb,
                context=context,
            )

            step.mark_completed(result)
            self.emit_event(
                context,
                EventType.STEP_COMPLETED,
                step_id=step.step_id,
                message=f"Step {step.name} completed successfully",
            )
            return result

        except Exception as e:
            step.mark_failed(str(e))

            # Try fallback if available
            if step.fallback_step_id:
                # NOTE(review): only the FALLBACK_TRIGGERED event is emitted;
                # the fallback step itself is not executed here — presumably
                # the orchestrator reacts to the event. Confirm.
                self.emit_event(
                    context,
                    EventType.FALLBACK_TRIGGERED,
                    step_id=step.step_id,
                    message=f"Triggering fallback for {step.name}",
                )

            raise

    async def _execute_with_resilience(
        self,
        step: PlanStep,
        connector: BaseConnector,
        circuit_breaker: CircuitBreaker,
        context: OrchestrationContext,
    ) -> Any:
        """Execute a step with circuit breaker and retry logic.

        Raises:
            RuntimeError: when the circuit is open, or the call fails
                non-retryably / exhausts its retries.
        """

        # Check circuit breaker
        if not circuit_breaker.allow_request():
            raise RuntimeError(f"Circuit open for connector {connector.connector_id}")

        # Per-step retry policy overrides these defaults when present.
        retry_config = step.retry_policy or {
            "max_retries": 3,
            "backoff_base": 1.0,
            "backoff_max": 30.0,
        }

        last_error: Exception | None = None
        max_retries = retry_config.get("max_retries", 3)

        # max_retries + 1 total attempts (initial try plus retries).
        for attempt in range(max_retries + 1):
            try:
                response: APIResponse = await connector.invoke(
                    operation=step.operation,
                    parameters=step.parameters,
                    timeout_ms=step.timeout_ms,
                )

                if response.success:
                    circuit_breaker.record_success()
                    result = response.body

                    # Apply transformation if specified
                    if step.transform:
                        result = self._apply_transform(result, step.transform)

                    return result
                elif response.retryable and attempt < max_retries:
                    # Retryable API-level failure: back off and loop again.
                    # NOTE(review): this path does not record a circuit-breaker
                    # failure — confirm that is intentional.
                    self.emit_event(
                        context,
                        EventType.STEP_RETRIED,
                        step_id=step.step_id,
                        message=f"Retry {attempt + 1}/{max_retries}: {response.error_message}",
                    )
                    backoff = self._retry_policy.calculate_backoff(attempt, retry_config)
                    await asyncio.sleep(backoff)
                    continue
                else:
                    # Non-retryable, or retries exhausted: trip the breaker.
                    circuit_breaker.record_failure()
                    raise RuntimeError(
                        f"API call failed: {response.status_code} - {response.error_message}"
                    )

            except Exception as e:
                # Covers transport exceptions and the RuntimeError raised just
                # above; the latter is retried as well when attempts remain.
                last_error = e
                if attempt < max_retries:
                    backoff = self._retry_policy.calculate_backoff(attempt, retry_config)
                    await asyncio.sleep(backoff)
                else:
                    circuit_breaker.record_failure()
                    raise

        # Defensive: normally unreachable (the final attempt either returns
        # or re-raises above).
        raise last_error or RuntimeError("Execution failed after retries")

    def _get_circuit_breaker(self, connector_id: str) -> CircuitBreaker:
        """Get or create a circuit breaker for a connector."""
        if connector_id not in self._circuit_breakers:
            self._circuit_breakers[connector_id] = CircuitBreaker(name=connector_id)
        return self._circuit_breakers[connector_id]

    def _route_to_connector(
        self,
        step: PlanStep,
        connectors: list[BaseConnector],
        router: DynamicRouter,
    ) -> BaseConnector | None:
        """Use the DynamicRouter to find the best connector."""
        # Simplified routing — full implementation in DynamicRouter
        # NOTE(review): `step` and `router` are currently unused; the first
        # connector is always chosen.
        if connectors:
            return connectors[0]
        return None

    def _evaluate_condition(self, condition: str, context: OrchestrationContext) -> bool:
        """Evaluate a step condition against the context."""
        # Simple expression evaluation
        # Production: use a safe expression parser
        # NOTE(review): placeholder — every condition currently evaluates True,
        # so steps are never skipped.
        return True

    def _apply_transform(self, data: Any, transform: str) -> Any:
        """Apply a JMESPath/JSONPath transformation to step output."""
        # Placeholder for transform engine
        # NOTE(review): placeholder — the transform expression is ignored and
        # data is returned unchanged.
        return data