fast-agent-mcp 0.2.12__py3-none-any.whl → 0.2.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {fast_agent_mcp-0.2.12.dist-info → fast_agent_mcp-0.2.14.dist-info}/METADATA +1 -1
  2. {fast_agent_mcp-0.2.12.dist-info → fast_agent_mcp-0.2.14.dist-info}/RECORD +33 -33
  3. mcp_agent/agents/agent.py +2 -2
  4. mcp_agent/agents/base_agent.py +3 -3
  5. mcp_agent/agents/workflow/chain_agent.py +2 -2
  6. mcp_agent/agents/workflow/evaluator_optimizer.py +3 -3
  7. mcp_agent/agents/workflow/orchestrator_agent.py +3 -3
  8. mcp_agent/agents/workflow/parallel_agent.py +2 -2
  9. mcp_agent/agents/workflow/router_agent.py +2 -2
  10. mcp_agent/cli/commands/check_config.py +450 -0
  11. mcp_agent/cli/commands/setup.py +1 -1
  12. mcp_agent/cli/main.py +8 -15
  13. mcp_agent/core/agent_types.py +8 -8
  14. mcp_agent/core/direct_decorators.py +10 -8
  15. mcp_agent/core/direct_factory.py +4 -1
  16. mcp_agent/core/validation.py +6 -4
  17. mcp_agent/event_progress.py +6 -6
  18. mcp_agent/llm/augmented_llm.py +10 -2
  19. mcp_agent/llm/augmented_llm_passthrough.py +5 -3
  20. mcp_agent/llm/augmented_llm_playback.py +2 -1
  21. mcp_agent/llm/model_factory.py +7 -27
  22. mcp_agent/llm/provider_key_manager.py +83 -0
  23. mcp_agent/llm/provider_types.py +16 -0
  24. mcp_agent/llm/providers/augmented_llm_anthropic.py +5 -26
  25. mcp_agent/llm/providers/augmented_llm_deepseek.py +5 -24
  26. mcp_agent/llm/providers/augmented_llm_generic.py +2 -16
  27. mcp_agent/llm/providers/augmented_llm_openai.py +4 -26
  28. mcp_agent/llm/providers/augmented_llm_openrouter.py +17 -45
  29. mcp_agent/mcp/interfaces.py +2 -1
  30. mcp_agent/mcp_server/agent_server.py +335 -14
  31. mcp_agent/cli/commands/config.py +0 -11
  32. mcp_agent/executor/temporal.py +0 -383
  33. mcp_agent/executor/workflow.py +0 -195
  34. {fast_agent_mcp-0.2.12.dist-info → fast_agent_mcp-0.2.14.dist-info}/WHEEL +0 -0
  35. {fast_agent_mcp-0.2.12.dist-info → fast_agent_mcp-0.2.14.dist-info}/entry_points.txt +0 -0
  36. {fast_agent_mcp-0.2.12.dist-info → fast_agent_mcp-0.2.14.dist-info}/licenses/LICENSE +0 -0
mcp_agent/executor/temporal.py
@@ -1,383 +0,0 @@
- """
- Temporal based orchestrator for the MCP Agent.
- Temporal provides durable execution and robust workflow orchestration,
- as well as dynamic control flow, making it a good choice for an AI agent orchestrator.
- Read more: https://docs.temporal.io/develop/python/core-application
- """
-
- import asyncio
- import functools
- import uuid
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     AsyncIterator,
-     Callable,
-     Coroutine,
-     Dict,
-     List,
-     Optional,
- )
-
- from pydantic import ConfigDict
- from temporalio import activity, exceptions, workflow
- from temporalio.client import Client as TemporalClient
- from temporalio.worker import Worker
-
- from mcp_agent.config import TemporalSettings
- from mcp_agent.executor.executor import Executor, ExecutorConfig, R
- from mcp_agent.executor.workflow_signal import (
-     BaseSignalHandler,
-     Signal,
-     SignalHandler,
-     SignalRegistration,
-     SignalValueT,
- )
-
- if TYPE_CHECKING:
-     from mcp_agent.context import Context
-
-
- class TemporalSignalHandler(BaseSignalHandler[SignalValueT]):
-     """Temporal-based signal handling using workflow signals"""
-
-     async def wait_for_signal(self, signal, timeout_seconds=None) -> SignalValueT:
-         if not workflow._Runtime.current():
-             raise RuntimeError(
-                 "TemporalSignalHandler.wait_for_signal must be called from within a workflow"
-             )
-
-         unique_signal_name = f"{signal.name}_{uuid.uuid4()}"
-         registration = SignalRegistration(
-             signal_name=signal.name,
-             unique_name=unique_signal_name,
-             workflow_id=workflow.info().workflow_id,
-         )
-
-         # Container for signal value
-         container = {"value": None, "completed": False}
-
-         # Define the signal handler for this specific registration
-         @workflow.signal(name=unique_signal_name)
-         def signal_handler(value: SignalValueT) -> None:
-             container["value"] = value
-             container["completed"] = True
-
-         async with self._lock:
-             # Register both the signal registration and handler atomically
-             self._pending_signals.setdefault(signal.name, []).append(registration)
-             self._handlers.setdefault(signal.name, []).append((unique_signal_name, signal_handler))
-
-         try:
-             # Wait for signal with optional timeout
-             await workflow.wait_condition(lambda: container["completed"], timeout=timeout_seconds)
-
-             return container["value"]
-         except asyncio.TimeoutError as exc:
-             raise TimeoutError(f"Timeout waiting for signal {signal.name}") from exc
-         finally:
-             async with self._lock:
-                 # Remove ourselves from _pending_signals
-                 if signal.name in self._pending_signals:
-                     self._pending_signals[signal.name] = [
-                         sr
-                         for sr in self._pending_signals[signal.name]
-                         if sr.unique_name != unique_signal_name
-                     ]
-                     if not self._pending_signals[signal.name]:
-                         del self._pending_signals[signal.name]
-
-                 # Remove ourselves from _handlers
-                 if signal.name in self._handlers:
-                     self._handlers[signal.name] = [
-                         h for h in self._handlers[signal.name] if h[0] != unique_signal_name
-                     ]
-                     if not self._handlers[signal.name]:
-                         del self._handlers[signal.name]
-
-     def on_signal(self, signal_name):
-         """Decorator to register a signal handler."""
-
-         def decorator(func: Callable) -> Callable:
-             # Create unique signal name for this handler
-             unique_signal_name = f"{signal_name}_{uuid.uuid4()}"
-
-             # Create the actual handler that will be registered with Temporal
-             @workflow.signal(name=unique_signal_name)
-             async def wrapped(signal_value: SignalValueT) -> None:
-                 # Create a signal object to pass to the handler
-                 signal = Signal(
-                     name=signal_name,
-                     payload=signal_value,
-                     workflow_id=workflow.info().workflow_id,
-                 )
-                 if asyncio.iscoroutinefunction(func):
-                     await func(signal)
-                 else:
-                     func(signal)
-
-             # Register the handler under the original signal name
-             self._handlers.setdefault(signal_name, []).append((unique_signal_name, wrapped))
-             return func
-
-         return decorator
-
-     async def signal(self, signal) -> None:
-         self.validate_signal(signal)
-
-         workflow_handle = workflow.get_external_workflow_handle(workflow_id=signal.workflow_id)
-
-         # Send the signal to all registrations of this signal
-         async with self._lock:
-             signal_tasks = []
-
-             if signal.name in self._pending_signals:
-                 for pending_signal in self._pending_signals[signal.name]:
-                     registration = pending_signal.registration
-                     if registration.workflow_id == signal.workflow_id:
-                         # Only signal for registrations of that workflow
-                         signal_tasks.append(
-                             workflow_handle.signal(registration.unique_name, signal.payload)
-                         )
-                     else:
-                         continue
-
-             # Notify any registered handler functions
-             if signal.name in self._handlers:
-                 for unique_name, _ in self._handlers[signal.name]:
-                     signal_tasks.append(workflow_handle.signal(unique_name, signal.payload))
-
-         await asyncio.gather(*signal_tasks, return_exceptions=True)
-
-     def validate_signal(self, signal) -> None:
-         super().validate_signal(signal)
-         # Add TemporalSignalHandler-specific validation
-         if signal.workflow_id is None:
-             raise ValueError(
-                 "No workflow_id provided on Signal. That is required for Temporal signals"
-             )
-
-
- class TemporalExecutorConfig(ExecutorConfig, TemporalSettings):
-     """Configuration for Temporal executors."""
-
-     model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
-
-
- class TemporalExecutor(Executor):
-     """Executor that runs @workflows as Temporal workflows, with @workflow_tasks as Temporal activities"""
-
-     def __init__(
-         self,
-         config: TemporalExecutorConfig | None = None,
-         signal_bus: SignalHandler | None = None,
-         client: TemporalClient | None = None,
-         context: Optional["Context"] = None,
-         **kwargs,
-     ) -> None:
-         signal_bus = signal_bus or TemporalSignalHandler()
-         super().__init__(
-             engine="temporal",
-             config=config,
-             signal_bus=signal_bus,
-             context=context,
-             **kwargs,
-         )
-         self.config: TemporalExecutorConfig = (
-             config or self.context.config.temporal or TemporalExecutorConfig()
-         )
-         self.client = client
-         self._worker = None
-         self._activity_semaphore = None
-
-         if config.max_concurrent_activities is not None:
-             self._activity_semaphore = asyncio.Semaphore(self.config.max_concurrent_activities)
-
-     @staticmethod
-     def wrap_as_activity(
-         activity_name: str,
-         func: Callable[..., R] | Coroutine[Any, Any, R],
-         **kwargs: Any,
-     ) -> Coroutine[Any, Any, R]:
-         """
-         Convert a function into a Temporal activity and return its info.
-         """
-
-         @activity.defn(name=activity_name)
-         async def wrapped_activity(*args, **local_kwargs):
-             try:
-                 if asyncio.iscoroutinefunction(func):
-                     return await func(*args, **local_kwargs)
-                 elif asyncio.iscoroutine(func):
-                     return await func
-                 else:
-                     return func(*args, **local_kwargs)
-             except Exception as e:
-                 # Handle exceptions gracefully
-                 raise e
-
-         return wrapped_activity
-
-     async def _execute_task_as_async(
-         self, task: Callable[..., R] | Coroutine[Any, Any, R], **kwargs: Any
-     ) -> R | BaseException:
-         async def run_task(task: Callable[..., R] | Coroutine[Any, Any, R]) -> R:
-             try:
-                 if asyncio.iscoroutine(task):
-                     return await task
-                 elif asyncio.iscoroutinefunction(task):
-                     return await task(**kwargs)
-                 else:
-                     # Execute the callable and await if it returns a coroutine
-                     loop = asyncio.get_running_loop()
-
-                     # If kwargs are provided, wrap the function with partial
-                     if kwargs:
-                         wrapped_task = functools.partial(task, **kwargs)
-                         result = await loop.run_in_executor(None, wrapped_task)
-                     else:
-                         result = await loop.run_in_executor(None, task)
-
-                     # Handle case where the sync function returns a coroutine
-                     if asyncio.iscoroutine(result):
-                         return await result
-
-                     return result
-             except Exception as e:
-                 # TODO: saqadri - adding logging or other error handling here
-                 return e
-
-         if self._activity_semaphore:
-             async with self._activity_semaphore:
-                 return await run_task(task)
-         else:
-             return await run_task(task)
-
-     async def _execute_task(
-         self, task: Callable[..., R] | Coroutine[Any, Any, R], **kwargs: Any
-     ) -> R | BaseException:
-         func = task.func if isinstance(task, functools.partial) else task
-         is_workflow_task = getattr(func, "is_workflow_task", False)
-         if not is_workflow_task:
-             return await asyncio.create_task(self._execute_task_as_async(task, **kwargs))
-
-         execution_metadata: Dict[str, Any] = getattr(func, "execution_metadata", {})
-
-         # Derive stable activity name, e.g. module + qualname
-         activity_name = execution_metadata.get("activity_name")
-         if not activity_name:
-             activity_name = f"{func.__module__}.{func.__qualname__}"
-
-         schedule_to_close = execution_metadata.get(
-             "schedule_to_close_timeout", self.config.timeout_seconds
-         )
-
-         retry_policy = execution_metadata.get("retry_policy", None)
-
-         _task_activity = self.wrap_as_activity(activity_name=activity_name, func=task)
-
-         # # For partials, we pass the partial's arguments
-         # args = task.args if isinstance(task, functools.partial) else ()
-         try:
-             result = await workflow.execute_activity(
-                 activity_name,
-                 args=kwargs.get("args", ()),
-                 task_queue=self.config.task_queue,
-                 schedule_to_close_timeout=schedule_to_close,
-                 retry_policy=retry_policy,
-                 **kwargs,
-             )
-             return result
-         except Exception as e:
-             # Properly propagate activity errors
-             if isinstance(e, exceptions.ActivityError):
-                 raise e.cause if e.cause else e
-             raise
-
-     async def execute(
-         self,
-         *tasks: Callable[..., R] | Coroutine[Any, Any, R],
-         **kwargs: Any,
-     ) -> List[R | BaseException]:
-         # Must be called from within a workflow
-         if not workflow._Runtime.current():
-             raise RuntimeError("TemporalExecutor.execute must be called from within a workflow")
-
-         # TODO: saqadri - validate if async with self.execution_context() is needed here
-         async with self.execution_context():
-             return await asyncio.gather(
-                 *(self._execute_task(task, **kwargs) for task in tasks),
-                 return_exceptions=True,
-             )
-
-     async def execute_streaming(
-         self,
-         *tasks: Callable[..., R] | Coroutine[Any, Any, R],
-         **kwargs: Any,
-     ) -> AsyncIterator[R | BaseException]:
-         if not workflow._Runtime.current():
-             raise RuntimeError(
-                 "TemporalExecutor.execute_streaming must be called from within a workflow"
-             )
-
-         # TODO: saqadri - validate if async with self.execution_context() is needed here
-         async with self.execution_context():
-             # Create futures for all tasks
-             futures = [self._execute_task(task, **kwargs) for task in tasks]
-             pending = set(futures)
-
-             while pending:
-                 done, pending = await workflow.wait(pending, return_when=asyncio.FIRST_COMPLETED)
-                 for future in done:
-                     try:
-                         result = await future
-                         yield result
-                     except Exception as e:
-                         yield e
-
-     async def ensure_client(self):
-         """Ensure we have a connected Temporal client."""
-         if self.client is None:
-             self.client = await TemporalClient.connect(
-                 target_host=self.config.host,
-                 namespace=self.config.namespace,
-                 api_key=self.config.api_key,
-             )
-
-         return self.client
-
-     async def start_worker(self) -> None:
-         """
-         Start a worker in this process, auto-registering all tasks
-         from the global registry. Also picks up any classes decorated
-         with @workflow_defn as recognized workflows.
-         """
-         await self.ensure_client()
-
-         if self._worker is None:
-             # We'll collect the activities from the global registry
-             # and optionally wrap them with `activity.defn` if we want
-             # (Not strictly required if your code calls `execute_activity("name")` by name)
-             activity_registry = self.context.task_registry
-             activities = []
-             for name in activity_registry.list_activities():
-                 activities.append(activity_registry.get_activity(name))
-
-             # Now we attempt to discover any classes that are recognized as workflows
-             # But in this simple example, we rely on the user specifying them or
-             # we might do a dynamic scan.
-             # For demonstration, we'll just assume the user is only using
-             # the workflow classes they decorate with `@workflow_defn`.
-             # We'll rely on them passing the classes or scanning your code.
-
-             self._worker = Worker(
-                 client=self.client,
-                 task_queue=self.config.task_queue,
-                 activities=activities,
-                 workflows=[],  # We'll auto-load by Python scanning or let the user specify
-             )
-             print(
-                 f"Starting Temporal Worker on task queue '{self.config.task_queue}' with {len(activities)} activities."
-             )
-
-         await self._worker.run()
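For reference, the deleted `TemporalExecutor` above was driven roughly as follows. This is a minimal sketch reconstructed from the removed code, not an API that still exists in 0.2.14: the host, namespace, and task-queue values are illustrative assumptions, and an initialized application `Context` (supplying the task registry that `start_worker` reads) is assumed to be wired in by the framework.

```python
# Minimal sketch, assuming the pre-removal module layout shown in the diff above.
# Config values are illustrative; a populated Context is assumed.
import asyncio

from mcp_agent.executor.temporal import TemporalExecutor, TemporalExecutorConfig


async def main() -> None:
    config = TemporalExecutorConfig(
        host="localhost:7233",        # assumed local Temporal dev server
        namespace="default",
        task_queue="mcp-agent-queue",  # hypothetical queue name
    )
    executor = TemporalExecutor(config=config)

    # ensure_client() lazily connects the Temporal client; start_worker()
    # then serves every activity registered in the context's task registry
    # and blocks inside Worker.run().
    await executor.ensure_client()
    await executor.start_worker()


if __name__ == "__main__":
    asyncio.run(main())
```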
mcp_agent/executor/workflow.py
@@ -1,195 +0,0 @@
- from abc import ABC, abstractmethod
- from datetime import datetime
- from typing import (
-     Any,
-     Dict,
-     Generic,
-     TypeVar,
-     Union,
- )
-
- from pydantic import BaseModel, ConfigDict, Field
-
- from mcp_agent.executor.executor import Executor
-
- T = TypeVar("T")
-
-
- class WorkflowState(BaseModel):
-     """
-     Simple container for persistent workflow state.
-     This can hold fields that should persist across tasks.
-     """
-
-     status: str = "initialized"
-     metadata: Dict[str, Any] = Field(default_factory=dict)
-     updated_at: float | None = None
-     error: Dict[str, Any] | None = None
-
-     model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
-
-     def record_error(self, error: Exception) -> None:
-         self.error = {
-             "type": type(error).__name__,
-             "message": str(error),
-             "timestamp": datetime.utcnow().timestamp(),
-         }
-
-
- class WorkflowResult(BaseModel, Generic[T]):
-     value: Union[T, None] = None
-     metadata: Dict[str, Any] = Field(default_factory=dict)
-     start_time: float | None = None
-     end_time: float | None = None
-
-
- class Workflow(ABC, Generic[T]):
-     """
-     Base class for user-defined workflows.
-     Handles execution and state management.
-     Some key notes:
-         - To enable the executor engine to recognize and orchestrate the workflow,
-             - the class MUST be decorated with @workflow.
-             - the main entrypoint method MUST be decorated with @workflow_run.
-             - any task methods MUST be decorated with @workflow_task.
-
-         - Persistent state: Provides a simple `state` object for storing data across tasks.
-     """
-
-     def __init__(
-         self,
-         executor: Executor,
-         name: str | None = None,
-         metadata: Dict[str, Any] | None = None,
-         **kwargs: Any,
-     ) -> None:
-         self.executor = executor
-         self.name = name or self.__class__.__name__
-         self.init_kwargs = kwargs
-         # TODO: handle logging
-         # self._logger = logging.getLogger(self.name)
-
-         # A simple workflow state object
-         # If under Temporal, storing it as a field on this class
-         # means it can be replayed automatically
-         self.state = WorkflowState(name=name, metadata=metadata or {})
-
-     @abstractmethod
-     async def run(self, *args: Any, **kwargs: Any) -> "WorkflowResult[T]":
-         """
-         Main workflow implementation. Must be overridden by subclasses.
-         """
-
-     async def update_state(self, **kwargs) -> None:
-         """Syntactic sugar to update workflow state."""
-         for key, value in kwargs.items():
-             self.state[key] = value
-             setattr(self.state, key, value)
-
-         self.state.updated_at = datetime.utcnow().timestamp()
-
-     async def wait_for_input(self, description: str = "Provide input") -> str:
-         """
-         Convenience method for human input. Uses `human_input` signal
-         so we can unify local (console input) and Temporal signals.
-         """
-         return await self.executor.wait_for_signal("human_input", description=description)
-
-
- # ############################
- # # Example: DocumentWorkflow
- # ############################
-
-
- # @workflow_defn  # <-- This becomes @temporal_workflow.defn if in Temporal mode, else no-op
- # class DocumentWorkflow(Workflow[List[Dict[str, Any]]]):
- #     """
- #     Example workflow with persistent state.
- #     If run locally, `self.state` is ephemeral.
- #     If run in Temporal mode, `self.state` is replayed automatically.
- #     """
-
- #     @workflow_task(
- #         schedule_to_close_timeout=timedelta(minutes=10),
- #         retry_policy={"initial_interval": 1, "max_attempts": 3},
- #     )
- #     async def process_document(self, doc_id: str) -> Dict[str, Any]:
- #         """Activity that simulates document processing."""
- #         await asyncio.sleep(1)
- #         # Optionally mutate workflow state
- #         self.state.metadata.setdefault("processed_docs", []).append(doc_id)
- #         return {
- #             "doc_id": doc_id,
- #             "status": "processed",
- #             "timestamp": datetime.utcnow().isoformat(),
- #         }
-
- #     @workflow_run  # <-- This becomes @temporal_workflow.run(...) if Temporal is used
- #     async def _run_impl(
- #         self, documents: List[str], batch_size: int = 2
- #     ) -> List[Dict[str, Any]]:
- #         """Main workflow logic, which becomes the official 'run' in Temporal mode."""
- #         self._logger.info("Workflow starting, state=%s", self.state)
- #         self.state.update_status("running")
-
- #         all_results = []
- #         for i in range(0, len(documents), batch_size):
- #             batch = documents[i : i + batch_size]
- #             tasks = [self.process_document(doc) for doc in batch]
- #             results = await self.executor.execute(*tasks)
-
- #             for res in results:
- #                 if isinstance(res.value, Exception):
- #                     self._logger.error(
- #                         f"Error processing document: {res.metadata.get('error')}"
- #                     )
- #                 else:
- #                     all_results.append(res.value)
-
- #         self.state.update_status("completed")
- #         return all_results
-
-
- # ########################
- # # 12. Example Local Usage
- # ########################
-
-
- # async def run_example_local():
- #     from . import AsyncIOExecutor, DocumentWorkflow  # if in a package
-
- #     executor = AsyncIOExecutor()
- #     wf = DocumentWorkflow(executor)
-
- #     documents = ["doc1", "doc2", "doc3", "doc4"]
- #     result = await wf.run(documents, batch_size=2)
-
- #     print("Local results:", result.value)
- #     print("Local workflow final state:", wf.state)
- #     # Notice `wf.state.metadata['processed_docs']` has the processed doc IDs.
-
-
- # ########################
- # # Example Temporal Usage
- # ########################
-
-
- # async def run_example_temporal():
- #     from . import TemporalExecutor, DocumentWorkflow  # if in a package
-
- #     # 1) Create a TemporalExecutor (client side)
- #     executor = TemporalExecutor(task_queue="my_task_queue")
- #     await executor.ensure_client()
-
- #     # 2) Start a worker in the same process (or do so in a separate process)
- #     asyncio.create_task(executor.start_worker())
- #     await asyncio.sleep(2)  # Wait for worker to be up
-
- #     # 3) Now we can run the workflow by normal means if we like,
- #     #    or rely on the Worker picking it up. Typically, you'd do:
- #     #    handle = await executor._client.start_workflow(...)
- #     #    but let's keep it simple and show conceptually
- #     #    that 'DocumentWorkflow' is now recognized as a real Temporal workflow
- #     print(
- #         "Temporal environment is running. Use the Worker logs or CLI to start 'DocumentWorkflow'."
- #     )