fast-agent-mcp 0.2.13__py3-none-any.whl → 0.2.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {fast_agent_mcp-0.2.13.dist-info → fast_agent_mcp-0.2.14.dist-info}/METADATA +1 -1
  2. {fast_agent_mcp-0.2.13.dist-info → fast_agent_mcp-0.2.14.dist-info}/RECORD +33 -33
  3. mcp_agent/agents/agent.py +2 -2
  4. mcp_agent/agents/base_agent.py +3 -3
  5. mcp_agent/agents/workflow/chain_agent.py +2 -2
  6. mcp_agent/agents/workflow/evaluator_optimizer.py +3 -3
  7. mcp_agent/agents/workflow/orchestrator_agent.py +3 -3
  8. mcp_agent/agents/workflow/parallel_agent.py +2 -2
  9. mcp_agent/agents/workflow/router_agent.py +2 -2
  10. mcp_agent/cli/commands/check_config.py +450 -0
  11. mcp_agent/cli/commands/setup.py +1 -1
  12. mcp_agent/cli/main.py +8 -15
  13. mcp_agent/core/agent_types.py +8 -8
  14. mcp_agent/core/direct_decorators.py +10 -8
  15. mcp_agent/core/direct_factory.py +4 -1
  16. mcp_agent/core/validation.py +6 -4
  17. mcp_agent/event_progress.py +6 -6
  18. mcp_agent/llm/augmented_llm.py +10 -2
  19. mcp_agent/llm/augmented_llm_passthrough.py +5 -3
  20. mcp_agent/llm/augmented_llm_playback.py +2 -1
  21. mcp_agent/llm/model_factory.py +7 -27
  22. mcp_agent/llm/provider_key_manager.py +83 -0
  23. mcp_agent/llm/provider_types.py +16 -0
  24. mcp_agent/llm/providers/augmented_llm_anthropic.py +5 -26
  25. mcp_agent/llm/providers/augmented_llm_deepseek.py +5 -24
  26. mcp_agent/llm/providers/augmented_llm_generic.py +2 -16
  27. mcp_agent/llm/providers/augmented_llm_openai.py +4 -26
  28. mcp_agent/llm/providers/augmented_llm_openrouter.py +17 -45
  29. mcp_agent/mcp/interfaces.py +2 -1
  30. mcp_agent/mcp_server/agent_server.py +120 -38
  31. mcp_agent/cli/commands/config.py +0 -11
  32. mcp_agent/executor/temporal.py +0 -383
  33. mcp_agent/executor/workflow.py +0 -195
  34. {fast_agent_mcp-0.2.13.dist-info → fast_agent_mcp-0.2.14.dist-info}/WHEEL +0 -0
  35. {fast_agent_mcp-0.2.13.dist-info → fast_agent_mcp-0.2.14.dist-info}/entry_points.txt +0 -0
  36. {fast_agent_mcp-0.2.13.dist-info → fast_agent_mcp-0.2.14.dist-info}/licenses/LICENSE +0 -0
mcp_agent/executor/workflow.py (removed in 0.2.14)
@@ -1,195 +0,0 @@
- from abc import ABC, abstractmethod
- from datetime import datetime
- from typing import (
-     Any,
-     Dict,
-     Generic,
-     TypeVar,
-     Union,
- )
-
- from pydantic import BaseModel, ConfigDict, Field
-
- from mcp_agent.executor.executor import Executor
-
- T = TypeVar("T")
-
-
- class WorkflowState(BaseModel):
-     """
-     Simple container for persistent workflow state.
-     This can hold fields that should persist across tasks.
-     """
-
-     status: str = "initialized"
-     metadata: Dict[str, Any] = Field(default_factory=dict)
-     updated_at: float | None = None
-     error: Dict[str, Any] | None = None
-
-     model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
-
-     def record_error(self, error: Exception) -> None:
-         self.error = {
-             "type": type(error).__name__,
-             "message": str(error),
-             "timestamp": datetime.utcnow().timestamp(),
-         }
-
-
- class WorkflowResult(BaseModel, Generic[T]):
-     value: Union[T, None] = None
-     metadata: Dict[str, Any] = Field(default_factory=dict)
-     start_time: float | None = None
-     end_time: float | None = None
-
-
- class Workflow(ABC, Generic[T]):
-     """
-     Base class for user-defined workflows.
-     Handles execution and state management.
-     Some key notes:
-         - To enable the executor engine to recognize and orchestrate the workflow,
-             - the class MUST be decorated with @workflow.
-             - the main entrypoint method MUST be decorated with @workflow_run.
-             - any task methods MUST be decorated with @workflow_task.
-
-         - Persistent state: Provides a simple `state` object for storing data across tasks.
-     """
-
-     def __init__(
-         self,
-         executor: Executor,
-         name: str | None = None,
-         metadata: Dict[str, Any] | None = None,
-         **kwargs: Any,
-     ) -> None:
-         self.executor = executor
-         self.name = name or self.__class__.__name__
-         self.init_kwargs = kwargs
-         # TODO: handle logging
-         # self._logger = logging.getLogger(self.name)
-
-         # A simple workflow state object
-         # If under Temporal, storing it as a field on this class
-         # means it can be replayed automatically
-         self.state = WorkflowState(name=name, metadata=metadata or {})
-
-     @abstractmethod
-     async def run(self, *args: Any, **kwargs: Any) -> "WorkflowResult[T]":
-         """
-         Main workflow implementation. Must be overridden by subclasses.
-         """
-
-     async def update_state(self, **kwargs) -> None:
-         """Syntactic sugar to update workflow state."""
-         for key, value in kwargs.items():
-             self.state[key] = value
-             setattr(self.state, key, value)
-
-         self.state.updated_at = datetime.utcnow().timestamp()
-
-     async def wait_for_input(self, description: str = "Provide input") -> str:
-         """
-         Convenience method for human input. Uses `human_input` signal
-         so we can unify local (console input) and Temporal signals.
-         """
-         return await self.executor.wait_for_signal("human_input", description=description)
-
-
- # ############################
- # # Example: DocumentWorkflow
- # ############################
-
-
- # @workflow_defn  # <-- This becomes @temporal_workflow.defn if in Temporal mode, else no-op
- # class DocumentWorkflow(Workflow[List[Dict[str, Any]]]):
- #     """
- #     Example workflow with persistent state.
- #     If run locally, `self.state` is ephemeral.
- #     If run in Temporal mode, `self.state` is replayed automatically.
- #     """
-
- #     @workflow_task(
- #         schedule_to_close_timeout=timedelta(minutes=10),
- #         retry_policy={"initial_interval": 1, "max_attempts": 3},
- #     )
- #     async def process_document(self, doc_id: str) -> Dict[str, Any]:
- #         """Activity that simulates document processing."""
- #         await asyncio.sleep(1)
- #         # Optionally mutate workflow state
- #         self.state.metadata.setdefault("processed_docs", []).append(doc_id)
- #         return {
- #             "doc_id": doc_id,
- #             "status": "processed",
- #             "timestamp": datetime.utcnow().isoformat(),
- #         }
-
- #     @workflow_run  # <-- This becomes @temporal_workflow.run(...) if Temporal is used
- #     async def _run_impl(
- #         self, documents: List[str], batch_size: int = 2
- #     ) -> List[Dict[str, Any]]:
- #         """Main workflow logic, which becomes the official 'run' in Temporal mode."""
- #         self._logger.info("Workflow starting, state=%s", self.state)
- #         self.state.update_status("running")
-
- #         all_results = []
- #         for i in range(0, len(documents), batch_size):
- #             batch = documents[i : i + batch_size]
- #             tasks = [self.process_document(doc) for doc in batch]
- #             results = await self.executor.execute(*tasks)
-
- #             for res in results:
- #                 if isinstance(res.value, Exception):
- #                     self._logger.error(
- #                         f"Error processing document: {res.metadata.get('error')}"
- #                     )
- #                 else:
- #                     all_results.append(res.value)
-
- #         self.state.update_status("completed")
- #         return all_results
-
-
- # ########################
- # # 12. Example Local Usage
- # ########################
-
-
- # async def run_example_local():
- #     from . import AsyncIOExecutor, DocumentWorkflow  # if in a package
-
- #     executor = AsyncIOExecutor()
- #     wf = DocumentWorkflow(executor)
-
- #     documents = ["doc1", "doc2", "doc3", "doc4"]
- #     result = await wf.run(documents, batch_size=2)
-
- #     print("Local results:", result.value)
- #     print("Local workflow final state:", wf.state)
- #     # Notice `wf.state.metadata['processed_docs']` has the processed doc IDs.
-
-
- # ########################
- # # Example Temporal Usage
- # ########################
-
-
- # async def run_example_temporal():
- #     from . import TemporalExecutor, DocumentWorkflow  # if in a package
-
- #     # 1) Create a TemporalExecutor (client side)
- #     executor = TemporalExecutor(task_queue="my_task_queue")
- #     await executor.ensure_client()
-
- #     # 2) Start a worker in the same process (or do so in a separate process)
- #     asyncio.create_task(executor.start_worker())
- #     await asyncio.sleep(2)  # Wait for worker to be up
-
- #     # 3) Now we can run the workflow by normal means if we like,
- #     #    or rely on the Worker picking it up. Typically, you'd do:
- #     #    handle = await executor._client.start_workflow(...)
- #     #    but let's keep it simple and show conceptually
- #     #    that 'DocumentWorkflow' is now recognized as a real Temporal workflow
- #     print(
- #         "Temporal environment is running. Use the Worker logs or CLI to start 'DocumentWorkflow'."
- #     )
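For context, the deleted module above defined the executor-level workflow base classes (Workflow, WorkflowResult, WorkflowState); together with the removal of mcp_agent/executor/temporal.py (+0 -383), that abstraction is absent from the 0.2.14 wheel. The sketch below is hypothetical and written only against the removed file shown above, to illustrate what code targeting it looked like: GreetingWorkflow and its field values are invented, and the import no longer resolves on 0.2.14.

```python
# Hypothetical sketch (not part of the package): targets the Workflow API
# deleted in 0.2.14, so it only runs against fast-agent-mcp <= 0.2.13.
from typing import Any

from mcp_agent.executor.workflow import Workflow, WorkflowResult  # removed in 0.2.14


class GreetingWorkflow(Workflow[str]):
    """Minimal subclass: implements the abstract run() of the removed base class."""

    async def run(self, name: str = "world", **kwargs: Any) -> WorkflowResult[str]:
        # self.state is the pydantic WorkflowState created in Workflow.__init__;
        # fields are updated here by plain attribute access.
        self.state.status = "running"
        greeting = f"Hello, {name}!"
        self.state.metadata["greeted"] = name
        self.state.status = "completed"
        return WorkflowResult[str](value=greeting, metadata={"length": len(greeting)})
```

Instantiating such a class required an Executor from mcp_agent.executor.executor, per the removed constructor signature, which is the same executor machinery pared back elsewhere in this release.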