fast-agent-mcp 0.0.12__py3-none-any.whl → 0.0.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fast-agent-mcp might be problematic. Click here for more details.

@@ -17,13 +17,27 @@ fast = FastAgent("Agent Chaining")
17
17
  Respond only with the post, never use hashtags.
18
18
  """,
19
19
  )
20
+ @fast.chain(
21
+ name="post_writer",
22
+ sequence=["url_fetcher", "social_media"],
23
+ )
20
24
  async def main():
21
25
  async with fast.run() as agent:
22
- await agent.social_media(
23
- await agent.url_fetcher("http://llmindset.co.uk/resources/mcp-hfspace/")
24
- )
26
+ # using chain workflow
27
+ await agent.post_writer.prompt()
28
+
29
+ # calling directly
30
+ # await agent.url_fetcher("http://llmindset.co.uk/resources/mcp-hfspace/")
31
+ # await agent.social_media(
32
+ # await agent.url_fetcher("http://llmindset.co.uk/resources/mcp-hfspace/")
33
+ # )
34
+
35
+ # agents can also be accessed like dictionaries:
36
+ # await agent["post_writer"].prompt()
37
+
25
38
 
26
- # alternative syntax for above is agent["social_media"].send(message)
39
+ # alternative syntax for above is result = agent["post_writer"].send(message)
40
+ # alternative syntax for above is result = agent["post_writer"].prompt()
27
41
 
28
42
 
29
43
  if __name__ == "__main__":
@@ -9,7 +9,7 @@ from mcp_agent.core.fastagent import FastAgent
9
9
  fast = FastAgent("Evaluator-Optimizer")
10
10
 
11
11
 
12
- # Define optimizer agent
12
+ # Define generator agent
13
13
  @fast.agent(
14
14
  name="generator",
15
15
  instruction="""You are a career coach specializing in cover letter writing.
@@ -44,7 +44,7 @@ fast = FastAgent("Evaluator-Optimizer")
44
44
  # Define the evaluator-optimizer workflow
45
45
  @fast.evaluator_optimizer(
46
46
  name="cover_letter_writer",
47
- generator="generator", # Reference to optimizer agent
47
+ generator="generator", # Reference to generator agent
48
48
  evaluator="evaluator", # Reference to evaluator agent
49
49
  min_rating="EXCELLENT", # Strive for excellence
50
50
  max_refinements=3, # Maximum iterations
@@ -0,0 +1,24 @@
1
+ # Please edit this configuration file to match your environment (on Windows).
2
+ # Examples in comments below - check/change the paths.
3
+ #
4
+ #
5
+
6
+ execution_engine: asyncio
7
+ logger:
8
+ type: file
9
+ level: error
10
+ truncate_tools: true
11
+
12
+ mcp:
13
+ servers:
14
+ filesystem:
15
+ # On windows update the command and arguments to use `node` and the absolute path to the server.
16
+ # Use `npm i -g @modelcontextprotocol/server-filesystem` to install the server globally.
17
+ # Use `npm -g root` to find the global node_modules path.
18
+ # command: "node"
19
+ # args: ["c:/Program Files/nodejs/node_modules/@modelcontextprotocol/server-filesystem/dist/index.js","."]
20
+ command: "npx"
21
+ args: ["-y", "@modelcontextprotocol/server-filesystem", "."]
22
+ fetch:
23
+ command: "uvx"
24
+ args: ["mcp-server-fetch"]
@@ -45,10 +45,11 @@ fast = FastAgent("Orchestrator-Workers")
45
45
  @fast.orchestrator(
46
46
  name="orchestrate",
47
47
  agents=["finder", "writer", "proofreader"],
48
- model="sonnet",
48
+ plan_type="iterative",
49
49
  )
50
50
  async def main():
51
51
  async with fast.run() as agent:
52
+
52
53
  await agent.author(
53
54
  "write a 250 word short story about kittens discovering a castle, and save it to short_story.md"
54
55
  )
@@ -65,7 +66,7 @@ async def main():
65
66
 
66
67
  # Send the task
67
68
  await agent.orchestrate(task)
68
-
69
+ await agent()
69
70
 
70
71
  if __name__ == "__main__":
71
72
  asyncio.run(main())
@@ -69,8 +69,9 @@ async def main():
69
69
  # Use the app's context manager
70
70
  async with fast.run() as agent:
71
71
  await agent.parallel(f"student short story submission: {SHORT_STORY}")
72
+
72
73
  # follow-on prompt to task agent
73
- # await agent.style_enforcer.prompt(default="STOP")
74
+ await agent.style_enforcer.prompt(default_prompt="STOP")
74
75
 
75
76
 
76
77
  if __name__ == "__main__":
@@ -15,6 +15,7 @@ from mcp.types import (
15
15
  )
16
16
 
17
17
  from mcp_agent.context_dependent import ContextDependent
18
+ from mcp_agent.core.exceptions import PromptExitError
18
19
  from mcp_agent.event_progress import ProgressAction
19
20
  from mcp_agent.mcp.mcp_aggregator import MCPAggregator, SEP
20
21
  from mcp_agent.workflows.llm.llm_selector import ModelSelector
@@ -608,6 +609,8 @@ class AugmentedLLM(ContextDependent, AugmentedLLMProtocol[MessageParamT, Message
608
609
  result = postprocess
609
610
 
610
611
  return result
612
+ except PromptExitError:
613
+ raise
611
614
  except Exception as e:
612
615
  return CallToolResult(
613
616
  isError=True,
@@ -3,6 +3,7 @@ from enum import Enum, auto
3
3
  from typing import Optional, Type, Dict, Union, Callable
4
4
 
5
5
  from mcp_agent.agents.agent import Agent
6
+ from mcp_agent.core.exceptions import ModelConfigError
6
7
  from mcp_agent.workflows.llm.augmented_llm_anthropic import AnthropicAugmentedLLM
7
8
  from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
8
9
  from mcp_agent.workflows.llm.augmented_llm import RequestParams
@@ -53,6 +54,7 @@ class ModelFactory:
53
54
 
54
55
  # TODO -- add context window size information for display/management
55
56
  # TODO -- add audio support for gpt-4o-audio-preview
57
+ # TODO -- bring model parameter configuration here
56
58
  # Mapping of model names to their default providers
57
59
  DEFAULT_PROVIDERS = {
58
60
  "gpt-4o": Provider.OPENAI,
@@ -80,6 +82,7 @@ class ModelFactory:
80
82
  "claude": "claude-3-5-sonnet-latest",
81
83
  "haiku": "claude-3-5-haiku-latest",
82
84
  "haiku3": "claude-3-haiku-20240307",
85
+ "haiku35": "claude-3-5-haiku-latest",
83
86
  "opus": "claude-3-opus-latest",
84
87
  "opus3": "claude-3-opus-latest",
85
88
  }
@@ -121,7 +124,7 @@ class ModelFactory:
121
124
  if provider is None:
122
125
  provider = cls.DEFAULT_PROVIDERS.get(model_name)
123
126
  if provider is None:
124
- raise ValueError(f"Unknown model: {model_name}")
127
+ raise ModelConfigError(f"Unknown model: {model_name}")
125
128
 
126
129
  return ModelConfig(
127
130
  provider=provider, model_name=model_name, reasoning_effort=reasoning_effort
@@ -173,16 +176,16 @@ class ModelFactory:
173
176
  "request_params": factory_params,
174
177
  "name": kwargs.get("name"),
175
178
  }
176
-
179
+
177
180
  # Add reasoning effort if available
178
181
  if config.reasoning_effort:
179
182
  llm_args["reasoning_effort"] = config.reasoning_effort.value
180
-
183
+
181
184
  # Forward all other kwargs (including verb)
182
185
  for key, value in kwargs.items():
183
186
  if key not in ["agent", "default_request_params", "name"]:
184
187
  llm_args[key] = value
185
-
188
+
186
189
  llm = llm_class(**llm_args)
187
190
  return llm
188
191
 
@@ -1,38 +0,0 @@
1
- import asyncio
2
-
3
- from mcp_agent.core.fastagent import FastAgent
4
- # from rich import print
5
-
6
- agents = FastAgent(name="Researcher")
7
-
8
-
9
- @agents.agent(
10
- "Researcher",
11
- instruction="""
12
- You are a research assistant, with access to internet search (via Brave),
13
- website fetch, a python interpreter (you can install packages with uv) and a filesystem.
14
- Use the current working directory to save and create files with both the Interpreter and Filesystem tools.
15
- The interpreter has numpy, pandas, matplotlib and seaborn already installed
16
- """,
17
- servers=["brave", "interpreter", "filesystem", "fetch"],
18
- )
19
- async def main():
20
- research_prompt = """
21
- Produce an investment report for the company Eutelsat. The final report should be saved in the filesystem in markdown format, and
22
- contain at least the following:
23
- 1 - A brief description of the company
24
- 2 - Current financial position (find data, create and incorporate charts)
25
- 3 - A PESTLE analysis
26
- 4 - An investment thesis for the next 3 years. Include both 'buy side' and 'sell side' arguments, and a final
27
- summary and recommendation.
28
- Todays date is 15 February 2025. Include the main data sources consulted in presenting the report.""" # noqa: F841
29
-
30
- async with agents.run() as agent:
31
- await agent.prompt()
32
-
33
- # await agent.prompt(default="STOP")
34
- # await agent.prompt(default=research_prompt)
35
-
36
-
37
- if __name__ == "__main__":
38
- asyncio.run(main())
@@ -1,17 +0,0 @@
1
- import asyncio
2
- from mcp_agent.core.fastagent import FastAgent
3
-
4
- # Create the application
5
- fast = FastAgent("FastAgent Example")
6
-
7
-
8
- # Define the agent
9
- @fast.agent(servers=["fetch"])
10
- async def main():
11
- # use the --model command line switch or agent arguments to change model
12
- async with fast.run() as agent:
13
- await agent()
14
-
15
-
16
- if __name__ == "__main__":
17
- asyncio.run(main())
@@ -1,22 +0,0 @@
1
- import asyncio
2
- from mcp_agent.core.fastagent import FastAgent
3
-
4
- # Create the application
5
- agent_app = FastAgent("FastAgent Example")
6
- # Uncomment the below to disable human input callback tool
7
- # agent_app.app._human_input_callback = None
8
-
9
-
10
- # Define the agent
11
- @agent_app.agent(
12
- instruction="You are a helpful AI Agent",
13
- servers=[],
14
- )
15
- async def main():
16
- # use the --model= command line switch to specify model
17
- async with agent_app.run() as agent:
18
- await agent()
19
-
20
-
21
- if __name__ == "__main__":
22
- asyncio.run(main())