fast-agent-mcp 0.2.44__py3-none-any.whl → 0.2.46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fast-agent-mcp might be problematic.

mcp_agent/mcp/__init__.py CHANGED
@@ -0,0 +1,50 @@
+"""
+MCP (Model Context Protocol) integration components.
+"""
+
+from mcp.types import PromptMessage
+
+from .helpers import (
+    get_image_data,
+    get_resource_text,
+    get_resource_uri,
+    get_text,
+    is_image_content,
+    is_resource_content,
+    is_resource_link,
+    is_text_content,
+)
+from .interfaces import (
+    AgentProtocol,
+    AugmentedLLMProtocol,
+    MCPConnectionManagerProtocol,
+    ModelFactoryClassProtocol,
+    ModelT,
+    ServerConnection,
+    ServerRegistryProtocol,
+)
+from .prompt_message_multipart import PromptMessageMultipart
+
+__all__ = [
+    # Types from mcp.types
+    "PromptMessage",
+    # Multipart message handling
+    "PromptMessageMultipart",
+    # Protocol interfaces
+    "AugmentedLLMProtocol",
+    "AgentProtocol",
+    "MCPConnectionManagerProtocol",
+    "ServerRegistryProtocol",
+    "ServerConnection",
+    "ModelFactoryClassProtocol",
+    "ModelT",
+    # Helper functions
+    "get_text",
+    "get_image_data",
+    "get_resource_uri",
+    "is_text_content",
+    "is_image_content",
+    "is_resource_content",
+    "is_resource_link",
+    "get_resource_text",
+]
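
The new top-level __init__ re-exports the content helpers, the protocol types and PromptMessageMultipart, so downstream code can import them from mcp_agent.mcp directly. A minimal usage sketch follows; it assumes get_text and is_text_content behave as their names suggest (return the text of a TextContent block, or None/False otherwise), which this diff does not spell out.

from mcp.types import PromptMessage, TextContent

from mcp_agent.mcp import get_text, is_text_content

# Build a plain MCP prompt message with one text block (mcp.types API).
message = PromptMessage(
    role="user",
    content=TextContent(type="text", text="Hello from MCP"),
)

# Assumed behaviour: the helpers inspect a content block generically.
if is_text_content(message.content):
    print(get_text(message.content))  # expected: "Hello from MCP"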
@@ -1,3 +1,25 @@
 """
 Helper modules for working with MCP content.
-"""
+"""
+
+from .content_helpers import (
+    get_image_data,
+    get_resource_text,
+    get_resource_uri,
+    get_text,
+    is_image_content,
+    is_resource_content,
+    is_resource_link,
+    is_text_content,
+)
+
+__all__ = [
+    "get_text",
+    "get_image_data",
+    "get_resource_uri",
+    "is_text_content",
+    "is_image_content",
+    "is_resource_content",
+    "is_resource_link",
+    "get_resource_text",
+]
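
The helpers package now re-exports the same functions from .content_helpers, so mcp_agent.mcp and mcp_agent.mcp.helpers expose an identical surface. A sketch of the resource-oriented helpers, assuming get_resource_uri and get_resource_text unwrap an EmbeddedResource as their names suggest (behaviour inferred, not shown in this diff):

from mcp.types import EmbeddedResource, TextResourceContents

from mcp_agent.mcp.helpers import get_resource_text, get_resource_uri

# An embedded text resource as defined by mcp.types.
resource = EmbeddedResource(
    type="resource",
    resource=TextResourceContents(
        uri="file:///notes/readme.txt",
        mimeType="text/plain",
        text="resource body",
    ),
)

# Assumed behaviour, based on the helper names only:
print(get_resource_uri(resource))   # expected: "file:///notes/readme.txt"
print(get_resource_text(resource))  # expected: "resource body"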
@@ -36,6 +36,17 @@ if TYPE_CHECKING:
     from mcp_agent.llm.usage_tracking import UsageAccumulator
 
 
+__all__ = [
+    "MCPConnectionManagerProtocol",
+    "ServerRegistryProtocol",
+    "ServerConnection",
+    "AugmentedLLMProtocol",
+    "AgentProtocol",
+    "ModelFactoryClassProtocol",
+    "ModelT",
+]
+
+
 @runtime_checkable
 class MCPConnectionManagerProtocol(Protocol):
     """Protocol for MCPConnectionManager functionality needed by ServerRegistry."""
@@ -101,7 +112,7 @@ class AugmentedLLMProtocol(Protocol):
 
     async def structured(
         self,
-        multipart_messages: List[PromptMessageMultipart],
+        multipart_messages: List[Union[PromptMessageMultipart, PromptMessage]],
         model: Type[ModelT],
         request_params: RequestParams | None = None,
     ) -> Tuple[ModelT | None, PromptMessageMultipart]:
@@ -110,7 +121,7 @@ class AugmentedLLMProtocol(Protocol):
 
     async def generate(
         self,
-        multipart_messages: List[PromptMessageMultipart],
+        multipart_messages: List[Union[PromptMessageMultipart, PromptMessage]],
         request_params: RequestParams | None = None,
     ) -> PromptMessageMultipart:
         """
mcp_agent/py.typed ADDED
File without changes
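
py.typed is the PEP 561 marker file; shipping it tells type checkers that mcp_agent carries inline type information, so signatures like the ones above are checked in downstream projects instead of degrading to Any. A small illustration, assuming PromptMessageMultipart stores its blocks in a content list as in the constructor sketch above; the helper itself is hypothetical:

from mcp_agent.mcp import PromptMessageMultipart


def count_blocks(message: PromptMessageMultipart) -> int:
    # Hypothetical helper: with py.typed installed, mypy/pyright resolve
    # PromptMessageMultipart's annotations and can verify this return type.
    return len(message.content)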
@@ -18,7 +18,7 @@ fast = FastAgent("Evaluator-Optimizer")
     candidate details, and company information. Tailor the response to the company and job requirements.
     """,
     servers=["fetch"],
-    model="haiku3",
+    model="gpt-4.1-nano",
     use_history=True,
 )
 # Define evaluator agent
@@ -40,7 +40,7 @@ fast = FastAgent("Evaluator-Optimizer")
     Summarize your evaluation as a structured response with:
     - Overall quality rating.
     - Specific feedback and areas for improvement.""",
-    model="gpt-4.1",
+    model="sonnet",
 )
 # Define the evaluator-optimizer workflow
 @fast.evaluator_optimizer(
@@ -43,7 +43,7 @@ SAMPLE_REQUESTS = [
 )
 @fast.router(
     name="route",
-    model="sonnet",
+    model="gpt-4.1",
     agents=["code_expert", "general_assistant", "fetcher"],
 )
 async def main() -> None:
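
The two hunks above only swap the model aliases used by the bundled examples: the generator moves from haiku3 to gpt-4.1-nano, the evaluator from gpt-4.1 to sonnet, and the router from sonnet to gpt-4.1. For orientation, here is a pared-down agent definition using the same decorator parameters visible in the hunk context; the import path, agent name, instruction text and prompt are assumptions, not part of this diff:

from mcp_agent.core.fastagent import FastAgent  # import path assumed from fast-agent docs

fast = FastAgent("Example")


@fast.agent(
    name="writer",
    instruction="Draft a short cover letter.",  # illustrative
    servers=["fetch"],
    model="gpt-4.1-nano",  # provider/model alias, as in the hunks above
    use_history=True,
)
async def main() -> None:
    async with fast.run() as agent:
        await agent.writer("Write one paragraph.")


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())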
@@ -1,6 +1,8 @@
+from json import JSONDecodeError
 from typing import Optional, Union
 
 from mcp.types import CallToolResult
+from rich.json import JSON
 from rich.panel import Panel
 from rich.text import Text
 
@@ -52,13 +54,15 @@ class ConsoleDisplay:
         elif len(content) == 1 and is_text_content(content[0]):
             text_content = get_text(content[0])
             char_count = len(text_content) if text_content else 0
-            status = f"Text Only ({char_count} chars)"
+            status = f"Text Only {char_count} chars"
         else:
             text_count = sum(1 for item in content if is_text_content(item))
             if text_count == len(content):
                 status = f"{len(content)} Text Blocks" if len(content) > 1 else "1 Text Block"
             else:
-                status = f"{len(content)} Content Blocks"
+                status = (
+                    f"{len(content)} Content Blocks" if len(content) > 1 else "1 Content Block"
+                )
 
         # Combined separator and status line
         left = f"[{block_color}]▎[/{block_color}][{text_color}]▶[/{text_color}]{f' [{block_color}]{name}[/{block_color}]' if name else ''}"
@@ -357,16 +361,22 @@ class ConsoleDisplay:
         right = f"[dim]{model}[/dim]" if model else ""
         self._create_combined_separator_status(left, right)
 
-        # Display content as markdown if it looks like markdown, otherwise as text
         if isinstance(message_text, str):
             content = message_text
-            # if any(marker in content for marker in ["##", "**", "*", "`", "---", "###"]):
-            md = Markdown(content, code_theme=CODE_STYLE)
-            console.console.print(md, markup=self._markup)
-            # else:
-            #     console.console.print(content, markup=self._markup)
+
+            # Try to detect and pretty print JSON
+            try:
+                import json
+
+                json.loads(content)
+                json = JSON(message_text)
+                console.console.print(json, markup=self._markup)
+            except (JSONDecodeError, TypeError, ValueError):
+                # Not JSON, treat as markdown
+                md = Markdown(content, code_theme=CODE_STYLE)
+                console.console.print(md, markup=self._markup)
         else:
-            # Handle Text objects directly
+            # Handle Rich Text objects directly
             console.console.print(message_text, markup=self._markup)
 
         # Bottom separator with server list: ─ [server1] [server2] ────────
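
The new display path parses the message string as JSON first and renders it with rich's JSON renderer, falling back to Markdown when parsing fails. The same pattern as a standalone sketch outside ConsoleDisplay, using a local Console rather than the package's console wrapper:

import json

from rich.console import Console
from rich.json import JSON
from rich.markdown import Markdown

console = Console()


def show(message_text: str) -> None:
    # Parse first; render syntax-highlighted JSON on success, Markdown otherwise.
    try:
        json.loads(message_text)
    except (json.JSONDecodeError, TypeError):
        console.print(Markdown(message_text))
    else:
        console.print(JSON(message_text))


show('{"status": "ok", "blocks": 3}')            # pretty-printed JSON
show("## Result\n\nAll **3** blocks rendered.")  # rendered as Markdown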