fast-agent-mcp 0.2.17__py3-none-any.whl → 0.2.19__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1,4 +1,5 @@
 import asyncio
+import contextvars
 import functools
 from abc import ABC, abstractmethod
 from contextlib import asynccontextmanager
@@ -206,13 +207,13 @@ class AsyncioExecutor(Executor):
         else:
             # Execute the callable and await if it returns a coroutine
             loop = asyncio.get_running_loop()
-
+            ctx = contextvars.copy_context()
             # If kwargs are provided, wrap the function with partial
             if kwargs:
                 wrapped_task = functools.partial(task, **kwargs)
-                result = await loop.run_in_executor(None, wrapped_task)
+                result = await loop.run_in_executor(None, lambda: ctx.run(wrapped_task))
             else:
-                result = await loop.run_in_executor(None, task)
+                result = await loop.run_in_executor(None, lambda: ctx.run(task))

             # Handle case where the sync function returns a coroutine
             if asyncio.iscoroutine(result):
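
The change above copies the current contextvars context and runs the task inside it, so context-local state bound to ContextVar objects survives the hop into the thread pool, which loop.run_in_executor does not propagate on its own. A minimal standalone sketch of the same pattern; the request_id variable and blocking_work function are illustrative, not part of fast-agent:

import asyncio
import contextvars

# Illustrative context variable; fast-agent's real context-local state differs.
request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id", default="unset")


def blocking_work() -> str:
    # Without ctx.run() the worker thread would see the default value "unset".
    return f"working on request {request_id.get()}"


async def main() -> None:
    request_id.set("abc-123")
    loop = asyncio.get_running_loop()
    ctx = contextvars.copy_context()
    # Run the blocking callable in the default thread pool, inside the copied context.
    result = await loop.run_in_executor(None, lambda: ctx.run(blocking_work))
    print(result)  # working on request abc-123


asyncio.run(main())

asyncio.to_thread performs this copy internally; the explicit copy_context/ctx.run pair is needed here precisely because run_in_executor does not.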
@@ -234,12 +235,10 @@ class AsyncioExecutor(Executor):
         *tasks: Callable[..., R] | Coroutine[Any, Any, R],
         **kwargs: Any,
     ) -> List[R | BaseException]:
-        # TODO: saqadri - validate if async with self.execution_context() is needed here
-        async with self.execution_context():
-            return await asyncio.gather(
-                *(self._execute_task(task, **kwargs) for task in tasks),
-                return_exceptions=True,
-            )
+        return await asyncio.gather(
+            *(self._execute_task(task, **kwargs) for task in tasks),
+            return_exceptions=True,
+        )

     async def execute_streaming(
         self,
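
The simplified body above drops the execution_context wrapper and relies on asyncio.gather with return_exceptions=True, which collects failures as items in the result list instead of letting the first exception cancel the batch. A short sketch of that behavior, independent of the package:

import asyncio


async def succeeds() -> str:
    return "ok"


async def fails() -> str:
    raise ValueError("boom")


async def main() -> None:
    # With return_exceptions=True the exception is returned as a list item
    # instead of propagating and cancelling the remaining tasks.
    results = await asyncio.gather(succeeds(), fails(), return_exceptions=True)
    for item in results:
        if isinstance(item, BaseException):
            print("task failed:", item)
        else:
            print("task result:", item)


asyncio.run(main())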
@@ -171,6 +171,7 @@ class AugmentedLLM(ContextDependent, AugmentedLLMProtocol, Generic[MessageParamT
         # We never expect this for structured() calls - this is for interactive use - developers
         # can do this programatically
         # TODO -- create a "fast-agent" control role rather than magic strings
+
         if multipart_messages[-1].first_text().startswith("***SAVE_HISTORY"):
             parts: list[str] = multipart_messages[-1].first_text().split(" ", 1)
             filename: str = (
@@ -220,6 +221,7 @@ class AugmentedLLM(ContextDependent, AugmentedLLMProtocol, Generic[MessageParamT
         request_params: RequestParams | None = None,
     ) -> Tuple[ModelT | None, PromptMessageMultipart]:
         """Return a structured response from the LLM using the provided messages."""
+
         self._precall(multipart_messages)
         result, assistant_response = await self._apply_prompt_provider_specific_structured(
             multipart_messages, model, request_params
@@ -347,10 +347,11 @@ class AnthropicAugmentedLLM(AugmentedLLM[MessageParam, Message]):
     ) -> Tuple[ModelT | None, PromptMessageMultipart]:  # noqa: F821
         request_params = self.get_request_params(request_params)

+        # TODO - convert this to use Tool Calling convention for Anthropic Structured outputs
         multipart_messages[-1].add_text(
             """YOU MUST RESPOND IN THE FOLLOWING FORMAT:
             {schema}
-            RESPOND ONLY WITH THE JSON, NO PREAMBLE OR CODE FENCES """.format(
+            RESPOND ONLY WITH THE JSON, NO PREAMBLE, CODE FENCES OR 'properties' ARE PERMISSABLE """.format(
                 schema=model.model_json_schema()
             )
         )
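
The Anthropic structured-output path injects the Pydantic JSON schema into the prompt and expects bare JSON back (the added TODO notes that tool calling would be the more robust mechanism). A hedged sketch of that schema-in-prompt round trip using plain Pydantic v2; WeatherReport and parse_llm_reply are hypothetical names used only for illustration, not package APIs:

from pydantic import BaseModel, ValidationError


class WeatherReport(BaseModel):
    # Hypothetical response model, not part of fast-agent.
    city: str
    temperature_c: float


def build_format_instruction(model_cls: type[BaseModel]) -> str:
    # Same idea as the diff: embed the JSON schema and demand bare JSON in return.
    return (
        "YOU MUST RESPOND IN THE FOLLOWING FORMAT:\n"
        f"{model_cls.model_json_schema()}\n"
        "RESPOND ONLY WITH THE JSON, NO PREAMBLE OR CODE FENCES"
    )


def parse_llm_reply(raw: str) -> WeatherReport | None:
    # Validate whatever the model sent back; None signals it ignored the format.
    try:
        return WeatherReport.model_validate_json(raw)
    except ValidationError:
        return None


print(build_format_instruction(WeatherReport))
print(parse_llm_reply('{"city": "Oslo", "temperature_c": 4.5}'))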
@@ -8,9 +8,7 @@ DEFAULT_DEEPSEEK_MODEL = "deepseekchat"  # current Deepseek only has two type mo

 class DeepSeekAugmentedLLM(OpenAIAugmentedLLM):
     def __init__(self, *args, **kwargs) -> None:
-        super().__init__(
-            *args, provider=Provider.DEEPSEEK, **kwargs
-        )  # Properly pass args and kwargs to parent
+        super().__init__(*args, provider=Provider.DEEPSEEK, **kwargs)

     def _initialize_default_params(self, kwargs: dict) -> RequestParams:
         """Initialize Deepseek-specific default parameters"""
@@ -222,7 +222,10 @@ class OpenAIAugmentedLLM(AugmentedLLM[ChatCompletionMessageParam, ChatCompletion
                     method="tools/call",
                     params=CallToolRequestParams(
                         name=tool_call.function.name,
-                        arguments=from_json(tool_call.function.arguments, allow_partial=True),
+                        arguments={}
+                        if not tool_call.function.arguments
+                        or tool_call.function.arguments.strip() == ""
+                        else from_json(tool_call.function.arguments, allow_partial=True),
                     ),
                 )
                 result = await self.call_tool(tool_call_request, tool_call.id)
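
The new guard above handles tool calls whose arguments string is empty or whitespace-only, which some models emit for zero-argument tools and which the JSON parser would otherwise reject. A minimal sketch of the same check, assuming the from_json used here is pydantic_core.from_json; parse_tool_arguments is an illustrative helper, not a package function:

from pydantic_core import from_json


def parse_tool_arguments(raw: str | None) -> dict:
    # Mirror the guard in the diff: missing, empty or whitespace-only
    # argument strings become an empty argument dict.
    if not raw or raw.strip() == "":
        return {}
    # allow_partial tolerates JSON that was cut off mid-stream.
    return from_json(raw, allow_partial=True)


print(parse_tool_arguments(None))                # {}
print(parse_tool_arguments("   "))               # {}
print(parse_tool_arguments('{"city": "Oslo"}'))  # {'city': 'Oslo'}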
@@ -21,6 +21,7 @@ from mcp.types import (
     TextContent,
     Tool,
 )
+from opentelemetry import trace
 from pydantic import AnyUrl, BaseModel, ConfigDict

 from mcp_agent.context_dependent import ContextDependent
@@ -469,16 +470,20 @@ class MCPAggregator(ContextDependent):
             },
         )

-        return await self._execute_on_server(
-            server_name=server_name,
-            operation_type="tool",
-            operation_name=local_tool_name,
-            method_name="call_tool",
-            method_args={"name": local_tool_name, "arguments": arguments},
-            error_factory=lambda msg: CallToolResult(
-                isError=True, content=[TextContent(type="text", text=msg)]
-            ),
-        )
+        tracer = trace.get_tracer(__name__)
+        with tracer.start_as_current_span(f"MCP Tool: {server_name}/{local_tool_name}"):
+            trace.get_current_span().set_attribute("tool_name", local_tool_name)
+            trace.get_current_span().set_attribute("server_name", server_name)
+            return await self._execute_on_server(
+                server_name=server_name,
+                operation_type="tool",
+                operation_name=local_tool_name,
+                method_name="call_tool",
+                method_args={"name": local_tool_name, "arguments": arguments},
+                error_factory=lambda msg: CallToolResult(
+                    isError=True, content=[TextContent(type="text", text=msg)]
+                ),
+            )

     async def get_prompt(
         self,
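
The replacement above wraps each tool call in an OpenTelemetry span named after the server and tool. With no tracer provider configured these API calls are no-ops; the sketch below adds an illustrative console exporter so the span is visible, which is wiring this diff itself does not set up, and stubs out the actual tool execution:

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

# Illustrative exporter wiring; fast-agent configures its provider elsewhere.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

tracer = trace.get_tracer(__name__)

server_name, local_tool_name = "fetch", "fetch_url"  # example values only


def execute_tool_stub() -> str:
    # Stand-in for MCPAggregator._execute_on_server(...).
    return "tool result"


# Same shape as the diff: one span per tool call, tagged with tool and server names.
with tracer.start_as_current_span(f"MCP Tool: {server_name}/{local_tool_name}") as span:
    span.set_attribute("tool_name", local_tool_name)
    span.set_attribute("server_name", server_name)
    result = execute_tool_stub()

print(result)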
@@ -262,7 +262,7 @@ class MCPConnectionManager(ContextDependent):
         if config.transport == "stdio":
             server_params = StdioServerParameters(
                 command=config.command,
-                args=config.args,
+                args=config.args if config.args is not None else [],
                 env={**get_default_environment(), **(config.env or {})},
             )
         # Create custom error handler to ensure all output is captured
@@ -1,138 +0,0 @@
-"""
-Telemetry manager that defines distributed tracing decorators for OpenTelemetry traces/spans
-for the Logger module for MCP Agent
-"""
-
-import asyncio
-import functools
-from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple
-
-from opentelemetry import trace
-from opentelemetry.context import Context as OtelContext
-from opentelemetry.propagate import extract as otel_extract
-from opentelemetry.trace import SpanKind, Status, StatusCode, set_span_in_context
-from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
-
-from mcp_agent.context_dependent import ContextDependent
-
-if TYPE_CHECKING:
-    from mcp_agent.context import Context
-
-
-class TelemetryManager(ContextDependent):
-    """
-    Simple manager for creating OpenTelemetry spans automatically.
-    Decorator usage: @telemetry.traced("SomeSpanName")
-    """
-
-    def __init__(self, context: Optional["Context"] = None, **kwargs) -> None:
-        # If needed, configure resources, exporters, etc.
-        # E.g.: from opentelemetry.sdk.trace import TracerProvider
-        # trace.set_tracer_provider(TracerProvider(...))
-        super().__init__(context=context, **kwargs)
-
-    def traced(
-        self,
-        name: str | None = None,
-        kind: SpanKind = SpanKind.INTERNAL,
-        attributes: Dict[str, Any] = None,
-    ) -> Callable:
-        """
-        Decorator that automatically creates and manages a span for a function.
-        Works for both async and sync functions.
-        """
-
-        def decorator(func):
-            span_name = name or f"{func.__module__}.{func.__qualname__}"
-
-            tracer = self.context.tracer or trace.get_tracer("mcp_agent")
-
-            @functools.wraps(func)
-            async def async_wrapper(*args, **kwargs):
-                with tracer.start_as_current_span(span_name, kind=kind) as span:
-                    if attributes:
-                        for k, v in attributes.items():
-                            span.set_attribute(k, v)
-                    # Record simple args
-                    self._record_args(span, args, kwargs)
-                    try:
-                        res = await func(*args, **kwargs)
-                        return res
-                    except Exception as e:
-                        span.record_exception(e)
-                        span.set_status(Status(StatusCode.ERROR))
-                        raise
-
-            @functools.wraps(func)
-            def sync_wrapper(*args, **kwargs):
-                with tracer.start_as_current_span(span_name, kind=kind) as span:
-                    if attributes:
-                        for k, v in attributes.items():
-                            span.set_attribute(k, v)
-                    # Record simple args
-                    self._record_args(span, args, kwargs)
-                    try:
-                        res = func(*args, **kwargs)
-                        return res
-                    except Exception as e:
-                        span.record_exception(e)
-                        span.set_status(Status(StatusCode.ERROR))
-                        raise
-
-            if asyncio.iscoroutinefunction(func):
-                return async_wrapper
-            else:
-                return sync_wrapper
-
-        return decorator
-
-    def _record_args(self, span, args, kwargs) -> None:
-        """Optionally record primitive args as span attributes."""
-        for i, arg in enumerate(args):
-            if isinstance(arg, (str, int, float, bool)):
-                span.set_attribute(f"arg_{i}", str(arg))
-        for k, v in kwargs.items():
-            if isinstance(v, (str, int, float, bool)):
-                span.set_attribute(k, str(v))
-
-
-class MCPRequestTrace:
-    """Helper class for trace context propagation in MCP"""
-
-    @staticmethod
-    def start_span_from_mcp_request(
-        method: str, params: Dict[str, Any]
-    ) -> Tuple[trace.Span, OtelContext]:
-        """Extract trace context from incoming MCP request and start a new span"""
-        # Extract trace context from _meta if present
-        carrier = {}
-        _meta = params.get("_meta", {})
-        if "traceparent" in _meta:
-            carrier["traceparent"] = _meta["traceparent"]
-        if "tracestate" in _meta:
-            carrier["tracestate"] = _meta["tracestate"]
-
-        # Extract context and start span
-        ctx = otel_extract(carrier, context=OtelContext())
-        tracer = trace.get_tracer(__name__)
-        span = tracer.start_span(method, context=ctx, kind=SpanKind.SERVER)
-        return span, set_span_in_context(span)
-
-    @staticmethod
-    def inject_trace_context(arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Inject current trace context into outgoing MCP request arguments"""
-        carrier = {}
-        TraceContextTextMapPropagator().inject(carrier)
-
-        # Create or update _meta with trace context
-        _meta = arguments.get("_meta", {})
-        if "traceparent" in carrier:
-            _meta["traceparent"] = carrier["traceparent"]
-        if "tracestate" in carrier:
-            _meta["tracestate"] = carrier["tracestate"]
-        arguments["_meta"] = _meta
-
-        return arguments
-
-
-telemetry = TelemetryManager()