fastmcp 2.12.2__py3-none-any.whl → 2.12.4__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (54)
  1. fastmcp/cli/claude.py +1 -10
  2. fastmcp/cli/cli.py +45 -25
  3. fastmcp/cli/install/__init__.py +2 -0
  4. fastmcp/cli/install/claude_code.py +1 -10
  5. fastmcp/cli/install/claude_desktop.py +1 -9
  6. fastmcp/cli/install/cursor.py +2 -18
  7. fastmcp/cli/install/gemini_cli.py +241 -0
  8. fastmcp/cli/install/mcp_json.py +1 -9
  9. fastmcp/cli/run.py +2 -86
  10. fastmcp/client/auth/oauth.py +50 -37
  11. fastmcp/client/client.py +18 -8
  12. fastmcp/client/elicitation.py +6 -1
  13. fastmcp/client/transports.py +1 -1
  14. fastmcp/contrib/component_manager/component_service.py +1 -1
  15. fastmcp/contrib/mcp_mixin/README.md +3 -3
  16. fastmcp/contrib/mcp_mixin/mcp_mixin.py +41 -6
  17. fastmcp/experimental/utilities/openapi/director.py +8 -1
  18. fastmcp/experimental/utilities/openapi/schemas.py +31 -5
  19. fastmcp/prompts/prompt.py +10 -8
  20. fastmcp/resources/resource.py +14 -11
  21. fastmcp/resources/template.py +12 -10
  22. fastmcp/server/auth/auth.py +10 -4
  23. fastmcp/server/auth/oauth_proxy.py +93 -23
  24. fastmcp/server/auth/oidc_proxy.py +348 -0
  25. fastmcp/server/auth/providers/auth0.py +174 -0
  26. fastmcp/server/auth/providers/aws.py +237 -0
  27. fastmcp/server/auth/providers/azure.py +6 -2
  28. fastmcp/server/auth/providers/descope.py +172 -0
  29. fastmcp/server/auth/providers/github.py +6 -2
  30. fastmcp/server/auth/providers/google.py +6 -2
  31. fastmcp/server/auth/providers/workos.py +6 -2
  32. fastmcp/server/context.py +17 -16
  33. fastmcp/server/dependencies.py +18 -5
  34. fastmcp/server/http.py +1 -1
  35. fastmcp/server/middleware/logging.py +147 -116
  36. fastmcp/server/middleware/middleware.py +3 -2
  37. fastmcp/server/openapi.py +5 -1
  38. fastmcp/server/server.py +43 -36
  39. fastmcp/settings.py +42 -6
  40. fastmcp/tools/tool.py +105 -87
  41. fastmcp/tools/tool_transform.py +1 -1
  42. fastmcp/utilities/json_schema.py +18 -1
  43. fastmcp/utilities/logging.py +66 -4
  44. fastmcp/utilities/mcp_server_config/v1/environments/uv.py +4 -39
  45. fastmcp/utilities/mcp_server_config/v1/mcp_server_config.py +3 -2
  46. fastmcp/utilities/mcp_server_config/v1/schema.json +2 -1
  47. fastmcp/utilities/storage.py +204 -0
  48. fastmcp/utilities/tests.py +8 -6
  49. fastmcp/utilities/types.py +9 -5
  50. {fastmcp-2.12.2.dist-info → fastmcp-2.12.4.dist-info}/METADATA +121 -48
  51. {fastmcp-2.12.2.dist-info → fastmcp-2.12.4.dist-info}/RECORD +54 -48
  52. {fastmcp-2.12.2.dist-info → fastmcp-2.12.4.dist-info}/WHEEL +0 -0
  53. {fastmcp-2.12.2.dist-info → fastmcp-2.12.4.dist-info}/entry_points.txt +0 -0
  54. {fastmcp-2.12.2.dist-info → fastmcp-2.12.4.dist-info}/licenses/LICENSE +0 -0
fastmcp/utilities/logging.py
@@ -1,11 +1,14 @@
 """Logging utilities for FastMCP."""
 
+import contextlib
 import logging
-from typing import Any, Literal
+from typing import Any, Literal, cast
 
 from rich.console import Console
 from rich.logging import RichHandler
 
+import fastmcp
+
 
 def get_logger(name: str) -> logging.Logger:
     """Get a logger nested under FastMCP namespace.
@@ -16,13 +19,13 @@ def get_logger(name: str) -> logging.Logger:
     Returns:
         a configured logger instance
     """
-    return logging.getLogger(f"FastMCP.{name}")
+    return logging.getLogger(f"fastmcp.{name}")
 
 
 def configure_logging(
     level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] | int = "INFO",
     logger: logging.Logger | None = None,
-    enable_rich_tracebacks: bool = True,
+    enable_rich_tracebacks: bool | None = None,
     **rich_kwargs: Any,
 ) -> None:
     """
@@ -33,9 +36,16 @@ def configure_logging(
         level: the log level to use
        rich_kwargs: the parameters to use for creating RichHandler
     """
+    # Check if logging is disabled in settings
+    if not fastmcp.settings.log_enabled:
+        return
+
+    # Use settings default if not specified
+    if enable_rich_tracebacks is None:
+        enable_rich_tracebacks = fastmcp.settings.enable_rich_tracebacks
 
     if logger is None:
-        logger = logging.getLogger("FastMCP")
+        logger = logging.getLogger("fastmcp")
 
     # Only configure the FastMCP logger namespace
     handler = RichHandler(
@@ -56,3 +66,55 @@ def configure_logging(
 
     # Don't propagate to the root logger
     logger.propagate = False
+
+
+@contextlib.contextmanager
+def temporary_log_level(
+    level: str | None,
+    logger: logging.Logger | None = None,
+    enable_rich_tracebacks: bool | None = None,
+    **rich_kwargs: Any,
+):
+    """Context manager to temporarily set log level and restore it afterwards.
+
+    Args:
+        level: The temporary log level to set (e.g., "DEBUG", "INFO")
+        logger: Optional logger to configure (defaults to FastMCP logger)
+        enable_rich_tracebacks: Whether to enable rich tracebacks
+        **rich_kwargs: Additional parameters for RichHandler
+
+    Usage:
+        with temporary_log_level("DEBUG"):
+            # Code that runs with DEBUG logging
+            pass
+        # Original log level is restored here
+    """
+    if level:
+        # Get the original log level from settings
+        original_level = fastmcp.settings.log_level
+
+        # Configure with new level
+        # Cast to proper type for type checker
+        log_level_literal = cast(
+            Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
+            level.upper(),
+        )
+        configure_logging(
+            level=log_level_literal,
+            logger=logger,
+            enable_rich_tracebacks=enable_rich_tracebacks,
+            **rich_kwargs,
+        )
+        try:
+            yield
+        finally:
+            # Restore original configuration using configure_logging
+            # This will respect the log_enabled setting
+            configure_logging(
+                level=original_level,
+                logger=logger,
+                enable_rich_tracebacks=enable_rich_tracebacks,
+                **rich_kwargs,
+            )
+    else:
+        yield
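A quick, hedged sketch of the new logging behavior; it assumes the default settings (log_enabled true, log_level "INFO") and uses only names that appear in the diff above:

from fastmcp.utilities.logging import configure_logging, get_logger, temporary_log_level

configure_logging(level="INFO")
logger = get_logger("demo")         # now resolves to logging.getLogger("fastmcp.demo")

logger.debug("suppressed at INFO")
with temporary_log_level("debug"):  # upper-cased and cast to the level literal internally
    logger.debug("emitted while the context is active")
# On exit, configure_logging() is re-applied with fastmcp.settings.log_level,
# and the whole call becomes a no-op if fastmcp.settings.log_enabled is False.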
fastmcp/utilities/mcp_server_config/v1/environments/uv.py
@@ -1,7 +1,5 @@
-import os
 import shutil
 import subprocess
-import sys
 from pathlib import Path
 from typing import Literal
 
@@ -59,7 +57,7 @@ class UVEnvironment(Environment):
         If no environment configuration is set, returns the command unchanged.
         """
         # If no environment setup is needed, return command as-is
-        if not self._needs_setup():
+        if not self._must_run_with_uv():
             return command
 
         args = ["uv", "run"]
@@ -75,7 +73,7 @@ class UVEnvironment(Environment):
         # Always add dependencies, requirements, and editable packages
         # These work with --project to add additional packages on top of the project env
         if self.dependencies:
-            for dep in self.dependencies:
+            for dep in sorted(set(self.dependencies)):
                 args.extend(["--with", dep])
 
         # Add requirements file
@@ -92,31 +90,7 @@ class UVEnvironment(Environment):
 
         return args
 
-    def run_with_uv(self, command: list[str]) -> None:
-        """Execute a command using uv run with this environment configuration.
-
-        Args:
-            command: Command and arguments to execute (e.g., ["fastmcp", "run", "server.py"])
-        """
-        import subprocess
-
-        # Build the full uv command
-        cmd = self.build_command(command)
-
-        # Set marker to prevent infinite loops when subprocess calls FastMCP again
-        env = os.environ | {"FASTMCP_UV_SPAWNED": "1"}
-
-        logger.debug(f"Running command: {' '.join(cmd)}")
-
-        try:
-            # Run without capturing output so it flows through naturally
-            process = subprocess.run(cmd, check=True, env=env)
-            sys.exit(process.returncode)
-        except subprocess.CalledProcessError as e:
-            logger.error(f"Command failed: {e}")
-            sys.exit(e.returncode)
-
-    def _needs_setup(self) -> bool:
+    def _must_run_with_uv(self) -> bool:
         """Check if this environment config requires uv to set up.
 
         Returns:
@@ -132,15 +106,6 @@ class UVEnvironment(Environment):
             ]
         )
 
-    # Backward compatibility aliases
-    def needs_uv(self) -> bool:
-        """Deprecated: Use _needs_setup() internally or check if build_command modifies the command."""
-        return self._needs_setup()
-
-    def build_uv_run_command(self, command: list[str]) -> list[str]:
-        """Deprecated: Use build_command() instead."""
-        return self.build_command(command)
-
     async def prepare(self, output_dir: Path | None = None) -> None:
         """Prepare the Python environment using uv.
 
@@ -157,7 +122,7 @@ class UVEnvironment(Environment):
         )
 
         # Only prepare environment if there are actual settings to apply
-        if not self._needs_setup():
+        if not self._must_run_with_uv():
             logger.debug("No environment settings configured, skipping preparation")
             return
 
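For orientation, a hedged sketch of how the surviving build_command API behaves after this change; it assumes UVEnvironment can be constructed with a dependencies keyword (the model's field declarations are not shown in this diff):

from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment

env = UVEnvironment(dependencies=["rich", "httpx", "rich"])  # duplicate on purpose
cmd = env.build_command(["fastmcp", "run", "server.py"])
# With only dependencies configured, the result should be roughly:
#   ["uv", "run", "--with", "httpx", "--with", "rich", "fastmcp", "run", "server.py"]
# (duplicates removed, sorted). An empty UVEnvironment() returns the command
# unchanged, since _must_run_with_uv() reports that uv is not required.

Callers that relied on the removed run_with_uv(), needs_uv(), or build_uv_run_command() helpers now go through build_command() and execute the resulting argv themselves.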
fastmcp/utilities/mcp_server_config/v1/mcp_server_config.py
@@ -26,7 +26,7 @@ logger = get_logger("cli.config")
 FASTMCP_JSON_SCHEMA = "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json"
 
 
-# Type alias for source union (will expand with GitSource, etc in future)
+# Type alias for source union (will expand with GitSource, etc. in future)
 SourceType: TypeAlias = FileSystemSource
 
 # Type alias for environment union (will expand with other environments in future)
@@ -403,7 +403,8 @@ class MCPServerConfig(BaseModel):
             run_args["port"] = self.deployment.port
         if self.deployment.path:
             run_args["path"] = self.deployment.path
-        # Note: log_level not currently supported by run_async
+        if self.deployment.log_level:
+            run_args["log_level"] = self.deployment.log_level
 
         # Override with any provided kwargs
         run_args.update(kwargs)
fastmcp/utilities/mcp_server_config/v1/schema.json
@@ -9,7 +9,8 @@
       "enum": [
         "stdio",
         "http",
-        "sse"
+        "sse",
+        "streamable-http"
       ],
       "type": "string"
     },
fastmcp/utilities/storage.py (new file)
@@ -0,0 +1,204 @@
+"""Key-value storage utilities for persistent data management."""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import Any, Protocol
+
+import pydantic_core
+
+from fastmcp.utilities.logging import get_logger
+
+logger = get_logger(__name__)
+
+
+class KVStorage(Protocol):
+    """Protocol for key-value storage of JSON data."""
+
+    async def get(self, key: str) -> dict[str, Any] | None:
+        """Get a JSON dict by key."""
+        ...
+
+    async def set(self, key: str, value: dict[str, Any]) -> None:
+        """Store a JSON dict by key."""
+        ...
+
+    async def delete(self, key: str) -> None:
+        """Delete a value by key."""
+        ...
+
+
+class JSONFileStorage:
+    """File-based key-value storage for JSON data with automatic metadata tracking.
+
+    Each key-value pair is stored as a separate JSON file on disk.
+    Keys are sanitized to be filesystem-safe.
+
+    The storage automatically wraps all data with metadata:
+    - timestamp: Timestamp when the entry was last written
+
+    Args:
+        cache_dir: Directory for storing JSON files
+    """
+
+    def __init__(self, cache_dir: Path):
+        """Initialize JSON file storage."""
+        self.cache_dir = cache_dir
+        self.cache_dir.mkdir(exist_ok=True, parents=True)
+
+    def _get_safe_key(self, key: str) -> str:
+        """Convert key to filesystem-safe string."""
+        safe_key = key
+
+        # Replace problematic characters with underscores
+        for char in [".", "/", "\\", ":", "*", "?", '"', "<", ">", "|", " "]:
+            safe_key = safe_key.replace(char, "_")
+
+        # Compress multiple underscores into one
+        while "__" in safe_key:
+            safe_key = safe_key.replace("__", "_")
+
+        # Strip leading and trailing underscores
+        safe_key = safe_key.strip("_")
+
+        return safe_key
+
+    def _get_file_path(self, key: str) -> Path:
+        """Get the file path for a given key."""
+        safe_key = self._get_safe_key(key)
+        return self.cache_dir / f"{safe_key}.json"
+
+    async def get(self, key: str) -> dict[str, Any] | None:
+        """Get a JSON dict from storage by key.
+
+        Args:
+            key: The key to retrieve
+
+        Returns:
+            The stored dict or None if not found
+        """
+        path = self._get_file_path(key)
+        try:
+            wrapper = json.loads(path.read_text())
+
+            # Expect wrapped format with metadata
+            if not isinstance(wrapper, dict) or "data" not in wrapper:
+                logger.warning(f"Invalid storage format for key '{key}'")
+                return None
+
+            logger.debug(f"Loaded data for key '{key}'")
+            return wrapper["data"]
+
+        except FileNotFoundError:
+            logger.debug(f"No data found for key '{key}'")
+            return None
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to load data for key '{key}': {e}")
+            return None
+
+    async def set(self, key: str, value: dict[str, Any]) -> None:
+        """Store a JSON dict with metadata.
+
+        Args:
+            key: The key to store under
+            value: The dict to store
+        """
+        import time
+
+        path = self._get_file_path(key)
+        current_time = time.time()
+
+        # Create wrapper with metadata
+        wrapper = {
+            "data": value,
+            "timestamp": current_time,
+        }
+
+        # Use pydantic_core for consistent JSON serialization
+        json_data = pydantic_core.to_json(wrapper, fallback=str)
+        path.write_bytes(json_data)
+        logger.debug(f"Saved data for key '{key}'")
+
+    async def delete(self, key: str) -> None:
+        """Delete a value from storage.
+
+        Args:
+            key: The key to delete
+        """
+        path = self._get_file_path(key)
+        if path.exists():
+            path.unlink()
+            logger.debug(f"Deleted data for key '{key}'")
+
+    async def cleanup_old_entries(
+        self,
+        max_age_seconds: int = 30 * 24 * 60 * 60,  # 30 days default
+    ) -> int:
+        """Remove entries older than the specified age.
+
+        Uses the timestamp field to determine age.
+
+        Args:
+            max_age_seconds: Maximum age in seconds (default 30 days)
+
+        Returns:
+            Number of entries removed
+        """
+        import time
+
+        current_time = time.time()
+        removed_count = 0
+
+        for json_file in self.cache_dir.glob("*.json"):
+            try:
+                # Read the file and check timestamp
+                wrapper = json.loads(json_file.read_text())
+
+                # Check wrapped format
+                if not isinstance(wrapper, dict) or "data" not in wrapper:
+                    continue  # Invalid format, skip
+
+                if "timestamp" not in wrapper:
+                    continue  # No timestamp field, skip
+
+                entry_age = current_time - wrapper["timestamp"]
+                if entry_age > max_age_seconds:
+                    json_file.unlink()
+                    removed_count += 1
+                    logger.debug(
+                        f"Removed old entry '{json_file.stem}' (age: {entry_age:.0f}s)"
+                    )
+
+            except (json.JSONDecodeError, KeyError) as e:
+                logger.debug(f"Error reading {json_file.name}: {e}")
+                continue
+
+        if removed_count > 0:
+            logger.info(f"Cleaned up {removed_count} old entries from storage")
+
+        return removed_count
+
+
+class InMemoryStorage:
+    """In-memory key-value storage for JSON data.
+
+    Simple dict-based storage that doesn't persist across restarts.
+    Useful for testing or environments where file storage isn't available.
+    """
+
+    def __init__(self):
+        """Initialize in-memory storage."""
+        self._data: dict[str, dict[str, Any]] = {}
+
+    async def get(self, key: str) -> dict[str, Any] | None:
+        """Get a JSON dict from memory by key."""
+        return self._data.get(key)
+
+    async def set(self, key: str, value: dict[str, Any]) -> None:
+        """Store a JSON dict in memory."""
+        self._data[key] = value
+
+    async def delete(self, key: str) -> None:
+        """Delete a value from memory."""
+        self._data.pop(key, None)
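A minimal usage sketch of the new module; the cache directory below is made up for illustration, and both backends satisfy the same KVStorage protocol:

import asyncio
from pathlib import Path

from fastmcp.utilities.storage import InMemoryStorage, JSONFileStorage


async def main() -> None:
    # File-backed storage: each key becomes a sanitized <key>.json file on disk
    store = JSONFileStorage(cache_dir=Path("/tmp/fastmcp-storage-demo"))
    await store.set("client/tokens", {"access_token": "abc123"})
    assert await store.get("client/tokens") == {"access_token": "abc123"}
    await store.cleanup_old_entries(max_age_seconds=7 * 24 * 60 * 60)
    await store.delete("client/tokens")

    # In-memory variant is interchangeable, handy for tests
    mem = InMemoryStorage()
    await mem.set("k", {"v": 1})
    assert await mem.get("k") == {"v": 1}


asyncio.run(main())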
fastmcp/utilities/tests.py
@@ -109,7 +109,7 @@ def run_server_in_process(
     proc.start()
 
     # Wait for server to be running
-    max_attempts = 10
+    max_attempts = 30
     attempt = 0
     while attempt < max_attempts and proc.is_alive():
         try:
@@ -117,10 +117,12 @@
                 s.connect((host, port))
             break
         except ConnectionRefusedError:
-            if attempt < 3:
-                time.sleep(0.01)
-            else:
+            if attempt < 5:
+                time.sleep(0.05)
+            elif attempt < 15:
                 time.sleep(0.1)
+            else:
+                time.sleep(0.2)
             attempt += 1
     else:
         raise RuntimeError(f"Server failed to start after {max_attempts} attempts")
@@ -141,10 +143,10 @@
 def caplog_for_fastmcp(caplog):
     """Context manager to capture logs from FastMCP loggers even when propagation is disabled."""
     caplog.clear()
-    logger = logging.getLogger("FastMCP")
+    logger = logging.getLogger("fastmcp")
     logger.addHandler(caplog.handler)
     try:
-        yield
+        yield caplog
     finally:
         logger.removeHandler(caplog.handler)
 
fastmcp/utilities/types.py
@@ -31,6 +31,10 @@ NotSet = ...
 NotSetT: TypeAlias = EllipsisType
 
 
+def get_fn_name(fn: Callable[..., Any]) -> str:
+    return fn.__name__  # ty: ignore[unresolved-attribute]
+
+
 class FastMCPBaseModel(BaseModel):
     """Base model for FastMCP models."""
 
@@ -80,11 +84,11 @@ def get_cached_typeadapter(cls: T) -> TypeAdapter[T]:
     # Handle both functions and methods
     if inspect.ismethod(cls):
         actual_func = cls.__func__
-        code = actual_func.__code__
-        globals_dict = actual_func.__globals__
-        name = actual_func.__name__
-        defaults = actual_func.__defaults__
-        closure = actual_func.__closure__
+        code = actual_func.__code__  # ty: ignore[unresolved-attribute]
+        globals_dict = actual_func.__globals__  # ty: ignore[unresolved-attribute]
+        name = actual_func.__name__  # ty: ignore[unresolved-attribute]
+        defaults = actual_func.__defaults__  # ty: ignore[unresolved-attribute]
+        closure = actual_func.__closure__  # ty: ignore[unresolved-attribute]
     else:
         code = cls.__code__
         globals_dict = cls.__globals__