fastmcp 2.13.3__py3-none-any.whl → 2.14.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. fastmcp/__init__.py +0 -21
  2. fastmcp/cli/__init__.py +0 -3
  3. fastmcp/cli/__main__.py +5 -0
  4. fastmcp/cli/cli.py +8 -22
  5. fastmcp/cli/install/shared.py +0 -15
  6. fastmcp/cli/tasks.py +110 -0
  7. fastmcp/client/auth/oauth.py +9 -9
  8. fastmcp/client/client.py +739 -136
  9. fastmcp/client/elicitation.py +11 -5
  10. fastmcp/client/messages.py +7 -5
  11. fastmcp/client/roots.py +2 -1
  12. fastmcp/client/sampling/__init__.py +69 -0
  13. fastmcp/client/sampling/handlers/__init__.py +0 -0
  14. fastmcp/client/sampling/handlers/anthropic.py +387 -0
  15. fastmcp/client/sampling/handlers/openai.py +399 -0
  16. fastmcp/client/tasks.py +551 -0
  17. fastmcp/client/transports.py +72 -21
  18. fastmcp/contrib/component_manager/component_service.py +4 -20
  19. fastmcp/dependencies.py +25 -0
  20. fastmcp/experimental/sampling/handlers/__init__.py +5 -0
  21. fastmcp/experimental/sampling/handlers/openai.py +4 -169
  22. fastmcp/experimental/server/openapi/__init__.py +15 -13
  23. fastmcp/experimental/utilities/openapi/__init__.py +12 -38
  24. fastmcp/prompts/prompt.py +38 -38
  25. fastmcp/resources/resource.py +33 -16
  26. fastmcp/resources/template.py +69 -59
  27. fastmcp/server/auth/__init__.py +0 -9
  28. fastmcp/server/auth/auth.py +127 -3
  29. fastmcp/server/auth/oauth_proxy.py +47 -97
  30. fastmcp/server/auth/oidc_proxy.py +7 -0
  31. fastmcp/server/auth/providers/in_memory.py +2 -2
  32. fastmcp/server/auth/providers/oci.py +2 -2
  33. fastmcp/server/context.py +509 -180
  34. fastmcp/server/dependencies.py +464 -6
  35. fastmcp/server/elicitation.py +285 -47
  36. fastmcp/server/event_store.py +177 -0
  37. fastmcp/server/http.py +15 -3
  38. fastmcp/server/low_level.py +56 -12
  39. fastmcp/server/middleware/middleware.py +2 -2
  40. fastmcp/server/openapi/__init__.py +35 -0
  41. fastmcp/{experimental/server → server}/openapi/components.py +4 -3
  42. fastmcp/{experimental/server → server}/openapi/routing.py +1 -1
  43. fastmcp/{experimental/server → server}/openapi/server.py +6 -5
  44. fastmcp/server/proxy.py +53 -40
  45. fastmcp/server/sampling/__init__.py +10 -0
  46. fastmcp/server/sampling/run.py +301 -0
  47. fastmcp/server/sampling/sampling_tool.py +108 -0
  48. fastmcp/server/server.py +793 -552
  49. fastmcp/server/tasks/__init__.py +21 -0
  50. fastmcp/server/tasks/capabilities.py +22 -0
  51. fastmcp/server/tasks/config.py +89 -0
  52. fastmcp/server/tasks/converters.py +206 -0
  53. fastmcp/server/tasks/handlers.py +356 -0
  54. fastmcp/server/tasks/keys.py +93 -0
  55. fastmcp/server/tasks/protocol.py +355 -0
  56. fastmcp/server/tasks/subscriptions.py +205 -0
  57. fastmcp/settings.py +101 -103
  58. fastmcp/tools/tool.py +83 -49
  59. fastmcp/tools/tool_transform.py +1 -12
  60. fastmcp/utilities/components.py +3 -3
  61. fastmcp/utilities/json_schema_type.py +4 -4
  62. fastmcp/utilities/mcp_config.py +1 -2
  63. fastmcp/utilities/mcp_server_config/v1/mcp_server_config.py +1 -1
  64. fastmcp/{experimental/utilities → utilities}/openapi/README.md +7 -35
  65. fastmcp/utilities/openapi/__init__.py +63 -0
  66. fastmcp/{experimental/utilities → utilities}/openapi/formatters.py +5 -5
  67. fastmcp/{experimental/utilities → utilities}/openapi/json_schema_converter.py +1 -1
  68. fastmcp/utilities/tests.py +11 -5
  69. fastmcp/utilities/types.py +8 -0
  70. {fastmcp-2.13.3.dist-info → fastmcp-2.14.1.dist-info}/METADATA +7 -4
  71. {fastmcp-2.13.3.dist-info → fastmcp-2.14.1.dist-info}/RECORD +79 -63
  72. fastmcp/client/sampling.py +0 -56
  73. fastmcp/experimental/sampling/handlers/base.py +0 -21
  74. fastmcp/server/auth/providers/bearer.py +0 -25
  75. fastmcp/server/openapi.py +0 -1087
  76. fastmcp/server/sampling/handler.py +0 -19
  77. fastmcp/utilities/openapi.py +0 -1568
  78. /fastmcp/{experimental/server → server}/openapi/README.md +0 -0
  79. /fastmcp/{experimental/utilities → utilities}/openapi/director.py +0 -0
  80. /fastmcp/{experimental/utilities → utilities}/openapi/models.py +0 -0
  81. /fastmcp/{experimental/utilities → utilities}/openapi/parser.py +0 -0
  82. /fastmcp/{experimental/utilities → utilities}/openapi/schemas.py +0 -0
  83. {fastmcp-2.13.3.dist-info → fastmcp-2.14.1.dist-info}/WHEEL +0 -0
  84. {fastmcp-2.13.3.dist-info → fastmcp-2.14.1.dist-info}/entry_points.txt +0 -0
  85. {fastmcp-2.13.3.dist-info → fastmcp-2.14.1.dist-info}/licenses/LICENSE +0 -0
fastmcp/client/elicitation.py CHANGED
@@ -7,7 +7,7 @@ import mcp.types
 from mcp import ClientSession
 from mcp.client.session import ElicitationFnT
 from mcp.shared.context import LifespanContextT, RequestContext
-from mcp.types import ElicitRequestParams
+from mcp.types import ElicitRequestFormParams, ElicitRequestParams
 from mcp.types import ElicitResult as MCPElicitResult
 from pydantic_core import to_jsonable_python
 from typing_extensions import TypeVar
@@ -26,7 +26,8 @@ class ElicitResult(MCPElicitResult, Generic[T]):
 ElicitationHandler: TypeAlias = Callable[
     [
         str,  # message
-        type[T],  # a class for creating a structured response
+        type[T]
+        | None,  # a class for creating a structured response (None for URL elicitation)
         ElicitRequestParams,
         RequestContext[ClientSession, LifespanContextT],
     ],
@@ -42,10 +43,15 @@ def create_elicitation_callback(
         params: ElicitRequestParams,
     ) -> MCPElicitResult | mcp.types.ErrorData:
         try:
-            if params.requestedSchema == {"type": "object", "properties": {}}:
-                response_type = None
+            # requestedSchema only exists on ElicitRequestFormParams, not ElicitRequestURLParams
+            if isinstance(params, ElicitRequestFormParams):
+                if params.requestedSchema == {"type": "object", "properties": {}}:
+                    response_type = None
+                else:
+                    response_type = json_schema_to_type(params.requestedSchema)
             else:
-                response_type = json_schema_to_type(params.requestedSchema)
+                # URL-based elicitation doesn't have a schema
+                response_type = None
 
             result = await elicitation_handler(
                 params.message, response_type, params, context
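For context, a minimal sketch of a client-side handler written against the widened `ElicitationHandler` signature. The server path, the `answer` field name, and the accept/decline choices are illustrative assumptions, not taken from this diff:

```python
from fastmcp import Client
from fastmcp.client.elicitation import ElicitResult


async def elicitation_handler(message, response_type, params, context):
    # response_type is a class generated from the requested schema for form
    # elicitation, or None for URL elicitation (and for empty schemas).
    if response_type is None:
        # Nothing structured to return; decline (or surface params out of band).
        return ElicitResult(action="decline")
    # Form elicitation: instantiate the generated response type.
    return response_type(answer="example")  # "answer" is a hypothetical field


client = Client("my_server.py", elicitation_handler=elicitation_handler)
```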
fastmcp/client/messages.py CHANGED
@@ -35,16 +35,18 @@ class MessageHandler:
             # requests
             case RequestResponder():
                 # handle all requests
-                await self.on_request(message)
+                # TODO(ty): remove when ty supports match statement narrowing
+                await self.on_request(message)  # type: ignore[arg-type]
 
                 # handle specific requests
-                match message.request.root:
+                # TODO(ty): remove type ignores when ty supports match statement narrowing
+                match message.request.root:  # type: ignore[union-attr]
                     case mcp.types.PingRequest():
-                        await self.on_ping(message.request.root)
+                        await self.on_ping(message.request.root)  # type: ignore[union-attr]
                     case mcp.types.ListRootsRequest():
-                        await self.on_list_roots(message.request.root)
+                        await self.on_list_roots(message.request.root)  # type: ignore[union-attr]
                     case mcp.types.CreateMessageRequest():
-                        await self.on_create_message(message.request.root)
+                        await self.on_create_message(message.request.root)  # type: ignore[union-attr]
 
             # notifications
             case mcp.types.ServerNotification():
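This dispatch is what backs custom message handlers. A rough sketch of hooking one of the methods it routes to, assuming the `Client` constructor still accepts a `message_handler` (the server path is illustrative):

```python
import mcp.types
from fastmcp import Client
from fastmcp.client.messages import MessageHandler


class LoggingMessageHandler(MessageHandler):
    async def on_ping(self, message: mcp.types.PingRequest) -> None:
        # Called whenever the server sends a ping request to this client.
        print("server ping received")


client = Client("my_server.py", message_handler=LoggingMessageHandler())
```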
fastmcp/client/roots.py CHANGED
@@ -34,7 +34,8 @@ def create_roots_callback(
     handler: RootsList | RootsHandler,
 ) -> ListRootsFnT:
     if isinstance(handler, list):
-        return _create_roots_callback_from_roots(handler)
+        # TODO(ty): remove when ty supports isinstance union narrowing
+        return _create_roots_callback_from_roots(handler)  # type: ignore[arg-type]
     elif inspect.isfunction(handler):
         return _create_roots_callback_from_fn(handler)
     else:
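The list branch above is what lets a client declare static roots. A small sketch, assuming the `Client` constructor still accepts a `roots` list (the path is illustrative):

```python
from fastmcp import Client

# A plain list of roots is turned into a callback by create_roots_callback.
client = Client("my_server.py", roots=["file:///home/user/project"])
```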
fastmcp/client/sampling/__init__.py ADDED
@@ -0,0 +1,69 @@
+import inspect
+from collections.abc import Awaitable, Callable
+from typing import TypeAlias, TypeVar
+
+import mcp.types
+from mcp import ClientSession, CreateMessageResult
+from mcp.client.session import SamplingFnT
+from mcp.server.session import ServerSession
+from mcp.shared.context import LifespanContextT, RequestContext
+from mcp.types import CreateMessageRequestParams as SamplingParams
+from mcp.types import CreateMessageResultWithTools, SamplingMessage
+
+# Result type that handlers can return
+SamplingHandlerResult: TypeAlias = (
+    str | CreateMessageResult | CreateMessageResultWithTools
+)
+
+# Session type for sampling handlers - works with both client and server sessions
+SessionT = TypeVar("SessionT", ClientSession, ServerSession)
+
+# Unified sampling handler type that works for both clients and servers.
+# Handlers receive messages and parameters from the MCP sampling flow
+# and return LLM responses.
+SamplingHandler: TypeAlias = Callable[
+    [
+        list[SamplingMessage],
+        SamplingParams,
+        RequestContext[SessionT, LifespanContextT],
+    ],
+    SamplingHandlerResult | Awaitable[SamplingHandlerResult],
+]
+
+
+__all__ = [
+    "RequestContext",
+    "SamplingHandler",
+    "SamplingHandlerResult",
+    "SamplingMessage",
+    "SamplingParams",
+    "create_sampling_callback",
+]
+
+
+def create_sampling_callback(
+    sampling_handler: SamplingHandler,
+) -> SamplingFnT:
+    async def _sampling_handler(
+        context,
+        params: SamplingParams,
+    ) -> CreateMessageResult | CreateMessageResultWithTools | mcp.types.ErrorData:
+        try:
+            result = sampling_handler(params.messages, params, context)
+            if inspect.isawaitable(result):
+                result = await result
+
+            if isinstance(result, str):
+                result = CreateMessageResult(
+                    role="assistant",
+                    model="fastmcp-client",
+                    content=mcp.types.TextContent(type="text", text=result),
+                )
+            return result
+        except Exception as e:
+            return mcp.types.ErrorData(
+                code=mcp.types.INTERNAL_ERROR,
+                message=str(e),
+            )
+
+    return _sampling_handler
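A minimal sketch of a handler matching the new `SamplingHandler` alias. Returning a plain string is enough, since `create_sampling_callback` wraps it into a `CreateMessageResult`; the echo logic and server path below are illustrative:

```python
from fastmcp import Client


async def sampling_handler(messages, params, context) -> str:
    # A real handler would call an LLM here; create_sampling_callback wraps
    # a plain string into a CreateMessageResult with model="fastmcp-client".
    return f"echo: received {len(messages)} message(s)"


client = Client("my_server.py", sampling_handler=sampling_handler)
```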
fastmcp/client/sampling/handlers/__init__.py ADDED
File without changes
fastmcp/client/sampling/handlers/anthropic.py ADDED
@@ -0,0 +1,387 @@
+"""Anthropic sampling handler for FastMCP."""
+
+from collections.abc import Iterator, Sequence
+from typing import Any
+
+from mcp.types import CreateMessageRequestParams as SamplingParams
+from mcp.types import (
+    CreateMessageResult,
+    CreateMessageResultWithTools,
+    ModelPreferences,
+    SamplingMessage,
+    SamplingMessageContentBlock,
+    StopReason,
+    TextContent,
+    Tool,
+    ToolChoice,
+    ToolResultContent,
+    ToolUseContent,
+)
+
+try:
+    from anthropic import AsyncAnthropic, NotGiven
+    from anthropic._types import NOT_GIVEN
+    from anthropic.types import (
+        Message,
+        MessageParam,
+        TextBlock,
+        TextBlockParam,
+        ToolParam,
+        ToolResultBlockParam,
+        ToolUseBlock,
+        ToolUseBlockParam,
+    )
+    from anthropic.types.model_param import ModelParam
+    from anthropic.types.tool_choice_any_param import ToolChoiceAnyParam
+    from anthropic.types.tool_choice_auto_param import ToolChoiceAutoParam
+    from anthropic.types.tool_choice_param import ToolChoiceParam
+except ImportError as e:
+    raise ImportError(
+        "The `anthropic` package is not installed. "
+        "Install it with `pip install fastmcp[anthropic]` or add `anthropic` to your dependencies."
+    ) from e
+
+__all__ = ["AnthropicSamplingHandler"]
+
+
+class AnthropicSamplingHandler:
+    """Sampling handler that uses the Anthropic API.
+
+    Example:
+        ```python
+        from anthropic import AsyncAnthropic
+        from fastmcp import FastMCP
+        from fastmcp.client.sampling.handlers.anthropic import AnthropicSamplingHandler
+
+        handler = AnthropicSamplingHandler(
+            default_model="claude-sonnet-4-5",
+            client=AsyncAnthropic(),
+        )
+
+        server = FastMCP(sampling_handler=handler)
+        ```
+    """
+
+    def __init__(
+        self, default_model: ModelParam, client: AsyncAnthropic | None = None
+    ) -> None:
+        self.client: AsyncAnthropic = client or AsyncAnthropic()
+        self.default_model: ModelParam = default_model
+
+    async def __call__(
+        self,
+        messages: list[SamplingMessage],
+        params: SamplingParams,
+        context: Any,
+    ) -> CreateMessageResult | CreateMessageResultWithTools:
+        anthropic_messages: list[MessageParam] = self._convert_to_anthropic_messages(
+            messages=messages,
+        )
+
+        model: ModelParam = self._select_model_from_preferences(params.modelPreferences)
+
+        # Convert MCP tools to Anthropic format
+        anthropic_tools: list[ToolParam] | NotGiven = NOT_GIVEN
+        if params.tools:
+            anthropic_tools = self._convert_tools_to_anthropic(params.tools)
+
+        # Convert tool_choice to Anthropic format
+        # Returns None if mode is "none", signaling tools should be omitted
+        anthropic_tool_choice: ToolChoiceParam | NotGiven = NOT_GIVEN
+        if params.toolChoice:
+            converted = self._convert_tool_choice_to_anthropic(params.toolChoice)
+            if converted is None:
+                # tool_choice="none" means don't use tools
+                anthropic_tools = NOT_GIVEN
+            else:
+                anthropic_tool_choice = converted
+
+        response = await self.client.messages.create(
+            model=model,
+            messages=anthropic_messages,
+            system=(
+                params.systemPrompt if params.systemPrompt is not None else NOT_GIVEN
+            ),
+            temperature=(
+                params.temperature if params.temperature is not None else NOT_GIVEN
+            ),
+            max_tokens=params.maxTokens,
+            stop_sequences=(
+                params.stopSequences if params.stopSequences is not None else NOT_GIVEN
+            ),
+            tools=anthropic_tools,
+            tool_choice=anthropic_tool_choice,
+        )
+
+        # Return appropriate result type based on whether tools were provided
+        if params.tools:
+            return self._message_to_result_with_tools(response)
+        return self._message_to_create_message_result(response)
+
+    @staticmethod
+    def _iter_models_from_preferences(
+        model_preferences: ModelPreferences | str | list[str] | None,
+    ) -> Iterator[str]:
+        if model_preferences is None:
+            return
+
+        if isinstance(model_preferences, str):
+            yield model_preferences
+
+        elif isinstance(model_preferences, list):
+            yield from model_preferences
+
+        elif isinstance(model_preferences, ModelPreferences):
+            if not (hints := model_preferences.hints):
+                return
+
+            for hint in hints:
+                if not (name := hint.name):
+                    continue
+
+                yield name
+
+    @staticmethod
+    def _convert_to_anthropic_messages(
+        messages: Sequence[SamplingMessage],
+    ) -> list[MessageParam]:
+        anthropic_messages: list[MessageParam] = []
+
+        for message in messages:
+            content = message.content
+
+            # Handle list content (from CreateMessageResultWithTools)
+            if isinstance(content, list):
+                content_blocks: list[
+                    TextBlockParam | ToolUseBlockParam | ToolResultBlockParam
+                ] = []
+
+                for item in content:
+                    if isinstance(item, ToolUseContent):
+                        content_blocks.append(
+                            ToolUseBlockParam(
+                                type="tool_use",
+                                id=item.id,
+                                name=item.name,
+                                input=item.input,
+                            )
+                        )
+                    elif isinstance(item, TextContent):
+                        content_blocks.append(
+                            TextBlockParam(type="text", text=item.text)
+                        )
+                    elif isinstance(item, ToolResultContent):
+                        # Extract text content from the result
+                        result_content: str | list[TextBlockParam] = ""
+                        if item.content:
+                            text_blocks: list[TextBlockParam] = []
+                            for sub_item in item.content:
+                                if isinstance(sub_item, TextContent):
+                                    text_blocks.append(
+                                        TextBlockParam(type="text", text=sub_item.text)
+                                    )
+                            if len(text_blocks) == 1:
+                                result_content = text_blocks[0]["text"]
+                            elif text_blocks:
+                                result_content = text_blocks
+
+                        content_blocks.append(
+                            ToolResultBlockParam(
+                                type="tool_result",
+                                tool_use_id=item.toolUseId,
+                                content=result_content,
+                                is_error=item.isError if item.isError else False,
+                            )
+                        )
+
+                if content_blocks:
+                    anthropic_messages.append(
+                        MessageParam(
+                            role=message.role,
+                            content=content_blocks,  # type: ignore[arg-type]
+                        )
+                    )
+                continue
+
+            # Handle ToolUseContent (assistant's tool calls)
+            if isinstance(content, ToolUseContent):
+                anthropic_messages.append(
+                    MessageParam(
+                        role="assistant",
+                        content=[
+                            ToolUseBlockParam(
+                                type="tool_use",
+                                id=content.id,
+                                name=content.name,
+                                input=content.input,
+                            )
+                        ],
+                    )
+                )
+                continue
+
+            # Handle ToolResultContent (user's tool results)
+            if isinstance(content, ToolResultContent):
+                result_content_str: str | list[TextBlockParam] = ""
+                if content.content:
+                    text_parts: list[TextBlockParam] = []
+                    for item in content.content:
+                        if isinstance(item, TextContent):
+                            text_parts.append(
+                                TextBlockParam(type="text", text=item.text)
+                            )
+                    if len(text_parts) == 1:
+                        result_content_str = text_parts[0]["text"]
+                    elif text_parts:
+                        result_content_str = text_parts
+
+                anthropic_messages.append(
+                    MessageParam(
+                        role="user",
+                        content=[
+                            ToolResultBlockParam(
+                                type="tool_result",
+                                tool_use_id=content.toolUseId,
+                                content=result_content_str,
+                                is_error=content.isError if content.isError else False,
+                            )
+                        ],
+                    )
+                )
+                continue
+
+            # Handle TextContent
+            if isinstance(content, TextContent):
+                anthropic_messages.append(
+                    MessageParam(
+                        role=message.role,
+                        content=content.text,
+                    )
+                )
+                continue
+
+            raise ValueError(f"Unsupported content type: {type(content)}")
+
+        return anthropic_messages
+
+    @staticmethod
+    def _message_to_create_message_result(
+        message: Message,
+    ) -> CreateMessageResult:
+        if len(message.content) == 0:
+            raise ValueError("No content in response from Anthropic")
+
+        # Join all text blocks to avoid dropping content
+        text = "".join(
+            block.text for block in message.content if isinstance(block, TextBlock)
+        )
+        if text:
+            return CreateMessageResult(
+                content=TextContent(type="text", text=text),
+                role="assistant",
+                model=message.model,
+            )
+
+        raise ValueError(
+            f"No text content in response from Anthropic: {[type(b).__name__ for b in message.content]}"
+        )
+
+    def _select_model_from_preferences(
+        self, model_preferences: ModelPreferences | str | list[str] | None
+    ) -> ModelParam:
+        for model_option in self._iter_models_from_preferences(model_preferences):
+            # Accept any model that starts with "claude"
+            if model_option.startswith("claude"):
+                return model_option
+
+        return self.default_model
+
+    @staticmethod
+    def _convert_tools_to_anthropic(tools: list[Tool]) -> list[ToolParam]:
+        """Convert MCP tools to Anthropic tool format."""
+        anthropic_tools: list[ToolParam] = []
+        for tool in tools:
+            # Build input_schema dict, ensuring required fields
+            input_schema: dict[str, Any] = dict(tool.inputSchema)
+            if "type" not in input_schema:
+                input_schema["type"] = "object"
+
+            anthropic_tools.append(
+                ToolParam(
+                    name=tool.name,
+                    description=tool.description or "",
+                    input_schema=input_schema,  # type: ignore[arg-type]
+                )
+            )
+        return anthropic_tools
+
+    @staticmethod
+    def _convert_tool_choice_to_anthropic(
+        tool_choice: ToolChoice,
+    ) -> ToolChoiceParam | None:
+        """Convert MCP tool_choice to Anthropic format.
+
+        Returns None for "none" mode, signaling that tools should be omitted
+        from the request entirely (Anthropic doesn't have an explicit "none" option).
+        """
+        if tool_choice.mode == "auto":
+            return ToolChoiceAutoParam(type="auto")
+        elif tool_choice.mode == "required":
+            return ToolChoiceAnyParam(type="any")
+        elif tool_choice.mode == "none":
+            # Anthropic doesn't have a "none" option - return None to signal
+            # that tools should be omitted from the request entirely
+            return None
+        else:
+            raise ValueError(f"Unsupported tool_choice mode: {tool_choice.mode!r}")
+
+    @staticmethod
+    def _message_to_result_with_tools(
+        message: Message,
+    ) -> CreateMessageResultWithTools:
+        """Convert Anthropic response to CreateMessageResultWithTools."""
+        if len(message.content) == 0:
+            raise ValueError("No content in response from Anthropic")
+
+        # Determine stop reason
+        stop_reason: StopReason
+        if message.stop_reason == "tool_use":
+            stop_reason = "toolUse"
+        elif message.stop_reason == "end_turn":
+            stop_reason = "endTurn"
+        elif message.stop_reason == "max_tokens":
+            stop_reason = "maxTokens"
+        elif message.stop_reason == "stop_sequence":
+            stop_reason = "endTurn"
+        else:
+            stop_reason = "endTurn"
+
+        # Build content list
+        content: list[SamplingMessageContentBlock] = []
+
+        for block in message.content:
+            if isinstance(block, TextBlock):
+                content.append(TextContent(type="text", text=block.text))
+            elif isinstance(block, ToolUseBlock):
+                # Anthropic returns input as dict directly
+                arguments = block.input if isinstance(block.input, dict) else {}
+
+                content.append(
+                    ToolUseContent(
+                        type="tool_use",
+                        id=block.id,
+                        name=block.name,
+                        input=arguments,
+                    )
+                )
+
+        # Must have at least some content
+        if not content:
+            raise ValueError("No content in response from Anthropic")
+
+        return CreateMessageResultWithTools(
+            content=content,
+            role="assistant",
+            model=message.model,
+            stopReason=stop_reason,
+        )
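The class docstring above shows the server-side wiring; a corresponding client-side sketch, assuming `Client` accepts the same `sampling_handler` argument and `ANTHROPIC_API_KEY` is set in the environment (the server path is illustrative):

```python
from anthropic import AsyncAnthropic
from fastmcp import Client
from fastmcp.client.sampling.handlers.anthropic import AnthropicSamplingHandler

handler = AnthropicSamplingHandler(
    default_model="claude-sonnet-4-5",
    client=AsyncAnthropic(),  # reads ANTHROPIC_API_KEY from the environment
)

client = Client("my_server.py", sampling_handler=handler)
```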