fastmcp 2.14.0__py3-none-any.whl → 2.14.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. fastmcp/client/client.py +79 -12
  2. fastmcp/client/sampling/__init__.py +69 -0
  3. fastmcp/client/sampling/handlers/__init__.py +0 -0
  4. fastmcp/client/sampling/handlers/anthropic.py +387 -0
  5. fastmcp/client/sampling/handlers/openai.py +399 -0
  6. fastmcp/client/tasks.py +0 -63
  7. fastmcp/client/transports.py +35 -16
  8. fastmcp/experimental/sampling/handlers/__init__.py +5 -0
  9. fastmcp/experimental/sampling/handlers/openai.py +4 -169
  10. fastmcp/prompts/prompt.py +5 -5
  11. fastmcp/prompts/prompt_manager.py +3 -4
  12. fastmcp/resources/resource.py +4 -4
  13. fastmcp/resources/resource_manager.py +9 -14
  14. fastmcp/resources/template.py +5 -5
  15. fastmcp/server/auth/auth.py +20 -5
  16. fastmcp/server/auth/oauth_proxy.py +73 -15
  17. fastmcp/server/auth/providers/supabase.py +11 -6
  18. fastmcp/server/context.py +448 -113
  19. fastmcp/server/dependencies.py +5 -0
  20. fastmcp/server/elicitation.py +7 -3
  21. fastmcp/server/middleware/error_handling.py +1 -1
  22. fastmcp/server/openapi/components.py +2 -4
  23. fastmcp/server/proxy.py +3 -3
  24. fastmcp/server/sampling/__init__.py +10 -0
  25. fastmcp/server/sampling/run.py +301 -0
  26. fastmcp/server/sampling/sampling_tool.py +108 -0
  27. fastmcp/server/server.py +84 -78
  28. fastmcp/server/tasks/converters.py +2 -1
  29. fastmcp/tools/tool.py +8 -6
  30. fastmcp/tools/tool_manager.py +5 -7
  31. fastmcp/utilities/cli.py +23 -43
  32. fastmcp/utilities/json_schema.py +40 -0
  33. fastmcp/utilities/openapi/schemas.py +4 -4
  34. {fastmcp-2.14.0.dist-info → fastmcp-2.14.2.dist-info}/METADATA +8 -3
  35. {fastmcp-2.14.0.dist-info → fastmcp-2.14.2.dist-info}/RECORD +38 -34
  36. fastmcp/client/sampling.py +0 -56
  37. fastmcp/experimental/sampling/handlers/base.py +0 -21
  38. fastmcp/server/sampling/handler.py +0 -19
  39. {fastmcp-2.14.0.dist-info → fastmcp-2.14.2.dist-info}/WHEEL +0 -0
  40. {fastmcp-2.14.0.dist-info → fastmcp-2.14.2.dist-info}/entry_points.txt +0 -0
  41. {fastmcp-2.14.0.dist-info → fastmcp-2.14.2.dist-info}/licenses/LICENSE +0 -0
fastmcp/client/client.py CHANGED
@@ -47,7 +47,6 @@ from fastmcp.client.roots import (
     create_roots_callback,
 )
 from fastmcp.client.sampling import (
-    ClientSamplingHandler,
     SamplingHandler,
     create_sampling_callback,
 )
@@ -56,7 +55,6 @@ from fastmcp.client.tasks import (
     ResourceTask,
     TaskNotificationHandler,
     ToolTask,
-    _task_capable_initialize,
 )
 from fastmcp.exceptions import ToolError
 from fastmcp.mcp_config import MCPConfig
@@ -83,7 +81,6 @@ from .transports import (

 __all__ = [
     "Client",
-    "ClientSamplingHandler",
     "ElicitationHandler",
     "LogHandler",
     "MessageHandler",
@@ -249,7 +246,8 @@ class Client(Generic[ClientTransportT]):
         ),
         name: str | None = None,
         roots: RootsList | RootsHandler | None = None,
-        sampling_handler: ClientSamplingHandler | None = None,
+        sampling_handler: SamplingHandler | None = None,
+        sampling_capabilities: mcp.types.SamplingCapability | None = None,
         elicitation_handler: ElicitationHandler | None = None,
         log_handler: LogHandler | None = None,
         message_handler: MessageHandlerT | MessageHandler | None = None,
@@ -307,6 +305,14 @@ class Client(Generic[ClientTransportT]):
             self._session_kwargs["sampling_callback"] = create_sampling_callback(
                 sampling_handler
             )
+            # Default to tools-enabled capabilities unless explicitly overridden
+            self._session_kwargs["sampling_capabilities"] = (
+                sampling_capabilities
+                if sampling_capabilities is not None
+                else mcp.types.SamplingCapability(
+                    tools=mcp.types.SamplingToolsCapability()
+                )
+            )

         if elicitation_handler is not None:
             self._session_kwargs["elicitation_callback"] = create_elicitation_callback(
@@ -325,6 +331,20 @@ class Client(Generic[ClientTransportT]):
             str, weakref.ref[ToolTask | PromptTask | ResourceTask]
         ] = {}

+    def _reset_session_state(self, full: bool = False) -> None:
+        """Reset session state after disconnect or cancellation.
+
+        Args:
+            full: If True, also resets session_task and nesting_counter.
+                Use full=True for cancellation cleanup where the session
+                task was started but never completed normally.
+        """
+        self._session_state.session = None
+        self._session_state.initialize_result = None
+        if full:
+            self._session_state.session_task = None
+            self._session_state.nesting_counter = 0
+
     @property
     def session(self) -> ClientSession:
         """Get the current active session. Raises RuntimeError if not connected."""
@@ -344,11 +364,21 @@ class Client(Generic[ClientTransportT]):
         """Set the roots for the client. This does not automatically call `send_roots_list_changed`."""
         self._session_kwargs["list_roots_callback"] = create_roots_callback(roots)

-    def set_sampling_callback(self, sampling_callback: ClientSamplingHandler) -> None:
+    def set_sampling_callback(
+        self,
+        sampling_callback: SamplingHandler,
+        sampling_capabilities: mcp.types.SamplingCapability | None = None,
+    ) -> None:
         """Set the sampling callback for the client."""
         self._session_kwargs["sampling_callback"] = create_sampling_callback(
             sampling_callback
         )
+        # Default to tools-enabled capabilities unless explicitly overridden
+        self._session_kwargs["sampling_capabilities"] = (
+            sampling_capabilities
+            if sampling_capabilities is not None
+            else mcp.types.SamplingCapability(tools=mcp.types.SamplingToolsCapability())
+        )

     def set_elicitation_callback(
         self, elicitation_callback: ElicitationHandler
@@ -405,8 +435,7 @@ class Client(Generic[ClientTransportT]):
         except anyio.ClosedResourceError as e:
             raise RuntimeError("Server session was closed unexpectedly") from e
         finally:
-            self._session_state.session = None
-            self._session_state.initialize_result = None
+            self._reset_session_state()

     async def initialize(
         self,
@@ -458,10 +487,7 @@ class Client(Generic[ClientTransportT]):

         try:
             with anyio.fail_after(timeout):
-                self._session_state.initialize_result = await _task_capable_initialize(
-                    self.session
-                )
-
+                self._session_state.initialize_result = await self.session.initialize()
                 return self._session_state.initialize_result
         except TimeoutError as e:
             raise RuntimeError("Failed to initialize server session") from e
@@ -506,7 +532,48 @@ class Client(Generic[ClientTransportT]):
                 self._session_state.session_task = asyncio.create_task(
                     self._session_runner()
                 )
-                await self._session_state.ready_event.wait()
+                try:
+                    await self._session_state.ready_event.wait()
+                except asyncio.CancelledError:
+                    # Cancellation during initial connection startup can leave the
+                    # background session task running because __aexit__ is never invoked
+                    # when __aenter__ is cancelled. Since we hold the session lock here
+                    # and we know we started the session task, it's safe to tear it down
+                    # without impacting other active contexts.
+                    #
+                    # Note: session_task is an asyncio.Task (not anyio) because it needs
+                    # to outlive individual context manager scopes - anyio's structured
+                    # concurrency doesn't allow tasks to escape their task group.
+                    session_task = self._session_state.session_task
+                    if session_task is not None:
+                        # Request a graceful stop if the runner has already reached
+                        # its stop_event wait.
+                        self._session_state.stop_event.set()
+                        session_task.cancel()
+                        with anyio.CancelScope(shield=True):
+                            with anyio.move_on_after(3):
+                                try:
+                                    await session_task
+                                except asyncio.CancelledError:
+                                    pass
+                                except Exception as e:
+                                    logger.debug(
+                                        f"Error during cancelled session cleanup: {e}"
+                                    )
+
+                    # Reset session state so future callers can reconnect cleanly.
+                    self._reset_session_state(full=True)
+
+                    with anyio.CancelScope(shield=True):
+                        with anyio.move_on_after(3):
+                            try:
+                                await self.transport.close()
+                            except Exception as e:
+                                logger.debug(
+                                    f"Error closing transport after cancellation: {e}"
+                                )
+
+                    raise

             if self._session_state.session_task.done():
                 exception = self._session_state.session_task.exception()
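Taken together, the client.py hunks above replace `ClientSamplingHandler` with the unified `SamplingHandler` type and advertise tool-use sampling capability by default. A minimal sketch of the new constructor surface, assuming a locally running server (the URL and the handler body are illustrative, not part of this diff):

```python
import mcp.types
from fastmcp import Client

async def my_sampling_handler(messages, params, context) -> str:
    # A real handler would call an LLM here; a plain-string return is
    # wrapped into a CreateMessageResult by create_sampling_callback.
    return "Hello from the client-side sampler"

client = Client(
    "http://localhost:8000/mcp",  # illustrative server URL
    sampling_handler=my_sampling_handler,
    # Defaults to SamplingCapability(tools=SamplingToolsCapability());
    # pass an explicit value to opt out of advertising tool support:
    sampling_capabilities=mcp.types.SamplingCapability(),
)
```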
fastmcp/client/sampling/__init__.py ADDED
@@ -0,0 +1,69 @@
+import inspect
+from collections.abc import Awaitable, Callable
+from typing import TypeAlias, TypeVar
+
+import mcp.types
+from mcp import ClientSession, CreateMessageResult
+from mcp.client.session import SamplingFnT
+from mcp.server.session import ServerSession
+from mcp.shared.context import LifespanContextT, RequestContext
+from mcp.types import CreateMessageRequestParams as SamplingParams
+from mcp.types import CreateMessageResultWithTools, SamplingMessage
+
+# Result type that handlers can return
+SamplingHandlerResult: TypeAlias = (
+    str | CreateMessageResult | CreateMessageResultWithTools
+)
+
+# Session type for sampling handlers - works with both client and server sessions
+SessionT = TypeVar("SessionT", ClientSession, ServerSession)
+
+# Unified sampling handler type that works for both clients and servers.
+# Handlers receive messages and parameters from the MCP sampling flow
+# and return LLM responses.
+SamplingHandler: TypeAlias = Callable[
+    [
+        list[SamplingMessage],
+        SamplingParams,
+        RequestContext[SessionT, LifespanContextT],
+    ],
+    SamplingHandlerResult | Awaitable[SamplingHandlerResult],
+]
+
+
+__all__ = [
+    "RequestContext",
+    "SamplingHandler",
+    "SamplingHandlerResult",
+    "SamplingMessage",
+    "SamplingParams",
+    "create_sampling_callback",
+]
+
+
+def create_sampling_callback(
+    sampling_handler: SamplingHandler,
+) -> SamplingFnT:
+    async def _sampling_handler(
+        context,
+        params: SamplingParams,
+    ) -> CreateMessageResult | CreateMessageResultWithTools | mcp.types.ErrorData:
+        try:
+            result = sampling_handler(params.messages, params, context)
+            if inspect.isawaitable(result):
+                result = await result
+
+            if isinstance(result, str):
+                result = CreateMessageResult(
+                    role="assistant",
+                    model="fastmcp-client",
+                    content=mcp.types.TextContent(type="text", text=result),
+                )
+            return result
+        except Exception as e:
+            return mcp.types.ErrorData(
+                code=mcp.types.INTERNAL_ERROR,
+                message=str(e),
+            )
+
+    return _sampling_handler
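The wrapper above accepts both sync and async handlers (it only awaits awaitable results), wraps plain-string returns in a `CreateMessageResult` with `model="fastmcp-client"`, and converts handler exceptions into `mcp.types.ErrorData`. A small sketch of that behavior, with a hypothetical handler and an empty message list used purely for the local check:

```python
import asyncio
import mcp.types
from fastmcp.client.sampling import create_sampling_callback

async def handler(messages, params, context) -> str:
    # Hypothetical handler: ignore the messages and answer with a constant.
    return "forty-two"

callback = create_sampling_callback(handler)

async def demo() -> None:
    params = mcp.types.CreateMessageRequestParams(messages=[], maxTokens=16)
    # `context` is normally a RequestContext supplied by the session; the
    # wrapper only forwards it to the handler, so None suffices here.
    result = await callback(None, params)
    # Plain-string results are wrapped into a CreateMessageResult with
    # role="assistant" and model="fastmcp-client" (see the wrapper above).
    assert isinstance(result, mcp.types.CreateMessageResult)
    assert result.content.text == "forty-two"  # type: ignore[union-attr]

asyncio.run(demo())
```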
fastmcp/client/sampling/handlers/__init__.py ADDED
File without changes
fastmcp/client/sampling/handlers/anthropic.py ADDED
@@ -0,0 +1,387 @@
+"""Anthropic sampling handler for FastMCP."""
+
+from collections.abc import Iterator, Sequence
+from typing import Any
+
+from mcp.types import CreateMessageRequestParams as SamplingParams
+from mcp.types import (
+    CreateMessageResult,
+    CreateMessageResultWithTools,
+    ModelPreferences,
+    SamplingMessage,
+    SamplingMessageContentBlock,
+    StopReason,
+    TextContent,
+    Tool,
+    ToolChoice,
+    ToolResultContent,
+    ToolUseContent,
+)
+
+try:
+    from anthropic import AsyncAnthropic, NotGiven
+    from anthropic._types import NOT_GIVEN
+    from anthropic.types import (
+        Message,
+        MessageParam,
+        TextBlock,
+        TextBlockParam,
+        ToolParam,
+        ToolResultBlockParam,
+        ToolUseBlock,
+        ToolUseBlockParam,
+    )
+    from anthropic.types.model_param import ModelParam
+    from anthropic.types.tool_choice_any_param import ToolChoiceAnyParam
+    from anthropic.types.tool_choice_auto_param import ToolChoiceAutoParam
+    from anthropic.types.tool_choice_param import ToolChoiceParam
+except ImportError as e:
+    raise ImportError(
+        "The `anthropic` package is not installed. "
+        "Install it with `pip install fastmcp[anthropic]` or add `anthropic` to your dependencies."
+    ) from e
+
+__all__ = ["AnthropicSamplingHandler"]
+
+
+class AnthropicSamplingHandler:
+    """Sampling handler that uses the Anthropic API.
+
+    Example:
+        ```python
+        from anthropic import AsyncAnthropic
+        from fastmcp import FastMCP
+        from fastmcp.client.sampling.handlers.anthropic import AnthropicSamplingHandler
+
+        handler = AnthropicSamplingHandler(
+            default_model="claude-sonnet-4-5",
+            client=AsyncAnthropic(),
+        )
+
+        server = FastMCP(sampling_handler=handler)
+        ```
+    """
+
+    def __init__(
+        self, default_model: ModelParam, client: AsyncAnthropic | None = None
+    ) -> None:
+        self.client: AsyncAnthropic = client or AsyncAnthropic()
+        self.default_model: ModelParam = default_model
+
+    async def __call__(
+        self,
+        messages: list[SamplingMessage],
+        params: SamplingParams,
+        context: Any,
+    ) -> CreateMessageResult | CreateMessageResultWithTools:
+        anthropic_messages: list[MessageParam] = self._convert_to_anthropic_messages(
+            messages=messages,
+        )
+
+        model: ModelParam = self._select_model_from_preferences(params.modelPreferences)
+
+        # Convert MCP tools to Anthropic format
+        anthropic_tools: list[ToolParam] | NotGiven = NOT_GIVEN
+        if params.tools:
+            anthropic_tools = self._convert_tools_to_anthropic(params.tools)
+
+        # Convert tool_choice to Anthropic format
+        # Returns None if mode is "none", signaling tools should be omitted
+        anthropic_tool_choice: ToolChoiceParam | NotGiven = NOT_GIVEN
+        if params.toolChoice:
+            converted = self._convert_tool_choice_to_anthropic(params.toolChoice)
+            if converted is None:
+                # tool_choice="none" means don't use tools
+                anthropic_tools = NOT_GIVEN
+            else:
+                anthropic_tool_choice = converted
+
+        response = await self.client.messages.create(
+            model=model,
+            messages=anthropic_messages,
+            system=(
+                params.systemPrompt if params.systemPrompt is not None else NOT_GIVEN
+            ),
+            temperature=(
+                params.temperature if params.temperature is not None else NOT_GIVEN
+            ),
+            max_tokens=params.maxTokens,
+            stop_sequences=(
+                params.stopSequences if params.stopSequences is not None else NOT_GIVEN
+            ),
+            tools=anthropic_tools,
+            tool_choice=anthropic_tool_choice,
+        )
+
+        # Return appropriate result type based on whether tools were provided
+        if params.tools:
+            return self._message_to_result_with_tools(response)
+        return self._message_to_create_message_result(response)
+
+    @staticmethod
+    def _iter_models_from_preferences(
+        model_preferences: ModelPreferences | str | list[str] | None,
+    ) -> Iterator[str]:
+        if model_preferences is None:
+            return
+
+        if isinstance(model_preferences, str):
+            yield model_preferences
+
+        elif isinstance(model_preferences, list):
+            yield from model_preferences
+
+        elif isinstance(model_preferences, ModelPreferences):
+            if not (hints := model_preferences.hints):
+                return
+
+            for hint in hints:
+                if not (name := hint.name):
+                    continue
+
+                yield name
+
+    @staticmethod
+    def _convert_to_anthropic_messages(
+        messages: Sequence[SamplingMessage],
+    ) -> list[MessageParam]:
+        anthropic_messages: list[MessageParam] = []
+
+        for message in messages:
+            content = message.content
+
+            # Handle list content (from CreateMessageResultWithTools)
+            if isinstance(content, list):
+                content_blocks: list[
+                    TextBlockParam | ToolUseBlockParam | ToolResultBlockParam
+                ] = []
+
+                for item in content:
+                    if isinstance(item, ToolUseContent):
+                        content_blocks.append(
+                            ToolUseBlockParam(
+                                type="tool_use",
+                                id=item.id,
+                                name=item.name,
+                                input=item.input,
+                            )
+                        )
+                    elif isinstance(item, TextContent):
+                        content_blocks.append(
+                            TextBlockParam(type="text", text=item.text)
+                        )
+                    elif isinstance(item, ToolResultContent):
+                        # Extract text content from the result
+                        result_content: str | list[TextBlockParam] = ""
+                        if item.content:
+                            text_blocks: list[TextBlockParam] = []
+                            for sub_item in item.content:
+                                if isinstance(sub_item, TextContent):
+                                    text_blocks.append(
+                                        TextBlockParam(type="text", text=sub_item.text)
+                                    )
+                            if len(text_blocks) == 1:
+                                result_content = text_blocks[0]["text"]
+                            elif text_blocks:
+                                result_content = text_blocks
+
+                        content_blocks.append(
+                            ToolResultBlockParam(
+                                type="tool_result",
+                                tool_use_id=item.toolUseId,
+                                content=result_content,
+                                is_error=item.isError if item.isError else False,
+                            )
+                        )
+
+                if content_blocks:
+                    anthropic_messages.append(
+                        MessageParam(
+                            role=message.role,
+                            content=content_blocks,  # type: ignore[arg-type]
+                        )
+                    )
+                continue
+
+            # Handle ToolUseContent (assistant's tool calls)
+            if isinstance(content, ToolUseContent):
+                anthropic_messages.append(
+                    MessageParam(
+                        role="assistant",
+                        content=[
+                            ToolUseBlockParam(
+                                type="tool_use",
+                                id=content.id,
+                                name=content.name,
+                                input=content.input,
+                            )
+                        ],
+                    )
+                )
+                continue
+
+            # Handle ToolResultContent (user's tool results)
+            if isinstance(content, ToolResultContent):
+                result_content_str: str | list[TextBlockParam] = ""
+                if content.content:
+                    text_parts: list[TextBlockParam] = []
+                    for item in content.content:
+                        if isinstance(item, TextContent):
+                            text_parts.append(
+                                TextBlockParam(type="text", text=item.text)
+                            )
+                    if len(text_parts) == 1:
+                        result_content_str = text_parts[0]["text"]
+                    elif text_parts:
+                        result_content_str = text_parts
+
+                anthropic_messages.append(
+                    MessageParam(
+                        role="user",
+                        content=[
+                            ToolResultBlockParam(
+                                type="tool_result",
+                                tool_use_id=content.toolUseId,
+                                content=result_content_str,
+                                is_error=content.isError if content.isError else False,
+                            )
+                        ],
+                    )
+                )
+                continue
+
+            # Handle TextContent
+            if isinstance(content, TextContent):
+                anthropic_messages.append(
+                    MessageParam(
+                        role=message.role,
+                        content=content.text,
+                    )
+                )
+                continue
+
+            raise ValueError(f"Unsupported content type: {type(content)}")
+
+        return anthropic_messages
+
+    @staticmethod
+    def _message_to_create_message_result(
+        message: Message,
+    ) -> CreateMessageResult:
+        if len(message.content) == 0:
+            raise ValueError("No content in response from Anthropic")
+
+        # Join all text blocks to avoid dropping content
+        text = "".join(
+            block.text for block in message.content if isinstance(block, TextBlock)
+        )
+        if text:
+            return CreateMessageResult(
+                content=TextContent(type="text", text=text),
+                role="assistant",
+                model=message.model,
+            )
+
+        raise ValueError(
+            f"No text content in response from Anthropic: {[type(b).__name__ for b in message.content]}"
+        )
+
+    def _select_model_from_preferences(
+        self, model_preferences: ModelPreferences | str | list[str] | None
+    ) -> ModelParam:
+        for model_option in self._iter_models_from_preferences(model_preferences):
+            # Accept any model that starts with "claude"
+            if model_option.startswith("claude"):
+                return model_option
+
+        return self.default_model
+
+    @staticmethod
+    def _convert_tools_to_anthropic(tools: list[Tool]) -> list[ToolParam]:
+        """Convert MCP tools to Anthropic tool format."""
+        anthropic_tools: list[ToolParam] = []
+        for tool in tools:
+            # Build input_schema dict, ensuring required fields
+            input_schema: dict[str, Any] = dict(tool.inputSchema)
+            if "type" not in input_schema:
+                input_schema["type"] = "object"
+
+            anthropic_tools.append(
+                ToolParam(
+                    name=tool.name,
+                    description=tool.description or "",
+                    input_schema=input_schema,  # type: ignore[arg-type]
+                )
+            )
+        return anthropic_tools
+
+    @staticmethod
+    def _convert_tool_choice_to_anthropic(
+        tool_choice: ToolChoice,
+    ) -> ToolChoiceParam | None:
+        """Convert MCP tool_choice to Anthropic format.
+
+        Returns None for "none" mode, signaling that tools should be omitted
+        from the request entirely (Anthropic doesn't have an explicit "none" option).
+        """
+        if tool_choice.mode == "auto":
+            return ToolChoiceAutoParam(type="auto")
+        elif tool_choice.mode == "required":
+            return ToolChoiceAnyParam(type="any")
+        elif tool_choice.mode == "none":
+            # Anthropic doesn't have a "none" option - return None to signal
+            # that tools should be omitted from the request entirely
+            return None
+        else:
+            raise ValueError(f"Unsupported tool_choice mode: {tool_choice.mode!r}")
+
+    @staticmethod
+    def _message_to_result_with_tools(
+        message: Message,
+    ) -> CreateMessageResultWithTools:
+        """Convert Anthropic response to CreateMessageResultWithTools."""
+        if len(message.content) == 0:
+            raise ValueError("No content in response from Anthropic")
+
+        # Determine stop reason
+        stop_reason: StopReason
+        if message.stop_reason == "tool_use":
+            stop_reason = "toolUse"
+        elif message.stop_reason == "end_turn":
+            stop_reason = "endTurn"
+        elif message.stop_reason == "max_tokens":
+            stop_reason = "maxTokens"
+        elif message.stop_reason == "stop_sequence":
+            stop_reason = "endTurn"
+        else:
+            stop_reason = "endTurn"
+
+        # Build content list
+        content: list[SamplingMessageContentBlock] = []
+
+        for block in message.content:
+            if isinstance(block, TextBlock):
+                content.append(TextContent(type="text", text=block.text))
+            elif isinstance(block, ToolUseBlock):
+                # Anthropic returns input as dict directly
+                arguments = block.input if isinstance(block.input, dict) else {}
+
+                content.append(
+                    ToolUseContent(
+                        type="tool_use",
+                        id=block.id,
+                        name=block.name,
+                        input=arguments,
+                    )
+                )
+
+        # Must have at least some content
+        if not content:
+            raise ValueError("No content in response from Anthropic")
+
+        return CreateMessageResultWithTools(
+            content=content,
+            role="assistant",
+            model=message.model,
+            stopReason=stop_reason,
+        )
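The docstring example above wires the handler into a FastMCP server. Because `AnthropicSamplingHandler` is an async callable matching the `SamplingHandler` signature `(messages, params, context)`, it can also serve as a client-side handler. A sketch of that client-side use, assuming an MCP server at an illustrative URL and `ANTHROPIC_API_KEY` set in the environment:

```python
import asyncio

from anthropic import AsyncAnthropic
from fastmcp import Client
from fastmcp.client.sampling.handlers.anthropic import AnthropicSamplingHandler

# The handler instance is reusable across connections.
handler = AnthropicSamplingHandler(
    default_model="claude-sonnet-4-5",
    client=AsyncAnthropic(),  # reads ANTHROPIC_API_KEY from the environment
)

async def main() -> None:
    # "http://localhost:8000/mcp" is an illustrative server URL.
    async with Client("http://localhost:8000/mcp", sampling_handler=handler) as client:
        # Any sampling request issued by the server during this session is
        # answered by the Anthropic API via the handler above.
        await client.ping()

asyncio.run(main())
```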