anthropic 0.67.0__py3-none-any.whl → 0.68.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to their public registry. It is provided for informational purposes only.
anthropic/__init__.py CHANGED
@@ -87,11 +87,14 @@ __all__ = [
     "DefaultAioHttpClient",
     "HUMAN_PROMPT",
     "AI_PROMPT",
+    "beta_tool",
+    "beta_async_tool",
 ]

 if not _t.TYPE_CHECKING:
     from ._utils._resources_proxy import resources as resources

+    from .lib.tools import beta_tool, beta_async_tool
     from .lib.vertex import *
     from .lib.bedrock import *
     from .lib.streaming import *
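Not part of the diff itself, but for orientation: a minimal sketch of the new top-level import surface these lines enable (the `lib_beta_tool` alias is purely illustrative).

```py
import anthropic

# `beta_tool` and `beta_async_tool` are re-exported lazily from anthropic.lib.tools,
# so both import paths resolve to the same objects.
from anthropic import beta_tool, beta_async_tool
from anthropic.lib.tools import beta_tool as lib_beta_tool

assert beta_tool is lib_beta_tool
assert anthropic.beta_async_tool is beta_async_tool
```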
anthropic/_models.py CHANGED
@@ -272,7 +272,7 @@ class BaseModel(pydantic.BaseModel):
             mode: Literal["json", "python"] | str = "python",
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
@@ -280,6 +280,7 @@ class BaseModel(pydantic.BaseModel):
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
             serialize_as_any: bool = False,
+            fallback: Callable[[Any], Any] | None = None,
         ) -> dict[str, Any]:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump

@@ -311,10 +312,12 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             dumped = super().dict(  # pyright: ignore[reportDeprecated]
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
@@ -329,13 +332,14 @@ class BaseModel(pydantic.BaseModel):
             indent: int | None = None,
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
             round_trip: bool = False,
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
+            fallback: Callable[[Any], Any] | None = None,
             serialize_as_any: bool = False,
         ) -> str:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -364,11 +368,13 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             return super().json(  # type: ignore[reportDeprecated]
                 indent=indent,
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
@@ -780,6 +786,9 @@ elif not TYPE_CHECKING: # TODO: condition is weird
     def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
         return RootModel[type_]  # type: ignore

+    def TypeAdapter(*_args: Any, **_kwargs: Any) -> Any:
+        raise RuntimeError("attempted to use TypeAdapter in pydantic v1")
+

 class FinalRequestOptionsInput(TypedDict, total=False):
     method: Required[str]
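As context for the `fallback` addition (not part of the diff): under Pydantic v2 the argument is forwarded to the serializer as a last-resort converter for values it cannot handle, while the v1 code path above rejects it. A rough sketch of the semantics with plain Pydantic, assuming a version new enough (roughly v2.11+) for `model_dump_json` to accept `fallback`; the `Event` model is made up.

```py
import pydantic


class Event(pydantic.BaseModel):
    name: str
    payload: object  # Pydantic has no serializer for arbitrary objects


evt = Event(name="ping", payload=object())

# Without `fallback`, JSON serialization of the unknown payload fails;
# with it, the callable supplies a last-resort representation instead.
print(evt.model_dump_json(fallback=lambda value: repr(value)))
```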
anthropic/_version.py CHANGED
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

 __title__ = "anthropic"
-__version__ = "0.67.0"  # x-release-please-version
+__version__ = "0.68.0"  # x-release-please-version
anthropic/lib/tools/__init__.py ADDED
@@ -0,0 +1,20 @@
+from ._beta_runner import BetaToolRunner, BetaAsyncToolRunner, BetaStreamingToolRunner, BetaAsyncStreamingToolRunner
+from ._beta_functions import (
+    BetaFunctionTool,
+    BetaAsyncFunctionTool,
+    BetaFunctionToolResultType,
+    beta_tool,
+    beta_async_tool,
+)
+
+__all__ = [
+    "beta_tool",
+    "beta_async_tool",
+    "BetaFunctionTool",
+    "BetaAsyncFunctionTool",
+    "BetaToolRunner",
+    "BetaAsyncStreamingToolRunner",
+    "BetaStreamingToolRunner",
+    "BetaAsyncToolRunner",
+    "BetaFunctionToolResultType",
+]
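The re-exported classes above are also useful for type annotations in user code. A small illustrative sketch (the `describe` helper is hypothetical, not part of the package):

```py
from anthropic.lib.tools import BetaFunctionTool


def describe(tool: BetaFunctionTool) -> str:
    # to_dict() is the payload (name/description/input_schema) sent to the API
    params = tool.to_dict()
    return f"{params['name']}: {params['description']}"
```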
anthropic/lib/tools/_beta_functions.py ADDED
@@ -0,0 +1,289 @@
+from __future__ import annotations
+
+import logging
+from typing import Any, Union, Generic, TypeVar, Callable, Iterable, Coroutine, cast, overload
+from inspect import iscoroutinefunction
+from typing_extensions import TypeAlias, override
+
+import pydantic
+import docstring_parser
+from pydantic import BaseModel
+
+from ... import _compat
+from ..._utils import is_dict
+from ..._compat import cached_property
+from ..._models import TypeAdapter
+from ..._utils._utils import CallableT
+from ...types.tool_param import ToolParam, InputSchema
+from ...types.beta.beta_tool_result_block_param import Content as BetaContent
+
+log = logging.getLogger(__name__)
+
+BetaFunctionToolResultType: TypeAlias = Union[str, Iterable[BetaContent]]
+
+Function = Callable[..., BetaFunctionToolResultType]
+FunctionT = TypeVar("FunctionT", bound=Function)
+
+AsyncFunction = Callable[..., Coroutine[Any, Any, BetaFunctionToolResultType]]
+AsyncFunctionT = TypeVar("AsyncFunctionT", bound=AsyncFunction)
+
+
+class BaseFunctionTool(Generic[CallableT]):
+    func: CallableT
+    """The function this tool is wrapping"""
+
+    name: str
+    """The name of the tool that will be sent to the API"""
+
+    description: str
+
+    input_schema: InputSchema
+
+    def __init__(
+        self,
+        func: CallableT,
+        *,
+        name: str | None = None,
+        description: str | None = None,
+        input_schema: InputSchema | type[BaseModel] | None = None,
+    ) -> None:
+        if _compat.PYDANTIC_V1:
+            raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+        self.func = func
+        self._func_with_validate = pydantic.validate_call(func)
+        self.name = name or func.__name__
+
+        self.description = description or self._get_description_from_docstring()
+
+        if input_schema is not None:
+            if isinstance(input_schema, type):
+                self.input_schema: InputSchema = input_schema.model_json_schema()
+            else:
+                self.input_schema = input_schema
+        else:
+            self.input_schema = self._create_schema_from_function()
+
+    @property
+    def __call__(self) -> CallableT:
+        return self.func
+
+    def to_dict(self) -> ToolParam:
+        return {
+            "name": self.name,
+            "description": self.description,
+            "input_schema": self.input_schema,
+        }
+
+    @cached_property
+    def _parsed_docstring(self) -> docstring_parser.Docstring:
+        return docstring_parser.parse(self.func.__doc__ or "")
+
+    def _get_description_from_docstring(self) -> str:
+        """Extract description from parsed docstring."""
+        if self._parsed_docstring.short_description:
+            description = self._parsed_docstring.short_description
+            if self._parsed_docstring.long_description:
+                description += f"\n\n{self._parsed_docstring.long_description}"
+            return description
+        return ""
+
+    def _create_schema_from_function(self) -> InputSchema:
+        """Create JSON schema from function signature using pydantic."""
+
+        from pydantic_core import CoreSchema
+        from pydantic.json_schema import JsonSchemaValue, GenerateJsonSchema
+        from pydantic_core.core_schema import ArgumentsParameter
+
+        class CustomGenerateJsonSchema(GenerateJsonSchema):
+            def __init__(self, *, func: Callable[..., Any], parsed_docstring: Any) -> None:
+                super().__init__()
+                self._func = func
+                self._parsed_docstring = parsed_docstring
+
+            def __call__(self, *_args: Any, **_kwds: Any) -> "CustomGenerateJsonSchema":  # noqa: ARG002
+                return self
+
+            @override
+            def kw_arguments_schema(
+                self,
+                arguments: "list[ArgumentsParameter]",
+                var_kwargs_schema: CoreSchema | None,
+            ) -> JsonSchemaValue:
+                schema = super().kw_arguments_schema(arguments, var_kwargs_schema)
+                if schema.get("type") != "object":
+                    return schema
+
+                properties = schema.get("properties")
+                if not properties or not is_dict(properties):
+                    return schema
+
+                # Add parameter descriptions from docstring
+                for param in self._parsed_docstring.params:
+                    prop_schema = properties.get(param.arg_name)
+                    if not prop_schema or not is_dict(prop_schema):
+                        continue
+
+                    if param.description and "description" not in prop_schema:
+                        prop_schema["description"] = param.description
+
+                return schema
+
+        schema_generator = CustomGenerateJsonSchema(func=self.func, parsed_docstring=self._parsed_docstring)
+        return self._adapter.json_schema(schema_generator=schema_generator)  # type: ignore
+
+    @cached_property
+    def _adapter(self) -> TypeAdapter[Any]:
+        return TypeAdapter(self._func_with_validate)
+
+
+class BetaFunctionTool(BaseFunctionTool[FunctionT]):
+    def call(self, input: object) -> BetaFunctionToolResultType:
+        if iscoroutinefunction(self.func):
+            raise RuntimeError("Cannot call a coroutine function synchronously. Use `@async_tool` instead.")
+
+        if not is_dict(input):
+            raise TypeError(f"Input must be a dictionary, got {type(input).__name__}")
+
+        try:
+            return self._func_with_validate(**cast(Any, input))
+        except pydantic.ValidationError as e:
+            raise ValueError(f"Invalid arguments for function {self.name}") from e
+
+
+class BetaAsyncFunctionTool(BaseFunctionTool[AsyncFunctionT]):
+    async def call(self, input: object) -> BetaFunctionToolResultType:
+        if not iscoroutinefunction(self.func):
+            raise RuntimeError("Cannot call a synchronous function asynchronously. Use `@tool` instead.")
+
+        if not is_dict(input):
+            raise TypeError(f"Input must be a dictionary, got {type(input).__name__}")
+
+        try:
+            return await self._func_with_validate(**cast(Any, input))
+        except pydantic.ValidationError as e:
+            raise ValueError(f"Invalid arguments for function {self.name}") from e
+
+
+@overload
+def beta_tool(func: FunctionT) -> BetaFunctionTool[FunctionT]: ...
+
+
+@overload
+def beta_tool(
+    func: FunctionT,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaFunctionTool[FunctionT]: ...
+
+
+@overload
+def beta_tool(
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> Callable[[FunctionT], BetaFunctionTool[FunctionT]]: ...
+
+
+def beta_tool(
+    func: FunctionT | None = None,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaFunctionTool[FunctionT] | Callable[[FunctionT], BetaFunctionTool[FunctionT]]:
+    """Create a FunctionTool from a function with automatic schema inference.
+
+    Can be used as a decorator with or without parentheses:
+
+        @function_tool
+        def my_func(x: int) -> str: ...
+
+        @function_tool()
+        def my_func(x: int) -> str: ...
+
+        @function_tool(name="custom_name")
+        def my_func(x: int) -> str: ...
+    """
+    if _compat.PYDANTIC_V1:
+        raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+    if func is not None:
+        # @beta_tool called without parentheses
+        return BetaFunctionTool(func=func, name=name, description=description, input_schema=input_schema)
+
+    # @beta_tool()
+    def decorator(func: FunctionT) -> BetaFunctionTool[FunctionT]:
+        return BetaFunctionTool(func=func, name=name, description=description, input_schema=input_schema)
+
+    return decorator
+
+
+@overload
+def beta_async_tool(func: AsyncFunctionT) -> BetaAsyncFunctionTool[AsyncFunctionT]: ...
+
+
+@overload
+def beta_async_tool(
+    func: AsyncFunctionT,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaAsyncFunctionTool[AsyncFunctionT]: ...
+
+
+@overload
+def beta_async_tool(
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> Callable[[AsyncFunctionT], BetaAsyncFunctionTool[AsyncFunctionT]]: ...
+
+
+def beta_async_tool(
+    func: AsyncFunctionT | None = None,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaAsyncFunctionTool[AsyncFunctionT] | Callable[[AsyncFunctionT], BetaAsyncFunctionTool[AsyncFunctionT]]:
+    """Create an AsyncFunctionTool from a function with automatic schema inference.
+
+    Can be used as a decorator with or without parentheses:
+
+        @async_tool
+        async def my_func(x: int) -> str: ...
+
+        @async_tool()
+        async def my_func(x: int) -> str: ...
+
+        @async_tool(name="custom_name")
+        async def my_func(x: int) -> str: ...
+    """
+    if _compat.PYDANTIC_V1:
+        raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+    if func is not None:
+        # @beta_async_tool called without parentheses
+        return BetaAsyncFunctionTool(
+            func=func,
+            name=name,
+            description=description,
+            input_schema=input_schema,
+        )
+
+    # @beta_async_tool()
+    def decorator(func: AsyncFunctionT) -> BetaAsyncFunctionTool[AsyncFunctionT]:
+        return BetaAsyncFunctionTool(
+            func=func,
+            name=name,
+            description=description,
+            input_schema=input_schema,
+        )
+
+    return decorator
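To make the decorator behaviour above concrete, here is an illustrative sketch (not taken from the package; assumes Pydantic v2 is installed): the input schema is inferred from the signature plus the docstring, and `.call()` validates the raw input dict via `pydantic.validate_call` before invoking the function.

```py
import json

from anthropic import beta_tool


@beta_tool
def multiply(x: int, y: int) -> str:
    """Multiply two integers.

    Args:
        x: The first factor.
        y: The second factor.
    """
    return str(x * y)


# The generated JSON schema carries the per-parameter descriptions that
# CustomGenerateJsonSchema pulls out of the docstring.
print(json.dumps(multiply.to_dict()["input_schema"], indent=2))

# .call() validates the input dict first; a bad payload raises ValueError
# instead of reaching the function body.
print(multiply.call({"x": 6, "y": 7}))  # -> "42"
```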
anthropic/lib/tools/_beta_runner.py ADDED
@@ -0,0 +1,405 @@
+from __future__ import annotations
+
+import logging
+from abc import ABC, abstractmethod
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    List,
+    Union,
+    Generic,
+    TypeVar,
+    Callable,
+    Iterable,
+    Iterator,
+    Coroutine,
+    AsyncIterator,
+)
+from typing_extensions import TypedDict, override
+
+import httpx
+
+from ..._types import Body, Query, Headers, NotGiven
+from ..._utils import consume_sync_iterator, consume_async_iterator
+from ...types.beta import BetaMessage, BetaContentBlock, BetaMessageParam
+from ._beta_functions import BetaFunctionTool, BetaAsyncFunctionTool
+from ..streaming._beta_messages import BetaMessageStream, BetaAsyncMessageStream
+from ...types.beta.message_create_params import MessageCreateParamsBase
+from ...types.beta.beta_tool_result_block_param import BetaToolResultBlockParam
+
+if TYPE_CHECKING:
+    from ..._client import Anthropic, AsyncAnthropic
+
+
+AnyFunctionToolT = TypeVar("AnyFunctionToolT", bound=Union[BetaFunctionTool[Any], BetaAsyncFunctionTool[Any]])
+RunnerItemT = TypeVar("RunnerItemT")
+
+log = logging.getLogger(__name__)
+
+
+class RequestOptions(TypedDict, total=False):
+    extra_headers: Headers | None
+    extra_query: Query | None
+    extra_body: Body | None
+    timeout: float | httpx.Timeout | None | NotGiven
+
+
+class BaseToolRunner(Generic[AnyFunctionToolT]):
+    def __init__(
+        self,
+        *,
+        params: MessageCreateParamsBase,
+        options: RequestOptions,
+        tools: Iterable[AnyFunctionToolT],
+        max_iterations: int | None = None,
+    ) -> None:
+        self._tools_by_name = {tool.name: tool for tool in tools}
+        self._params: MessageCreateParamsBase = {
+            **params,
+            "messages": [message for message in params["messages"]],
+        }
+        self._options = options
+        self._messages_modified = False
+        self._cached_tool_call_response: BetaMessageParam | None = None
+        self._max_iterations = max_iterations
+        self._iteration_count = 0
+
+    def set_messages_params(
+        self, params: MessageCreateParamsBase | Callable[[MessageCreateParamsBase], MessageCreateParamsBase]
+    ) -> None:
+        """
+        Update the parameters for the next API call. This invalidates any cached tool responses.
+
+        Args:
+            params (MessageCreateParamsBase | Callable): Either new parameters or a function to mutate existing parameters
+        """
+        if callable(params):
+            params = params(self._params)
+        self._params = params
+
+    def append_messages(self, *messages: BetaMessageParam | BetaMessage) -> None:
+        """Add one or more messages to the conversation history.
+
+        This invalidates the cached tool response, i.e. if tools were already called, then they will
+        be called again on the next loop iteration.
+        """
+        message_params: List[BetaMessageParam] = [
+            {"role": message.role, "content": message.content} if isinstance(message, BetaMessage) else message
+            for message in messages
+        ]
+        self._messages_modified = True
+        self.set_messages_params(lambda params: {**params, "messages": [*self._params["messages"], *message_params]})
+        self._cached_tool_call_response = None
+
+    def _should_stop(self) -> bool:
+        if self._max_iterations is not None and self._iteration_count >= self._max_iterations:
+            return True
+        return False
+
+
+class BaseSyncToolRunner(BaseToolRunner[BetaFunctionTool[Any]], Generic[RunnerItemT], ABC):
+    def __init__(
+        self,
+        *,
+        params: MessageCreateParamsBase,
+        options: RequestOptions,
+        tools: Iterable[BetaFunctionTool[Any]],
+        client: Anthropic,
+        max_iterations: int | None = None,
+    ) -> None:
+        super().__init__(params=params, options=options, tools=tools, max_iterations=max_iterations)
+        self._client = client
+        self._iterator = self.__run__()
+        self._last_message: Callable[[], BetaMessage] | BetaMessage | None = None
+
+    def __next__(self) -> RunnerItemT:
+        return self._iterator.__next__()
+
+    def __iter__(self) -> Iterator[RunnerItemT]:
+        for item in self._iterator:
+            yield item
+
+    @abstractmethod
+    def __run__(self) -> Iterator[RunnerItemT]:
+        raise NotImplementedError()
+
+    def until_done(self) -> BetaMessage:
+        """
+        Consumes the tool runner stream and returns the last message if it has not been consumed yet.
+        If it has, it simply returns the last message.
+        """
+        consume_sync_iterator(self)
+        last_message = self._get_last_message()
+        assert last_message is not None
+        return last_message
+
+    def generate_tool_call_response(self) -> BetaMessageParam | None:
+        """Generate a MessageParam by calling tool functions with any tool use blocks from the last message.
+
+        Note the tool call response is cached, repeated calls to this method will return the same response.
+
+        None can be returned if no tool call was applicable.
+        """
+        if self._cached_tool_call_response is not None:
+            log.debug("Returning cached tool call response.")
+            return self._cached_tool_call_response
+        response = self._generate_tool_call_response()
+        self._cached_tool_call_response = response
+        return response
+
+
+    def _generate_tool_call_response(self) -> BetaMessageParam | None:
+        content = self._get_last_assistant_message_content()
+        if not content:
+            return None
+
+        tool_use_blocks = [block for block in content if block.type == "tool_use"]
+        if not tool_use_blocks:
+            return None
+
+        results: list[BetaToolResultBlockParam] = []
+
+        for tool_use in tool_use_blocks:
+            tool = self._tools_by_name.get(tool_use.name)
+            if tool is None:
+                results.append(
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": tool_use.id,
+                        "content": f"Error: Tool '{tool_use.name}' not found",
+                        "is_error": True,
+                    }
+                )
+                continue
+
+            try:
+                result = tool.call(tool_use.input)
+                results.append({"type": "tool_result", "tool_use_id": tool_use.id, "content": result})
+            except Exception as exc:
+                log.exception(f"Error occurred while calling tool: {tool.name}", exc_info=exc)
+                results.append(
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": tool_use.id,
+                        "content": repr(exc),
+                        "is_error": True,
+                    }
+                )
+
+        return {"role": "user", "content": results}
+
+    def _get_last_message(self) -> BetaMessage | None:
+        if callable(self._last_message):
+            return self._last_message()
+        return self._last_message
+
+    def _get_last_assistant_message_content(self) -> list[BetaContentBlock] | None:
+        last_message = self._get_last_message()
+        if last_message is None or last_message.role != "assistant" or not last_message.content:
+            return None
+
+        return last_message.content
+
+
+class BetaToolRunner(BaseSyncToolRunner[BetaMessage]):
+    @override
+    def __run__(self) -> Iterator[BetaMessage]:
+        self._last_message = message = self._client.beta.messages.create(**self._params, **self._options)
+        yield message
+        self._iteration_count += 1
+
+        while not self._should_stop():
+            response = self.generate_tool_call_response()
+            if response is None:
+                log.debug("Tool call was not requested, exiting from tool runner loop.")
+                return
+
+            if not self._messages_modified:
+                self.append_messages(message, response)
+
+            self._iteration_count += 1
+            self._messages_modified = False
+            self._cached_tool_call_response = None
+            self._last_message = message = self._client.beta.messages.create(**self._params, **self._options)
+            yield message
+
+
+class BetaStreamingToolRunner(BaseSyncToolRunner[BetaMessageStream]):
+    @override
+    def __run__(self) -> Iterator[BetaMessageStream]:
+        with self._client.beta.messages.stream(**self._params, **self._options) as stream:
+            self._last_message = stream.get_final_message
+            yield stream
+            message = stream.get_final_message()
+        self._iteration_count += 1
+
+        while not self._should_stop():
+            response = self.generate_tool_call_response()
+            if response is None:
+                log.debug("Tool call was not requested, exiting from tool runner loop.")
+                return
+
+            if not self._messages_modified:
+                self.append_messages(message, response)
+            self._iteration_count += 1
+            self._messages_modified = False
+
+            with self._client.beta.messages.stream(**self._params, **self._options) as stream:
+                self._cached_tool_call_response = None
+                self._last_message = stream.get_final_message
+                yield stream
+                message = stream.get_final_message()
+
+
+class BaseAsyncToolRunner(BaseToolRunner[BetaAsyncFunctionTool[Any]], Generic[RunnerItemT], ABC):
+    def __init__(
+        self,
+        *,
+        params: MessageCreateParamsBase,
+        options: RequestOptions,
+        tools: Iterable[BetaAsyncFunctionTool[Any]],
+        client: AsyncAnthropic,
+        max_iterations: int | None = None,
+    ) -> None:
+        super().__init__(params=params, options=options, tools=tools, max_iterations=max_iterations)
+        self._client = client
+        self._iterator = self.__run__()
+        self._last_message: Callable[[], Coroutine[None, None, BetaMessage]] | BetaMessage | None = None
+
+    async def __anext__(self) -> RunnerItemT:
+        return await self._iterator.__anext__()
+
+    async def __aiter__(self) -> AsyncIterator[RunnerItemT]:
+        async for item in self._iterator:
+            yield item
+
+    @abstractmethod
+    async def __run__(self) -> AsyncIterator[RunnerItemT]:
+        raise NotImplementedError()
+        yield  # type: ignore[unreachable]
+
+    async def until_done(self) -> BetaMessage:
+        """
+        Consumes the tool runner stream and returns the last message if it has not been consumed yet.
+        If it has, it simply returns the last message.
+        """
+        await consume_async_iterator(self)
+        last_message = await self._get_last_message()
+        assert last_message is not None
+        return last_message
+
+    async def generate_tool_call_response(self) -> BetaMessageParam | None:
+        """Generate a MessageParam by calling tool functions with any tool use blocks from the last message.
+
+        Note the tool call response is cached, repeated calls to this method will return the same response.
+
+        None can be returned if no tool call was applicable.
+        """
+        if self._cached_tool_call_response is not None:
+            log.debug("Returning cached tool call response.")
+            return self._cached_tool_call_response
+
+        response = await self._generate_tool_call_response()
+        self._cached_tool_call_response = response
+        return response
+
+    async def _get_last_message(self) -> BetaMessage | None:
+        if callable(self._last_message):
+            return await self._last_message()
+        return self._last_message
+
+    async def _get_last_assistant_message_content(self) -> list[BetaContentBlock] | None:
+        last_message = await self._get_last_message()
+        if last_message is None or last_message.role != "assistant" or not last_message.content:
+            return None
+
+        return last_message.content
+
+    async def _generate_tool_call_response(self) -> BetaMessageParam | None:
+        content = await self._get_last_assistant_message_content()
+        if not content:
+            return None
+
+        tool_use_blocks = [block for block in content if block.type == "tool_use"]
+        if not tool_use_blocks:
+            return None
+
+        results: list[BetaToolResultBlockParam] = []
+
+        for tool_use in tool_use_blocks:
+            tool = self._tools_by_name.get(tool_use.name)
+            if tool is None:
+                results.append(
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": tool_use.id,
+                        "content": f"Error: Tool '{tool_use.name}' not found",
+                        "is_error": True,
+                    }
+                )
+                continue
+
+            try:
+                result = await tool.call(tool_use.input)
+                results.append({"type": "tool_result", "tool_use_id": tool_use.id, "content": result})
+            except Exception as exc:
+                log.exception(f"Error occurred while calling tool: {tool.name}", exc_info=exc)
+                results.append(
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": tool_use.id,
+                        "content": repr(exc),
+                        "is_error": True,
+                    }
+                )
+
+        return {"role": "user", "content": results}
+
+
+class BetaAsyncToolRunner(BaseAsyncToolRunner[BetaMessage]):
+    @override
+    async def __run__(self) -> AsyncIterator[BetaMessage]:
+        self._last_message = message = await self._client.beta.messages.create(**self._params, **self._options)
+        yield message
+        self._iteration_count += 1
+
+        while not self._should_stop():
+            response = await self.generate_tool_call_response()
+            if response is None:
+                log.debug("Tool call was not requested, exiting from tool runner loop.")
+                return
+
+            if not self._messages_modified:
+                self.append_messages(message, response)
+            self._iteration_count += 1
+            self._messages_modified = False
+            self._cached_tool_call_response = None
+            self._last_message = message = await self._client.beta.messages.create(**self._params, **self._options)
+            yield message
+
+
+class BetaAsyncStreamingToolRunner(BaseAsyncToolRunner[BetaAsyncMessageStream]):
+    @override
+    async def __run__(self) -> AsyncIterator[BetaAsyncMessageStream]:
+        async with self._client.beta.messages.stream(**self._params, **self._options) as stream:
+            self._last_message = stream.get_final_message
+            yield stream
+            message = await stream.get_final_message()
+        self._iteration_count += 1
+
+        while not self._should_stop():
+            response = await self.generate_tool_call_response()
+            if response is None:
+                log.debug("Tool call was not requested, exiting from tool runner loop.")
+                return
+
+            if not self._messages_modified:
+                self.append_messages(message, response)
+            self._iteration_count += 1
+            self._messages_modified = False
+
+            async with self._client.beta.messages.stream(**self._params, **self._options) as stream:
+                self._last_message = stream.get_final_message
+                self._cached_tool_call_response = None
+                yield stream
+                message = await stream.get_final_message()
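An illustrative end-to-end sketch of the async runner defined above (not part of the diff; the tool body is fake and the model name simply mirrors the README example):

```py
import asyncio

from anthropic import AsyncAnthropic, beta_async_tool

client = AsyncAnthropic()


@beta_async_tool
async def get_weather(location: str) -> str:
    """Look up the weather for a given city.

    Args:
        location: The city and state, e.g. San Francisco, CA
    """
    return '{"temperature": "68°F", "condition": "Sunny"}'


async def main() -> None:
    runner = client.beta.messages.tool_runner(
        max_tokens=1024,
        model="claude-sonnet-4-20250514",
        tools=[get_weather],
        messages=[{"role": "user", "content": "What is the weather in SF?"}],
        max_iterations=5,  # cap the request/tool-call loop
    )
    # until_done() drains the runner, executing tool calls between API
    # requests, and returns the final assistant message.
    final = await runner.until_done()
    print(final.content)


asyncio.run(main())
```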
anthropic/resources/beta/messages/messages.py CHANGED
@@ -3,7 +3,7 @@
 from __future__ import annotations

 import warnings
-from typing import List, Union, Iterable, Optional
+from typing import TYPE_CHECKING, Any, List, Union, Iterable, Optional, cast
 from functools import partial
 from itertools import chain
 from typing_extensions import Literal, overload
@@ -24,6 +24,14 @@ from ...._utils import is_given, required_args, maybe_transform, strip_not_given
 from ...._compat import cached_property
 from ...._resource import SyncAPIResource, AsyncAPIResource
 from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
+from ....lib.tools import (
+    BetaToolRunner,
+    BetaFunctionTool,
+    BetaAsyncToolRunner,
+    BetaAsyncFunctionTool,
+    BetaStreamingToolRunner,
+    BetaAsyncStreamingToolRunner,
+)
 from ...._constants import DEFAULT_TIMEOUT, MODEL_NONSTREAMING_TOKENS
 from ...._streaming import Stream, AsyncStream
 from ....types.beta import (
@@ -47,6 +55,9 @@ from ....types.beta.beta_thinking_config_param import BetaThinkingConfigParam
 from ....types.beta.beta_raw_message_stream_event import BetaRawMessageStreamEvent
 from ....types.beta.beta_request_mcp_server_url_definition_param import BetaRequestMCPServerURLDefinitionParam

+if TYPE_CHECKING:
+    from ...._client import Anthropic, AsyncAnthropic
+
 __all__ = ["Messages", "AsyncMessages"]


@@ -982,6 +993,185 @@ class Messages(SyncAPIResource):
             stream_cls=Stream[BetaRawMessageStreamEvent],
         )

+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaFunctionTool[Any]],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        stream: Literal[False] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaToolRunner: ...
+
+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaFunctionTool[Any]],
+        stream: Literal[True],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaStreamingToolRunner: ...
+
+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaFunctionTool[Any]],
+        stream: bool,
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaStreamingToolRunner | BetaToolRunner: ...
+
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaFunctionTool[Any]],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        stream: bool | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaToolRunner | BetaStreamingToolRunner:
+        """Create a Message stream"""
+        if model in DEPRECATED_MODELS:
+            warnings.warn(
+                f"The model '{model}' is deprecated and will reach end-of-life on {DEPRECATED_MODELS[model]}.\nPlease migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.",
+                DeprecationWarning,
+                stacklevel=3,
+            )
+
+        extra_headers = {
+            "X-Stainless-Helper": "beta.messages.tool_runner",
+            **strip_not_given({"anthropic-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}),
+            **(extra_headers or {}),
+        }
+
+        params = cast(
+            message_create_params.MessageCreateParamsNonStreaming,
+            {
+                "max_tokens": max_tokens,
+                "messages": messages,
+                "model": model,
+                "container": container,
+                "mcp_servers": mcp_servers,
+                "metadata": metadata,
+                "service_tier": service_tier,
+                "stop_sequences": stop_sequences,
+                "system": system,
+                "temperature": temperature,
+                "thinking": thinking,
+                "tool_choice": tool_choice,
+                "tools": [tool.to_dict() for tool in tools],
+                "top_k": top_k,
+                "top_p": top_p,
+            },
+        )
+
+        if stream:
+            return BetaStreamingToolRunner(
+                tools=tools,
+                params=params,
+                options={
+                    "extra_headers": extra_headers,
+                    "extra_query": extra_query,
+                    "extra_body": extra_body,
+                    "timeout": timeout,
+                },
+                client=cast("Anthropic", self._client),
+                max_iterations=max_iterations if is_given(max_iterations) else None,
+            )
+        return BetaToolRunner(
+            tools=tools,
+            params=params,
+            options={
+                "extra_headers": extra_headers,
+                "extra_query": extra_query,
+                "extra_body": extra_body,
+                "timeout": timeout,
+            },
+            client=cast("Anthropic", self._client),
+            max_iterations=max_iterations if is_given(max_iterations) else None,
+        )
+
     def stream(
         self,
         *,
@@ -2223,6 +2413,185 @@ class AsyncMessages(AsyncAPIResource):
             stream_cls=AsyncStream[BetaRawMessageStreamEvent],
         )

+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaAsyncFunctionTool[Any]],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        stream: Literal[False] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaAsyncToolRunner: ...
+
+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaAsyncFunctionTool[Any]],
+        stream: Literal[True],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaAsyncStreamingToolRunner: ...
+
+    @overload
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaAsyncFunctionTool[Any]],
+        stream: bool,
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaAsyncStreamingToolRunner | BetaAsyncToolRunner: ...
+
+    def tool_runner(
+        self,
+        *,
+        max_tokens: int,
+        messages: Iterable[BetaMessageParam],
+        model: ModelParam,
+        tools: Iterable[BetaAsyncFunctionTool[Any]],
+        max_iterations: int | NotGiven = NOT_GIVEN,
+        container: Optional[str] | NotGiven = NOT_GIVEN,
+        mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | NotGiven = NOT_GIVEN,
+        metadata: BetaMetadataParam | NotGiven = NOT_GIVEN,
+        service_tier: Literal["auto", "standard_only"] | NotGiven = NOT_GIVEN,
+        stop_sequences: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
+        stream: Literal[True] | Literal[False] | NotGiven = False,
+        system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+        temperature: float | NotGiven = NOT_GIVEN,
+        top_k: int | NotGiven = NOT_GIVEN,
+        top_p: float | NotGiven = NOT_GIVEN,
+        thinking: BetaThinkingConfigParam | NotGiven = NOT_GIVEN,
+        tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+        betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> BetaAsyncToolRunner | BetaAsyncStreamingToolRunner:
+        """Create a Message stream"""
+        if model in DEPRECATED_MODELS:
+            warnings.warn(
+                f"The model '{model}' is deprecated and will reach end-of-life on {DEPRECATED_MODELS[model]}.\nPlease migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.",
+                DeprecationWarning,
+                stacklevel=3,
+            )
+
+        extra_headers = {
+            "X-Stainless-Helper": "beta.messages.tool_runner",
+            **strip_not_given({"anthropic-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}),
+            **(extra_headers or {}),
+        }
+
+        params = cast(
+            message_create_params.MessageCreateParamsBase,
+            {
+                "max_tokens": max_tokens,
+                "messages": messages,
+                "model": model,
+                "container": container,
+                "mcp_servers": mcp_servers,
+                "metadata": metadata,
+                "service_tier": service_tier,
+                "stop_sequences": stop_sequences,
+                "system": system,
+                "temperature": temperature,
+                "thinking": thinking,
+                "tool_choice": tool_choice,
+                "tools": [tool.to_dict() for tool in tools],
+                "top_k": top_k,
+                "top_p": top_p,
+            },
+        )
+
+        if stream:
+            return BetaAsyncStreamingToolRunner(
+                tools=tools,
+                params=params,
+                options={
+                    "extra_headers": extra_headers,
+                    "extra_query": extra_query,
+                    "extra_body": extra_body,
+                    "timeout": timeout,
+                },
+                client=cast("AsyncAnthropic", self._client),
+                max_iterations=max_iterations if is_given(max_iterations) else None,
+            )
+        return BetaAsyncToolRunner(
+            tools=tools,
+            params=params,
+            options={
+                "extra_headers": extra_headers,
+                "extra_query": extra_query,
+                "extra_body": extra_body,
+                "timeout": timeout,
+            },
+            client=cast("AsyncAnthropic", self._client),
+            max_iterations=max_iterations if is_given(max_iterations) else None,
+        )
+
     def stream(
         self,
         *,
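For the `stream=True` overloads above, each iteration of the runner yields a message stream rather than a completed message. An illustrative sketch (not part of the diff); `text_stream` is the SDK's existing streaming-helper accessor.

```py
from anthropic import Anthropic, beta_tool

client = Anthropic()


@beta_tool
def count_letters(word: str, letter: str) -> str:
    """Count how many times a letter appears in a word.

    Args:
        word: The word to inspect.
        letter: A single character to count.
    """
    return str(word.count(letter))


runner = client.beta.messages.tool_runner(
    max_tokens=1024,
    model="claude-sonnet-4-20250514",
    tools=[count_letters],
    messages=[{"role": "user", "content": "How many r's are in strawberry?"}],
    stream=True,  # each iteration yields a BetaMessageStream
)
for stream in runner:
    # text_stream yields the text deltas for this turn as they arrive
    for text in stream.text_stream:
        print(text, end="", flush=True)
    print()
```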
anthropic-0.68.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: anthropic
-Version: 0.67.0
+Version: 0.68.0
 Summary: The official Python library for the anthropic API
 Project-URL: Homepage, https://github.com/anthropics/anthropic-sdk-python
 Project-URL: Repository, https://github.com/anthropics/anthropic-sdk-python
@@ -24,6 +24,7 @@ Classifier: Typing :: Typed
 Requires-Python: >=3.8
 Requires-Dist: anyio<5,>=3.5.0
 Requires-Dist: distro<2,>=1.7.0
+Requires-Dist: docstring-parser<1,>=0.15
 Requires-Dist: httpx<1,>=0.25.0
 Requires-Dist: jiter<1,>=0.4.0
 Requires-Dist: pydantic<3,>=1.9.0
@@ -208,6 +209,55 @@ async for event in stream:
     print(event.type)
 ```

+### Tool helpers
+
+This library provides helper functions for defining and running tools as pure python functions, for example:
+
+```py
+import json
+import rich
+from typing_extensions import Literal
+from anthropic import Anthropic, beta_tool
+
+client = Anthropic()
+
+
+@beta_tool
+def get_weather(location: str) -> str:
+    """Lookup the weather for a given city in either celsius or fahrenheit
+
+    Args:
+        location: The city and state, e.g. San Francisco, CA
+    Returns:
+        A dictionary containing the location, temperature, and weather condition.
+    """
+    # Here you would typically make an API call to a weather service
+    # For demonstration, we return a mock response
+    return json.dumps(
+        {
+            "location": location,
+            "temperature": "68°F",
+            "condition": "Sunny",
+        }
+    )
+
+
+runner = client.beta.messages.tool_runner(
+    max_tokens=1024,
+    model="claude-sonnet-4-20250514",
+    tools=[get_weather],
+    messages=[
+        {"role": "user", "content": "What is the weather in SF?"},
+    ],
+)
+for message in runner:
+    rich.print(message)
+```
+
+On every iteration, an API request will be made, if Claude wants to call one of the given tools then it will be automatically called, and the result will be returned directly to the model in the next iteration.
+
+For more information see the [full docs](https://github.com/anthropics/anthropic-sdk-python/tree/main/tools.md).
+
 ### Streaming Helpers

 This library provides several conveniences for streaming messages, for example:
anthropic-0.68.0.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-anthropic/__init__.py,sha256=_3qHjlaUTyCm_xLr3HAcWvxMuKwKJtVRR1TkwU9WEYE,2845
+anthropic/__init__.py,sha256=6JJg_MfwoX2NEIS05VHtFvK6O5obE1HllHeRfel6DB0,2935
 anthropic/_base_client.py,sha256=VdxbcECVlP8hn1oS4vFBrU87W0a1iEw0eq0NpX8Q7mI,72849
 anthropic/_client.py,sha256=kZlulmKAcSG7WdzYCUdXFFfATn5ZP1PO7gHQbqAe2Dc,22827
 anthropic/_compat.py,sha256=DQBVORjFb33zch24jzkhM14msvnzY7mmSmgDLaVFUM8,6562
@@ -6,13 +6,13 @@ anthropic/_constants.py,sha256=wADeUqY3lsseF0L6jIen-PexfQ06FOtf2dVESXDM828,885
 anthropic/_exceptions.py,sha256=bkSqVWxtRdRb31H7MIvtxfh5mo_Xf7Ib3nPTOmAOmGs,4073
 anthropic/_files.py,sha256=_Ux6v6nAsxK4e_4efdt1DiIOZ0hGmlR2ZKKcVfJIfGU,3623
 anthropic/_legacy_response.py,sha256=QsroQ_9LHI8tSoPEvbIXXB44SvLJXaXQX7khjZpnqfE,17235
-anthropic/_models.py,sha256=BDZv65jMBcFXe8SBhN2RoHGPJiNlIq3Dk89qUDcWcYM,31397
+anthropic/_models.py,sha256=wUXeougIoFGSKQr_XIcmamSHeupO6R7ZaQnFZC-hEl8,31957
 anthropic/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
 anthropic/_resource.py,sha256=FYEOzfhB-XWTR2gyTmQuuFoecRiVXxe_SpjZlQQGytU,1080
 anthropic/_response.py,sha256=1Y7-OrGn1lOwvZ_SmMlwT9Nb2i9A1RYw2Q4-F1cwPSU,30542
 anthropic/_streaming.py,sha256=vn8K5KgfO3Bv9NE8nwHIQEjEhkQeVE6YMnGqiJlCgqE,14023
 anthropic/_types.py,sha256=PwAzwWTKp9b08FTllhWjQPX_-8CLXWh6U-9atpUYHxg,7398
-anthropic/_version.py,sha256=ZnlGfhFJ_n808nDMKeLmAYv5q8IX6Lc079zE4qSOyeA,162
+anthropic/_version.py,sha256=XBCqVoiIx9RiTN5BgCAlMHcs9_p4oh39-eXsN2hTYCw,162
 anthropic/pagination.py,sha256=hW6DOtNbwwQrNQ8wn4PJj7WB2y_37szSDQeUBnunQ40,2202
 anthropic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 anthropic/_decoders/jsonl.py,sha256=KDLw-Frjo7gRup5qDp_BWkXIZ-mFZU5vFDz0WBhEKcs,3510
@@ -46,6 +46,9 @@ anthropic/lib/streaming/_beta_messages.py,sha256=swDeDwTbhx33G_YjwTkusbij6CerLuW
 anthropic/lib/streaming/_beta_types.py,sha256=fny8XN85afEG6of84YuaScr3U8UeMCJxqyfuTePHNbM,2131
 anthropic/lib/streaming/_messages.py,sha256=OSV9sjb8MLThSywEFXQV9OchcNXAE2KxDacVpJbkNRM,16958
 anthropic/lib/streaming/_types.py,sha256=CrR4948IWgUF7L9O0ase2QwbpiQ1JeiYXrRyVi74-Bw,2086
+anthropic/lib/tools/__init__.py,sha256=ui3gBw2-vQsl_4pKjf3O5kfUYpGATjQ6H4MPLopy0h4,525
+anthropic/lib/tools/_beta_functions.py,sha256=eje6jXzDZa0azRevFF6TP_YQBo7FX3DvY4wRT5nUv7c,9738
+anthropic/lib/tools/_beta_runner.py,sha256=Lcf1bltqrLXj6niPqPaFYCaFFRWKqRHbfINtbCAqjvY,15470
 anthropic/lib/vertex/__init__.py,sha256=A8vuK1qVPtmKr1_LQgPuDRVA6I4xm_ye2aPdAa4yGsI,102
 anthropic/lib/vertex/_auth.py,sha256=Kyt_hbUc-DPlkvds4__OLR8FLPpoDas6bXhZTECxO3Y,1644
 anthropic/lib/vertex/_beta.py,sha256=8kXsUUIGstf6dZfiZtm6s9OWEueuSgra8dPvkaUacy4,3323
@@ -60,7 +63,7 @@ anthropic/resources/beta/files.py,sha256=q4V7iBnUDflYrG31oNPqY9ycXTF5oryMMBmrR7i
 anthropic/resources/beta/models.py,sha256=OsXRcsEHoNuj1VjhbFLeOYgE35DnUO8m4hTPF6zcs0Q,12594
 anthropic/resources/beta/messages/__init__.py,sha256=7ZO4hB7hPBhXQja7gMzkwLXQVDlyap4JsihpA0UKZjk,849
 anthropic/resources/beta/messages/batches.py,sha256=6sbFpFCCRC-qN37-5n7lO3BzGXKGVaXhPv9y7Daj0-0,35990
-anthropic/resources/beta/messages/messages.py,sha256=OlRVKosYcpFW-zmrrZcwDRpKNI50GuB9pnahEEBx_YU,115891
+anthropic/resources/beta/messages/messages.py,sha256=lgtY2_Pvgca5IXEYz3OhwSB7GPnnwCsv5Cbe38m_dSQ,133367
 anthropic/resources/messages/__init__.py,sha256=iOSBh4D7NTXqe7RNhw9HZCiFmJvDfIgVFnjaF7r27YU,897
 anthropic/resources/messages/batches.py,sha256=w_bNgg_NV4rFQkDeixJtRokimPIT3OVpimr8D8_7v5Y,28590
 anthropic/resources/messages/messages.py,sha256=PBhZL3iuG-A0WhNcawR3pRtgL8fR8qSmAYsx48Mksi8,109831
@@ -362,7 +365,7 @@ anthropic/types/shared/not_found_error.py,sha256=R6OsCvAmsf_SB2TwoX6E63o049qZMaA
 anthropic/types/shared/overloaded_error.py,sha256=PlyhHt3wmzcnynSfkWbfP4XkLoWsPa9B39V3CyAdgx8,282
 anthropic/types/shared/permission_error.py,sha256=nuyxtLXOiEkYEbFRXiAWjxU6XtdyjkAaXQ2NgMB3pjw,282
 anthropic/types/shared/rate_limit_error.py,sha256=eYULATjXa6KKdqeBauest7RzuN-bhGsY5BWwH9eYv4c,280
-anthropic-0.67.0.dist-info/METADATA,sha256=rgahUDxvf6Juuztdsbt6-_WOx85mKGJ-j5H_fB_THOU,27053
-anthropic-0.67.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
-anthropic-0.67.0.dist-info/licenses/LICENSE,sha256=i_lphP-Lz65-SMrnalKeiiUxe6ngKr9_08xk_flWV6Y,1056
-anthropic-0.67.0.dist-info/RECORD,,
+anthropic-0.68.0.dist-info/METADATA,sha256=v27Ud_PHScKrCxi_FuUQLPA98LOh21l1vzQxP50CEgQ,28528
+anthropic-0.68.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+anthropic-0.68.0.dist-info/licenses/LICENSE,sha256=i_lphP-Lz65-SMrnalKeiiUxe6ngKr9_08xk_flWV6Y,1056
+anthropic-0.68.0.dist-info/RECORD,,