anthropic 0.66.0__py3-none-any.whl → 0.68.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. anthropic/__init__.py +3 -0
  2. anthropic/_base_client.py +3 -3
  3. anthropic/_compat.py +48 -48
  4. anthropic/_models.py +54 -45
  5. anthropic/_utils/__init__.py +8 -2
  6. anthropic/_utils/_compat.py +45 -0
  7. anthropic/_utils/_datetime_parse.py +136 -0
  8. anthropic/_utils/_transform.py +5 -1
  9. anthropic/_utils/_typing.py +1 -1
  10. anthropic/_utils/_utils.py +0 -1
  11. anthropic/_version.py +1 -1
  12. anthropic/lib/tools/__init__.py +20 -0
  13. anthropic/lib/tools/_beta_functions.py +289 -0
  14. anthropic/lib/tools/_beta_runner.py +405 -0
  15. anthropic/resources/beta/messages/messages.py +370 -1
  16. anthropic/types/beta/__init__.py +14 -0
  17. anthropic/types/beta/beta_base64_pdf_source.py +15 -0
  18. anthropic/types/beta/beta_citation_config.py +9 -0
  19. anthropic/types/beta/beta_content_block.py +2 -0
  20. anthropic/types/beta/beta_content_block_param.py +4 -0
  21. anthropic/types/beta/beta_document_block.py +26 -0
  22. anthropic/types/beta/beta_plain_text_source.py +15 -0
  23. anthropic/types/beta/beta_raw_content_block_start_event.py +2 -0
  24. anthropic/types/beta/beta_request_document_block_param.py +1 -1
  25. anthropic/types/beta/beta_server_tool_usage.py +3 -0
  26. anthropic/types/beta/beta_server_tool_use_block.py +1 -1
  27. anthropic/types/beta/beta_server_tool_use_block_param.py +3 -1
  28. anthropic/types/beta/beta_tool_union_param.py +2 -0
  29. anthropic/types/beta/beta_web_fetch_block.py +21 -0
  30. anthropic/types/beta/beta_web_fetch_block_param.py +22 -0
  31. anthropic/types/beta/beta_web_fetch_tool_20250910_param.py +46 -0
  32. anthropic/types/beta/beta_web_fetch_tool_result_block.py +20 -0
  33. anthropic/types/beta/beta_web_fetch_tool_result_block_param.py +25 -0
  34. anthropic/types/beta/beta_web_fetch_tool_result_error_block.py +14 -0
  35. anthropic/types/beta/beta_web_fetch_tool_result_error_block_param.py +15 -0
  36. anthropic/types/beta/beta_web_fetch_tool_result_error_code.py +16 -0
  37. anthropic/types/beta/message_count_tokens_params.py +2 -0
  38. anthropic/types/document_block_param.py +1 -1
  39. {anthropic-0.66.0.dist-info → anthropic-0.68.0.dist-info}/METADATA +51 -1
  40. {anthropic-0.66.0.dist-info → anthropic-0.68.0.dist-info}/RECORD +42 -25
  41. {anthropic-0.66.0.dist-info → anthropic-0.68.0.dist-info}/WHEEL +0 -0
  42. {anthropic-0.66.0.dist-info → anthropic-0.68.0.dist-info}/licenses/LICENSE +0 -0
anthropic/_utils/_datetime_parse.py ADDED
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = _get_numeric(value, "datetime")
+    if number is not None:
+        return _from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+
+    match = datetime_re.match(value)
+    if match is None:
+        raise ValueError("invalid datetime format")
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"))
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    return datetime(**kw_)  # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = _get_numeric(value, "date")
+    if number is not None:
+        return _from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+    match = date_re.match(value)
+    if match is None:
+        raise ValueError("invalid date format")
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise ValueError("invalid date format") from None
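The new module vendors the Pydantic v1 date/datetime parsing helpers that `_utils` previously imported from the compat layer. A rough usage sketch (hedged: `_datetime_parse` is a private module, so the import path below is an implementation detail and may change):

    # Illustrative only -- these helpers are internal to the SDK.
    from anthropic._utils._datetime_parse import parse_date, parse_datetime

    # ISO 8601 strings, with or without a UTC offset
    parse_datetime("2024-05-01T12:30:00Z")       # tzinfo=timezone.utc
    parse_datetime("2024-05-01 12:30:00+05:30")  # fixed +05:30 offset

    # Unix timestamps: values above MS_WATERSHED are treated as milliseconds
    parse_datetime(1714566600)     # seconds since the epoch
    parse_datetime(1714566600000)  # milliseconds, scaled down to seconds

    parse_date("2024-05-01")       # datetime.date(2024, 5, 1)

    parse_datetime("not-a-date")   # raises ValueError("invalid datetime format")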
anthropic/_utils/_transform.py CHANGED
@@ -19,6 +19,7 @@ from ._utils import (
     is_sequence,
 )
 from .._files import is_base64_file_input
+from ._compat import get_origin, is_typeddict
 from ._typing import (
     is_list_type,
     is_union_type,
@@ -29,7 +30,6 @@ from ._typing import (
     is_annotated_type,
     strip_annotated_type,
 )
-from .._compat import get_origin, model_dump, is_typeddict
 
 _T = TypeVar("_T")
 
@@ -169,6 +169,8 @@ def _transform_recursive(
 
     Defaults to the same value as the `annotation` argument.
     """
+    from .._compat import model_dump
+
     if inner_type is None:
         inner_type = annotation
 
@@ -333,6 +335,8 @@ async def _async_transform_recursive(
 
     Defaults to the same value as the `annotation` argument.
    """
+    from .._compat import model_dump
+
     if inner_type is None:
         inner_type = annotation
 
anthropic/_utils/_typing.py CHANGED
@@ -15,7 +15,7 @@ from typing_extensions import (
 
 from ._utils import lru_cache
 from .._types import InheritsGeneric
-from .._compat import is_union as _is_union
+from ._compat import is_union as _is_union
 
 
 def is_annotated_type(typ: type) -> bool:
anthropic/_utils/_utils.py CHANGED
@@ -22,7 +22,6 @@ from typing_extensions import TypeGuard
 import sniffio
 
 from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
-from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
 
 _T = TypeVar("_T")
 _TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
anthropic/_version.py CHANGED
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "anthropic"
-__version__ = "0.66.0"  # x-release-please-version
+__version__ = "0.68.0"  # x-release-please-version
anthropic/lib/tools/__init__.py ADDED
@@ -0,0 +1,20 @@
+from ._beta_runner import BetaToolRunner, BetaAsyncToolRunner, BetaStreamingToolRunner, BetaAsyncStreamingToolRunner
+from ._beta_functions import (
+    BetaFunctionTool,
+    BetaAsyncFunctionTool,
+    BetaFunctionToolResultType,
+    beta_tool,
+    beta_async_tool,
+)
+
+__all__ = [
+    "beta_tool",
+    "beta_async_tool",
+    "BetaFunctionTool",
+    "BetaAsyncFunctionTool",
+    "BetaToolRunner",
+    "BetaAsyncStreamingToolRunner",
+    "BetaStreamingToolRunner",
+    "BetaAsyncToolRunner",
+    "BetaFunctionToolResultType",
+]
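The new `anthropic.lib.tools` package re-exports the beta tool helpers defined in `_beta_functions.py` and `_beta_runner.py` below. A minimal sketch of the decorator surface, using a hypothetical `get_weather` function (assumes anthropic 0.68.0 with its `docstring_parser` dependency and Pydantic v2 installed):

    from anthropic.lib.tools import beta_tool

    @beta_tool
    def get_weather(location: str) -> str:
        """Look up the current weather for a location.

        Args:
            location: City and state, e.g. "San Francisco, CA"
        """
        return f"It is sunny in {location}"

    # The decorator wraps the function in a BetaFunctionTool; to_dict() builds the
    # ToolParam payload (name, description, JSON input_schema) to send to the API,
    # with the schema and descriptions inferred from the signature and docstring.
    print(get_weather.to_dict())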
anthropic/lib/tools/_beta_functions.py ADDED
@@ -0,0 +1,289 @@
+from __future__ import annotations
+
+import logging
+from typing import Any, Union, Generic, TypeVar, Callable, Iterable, Coroutine, cast, overload
+from inspect import iscoroutinefunction
+from typing_extensions import TypeAlias, override
+
+import pydantic
+import docstring_parser
+from pydantic import BaseModel
+
+from ... import _compat
+from ..._utils import is_dict
+from ..._compat import cached_property
+from ..._models import TypeAdapter
+from ..._utils._utils import CallableT
+from ...types.tool_param import ToolParam, InputSchema
+from ...types.beta.beta_tool_result_block_param import Content as BetaContent
+
+log = logging.getLogger(__name__)
+
+BetaFunctionToolResultType: TypeAlias = Union[str, Iterable[BetaContent]]
+
+Function = Callable[..., BetaFunctionToolResultType]
+FunctionT = TypeVar("FunctionT", bound=Function)
+
+AsyncFunction = Callable[..., Coroutine[Any, Any, BetaFunctionToolResultType]]
+AsyncFunctionT = TypeVar("AsyncFunctionT", bound=AsyncFunction)
+
+
+class BaseFunctionTool(Generic[CallableT]):
+    func: CallableT
+    """The function this tool is wrapping"""
+
+    name: str
+    """The name of the tool that will be sent to the API"""
+
+    description: str
+
+    input_schema: InputSchema
+
+    def __init__(
+        self,
+        func: CallableT,
+        *,
+        name: str | None = None,
+        description: str | None = None,
+        input_schema: InputSchema | type[BaseModel] | None = None,
+    ) -> None:
+        if _compat.PYDANTIC_V1:
+            raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+        self.func = func
+        self._func_with_validate = pydantic.validate_call(func)
+        self.name = name or func.__name__
+
+        self.description = description or self._get_description_from_docstring()
+
+        if input_schema is not None:
+            if isinstance(input_schema, type):
+                self.input_schema: InputSchema = input_schema.model_json_schema()
+            else:
+                self.input_schema = input_schema
+        else:
+            self.input_schema = self._create_schema_from_function()
+
+    @property
+    def __call__(self) -> CallableT:
+        return self.func
+
+    def to_dict(self) -> ToolParam:
+        return {
+            "name": self.name,
+            "description": self.description,
+            "input_schema": self.input_schema,
+        }
+
+    @cached_property
+    def _parsed_docstring(self) -> docstring_parser.Docstring:
+        return docstring_parser.parse(self.func.__doc__ or "")
+
+    def _get_description_from_docstring(self) -> str:
+        """Extract description from parsed docstring."""
+        if self._parsed_docstring.short_description:
+            description = self._parsed_docstring.short_description
+            if self._parsed_docstring.long_description:
+                description += f"\n\n{self._parsed_docstring.long_description}"
+            return description
+        return ""
+
+    def _create_schema_from_function(self) -> InputSchema:
+        """Create JSON schema from function signature using pydantic."""
+
+        from pydantic_core import CoreSchema
+        from pydantic.json_schema import JsonSchemaValue, GenerateJsonSchema
+        from pydantic_core.core_schema import ArgumentsParameter
+
+        class CustomGenerateJsonSchema(GenerateJsonSchema):
+            def __init__(self, *, func: Callable[..., Any], parsed_docstring: Any) -> None:
+                super().__init__()
+                self._func = func
+                self._parsed_docstring = parsed_docstring
+
+            def __call__(self, *_args: Any, **_kwds: Any) -> "CustomGenerateJsonSchema":  # noqa: ARG002
+                return self
+
+            @override
+            def kw_arguments_schema(
+                self,
+                arguments: "list[ArgumentsParameter]",
+                var_kwargs_schema: CoreSchema | None,
+            ) -> JsonSchemaValue:
+                schema = super().kw_arguments_schema(arguments, var_kwargs_schema)
+                if schema.get("type") != "object":
+                    return schema
+
+                properties = schema.get("properties")
+                if not properties or not is_dict(properties):
+                    return schema
+
+                # Add parameter descriptions from docstring
+                for param in self._parsed_docstring.params:
+                    prop_schema = properties.get(param.arg_name)
+                    if not prop_schema or not is_dict(prop_schema):
+                        continue
+
+                    if param.description and "description" not in prop_schema:
+                        prop_schema["description"] = param.description
+
+                return schema
+
+        schema_generator = CustomGenerateJsonSchema(func=self.func, parsed_docstring=self._parsed_docstring)
+        return self._adapter.json_schema(schema_generator=schema_generator)  # type: ignore
+
+    @cached_property
+    def _adapter(self) -> TypeAdapter[Any]:
+        return TypeAdapter(self._func_with_validate)
+
+
+class BetaFunctionTool(BaseFunctionTool[FunctionT]):
+    def call(self, input: object) -> BetaFunctionToolResultType:
+        if iscoroutinefunction(self.func):
+            raise RuntimeError("Cannot call a coroutine function synchronously. Use `@async_tool` instead.")
+
+        if not is_dict(input):
+            raise TypeError(f"Input must be a dictionary, got {type(input).__name__}")
+
+        try:
+            return self._func_with_validate(**cast(Any, input))
+        except pydantic.ValidationError as e:
+            raise ValueError(f"Invalid arguments for function {self.name}") from e
+
+
+class BetaAsyncFunctionTool(BaseFunctionTool[AsyncFunctionT]):
+    async def call(self, input: object) -> BetaFunctionToolResultType:
+        if not iscoroutinefunction(self.func):
+            raise RuntimeError("Cannot call a synchronous function asynchronously. Use `@tool` instead.")
+
+        if not is_dict(input):
+            raise TypeError(f"Input must be a dictionary, got {type(input).__name__}")
+
+        try:
+            return await self._func_with_validate(**cast(Any, input))
+        except pydantic.ValidationError as e:
+            raise ValueError(f"Invalid arguments for function {self.name}") from e
+
+
+@overload
+def beta_tool(func: FunctionT) -> BetaFunctionTool[FunctionT]: ...
+
+
+@overload
+def beta_tool(
+    func: FunctionT,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaFunctionTool[FunctionT]: ...
+
+
+@overload
+def beta_tool(
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> Callable[[FunctionT], BetaFunctionTool[FunctionT]]: ...
+
+
+def beta_tool(
+    func: FunctionT | None = None,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaFunctionTool[FunctionT] | Callable[[FunctionT], BetaFunctionTool[FunctionT]]:
+    """Create a FunctionTool from a function with automatic schema inference.
+
+    Can be used as a decorator with or without parentheses:
+
+        @function_tool
+        def my_func(x: int) -> str: ...
+
+        @function_tool()
+        def my_func(x: int) -> str: ...
+
+        @function_tool(name="custom_name")
+        def my_func(x: int) -> str: ...
+    """
+    if _compat.PYDANTIC_V1:
+        raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+    if func is not None:
+        # @beta_tool called without parentheses
+        return BetaFunctionTool(func=func, name=name, description=description, input_schema=input_schema)
+
+    # @beta_tool()
+    def decorator(func: FunctionT) -> BetaFunctionTool[FunctionT]:
+        return BetaFunctionTool(func=func, name=name, description=description, input_schema=input_schema)
+
+    return decorator
+
+
+@overload
+def beta_async_tool(func: AsyncFunctionT) -> BetaAsyncFunctionTool[AsyncFunctionT]: ...
+
+
+@overload
+def beta_async_tool(
+    func: AsyncFunctionT,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaAsyncFunctionTool[AsyncFunctionT]: ...
+
+
+@overload
+def beta_async_tool(
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> Callable[[AsyncFunctionT], BetaAsyncFunctionTool[AsyncFunctionT]]: ...
+
+
+def beta_async_tool(
+    func: AsyncFunctionT | None = None,
+    *,
+    name: str | None = None,
+    description: str | None = None,
+    input_schema: InputSchema | type[BaseModel] | None = None,
+) -> BetaAsyncFunctionTool[AsyncFunctionT] | Callable[[AsyncFunctionT], BetaAsyncFunctionTool[AsyncFunctionT]]:
+    """Create an AsyncFunctionTool from a function with automatic schema inference.
+
+    Can be used as a decorator with or without parentheses:
+
+        @async_tool
+        async def my_func(x: int) -> str: ...
+
+        @async_tool()
+        async def my_func(x: int) -> str: ...
+
+        @async_tool(name="custom_name")
+        async def my_func(x: int) -> str: ...
+    """
+    if _compat.PYDANTIC_V1:
+        raise RuntimeError("Tool functions are only supported with Pydantic v2")
+
+    if func is not None:
+        # @beta_async_tool called without parentheses
+        return BetaAsyncFunctionTool(
+            func=func,
+            name=name,
+            description=description,
+            input_schema=input_schema,
+        )
+
+    # @beta_async_tool()
+    def decorator(func: AsyncFunctionT) -> BetaAsyncFunctionTool[AsyncFunctionT]:
+        return BetaAsyncFunctionTool(
+            func=func,
+            name=name,
+            description=description,
+            input_schema=input_schema,
+        )
+
+    return decorator
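`call()` (and its async counterpart) is the piece that dispatches a model-supplied input dict back into the wrapped function, with `pydantic.validate_call` handling argument validation. A hedged sketch, reusing the hypothetical `get_weather` tool from the earlier example:

    # Dispatch a tool_use input dict to the wrapped function.
    result = get_weather.call({"location": "San Francisco, CA"})
    # -> "It is sunny in San Francisco, CA"

    # Non-dict input raises TypeError; arguments that fail pydantic validation are
    # re-raised as ValueError("Invalid arguments for function get_weather").

    # Async functions use the parallel decorator and their call() is awaited.
    from anthropic.lib.tools import beta_async_tool

    @beta_async_tool
    async def lookup_user(user_id: str) -> str:
        """Fetch a user record by id (hypothetical example)."""
        return f"user {user_id}"

    # result = await lookup_user.call({"user_id": "abc"})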