anthropic 0.66.0__py3-none-any.whl → 0.67.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anthropic/_base_client.py +3 -3
- anthropic/_compat.py +48 -48
- anthropic/_models.py +41 -41
- anthropic/_utils/__init__.py +8 -2
- anthropic/_utils/_compat.py +45 -0
- anthropic/_utils/_datetime_parse.py +136 -0
- anthropic/_utils/_transform.py +5 -1
- anthropic/_utils/_typing.py +1 -1
- anthropic/_utils/_utils.py +0 -1
- anthropic/_version.py +1 -1
- anthropic/types/beta/__init__.py +14 -0
- anthropic/types/beta/beta_base64_pdf_source.py +15 -0
- anthropic/types/beta/beta_citation_config.py +9 -0
- anthropic/types/beta/beta_content_block.py +2 -0
- anthropic/types/beta/beta_content_block_param.py +4 -0
- anthropic/types/beta/beta_document_block.py +26 -0
- anthropic/types/beta/beta_plain_text_source.py +15 -0
- anthropic/types/beta/beta_raw_content_block_start_event.py +2 -0
- anthropic/types/beta/beta_request_document_block_param.py +1 -1
- anthropic/types/beta/beta_server_tool_usage.py +3 -0
- anthropic/types/beta/beta_server_tool_use_block.py +1 -1
- anthropic/types/beta/beta_server_tool_use_block_param.py +3 -1
- anthropic/types/beta/beta_tool_union_param.py +2 -0
- anthropic/types/beta/beta_web_fetch_block.py +21 -0
- anthropic/types/beta/beta_web_fetch_block_param.py +22 -0
- anthropic/types/beta/beta_web_fetch_tool_20250910_param.py +46 -0
- anthropic/types/beta/beta_web_fetch_tool_result_block.py +20 -0
- anthropic/types/beta/beta_web_fetch_tool_result_block_param.py +25 -0
- anthropic/types/beta/beta_web_fetch_tool_result_error_block.py +14 -0
- anthropic/types/beta/beta_web_fetch_tool_result_error_block_param.py +15 -0
- anthropic/types/beta/beta_web_fetch_tool_result_error_code.py +16 -0
- anthropic/types/beta/message_count_tokens_params.py +2 -0
- anthropic/types/document_block_param.py +1 -1
- {anthropic-0.66.0.dist-info → anthropic-0.67.0.dist-info}/METADATA +1 -1
- {anthropic-0.66.0.dist-info → anthropic-0.67.0.dist-info}/RECORD +37 -23
- {anthropic-0.66.0.dist-info → anthropic-0.67.0.dist-info}/WHEEL +0 -0
- {anthropic-0.66.0.dist-info → anthropic-0.67.0.dist-info}/licenses/LICENSE +0 -0
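Most of the substantive changes below rework the SDK's internal Pydantic compatibility layer: instead of asking "is Pydantic v2 installed?", the code now derives a single PYDANTIC_V1 flag from pydantic.VERSION and treats every non-v1 release (v2 and a future v3) as the modern API. The remaining additions are new beta types for the web fetch tool plus a vendored datetime parser. The sketch below is a minimal standalone illustration of that gating pattern, not the SDK's actual module; the dump() helper name is made up for the example.

    import pydantic

    # v2 and v3 both take the "modern" branch; only 1.x is special-cased.
    PYDANTIC_V1 = pydantic.VERSION.startswith("1.")


    def dump(model: pydantic.BaseModel) -> dict:
        if PYDANTIC_V1:
            return model.dict()  # v1-only spelling, deprecated in v2+
        return model.model_dump()  # v2/v3 spelling
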
anthropic/_base_client.py
CHANGED

@@ -62,7 +62,7 @@ from ._types import (
     ModelBuilderProtocol,
 )
 from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import
+from ._compat import PYDANTIC_V1, model_copy, model_dump
 from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
 from ._response import (
     APIResponse,
@@ -237,7 +237,7 @@ class BaseSyncPage(BasePage[_T], Generic[_T]):
         model: Type[_T],
         options: FinalRequestOptions,
     ) -> None:
-        if
+        if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
            self.__pydantic_private__ = {}

        self._model = model
@@ -325,7 +325,7 @@ class BaseAsyncPage(BasePage[_T], Generic[_T]):
        client: AsyncAPIClient,
        options: FinalRequestOptions,
    ) -> None:
-        if
+        if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
            self.__pydantic_private__ = {}

        self._model = model

anthropic/_compat.py
CHANGED

@@ -12,14 +12,13 @@ from ._types import IncEx, StrBytesIntFloat
 _T = TypeVar("_T")
 _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)

-# --------------- Pydantic v2 compatibility ---------------
+# --------------- Pydantic v2, v3 compatibility ---------------

 # Pyright incorrectly reports some of our functions as overriding a method when they don't
 # pyright: reportIncompatibleMethodOverride=false

-
+PYDANTIC_V1 = pydantic.VERSION.startswith("1.")

-# v1 re-exports
 if TYPE_CHECKING:

     def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
@@ -44,90 +43,92 @@ if TYPE_CHECKING:
         ...

 else:
-
-
+    # v1 re-exports
+    if PYDANTIC_V1:
+        from pydantic.typing import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
            is_typeddict as is_typeddict,
            is_literal_type as is_literal_type,
        )
-        from pydantic.
+        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
    else:
-        from
+        from ._utils import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
+            parse_date as parse_date,
            is_typeddict as is_typeddict,
+            parse_datetime as parse_datetime,
            is_literal_type as is_literal_type,
        )
-        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime


 # refactored config
 if TYPE_CHECKING:
     from pydantic import ConfigDict as ConfigDict
 else:
-    if
-        from pydantic import ConfigDict
-    else:
+    if PYDANTIC_V1:
        # TODO: provide an error message here?
        ConfigDict = None
+    else:
+        from pydantic import ConfigDict as ConfigDict


 # renamed methods / properties
 def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
-    if
-        return model.model_validate(value)
-    else:
+    if PYDANTIC_V1:
        return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+    else:
+        return model.model_validate(value)


 def field_is_required(field: FieldInfo) -> bool:
-    if
-        return field.
-    return field.
+    if PYDANTIC_V1:
+        return field.required  # type: ignore
+    return field.is_required()


 def field_get_default(field: FieldInfo) -> Any:
     value = field.get_default()
-    if
-        from pydantic_core import PydanticUndefined
-
-        if value == PydanticUndefined:
-            return None
+    if PYDANTIC_V1:
        return value
+    from pydantic_core import PydanticUndefined
+
+    if value == PydanticUndefined:
+        return None
     return value


 def field_outer_type(field: FieldInfo) -> Any:
-    if
-        return field.
-    return field.
+    if PYDANTIC_V1:
+        return field.outer_type_  # type: ignore
+    return field.annotation


 def get_model_config(model: type[pydantic.BaseModel]) -> Any:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.__config__  # type: ignore
+    return model.model_config


 def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.__fields__  # type: ignore
+    return model.model_fields


 def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.copy(deep=deep)  # type: ignore
+    return model.model_copy(deep=deep)


 def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.json(indent=indent)  # type: ignore
+    return model.model_dump_json(indent=indent)


 def model_dump(
@@ -139,14 +140,14 @@ def model_dump(
     warnings: bool = True,
     mode: Literal["json", "python"] = "python",
 ) -> dict[str, Any]:
-    if
+    if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
        return model.model_dump(
            mode=mode,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            # warnings are not supported in Pydantic v1
-            warnings=
+            warnings=True if PYDANTIC_V1 else warnings,
        )
    return cast(
        "dict[str, Any]",
@@ -159,9 +160,9 @@ def model_dump(


 def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
+    return model.model_validate(data)


 # generic models
@@ -170,17 +171,16 @@ if TYPE_CHECKING:
     class GenericModel(pydantic.BaseModel): ...

 else:
-    if
+    if PYDANTIC_V1:
+        import pydantic.generics
+
+        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+    else:
        # there no longer needs to be a distinction in v2 but
        # we still have to create our own subclass to avoid
        # inconsistent MRO ordering errors
        class GenericModel(pydantic.BaseModel): ...

-    else:
-        import pydantic.generics
-
-        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
-

 # cached properties
 if TYPE_CHECKING:

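Every helper in _compat.py now branches on PYDANTIC_V1 rather than on a v2 check, so a future Pydantic v3 falls into the modern path, and on non-v1 installs the typing and datetime helpers are re-exported from the SDK's own ._utils package instead of pydantic.typing and pydantic.datetime_parse. As a concrete reference for what the two branches of model_json() resolve to, here is a plain-Pydantic sketch, assuming a v2-or-later install (it is not SDK code):

    import pydantic

    class Usage(pydantic.BaseModel):
        input_tokens: int
        output_tokens: int

    u = Usage(input_tokens=10, output_tokens=3)
    print(u.model_dump_json(indent=2))  # the non-v1 branch above calls this
    # On Pydantic v1 the same helper would call u.json(indent=2) instead.
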
anthropic/_models.py
CHANGED

@@ -50,7 +50,7 @@ from ._utils import (
     strip_annotated_type,
 )
 from ._compat import (
-
+    PYDANTIC_V1,
     ConfigDict,
     GenericModel as BaseGenericModel,
     get_args,
@@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol):


 class BaseModel(pydantic.BaseModel):
-    if
-        model_config: ClassVar[ConfigDict] = ConfigDict(
-            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
-        )
-    else:
+    if PYDANTIC_V1:

        @property
        @override
@@ -95,6 +91,10 @@ class BaseModel(pydantic.BaseModel):

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            extra: Any = pydantic.Extra.allow  # type: ignore
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(
+            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+        )

    if TYPE_CHECKING:
        _request_id: Optional[str] = None
@@ -231,25 +231,25 @@ class BaseModel(pydantic.BaseModel):
            if key not in model_fields:
                parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value

-                if
-                    _extra[key] = parsed
-                else:
+                if PYDANTIC_V1:
                    _fields_set.add(key)
                    fields_values[key] = parsed
+                else:
+                    _extra[key] = parsed

        object.__setattr__(m, "__dict__", fields_values)

-        if
-            # these properties are copied from Pydantic's `model_construct()` method
-            object.__setattr__(m, "__pydantic_private__", None)
-            object.__setattr__(m, "__pydantic_extra__", _extra)
-            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
-        else:
+        if PYDANTIC_V1:
            # init_private_attributes() does not exist in v2
            m._init_private_attributes()  # type: ignore

            # copied from Pydantic v1's `construct()` method
            object.__setattr__(m, "__fields_set__", _fields_set)
+        else:
+            # these properties are copied from Pydantic's `model_construct()` method
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)

        return m

@@ -259,7 +259,7 @@ class BaseModel(pydantic.BaseModel):
    # although not in practice
    model_construct = construct

-    if
+    if PYDANTIC_V1:
        # we define aliases for some of the new pydantic v2 methods so
        # that we can just document these methods without having to specify
        # a specific pydantic version as some users may not know which
@@ -379,10 +379,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
    if value is None:
        return field_get_default(field)

-    if
-        type_ = field.annotation
-    else:
+    if PYDANTIC_V1:
        type_ = cast(type, field.outer_type_)  # type: ignore
+    else:
+        type_ = field.annotation  # type: ignore

    if type_ is None:
        raise RuntimeError(f"Unexpected field type is None for {key}")
@@ -391,7 +391,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:


 def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
-    if
+    if PYDANTIC_V1:
        # TODO
        return None

@@ -644,30 +644,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
    for variant in get_args(union):
        variant = strip_annotated_type(variant)
        if is_basemodel_type(variant):
-            if
-
-                if not
+            if PYDANTIC_V1:
+                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                if not field_info:
                    continue

                # Note: if one variant defines an alias then they all should
-                discriminator_alias =
-
-                field_schema = field["schema"]
+                discriminator_alias = field_info.alias

-                if
-                    for entry in
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
                        if isinstance(entry, str):
                            mapping[entry] = variant
            else:
-
-                if not
+                field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                if not field:
                    continue

                # Note: if one variant defines an alias then they all should
-                discriminator_alias =
+                discriminator_alias = field.get("serialization_alias")

-
-
+                field_schema = field["schema"]
+
+                if field_schema["type"] == "literal":
+                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                        if isinstance(entry, str):
                            mapping[entry] = variant

@@ -726,7 +726,7 @@ def add_request_id(obj: BaseModel, request_id: str | None) -> None:
    # in Pydantic v1, using setattr like we do above causes the attribute
    # to be included when serializing the model which we don't want in this
    # case so we need to explicitly exclude it
-    if
+    if PYDANTIC_V1:
        try:
            exclude_fields = obj.__exclude_fields__  # type: ignore
        except AttributeError:
@@ -745,7 +745,7 @@ else:
        pass


-if
+if not PYDANTIC_V1:
    from pydantic import TypeAdapter as _TypeAdapter

    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -813,12 +813,12 @@ class FinalRequestOptions(pydantic.BaseModel):
    json_data: Union[Body, None] = None
    extra_json: Union[AnyMapping, None] = None

-    if
-        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
-    else:
+    if PYDANTIC_V1:

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            arbitrary_types_allowed: bool = True
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)

    def get_max_retries(self, max_retries: int) -> int:
        if isinstance(self.max_retries, NotGiven):
@@ -851,9 +851,9 @@ class FinalRequestOptions(pydantic.BaseModel):
            key: strip_not_given(value)
            for key, value in values.items()
        }
-        if
-            return super().
-        return
+        if PYDANTIC_V1:
+            return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        return super().model_construct(_fields_set, **kwargs)

    if not TYPE_CHECKING:
        # type checkers incorrectly complain about this assignment

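The construct() hunk above is the behavioural core of _models.py: when validation is skipped, unknown keys are folded into the regular field values on Pydantic v1, but on v2/v3 they are parked in __pydantic_extra__ (with __pydantic_private__ and __pydantic_fields_set__ set the way model_construct() would set them). A plain Pydantic v2 sketch of that non-v1 behaviour, using a made-up model rather than the SDK's BaseModel:

    import pydantic

    class Message(pydantic.BaseModel):
        model_config = pydantic.ConfigDict(extra="allow")
        role: str

    # No validation is run; the unknown key is kept as an "extra".
    m = Message.model_construct(role="assistant", container="abc123")
    print(m.role)                # assistant
    print(m.__pydantic_extra__)  # {'container': 'abc123'}
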
anthropic/_utils/__init__.py
CHANGED

@@ -10,7 +10,6 @@ from ._utils import (
     lru_cache as lru_cache,
     is_mapping as is_mapping,
     is_tuple_t as is_tuple_t,
-    parse_date as parse_date,
     is_iterable as is_iterable,
     is_sequence as is_sequence,
     coerce_float as coerce_float,
@@ -23,7 +22,6 @@ from ._utils import (
     coerce_boolean as coerce_boolean,
     coerce_integer as coerce_integer,
     file_from_path as file_from_path,
-    parse_datetime as parse_datetime,
     strip_not_given as strip_not_given,
     deepcopy_minimal as deepcopy_minimal,
     get_async_library as get_async_library,
@@ -32,6 +30,13 @@ from ._utils import (
     maybe_coerce_boolean as maybe_coerce_boolean,
     maybe_coerce_integer as maybe_coerce_integer,
 )
+from ._compat import (
+    get_args as get_args,
+    is_union as is_union,
+    get_origin as get_origin,
+    is_typeddict as is_typeddict,
+    is_literal_type as is_literal_type,
+)
 from ._typing import (
     is_list_type as is_list_type,
     is_union_type as is_union_type,
@@ -56,3 +61,4 @@ from ._reflection import (
     function_has_argument as function_has_argument,
     assert_signatures_in_sync as assert_signatures_in_sync,
 )
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime

anthropic/_utils/_compat.py
ADDED

@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+    return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+    return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+    if sys.version_info < (3, 10):
+        return tp is Union  # type: ignore[comparison-overlap]
+    else:
+        import types
+
+        return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+    return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+    return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    return _parse_datetime(value)

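These helpers reimplement, on top of typing_extensions, the small set of pydantic.typing utilities the SDK previously re-exported, so they remain available when Pydantic v1 is not installed. A short behaviour sketch follows; the module is internal, so the import path is shown purely for illustration:

    from typing import Literal, Union

    from anthropic._utils._compat import get_origin, is_union, is_literal_type

    print(is_literal_type(Literal["a", "b"]))     # True
    print(is_union(get_origin(Union[int, str])))  # True
    print(is_union(get_origin(int)))              # False -- plain types have no origin
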
anthropic/_utils/_datetime_parse.py
ADDED

@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = _get_numeric(value, "datetime")
+    if number is not None:
+        return _from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+
+    match = datetime_re.match(value)
+    if match is None:
+        raise ValueError("invalid datetime format")
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"))
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    return datetime(**kw_)  # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = _get_numeric(value, "date")
+    if number is not None:
+        return _from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+    match = date_re.match(value)
+    if match is None:
+        raise ValueError("invalid date format")
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise ValueError("invalid date format") from None

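This new module vendors Pydantic v1's datetime_parse logic (minus its error classes) so the SDK can parse ISO-8601-style dates and unix timestamps without importing pydantic.datetime_parse on v2/v3. A usage sketch against the functions defined above; the anthropic._utils import path is internal and shown only for illustration:

    from anthropic._utils import parse_date, parse_datetime

    print(parse_date("2025-09-10"))                     # 2025-09-10
    print(parse_datetime("2025-09-10T12:30:00Z"))       # 2025-09-10 12:30:00+00:00
    print(parse_datetime("2025-09-10 12:30:00+05:30"))  # offset preserved as +05:30
    print(parse_datetime(0))                            # 1970-01-01 00:00:00+00:00 (unix seconds)
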
anthropic/_utils/_transform.py
CHANGED

@@ -19,6 +19,7 @@ from ._utils import (
     is_sequence,
 )
 from .._files import is_base64_file_input
+from ._compat import get_origin, is_typeddict
 from ._typing import (
     is_list_type,
     is_union_type,
@@ -29,7 +30,6 @@ from ._typing import (
     is_annotated_type,
     strip_annotated_type,
 )
-from .._compat import get_origin, model_dump, is_typeddict

 _T = TypeVar("_T")

@@ -169,6 +169,8 @@ def _transform_recursive(

     Defaults to the same value as the `annotation` argument.
     """
+    from .._compat import model_dump
+
     if inner_type is None:
         inner_type = annotation

@@ -333,6 +335,8 @@ async def _async_transform_recursive(

     Defaults to the same value as the `annotation` argument.
     """
+    from .._compat import model_dump
+
     if inner_type is None:
         inner_type = annotation

anthropic/_utils/_typing.py
CHANGED
anthropic/_utils/_utils.py
CHANGED

@@ -22,7 +22,6 @@ from typing_extensions import TypeGuard
 import sniffio

 from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
-from .._compat import parse_date as parse_date, parse_datetime as parse_datetime

 _T = TypeVar("_T")
 _TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])

anthropic/_version.py
CHANGED