anthropic 0.72.0__py3-none-any.whl → 0.73.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
- anthropic/__init__.py +2 -0
- anthropic/_compat.py +6 -0
- anthropic/_models.py +50 -16
- anthropic/_streaming.py +4 -6
- anthropic/_utils/_sync.py +3 -31
- anthropic/_utils/_transform.py +1 -1
- anthropic/_utils/_utils.py +1 -1
- anthropic/_version.py +1 -1
- anthropic/lib/_parse/_response.py +44 -0
- anthropic/lib/_parse/_transform.py +167 -0
- anthropic/lib/streaming/__init__.py +14 -4
- anthropic/lib/streaming/_beta_messages.py +82 -43
- anthropic/lib/streaming/_beta_types.py +21 -13
- anthropic/lib/tools/_beta_runner.py +102 -101
- anthropic/resources/beta/messages/batches.py +12 -12
- anthropic/resources/beta/messages/messages.py +365 -29
- anthropic/resources/messages/batches.py +12 -12
- anthropic/resources/messages/messages.py +14 -8
- anthropic/types/beta/__init__.py +1 -0
- anthropic/types/beta/beta_code_execution_tool_20250522_param.py +2 -0
- anthropic/types/beta/beta_code_execution_tool_20250825_param.py +2 -0
- anthropic/types/beta/beta_json_output_format_param.py +15 -0
- anthropic/types/beta/beta_memory_tool_20250818_param.py +2 -0
- anthropic/types/beta/beta_tool_bash_20241022_param.py +2 -0
- anthropic/types/beta/beta_tool_bash_20250124_param.py +2 -0
- anthropic/types/beta/beta_tool_computer_use_20241022_param.py +2 -0
- anthropic/types/beta/beta_tool_computer_use_20250124_param.py +2 -0
- anthropic/types/beta/beta_tool_param.py +2 -0
- anthropic/types/beta/beta_tool_text_editor_20241022_param.py +2 -0
- anthropic/types/beta/beta_tool_text_editor_20250124_param.py +2 -0
- anthropic/types/beta/beta_tool_text_editor_20250429_param.py +2 -0
- anthropic/types/beta/beta_tool_text_editor_20250728_param.py +2 -0
- anthropic/types/beta/beta_web_fetch_tool_20250910_param.py +2 -0
- anthropic/types/beta/beta_web_search_tool_20250305_param.py +2 -0
- anthropic/types/beta/message_count_tokens_params.py +4 -0
- anthropic/types/beta/message_create_params.py +24 -2
- anthropic/types/beta/messages/batch_create_params.py +8 -2
- anthropic/types/beta/parsed_beta_message.py +68 -0
- anthropic/types/messages/batch_create_params.py +0 -1
- {anthropic-0.72.0.dist-info → anthropic-0.73.0.dist-info}/METADATA +4 -5
- {anthropic-0.72.0.dist-info → anthropic-0.73.0.dist-info}/RECORD +43 -39
- {anthropic-0.72.0.dist-info → anthropic-0.73.0.dist-info}/WHEEL +0 -0
- {anthropic-0.72.0.dist-info → anthropic-0.73.0.dist-info}/licenses/LICENSE +0 -0
anthropic/__init__.py
CHANGED
@@ -44,6 +44,7 @@ from ._exceptions import (
 )
 from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
 from ._utils._logs import setup_logging as _setup_logging
+from .lib._parse._transform import transform_schema
 
 __all__ = [
     "types",
@@ -91,6 +92,7 @@ __all__ = [
     "AI_PROMPT",
     "beta_tool",
     "beta_async_tool",
+    "transform_schema",
 ]
 
 if not _t.TYPE_CHECKING:
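The 0.73.0 `__init__.py` re-exports the new `transform_schema` helper (defined in `anthropic/lib/_parse/_transform.py`, shown further down). A minimal sketch of using the new top-level export; the `Recipe` model here is illustrative, not part of the SDK:

```python
import pydantic

from anthropic import transform_schema  # newly re-exported in 0.73.0


class Recipe(pydantic.BaseModel):
    """Illustrative model, not part of the SDK."""

    name: str
    servings: int


# Accepts either a pydantic model class or a raw JSON-schema dict and
# returns a schema reshaped to the API's expectations.
schema = transform_schema(Recipe)
print(schema["type"])  # "object"
```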
anthropic/_compat.py
CHANGED
@@ -131,6 +131,12 @@ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
     return model.model_dump_json(indent=indent)
 
 
+def model_parse_json(model: type[_ModelT], data: str | bytes) -> _ModelT:
+    if PYDANTIC_V1:
+        return model.parse_raw(data)  # pyright: ignore[reportDeprecated]
+    return model.model_validate_json(data)
+
+
 def model_dump(
     model: pydantic.BaseModel,
     *,
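`model_parse_json` mirrors the rest of `_compat.py`: it dispatches to `parse_raw` on pydantic v1 and `model_validate_json` on v2. A standalone sketch of the v2 path it wraps, with a throwaway model:

```python
import pydantic


class Point(pydantic.BaseModel):
    """Throwaway model for illustration, not part of the SDK."""

    x: int
    y: int


# On pydantic v2, model_parse_json(Point, raw) reduces to this call;
# on v1 it would call Point.parse_raw(raw) instead.
raw = '{"x": 1, "y": 2}'
print(Point.model_validate_json(raw))  # x=1 y=2
```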
anthropic/_models.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import os
 import inspect
+import weakref
 from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
 from datetime import date, datetime
 from typing_extensions import (
@@ -272,15 +273,16 @@ class BaseModel(pydantic.BaseModel):
         mode: Literal["json", "python"] | str = "python",
         include: IncEx | None = None,
         exclude: IncEx | None = None,
+        context: Any | None = None,
         by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
+        exclude_computed_fields: bool = False,
         round_trip: bool = False,
         warnings: bool | Literal["none", "warn", "error"] = True,
-        context: dict[str, Any] | None = None,
-        serialize_as_any: bool = False,
         fallback: Callable[[Any], Any] | None = None,
+        serialize_as_any: bool = False,
     ) -> dict[str, Any]:
         """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
 
@@ -288,16 +290,24 @@ class BaseModel(pydantic.BaseModel):
 
         Args:
             mode: The mode in which `to_python` should run.
-                If mode is 'json', the
-                If mode is 'python', the
-            include: A
-            exclude: A
+                If mode is 'json', the output will only contain JSON serializable types.
+                If mode is 'python', the output may contain non-JSON-serializable Python objects.
+            include: A set of fields to include in the output.
+            exclude: A set of fields to exclude from the output.
+            context: Additional context to pass to the serializer.
             by_alias: Whether to use the field's alias in the dictionary key if defined.
-            exclude_unset: Whether to exclude fields that
-            exclude_defaults: Whether to exclude fields that are set to their default value
-            exclude_none: Whether to exclude fields that have a value of `None
-
-
+            exclude_unset: Whether to exclude fields that have not been explicitly set.
+            exclude_defaults: Whether to exclude fields that are set to their default value.
+            exclude_none: Whether to exclude fields that have a value of `None`.
+            exclude_computed_fields: Whether to exclude computed fields.
+                While this can be useful for round-tripping, it is usually recommended to use the dedicated
+                `round_trip` parameter instead.
+            round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
+                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
+            fallback: A function to call when an unknown value is encountered. If not provided,
+                a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
+            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
 
         Returns:
             A dictionary representation of the model.
@@ -314,6 +324,8 @@ class BaseModel(pydantic.BaseModel):
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
         if fallback is not None:
             raise ValueError("fallback is only supported in Pydantic v2")
+        if exclude_computed_fields != False:
+            raise ValueError("exclude_computed_fields is only supported in Pydantic v2")
         dumped = super().dict(  # pyright: ignore[reportDeprecated]
             include=include,
             exclude=exclude,
@@ -330,15 +342,17 @@ class BaseModel(pydantic.BaseModel):
         self,
         *,
         indent: int | None = None,
+        ensure_ascii: bool = False,
         include: IncEx | None = None,
         exclude: IncEx | None = None,
+        context: Any | None = None,
         by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
+        exclude_computed_fields: bool = False,
         round_trip: bool = False,
         warnings: bool | Literal["none", "warn", "error"] = True,
-        context: dict[str, Any] | None = None,
         fallback: Callable[[Any], Any] | None = None,
         serialize_as_any: bool = False,
     ) -> str:
@@ -370,6 +384,10 @@ class BaseModel(pydantic.BaseModel):
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
         if fallback is not None:
             raise ValueError("fallback is only supported in Pydantic v2")
+        if ensure_ascii != False:
+            raise ValueError("ensure_ascii is only supported in Pydantic v2")
+        if exclude_computed_fields != False:
+            raise ValueError("exclude_computed_fields is only supported in Pydantic v2")
         return super().json(  # type: ignore[reportDeprecated]
             indent=indent,
             include=include,
@@ -589,6 +607,9 @@ class CachedDiscriminatorType(Protocol):
     __discriminator__: DiscriminatorDetails
 
 
+DISCRIMINATOR_CACHE: weakref.WeakKeyDictionary[type, DiscriminatorDetails] = weakref.WeakKeyDictionary()
+
+
 class DiscriminatorDetails:
     field_name: str
     """The name of the discriminator field in the variant class, e.g.
@@ -631,8 +652,9 @@ class DiscriminatorDetails:
 
 
 def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None:
-
-
+    cached = DISCRIMINATOR_CACHE.get(union)
+    if cached is not None:
+        return cached
 
     discriminator_field_name: str | None = None
 
@@ -685,7 +707,7 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
         discriminator_field=discriminator_field_name,
         discriminator_alias=discriminator_alias,
     )
-
+    DISCRIMINATOR_CACHE.setdefault(union, details)
     return details
 
 
@@ -752,7 +774,7 @@ else:
 
 
 if not PYDANTIC_V1:
-    from pydantic import TypeAdapter as _TypeAdapter
+    from pydantic import TypeAdapter as _TypeAdapter, computed_field as computed_field
 
     _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
 
@@ -789,6 +811,18 @@ elif not TYPE_CHECKING:  # TODO: condition is weird
     def TypeAdapter(*_args: Any, **_kwargs: Any) -> Any:
         raise RuntimeError("attempted to use TypeAdapter in pydantic v1")
 
+    def computed_field(func: Any | None = None, /, **__: Any) -> Any:
+        def _exc_func(*_: Any, **__: Any) -> Any:
+            raise RuntimeError("attempted to use computed_field in pydantic v1")
+
+        def _dec(*_: Any, **__: Any) -> Any:
+            return _exc_func
+
+        if func is not None:
+            return _dec(func)
+        else:
+            return _dec
+
 
 class FinalRequestOptionsInput(TypedDict, total=False):
     method: Required[str]
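`_models.py` now memoizes discriminated-union metadata in a module-level `weakref.WeakKeyDictionary`, so the metadata is built once per union type without keeping that type alive forever. A self-contained sketch of the same caching pattern (the names here are illustrative, not the SDK's):

```python
import weakref


class Details:
    """Stand-in for the expensive-to-build discriminator metadata."""

    def __init__(self, label: str) -> None:
        self.label = label


# Keys are held weakly: once a type is no longer referenced elsewhere,
# its cache entry is dropped instead of pinning the type in memory.
_CACHE: "weakref.WeakKeyDictionary[type, Details]" = weakref.WeakKeyDictionary()


def build_details(cls: type) -> Details:
    cached = _CACHE.get(cls)
    if cached is not None:
        return cached
    details = Details(cls.__name__)
    _CACHE.setdefault(cls, details)
    return details


class Foo:
    pass


assert build_details(Foo) is build_details(Foo)
```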
anthropic/_streaming.py
CHANGED
@@ -113,9 +113,8 @@ class Stream(Generic[_T], metaclass=_SyncStreamMeta):
                 response=self.response,
             )
 
-        #
-
-        ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        response.close()
 
     def __enter__(self) -> Self:
         return self
@@ -231,9 +230,8 @@ class AsyncStream(Generic[_T], metaclass=_AsyncStreamMeta):
                 response=self.response,
             )
 
-        #
-
-        ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        await response.aclose()
 
     async def __aenter__(self) -> Self:
         return self
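The streams now close the underlying HTTP response explicitly once the SSE iterator finishes, instead of relying on the iterator being fully drained. From user code the recommended pattern is unchanged; a minimal sketch using the higher-level streaming helper (the model id and prompt are placeholders):

```python
from anthropic import Anthropic

client = Anthropic()

# The context manager ensures the stream, and its HTTP response,
# is closed even if iteration stops early.
with client.messages.stream(
    model="claude-sonnet-4-5",  # placeholder model id
    max_tokens=256,
    messages=[{"role": "user", "content": "Say hello"}],
) as stream:
    for text in stream.text_stream:
        print(text, end="", flush=True)
```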
anthropic/_utils/_sync.py
CHANGED
@@ -1,10 +1,8 @@
 from __future__ import annotations
 
-import sys
 import asyncio
 import functools
-import contextvars
-from typing import Any, TypeVar, Callable, Awaitable
+from typing import TypeVar, Callable, Awaitable
 from typing_extensions import ParamSpec
 
 import anyio
@@ -15,34 +13,11 @@ T_Retval = TypeVar("T_Retval")
 T_ParamSpec = ParamSpec("T_ParamSpec")
 
 
-if sys.version_info >= (3, 9):
-    _asyncio_to_thread = asyncio.to_thread
-else:
-    # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
-    # for Python 3.8 support
-    async def _asyncio_to_thread(
-        func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
-    ) -> Any:
-        """Asynchronously run function *func* in a separate thread.
-
-        Any *args and **kwargs supplied for this function are directly passed
-        to *func*. Also, the current :class:`contextvars.Context` is propagated,
-        allowing context variables from the main thread to be accessed in the
-        separate thread.
-
-        Returns a coroutine that can be awaited to get the eventual result of *func*.
-        """
-        loop = asyncio.events.get_running_loop()
-        ctx = contextvars.copy_context()
-        func_call = functools.partial(ctx.run, func, *args, **kwargs)
-        return await loop.run_in_executor(None, func_call)
-
-
 async def to_thread(
     func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
 ) -> T_Retval:
     if sniffio.current_async_library() == "asyncio":
-        return await _asyncio_to_thread(func, *args, **kwargs)
+        return await asyncio.to_thread(func, *args, **kwargs)
 
     return await anyio.to_thread.run_sync(
         functools.partial(func, *args, **kwargs),
@@ -53,10 +28,7 @@ async def to_thread(
 def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
     """
     Take a blocking function and create an async one that receives the same
-    positional and keyword arguments.
-    asyncio.to_thread to run the function in a separate thread. For python version
-    3.8, it uses locally defined copy of the asyncio.to_thread function which was
-    introduced in python 3.9.
+    positional and keyword arguments.
 
     Usage:
 
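With the Python 3.8 backport removed, `to_thread` now calls `asyncio.to_thread` (stdlib since Python 3.9) directly on the asyncio path. A self-contained illustration of what that delegates to:

```python
import time
import asyncio


def blocking_work(n: int) -> int:
    time.sleep(0.1)  # stand-in for a blocking call
    return n * 2


async def main() -> None:
    # Runs blocking_work in a worker thread without blocking the event loop.
    result = await asyncio.to_thread(blocking_work, 21)
    print(result)  # 42


asyncio.run(main())
```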
anthropic/_utils/_transform.py
CHANGED
@@ -218,7 +218,7 @@ def _transform_recursive(
         return data
 
     if isinstance(data, pydantic.BaseModel):
-        return model_dump(data, exclude_unset=True, mode="json")
+        return model_dump(data, exclude_unset=True, mode="json", exclude=getattr(data, "__api_exclude__", None))
 
     annotated_type = _get_annotated_type(annotation)
     if annotated_type is None:
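`_transform_recursive` now forwards an optional `__api_exclude__` attribute as pydantic's `exclude` argument when dumping models, so a model can mark fields that should never be sent over the wire. In plain pydantic terms, `exclude` behaves like this (the model and field names are illustrative):

```python
import pydantic


class ToolState(pydantic.BaseModel):
    """Illustrative model, not part of the SDK."""

    name: str
    local_cache_path: str  # something we would not want serialized


# Passing a set of field names drops them from the dumped payload.
state = ToolState(name="search", local_cache_path="/tmp/cache")
print(state.model_dump(mode="json", exclude={"local_cache_path"}))
# {'name': 'search'}
```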
anthropic/_utils/_utils.py
CHANGED
@@ -133,7 +133,7 @@ def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]:
 # Type safe methods for narrowing types with TypeVars.
 # The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
 # however this cause Pyright to rightfully report errors. As we know we don't
-# care about the contained types we can safely use `object` in
+# care about the contained types we can safely use `object` in its place.
 #
 # There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
 # `is_*` is for when you're dealing with an unknown input
anthropic/_version.py
CHANGED

-__version__ = "0.72.0"
+__version__ = "0.73.0"

anthropic/lib/_parse/_response.py
ADDED

@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from typing_extensions import TypeVar
+
+from ..._types import NotGiven
+from ..._models import TypeAdapter, construct_type_unchecked
+from ..._utils._utils import is_given
+from ...types.beta.beta_message import BetaMessage
+from ...types.beta.parsed_beta_message import ParsedBetaMessage, ParsedBetaTextBlock, ParsedBetaContentBlock
+
+ResponseFormatT = TypeVar("ResponseFormatT", default=None)
+
+
+def parse_text(text: str, output_format: ResponseFormatT | NotGiven) -> ResponseFormatT | None:
+    if is_given(output_format):
+        adapted_type: TypeAdapter[ResponseFormatT] = TypeAdapter(output_format)
+        return adapted_type.validate_json(text)
+    return None
+
+
+def parse_response(
+    *,
+    output_format: ResponseFormatT | NotGiven,
+    response: BetaMessage,
+) -> ParsedBetaMessage[ResponseFormatT]:
+    content_list: list[ParsedBetaContentBlock[ResponseFormatT]] = []
+    for content in response.content:
+        if content.type == "text":
+            content_list.append(
+                construct_type_unchecked(
+                    type_=ParsedBetaTextBlock[ResponseFormatT],
+                    value={**content.to_dict(), "parsed_output": parse_text(content.text, output_format)},
+                )
+            )
+        else:
+            content_list.append(content)  # type: ignore
+
+    return construct_type_unchecked(
+        type_=ParsedBetaMessage[ResponseFormatT],
+        value={
+            **response.to_dict(),
+            "content": content_list,
+        },
+    )
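`parse_text` is a thin wrapper over pydantic's `TypeAdapter.validate_json`: whatever type is supplied as `output_format` is used to validate the JSON carried in a text block. A standalone sketch of that validation step (the `Weather` model is illustrative):

```python
import pydantic


class Weather(pydantic.BaseModel):
    """Illustrative output format, not part of the SDK."""

    city: str
    temperature_c: float


# Essentially what parse_text does with a text block's contents
# when an output_format is given.
adapter = pydantic.TypeAdapter(Weather)
parsed = adapter.validate_json('{"city": "Lisbon", "temperature_c": 21.5}')
print(parsed.city, parsed.temperature_c)
```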
anthropic/lib/_parse/_transform.py
ADDED

@@ -0,0 +1,167 @@
+from __future__ import annotations
+
+import inspect
+from typing import Any, Literal, Optional, cast
+from typing_extensions import assert_never
+
+import pydantic
+
+from ..._utils import is_list
+
+SupportedTypes = Literal[
+    "object",
+    "array",
+    "string",
+    "integer",
+    "number",
+    "boolean",
+    "null",
+]
+
+SupportedStringFormats = {
+    "date-time",
+    "time",
+    "date",
+    "duration",
+    "email",
+    "hostname",
+    "uri",
+    "ipv4",
+    "ipv6",
+    "uuid",
+}
+
+
+def get_transformed_string(
+    schema: dict[str, Any],
+) -> dict[str, Any]:
+    """Transforms a JSON schema of type string to ensure it conforms to the API's expectations.
+
+    Specifically, it ensures that if the schema is of type "string" and does not already
+    specify a "format", it sets the format to "text".
+
+    Args:
+        schema: The original JSON schema.
+
+    Returns:
+        The transformed JSON schema.
+    """
+    if schema.get("type") == "string" and "format" not in schema:
+        schema["format"] = "text"
+    return schema
+
+
+def transform_schema(
+    json_schema: type[pydantic.BaseModel] | dict[str, Any],
+) -> dict[str, Any]:
+    """
+    Transforms a JSON schema to ensure it conforms to the API's expectations.
+
+    Args:
+        json_schema (Dict[str, Any]): The original JSON schema.
+
+    Returns:
+        The transformed JSON schema.
+
+    Examples:
+        >>> transform_schema(
+        ...     {
+        ...         "type": "integer",
+        ...         "minimum": 1,
+        ...         "maximum": 10,
+        ...         "description": "A number",
+        ...     }
+        ... )
+        {'type': 'integer', 'description': 'A number\n\n{minimum: 1, maximum: 10}'}
+    """
+    if inspect.isclass(json_schema) and issubclass(json_schema, pydantic.BaseModel):  # pyright: ignore[reportUnnecessaryIsInstance]
+        json_schema = json_schema.model_json_schema()
+
+    strict_schema: dict[str, Any] = {}
+    json_schema = {**json_schema}
+
+    ref = json_schema.pop("$ref", None)
+    if ref is not None:
+        strict_schema["$ref"] = ref
+        return strict_schema
+
+    defs = json_schema.pop("$defs", None)
+    if defs is not None:
+        strict_defs: dict[str, Any] = {}
+        strict_schema["$defs"] = strict_defs
+
+        for name, schema in defs.items():
+            strict_defs[name] = transform_schema(schema)
+
+    type_: Optional[SupportedTypes] = json_schema.pop("type", None)
+    any_of = json_schema.pop("anyOf", None)
+    one_of = json_schema.pop("oneOf", None)
+    all_of = json_schema.pop("allOf", None)
+
+    if is_list(any_of):
+        strict_schema["anyOf"] = [transform_schema(cast("dict[str, Any]", variant)) for variant in any_of]
+    elif is_list(one_of):
+        strict_schema["anyOf"] = [transform_schema(cast("dict[str, Any]", variant)) for variant in one_of]
+    elif is_list(all_of):
+        strict_schema["allOf"] = [transform_schema(cast("dict[str, Any]", variant)) for variant in all_of]
+    else:
+        if type_ is None:
+            raise ValueError("Schema must have a 'type', 'anyOf', 'oneOf', or 'allOf' field.")
+
+        strict_schema["type"] = type_
+
+    description = json_schema.pop("description", None)
+    if description is not None:
+        strict_schema["description"] = description
+
+    title = json_schema.pop("title", None)
+    if title is not None:
+        strict_schema["title"] = title
+
+    if type_ == "object":
+        strict_schema["properties"] = {
+            key: transform_schema(prop_schema) for key, prop_schema in json_schema.pop("properties", {}).items()
+        }
+        json_schema.pop("additionalProperties", None)
+        strict_schema["additionalProperties"] = False
+
+        required = json_schema.pop("required", None)
+        if required is not None:
+            strict_schema["required"] = required
+
+    elif type_ == "string":
+        format = json_schema.pop("format", None)
+        if format and format in SupportedStringFormats:
+            strict_schema["format"] = format
+        elif format:
+            # add it back so its treated as an extra property and appended to the description
+            json_schema["format"] = format
+    elif type_ == "array":
+        items = json_schema.pop("items", None)
+        if items is not None:
+            strict_schema["items"] = transform_schema(items)
+
+        min_items = json_schema.pop("minItems", None)
+        if min_items is not None and min_items == 0 or min_items == 1:
+            strict_schema["minItems"] = min_items
+        elif min_items is not None:
+            # add it back so its treated as an extra property and appended to the description
+            json_schema["minItems"] = min_items
+
+    elif type_ == "boolean" or type_ == "integer" or type_ == "number" or type_ == "null" or type_ is None:
+        pass
+    else:
+        assert_never(type_)
+
+    # if there are any propes leftover then they aren't supported, so we add them to the description
+    # so that the model *might* follow them.
+    if json_schema:
+        description = strict_schema.get("description")
+        strict_schema["description"] = (
+            (description + "\n\n" if description is not None else "")
+            + "{"
+            + ", ".join(f"{key}: {value}" for key, value in json_schema.items())
+            + "}"
+        )
+
+    return strict_schema
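Following the function's own docstring example, schema keywords outside the supported set are folded into the description rather than silently dropped. An illustrative trace with a string schema carrying a `minLength` constraint:

```python
from anthropic import transform_schema

schema = {
    "type": "string",
    "minLength": 3,  # not in the supported keyword set
    "description": "A name",
}

# minLength is appended to the description so the model can still see it:
# {'type': 'string', 'description': 'A name\n\n{minLength: 3}'}
print(transform_schema(schema))
```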
anthropic/lib/streaming/__init__.py
CHANGED

@@ -1,3 +1,5 @@
+from typing_extensions import TypeAlias
+
 from ._types import (
     TextEvent as TextEvent,
     InputJsonEvent as InputJsonEvent,
@@ -12,12 +14,20 @@ from ._messages import (
     AsyncMessageStreamManager as AsyncMessageStreamManager,
 )
 from ._beta_types import (
-    BetaTextEvent as BetaTextEvent,
     BetaInputJsonEvent as BetaInputJsonEvent,
-
-
-
+    ParsedBetaTextEvent as ParsedBetaTextEvent,
+    ParsedBetaMessageStopEvent as ParsedBetaMessageStopEvent,
+    ParsedBetaMessageStreamEvent as ParsedBetaMessageStreamEvent,
+    ParsedBetaContentBlockStopEvent as ParsedBetaContentBlockStopEvent,
 )
+
+# For backwards compatibility
+BetaTextEvent: TypeAlias = ParsedBetaTextEvent
+BetaMessageStopEvent: TypeAlias = ParsedBetaMessageStopEvent[object]
+BetaMessageStreamEvent: TypeAlias = ParsedBetaMessageStreamEvent
+BetaContentBlockStopEvent: TypeAlias = ParsedBetaContentBlockStopEvent[object]
+
+
 from ._beta_messages import (
     BetaMessageStream as BetaMessageStream,
     BetaAsyncMessageStream as BetaAsyncMessageStream,