@planqk/planqk-service-sdk 2.2.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README-node.md +1 -0
- package/README-python.md +7 -7
- package/README.md +1 -0
- package/dist/sdk/environments.d.ts +1 -1
- package/dist/sdk/environments.js +1 -1
- package/fern/openapi/openapi.yml +1 -1
- package/notebooks/python-sdk.ipynb +33 -113
- package/package.json +1 -1
- package/planqk/service/_version.py +1 -1
- package/planqk/service/client.py +13 -12
- package/planqk/service/sdk/__init__.py +4 -30
- package/planqk/service/sdk/client.py +20 -19
- package/planqk/service/sdk/core/__init__.py +5 -0
- package/planqk/service/sdk/core/api_error.py +12 -6
- package/planqk/service/sdk/core/client_wrapper.py +12 -4
- package/planqk/service/sdk/core/datetime_utils.py +1 -3
- package/planqk/service/sdk/core/file.py +2 -5
- package/planqk/service/sdk/core/force_multipart.py +16 -0
- package/planqk/service/sdk/core/http_client.py +86 -118
- package/planqk/service/sdk/core/http_response.py +55 -0
- package/planqk/service/sdk/core/jsonable_encoder.py +1 -4
- package/planqk/service/sdk/core/pydantic_utilities.py +79 -147
- package/planqk/service/sdk/core/query_encoder.py +1 -3
- package/planqk/service/sdk/core/serialization.py +10 -10
- package/planqk/service/sdk/environment.py +1 -1
- package/planqk/service/sdk/service_api/__init__.py +4 -12
- package/planqk/service/sdk/service_api/client.py +138 -860
- package/planqk/service/sdk/service_api/raw_client.py +606 -0
- package/planqk/service/sdk/service_api/types/__init__.py +3 -7
- package/planqk/service/sdk/service_api/types/get_result_response.py +7 -11
- package/planqk/service/sdk/service_api/types/get_result_response_embedded.py +4 -6
- package/planqk/service/sdk/service_api/types/get_result_response_links.py +4 -6
- package/planqk/service/sdk/types/__init__.py +3 -11
- package/planqk/service/sdk/types/hal_link.py +3 -5
- package/planqk/service/sdk/types/service_execution.py +8 -16
- package/planqk/service/sdk/types/service_execution_status.py +1 -2
- package/pyproject.toml +1 -1
- package/src/sdk/environments.ts +1 -1
- package/uv.lock +488 -494
- package/planqk/service/sdk/errors/__init__.py +0 -15
- package/planqk/service/sdk/errors/bad_request_error.py +0 -9
- package/planqk/service/sdk/errors/forbidden_error.py +0 -9
- package/planqk/service/sdk/errors/internal_server_error.py +0 -9
- package/planqk/service/sdk/errors/not_found_error.py +0 -9
- package/planqk/service/sdk/errors/unauthorized_error.py +0 -9
- package/planqk/service/sdk/service_api/types/health_check_response.py +0 -24
- package/planqk/service/sdk/types/input_data.py +0 -5
- package/planqk/service/sdk/types/input_data_ref.py +0 -27
- package/planqk/service/sdk/types/input_params.py +0 -5
Selected diff hunks, grouped by file:

package/planqk/service/sdk/core/pydantic_utilities.py

@@ -2,98 +2,67 @@
 
 # nopycln: file
 import datetime as dt
-import typing
 from collections import defaultdict
-
-import typing_extensions
+from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast
 
 import pydantic
 
-from .datetime_utils import serialize_datetime
-from .serialization import convert_and_respect_annotation_metadata
-
 IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
 
 if IS_PYDANTIC_V2:
-
-
-    from pydantic.v1.
-
-
-    from pydantic.v1.
-
-
-    from pydantic.v1.json import (  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
-        ENCODERS_BY_TYPE as encoders_by_type,
-    )
-    from pydantic.v1.typing import (  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
-        get_args as get_args,
-    )
-    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
-        get_origin as get_origin,
-    )
-    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
-        is_literal_type as is_literal_type,
-    )
-    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
-        is_union as is_union,
-    )
-    from pydantic.v1.fields import ModelField as ModelField  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
+    from pydantic.v1.datetime_parse import parse_date as parse_date
+    from pydantic.v1.datetime_parse import parse_datetime as parse_datetime
+    from pydantic.v1.fields import ModelField as ModelField
+    from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore[attr-defined]
+    from pydantic.v1.typing import get_args as get_args
+    from pydantic.v1.typing import get_origin as get_origin
+    from pydantic.v1.typing import is_literal_type as is_literal_type
+    from pydantic.v1.typing import is_union as is_union
 else:
-    from pydantic.datetime_parse import parse_date as parse_date  # type: ignore
-    from pydantic.datetime_parse import parse_datetime as parse_datetime  # type: ignore
-    from pydantic.fields import ModelField as ModelField  # type: ignore
-    from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore
-    from pydantic.typing import get_args as get_args  # type: ignore
-    from pydantic.typing import get_origin as get_origin  # type: ignore
-    from pydantic.typing import is_literal_type as is_literal_type  # type: ignore
-    from pydantic.typing import is_union as is_union  # type: ignore
-
-# isort: on
+    from pydantic.datetime_parse import parse_date as parse_date  # type: ignore[no-redef]
+    from pydantic.datetime_parse import parse_datetime as parse_datetime  # type: ignore[no-redef]
+    from pydantic.fields import ModelField as ModelField  # type: ignore[attr-defined, no-redef]
+    from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore[no-redef]
+    from pydantic.typing import get_args as get_args  # type: ignore[no-redef]
+    from pydantic.typing import get_origin as get_origin  # type: ignore[no-redef]
+    from pydantic.typing import is_literal_type as is_literal_type  # type: ignore[no-redef]
+    from pydantic.typing import is_union as is_union  # type: ignore[no-redef]
 
+from .datetime_utils import serialize_datetime
+from .serialization import convert_and_respect_annotation_metadata
+from typing_extensions import TypeAlias
 
-T =
-Model =
+T = TypeVar("T")
+Model = TypeVar("Model", bound=pydantic.BaseModel)
 
 
-def parse_obj_as(type_:
-    dealiased_object = convert_and_respect_annotation_metadata(
-        object_=object_, annotation=type_, direction="read"
-    )
+def parse_obj_as(type_: Type[T], object_: Any) -> T:
+    dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
     if IS_PYDANTIC_V2:
-        adapter = pydantic.TypeAdapter(type_)  # type: ignore
+        adapter = pydantic.TypeAdapter(type_)  # type: ignore[attr-defined]
         return adapter.validate_python(dealiased_object)
-
-    return pydantic.parse_obj_as(type_, dealiased_object)
+    return pydantic.parse_obj_as(type_, dealiased_object)
 
 
-def to_jsonable_with_fallback(
-    obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any]
-) -> typing.Any:
+def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any:
     if IS_PYDANTIC_V2:
         from pydantic_core import to_jsonable_python
 
         return to_jsonable_python(obj, fallback=fallback_serializer)
-
-    return fallback_serializer(obj)
+    return fallback_serializer(obj)
 
 
 class UniversalBaseModel(pydantic.BaseModel):
     if IS_PYDANTIC_V2:
-        model_config:
+        model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(  # type: ignore[typeddict-unknown-key]
             # Allow fields beginning with `model_` to be used in the model
             protected_namespaces=(),
-        )
-
-        @pydantic.model_serializer(mode="
-        def serialize_model(
-
-
-            serialized = handler(self)
-            data = {
-                k: serialize_datetime(v) if isinstance(v, dt.datetime) else v
-                for k, v in serialized.items()
-            }
+        )
+
+        @pydantic.model_serializer(mode="plain", when_used="json")  # type: ignore[attr-defined]
+        def serialize_model(self) -> Any:  # type: ignore[name-defined]
+            serialized = self.model_dump()
+            data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
             return data
 
     else:

@@ -103,42 +72,28 @@ class UniversalBaseModel(pydantic.BaseModel):
             json_encoders = {dt.datetime: serialize_datetime}
 
     @classmethod
-    def model_construct(
-        cls
-        _fields_set: typing.Optional[typing.Set[str]] = None,
-        **values: typing.Any,
-    ) -> "Model":
-        dealiased_object = convert_and_respect_annotation_metadata(
-            object_=values, annotation=cls, direction="read"
-        )
+    def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model":
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
         return cls.construct(_fields_set, **dealiased_object)
 
     @classmethod
-    def construct(
-        cls
-        _fields_set: typing.Optional[typing.Set[str]] = None,
-        **values: typing.Any,
-    ) -> "Model":
-        dealiased_object = convert_and_respect_annotation_metadata(
-            object_=values, annotation=cls, direction="read"
-        )
+    def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model":
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
         if IS_PYDANTIC_V2:
-            return super().model_construct(_fields_set, **dealiased_object)  # type: ignore
-
-        return super().construct(_fields_set, **dealiased_object)
+            return super().model_construct(_fields_set, **dealiased_object)  # type: ignore[misc]
+        return super().construct(_fields_set, **dealiased_object)
 
-    def json(self, **kwargs:
-        kwargs_with_defaults
+    def json(self, **kwargs: Any) -> str:
+        kwargs_with_defaults = {
             "by_alias": True,
             "exclude_unset": True,
             **kwargs,
         }
         if IS_PYDANTIC_V2:
-            return super().model_dump_json(**kwargs_with_defaults)  # type: ignore
-
-        return super().json(**kwargs_with_defaults)
+            return super().model_dump_json(**kwargs_with_defaults)  # type: ignore[misc]
+        return super().json(**kwargs_with_defaults)
 
-    def dict(self, **kwargs:
+    def dict(self, **kwargs: Any) -> Dict[str, Any]:
         """
         Override the default dict method to `exclude_unset` by default. This function patches
         `exclude_unset` to work include fields within non-None default values.

@@ -149,21 +104,21 @@ class UniversalBaseModel(pydantic.BaseModel):
         # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
         # that we have less control over, and this is less intrusive than custom serializers for now.
         if IS_PYDANTIC_V2:
-            kwargs_with_defaults_exclude_unset
+            kwargs_with_defaults_exclude_unset = {
                 **kwargs,
                 "by_alias": True,
                 "exclude_unset": True,
                 "exclude_none": False,
            }
-            kwargs_with_defaults_exclude_none
+            kwargs_with_defaults_exclude_none = {
                 **kwargs,
                 "by_alias": True,
                 "exclude_none": True,
                 "exclude_unset": False,
            }
            dict_dump = deep_union_pydantic_dicts(
-                super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore
-                super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore
+                super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore[misc]
+                super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore[misc]
            )
 
        else:

@@ -177,50 +132,38 @@ class UniversalBaseModel(pydantic.BaseModel):
                     # If the default values are non-null act like they've been set
                     # This effectively allows exclude_unset to work like exclude_none where
                     # the latter passes through intentionally set none values.
-                    if default is not None or (
-                        "exclude_unset" in kwargs and not kwargs["exclude_unset"]
-                    ):
+                    if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
                         _fields_set.add(name)
 
                         if default is not None:
                             self.__fields_set__.add(name)
 
-            kwargs_with_defaults_exclude_unset_include_fields
+            kwargs_with_defaults_exclude_unset_include_fields = {
                 "by_alias": True,
                 "exclude_unset": True,
                 "include": _fields_set,
                 **kwargs,
            }
 
-            dict_dump = super().dict(
-                **kwargs_with_defaults_exclude_unset_include_fields
-            )
+            dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
 
-        return convert_and_respect_annotation_metadata(
-            object_=dict_dump, annotation=self.__class__, direction="write"
-        )
+        return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")
 
 
-def _union_list_of_pydantic_dicts(
-
-) -> typing.List[typing.Any]:
-    converted_list: typing.List[typing.Any] = []
+def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]:
+    converted_list: List[Any] = []
     for i, item in enumerate(source):
-        destination_value = destination[i]
+        destination_value = destination[i]
         if isinstance(item, dict):
             converted_list.append(deep_union_pydantic_dicts(item, destination_value))
         elif isinstance(item, list):
-            converted_list.append(
-                _union_list_of_pydantic_dicts(item, destination_value)
-            )
+            converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
         else:
             converted_list.append(item)
     return converted_list
 
 
-def deep_union_pydantic_dicts(
-    source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
-) -> typing.Dict[str, typing.Any]:
+def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]:
    for key, value in source.items():
        node = destination.setdefault(key, {})
        if isinstance(value, dict):

@@ -238,18 +181,16 @@ def deep_union_pydantic_dicts(
 
 if IS_PYDANTIC_V2:
 
-    class V2RootModel(UniversalBaseModel, pydantic.RootModel):  # type: ignore
+    class V2RootModel(UniversalBaseModel, pydantic.RootModel):  # type: ignore[misc, name-defined, type-arg]
         pass
 
-    UniversalRootModel:
+    UniversalRootModel: TypeAlias = V2RootModel  # type: ignore[misc]
 else:
-    UniversalRootModel:
+    UniversalRootModel: TypeAlias = UniversalBaseModel  # type: ignore[misc, no-redef]
 
 
-def encode_by_type(o:
-    encoders_by_class_tuples:
-        typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]
-    ] = defaultdict(tuple)
+def encode_by_type(o: Any) -> Any:
+    encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
     for type_, encoder in encoders_by_type.items():
         encoders_by_class_tuples[encoder] += (type_,)
 

@@ -260,58 +201,49 @@ def encode_by_type(o: typing.Any) -> typing.Any:
             return encoder(o)
 
 
-def update_forward_refs(model:
+def update_forward_refs(model: Type["Model"], **localns: Any) -> None:
     if IS_PYDANTIC_V2:
-        model.model_rebuild(raise_errors=False)  # type: ignore
+        model.model_rebuild(raise_errors=False)  # type: ignore[attr-defined]
     else:
         model.update_forward_refs(**localns)
 
 
 # Mirrors Pydantic's internal typing
-AnyCallable =
+AnyCallable = Callable[..., Any]
 
 
 def universal_root_validator(
     pre: bool = False,
-) ->
+) -> Callable[[AnyCallable], AnyCallable]:
     def decorator(func: AnyCallable) -> AnyCallable:
         if IS_PYDANTIC_V2:
-            return pydantic.model_validator(mode="before" if pre else "after")(func)  # type: ignore
-
-        return pydantic.root_validator(pre=pre)(func)  # type: ignore # Pydantic v1
+            return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func))  # type: ignore[attr-defined]
+        return cast(AnyCallable, pydantic.root_validator(pre=pre)(func))  # type: ignore[call-overload]
 
     return decorator
 
 
-def universal_field_validator(
-    field_name: str, pre: bool = False
-) -> typing.Callable[[AnyCallable], AnyCallable]:
+def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]:
     def decorator(func: AnyCallable) -> AnyCallable:
         if IS_PYDANTIC_V2:
-            return pydantic.field_validator(
-
-            )(func)  # type: ignore # Pydantic v2
-        else:
-            return pydantic.validator(field_name, pre=pre)(func)  # type: ignore # Pydantic v1
+            return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func))  # type: ignore[attr-defined]
+        return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func))
 
     return decorator
 
 
-PydanticField =
+PydanticField = Union[ModelField, pydantic.fields.FieldInfo]
 
 
-def _get_model_fields(
-    model: typing.Type["Model"],
-) -> typing.Mapping[str, PydanticField]:
+def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]:
     if IS_PYDANTIC_V2:
-        return model.model_fields  # type: ignore
-
-    return model.__fields__  # type: ignore # Pydantic v1
+        return cast(Mapping[str, PydanticField], model.model_fields)  # type: ignore[attr-defined]
+    return cast(Mapping[str, PydanticField], model.__fields__)
 
 
-def _get_field_default(field: PydanticField) ->
+def _get_field_default(field: PydanticField) -> Any:
     try:
-        value = field.get_default()  # type: ignore
+        value = field.get_default()  # type: ignore[union-attr]
     except:
         value = field.default
     if IS_PYDANTIC_V2:
package/planqk/service/sdk/core/query_encoder.py

@@ -6,9 +6,7 @@ import pydantic
 
 
 # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict
-def traverse_query_dict(
-    dict_flat: Dict[str, Any], key_prefix: Optional[str] = None
-) -> List[Tuple[str, Any]]:
+def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]:
     result = []
     for k, v in dict_flat.items():
         key = f"{key_prefix}[{k}]" if key_prefix is not None else k
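The comment in this hunk describes the bracket-flattening that `traverse_query_dict` performs on nested dictionaries. A small, hypothetical example of that behaviour (input values made up, import path assumed from the file list):

```python
# Illustrative only: nested keys are flattened to "parent[child]" pairs,
# as described by the comment in the hunk above.
from planqk.service.sdk.core.query_encoder import traverse_query_dict  # path assumed

pairs = traverse_query_dict({"filter": {"status": "SUCCEEDED", "page": 2}})
# expected shape: [("filter[status]", "SUCCEEDED"), ("filter[page]", 2)]
print(pairs)
```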
package/planqk/service/sdk/core/serialization.py

@@ -4,9 +4,8 @@ import collections
 import inspect
 import typing
 
-import typing_extensions
-
 import pydantic
+import typing_extensions
 
 
 class FieldMetadata:

@@ -68,9 +67,7 @@ def convert_and_respect_annotation_metadata(
     ):
         return _convert_mapping(object_, clean_type, direction)
     # TypedDicts
-    if typing_extensions.is_typeddict(clean_type) and isinstance(
-        object_, typing.Mapping
-    ):
+    if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
         return _convert_mapping(object_, clean_type, direction)
 
     if (

@@ -163,7 +160,12 @@ def _convert_mapping(
     direction: typing.Literal["read", "write"],
 ) -> typing.Mapping[str, object]:
     converted_object: typing.Dict[str, object] = {}
-
+    try:
+        annotations = typing_extensions.get_type_hints(expected_type, include_extras=True)
+    except NameError:
+        # The TypedDict contains a circular reference, so
+        # we use the __annotations__ attribute directly.
+        annotations = getattr(expected_type, "__annotations__", {})
     aliases_to_field_names = _get_alias_to_field_name(annotations)
     for key, value in object_.items():
         if direction == "read" and key in aliases_to_field_names:

@@ -183,10 +185,8 @@ def _convert_mapping(
                 object_=value, annotation=type_, direction=direction
             )
         else:
-            converted_object[
-
-            ] = convert_and_respect_annotation_metadata(
-                object_=value, annotation=type_, direction=direction
+            converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = (
+                convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction)
             )
     return converted_object
 
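The new `try`/`except NameError` around `get_type_hints` in `_convert_mapping` falls back to the raw `__annotations__` mapping when an annotation cannot be resolved at runtime (the circular-reference case named in the added comment). A standalone sketch of that failure mode, using a made-up TypedDict that is not part of the SDK:

```python
# Illustrative only: get_type_hints raises NameError when an annotation
# references a name that is not resolvable at runtime; the fallback keeps
# the raw __annotations__ mapping instead, mirroring the change above.
import typing_extensions


class ExampleRef(typing_extensions.TypedDict):  # hypothetical, not an SDK type
    id: str
    parent: "UnresolvedType"  # e.g. only imported under TYPE_CHECKING


try:
    hints = typing_extensions.get_type_hints(ExampleRef, include_extras=True)
except NameError:
    hints = getattr(ExampleRef, "__annotations__", {})

print(hints)  # {'id': <class 'str'>, 'parent': 'UnresolvedType'}
```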
package/planqk/service/sdk/service_api/__init__.py

@@ -1,15 +1,7 @@
 # This file was auto-generated by Fern from our API Definition.
 
-
-    GetResultResponse,
-    GetResultResponseEmbedded,
-    GetResultResponseLinks,
-    HealthCheckResponse,
-)
+# isort: skip_file
 
-
-
-
-    "GetResultResponseLinks",
-    "HealthCheckResponse",
-]
+from .types import GetResultResponse, GetResultResponseEmbedded, GetResultResponseLinks
+
+__all__ = ["GetResultResponse", "GetResultResponseEmbedded", "GetResultResponseLinks"]