@planqk/planqk-service-sdk 2.6.2 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/sdk/Client.d.ts +2 -2
- package/dist/sdk/Client.js +1 -1
- package/dist/sdk/api/resources/serviceApi/client/Client.d.ts +4 -4
- package/dist/sdk/api/resources/serviceApi/client/Client.js +6 -2
- package/dist/sdk/api/types/LogEntry.d.ts +8 -0
- package/dist/sdk/api/types/LogEntry.js +5 -0
- package/dist/sdk/api/types/index.d.ts +1 -0
- package/dist/sdk/api/types/index.js +1 -0
- package/fern/fern.config.json +1 -1
- package/fern/generators.yml +6 -4
- package/fern/openapi/openapi.yml +104 -80
- package/package.json +1 -1
- package/planqk/service/_version.py +1 -1
- package/planqk/service/client.py +3 -3
- package/planqk/service/sdk/__init__.py +58 -8
- package/planqk/service/sdk/client.py +27 -7
- package/planqk/service/sdk/core/__init__.py +80 -20
- package/planqk/service/sdk/core/client_wrapper.py +8 -6
- package/planqk/service/sdk/core/force_multipart.py +4 -2
- package/planqk/service/sdk/core/http_response.py +1 -1
- package/planqk/service/sdk/core/http_sse/__init__.py +42 -0
- package/planqk/service/sdk/core/http_sse/_api.py +112 -0
- package/planqk/service/sdk/core/http_sse/_decoders.py +61 -0
- package/planqk/service/sdk/core/http_sse/_exceptions.py +7 -0
- package/planqk/service/sdk/core/http_sse/_models.py +17 -0
- package/planqk/service/sdk/core/pydantic_utilities.py +5 -2
- package/planqk/service/sdk/core/unchecked_base_model.py +341 -0
- package/planqk/service/sdk/service_api/__init__.py +0 -3
- package/planqk/service/sdk/service_api/client.py +17 -21
- package/planqk/service/sdk/service_api/raw_client.py +36 -40
- package/planqk/service/sdk/types/__init__.py +54 -4
- package/planqk/service/sdk/types/hal_link.py +3 -2
- package/planqk/service/sdk/types/log_entry.py +23 -0
- package/planqk/service/sdk/types/request_body.py +5 -0
- package/planqk/service/sdk/types/result_response.py +27 -0
- package/planqk/service/sdk/{service_api/types/get_result_response_embedded.py → types/result_response_embedded.py} +4 -3
- package/planqk/service/sdk/{service_api/types/get_result_response_links.py → types/result_response_links.py} +4 -3
- package/planqk/service/sdk/types/service_execution.py +3 -2
- package/pyproject.toml +1 -1
- package/requirements-dev.txt +59 -43
- package/requirements.txt +6 -6
- package/src/sdk/Client.ts +2 -2
- package/src/sdk/api/resources/serviceApi/client/Client.ts +15 -7
- package/src/sdk/api/types/LogEntry.ts +9 -0
- package/src/sdk/api/types/index.ts +1 -0
- package/uv.lock +77 -47
- package/planqk/service/sdk/service_api/types/__init__.py +0 -9
- package/planqk/service/sdk/service_api/types/get_result_response.py +0 -26
package/planqk/service/sdk/core/client_wrapper.py

@@ -10,7 +10,7 @@ class BaseClientWrapper:
     def __init__(
         self,
         *,
-        token: typing.Union[str, typing.Callable[[], str]],
+        token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None,
        headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
@@ -25,11 +25,13 @@ class BaseClientWrapper:
             "X-Fern-Language": "Python",
             **(self.get_custom_headers() or {}),
         }
-        headers["Authorization"] = f"Bearer {self._get_token()}"
+        token = self._get_token()
+        if token is not None:
+            headers["Authorization"] = f"Bearer {token}"
         return headers
 
-    def _get_token(self) -> str:
-        if isinstance(self._token, str):
+    def _get_token(self) -> typing.Optional[str]:
+        if isinstance(self._token, str) or self._token is None:
             return self._token
         else:
             return self._token()
@@ -48,7 +50,7 @@ class SyncClientWrapper(BaseClientWrapper):
     def __init__(
         self,
         *,
-        token: typing.Union[str, typing.Callable[[], str]],
+        token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None,
         headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
@@ -67,7 +69,7 @@ class AsyncClientWrapper(BaseClientWrapper):
     def __init__(
         self,
         *,
-        token: typing.Union[str, typing.Callable[[], str]],
+        token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None,
         headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
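Net effect of the four hunks above: `token` is now optional end to end, and the `Authorization` header is only attached when a token is actually present. A minimal sketch of what that permits, assuming the usual Fern wrapper signature (the `httpx_client` parameter sits outside the shown hunks and is assumed):

```python
import httpx

from planqk.service.sdk.core.client_wrapper import SyncClientWrapper

# Tokenless construction is now valid; get_headers() simply omits Authorization.
anonymous = SyncClientWrapper(base_url="https://example.org/api", httpx_client=httpx.Client())
assert "Authorization" not in anonymous.get_headers()

# String and callable tokens still work as before.
authed = SyncClientWrapper(
    token=lambda: "rotating-token",  # resolved on every get_headers() call
    base_url="https://example.org/api",
    httpx_client=httpx.Client(),
)
assert authed.get_headers()["Authorization"] == "Bearer rotating-token"
```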
package/planqk/service/sdk/core/force_multipart.py

@@ -1,7 +1,9 @@
 # This file was auto-generated by Fern from our API Definition.
 
+from typing import Any, Dict
 
-class ForceMultipartDict(dict):
+
+class ForceMultipartDict(Dict[str, Any]):
     """
     A dictionary subclass that always evaluates to True in boolean contexts.
 
@@ -9,7 +11,7 @@ class ForceMultipartDict(dict):
     the dictionary is empty, which would normally evaluate to False.
     """
 
-    def __bool__(self):
+    def __bool__(self) -> bool:
         return True
 
 
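Why the truthiness override matters: HTTP clients commonly gate multipart encoding behind an `if files:` check, so an ordinary empty dict would silently fall back to a non-multipart body. A quick illustration (the gating behavior described is the general pattern, not a claim about a specific call site in this SDK):

```python
from planqk.service.sdk.core.force_multipart import ForceMultipartDict

assert not {}                # a plain empty dict is falsy
assert ForceMultipartDict()  # this subclass is truthy even when empty

# So passing files=ForceMultipartDict() survives `if files:` style checks
# and keeps the request encoded as multipart/form-data with no file parts.
```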
package/planqk/service/sdk/core/http_response.py

@@ -4,8 +4,8 @@ from typing import Dict, Generic, TypeVar
 
 import httpx
 
+# Generic to represent the underlying type of the data wrapped by the HTTP response.
 T = TypeVar("T")
-"""Generic to represent the underlying type of the data wrapped by the HTTP response."""
 
 
 class BaseHttpResponse:
package/planqk/service/sdk/core/http_sse/__init__.py

@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# isort: skip_file
+
+import typing
+from importlib import import_module
+
+if typing.TYPE_CHECKING:
+    from ._api import EventSource, aconnect_sse, connect_sse
+    from ._exceptions import SSEError
+    from ._models import ServerSentEvent
+_dynamic_imports: typing.Dict[str, str] = {
+    "EventSource": "._api",
+    "SSEError": "._exceptions",
+    "ServerSentEvent": "._models",
+    "aconnect_sse": "._api",
+    "connect_sse": "._api",
+}
+
+
+def __getattr__(attr_name: str) -> typing.Any:
+    module_name = _dynamic_imports.get(attr_name)
+    if module_name is None:
+        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
+    try:
+        module = import_module(module_name, __package__)
+        if module_name == f".{attr_name}":
+            return module
+        else:
+            return getattr(module, attr_name)
+    except ImportError as e:
+        raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
+    except AttributeError as e:
+        raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
+
+
+def __dir__():
+    lazy_attrs = list(_dynamic_imports.keys())
+    return sorted(lazy_attrs)
+
+
+__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"]
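The new package exports everything lazily via a module-level `__getattr__` (PEP 562), so importing `http_sse` does not load the submodules until a name is first used. Illustration of the access pattern:

```python
from planqk.service.sdk.core import http_sse

# Nothing from ._api is imported yet; the first attribute access goes through
# the module-level __getattr__, which imports ._api on demand and resolves it.
EventSource = http_sse.EventSource

# Unknown names raise AttributeError from the same hook:
#   http_sse.does_not_exist  ->  AttributeError
```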
package/planqk/service/sdk/core/http_sse/_api.py

@@ -0,0 +1,112 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import re
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast
+
+import httpx
+from ._decoders import SSEDecoder
+from ._exceptions import SSEError
+from ._models import ServerSentEvent
+
+
+class EventSource:
+    def __init__(self, response: httpx.Response) -> None:
+        self._response = response
+
+    def _check_content_type(self) -> None:
+        content_type = self._response.headers.get("content-type", "").partition(";")[0]
+        if "text/event-stream" not in content_type:
+            raise SSEError(
+                f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}"
+            )
+
+    def _get_charset(self) -> str:
+        """Extract charset from Content-Type header, fallback to UTF-8."""
+        content_type = self._response.headers.get("content-type", "")
+
+        # Parse charset parameter using regex
+        charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE)
+        if charset_match:
+            charset = charset_match.group(1).strip("\"'")
+            # Validate that it's a known encoding
+            try:
+                # Test if the charset is valid by trying to encode/decode
+                "test".encode(charset).decode(charset)
+                return charset
+            except (LookupError, UnicodeError):
+                # If charset is invalid, fall back to UTF-8
+                pass
+
+        # Default to UTF-8 if no charset specified or invalid charset
+        return "utf-8"
+
+    @property
+    def response(self) -> httpx.Response:
+        return self._response
+
+    def iter_sse(self) -> Iterator[ServerSentEvent]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        charset = self._get_charset()
+
+        buffer = ""
+        for chunk in self._response.iter_bytes():
+            # Decode chunk using detected charset
+            text_chunk = chunk.decode(charset, errors="replace")
+            buffer += text_chunk
+
+            # Process complete lines
+            while "\n" in buffer:
+                line, buffer = buffer.split("\n", 1)
+                line = line.rstrip("\r")
+                sse = decoder.decode(line)
+                # when we reach a "\n\n" => line = ''
+                # => decoder will attempt to return an SSE Event
+                if sse is not None:
+                    yield sse
+
+        # Process any remaining data in buffer
+        if buffer.strip():
+            line = buffer.rstrip("\r")
+            sse = decoder.decode(line)
+            if sse is not None:
+                yield sse
+
+    async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        lines = cast(AsyncGenerator[str, None], self._response.aiter_lines())
+        try:
+            async for line in lines:
+                line = line.rstrip("\n")
+                sse = decoder.decode(line)
+                if sse is not None:
+                    yield sse
+        finally:
+            await lines.aclose()
+
+
+@contextmanager
+def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
+
+
+@asynccontextmanager
+async def aconnect_sse(
+    client: httpx.AsyncClient,
+    method: str,
+    url: str,
+    **kwargs: Any,
+) -> AsyncIterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    async with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
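A hedged usage sketch of the new entry points (the URL and event handling are placeholders; the Accept and Cache-Control headers are set by `connect_sse` itself, per the code above):

```python
import httpx

from planqk.service.sdk.core.http_sse import connect_sse

with httpx.Client() as client:
    # connect_sse opens a streaming request and wraps it in an EventSource.
    with connect_sse(client, "GET", "https://example.org/events") as event_source:
        for sse in event_source.iter_sse():
            print(sse.event, sse.data)
```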
package/planqk/service/sdk/core/http_sse/_decoders.py

@@ -0,0 +1,61 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from typing import List, Optional
+
+from ._models import ServerSentEvent
+
+
+class SSEDecoder:
+    def __init__(self) -> None:
+        self._event = ""
+        self._data: List[str] = []
+        self._last_event_id = ""
+        self._retry: Optional[int] = None
+
+    def decode(self, line: str) -> Optional[ServerSentEvent]:
+        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation  # noqa: E501
+
+        if not line:
+            if not self._event and not self._data and not self._last_event_id and self._retry is None:
+                return None
+
+            sse = ServerSentEvent(
+                event=self._event,
+                data="\n".join(self._data),
+                id=self._last_event_id,
+                retry=self._retry,
+            )
+
+            # NOTE: as per the SSE spec, do not reset last_event_id.
+            self._event = ""
+            self._data = []
+            self._retry = None
+
+            return sse
+
+        if line.startswith(":"):
+            return None
+
+        fieldname, _, value = line.partition(":")
+
+        if value.startswith(" "):
+            value = value[1:]
+
+        if fieldname == "event":
+            self._event = value
+        elif fieldname == "data":
+            self._data.append(value)
+        elif fieldname == "id":
+            if "\0" in value:
+                pass
+            else:
+                self._last_event_id = value
+        elif fieldname == "retry":
+            try:
+                self._retry = int(value)
+            except (TypeError, ValueError):
+                pass
+        else:
+            pass  # Field is ignored.
+
+        return None
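The decoder is line-oriented: field lines accumulate state, and the blank line that terminates an SSE block flushes the pending event. A small illustration of that contract:

```python
from planqk.service.sdk.core.http_sse._decoders import SSEDecoder

decoder = SSEDecoder()

# Field lines only update internal state and return None...
assert decoder.decode("event: status") is None
assert decoder.decode('data: {"progress": 42}') is None

# ...and the empty line terminating the block flushes the accumulated event.
sse = decoder.decode("")
assert sse is not None and sse.event == "status" and sse.data == '{"progress": 42}'
```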
package/planqk/service/sdk/core/http_sse/_models.py

@@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import json
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass(frozen=True)
+class ServerSentEvent:
+    event: str = "message"
+    data: str = ""
+    id: str = ""
+    retry: Optional[int] = None
+
+    def json(self) -> Any:
+        """Parse the data field as JSON."""
+        return json.loads(self.data)
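Since `data` is a plain string, the `json()` helper is the one-line bridge to structured payloads:

```python
from planqk.service.sdk.core.http_sse._models import ServerSentEvent

sse = ServerSentEvent(event="result", data='{"status": "SUCCEEDED"}')
assert sse.json() == {"status": "SUCCEEDED"}
# frozen=True makes events immutable value objects; reassigning sse.data
# would raise dataclasses.FrozenInstanceError.
```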
package/planqk/service/sdk/core/pydantic_utilities.py

@@ -61,7 +61,7 @@ class UniversalBaseModel(pydantic.BaseModel):
 
     @pydantic.model_serializer(mode="plain", when_used="json")  # type: ignore[attr-defined]
     def serialize_model(self) -> Any:  # type: ignore[name-defined]
-        serialized = self.
+        serialized = self.dict()  # type: ignore[attr-defined]
         data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
         return data
 
@@ -147,7 +147,10 @@ class UniversalBaseModel(pydantic.BaseModel):
 
         dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
 
-        return
+        return cast(
+            Dict[str, Any],
+            convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write"),
+        )
 
 
 def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]:
package/planqk/service/sdk/core/unchecked_base_model.py

@@ -0,0 +1,341 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import inspect
+import typing
+import uuid
+
+import pydantic
+import typing_extensions
+from .pydantic_utilities import (
+    IS_PYDANTIC_V2,
+    ModelField,
+    UniversalBaseModel,
+    get_args,
+    get_origin,
+    is_literal_type,
+    is_union,
+    parse_date,
+    parse_datetime,
+    parse_obj_as,
+)
+from .serialization import get_field_to_alias_mapping
+from pydantic_core import PydanticUndefined
+
+
+class UnionMetadata:
+    discriminant: str
+
+    def __init__(self, *, discriminant: str) -> None:
+        self.discriminant = discriminant
+
+
+Model = typing.TypeVar("Model", bound=pydantic.BaseModel)
+
+
+class UncheckedBaseModel(UniversalBaseModel):
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow")  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            extra = pydantic.Extra.allow
+
+    @classmethod
+    def model_construct(
+        cls: typing.Type["Model"],
+        _fields_set: typing.Optional[typing.Set[str]] = None,
+        **values: typing.Any,
+    ) -> "Model":
+        # Fallback construct function to the specified override below.
+        return cls.construct(_fields_set=_fields_set, **values)
+
+    # Allow construct to not validate model
+    # Implementation taken from: https://github.com/pydantic/pydantic/issues/1168#issuecomment-817742836
+    @classmethod
+    def construct(
+        cls: typing.Type["Model"],
+        _fields_set: typing.Optional[typing.Set[str]] = None,
+        **values: typing.Any,
+    ) -> "Model":
+        m = cls.__new__(cls)
+        fields_values = {}
+
+        if _fields_set is None:
+            _fields_set = set(values.keys())
+
+        fields = _get_model_fields(cls)
+        populate_by_name = _get_is_populate_by_name(cls)
+        field_aliases = get_field_to_alias_mapping(cls)
+
+        for name, field in fields.items():
+            # Key here is only used to pull data from the values dict
+            # you should always use the NAME of the field to for field_values, etc.
+            # because that's how the object is constructed from a pydantic perspective
+            key = field.alias
+            if (key is None or field.alias == name) and name in field_aliases:
+                key = field_aliases[name]
+
+            if key is None or (key not in values and populate_by_name):  # Added this to allow population by field name
+                key = name
+
+            if key in values:
+                if IS_PYDANTIC_V2:
+                    type_ = field.annotation  # type: ignore # Pydantic v2
+                else:
+                    type_ = typing.cast(typing.Type, field.outer_type_)  # type: ignore # Pydantic < v1.10.15
+
+                fields_values[name] = (
+                    construct_type(object_=values[key], type_=type_) if type_ is not None else values[key]
+                )
+                _fields_set.add(name)
+            else:
+                default = _get_field_default(field)
+                fields_values[name] = default
+
+                # If the default values are non-null act like they've been set
+                # This effectively allows exclude_unset to work like exclude_none where
+                # the latter passes through intentionally set none values.
+                if default != None and default != PydanticUndefined:
+                    _fields_set.add(name)
+
+        # Add extras back in
+        extras = {}
+        pydantic_alias_fields = [field.alias for field in fields.values()]
+        internal_alias_fields = list(field_aliases.values())
+        for key, value in values.items():
+            # If the key is not a field by name, nor an alias to a field, then it's extra
+            if (key not in pydantic_alias_fields and key not in internal_alias_fields) and key not in fields:
+                if IS_PYDANTIC_V2:
+                    extras[key] = value
+                else:
+                    _fields_set.add(key)
+                    fields_values[key] = value
+
+        object.__setattr__(m, "__dict__", fields_values)
+
+        if IS_PYDANTIC_V2:
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", extras)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
+        else:
+            object.__setattr__(m, "__fields_set__", _fields_set)
+            m._init_private_attributes()  # type: ignore # Pydantic v1
+        return m
+
+
+def _validate_collection_items_compatible(collection: typing.Any, target_type: typing.Type[typing.Any]) -> bool:
+    """
+    Validate that all items in a collection are compatible with the target type.
+
+    Args:
+        collection: The collection to validate (list, set, or dict values)
+        target_type: The target type to validate against
+
+    Returns:
+        True if all items are compatible, False otherwise
+    """
+    if inspect.isclass(target_type) and issubclass(target_type, pydantic.BaseModel):
+        for item in collection:
+            try:
+                # Try to validate the item against the target type
+                if isinstance(item, dict):
+                    parse_obj_as(target_type, item)
+                else:
+                    # If it's not a dict, it might already be the right type
+                    if not isinstance(item, target_type):
+                        return False
+            except Exception:
+                return False
+    return True
+
+
+def _convert_undiscriminated_union_type(union_type: typing.Type[typing.Any], object_: typing.Any) -> typing.Any:
+    inner_types = get_args(union_type)
+    if typing.Any in inner_types:
+        return object_
+
+    for inner_type in inner_types:
+        # Handle lists of objects that need parsing
+        if get_origin(inner_type) is list and isinstance(object_, list):
+            list_inner_type = get_args(inner_type)[0]
+            try:
+                if inspect.isclass(list_inner_type) and issubclass(list_inner_type, pydantic.BaseModel):
+                    # Validate that all items in the list are compatible with the target type
+                    if _validate_collection_items_compatible(object_, list_inner_type):
+                        parsed_list = [parse_obj_as(object_=item, type_=list_inner_type) for item in object_]
+                        return parsed_list
+            except Exception:
+                pass
+
+        try:
+            if inspect.isclass(inner_type) and issubclass(inner_type, pydantic.BaseModel):
+                # Attempt a validated parse until one works
+                return parse_obj_as(inner_type, object_)
+        except Exception:
+            continue
+
+    # If none of the types work, just return the first successful cast
+    for inner_type in inner_types:
+        try:
+            return construct_type(object_=object_, type_=inner_type)
+        except Exception:
+            continue
+
+
+def _convert_union_type(type_: typing.Type[typing.Any], object_: typing.Any) -> typing.Any:
+    base_type = get_origin(type_) or type_
+    union_type = type_
+    if base_type == typing_extensions.Annotated:  # type: ignore[comparison-overlap]
+        union_type = get_args(type_)[0]
+        annotated_metadata = get_args(type_)[1:]
+        for metadata in annotated_metadata:
+            if isinstance(metadata, UnionMetadata):
+                try:
+                    # Cast to the correct type, based on the discriminant
+                    for inner_type in get_args(union_type):
+                        try:
+                            objects_discriminant = getattr(object_, metadata.discriminant)
+                        except:
+                            objects_discriminant = object_[metadata.discriminant]
+                        if inner_type.__fields__[metadata.discriminant].default == objects_discriminant:
+                            return construct_type(object_=object_, type_=inner_type)
+                except Exception:
+                    # Allow to fall through to our regular union handling
+                    pass
+    return _convert_undiscriminated_union_type(union_type, object_)
+
+
+def construct_type(*, type_: typing.Type[typing.Any], object_: typing.Any) -> typing.Any:
+    """
+    Here we are essentially creating the same `construct` method in spirit as the above, but for all types, not just
+    Pydantic models.
+    The idea is to essentially attempt to coerce object_ to type_ (recursively)
+    """
+    # Short circuit when dealing with optionals, don't try to coerces None to a type
+    if object_ is None:
+        return None
+
+    base_type = get_origin(type_) or type_
+    is_annotated = base_type == typing_extensions.Annotated  # type: ignore[comparison-overlap]
+    maybe_annotation_members = get_args(type_)
+    is_annotated_union = is_annotated and is_union(get_origin(maybe_annotation_members[0]))
+
+    if base_type == typing.Any:  # type: ignore[comparison-overlap]
+        return object_
+
+    if base_type == dict:
+        if not isinstance(object_, typing.Mapping):
+            return object_
+
+        key_type, items_type = get_args(type_)
+        d = {
+            construct_type(object_=key, type_=key_type): construct_type(object_=item, type_=items_type)
+            for key, item in object_.items()
+        }
+        return d
+
+    if base_type == list:
+        if not isinstance(object_, list):
+            return object_
+
+        inner_type = get_args(type_)[0]
+        return [construct_type(object_=entry, type_=inner_type) for entry in object_]
+
+    if base_type == set:
+        if not isinstance(object_, set) and not isinstance(object_, list):
+            return object_
+
+        inner_type = get_args(type_)[0]
+        return {construct_type(object_=entry, type_=inner_type) for entry in object_}
+
+    if is_union(base_type) or is_annotated_union:
+        return _convert_union_type(type_, object_)
+
+    # Cannot do an `issubclass` with a literal type, let's also just confirm we have a class before this call
+    if (
+        object_ is not None
+        and not is_literal_type(type_)
+        and (
+            (inspect.isclass(base_type) and issubclass(base_type, pydantic.BaseModel))
+            or (
+                is_annotated
+                and inspect.isclass(maybe_annotation_members[0])
+                and issubclass(maybe_annotation_members[0], pydantic.BaseModel)
+            )
+        )
+    ):
+        if IS_PYDANTIC_V2:
+            return type_.model_construct(**object_)
+        else:
+            return type_.construct(**object_)
+
+    if base_type == dt.datetime:
+        try:
+            return parse_datetime(object_)
+        except Exception:
+            return object_
+
+    if base_type == dt.date:
+        try:
+            return parse_date(object_)
+        except Exception:
+            return object_
+
+    if base_type == uuid.UUID:
+        try:
+            return uuid.UUID(object_)
+        except Exception:
+            return object_
+
+    if base_type == int:
+        try:
+            return int(object_)
+        except Exception:
+            return object_
+
+    if base_type == bool:
+        try:
+            if isinstance(object_, str):
+                stringified_object = object_.lower()
+                return stringified_object == "true" or stringified_object == "1"
+
+            return bool(object_)
+        except Exception:
+            return object_
+
+    return object_
+
+
+def _get_is_populate_by_name(model: typing.Type["Model"]) -> bool:
+    if IS_PYDANTIC_V2:
+        return model.model_config.get("populate_by_name", False)  # type: ignore # Pydantic v2
+    return model.__config__.allow_population_by_field_name  # type: ignore # Pydantic v1
+
+
+PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo]
+
+
+# Pydantic V1 swapped the typing of __fields__'s values from ModelField to FieldInfo
+# And so we try to handle both V1 cases, as well as V2 (FieldInfo from model.model_fields)
+def _get_model_fields(
+    model: typing.Type["Model"],
+) -> typing.Mapping[str, PydanticField]:
+    if IS_PYDANTIC_V2:
+        return model.model_fields  # type: ignore # Pydantic v2
+    else:
+        return model.__fields__  # type: ignore # Pydantic v1
+
+
+def _get_field_default(field: PydanticField) -> typing.Any:
+    try:
+        value = field.get_default()  # type: ignore # Pydantic < v1.10.15
+    except:
+        value = field.default
+    if IS_PYDANTIC_V2:
+        from pydantic_core import PydanticUndefined
+
+        if value == PydanticUndefined:
+            return None
+        return value
+    return value