@planqk/planqk-service-sdk 2.2.1 → 2.4.0
This diff shows the changes between publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- package/README-node.md +1 -0
- package/README-python.md +7 -7
- package/README.md +1 -0
- package/notebooks/python-sdk.ipynb +56 -114
- package/package.json +1 -1
- package/planqk/service/_version.py +1 -1
- package/planqk/service/client.py +13 -12
- package/planqk/service/datapool.py +24 -0
- package/planqk/service/sdk/__init__.py +4 -30
- package/planqk/service/sdk/client.py +20 -19
- package/planqk/service/sdk/core/__init__.py +5 -0
- package/planqk/service/sdk/core/api_error.py +12 -6
- package/planqk/service/sdk/core/client_wrapper.py +12 -4
- package/planqk/service/sdk/core/datetime_utils.py +1 -3
- package/planqk/service/sdk/core/file.py +2 -5
- package/planqk/service/sdk/core/force_multipart.py +16 -0
- package/planqk/service/sdk/core/http_client.py +86 -118
- package/planqk/service/sdk/core/http_response.py +55 -0
- package/planqk/service/sdk/core/jsonable_encoder.py +1 -4
- package/planqk/service/sdk/core/pydantic_utilities.py +79 -147
- package/planqk/service/sdk/core/query_encoder.py +1 -3
- package/planqk/service/sdk/core/serialization.py +10 -10
- package/planqk/service/sdk/environment.py +1 -1
- package/planqk/service/sdk/service_api/__init__.py +4 -12
- package/planqk/service/sdk/service_api/client.py +138 -860
- package/planqk/service/sdk/service_api/raw_client.py +606 -0
- package/planqk/service/sdk/service_api/types/__init__.py +3 -7
- package/planqk/service/sdk/service_api/types/get_result_response.py +7 -11
- package/planqk/service/sdk/service_api/types/get_result_response_embedded.py +4 -6
- package/planqk/service/sdk/service_api/types/get_result_response_links.py +4 -6
- package/planqk/service/sdk/types/__init__.py +3 -11
- package/planqk/service/sdk/types/hal_link.py +3 -5
- package/planqk/service/sdk/types/service_execution.py +8 -16
- package/planqk/service/sdk/types/service_execution_status.py +1 -2
- package/pyproject.toml +1 -1
- package/uv.lock +250 -256
- package/planqk/service/sdk/errors/__init__.py +0 -15
- package/planqk/service/sdk/errors/bad_request_error.py +0 -9
- package/planqk/service/sdk/errors/forbidden_error.py +0 -9
- package/planqk/service/sdk/errors/internal_server_error.py +0 -9
- package/planqk/service/sdk/errors/not_found_error.py +0 -9
- package/planqk/service/sdk/errors/unauthorized_error.py +0 -9
- package/planqk/service/sdk/service_api/types/health_check_response.py +0 -24
- package/planqk/service/sdk/types/input_data.py +0 -5
- package/planqk/service/sdk/types/input_data_ref.py +0 -27
- package/planqk/service/sdk/types/input_params.py +0 -5
package/planqk/service/sdk/core/client_wrapper.py

@@ -1,9 +1,9 @@
 # This file was auto-generated by Fern from our API Definition.
 
 import typing
+
 import httpx
-from .http_client import HttpClient
-from .http_client import AsyncHttpClient
+from .http_client import AsyncHttpClient, HttpClient
 
 
 class BaseClientWrapper:
@@ -11,16 +11,19 @@ class BaseClientWrapper:
         self,
         *,
         token: typing.Union[str, typing.Callable[[], str]],
+        headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
     ):
         self._token = token
+        self._headers = headers
         self._base_url = base_url
         self._timeout = timeout
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
+            **(self.get_custom_headers() or {}),
         }
         headers["Authorization"] = f"Bearer {self._get_token()}"
         return headers
@@ -31,6 +34,9 @@ class BaseClientWrapper:
         else:
             return self._token()
 
+    def get_custom_headers(self) -> typing.Optional[typing.Dict[str, str]]:
+        return self._headers
+
     def get_base_url(self) -> str:
         return self._base_url
 
@@ -43,11 +49,12 @@ class SyncClientWrapper(BaseClientWrapper):
         self,
         *,
         token: typing.Union[str, typing.Callable[[], str]],
+        headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
         httpx_client: httpx.Client,
     ):
-        super().__init__(token=token, base_url=base_url, timeout=timeout)
+        super().__init__(token=token, headers=headers, base_url=base_url, timeout=timeout)
         self.httpx_client = HttpClient(
             httpx_client=httpx_client,
             base_headers=self.get_headers,
@@ -61,11 +68,12 @@ class AsyncClientWrapper(BaseClientWrapper):
         self,
         *,
         token: typing.Union[str, typing.Callable[[], str]],
+        headers: typing.Optional[typing.Dict[str, str]] = None,
         base_url: str,
         timeout: typing.Optional[float] = None,
         httpx_client: httpx.AsyncClient,
     ):
-        super().__init__(token=token, base_url=base_url, timeout=timeout)
+        super().__init__(token=token, headers=headers, base_url=base_url, timeout=timeout)
         self.httpx_client = AsyncHttpClient(
             httpx_client=httpx_client,
             base_headers=self.get_headers,
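The practical effect of the `client_wrapper.py` change: callers can hand the wrapper a static header map once and have it merged into every request, layered between the Fern defaults and the `Authorization` header. A minimal sketch of the new behavior (the import path follows the file list above; the token, header, and base URL values are made up):

```python
import httpx

from planqk.service.sdk.core.client_wrapper import SyncClientWrapper

wrapper = SyncClientWrapper(
    token=lambda: "my-api-key",           # a str or a zero-arg callable, as before
    headers={"X-Correlation-Id": "abc"},  # new in this release
    base_url="https://example.invalid",   # placeholder
    httpx_client=httpx.Client(),
)

# Layering order: Fern defaults, then custom headers, then Authorization:
# {'X-Fern-Language': 'Python', 'X-Correlation-Id': 'abc', 'Authorization': 'Bearer my-api-key'}
print(wrapper.get_headers())
```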
package/planqk/service/sdk/core/datetime_utils.py

@@ -13,9 +13,7 @@ def serialize_datetime(v: dt.datetime) -> str:
     """
 
     def _serialize_zoned_datetime(v: dt.datetime) -> str:
-        if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(
-            None
-        ):
+        if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None):
             # UTC is a special case where we use "Z" at the end instead of "+00:00"
             return v.isoformat().replace("+00:00", "Z")
         else:
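This hunk only reflows a wrapped condition onto one line; the UTC special case it guards is easiest to see with a concrete value:

```python
import datetime as dt

# For UTC datetimes, isoformat() emits "+00:00", which the serializer
# rewrites to the RFC 3339 "Z" suffix; other zones keep their numeric offset.
v = dt.datetime(2024, 1, 2, 3, 4, 5, tzinfo=dt.timezone.utc)
print(v.isoformat())                         # 2024-01-02T03:04:05+00:00
print(v.isoformat().replace("+00:00", "Z"))  # 2024-01-02T03:04:05Z
```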
package/planqk/service/sdk/core/file.py

@@ -53,15 +53,12 @@ def with_content_type(*, file: File, default_content_type: str) -> File:
         filename, content = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
         return (filename, content, default_content_type)
     elif len(file) == 3:
-        filename, content, file_content_type = cast(
-            Tuple[Optional[str], FileContent, Optional[str]], file
-        )  # type: ignore
+        filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
         out_content_type = file_content_type or default_content_type
         return (filename, content, out_content_type)
     elif len(file) == 4:
         filename, content, file_content_type, headers = cast(  # type: ignore
-            Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
-            file,
+            Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
         )
         out_content_type = file_content_type or default_content_type
         return (filename, content, out_content_type, headers)
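Again a pure reflow, but the tuple shapes `with_content_type` normalizes are worth spelling out. A sketch with hypothetical values:

```python
# Shapes accepted as File tuples by with_content_type(file=..., default_content_type=...):
two = ("data.json", b"{}")                          # 2-tuple: content type filled in from the default
three = ("data.json", b"{}", "application/json")    # 3-tuple: an explicit content type wins
four = ("data.json", b"{}", None, {"X-Meta": "1"})  # 4-tuple: None -> default; extra headers kept
```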
package/planqk/service/sdk/core/force_multipart.py (new file)

@@ -0,0 +1,16 @@
+# This file was auto-generated by Fern from our API Definition.
+
+
+class ForceMultipartDict(dict):
+    """
+    A dictionary subclass that always evaluates to True in boolean contexts.
+
+    This is used to force multipart/form-data encoding in HTTP requests even when
+    the dictionary is empty, which would normally evaluate to False.
+    """
+
+    def __bool__(self):
+        return True
+
+
+FORCE_MULTIPART = ForceMultipartDict()
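Why this helper exists: httpx decides whether to build a multipart/form-data body based on the truthiness of its `files` argument, and an ordinary empty dict is falsy. The subclass (reproduced below from the new file) stays truthy even when empty, so passing it keeps multipart encoding on:

```python
class ForceMultipartDict(dict):
    """Empty dict that still evaluates to True, so multipart encoding is kept."""

    def __bool__(self):
        return True


assert not {}                # a plain empty dict is falsy
assert ForceMultipartDict()  # ...this one is not, despite having no entries
```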
package/planqk/service/sdk/core/http_client.py

@@ -2,7 +2,6 @@
 
 import asyncio
 import email.utils
-import json
 import re
 import time
 import typing
@@ -11,12 +10,13 @@ from contextlib import asynccontextmanager, contextmanager
 from random import random
 
 import httpx
-
 from .file import File, convert_file_dict_to_httpx_tuples
+from .force_multipart import FORCE_MULTIPART
 from .jsonable_encoder import jsonable_encoder
 from .query_encoder import encode_query
 from .remove_none_from_dict import remove_none_from_dict
 from .request_options import RequestOptions
+from httpx._types import RequestFiles
 
 INITIAL_RETRY_DELAY_SECONDS = 0.5
 MAX_RETRY_DELAY_SECONDS = 10
@@ -77,9 +77,7 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float:
         return retry_after
 
     # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
-    retry_delay = min(
-        INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS
-    )
+    retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
 
     # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
     timeout = retry_delay * (1 - 0.25 * random())
@@ -111,8 +109,7 @@ def maybe_filter_request_body(
 ) -> typing.Optional[typing.Any]:
     if data is None:
         return (
-            jsonable_encoder(request_options.get("additional_body_parameters", {}))
-            or {}
+            jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
             if request_options is not None
             else None
         )
@@ -122,8 +119,7 @@ def maybe_filter_request_body(
     data_content = {
         **(jsonable_encoder(remove_omit_from_dict(data, omit))),  # type: ignore
         **(
-            jsonable_encoder(request_options.get("additional_body_parameters", {}))
-            or {}
+            jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
             if request_options is not None
             else {}
         ),
@@ -148,9 +144,7 @@ def get_request_body(
     json_body = maybe_filter_request_body(json, request_options, omit)
 
     # If you have an empty JSON body, you should just send None
-    return (
-        json_body if json_body != {} else None
-    ), data_body if data_body != {} else None
+    return (json_body if json_body != {} else None), data_body if data_body != {} else None
 
 
 class HttpClient:
@@ -173,9 +167,7 @@ class HttpClient:
        base_url = self.base_url()
 
         if base_url is None:
-            raise ValueError(
-                "A base_url is required to make this request, please provide one and try again."
-            )
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
         return base_url
 
     def request(
@@ -187,29 +179,37 @@ class HttpClient:
         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
         json: typing.Optional[typing.Any] = None,
         data: typing.Optional[typing.Any] = None,
-        content: typing.Optional[
-            typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]
-        ] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
         files: typing.Optional[
-            typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]
+            typing.Union[
+                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                typing.List[typing.Tuple[str, File]],
+            ]
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
         retries: int = 2,
         omit: typing.Optional[typing.Any] = None,
+        force_multipart: typing.Optional[bool] = None,
     ) -> httpx.Response:
         base_url = self.get_base_url(base_url)
         timeout = (
             request_options.get("timeout_in_seconds")
-            if request_options is not None
-            and request_options.get("timeout_in_seconds") is not None
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
             else self.base_timeout()
         )
 
-        json_body, data_body = get_request_body(
-            json=json, data=data, request_options=request_options, omit=omit
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
+        request_files: typing.Optional[RequestFiles] = (
+            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+            if (files is not None and files is not omit and isinstance(files, dict))
+            else None
         )
 
+        if (request_files is None or len(request_files) == 0) and force_multipart:
+            request_files = FORCE_MULTIPART
+
         response = self.httpx_client.request(
             method=method,
             url=urllib.parse.urljoin(f"{base_url}/", path),
@@ -218,11 +218,7 @@ class HttpClient:
                     {
                         **self.base_headers(),
                         **(headers if headers is not None else {}),
-                        **(
-                            request_options.get("additional_headers", {}) or {}
-                            if request_options is not None
-                            else {}
-                        ),
+                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                     }
                 )
             ),
@@ -233,10 +229,7 @@ class HttpClient:
                         {
                             **(params if params is not None else {}),
                             **(
-                                request_options.get(
-                                    "additional_query_parameters", {}
-                                )
-                                or {}
+                                request_options.get("additional_query_parameters", {}) or {}
                                 if request_options is not None
                                 else {}
                             ),
@@ -249,19 +242,11 @@ class HttpClient:
             json=json_body,
             data=data_body,
             content=content,
-            files=(
-                convert_file_dict_to_httpx_tuples(
-                    remove_omit_from_dict(remove_none_from_dict(files), omit)
-                )
-                if (files is not None and files is not omit)
-                else None
-            ),
+            files=request_files,
             timeout=timeout,
         )
 
-        max_retries: int = (
-            request_options.get("max_retries", 0) if request_options is not None else 0
-        )
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
         if _should_retry(response=response):
             if max_retries > retries:
                 time.sleep(_retry_timeout(response=response, retries=retries))
@@ -291,29 +276,37 @@ class HttpClient:
         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
         json: typing.Optional[typing.Any] = None,
         data: typing.Optional[typing.Any] = None,
-        content: typing.Optional[
-            typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]
-        ] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
         files: typing.Optional[
-            typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]
+            typing.Union[
+                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                typing.List[typing.Tuple[str, File]],
+            ]
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
         retries: int = 2,
         omit: typing.Optional[typing.Any] = None,
+        force_multipart: typing.Optional[bool] = None,
     ) -> typing.Iterator[httpx.Response]:
         base_url = self.get_base_url(base_url)
         timeout = (
             request_options.get("timeout_in_seconds")
-            if request_options is not None
-            and request_options.get("timeout_in_seconds") is not None
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
             else self.base_timeout()
        )
 
-        json_body, data_body = get_request_body(
-            json=json, data=data, request_options=request_options, omit=omit
+        request_files: typing.Optional[RequestFiles] = (
+            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+            if (files is not None and files is not omit and isinstance(files, dict))
+            else None
         )
 
+        if (request_files is None or len(request_files) == 0) and force_multipart:
+            request_files = FORCE_MULTIPART
+
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
         with self.httpx_client.stream(
             method=method,
             url=urllib.parse.urljoin(f"{base_url}/", path),
@@ -322,11 +315,7 @@ class HttpClient:
                     {
                         **self.base_headers(),
                         **(headers if headers is not None else {}),
-                        **(
-                            request_options.get("additional_headers", {})
-                            if request_options is not None
-                            else {}
-                        ),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                     }
                 )
             ),
@@ -337,9 +326,7 @@ class HttpClient:
                        {
                             **(params if params is not None else {}),
                             **(
-                                request_options.get(
-                                    "additional_query_parameters", {}
-                                )
+                                request_options.get("additional_query_parameters", {})
                                 if request_options is not None
                                 else {}
                             ),
@@ -352,13 +339,7 @@ class HttpClient:
             json=json_body,
             data=data_body,
             content=content,
-            files=(
-                convert_file_dict_to_httpx_tuples(
-                    remove_omit_from_dict(remove_none_from_dict(files), omit)
-                )
-                if (files is not None and files is not omit)
-                else None
-            ),
+            files=request_files,
             timeout=timeout,
         ) as stream:
             yield stream
@@ -384,9 +365,7 @@ class AsyncHttpClient:
         base_url = self.base_url()
 
         if base_url is None:
-            raise ValueError(
-                "A base_url is required to make this request, please provide one and try again."
-            )
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
         return base_url
 
     async def request(
@@ -398,29 +377,37 @@ class AsyncHttpClient:
         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
         json: typing.Optional[typing.Any] = None,
         data: typing.Optional[typing.Any] = None,
-        content: typing.Optional[
-            typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]
-        ] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
         files: typing.Optional[
-            typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]
+            typing.Union[
+                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                typing.List[typing.Tuple[str, File]],
+            ]
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
         retries: int = 2,
         omit: typing.Optional[typing.Any] = None,
+        force_multipart: typing.Optional[bool] = None,
    ) -> httpx.Response:
         base_url = self.get_base_url(base_url)
         timeout = (
             request_options.get("timeout_in_seconds")
-            if request_options is not None
-            and request_options.get("timeout_in_seconds") is not None
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
             else self.base_timeout()
         )
 
-        json_body, data_body = get_request_body(
-            json=json, data=data, request_options=request_options, omit=omit
+        request_files: typing.Optional[RequestFiles] = (
+            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+            if (files is not None and files is not omit and isinstance(files, dict))
+            else None
         )
 
+        if (request_files is None or len(request_files) == 0) and force_multipart:
+            request_files = FORCE_MULTIPART
+
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
         # Add the input to each of these and do None-safety checks
         response = await self.httpx_client.request(
             method=method,
@@ -430,11 +417,7 @@ class AsyncHttpClient:
                     {
                         **self.base_headers(),
                         **(headers if headers is not None else {}),
-                        **(
-                            request_options.get("additional_headers", {}) or {}
-                            if request_options is not None
-                            else {}
-                        ),
+                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                     }
                 )
             ),
@@ -445,10 +428,7 @@ class AsyncHttpClient:
                        {
                             **(params if params is not None else {}),
                             **(
-                                request_options.get(
-                                    "additional_query_parameters", {}
-                                )
-                                or {}
+                                request_options.get("additional_query_parameters", {}) or {}
                                 if request_options is not None
                                 else {}
                             ),
@@ -461,19 +441,11 @@ class AsyncHttpClient:
             json=json_body,
             data=data_body,
             content=content,
-            files=(
-                convert_file_dict_to_httpx_tuples(
-                    remove_omit_from_dict(remove_none_from_dict(files), omit)
-                )
-                if files is not None
-                else None
-            ),
+            files=request_files,
             timeout=timeout,
         )
 
-        max_retries: int = (
-            request_options.get("max_retries", 0) if request_options is not None else 0
-        )
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
         if _should_retry(response=response):
             if max_retries > retries:
                 await asyncio.sleep(_retry_timeout(response=response, retries=retries))
@@ -502,29 +474,37 @@ class AsyncHttpClient:
         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
         json: typing.Optional[typing.Any] = None,
         data: typing.Optional[typing.Any] = None,
-        content: typing.Optional[
-            typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]
-        ] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
         files: typing.Optional[
-            typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]
+            typing.Union[
+                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                typing.List[typing.Tuple[str, File]],
+            ]
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
         retries: int = 2,
         omit: typing.Optional[typing.Any] = None,
+        force_multipart: typing.Optional[bool] = None,
     ) -> typing.AsyncIterator[httpx.Response]:
         base_url = self.get_base_url(base_url)
         timeout = (
             request_options.get("timeout_in_seconds")
-            if request_options is not None
-            and request_options.get("timeout_in_seconds") is not None
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
             else self.base_timeout()
         )
 
-        json_body, data_body = get_request_body(
-            json=json, data=data, request_options=request_options, omit=omit
+        request_files: typing.Optional[RequestFiles] = (
+            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+            if (files is not None and files is not omit and isinstance(files, dict))
+            else None
         )
 
+        if (request_files is None or len(request_files) == 0) and force_multipart:
+            request_files = FORCE_MULTIPART
+
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
         async with self.httpx_client.stream(
             method=method,
             url=urllib.parse.urljoin(f"{base_url}/", path),
@@ -533,11 +513,7 @@ class AsyncHttpClient:
                     {
                         **self.base_headers(),
                         **(headers if headers is not None else {}),
-                        **(
-                            request_options.get("additional_headers", {})
-                            if request_options is not None
-                            else {}
-                        ),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                     }
                 )
             ),
@@ -548,9 +524,7 @@ class AsyncHttpClient:
                        {
                             **(params if params is not None else {}),
                             **(
-                                request_options.get(
-                                    "additional_query_parameters", {}
-                                )
+                                request_options.get("additional_query_parameters", {})
                                 if request_options is not None
                                 else {}
                             ),
@@ -563,13 +537,7 @@ class AsyncHttpClient:
             json=json_body,
             data=data_body,
             content=content,
-            files=(
-                convert_file_dict_to_httpx_tuples(
-                    remove_omit_from_dict(remove_none_from_dict(files), omit)
-                )
-                if files is not None
-                else None
-            ),
+            files=request_files,
             timeout=timeout,
         ) as stream:
             yield stream
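Most of these hunks reflow long expressions; the functional changes are the new `force_multipart` flag, the precomputed `request_files` (which now also admits a list of `(name, File)` tuples), and the retry schedule carried over unchanged. The sketch below mirrors only the exponential-backoff branch of `_retry_timeout`; the real function first honors a `Retry-After` response header:

```python
from random import random

INITIAL_RETRY_DELAY_SECONDS = 0.5
MAX_RETRY_DELAY_SECONDS = 10


def backoff(retries: int) -> float:
    # 0.5s, 1s, 2s, 4s, 8s, ... capped at 10s,
    retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
    # ...minus up to 25% jitter so concurrent clients do not retry in lockstep.
    return retry_delay * (1 - 0.25 * random())


for attempt in range(6):
    print(f"retry {attempt}: sleep {backoff(attempt):.2f}s")
```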
package/planqk/service/sdk/core/http_response.py (new file)

@@ -0,0 +1,55 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from typing import Dict, Generic, TypeVar
+
+import httpx
+
+T = TypeVar("T")
+"""Generic to represent the underlying type of the data wrapped by the HTTP response."""
+
+
+class BaseHttpResponse:
+    """Minimalist HTTP response wrapper that exposes response headers."""
+
+    _response: httpx.Response
+
+    def __init__(self, response: httpx.Response):
+        self._response = response
+
+    @property
+    def headers(self) -> Dict[str, str]:
+        return dict(self._response.headers)
+
+
+class HttpResponse(Generic[T], BaseHttpResponse):
+    """HTTP response wrapper that exposes response headers and data."""
+
+    _data: T
+
+    def __init__(self, response: httpx.Response, data: T):
+        super().__init__(response)
+        self._data = data
+
+    @property
+    def data(self) -> T:
+        return self._data
+
+    def close(self) -> None:
+        self._response.close()
+
+
+class AsyncHttpResponse(Generic[T], BaseHttpResponse):
+    """HTTP response wrapper that exposes response headers and data."""
+
+    _data: T
+
+    def __init__(self, response: httpx.Response, data: T):
+        super().__init__(response)
+        self._data = data
+
+    @property
+    def data(self) -> T:
+        return self._data
+
+    async def close(self) -> None:
+        await self._response.aclose()
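A usage sketch for the new wrappers (the import path is assumed from the file list; the payload and header values are made up). The new `service_api/raw_client.py` in the file list is the likely consumer, returning `HttpResponse[T]` so callers get both the typed payload and the response headers:

```python
import httpx

from planqk.service.sdk.core.http_response import HttpResponse  # path assumed

raw = httpx.Response(200, headers={"x-request-id": "123"}, json={"status": "SUCCEEDED"})
wrapped = HttpResponse(response=raw, data={"status": "SUCCEEDED"})

print(wrapped.headers["x-request-id"])  # headers pass straight through from httpx
print(wrapped.data)                     # the typed, deserialized payload
wrapped.close()
```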
package/planqk/service/sdk/core/jsonable_encoder.py

@@ -17,7 +17,6 @@ from types import GeneratorType
 from typing import Any, Callable, Dict, List, Optional, Set, Union
 
 import pydantic
-
 from .datetime_utils import serialize_datetime
 from .pydantic_utilities import (
     IS_PYDANTIC_V2,
@@ -29,9 +28,7 @@ SetIntStr = Set[Union[int, str]]
 DictIntStrAny = Dict[Union[int, str], Any]
 
 
-def jsonable_encoder(
-    obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None
-) -> Any:
+def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
     custom_encoder = custom_encoder or {}
     if custom_encoder:
         if type(obj) in custom_encoder: