deeporigin-data-sdk 0.1.0a56__py3-none-any.whl → 0.1.0a57__py3-none-any.whl
This diff shows the publicly released contents of the two package versions as they appear in their public registries and is provided for informational purposes only.
- deeporigin_data/_base_client.py +207 -233
- deeporigin_data/_models.py +2 -3
- deeporigin_data/_utils/_typing.py +1 -1
- deeporigin_data/_utils/_utils.py +9 -1
- deeporigin_data/_version.py +1 -1
- {deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/METADATA +7 -7
- {deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/RECORD +9 -9
- {deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/WHEEL +0 -0
- {deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/licenses/LICENSE +0 -0
deeporigin_data/_base_client.py
CHANGED
@@ -98,7 +98,11 @@ _StreamT = TypeVar("_StreamT", bound=Stream[Any])
 _AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any])
 
 if TYPE_CHECKING:
-    from httpx._config import
+    from httpx._config import (
+        DEFAULT_TIMEOUT_CONFIG,  # pyright: ignore[reportPrivateImportUsage]
+    )
+
+    HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG
 else:
     try:
         from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT
@@ -115,6 +119,7 @@ class PageInfo:
 
     url: URL | NotGiven
     params: Query | NotGiven
+    json: Body | NotGiven
 
     @overload
     def __init__(
@@ -130,19 +135,30 @@ class PageInfo:
         params: Query,
     ) -> None: ...
 
+    @overload
+    def __init__(
+        self,
+        *,
+        json: Body,
+    ) -> None: ...
+
     def __init__(
         self,
         *,
         url: URL | NotGiven = NOT_GIVEN,
+        json: Body | NotGiven = NOT_GIVEN,
        params: Query | NotGiven = NOT_GIVEN,
     ) -> None:
         self.url = url
+        self.json = json
         self.params = params
 
     @override
     def __repr__(self) -> str:
         if self.url:
             return f"{self.__class__.__name__}(url={self.url})"
+        if self.json:
+            return f"{self.__class__.__name__}(json={self.json})"
         return f"{self.__class__.__name__}(params={self.params})"
 
 
@@ -191,6 +207,19 @@ class BasePage(GenericModel, Generic[_T]):
             options.url = str(url)
             return options
 
+        if not isinstance(info.json, NotGiven):
+            if not is_mapping(info.json):
+                raise TypeError("Pagination is only supported with mappings")
+
+            if not options.json_data:
+                options.json_data = {**info.json}
+            else:
+                if not is_mapping(options.json_data):
+                    raise TypeError("Pagination is only supported with mappings")
+
+                options.json_data = {**options.json_data, **info.json}
+            return options
+
         raise ValueError("Unexpected PageInfo state")
 
 
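Note: together, the PageInfo hunks above add a json variant alongside url and params, so a paginated endpoint whose cursor travels in the request body can be advanced by merging the cursor into options.json_data. Below is a minimal sketch of how a page class might hand back such a cursor; the class and field names are illustrative, not the SDK's generated pages, and PageInfo is imported from a private module purely for illustration.

from typing import Any, List, Optional

from deeporigin_data._base_client import PageInfo  # private module, shown only for illustration


class ExampleCursorPage:
    """Illustrative page shape; real pages are generated by the SDK."""

    def __init__(self, data: List[Any], next_cursor: Optional[str]) -> None:
        self.data = data
        self.next_cursor = next_cursor

    def next_page_info(self) -> Optional[PageInfo]:
        if not self.next_cursor:
            return None  # no further pages
        # The cursor is merged into the next request's JSON body
        # (options.json_data) rather than into the query string.
        return PageInfo(json={"cursor": self.next_cursor})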
@@ -408,8 +437,8 @@ class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]):
         headers = httpx.Headers(headers_dict)
 
         idempotency_header = self._idempotency_header
-        if idempotency_header and options.
-            headers[idempotency_header] = options.idempotency_key
+        if idempotency_header and options.idempotency_key and idempotency_header not in headers:
+            headers[idempotency_header] = options.idempotency_key
 
         # Don't set these headers if they were already set or removed by the caller. We check
         # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case.
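Note: the tightened condition above only injects the generated idempotency key when the caller has not already supplied that header, so a caller-provided value wins. A standalone sketch of the precedence rule; the header name and values are illustrative, not the SDK's defaults.

from typing import Optional

import httpx


def apply_idempotency(headers: httpx.Headers, header_name: str, key: Optional[str]) -> httpx.Headers:
    # Only inject the generated key when the caller did not set one themselves.
    if header_name and key and header_name not in headers:
        headers[header_name] = key
    return headers


caller_headers = httpx.Headers({"Idempotency-Key": "caller-chosen-key"})
apply_idempotency(caller_headers, "Idempotency-Key", "auto-generated-key")
print(caller_headers["Idempotency-Key"])  # caller-chosen-key: the caller's value wins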
@@ -873,7 +902,6 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         self,
         cast_to: Type[ResponseT],
         options: FinalRequestOptions,
-        remaining_retries: Optional[int] = None,
         *,
         stream: Literal[True],
         stream_cls: Type[_StreamT],
@@ -884,7 +912,6 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         self,
         cast_to: Type[ResponseT],
         options: FinalRequestOptions,
-        remaining_retries: Optional[int] = None,
         *,
         stream: Literal[False] = False,
     ) -> ResponseT: ...
@@ -894,7 +921,6 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         self,
         cast_to: Type[ResponseT],
         options: FinalRequestOptions,
-        remaining_retries: Optional[int] = None,
         *,
         stream: bool = False,
         stream_cls: Type[_StreamT] | None = None,
@@ -904,121 +930,109 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         self,
         cast_to: Type[ResponseT],
         options: FinalRequestOptions,
-        remaining_retries: Optional[int] = None,
         *,
         stream: bool = False,
         stream_cls: type[_StreamT] | None = None,
     ) -> ResponseT | _StreamT:
-            retries_taken = options.get_max_retries(self.max_retries) - remaining_retries
-        else:
-            retries_taken = 0
-        return self._request(
-            cast_to=cast_to,
-            options=options,
-            stream=stream,
-            stream_cls=stream_cls,
-            retries_taken=retries_taken,
-        )
+        cast_to = self._maybe_override_cast_to(cast_to, options)
 
-    def _request(
-        self,
-        *,
-        cast_to: Type[ResponseT],
-        options: FinalRequestOptions,
-        retries_taken: int,
-        stream: bool,
-        stream_cls: type[_StreamT] | None,
-    ) -> ResponseT | _StreamT:
         # create a copy of the options we were given so that if the
         # options are mutated later & we then retry, the retries are
         # given the original options
         input_options = model_copy(options)
+        if input_options.idempotency_key is None and input_options.method.lower() != "get":
+            # ensure the idempotency key is reused between requests
+            input_options.idempotency_key = self._idempotency_key()
 
-        remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
-        request = self._build_request(options, retries_taken=retries_taken)
-        self._prepare_request(request)
-        kwargs: HttpxSendArgs = {}
-        if self.custom_auth is not None:
-            kwargs["auth"] = self.custom_auth
+        response: httpx.Response | None = None
+        max_retries = input_options.get_max_retries(self.max_retries)
 
+        retries_taken = 0
+        for retries_taken in range(max_retries + 1):
+            options = model_copy(input_options)
+            options = self._prepare_options(options)
 
-                stream=stream or self._should_stream_response_body(request=request),
-                **kwargs,
-            )
-        except httpx.TimeoutException as err:
-            log.debug("Encountered httpx.TimeoutException", exc_info=True)
+            remaining_retries = max_retries - retries_taken
+            request = self._build_request(options, retries_taken=retries_taken)
+            self._prepare_request(request)
 
-                cast_to,
-                retries_taken=retries_taken,
-                stream=stream,
-                stream_cls=stream_cls,
-                response_headers=None,
-            )
+            kwargs: HttpxSendArgs = {}
+            if self.custom_auth is not None:
+                kwargs["auth"] = self.custom_auth
 
-            log.debug("
-            raise APITimeoutError(request=request) from err
-        except Exception as err:
-            log.debug("Encountered Exception", exc_info=True)
+            log.debug("Sending HTTP Request: %s %s", request.method, request.url)
 
-                stream_cls=stream_cls,
-                response_headers=None,
+            response = None
+            try:
+                response = self._client.send(
+                    request,
+                    stream=stream or self._should_stream_response_body(request=request),
+                    **kwargs,
                 )
+            except httpx.TimeoutException as err:
+                log.debug("Encountered httpx.TimeoutException", exc_info=True)
+
+                if remaining_retries > 0:
+                    self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=None,
+                    )
+                    continue
+
+                log.debug("Raising timeout error")
+                raise APITimeoutError(request=request) from err
+            except Exception as err:
+                log.debug("Encountered Exception", exc_info=True)
+
+                if remaining_retries > 0:
+                    self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=None,
+                    )
+                    continue
+
+                log.debug("Raising connection error")
+                raise APIConnectionError(request=request) from err
+
+            log.debug(
+                'HTTP Response: %s %s "%i %s" %s',
+                request.method,
+                request.url,
+                response.status_code,
+                response.reason_phrase,
+                response.headers,
+            )
 
+            try:
+                response.raise_for_status()
+            except httpx.HTTPStatusError as err:  # thrown on 4xx and 5xx status code
+                log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
+
+                if remaining_retries > 0 and self._should_retry(err.response):
+                    err.response.close()
+                    self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=response,
+                    )
+                    continue
 
-            if remaining_retries > 0 and self._should_retry(err.response):
-                err.response.close()
-                return self._retry_request(
-                    input_options,
-                    cast_to,
-                    retries_taken=retries_taken,
-                    response_headers=err.response.headers,
-                    stream=stream,
-                    stream_cls=stream_cls,
-                )
+                # If the response is streamed then we need to explicitly read the response
+                # to completion before attempting to access the response text.
+                if not err.response.is_closed:
+                    err.response.read()
 
-            if not err.response.is_closed:
-                err.response.read()
+                log.debug("Re-raising status error")
+                raise self._make_status_error_from_response(err.response) from None
 
-            raise self._make_status_error_from_response(err.response) from None
+            break
 
+        assert response is not None, "could not resolve response (should never happen)"
         return self._process_response(
             cast_to=cast_to,
             options=options,
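Note: the hunk above replaces the recursive request → _request → _retry_request chain with a single bounded for loop: continue schedules another attempt, break exits once a response is resolved, and retries_taken stays in one place instead of being threaded through recursive calls. A self-contained sketch of the same control flow follows; the backoff constants and the retry-on-any-HTTP-error policy are illustrative simplifications, not the SDK's behavior (the real client consults _should_retry and the response status).

import random
import time

import httpx


def send_with_retries(client: httpx.Client, request: httpx.Request, max_retries: int = 2) -> httpx.Response:
    response = None
    for retries_taken in range(max_retries + 1):
        remaining = max_retries - retries_taken
        try:
            response = client.send(request)
            response.raise_for_status()
        except (httpx.TimeoutException, httpx.HTTPStatusError):
            if remaining > 0:
                # illustrative exponential backoff with a little jitter
                time.sleep(min(8.0, 0.5 * 2**retries_taken) * (1 + 0.25 * random.random()))
                continue  # next loop pass is the retry
            raise
        break  # response resolved, stop retrying
    assert response is not None
    return response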
@@ -1028,37 +1042,20 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
             retries_taken=retries_taken,
         )
 
-    def
-        self,
-        *,
-        retries_taken: int,
-        response_headers: httpx.Headers | None,
-        stream: bool,
-        stream_cls: type[_StreamT] | None,
-    ) -> ResponseT | _StreamT:
-        remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
+    def _sleep_for_retry(
+        self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None
+    ) -> None:
+        remaining_retries = max_retries - retries_taken
         if remaining_retries == 1:
             log.debug("1 retry left")
         else:
             log.debug("%i retries left", remaining_retries)
 
-        timeout = self._calculate_retry_timeout(remaining_retries, options,
+        timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None)
         log.info("Retrying request to %s in %f seconds", options.url, timeout)
 
-        # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a
-        # different thread if necessary.
         time.sleep(timeout)
 
-        return self._request(
-            options=options,
-            cast_to=cast_to,
-            retries_taken=retries_taken + 1,
-            stream=stream,
-            stream_cls=stream_cls,
-        )
-
     def _process_response(
         self,
         *,
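Note: _sleep_for_retry now receives the failed response so that _calculate_retry_timeout can consult its headers when deciding how long to wait. The usual reason to do this is a server-sent Retry-After header; below is an illustrative parser for that header, not the SDK's implementation.

import email.utils
import time
from typing import Optional

import httpx


def retry_after_seconds(headers: Optional[httpx.Headers]) -> Optional[float]:
    """Return a server-requested delay in seconds, if one was sent."""
    if headers is None:
        return None
    value = headers.get("retry-after")
    if value is None:
        return None
    try:
        return float(value)  # e.g. "Retry-After: 5"
    except ValueError:
        pass
    try:
        parsed = email.utils.parsedate_to_datetime(value)  # e.g. an HTTP-date
    except (TypeError, ValueError):
        return None
    return max(0.0, parsed.timestamp() - time.time())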
@@ -1402,7 +1399,6 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         options: FinalRequestOptions,
         *,
         stream: Literal[False] = False,
-        remaining_retries: Optional[int] = None,
     ) -> ResponseT: ...
 
     @overload
@@ -1413,7 +1409,6 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         stream: Literal[True],
         stream_cls: type[_AsyncStreamT],
-        remaining_retries: Optional[int] = None,
     ) -> _AsyncStreamT: ...
 
     @overload
@@ -1424,7 +1419,6 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         stream: bool,
         stream_cls: type[_AsyncStreamT] | None = None,
-        remaining_retries: Optional[int] = None,
     ) -> ResponseT | _AsyncStreamT: ...
 
     async def request(
@@ -1434,116 +1428,111 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         stream: bool = False,
         stream_cls: type[_AsyncStreamT] | None = None,
-        remaining_retries: Optional[int] = None,
-    ) -> ResponseT | _AsyncStreamT:
-        if remaining_retries is not None:
-            retries_taken = options.get_max_retries(self.max_retries) - remaining_retries
-        else:
-            retries_taken = 0
-
-        return await self._request(
-            cast_to=cast_to,
-            options=options,
-            stream=stream,
-            stream_cls=stream_cls,
-            retries_taken=retries_taken,
-        )
-
-    async def _request(
-        self,
-        cast_to: Type[ResponseT],
-        options: FinalRequestOptions,
-        *,
-        stream: bool,
-        stream_cls: type[_AsyncStreamT] | None,
-        retries_taken: int,
     ) -> ResponseT | _AsyncStreamT:
         if self._platform is None:
             # `get_platform` can make blocking IO calls so we
             # execute it earlier while we are in an async context
             self._platform = await asyncify(get_platform)()
 
+        cast_to = self._maybe_override_cast_to(cast_to, options)
+
         # create a copy of the options we were given so that if the
         # options are mutated later & we then retry, the retries are
         # given the original options
         input_options = model_copy(options)
+        if input_options.idempotency_key is None and input_options.method.lower() != "get":
+            # ensure the idempotency key is reused between requests
+            input_options.idempotency_key = self._idempotency_key()
 
+        response: httpx.Response | None = None
+        max_retries = input_options.get_max_retries(self.max_retries)
 
+        retries_taken = 0
+        for retries_taken in range(max_retries + 1):
+            options = model_copy(input_options)
+            options = await self._prepare_options(options)
 
+            remaining_retries = max_retries - retries_taken
+            request = self._build_request(options, retries_taken=retries_taken)
+            await self._prepare_request(request)
 
-                stream=stream or self._should_stream_response_body(request=request),
-                **kwargs,
-            )
-        except httpx.TimeoutException as err:
-            log.debug("Encountered httpx.TimeoutException", exc_info=True)
-            if remaining_retries > 0:
-                return await self._retry_request(
-                    input_options,
-                    cast_to,
-                    retries_taken=retries_taken,
-                    stream=stream,
-                    stream_cls=stream_cls,
-                    response_headers=None,
-                )
+            kwargs: HttpxSendArgs = {}
+            if self.custom_auth is not None:
+                kwargs["auth"] = self.custom_auth
 
-            log.debug("
-            raise APITimeoutError(request=request) from err
-        except Exception as err:
-            log.debug("Encountered Exception", exc_info=True)
+            log.debug("Sending HTTP Request: %s %s", request.method, request.url)
 
-                    stream_cls=stream_cls,
-                    response_headers=None,
+            response = None
+            try:
+                response = await self._client.send(
+                    request,
+                    stream=stream or self._should_stream_response_body(request=request),
+                    **kwargs,
                 )
+            except httpx.TimeoutException as err:
+                log.debug("Encountered httpx.TimeoutException", exc_info=True)
+
+                if remaining_retries > 0:
+                    await self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=None,
+                    )
+                    continue
+
+                log.debug("Raising timeout error")
+                raise APITimeoutError(request=request) from err
+            except Exception as err:
+                log.debug("Encountered Exception", exc_info=True)
+
+                if remaining_retries > 0:
+                    await self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=None,
+                    )
+                    continue
+
+                log.debug("Raising connection error")
+                raise APIConnectionError(request=request) from err
+
+            log.debug(
+                'HTTP Response: %s %s "%i %s" %s',
+                request.method,
+                request.url,
+                response.status_code,
+                response.reason_phrase,
+                response.headers,
+            )
 
+            try:
+                response.raise_for_status()
+            except httpx.HTTPStatusError as err:  # thrown on 4xx and 5xx status code
+                log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
+
+                if remaining_retries > 0 and self._should_retry(err.response):
+                    await err.response.aclose()
+                    await self._sleep_for_retry(
+                        retries_taken=retries_taken,
+                        max_retries=max_retries,
+                        options=input_options,
+                        response=response,
+                    )
+                    continue
 
+                # If the response is streamed then we need to explicitly read the response
+                # to completion before attempting to access the response text.
+                if not err.response.is_closed:
+                    await err.response.aread()
 
-            except httpx.HTTPStatusError as err:  # thrown on 4xx and 5xx status code
-                log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
-
-                if remaining_retries > 0 and self._should_retry(err.response):
-                    await err.response.aclose()
-                    return await self._retry_request(
-                        input_options,
-                        cast_to,
-                        retries_taken=retries_taken,
-                        response_headers=err.response.headers,
-                        stream=stream,
-                        stream_cls=stream_cls,
-                    )
+                log.debug("Re-raising status error")
+                raise self._make_status_error_from_response(err.response) from None
 
-            # to completion before attempting to access the response text.
-            if not err.response.is_closed:
-                await err.response.aread()
-
-            log.debug("Re-raising status error")
-            raise self._make_status_error_from_response(err.response) from None
+            break
 
+        assert response is not None, "could not resolve response (should never happen)"
         return await self._process_response(
             cast_to=cast_to,
             options=options,
@@ -1553,35 +1542,20 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
             retries_taken=retries_taken,
         )
 
-    async def
-        self,
-        *,
-        retries_taken: int,
-        response_headers: httpx.Headers | None,
-        stream: bool,
-        stream_cls: type[_AsyncStreamT] | None,
-    ) -> ResponseT | _AsyncStreamT:
-        remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
+    async def _sleep_for_retry(
+        self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None
+    ) -> None:
+        remaining_retries = max_retries - retries_taken
         if remaining_retries == 1:
             log.debug("1 retry left")
         else:
             log.debug("%i retries left", remaining_retries)
 
-        timeout = self._calculate_retry_timeout(remaining_retries, options,
+        timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None)
         log.info("Retrying request to %s in %f seconds", options.url, timeout)
 
         await anyio.sleep(timeout)
 
-        return await self._request(
-            options=options,
-            cast_to=cast_to,
-            retries_taken=retries_taken + 1,
-            stream=stream,
-            stream_cls=stream_cls,
-        )
-
     async def _process_response(
         self,
         *,
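Note: the async helper mirrors the sync one but awaits anyio.sleep, so the event loop keeps servicing other tasks while the client backs off. A minimal sketch of that pattern; the delay formula is illustrative.

import anyio


async def backoff(retries_taken: int, base: float = 0.5, cap: float = 8.0) -> None:
    # Await instead of time.sleep() so other coroutines keep running.
    await anyio.sleep(min(cap, base * 2**retries_taken))


anyio.run(backoff, 1)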
deeporigin_data/_models.py
CHANGED
@@ -19,7 +19,6 @@ from typing_extensions import (
 )
 
 import pydantic
-import pydantic.generics
 from pydantic.fields import FieldInfo
 
 from ._types import (
@@ -627,8 +626,8 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
                 # Note: if one variant defines an alias then they all should
                 discriminator_alias = field_info.alias
 
-                if field_info
-                for entry in get_args(
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
                         if isinstance(entry, str):
                             mapping[entry] = variant
 
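Note: the change above reads the discriminator values from the field's Literal annotation (via the walrus assignment) before mapping each value to its variant class. A standalone sketch of that extraction, independent of the SDK's pydantic helpers; the example classes are illustrative.

from typing import Literal, get_args, get_origin


class CatParams:
    kind: Literal["cat"]


class DogParams:
    kind: Literal["dog", "puppy"]


def build_mapping(variants: tuple, field_name: str) -> dict:
    # Map every string value of a Literal-typed discriminator field to its variant.
    mapping: dict = {}
    for variant in variants:
        annotation = variant.__annotations__.get(field_name)
        if annotation is not None and get_origin(annotation) is Literal:
            for entry in get_args(annotation):
                if isinstance(entry, str):
                    mapping[entry] = variant
    return mapping


print(build_mapping((CatParams, DogParams), "kind"))
# {'cat': CatParams, 'dog': DogParams, 'puppy': DogParams}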
deeporigin_data/_utils/_typing.py
CHANGED
@@ -110,7 +110,7 @@ def extract_type_var_from_base(
     ```
     """
     cls = cast(object, get_origin(typ) or typ)
-    if cls in generic_bases:
+    if cls in generic_bases:  # pyright: ignore[reportUnnecessaryContains]
         # we're given the class directly
         return extract_type_arg(typ, index)
 
deeporigin_data/_utils/_utils.py
CHANGED
@@ -72,8 +72,16 @@ def _extract_items(
         from .._files import assert_is_file_content
 
         # We have exhausted the path, return the entry we found.
-        assert_is_file_content(obj, key=flattened_key)
         assert flattened_key is not None
+
+        if is_list(obj):
+            files: list[tuple[str, FileTypes]] = []
+            for entry in obj:
+                assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
+                files.append((flattened_key + "[]", cast(FileTypes, entry)))
+            return files
+
+        assert_is_file_content(obj, key=flattened_key)
         return [(flattened_key, cast(FileTypes, obj))]
 
     index += 1
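Note: the change above lets a list-valued file field expand into repeated multipart entries under the same "<key>[]" name instead of failing the file-content assertion. A minimal httpx-level sketch of the resulting request shape; the field name "attachments" and the URL are illustrative, not part of the SDK.

import io

import httpx

# Each list entry is repeated under the same "<key>[]" multipart field name.
files = [
    ("attachments[]", ("a.txt", io.BytesIO(b"first"), "text/plain")),
    ("attachments[]", ("b.txt", io.BytesIO(b"second"), "text/plain")),
]
request = httpx.Request("POST", "https://example.com/upload", files=files)
print(request.headers["content-type"])  # multipart/form-data; boundary=...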
deeporigin_data/_version.py
CHANGED
{deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: deeporigin_data_sdk
-Version: 0.1.0a56
+Version: 0.1.0a57
 Summary: The official Python library for the deeporigin_data API
 Project-URL: Homepage, https://github.com/deeporiginbio/deeporigin-data-sdk
 Project-URL: Repository, https://github.com/deeporiginbio/deeporigin-data-sdk
@@ -59,8 +59,8 @@ import os
 from deeporigin_data import DeeporiginData
 
 client = DeeporiginData(
-    token=os.environ.get("ORG_BEARER_TOKEN"),  # This is the default and can be omitted
     org_id="My Org ID",
+    token=os.environ.get("ORG_BEARER_TOKEN"),  # This is the default and can be omitted
 )
 
 describe_row_response = client.describe_row(
@@ -84,8 +84,8 @@ import asyncio
 from deeporigin_data import AsyncDeeporiginData
 
 client = AsyncDeeporiginData(
-    token=os.environ.get("ORG_BEARER_TOKEN"),  # This is the default and can be omitted
     org_id="My Org ID",
+    token=os.environ.get("ORG_BEARER_TOKEN"),  # This is the default and can be omitted
 )
 
 
@@ -189,9 +189,9 @@ from deeporigin_data import DeeporiginData
 
 # Configure the default for all requests:
 client = DeeporiginData(
+    org_id="My Org ID",
     # default is 2
     max_retries=0,
-    org_id="My Org ID",
 )
 
 # Or, configure per-request:
@@ -210,15 +210,15 @@ from deeporigin_data import DeeporiginData
 
 # Configure the default for all requests:
 client = DeeporiginData(
+    org_id="My Org ID",
     # 20 seconds (default is 1 minute)
     timeout=20.0,
-    org_id="My Org ID",
 )
 
 # More granular control:
 client = DeeporiginData(
-    timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0),
     org_id="My Org ID",
+    timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0),
 )
 
 # Override per-request:
@@ -345,13 +345,13 @@ import httpx
 from deeporigin_data import DeeporiginData, DefaultHttpxClient
 
 client = DeeporiginData(
+    org_id="My Org ID",
     # Or use the `DEEPORIGIN_DATA_BASE_URL` env var
     base_url="http://my.test.server.example.com:8083",
     http_client=DefaultHttpxClient(
         proxy="http://my.test.proxy.example.com",
         transport=httpx.HTTPTransport(local_address="0.0.0.0"),
     ),
-    org_id="My Org ID",
 )
 ```
 
{deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/RECORD
RENAMED
@@ -1,17 +1,17 @@
 deeporigin_data/__init__.py,sha256=g5vq9kCCiWBl8XgJzXdUB9AIdRypOEj9pQH8WJJEVxo,2533
-deeporigin_data/_base_client.py,sha256=
+deeporigin_data/_base_client.py,sha256=NiHxYas3raJrBeEWpgFsEUwAQH3WPEwLe9SlMQTjIOs,64853
 deeporigin_data/_client.py,sha256=i5YiMEgcN5kXZ6ChXAVafaPTju7BNwaBT2DK6dvaa-k,170904
 deeporigin_data/_compat.py,sha256=VWemUKbj6DDkQ-O4baSpHVLJafotzeXmCQGJugfVTIw,6580
 deeporigin_data/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
 deeporigin_data/_exceptions.py,sha256=_25MmrwuBf1sxAJESpY5sPn1o5E-aUymr6wDuRSWIng,3236
 deeporigin_data/_files.py,sha256=mf4dOgL4b0ryyZlbqLhggD3GVgDf6XxdGFAgce01ugE,3549
-deeporigin_data/_models.py,sha256=
+deeporigin_data/_models.py,sha256=mB2r2VWQq49jG-F0RIXDrBxPp3v-Eg12wMOtVTNxtv4,29057
 deeporigin_data/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
 deeporigin_data/_resource.py,sha256=tkm4gF9YRotE93j48jTDBSGs8wyVa0E5NS9fj19e38c,1148
 deeporigin_data/_response.py,sha256=aA9Ff3JrsFz4-PRMh4eMTY-t_IiFXI9_uWDP4CcmRps,28871
 deeporigin_data/_streaming.py,sha256=yG857cOSJD3gbc7mEc2wqfvcPVLMGmYX4hBOqqIT5RE,10132
 deeporigin_data/_types.py,sha256=HI5vtFJGLEsyOrrWJRSRtUeOSrd8EdoM020wC51GvcI,6152
-deeporigin_data/_version.py,sha256=
+deeporigin_data/_version.py,sha256=I2bZqMB-_U9gEFChRgMq-xoaMrLrontBVvOGbqvsUO8,176
 deeporigin_data/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 deeporigin_data/_utils/__init__.py,sha256=PNZ_QJuzZEgyYXqkO1HVhGkj5IU9bglVUcw7H-Knjzw,2062
 deeporigin_data/_utils/_logs.py,sha256=R7dnUaDs2cdYbq1Ee16dHy863wdcTZRRzubw9KE0qNc,801
@@ -20,8 +20,8 @@ deeporigin_data/_utils/_reflection.py,sha256=ZmGkIgT_PuwedyNBrrKGbxoWtkpytJNU1uU
 deeporigin_data/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289
 deeporigin_data/_utils/_sync.py,sha256=TpGLrrhRNWTJtODNE6Fup3_k7zrWm1j2RlirzBwre-0,2862
 deeporigin_data/_utils/_transform.py,sha256=n7kskEWz6o__aoNvhFoGVyDoalNe6mJwp-g7BWkdj88,15617
-deeporigin_data/_utils/_typing.py,sha256=
-deeporigin_data/_utils/_utils.py,sha256=
+deeporigin_data/_utils/_typing.py,sha256=D0DbbNu8GnYQTSICnTSHDGsYXj8TcAKyhejb0XcnjtY,4602
+deeporigin_data/_utils/_utils.py,sha256=ts4CiiuNpFiGB6YMdkQRh2SZvYvsl7mAF-JWHCcLDf4,12312
 deeporigin_data/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
 deeporigin_data/resources/__init__.py,sha256=ikKh5ucm9qFI-Z42nOKxhBhEI-YHaaxvsSddO_Nx0-Y,86
 deeporigin_data/types/__init__.py,sha256=rWEVTTs8jU5G-8Ua5Aix0ID5AkXImeqfd4TteL4aj9k,8567
@@ -120,7 +120,7 @@ deeporigin_data/types/shared_params/add_column_base.py,sha256=s8cbOjluJmf4Pzmg_v
 deeporigin_data/types/shared_params/add_column_union.py,sha256=uEJwB-xtbKY19Hq7a2vIrGdDfPcHIBwp9_R63Qf9KO0,1036
 deeporigin_data/types/shared_params/condition.py,sha256=38ItZ9QZrA3rnXNgds7KZcXCZ-h1zVttiD1R6uf5IGQ,3153
 deeporigin_data/types/shared_params/row_filter_join.py,sha256=QIo2yhjJJZLcGF-hBF7YcLcYHLhf5uq5EkQG-0WJjtU,595
-deeporigin_data_sdk-0.1.
-deeporigin_data_sdk-0.1.
-deeporigin_data_sdk-0.1.
-deeporigin_data_sdk-0.1.
+deeporigin_data_sdk-0.1.0a57.dist-info/METADATA,sha256=Da2UkH3HNOAhhJKEwnfYu6NuQHmU4Ktzi8GJ3dk3WrU,13775
+deeporigin_data_sdk-0.1.0a57.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+deeporigin_data_sdk-0.1.0a57.dist-info/licenses/LICENSE,sha256=jT1To9IZ3XdRqtpv8wDrIwpatTUvf5yP0sFYhEtJVZY,11345
+deeporigin_data_sdk-0.1.0a57.dist-info/RECORD,,
{deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/WHEEL
RENAMED
File without changes
{deeporigin_data_sdk-0.1.0a56.dist-info → deeporigin_data_sdk-0.1.0a57.dist-info}/licenses/LICENSE
RENAMED
File without changes