isaacus 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- isaacus/__init__.py +5 -2
- isaacus/_base_client.py +86 -15
- isaacus/_client.py +17 -9
- isaacus/_compat.py +48 -48
- isaacus/_files.py +4 -4
- isaacus/_models.py +80 -50
- isaacus/_qs.py +7 -7
- isaacus/_types.py +53 -12
- isaacus/_utils/__init__.py +9 -2
- isaacus/_utils/_compat.py +45 -0
- isaacus/_utils/_datetime_parse.py +136 -0
- isaacus/_utils/_transform.py +13 -3
- isaacus/_utils/_typing.py +6 -1
- isaacus/_utils/_utils.py +4 -5
- isaacus/_version.py +1 -1
- isaacus/resources/__init__.py +14 -0
- isaacus/resources/classifications/universal.py +17 -17
- isaacus/resources/embeddings.py +246 -0
- isaacus/resources/extractions/qa.py +23 -21
- isaacus/resources/rerankings.py +19 -19
- isaacus/types/__init__.py +3 -1
- isaacus/types/classifications/__init__.py +1 -1
- isaacus/types/classifications/{universal_classification.py → universal_classification_response.py} +2 -2
- isaacus/types/classifications/universal_create_params.py +4 -2
- isaacus/types/embedding_create_params.py +49 -0
- isaacus/types/embedding_response.py +31 -0
- isaacus/types/extractions/__init__.py +1 -1
- isaacus/types/extractions/{answer_extraction.py → answer_extraction_response.py} +2 -2
- isaacus/types/extractions/qa_create_params.py +7 -4
- isaacus/types/reranking_create_params.py +4 -2
- isaacus/types/{reranking.py → reranking_response.py} +2 -2
- {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/METADATA +90 -37
- isaacus-0.9.0.dist-info/RECORD +52 -0
- isaacus-0.7.0.dist-info/RECORD +0 -47
- {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/WHEEL +0 -0
- {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/licenses/LICENSE +0 -0
isaacus/__init__.py
CHANGED
@@ -3,7 +3,7 @@
 import typing as _t
 
 from . import types
-from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes
+from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given
 from ._utils import file_from_path
 from ._client import Client, Stream, Isaacus, Timeout, Transport, AsyncClient, AsyncStream, AsyncIsaacus, RequestOptions
 from ._models import BaseModel
@@ -26,7 +26,7 @@ from ._exceptions import (
     UnprocessableEntityError,
     APIResponseValidationError,
 )
-from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient
+from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
 from ._utils._logs import setup_logging as _setup_logging
 
 __all__ = [
@@ -38,7 +38,9 @@ __all__ = [
     "ProxiesTypes",
     "NotGiven",
     "NOT_GIVEN",
+    "not_given",
     "Omit",
+    "omit",
     "IsaacusError",
     "APIError",
     "APIStatusError",
@@ -68,6 +70,7 @@ __all__ = [
     "DEFAULT_CONNECTION_LIMITS",
     "DefaultHttpxClient",
     "DefaultAsyncHttpxClient",
+    "DefaultAioHttpClient",
 ]
 
 if not _t.TYPE_CHECKING:
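For downstream code, the practical effect of this hunk is that the lower-case sentinels and the aiohttp client alias are now importable from the package root. A minimal sketch, using only the names added to `__all__` above:

    from isaacus import NOT_GIVEN, Omit, omit, not_given, DefaultAioHttpClient

    # `not_given` and `omit` are sentinel values exported alongside the existing
    # `NOT_GIVEN` and `Omit` names; `DefaultAioHttpClient` is defined in the
    # _base_client.py section below.
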
isaacus/_base_client.py
CHANGED
@@ -42,7 +42,6 @@ from . import _exceptions
 from ._qs import Querystring
 from ._files import to_httpx_files, async_to_httpx_files
 from ._types import (
-    NOT_GIVEN,
     Body,
     Omit,
     Query,
@@ -57,9 +56,10 @@ from ._types import (
     RequestOptions,
     HttpxRequestFiles,
     ModelBuilderProtocol,
+    not_given,
 )
 from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import
+from ._compat import PYDANTIC_V1, model_copy, model_dump
 from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
 from ._response import (
     APIResponse,
@@ -119,6 +119,7 @@ class PageInfo:
 
     url: URL | NotGiven
     params: Query | NotGiven
+    json: Body | NotGiven
 
     @overload
     def __init__(
@@ -134,19 +135,30 @@ class PageInfo:
         params: Query,
     ) -> None: ...
 
+    @overload
+    def __init__(
+        self,
+        *,
+        json: Body,
+    ) -> None: ...
+
     def __init__(
         self,
         *,
-        url: URL | NotGiven =
-
+        url: URL | NotGiven = not_given,
+        json: Body | NotGiven = not_given,
+        params: Query | NotGiven = not_given,
     ) -> None:
         self.url = url
+        self.json = json
         self.params = params
 
     @override
     def __repr__(self) -> str:
         if self.url:
             return f"{self.__class__.__name__}(url={self.url})"
+        if self.json:
+            return f"{self.__class__.__name__}(json={self.json})"
         return f"{self.__class__.__name__}(params={self.params})"
 
 
@@ -195,6 +207,19 @@ class BasePage(GenericModel, Generic[_T]):
             options.url = str(url)
             return options
 
+        if not isinstance(info.json, NotGiven):
+            if not is_mapping(info.json):
+                raise TypeError("Pagination is only supported with mappings")
+
+            if not options.json_data:
+                options.json_data = {**info.json}
+            else:
+                if not is_mapping(options.json_data):
+                    raise TypeError("Pagination is only supported with mappings")
+
+                options.json_data = {**options.json_data, **info.json}
+            return options
+
         raise ValueError("Unexpected PageInfo state")
 
 
@@ -207,7 +232,7 @@ class BaseSyncPage(BasePage[_T], Generic[_T]):
         model: Type[_T],
         options: FinalRequestOptions,
     ) -> None:
-        if
+        if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
             self.__pydantic_private__ = {}
 
         self._model = model
@@ -295,7 +320,7 @@ class BaseAsyncPage(BasePage[_T], Generic[_T]):
         client: AsyncAPIClient,
         options: FinalRequestOptions,
     ) -> None:
-        if
+        if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
             self.__pydantic_private__ = {}
 
         self._model = model
@@ -504,6 +529,18 @@ class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]):
             # work around https://github.com/encode/httpx/discussions/2880
             kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")}
 
+        is_body_allowed = options.method.lower() != "get"
+
+        if is_body_allowed:
+            if isinstance(json_data, bytes):
+                kwargs["content"] = json_data
+            else:
+                kwargs["json"] = json_data if is_given(json_data) else None
+            kwargs["files"] = files
+        else:
+            headers.pop("Content-Type", None)
+            kwargs.pop("data", None)
+
         # TODO: report this error to httpx
         return self._client.build_request(  # pyright: ignore[reportUnknownMemberType]
             headers=headers,
@@ -515,8 +552,6 @@
             # so that passing a `TypedDict` doesn't cause an error.
             # https://github.com/microsoft/pyright/issues/3526#event-6715453066
             params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None,
-            json=json_data if is_given(json_data) else None,
-            files=files,
             **kwargs,
         )
 
@@ -560,7 +595,7 @@
         # we internally support defining a temporary header to override the
         # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response`
         # see _response.py for implementation details
-        override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER,
+        override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given)
         if is_given(override_cast_to):
             options.headers = headers
             return cast(Type[ResponseT], override_cast_to)
@@ -790,7 +825,7 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         version: str,
         base_url: str | URL,
         max_retries: int = DEFAULT_MAX_RETRIES,
-        timeout: float | Timeout | None | NotGiven =
+        timeout: float | Timeout | None | NotGiven = not_given,
         http_client: httpx.Client | None = None,
         custom_headers: Mapping[str, str] | None = None,
         custom_query: Mapping[str, object] | None = None,
@@ -1046,7 +1081,14 @@
     ) -> ResponseT:
         origin = get_origin(cast_to) or cast_to
 
-        if
+        if (
+            inspect.isclass(origin)
+            and issubclass(origin, BaseAPIResponse)
+            # we only want to actually return the custom BaseAPIResponse class if we're
+            # returning the raw response, or if we're not streaming SSE, as if we're streaming
+            # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+            and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+        ):
             if not issubclass(origin, APIResponse):
                 raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}")
 
@@ -1257,6 +1299,24 @@ class _DefaultAsyncHttpxClient(httpx.AsyncClient):
         super().__init__(**kwargs)
 
 
+try:
+    import httpx_aiohttp
+except ImportError:
+
+    class _DefaultAioHttpClient(httpx.AsyncClient):
+        def __init__(self, **_kwargs: Any) -> None:
+            raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra")
+else:
+
+    class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient):  # type: ignore
+        def __init__(self, **kwargs: Any) -> None:
+            kwargs.setdefault("timeout", DEFAULT_TIMEOUT)
+            kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS)
+            kwargs.setdefault("follow_redirects", True)
+
+            super().__init__(**kwargs)
+
+
 if TYPE_CHECKING:
     DefaultAsyncHttpxClient = httpx.AsyncClient
     """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK
@@ -1265,8 +1325,12 @@ if TYPE_CHECKING:
     This is useful because overriding the `http_client` with your own instance of
     `httpx.AsyncClient` will result in httpx's defaults being used, not ours.
     """
+
+    DefaultAioHttpClient = httpx.AsyncClient
+    """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`."""
 else:
     DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient
+    DefaultAioHttpClient = _DefaultAioHttpClient
 
 
 class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient):
@@ -1292,7 +1356,7 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         base_url: str | URL,
         _strict_response_validation: bool,
         max_retries: int = DEFAULT_MAX_RETRIES,
-        timeout: float | Timeout | None | NotGiven =
+        timeout: float | Timeout | None | NotGiven = not_given,
         http_client: httpx.AsyncClient | None = None,
         custom_headers: Mapping[str, str] | None = None,
         custom_query: Mapping[str, object] | None = None,
@@ -1549,7 +1613,14 @@
     ) -> ResponseT:
         origin = get_origin(cast_to) or cast_to
 
-        if
+        if (
+            inspect.isclass(origin)
+            and issubclass(origin, BaseAPIResponse)
+            # we only want to actually return the custom BaseAPIResponse class if we're
+            # returning the raw response, or if we're not streaming SSE, as if we're streaming
+            # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+            and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+        ):
             if not issubclass(origin, AsyncAPIResponse):
                 raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}")
 
@@ -1747,8 +1818,8 @@ def make_request_options(
     extra_query: Query | None = None,
     extra_body: Body | None = None,
     idempotency_key: str | None = None,
-    timeout: float | httpx.Timeout | None | NotGiven =
-    post_parser: PostParser | NotGiven =
+    timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    post_parser: PostParser | NotGiven = not_given,
 ) -> RequestOptions:
     """Create a dict of type RequestOptions without keys of NotGiven values."""
     options: RequestOptions = {}
isaacus/_client.py
CHANGED
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import os
-from typing import Any,
+from typing import Any, Mapping
 from typing_extensions import Self, override
 
 import httpx
@@ -11,17 +11,17 @@ import httpx
 from . import _exceptions
 from ._qs import Querystring
 from ._types import (
-    NOT_GIVEN,
     Omit,
     Timeout,
     NotGiven,
     Transport,
     ProxiesTypes,
     RequestOptions,
+    not_given,
 )
 from ._utils import is_given, get_async_library
 from ._version import __version__
-from .resources import rerankings
+from .resources import embeddings, rerankings
 from ._streaming import Stream as Stream, AsyncStream as AsyncStream
 from ._exceptions import IsaacusError, APIStatusError
 from ._base_client import (
@@ -36,6 +36,7 @@ __all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "Isaacus",
 
 
 class Isaacus(SyncAPIClient):
+    embeddings: embeddings.EmbeddingsResource
     classifications: classifications.ClassificationsResource
     rerankings: rerankings.RerankingsResource
     extractions: extractions.ExtractionsResource
@@ -50,7 +51,7 @@ class Isaacus(SyncAPIClient):
         *,
         api_key: str | None = None,
         base_url: str | httpx.URL | None = None,
-        timeout:
+        timeout: float | Timeout | None | NotGiven = not_given,
         max_retries: int = DEFAULT_MAX_RETRIES,
         default_headers: Mapping[str, str] | None = None,
         default_query: Mapping[str, object] | None = None,
@@ -96,6 +97,7 @@ class Isaacus(SyncAPIClient):
             _strict_response_validation=_strict_response_validation,
         )
 
+        self.embeddings = embeddings.EmbeddingsResource(self)
         self.classifications = classifications.ClassificationsResource(self)
         self.rerankings = rerankings.RerankingsResource(self)
         self.extractions = extractions.ExtractionsResource(self)
@@ -127,9 +129,9 @@ class Isaacus(SyncAPIClient):
         *,
         api_key: str | None = None,
         base_url: str | httpx.URL | None = None,
-        timeout: float | Timeout | None | NotGiven =
+        timeout: float | Timeout | None | NotGiven = not_given,
         http_client: httpx.Client | None = None,
-        max_retries: int | NotGiven =
+        max_retries: int | NotGiven = not_given,
         default_headers: Mapping[str, str] | None = None,
         set_default_headers: Mapping[str, str] | None = None,
         default_query: Mapping[str, object] | None = None,
@@ -208,6 +210,7 @@ class Isaacus(SyncAPIClient):
 
 
 class AsyncIsaacus(AsyncAPIClient):
+    embeddings: embeddings.AsyncEmbeddingsResource
     classifications: classifications.AsyncClassificationsResource
     rerankings: rerankings.AsyncRerankingsResource
     extractions: extractions.AsyncExtractionsResource
@@ -222,7 +225,7 @@ class AsyncIsaacus(AsyncAPIClient):
         *,
         api_key: str | None = None,
         base_url: str | httpx.URL | None = None,
-        timeout:
+        timeout: float | Timeout | None | NotGiven = not_given,
         max_retries: int = DEFAULT_MAX_RETRIES,
         default_headers: Mapping[str, str] | None = None,
         default_query: Mapping[str, object] | None = None,
@@ -268,6 +271,7 @@ class AsyncIsaacus(AsyncAPIClient):
             _strict_response_validation=_strict_response_validation,
         )
 
+        self.embeddings = embeddings.AsyncEmbeddingsResource(self)
         self.classifications = classifications.AsyncClassificationsResource(self)
         self.rerankings = rerankings.AsyncRerankingsResource(self)
         self.extractions = extractions.AsyncExtractionsResource(self)
@@ -299,9 +303,9 @@ class AsyncIsaacus(AsyncAPIClient):
         *,
         api_key: str | None = None,
         base_url: str | httpx.URL | None = None,
-        timeout: float | Timeout | None | NotGiven =
+        timeout: float | Timeout | None | NotGiven = not_given,
         http_client: httpx.AsyncClient | None = None,
-        max_retries: int | NotGiven =
+        max_retries: int | NotGiven = not_given,
         default_headers: Mapping[str, str] | None = None,
         set_default_headers: Mapping[str, str] | None = None,
         default_query: Mapping[str, object] | None = None,
@@ -381,6 +385,7 @@ class AsyncIsaacus(AsyncAPIClient):
 
 class IsaacusWithRawResponse:
     def __init__(self, client: Isaacus) -> None:
+        self.embeddings = embeddings.EmbeddingsResourceWithRawResponse(client.embeddings)
         self.classifications = classifications.ClassificationsResourceWithRawResponse(client.classifications)
         self.rerankings = rerankings.RerankingsResourceWithRawResponse(client.rerankings)
         self.extractions = extractions.ExtractionsResourceWithRawResponse(client.extractions)
@@ -388,6 +393,7 @@ class IsaacusWithRawResponse:
 
 class AsyncIsaacusWithRawResponse:
     def __init__(self, client: AsyncIsaacus) -> None:
+        self.embeddings = embeddings.AsyncEmbeddingsResourceWithRawResponse(client.embeddings)
         self.classifications = classifications.AsyncClassificationsResourceWithRawResponse(client.classifications)
         self.rerankings = rerankings.AsyncRerankingsResourceWithRawResponse(client.rerankings)
         self.extractions = extractions.AsyncExtractionsResourceWithRawResponse(client.extractions)
@@ -395,6 +401,7 @@ class AsyncIsaacusWithRawResponse:
 
 class IsaacusWithStreamedResponse:
     def __init__(self, client: Isaacus) -> None:
+        self.embeddings = embeddings.EmbeddingsResourceWithStreamingResponse(client.embeddings)
         self.classifications = classifications.ClassificationsResourceWithStreamingResponse(client.classifications)
         self.rerankings = rerankings.RerankingsResourceWithStreamingResponse(client.rerankings)
         self.extractions = extractions.ExtractionsResourceWithStreamingResponse(client.extractions)
@@ -402,6 +409,7 @@ class IsaacusWithStreamedResponse:
 
 class AsyncIsaacusWithStreamedResponse:
     def __init__(self, client: AsyncIsaacus) -> None:
+        self.embeddings = embeddings.AsyncEmbeddingsResourceWithStreamingResponse(client.embeddings)
         self.classifications = classifications.AsyncClassificationsResourceWithStreamingResponse(client.classifications)
         self.rerankings = rerankings.AsyncRerankingsResourceWithStreamingResponse(client.rerankings)
         self.extractions = extractions.AsyncExtractionsResourceWithStreamingResponse(client.extractions)
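The client now wires an `embeddings` resource into both the sync and async clients (including the raw- and streaming-response wrappers). This section only shows the wiring; the request and response models live in the new `embedding_create_params.py` and `embedding_response.py` files listed at the top. A hedged usage sketch, where the method name `create` and its parameters are assumptions rather than anything shown in this diff:

    from isaacus import Isaacus

    client = Isaacus(api_key="...")  # hypothetical placeholder key

    # Hypothetical call: only `client.embeddings` itself is guaranteed by this
    # diff; `create`, `model`, and `texts` are illustrative names.
    response = client.embeddings.create(
        model="...",
        texts=["..."],
    )
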
isaacus/_compat.py
CHANGED
@@ -12,14 +12,13 @@ from ._types import IncEx, StrBytesIntFloat
 _T = TypeVar("_T")
 _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
 
-# --------------- Pydantic v2 compatibility ---------------
+# --------------- Pydantic v2, v3 compatibility ---------------
 
 # Pyright incorrectly reports some of our functions as overriding a method when they don't
 # pyright: reportIncompatibleMethodOverride=false
 
-
+PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
 
-# v1 re-exports
 if TYPE_CHECKING:
 
     def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
@@ -44,90 +43,92 @@ if TYPE_CHECKING:
         ...
 
 else:
-
-
+    # v1 re-exports
+    if PYDANTIC_V1:
+        from pydantic.typing import (
             get_args as get_args,
             is_union as is_union,
             get_origin as get_origin,
             is_typeddict as is_typeddict,
             is_literal_type as is_literal_type,
         )
-        from pydantic.
+        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
     else:
-        from
+        from ._utils import (
             get_args as get_args,
             is_union as is_union,
             get_origin as get_origin,
+            parse_date as parse_date,
             is_typeddict as is_typeddict,
+            parse_datetime as parse_datetime,
             is_literal_type as is_literal_type,
         )
-        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
 
 
 # refactored config
 if TYPE_CHECKING:
     from pydantic import ConfigDict as ConfigDict
 else:
-    if
-        from pydantic import ConfigDict
-    else:
+    if PYDANTIC_V1:
         # TODO: provide an error message here?
         ConfigDict = None
+    else:
+        from pydantic import ConfigDict as ConfigDict
 
 
 # renamed methods / properties
 def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
-    if
-        return model.model_validate(value)
-    else:
+    if PYDANTIC_V1:
         return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+    else:
+        return model.model_validate(value)
 
 
 def field_is_required(field: FieldInfo) -> bool:
-    if
-        return field.
-    return field.
+    if PYDANTIC_V1:
+        return field.required  # type: ignore
+    return field.is_required()
 
 
 def field_get_default(field: FieldInfo) -> Any:
     value = field.get_default()
-    if
-        from pydantic_core import PydanticUndefined
-
-        if value == PydanticUndefined:
-            return None
+    if PYDANTIC_V1:
         return value
+    from pydantic_core import PydanticUndefined
+
+    if value == PydanticUndefined:
+        return None
     return value
 
 
 def field_outer_type(field: FieldInfo) -> Any:
-    if
-        return field.
-    return field.
+    if PYDANTIC_V1:
+        return field.outer_type_  # type: ignore
+    return field.annotation
 
 
 def get_model_config(model: type[pydantic.BaseModel]) -> Any:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.__config__  # type: ignore
+    return model.model_config
 
 
 def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.__fields__  # type: ignore
+    return model.model_fields
 
 
 def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.copy(deep=deep)  # type: ignore
+    return model.model_copy(deep=deep)
 
 
 def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.json(indent=indent)  # type: ignore
+    return model.model_dump_json(indent=indent)
 
 
 def model_dump(
@@ -139,14 +140,14 @@ def model_dump(
     warnings: bool = True,
     mode: Literal["json", "python"] = "python",
 ) -> dict[str, Any]:
-    if
+    if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
         return model.model_dump(
             mode=mode,
             exclude=exclude,
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             # warnings are not supported in Pydantic v1
-            warnings=
+            warnings=True if PYDANTIC_V1 else warnings,
         )
     return cast(
         "dict[str, Any]",
@@ -159,9 +160,9 @@ def model_dump(
 
 
 def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
-    if
-        return model.
-    return model.
+    if PYDANTIC_V1:
+        return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
+    return model.model_validate(data)
 
 
 # generic models
@@ -170,17 +171,16 @@ if TYPE_CHECKING:
     class GenericModel(pydantic.BaseModel): ...
 
 else:
-    if
+    if PYDANTIC_V1:
+        import pydantic.generics
+
+        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+    else:
         # there no longer needs to be a distinction in v2 but
         # we still have to create our own subclass to avoid
         # inconsistent MRO ordering errors
         class GenericModel(pydantic.BaseModel): ...
 
-    else:
-        import pydantic.generics
-
-        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
-
 
 # cached properties
 if TYPE_CHECKING:
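The compat layer now keys every branch off a single `PYDANTIC_V1` flag, so the rest of the SDK can call one helper regardless of the installed Pydantic major version. A small sketch of that behaviour using the private helpers defined above (internal API, shown for illustration only):

    import pydantic

    from isaacus._compat import PYDANTIC_V1, model_dump, model_parse


    class Example(pydantic.BaseModel):
        name: str


    # model_parse() and model_dump() dispatch on PYDANTIC_V1 internally, so the
    # same calls work on Pydantic v1 and v2/v3 installations.
    obj = model_parse(Example, {"name": "demo"})
    data = model_dump(obj, mode="json")
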
isaacus/_files.py
CHANGED
@@ -69,12 +69,12 @@ def _transform_file(file: FileTypes) -> HttpxFileTypes:
         return file
 
     if is_tuple_t(file):
-        return (file[0],
+        return (file[0], read_file_content(file[1]), *file[2:])
 
     raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
 
 
-def
+def read_file_content(file: FileContent) -> HttpxFileContent:
     if isinstance(file, os.PathLike):
         return pathlib.Path(file).read_bytes()
     return file
@@ -111,12 +111,12 @@ async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
         return file
 
     if is_tuple_t(file):
-        return (file[0], await
+        return (file[0], await async_read_file_content(file[1]), *file[2:])
 
     raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
 
 
-async def
+async def async_read_file_content(file: FileContent) -> HttpxFileContent:
     if isinstance(file, os.PathLike):
         return await anyio.Path(file).read_bytes()
 