dataleon-0.1.0a8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dataleon might be problematic.

Files changed (60)
  1. dataleon/__init__.py +102 -0
  2. dataleon/_base_client.py +1995 -0
  3. dataleon/_client.py +412 -0
  4. dataleon/_compat.py +219 -0
  5. dataleon/_constants.py +14 -0
  6. dataleon/_exceptions.py +108 -0
  7. dataleon/_files.py +123 -0
  8. dataleon/_models.py +840 -0
  9. dataleon/_qs.py +150 -0
  10. dataleon/_resource.py +43 -0
  11. dataleon/_response.py +830 -0
  12. dataleon/_streaming.py +331 -0
  13. dataleon/_types.py +260 -0
  14. dataleon/_utils/__init__.py +64 -0
  15. dataleon/_utils/_compat.py +45 -0
  16. dataleon/_utils/_datetime_parse.py +136 -0
  17. dataleon/_utils/_logs.py +25 -0
  18. dataleon/_utils/_proxy.py +65 -0
  19. dataleon/_utils/_reflection.py +42 -0
  20. dataleon/_utils/_resources_proxy.py +24 -0
  21. dataleon/_utils/_streams.py +12 -0
  22. dataleon/_utils/_sync.py +58 -0
  23. dataleon/_utils/_transform.py +457 -0
  24. dataleon/_utils/_typing.py +156 -0
  25. dataleon/_utils/_utils.py +421 -0
  26. dataleon/_version.py +4 -0
  27. dataleon/lib/.keep +4 -0
  28. dataleon/py.typed +0 -0
  29. dataleon/resources/__init__.py +33 -0
  30. dataleon/resources/companies/__init__.py +33 -0
  31. dataleon/resources/companies/companies.py +706 -0
  32. dataleon/resources/companies/documents.py +361 -0
  33. dataleon/resources/individuals/__init__.py +33 -0
  34. dataleon/resources/individuals/documents.py +361 -0
  35. dataleon/resources/individuals/individuals.py +711 -0
  36. dataleon/types/__init__.py +17 -0
  37. dataleon/types/companies/__init__.py +5 -0
  38. dataleon/types/companies/document_upload_params.py +56 -0
  39. dataleon/types/company_create_params.py +101 -0
  40. dataleon/types/company_list_params.py +37 -0
  41. dataleon/types/company_list_response.py +10 -0
  42. dataleon/types/company_registration.py +439 -0
  43. dataleon/types/company_retrieve_params.py +15 -0
  44. dataleon/types/company_update_params.py +101 -0
  45. dataleon/types/individual.py +336 -0
  46. dataleon/types/individual_create_params.py +78 -0
  47. dataleon/types/individual_list_params.py +37 -0
  48. dataleon/types/individual_list_response.py +10 -0
  49. dataleon/types/individual_retrieve_params.py +15 -0
  50. dataleon/types/individual_update_params.py +78 -0
  51. dataleon/types/individuals/__init__.py +7 -0
  52. dataleon/types/individuals/document_response.py +41 -0
  53. dataleon/types/individuals/document_upload_params.py +56 -0
  54. dataleon/types/individuals/generic_document.py +57 -0
  55. dataleon/types/shared/__init__.py +3 -0
  56. dataleon/types/shared/check.py +26 -0
  57. dataleon-0.1.0a8.dist-info/METADATA +448 -0
  58. dataleon-0.1.0a8.dist-info/RECORD +60 -0
  59. dataleon-0.1.0a8.dist-info/WHEEL +4 -0
  60. dataleon-0.1.0a8.dist-info/licenses/LICENSE +201 -0
dataleon/_client.py ADDED
@@ -0,0 +1,412 @@
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+ from __future__ import annotations
+
+ import os
+ from typing import Any, Mapping
+ from typing_extensions import Self, override
+
+ import httpx
+
+ from . import _exceptions
+ from ._qs import Querystring
+ from ._types import (
+     Omit,
+     Timeout,
+     NotGiven,
+     Transport,
+     ProxiesTypes,
+     RequestOptions,
+     not_given,
+ )
+ from ._utils import is_given, get_async_library
+ from ._version import __version__
+ from ._streaming import Stream as Stream, AsyncStream as AsyncStream
+ from ._exceptions import DataleonError, APIStatusError
+ from ._base_client import (
+     DEFAULT_MAX_RETRIES,
+     SyncAPIClient,
+     AsyncAPIClient,
+ )
+ from .resources.companies import companies
+ from .resources.individuals import individuals
+
+ __all__ = [
+     "Timeout",
+     "Transport",
+     "ProxiesTypes",
+     "RequestOptions",
+     "Dataleon",
+     "AsyncDataleon",
+     "Client",
+     "AsyncClient",
+ ]
+
+
+ class Dataleon(SyncAPIClient):
+     individuals: individuals.IndividualsResource
+     companies: companies.CompaniesResource
+     with_raw_response: DataleonWithRawResponse
+     with_streaming_response: DataleonWithStreamedResponse
+
+     # client options
+     api_key: str
+
+     def __init__(
+         self,
+         *,
+         api_key: str | None = None,
+         base_url: str | httpx.URL | None = None,
+         timeout: float | Timeout | None | NotGiven = not_given,
+         max_retries: int = DEFAULT_MAX_RETRIES,
+         default_headers: Mapping[str, str] | None = None,
+         default_query: Mapping[str, object] | None = None,
+         # Configure a custom httpx client.
+         # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
+         # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
+         http_client: httpx.Client | None = None,
+         # Enable or disable schema validation for data returned by the API.
+         # When enabled an error APIResponseValidationError is raised
+         # if the API responds with invalid data for the expected schema.
+         #
+         # This parameter may be removed or changed in the future.
+         # If you rely on this feature, please open a GitHub issue
+         # outlining your use-case to help us decide if it should be
+         # part of our public interface in the future.
+         _strict_response_validation: bool = False,
+     ) -> None:
+         """Construct a new synchronous Dataleon client instance.
+
+         This automatically infers the `api_key` argument from the `DATALEON_API_KEY` environment variable if it is not provided.
+         """
+         if api_key is None:
+             api_key = os.environ.get("DATALEON_API_KEY")
+         if api_key is None:
+             raise DataleonError(
+                 "The api_key client option must be set either by passing api_key to the client or by setting the DATALEON_API_KEY environment variable"
+             )
+         self.api_key = api_key
+
+         if base_url is None:
+             base_url = os.environ.get("DATALEON_BASE_URL")
+         if base_url is None:
+             base_url = f"https://inference.eu-west-1.dataleon.ai"
+
+         super().__init__(
+             version=__version__,
+             base_url=base_url,
+             max_retries=max_retries,
+             timeout=timeout,
+             http_client=http_client,
+             custom_headers=default_headers,
+             custom_query=default_query,
+             _strict_response_validation=_strict_response_validation,
+         )
+
+         self.individuals = individuals.IndividualsResource(self)
+         self.companies = companies.CompaniesResource(self)
+         self.with_raw_response = DataleonWithRawResponse(self)
+         self.with_streaming_response = DataleonWithStreamedResponse(self)
+
+     @property
+     @override
+     def qs(self) -> Querystring:
+         return Querystring(array_format="comma")
+
+     @property
+     @override
+     def auth_headers(self) -> dict[str, str]:
+         api_key = self.api_key
+         return {"Api-Key": api_key}
+
+     @property
+     @override
+     def default_headers(self) -> dict[str, str | Omit]:
+         return {
+             **super().default_headers,
+             "X-Stainless-Async": "false",
+             **self._custom_headers,
+         }
+
+     def copy(
+         self,
+         *,
+         api_key: str | None = None,
+         base_url: str | httpx.URL | None = None,
+         timeout: float | Timeout | None | NotGiven = not_given,
+         http_client: httpx.Client | None = None,
+         max_retries: int | NotGiven = not_given,
+         default_headers: Mapping[str, str] | None = None,
+         set_default_headers: Mapping[str, str] | None = None,
+         default_query: Mapping[str, object] | None = None,
+         set_default_query: Mapping[str, object] | None = None,
+         _extra_kwargs: Mapping[str, Any] = {},
+     ) -> Self:
+         """
+         Create a new client instance re-using the same options given to the current client with optional overriding.
+         """
+         if default_headers is not None and set_default_headers is not None:
+             raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+         if default_query is not None and set_default_query is not None:
+             raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+         headers = self._custom_headers
+         if default_headers is not None:
+             headers = {**headers, **default_headers}
+         elif set_default_headers is not None:
+             headers = set_default_headers
+
+         params = self._custom_query
+         if default_query is not None:
+             params = {**params, **default_query}
+         elif set_default_query is not None:
+             params = set_default_query
+
+         http_client = http_client or self._client
+         return self.__class__(
+             api_key=api_key or self.api_key,
+             base_url=base_url or self.base_url,
+             timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+             http_client=http_client,
+             max_retries=max_retries if is_given(max_retries) else self.max_retries,
+             default_headers=headers,
+             default_query=params,
+             **_extra_kwargs,
+         )
+
+     # Alias for `copy` for nicer inline usage, e.g.
+     # client.with_options(timeout=10).foo.create(...)
+     with_options = copy
+
+     @override
+     def _make_status_error(
+         self,
+         err_msg: str,
+         *,
+         body: object,
+         response: httpx.Response,
+     ) -> APIStatusError:
+         if response.status_code == 400:
+             return _exceptions.BadRequestError(err_msg, response=response, body=body)
+
+         if response.status_code == 401:
+             return _exceptions.AuthenticationError(err_msg, response=response, body=body)
+
+         if response.status_code == 403:
+             return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
+
+         if response.status_code == 404:
+             return _exceptions.NotFoundError(err_msg, response=response, body=body)
+
+         if response.status_code == 409:
+             return _exceptions.ConflictError(err_msg, response=response, body=body)
+
+         if response.status_code == 422:
+             return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
+
+         if response.status_code == 429:
+             return _exceptions.RateLimitError(err_msg, response=response, body=body)
+
+         if response.status_code >= 500:
+             return _exceptions.InternalServerError(err_msg, response=response, body=body)
+         return APIStatusError(err_msg, response=response, body=body)
+
+
+ class AsyncDataleon(AsyncAPIClient):
+     individuals: individuals.AsyncIndividualsResource
+     companies: companies.AsyncCompaniesResource
+     with_raw_response: AsyncDataleonWithRawResponse
+     with_streaming_response: AsyncDataleonWithStreamedResponse
+
+     # client options
+     api_key: str
+
+     def __init__(
+         self,
+         *,
+         api_key: str | None = None,
+         base_url: str | httpx.URL | None = None,
+         timeout: float | Timeout | None | NotGiven = not_given,
+         max_retries: int = DEFAULT_MAX_RETRIES,
+         default_headers: Mapping[str, str] | None = None,
+         default_query: Mapping[str, object] | None = None,
+         # Configure a custom httpx client.
+         # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
+         # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details.
+         http_client: httpx.AsyncClient | None = None,
+         # Enable or disable schema validation for data returned by the API.
+         # When enabled an error APIResponseValidationError is raised
+         # if the API responds with invalid data for the expected schema.
+         #
+         # This parameter may be removed or changed in the future.
+         # If you rely on this feature, please open a GitHub issue
+         # outlining your use-case to help us decide if it should be
+         # part of our public interface in the future.
+         _strict_response_validation: bool = False,
+     ) -> None:
+         """Construct a new async AsyncDataleon client instance.
+
+         This automatically infers the `api_key` argument from the `DATALEON_API_KEY` environment variable if it is not provided.
+         """
+         if api_key is None:
+             api_key = os.environ.get("DATALEON_API_KEY")
+         if api_key is None:
+             raise DataleonError(
+                 "The api_key client option must be set either by passing api_key to the client or by setting the DATALEON_API_KEY environment variable"
+             )
+         self.api_key = api_key
+
+         if base_url is None:
+             base_url = os.environ.get("DATALEON_BASE_URL")
+         if base_url is None:
+             base_url = f"https://inference.eu-west-1.dataleon.ai"
+
+         super().__init__(
+             version=__version__,
+             base_url=base_url,
+             max_retries=max_retries,
+             timeout=timeout,
+             http_client=http_client,
+             custom_headers=default_headers,
+             custom_query=default_query,
+             _strict_response_validation=_strict_response_validation,
+         )
+
+         self.individuals = individuals.AsyncIndividualsResource(self)
+         self.companies = companies.AsyncCompaniesResource(self)
+         self.with_raw_response = AsyncDataleonWithRawResponse(self)
+         self.with_streaming_response = AsyncDataleonWithStreamedResponse(self)
+
+     @property
+     @override
+     def qs(self) -> Querystring:
+         return Querystring(array_format="comma")
+
+     @property
+     @override
+     def auth_headers(self) -> dict[str, str]:
+         api_key = self.api_key
+         return {"Api-Key": api_key}
+
+     @property
+     @override
+     def default_headers(self) -> dict[str, str | Omit]:
+         return {
+             **super().default_headers,
+             "X-Stainless-Async": f"async:{get_async_library()}",
+             **self._custom_headers,
+         }
+
+     def copy(
+         self,
+         *,
+         api_key: str | None = None,
+         base_url: str | httpx.URL | None = None,
+         timeout: float | Timeout | None | NotGiven = not_given,
+         http_client: httpx.AsyncClient | None = None,
+         max_retries: int | NotGiven = not_given,
+         default_headers: Mapping[str, str] | None = None,
+         set_default_headers: Mapping[str, str] | None = None,
+         default_query: Mapping[str, object] | None = None,
+         set_default_query: Mapping[str, object] | None = None,
+         _extra_kwargs: Mapping[str, Any] = {},
+     ) -> Self:
+         """
+         Create a new client instance re-using the same options given to the current client with optional overriding.
+         """
+         if default_headers is not None and set_default_headers is not None:
+             raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+         if default_query is not None and set_default_query is not None:
+             raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+         headers = self._custom_headers
+         if default_headers is not None:
+             headers = {**headers, **default_headers}
+         elif set_default_headers is not None:
+             headers = set_default_headers
+
+         params = self._custom_query
+         if default_query is not None:
+             params = {**params, **default_query}
+         elif set_default_query is not None:
+             params = set_default_query
+
+         http_client = http_client or self._client
+         return self.__class__(
+             api_key=api_key or self.api_key,
+             base_url=base_url or self.base_url,
+             timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+             http_client=http_client,
+             max_retries=max_retries if is_given(max_retries) else self.max_retries,
+             default_headers=headers,
+             default_query=params,
+             **_extra_kwargs,
+         )
+
+     # Alias for `copy` for nicer inline usage, e.g.
+     # client.with_options(timeout=10).foo.create(...)
+     with_options = copy
+
+     @override
+     def _make_status_error(
+         self,
+         err_msg: str,
+         *,
+         body: object,
+         response: httpx.Response,
+     ) -> APIStatusError:
+         if response.status_code == 400:
+             return _exceptions.BadRequestError(err_msg, response=response, body=body)
+
+         if response.status_code == 401:
+             return _exceptions.AuthenticationError(err_msg, response=response, body=body)
+
+         if response.status_code == 403:
+             return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
+
+         if response.status_code == 404:
+             return _exceptions.NotFoundError(err_msg, response=response, body=body)
+
+         if response.status_code == 409:
+             return _exceptions.ConflictError(err_msg, response=response, body=body)
+
+         if response.status_code == 422:
+             return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
+
+         if response.status_code == 429:
+             return _exceptions.RateLimitError(err_msg, response=response, body=body)
+
+         if response.status_code >= 500:
+             return _exceptions.InternalServerError(err_msg, response=response, body=body)
+         return APIStatusError(err_msg, response=response, body=body)
+
+
+ class DataleonWithRawResponse:
+     def __init__(self, client: Dataleon) -> None:
+         self.individuals = individuals.IndividualsResourceWithRawResponse(client.individuals)
+         self.companies = companies.CompaniesResourceWithRawResponse(client.companies)
+
+
+ class AsyncDataleonWithRawResponse:
+     def __init__(self, client: AsyncDataleon) -> None:
+         self.individuals = individuals.AsyncIndividualsResourceWithRawResponse(client.individuals)
+         self.companies = companies.AsyncCompaniesResourceWithRawResponse(client.companies)
+
+
+ class DataleonWithStreamedResponse:
+     def __init__(self, client: Dataleon) -> None:
+         self.individuals = individuals.IndividualsResourceWithStreamingResponse(client.individuals)
+         self.companies = companies.CompaniesResourceWithStreamingResponse(client.companies)
+
+
+ class AsyncDataleonWithStreamedResponse:
+     def __init__(self, client: AsyncDataleon) -> None:
+         self.individuals = individuals.AsyncIndividualsResourceWithStreamingResponse(client.individuals)
+         self.companies = companies.AsyncCompaniesResourceWithStreamingResponse(client.companies)
+
+
+ Client = Dataleon
+
+ AsyncClient = AsyncDataleon
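
Note: the client above falls back to environment variables when no options are passed. A minimal usage sketch, assuming the package is installed and DATALEON_API_KEY is exported; the individuals.list() call is inferred from the individual_list_params / individual_list_response types in this release and is illustrative, not taken from official documentation:

    import os
    from dataleon import Dataleon

    # api_key falls back to DATALEON_API_KEY, base_url to DATALEON_BASE_URL
    # (default: https://inference.eu-west-1.dataleon.ai).
    client = Dataleon()

    # Per-call overrides go through with_options(), the alias for copy() defined above.
    individuals = client.with_options(timeout=10).individuals.list()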
dataleon/_compat.py ADDED
@@ -0,0 +1,219 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
+ from datetime import date, datetime
+ from typing_extensions import Self, Literal
+
+ import pydantic
+ from pydantic.fields import FieldInfo
+
+ from ._types import IncEx, StrBytesIntFloat
+
+ _T = TypeVar("_T")
+ _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
+
+ # --------------- Pydantic v2, v3 compatibility ---------------
+
+ # Pyright incorrectly reports some of our functions as overriding a method when they don't
+ # pyright: reportIncompatibleMethodOverride=false
+
+ PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
+
+ if TYPE_CHECKING:
+
+     def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
+         ...
+
+     def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:  # noqa: ARG001
+         ...
+
+     def get_args(t: type[Any]) -> tuple[Any, ...]:  # noqa: ARG001
+         ...
+
+     def is_union(tp: type[Any] | None) -> bool:  # noqa: ARG001
+         ...
+
+     def get_origin(t: type[Any]) -> type[Any] | None:  # noqa: ARG001
+         ...
+
+     def is_literal_type(type_: type[Any]) -> bool:  # noqa: ARG001
+         ...
+
+     def is_typeddict(type_: type[Any]) -> bool:  # noqa: ARG001
+         ...
+
+ else:
+     # v1 re-exports
+     if PYDANTIC_V1:
+         from pydantic.typing import (
+             get_args as get_args,
+             is_union as is_union,
+             get_origin as get_origin,
+             is_typeddict as is_typeddict,
+             is_literal_type as is_literal_type,
+         )
+         from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+     else:
+         from ._utils import (
+             get_args as get_args,
+             is_union as is_union,
+             get_origin as get_origin,
+             parse_date as parse_date,
+             is_typeddict as is_typeddict,
+             parse_datetime as parse_datetime,
+             is_literal_type as is_literal_type,
+         )
+
+
+ # refactored config
+ if TYPE_CHECKING:
+     from pydantic import ConfigDict as ConfigDict
+ else:
+     if PYDANTIC_V1:
+         # TODO: provide an error message here?
+         ConfigDict = None
+     else:
+         from pydantic import ConfigDict as ConfigDict
+
+
+ # renamed methods / properties
+ def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
+     if PYDANTIC_V1:
+         return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+     else:
+         return model.model_validate(value)
+
+
+ def field_is_required(field: FieldInfo) -> bool:
+     if PYDANTIC_V1:
+         return field.required  # type: ignore
+     return field.is_required()
+
+
+ def field_get_default(field: FieldInfo) -> Any:
+     value = field.get_default()
+     if PYDANTIC_V1:
+         return value
+     from pydantic_core import PydanticUndefined
+
+     if value == PydanticUndefined:
+         return None
+     return value
+
+
+ def field_outer_type(field: FieldInfo) -> Any:
+     if PYDANTIC_V1:
+         return field.outer_type_  # type: ignore
+     return field.annotation
+
+
+ def get_model_config(model: type[pydantic.BaseModel]) -> Any:
+     if PYDANTIC_V1:
+         return model.__config__  # type: ignore
+     return model.model_config
+
+
+ def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
+     if PYDANTIC_V1:
+         return model.__fields__  # type: ignore
+     return model.model_fields
+
+
+ def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
+     if PYDANTIC_V1:
+         return model.copy(deep=deep)  # type: ignore
+     return model.model_copy(deep=deep)
+
+
+ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
+     if PYDANTIC_V1:
+         return model.json(indent=indent)  # type: ignore
+     return model.model_dump_json(indent=indent)
+
+
+ def model_dump(
+     model: pydantic.BaseModel,
+     *,
+     exclude: IncEx | None = None,
+     exclude_unset: bool = False,
+     exclude_defaults: bool = False,
+     warnings: bool = True,
+     mode: Literal["json", "python"] = "python",
+ ) -> dict[str, Any]:
+     if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
+         return model.model_dump(
+             mode=mode,
+             exclude=exclude,
+             exclude_unset=exclude_unset,
+             exclude_defaults=exclude_defaults,
+             # warnings are not supported in Pydantic v1
+             warnings=True if PYDANTIC_V1 else warnings,
+         )
+     return cast(
+         "dict[str, Any]",
+         model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+             exclude=exclude,
+             exclude_unset=exclude_unset,
+             exclude_defaults=exclude_defaults,
+         ),
+     )
+
+
+ def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
+     if PYDANTIC_V1:
+         return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
+     return model.model_validate(data)
+
+
+ # generic models
+ if TYPE_CHECKING:
+
+     class GenericModel(pydantic.BaseModel): ...
+
+ else:
+     if PYDANTIC_V1:
+         import pydantic.generics
+
+         class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+     else:
+         # there no longer needs to be a distinction in v2 but
+         # we still have to create our own subclass to avoid
+         # inconsistent MRO ordering errors
+         class GenericModel(pydantic.BaseModel): ...
+
+
+ # cached properties
+ if TYPE_CHECKING:
+     cached_property = property
+
+     # we define a separate type (copied from typeshed)
+     # that represents that `cached_property` is `set`able
+     # at runtime, which differs from `@property`.
+     #
+     # this is a separate type as editors likely special case
+     # `@property` and we don't want to cause issues just to have
+     # more helpful internal types.
+
+     class typed_cached_property(Generic[_T]):
+         func: Callable[[Any], _T]
+         attrname: str | None
+
+         def __init__(self, func: Callable[[Any], _T]) -> None: ...
+
+         @overload
+         def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...
+
+         @overload
+         def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ...
+
+         def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
+             raise NotImplementedError()
+
+         def __set_name__(self, owner: type[Any], name: str) -> None: ...
+
+         # __set__ is not defined at runtime, but @cached_property is designed to be settable
+         def __set__(self, instance: object, value: _T) -> None: ...
+ else:
+     from functools import cached_property as cached_property
+
+     typed_cached_property = cached_property
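
The module above is the compatibility layer that lets the SDK run against both Pydantic v1 and v2 behind one set of helpers. A small sketch of how those helpers are meant to be called, assuming a trivial model; _compat is an internal module, so this is shown only to illustrate the dispatch, not as a supported API:

    import pydantic

    from dataleon._compat import PYDANTIC_V1, model_dump, model_parse

    class User(pydantic.BaseModel):
        name: str
        age: int

    # model_parse dispatches to parse_obj() on Pydantic v1 and model_validate() on v2.
    user = model_parse(User, {"name": "Ada", "age": 36})

    # model_dump prefers model_dump() and falls back to dict() on Pydantic v1.
    print(PYDANTIC_V1, model_dump(user, exclude_unset=True))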
dataleon/_constants.py ADDED
@@ -0,0 +1,14 @@
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+ import httpx
+
+ RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
+ OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"
+
+ # default timeout is 1 minute
+ DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
+ DEFAULT_MAX_RETRIES = 2
+ DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)
+
+ INITIAL_RETRY_DELAY = 0.5
+ MAX_RETRY_DELAY = 8.0
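
INITIAL_RETRY_DELAY and MAX_RETRY_DELAY suggest capped exponential backoff between retries; the actual retry loop lives in dataleon/_base_client.py, which is not reproduced here, so the following is only a sketch of the usual pattern these two constants imply, with jittered delays assumed:

    import random

    INITIAL_RETRY_DELAY = 0.5
    MAX_RETRY_DELAY = 8.0

    def retry_delay(retries_taken: int) -> float:
        # Double the delay on each attempt and cap it, then apply jitter so that
        # many clients retrying at once do not synchronize their requests.
        delay = min(INITIAL_RETRY_DELAY * (2 ** retries_taken), MAX_RETRY_DELAY)
        return delay * random.uniform(0.75, 1.0)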