dataleon 0.1.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. dataleon/__init__.py +100 -0
  2. dataleon/_base_client.py +1995 -0
  3. dataleon/_client.py +412 -0
  4. dataleon/_compat.py +219 -0
  5. dataleon/_constants.py +14 -0
  6. dataleon/_exceptions.py +108 -0
  7. dataleon/_files.py +123 -0
  8. dataleon/_models.py +829 -0
  9. dataleon/_qs.py +150 -0
  10. dataleon/_resource.py +43 -0
  11. dataleon/_response.py +830 -0
  12. dataleon/_streaming.py +333 -0
  13. dataleon/_types.py +219 -0
  14. dataleon/_utils/__init__.py +57 -0
  15. dataleon/_utils/_logs.py +25 -0
  16. dataleon/_utils/_proxy.py +65 -0
  17. dataleon/_utils/_reflection.py +42 -0
  18. dataleon/_utils/_resources_proxy.py +24 -0
  19. dataleon/_utils/_streams.py +12 -0
  20. dataleon/_utils/_sync.py +86 -0
  21. dataleon/_utils/_transform.py +447 -0
  22. dataleon/_utils/_typing.py +151 -0
  23. dataleon/_utils/_utils.py +422 -0
  24. dataleon/_version.py +4 -0
  25. dataleon/lib/.keep +4 -0
  26. dataleon/py.typed +0 -0
  27. dataleon/resources/__init__.py +33 -0
  28. dataleon/resources/companies/__init__.py +33 -0
  29. dataleon/resources/companies/companies.py +706 -0
  30. dataleon/resources/companies/documents.py +361 -0
  31. dataleon/resources/individuals/__init__.py +33 -0
  32. dataleon/resources/individuals/documents.py +361 -0
  33. dataleon/resources/individuals/individuals.py +711 -0
  34. dataleon/types/__init__.py +17 -0
  35. dataleon/types/companies/__init__.py +5 -0
  36. dataleon/types/companies/document_upload_params.py +56 -0
  37. dataleon/types/company_create_params.py +94 -0
  38. dataleon/types/company_list_params.py +37 -0
  39. dataleon/types/company_list_response.py +10 -0
  40. dataleon/types/company_registration.py +431 -0
  41. dataleon/types/company_retrieve_params.py +15 -0
  42. dataleon/types/company_update_params.py +94 -0
  43. dataleon/types/individual.py +325 -0
  44. dataleon/types/individual_create_params.py +68 -0
  45. dataleon/types/individual_list_params.py +37 -0
  46. dataleon/types/individual_list_response.py +10 -0
  47. dataleon/types/individual_retrieve_params.py +15 -0
  48. dataleon/types/individual_update_params.py +68 -0
  49. dataleon/types/individuals/__init__.py +7 -0
  50. dataleon/types/individuals/document_response.py +41 -0
  51. dataleon/types/individuals/document_upload_params.py +56 -0
  52. dataleon/types/individuals/generic_document.py +57 -0
  53. dataleon/types/shared/__init__.py +3 -0
  54. dataleon/types/shared/check.py +26 -0
  55. dataleon-0.1.0a2.dist-info/METADATA +449 -0
  56. dataleon-0.1.0a2.dist-info/RECORD +58 -0
  57. dataleon-0.1.0a2.dist-info/WHEEL +4 -0
  58. dataleon-0.1.0a2.dist-info/licenses/LICENSE +201 -0
dataleon/_client.py ADDED
@@ -0,0 +1,412 @@
1
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ from typing import Any, Union, Mapping
7
+ from typing_extensions import Self, override
8
+
9
+ import httpx
10
+
11
+ from . import _exceptions
12
+ from ._qs import Querystring
13
+ from ._types import (
14
+ NOT_GIVEN,
15
+ Omit,
16
+ Timeout,
17
+ NotGiven,
18
+ Transport,
19
+ ProxiesTypes,
20
+ RequestOptions,
21
+ )
22
+ from ._utils import is_given, get_async_library
23
+ from ._version import __version__
24
+ from ._streaming import Stream as Stream, AsyncStream as AsyncStream
25
+ from ._exceptions import DataleonError, APIStatusError
26
+ from ._base_client import (
27
+ DEFAULT_MAX_RETRIES,
28
+ SyncAPIClient,
29
+ AsyncAPIClient,
30
+ )
31
+ from .resources.companies import companies
32
+ from .resources.individuals import individuals
33
+
34
+ __all__ = [
35
+ "Timeout",
36
+ "Transport",
37
+ "ProxiesTypes",
38
+ "RequestOptions",
39
+ "Dataleon",
40
+ "AsyncDataleon",
41
+ "Client",
42
+ "AsyncClient",
43
+ ]
44
+
45
+
46
class Dataleon(SyncAPIClient):
    """Synchronous client for the Dataleon API.

    Authenticates with an ``Api-Key`` header; the key is read from the
    ``DATALEON_API_KEY`` environment variable when not passed explicitly,
    and the base URL falls back to ``DATALEON_BASE_URL`` and then the
    production endpoint.
    """

    individuals: individuals.IndividualsResource
    companies: companies.CompaniesResource
    with_raw_response: DataleonWithRawResponse
    with_streaming_response: DataleonWithStreamedResponse

    # client options
    api_key: str

    def __init__(
        self,
        *,
        api_key: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
        http_client: httpx.Client | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new synchronous Dataleon client instance.

        This automatically infers the `api_key` argument from the `DATALEON_API_KEY` environment variable if it is not provided.

        Raises:
            DataleonError: if no API key is given and `DATALEON_API_KEY` is unset.
        """
        if api_key is None:
            api_key = os.environ.get("DATALEON_API_KEY")
        if api_key is None:
            raise DataleonError(
                "The api_key client option must be set either by passing api_key to the client or by setting the DATALEON_API_KEY environment variable"
            )
        self.api_key = api_key

        if base_url is None:
            base_url = os.environ.get("DATALEON_BASE_URL")
        if base_url is None:
            # Plain string literal: the previous f-string had no placeholders (ruff F541).
            base_url = "https://inference.eu-west-1.dataleon.ai"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.individuals = individuals.IndividualsResource(self)
        self.companies = companies.CompaniesResource(self)
        self.with_raw_response = DataleonWithRawResponse(self)
        self.with_streaming_response = DataleonWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        # Array query parameters are serialized as comma-separated values.
        return Querystring(array_format="comma")

    @property
    @override
    def auth_headers(self) -> dict[str, str]:
        api_key = self.api_key
        return {"Api-Key": api_key}

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        # Custom headers are spread last so callers can override the defaults.
        return {
            **super().default_headers,
            "X-Stainless-Async": "false",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        api_key: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.Client | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        # NOTE: the shared `{}` default is safe here because it is only unpacked, never mutated.
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.

        `default_headers`/`default_query` are merged over the current values, while
        `set_default_headers`/`set_default_query` replace them wholesale; each pair
        is mutually exclusive.

        Raises:
            ValueError: if both members of a mutually exclusive pair are given.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            api_key=api_key or self.api_key,
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP error status code onto the matching typed exception.

        Any 5xx becomes `InternalServerError`; unmapped codes fall back to the
        generic `APIStatusError`.
        """
        error_class = {
            400: _exceptions.BadRequestError,
            401: _exceptions.AuthenticationError,
            403: _exceptions.PermissionDeniedError,
            404: _exceptions.NotFoundError,
            409: _exceptions.ConflictError,
            422: _exceptions.UnprocessableEntityError,
            429: _exceptions.RateLimitError,
        }.get(response.status_code)
        if error_class is not None:
            return error_class(err_msg, response=response, body=body)
        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
214
+
215
+
216
class AsyncDataleon(AsyncAPIClient):
    """Asynchronous client for the Dataleon API.

    Authenticates with an ``Api-Key`` header; the key is read from the
    ``DATALEON_API_KEY`` environment variable when not passed explicitly,
    and the base URL falls back to ``DATALEON_BASE_URL`` and then the
    production endpoint.
    """

    individuals: individuals.AsyncIndividualsResource
    companies: companies.AsyncCompaniesResource
    with_raw_response: AsyncDataleonWithRawResponse
    with_streaming_response: AsyncDataleonWithStreamedResponse

    # client options
    api_key: str

    def __init__(
        self,
        *,
        api_key: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details.
        http_client: httpx.AsyncClient | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new async AsyncDataleon client instance.

        This automatically infers the `api_key` argument from the `DATALEON_API_KEY` environment variable if it is not provided.

        Raises:
            DataleonError: if no API key is given and `DATALEON_API_KEY` is unset.
        """
        if api_key is None:
            api_key = os.environ.get("DATALEON_API_KEY")
        if api_key is None:
            raise DataleonError(
                "The api_key client option must be set either by passing api_key to the client or by setting the DATALEON_API_KEY environment variable"
            )
        self.api_key = api_key

        if base_url is None:
            base_url = os.environ.get("DATALEON_BASE_URL")
        if base_url is None:
            # Plain string literal: the previous f-string had no placeholders (ruff F541).
            base_url = "https://inference.eu-west-1.dataleon.ai"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.individuals = individuals.AsyncIndividualsResource(self)
        self.companies = companies.AsyncCompaniesResource(self)
        self.with_raw_response = AsyncDataleonWithRawResponse(self)
        self.with_streaming_response = AsyncDataleonWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        # Array query parameters are serialized as comma-separated values.
        return Querystring(array_format="comma")

    @property
    @override
    def auth_headers(self) -> dict[str, str]:
        api_key = self.api_key
        return {"Api-Key": api_key}

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        # Custom headers are spread last so callers can override the defaults.
        return {
            **super().default_headers,
            "X-Stainless-Async": f"async:{get_async_library()}",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        api_key: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.AsyncClient | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        # NOTE: the shared `{}` default is safe here because it is only unpacked, never mutated.
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.

        `default_headers`/`default_query` are merged over the current values, while
        `set_default_headers`/`set_default_query` replace them wholesale; each pair
        is mutually exclusive.

        Raises:
            ValueError: if both members of a mutually exclusive pair are given.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            api_key=api_key or self.api_key,
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP error status code onto the matching typed exception.

        Any 5xx becomes `InternalServerError`; unmapped codes fall back to the
        generic `APIStatusError`.
        """
        error_class = {
            400: _exceptions.BadRequestError,
            401: _exceptions.AuthenticationError,
            403: _exceptions.PermissionDeniedError,
            404: _exceptions.NotFoundError,
            409: _exceptions.ConflictError,
            422: _exceptions.UnprocessableEntityError,
            429: _exceptions.RateLimitError,
        }.get(response.status_code)
        if error_class is not None:
            return error_class(err_msg, response=response, body=body)
        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
384
+
385
+
386
class DataleonWithRawResponse:
    """Mirror of `Dataleon` whose resources return raw responses.

    Wraps each resource namespace of *client* in its `...WithRawResponse`
    counterpart; accessed via `client.with_raw_response`.
    """

    def __init__(self, client: Dataleon) -> None:
        self.individuals = individuals.IndividualsResourceWithRawResponse(client.individuals)
        self.companies = companies.CompaniesResourceWithRawResponse(client.companies)
390
+
391
+
392
class AsyncDataleonWithRawResponse:
    """Mirror of `AsyncDataleon` whose resources return raw responses.

    Wraps each resource namespace of *client* in its async
    `...WithRawResponse` counterpart; accessed via `client.with_raw_response`.
    """

    def __init__(self, client: AsyncDataleon) -> None:
        self.individuals = individuals.AsyncIndividualsResourceWithRawResponse(client.individuals)
        self.companies = companies.AsyncCompaniesResourceWithRawResponse(client.companies)
396
+
397
+
398
class DataleonWithStreamedResponse:
    """Mirror of `Dataleon` whose resources stream response bodies.

    Wraps each resource namespace of *client* in its
    `...WithStreamingResponse` counterpart; accessed via
    `client.with_streaming_response`.
    """

    def __init__(self, client: Dataleon) -> None:
        self.individuals = individuals.IndividualsResourceWithStreamingResponse(client.individuals)
        self.companies = companies.CompaniesResourceWithStreamingResponse(client.companies)
402
+
403
+
404
class AsyncDataleonWithStreamedResponse:
    """Mirror of `AsyncDataleon` whose resources stream response bodies.

    Wraps each resource namespace of *client* in its async
    `...WithStreamingResponse` counterpart; accessed via
    `client.with_streaming_response`.
    """

    def __init__(self, client: AsyncDataleon) -> None:
        self.individuals = individuals.AsyncIndividualsResourceWithStreamingResponse(client.individuals)
        self.companies = companies.AsyncCompaniesResourceWithStreamingResponse(client.companies)
408
+
409
+
410
# Generic aliases so users can import `Client` / `AsyncClient` without
# referencing the product-specific class names.
Client = Dataleon

AsyncClient = AsyncDataleon
dataleon/_compat.py ADDED
@@ -0,0 +1,219 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
4
+ from datetime import date, datetime
5
+ from typing_extensions import Self, Literal
6
+
7
+ import pydantic
8
+ from pydantic.fields import FieldInfo
9
+
10
+ from ._types import IncEx, StrBytesIntFloat
11
+
12
+ _T = TypeVar("_T")
13
+ _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
14
+
15
+ # --------------- Pydantic v2 compatibility ---------------
16
+
17
+ # Pyright incorrectly reports some of our functions as overriding a method when they don't
18
+ # pyright: reportIncompatibleMethodOverride=false
19
+
20
+ PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
21
+
22
+ # v1 re-exports
23
# Re-export pydantic's internal typing/date-parsing helpers under one stable
# namespace: v2 installs expose them under `pydantic.v1.*`, v1 installs under
# `pydantic.*`. The TYPE_CHECKING stubs give type checkers signatures without
# importing the private modules.
if TYPE_CHECKING:

    def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
        ...

    def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:  # noqa: ARG001
        ...

    def get_args(t: type[Any]) -> tuple[Any, ...]:  # noqa: ARG001
        ...

    def is_union(tp: type[Any] | None) -> bool:  # noqa: ARG001
        ...

    def get_origin(t: type[Any]) -> type[Any] | None:  # noqa: ARG001
        ...

    def is_literal_type(type_: type[Any]) -> bool:  # noqa: ARG001
        ...

    def is_typeddict(type_: type[Any]) -> bool:  # noqa: ARG001
        ...

else:
    if PYDANTIC_V2:
        # pydantic v2 ships its v1 compatibility layer under `pydantic.v1`.
        from pydantic.v1.typing import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
            is_typeddict as is_typeddict,
            is_literal_type as is_literal_type,
        )
        from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
    else:
        from pydantic.typing import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
            is_typeddict as is_typeddict,
            is_literal_type as is_literal_type,
        )
        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
65
+
66
+
67
+ # refactored config
68
# `ConfigDict` only exists in pydantic v2; on v1 it is bound to None so that
# feature-detection (`if ConfigDict is not None`) works at runtime.
if TYPE_CHECKING:
    from pydantic import ConfigDict as ConfigDict
else:
    if PYDANTIC_V2:
        from pydantic import ConfigDict
    else:
        # TODO: provide an error message here?
        ConfigDict = None
76
+
77
+
78
+ # renamed methods / properties
79
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
    """Validate *value* into an instance of *model* across pydantic versions."""
    if not PYDANTIC_V2:
        # v1 spelling; the cast keeps the return type precise for type checkers.
        return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
    return model.model_validate(value)
84
+
85
+
86
def field_is_required(field: FieldInfo) -> bool:
    """Report whether *field* has no default and must be supplied by the caller."""
    if not PYDANTIC_V2:
        # v1 exposes a plain attribute instead of a method.
        return field.required  # type: ignore
    return field.is_required()
90
+
91
+
92
def field_get_default(field: FieldInfo) -> Any:
    """Return the default value of *field*.

    On pydantic v2, the ``PydanticUndefined`` sentinel (meaning "no default")
    is normalized to ``None``; on v1 the value is returned as-is.
    """
    value = field.get_default()
    if PYDANTIC_V2:
        from pydantic_core import PydanticUndefined

        # `is`, not `==`: PydanticUndefined is a singleton sentinel, and an
        # equality check could be spoofed by a default value whose custom
        # __eq__ compares equal to arbitrary objects.
        if value is PydanticUndefined:
            return None
        return value
    return value
101
+
102
+
103
def field_outer_type(field: FieldInfo) -> Any:
    """Return the declared (outer) type annotation of *field*."""
    return field.annotation if PYDANTIC_V2 else field.outer_type_  # type: ignore
107
+
108
+
109
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
    """Return the configuration object attached to *model* for the active pydantic major version."""
    if not PYDANTIC_V2:
        return model.__config__  # type: ignore
    return model.model_config
113
+
114
+
115
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
    """Return the mapping of field name to ``FieldInfo`` for *model*."""
    if not PYDANTIC_V2:
        return model.__fields__  # type: ignore
    return model.model_fields
119
+
120
+
121
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
    """Return a copy of *model*; set *deep* to also copy nested models."""
    if not PYDANTIC_V2:
        return model.copy(deep=deep)  # type: ignore
    return model.model_copy(deep=deep)
125
+
126
+
127
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
    """Serialize *model* to a JSON string, optionally pretty-printed with *indent*."""
    if not PYDANTIC_V2:
        return model.json(indent=indent)  # type: ignore
    return model.model_dump_json(indent=indent)
131
+
132
+
133
def model_dump(
    model: pydantic.BaseModel,
    *,
    exclude: IncEx | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    warnings: bool = True,
    mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
    """Serialize *model* to a plain dict, bridging the pydantic v1/v2 APIs.

    `mode` and `warnings` only apply where `model_dump` is available; the
    pure-v1 `.dict()` fallback ignores them.
    """
    if not (PYDANTIC_V2 or hasattr(model, "model_dump")):
        # Pure pydantic v1 model: `.dict()` knows nothing about mode/warnings.
        return cast(
            "dict[str, Any]",
            model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
                exclude=exclude,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
            ),
        )
    return model.model_dump(
        mode=mode,
        exclude=exclude,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        # warnings are not supported in Pydantic v1
        warnings=warnings if PYDANTIC_V2 else True,
    )
159
+
160
+
161
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
    """Parse *data* into an instance of *model* across pydantic versions."""
    if not PYDANTIC_V2:
        return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
    return model.model_validate(data)
165
+
166
+
167
+ # generic models
168
# Base class for generic models: pydantic v1 required the dedicated
# `pydantic.generics.GenericModel`; v2 folded that into `BaseModel`.
if TYPE_CHECKING:

    class GenericModel(pydantic.BaseModel): ...

else:
    if PYDANTIC_V2:
        # there no longer needs to be a distinction in v2 but
        # we still have to create our own subclass to avoid
        # inconsistent MRO ordering errors
        class GenericModel(pydantic.BaseModel): ...

    else:
        import pydantic.generics

        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
183
+
184
+
185
+ # cached properties
186
# At runtime both names are `functools.cached_property`; under TYPE_CHECKING we
# present `property` plus a typed stub so editors get accurate types.
if TYPE_CHECKING:
    cached_property = property

    # we define a separate type (copied from typeshed)
    # that represents that `cached_property` is `set`able
    # at runtime, which differs from `@property`.
    #
    # this is a separate type as editors likely special case
    # `@property` and we don't want to cause issues just to have
    # more helpful internal types.

    class typed_cached_property(Generic[_T]):
        func: Callable[[Any], _T]
        attrname: str | None

        def __init__(self, func: Callable[[Any], _T]) -> None: ...

        @overload
        def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...

        @overload
        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ...

        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
            raise NotImplementedError()

        def __set_name__(self, owner: type[Any], name: str) -> None: ...

        # __set__ is not defined at runtime, but @cached_property is designed to be settable
        def __set__(self, instance: object, value: _T) -> None: ...
else:
    from functools import cached_property as cached_property

    typed_cached_property = cached_property
dataleon/_constants.py ADDED
@@ -0,0 +1,14 @@
1
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

import httpx

# Internal header set by the response machinery to request the raw httpx response.
RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
# Internal header used to override the type a response is cast to for one request.
OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"

# default timeout is 1 minute
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
DEFAULT_MAX_RETRIES = 2
DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)

# Retry backoff bounds in seconds.
INITIAL_RETRY_DELAY = 0.5
MAX_RETRY_DELAY = 8.0