raccoonai 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (44)
  1. raccoonai/__init__.py +96 -0
  2. raccoonai/_base_client.py +2051 -0
  3. raccoonai/_client.py +473 -0
  4. raccoonai/_compat.py +219 -0
  5. raccoonai/_constants.py +14 -0
  6. raccoonai/_exceptions.py +108 -0
  7. raccoonai/_files.py +123 -0
  8. raccoonai/_models.py +795 -0
  9. raccoonai/_qs.py +150 -0
  10. raccoonai/_resource.py +43 -0
  11. raccoonai/_response.py +830 -0
  12. raccoonai/_streaming.py +333 -0
  13. raccoonai/_types.py +217 -0
  14. raccoonai/_utils/__init__.py +57 -0
  15. raccoonai/_utils/_logs.py +25 -0
  16. raccoonai/_utils/_proxy.py +62 -0
  17. raccoonai/_utils/_reflection.py +42 -0
  18. raccoonai/_utils/_streams.py +12 -0
  19. raccoonai/_utils/_sync.py +71 -0
  20. raccoonai/_utils/_transform.py +392 -0
  21. raccoonai/_utils/_typing.py +149 -0
  22. raccoonai/_utils/_utils.py +414 -0
  23. raccoonai/_version.py +4 -0
  24. raccoonai/lib/.keep +4 -0
  25. raccoonai/py.typed +0 -0
  26. raccoonai/resources/__init__.py +33 -0
  27. raccoonai/resources/fleet.py +485 -0
  28. raccoonai/resources/lam.py +1161 -0
  29. raccoonai/types/__init__.py +15 -0
  30. raccoonai/types/fleet_create_params.py +77 -0
  31. raccoonai/types/fleet_create_response.py +20 -0
  32. raccoonai/types/fleet_logs_response.py +14 -0
  33. raccoonai/types/fleet_status_response.py +17 -0
  34. raccoonai/types/fleet_terminate_response.py +17 -0
  35. raccoonai/types/lam_extract_params.py +51 -0
  36. raccoonai/types/lam_extract_response.py +28 -0
  37. raccoonai/types/lam_integration_run_params.py +35 -0
  38. raccoonai/types/lam_integration_run_response.py +47 -0
  39. raccoonai/types/lam_run_params.py +41 -0
  40. raccoonai/types/lam_run_response.py +21 -0
  41. raccoonai-0.1.0a1.dist-info/METADATA +422 -0
  42. raccoonai-0.1.0a1.dist-info/RECORD +44 -0
  43. raccoonai-0.1.0a1.dist-info/WHEEL +4 -0
  44. raccoonai-0.1.0a1.dist-info/licenses/LICENSE +201 -0
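Before the per-file diffs, a minimal sketch of installing this pre-release and constructing a client. The top-level import path is an assumption based on the `Client = RaccoonAI` / `AsyncClient = AsyncRaccoonAI` aliases in _client.py; raccoonai/__init__.py itself is not reproduced in this report, so verify the re-exports there.

# pip skips pre-releases unless you pin the exact version (`pip install raccoonai==0.1.0a1`) or pass --pre.
from raccoonai import RaccoonAI  # assumed top-level re-export

client = RaccoonAI(secret_key="sk-test")  # or set RACCOON_SECRET_KEY in the environment
print(client.base_url)                    # the "production" ENVIRONMENTS entry by default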
raccoonai/_client.py ADDED
@@ -0,0 +1,473 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import os
+from typing import Any, Dict, Union, Mapping, cast
+from typing_extensions import Self, Literal, override
+
+import httpx
+
+from . import _exceptions
+from ._qs import Querystring
+from ._types import (
+    NOT_GIVEN,
+    Omit,
+    Timeout,
+    NotGiven,
+    Transport,
+    ProxiesTypes,
+    RequestOptions,
+)
+from ._utils import (
+    is_given,
+    get_async_library,
+)
+from ._version import __version__
+from .resources import lam, fleet
+from ._streaming import Stream as Stream, AsyncStream as AsyncStream
+from ._exceptions import APIStatusError, RaccoonAIError
+from ._base_client import (
+    DEFAULT_MAX_RETRIES,
+    SyncAPIClient,
+    AsyncAPIClient,
+)
+
+__all__ = [
+    "ENVIRONMENTS",
+    "Timeout",
+    "Transport",
+    "ProxiesTypes",
+    "RequestOptions",
+    "RaccoonAI",
+    "AsyncRaccoonAI",
+    "Client",
+    "AsyncClient",
+]
+
+ENVIRONMENTS: Dict[str, str] = {
+    "production": "https://api.flyingraccoon.tech",
+    "staging": "https://staging.flyingraccoon.tech",
+    "local": "http://localhost:3800",
+}
+
+
+class RaccoonAI(SyncAPIClient):
+    lam: lam.LamResource
+    fleet: fleet.FleetResource
+    with_raw_response: RaccoonAIWithRawResponse
+    with_streaming_response: RaccoonAIWithStreamedResponse
+
+    # client options
+    secret_key: str
+
+    _environment: Literal["production", "staging", "local"] | NotGiven
+
+    def __init__(
+        self,
+        *,
+        secret_key: str | None = None,
+        environment: Literal["production", "staging", "local"] | NotGiven = NOT_GIVEN,
+        base_url: str | httpx.URL | None | NotGiven = NOT_GIVEN,
+        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+        max_retries: int = DEFAULT_MAX_RETRIES,
+        default_headers: Mapping[str, str] | None = None,
+        default_query: Mapping[str, object] | None = None,
+        # Configure a custom httpx client.
+        # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
+        # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
+        http_client: httpx.Client | None = None,
+        # Enable or disable schema validation for data returned by the API.
+        # When enabled an error APIResponseValidationError is raised
+        # if the API responds with invalid data for the expected schema.
+        #
+        # This parameter may be removed or changed in the future.
+        # If you rely on this feature, please open a GitHub issue
+        # outlining your use-case to help us decide if it should be
+        # part of our public interface in the future.
+        _strict_response_validation: bool = False,
+    ) -> None:
+        """Construct a new synchronous raccoonAI client instance.
+
+        This automatically infers the `secret_key` argument from the `RACCOON_SECRET_KEY` environment variable if it is not provided.
+        """
+        if secret_key is None:
+            secret_key = os.environ.get("RACCOON_SECRET_KEY")
+        if secret_key is None:
+            raise RaccoonAIError(
+                "The secret_key client option must be set either by passing secret_key to the client or by setting the RACCOON_SECRET_KEY environment variable"
+            )
+        self.secret_key = secret_key
+
+        self._environment = environment
+
+        base_url_env = os.environ.get("RACCOON_AI_BASE_URL")
+        if is_given(base_url) and base_url is not None:
+            # cast required because mypy doesn't understand the type narrowing
+            base_url = cast("str | httpx.URL", base_url)  # pyright: ignore[reportUnnecessaryCast]
+        elif is_given(environment):
+            if base_url_env and base_url is not None:
+                raise ValueError(
+                    "Ambiguous URL; The `RACCOON_AI_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None",
+                )
+
+            try:
+                base_url = ENVIRONMENTS[environment]
+            except KeyError as exc:
+                raise ValueError(f"Unknown environment: {environment}") from exc
+        elif base_url_env is not None:
+            base_url = base_url_env
+        else:
+            self._environment = environment = "production"
+
+            try:
+                base_url = ENVIRONMENTS[environment]
+            except KeyError as exc:
+                raise ValueError(f"Unknown environment: {environment}") from exc
+
+        super().__init__(
+            version=__version__,
+            base_url=base_url,
+            max_retries=max_retries,
+            timeout=timeout,
+            http_client=http_client,
+            custom_headers=default_headers,
+            custom_query=default_query,
+            _strict_response_validation=_strict_response_validation,
+        )
+
+        self.lam = lam.LamResource(self)
+        self.fleet = fleet.FleetResource(self)
+        self.with_raw_response = RaccoonAIWithRawResponse(self)
+        self.with_streaming_response = RaccoonAIWithStreamedResponse(self)
+
+    @property
+    @override
+    def qs(self) -> Querystring:
+        return Querystring(array_format="comma")
+
+    @property
+    @override
+    def auth_headers(self) -> dict[str, str]:
+        secret_key = self.secret_key
+        return {"raccoon-secret-key": secret_key}
+
+    @property
+    @override
+    def default_headers(self) -> dict[str, str | Omit]:
+        return {
+            **super().default_headers,
+            "X-Stainless-Async": "false",
+            **self._custom_headers,
+        }
+
+    def copy(
+        self,
+        *,
+        secret_key: str | None = None,
+        environment: Literal["production", "staging", "local"] | None = None,
+        base_url: str | httpx.URL | None = None,
+        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+        http_client: httpx.Client | None = None,
+        max_retries: int | NotGiven = NOT_GIVEN,
+        default_headers: Mapping[str, str] | None = None,
+        set_default_headers: Mapping[str, str] | None = None,
+        default_query: Mapping[str, object] | None = None,
+        set_default_query: Mapping[str, object] | None = None,
+        _extra_kwargs: Mapping[str, Any] = {},
+    ) -> Self:
+        """
+        Create a new client instance re-using the same options given to the current client with optional overriding.
+        """
+        if default_headers is not None and set_default_headers is not None:
+            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+        if default_query is not None and set_default_query is not None:
+            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+        headers = self._custom_headers
+        if default_headers is not None:
+            headers = {**headers, **default_headers}
+        elif set_default_headers is not None:
+            headers = set_default_headers
+
+        params = self._custom_query
+        if default_query is not None:
+            params = {**params, **default_query}
+        elif set_default_query is not None:
+            params = set_default_query
+
+        http_client = http_client or self._client
+        return self.__class__(
+            secret_key=secret_key or self.secret_key,
+            base_url=base_url or self.base_url,
+            environment=environment or self._environment,
+            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+            http_client=http_client,
+            max_retries=max_retries if is_given(max_retries) else self.max_retries,
+            default_headers=headers,
+            default_query=params,
+            **_extra_kwargs,
+        )
+
+    # Alias for `copy` for nicer inline usage, e.g.
+    # client.with_options(timeout=10).foo.create(...)
+    with_options = copy
+
+    @override
+    def _make_status_error(
+        self,
+        err_msg: str,
+        *,
+        body: object,
+        response: httpx.Response,
+    ) -> APIStatusError:
+        if response.status_code == 400:
+            return _exceptions.BadRequestError(err_msg, response=response, body=body)
+
+        if response.status_code == 401:
+            return _exceptions.AuthenticationError(err_msg, response=response, body=body)
+
+        if response.status_code == 403:
+            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
+
+        if response.status_code == 404:
+            return _exceptions.NotFoundError(err_msg, response=response, body=body)
+
+        if response.status_code == 409:
+            return _exceptions.ConflictError(err_msg, response=response, body=body)
+
+        if response.status_code == 422:
+            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
+
+        if response.status_code == 429:
+            return _exceptions.RateLimitError(err_msg, response=response, body=body)
+
+        if response.status_code >= 500:
+            return _exceptions.InternalServerError(err_msg, response=response, body=body)
+        return APIStatusError(err_msg, response=response, body=body)
+
+
+class AsyncRaccoonAI(AsyncAPIClient):
+    lam: lam.AsyncLamResource
+    fleet: fleet.AsyncFleetResource
+    with_raw_response: AsyncRaccoonAIWithRawResponse
+    with_streaming_response: AsyncRaccoonAIWithStreamedResponse
+
+    # client options
+    secret_key: str
+
+    _environment: Literal["production", "staging", "local"] | NotGiven
+
+    def __init__(
+        self,
+        *,
+        secret_key: str | None = None,
+        environment: Literal["production", "staging", "local"] | NotGiven = NOT_GIVEN,
+        base_url: str | httpx.URL | None | NotGiven = NOT_GIVEN,
+        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+        max_retries: int = DEFAULT_MAX_RETRIES,
+        default_headers: Mapping[str, str] | None = None,
+        default_query: Mapping[str, object] | None = None,
+        # Configure a custom httpx client.
+        # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
+        # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details.
+        http_client: httpx.AsyncClient | None = None,
+        # Enable or disable schema validation for data returned by the API.
+        # When enabled an error APIResponseValidationError is raised
+        # if the API responds with invalid data for the expected schema.
+        #
+        # This parameter may be removed or changed in the future.
+        # If you rely on this feature, please open a GitHub issue
+        # outlining your use-case to help us decide if it should be
+        # part of our public interface in the future.
+        _strict_response_validation: bool = False,
+    ) -> None:
+        """Construct a new async raccoonAI client instance.
+
+        This automatically infers the `secret_key` argument from the `RACCOON_SECRET_KEY` environment variable if it is not provided.
+        """
+        if secret_key is None:
+            secret_key = os.environ.get("RACCOON_SECRET_KEY")
+        if secret_key is None:
+            raise RaccoonAIError(
+                "The secret_key client option must be set either by passing secret_key to the client or by setting the RACCOON_SECRET_KEY environment variable"
+            )
+        self.secret_key = secret_key
+
+        self._environment = environment
+
+        base_url_env = os.environ.get("RACCOON_AI_BASE_URL")
+        if is_given(base_url) and base_url is not None:
+            # cast required because mypy doesn't understand the type narrowing
+            base_url = cast("str | httpx.URL", base_url)  # pyright: ignore[reportUnnecessaryCast]
+        elif is_given(environment):
+            if base_url_env and base_url is not None:
+                raise ValueError(
+                    "Ambiguous URL; The `RACCOON_AI_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None",
+                )
+
+            try:
+                base_url = ENVIRONMENTS[environment]
+            except KeyError as exc:
+                raise ValueError(f"Unknown environment: {environment}") from exc
+        elif base_url_env is not None:
+            base_url = base_url_env
+        else:
+            self._environment = environment = "production"
+
+            try:
+                base_url = ENVIRONMENTS[environment]
+            except KeyError as exc:
+                raise ValueError(f"Unknown environment: {environment}") from exc
+
+        super().__init__(
+            version=__version__,
+            base_url=base_url,
+            max_retries=max_retries,
+            timeout=timeout,
+            http_client=http_client,
+            custom_headers=default_headers,
+            custom_query=default_query,
+            _strict_response_validation=_strict_response_validation,
+        )
+
+        self.lam = lam.AsyncLamResource(self)
+        self.fleet = fleet.AsyncFleetResource(self)
+        self.with_raw_response = AsyncRaccoonAIWithRawResponse(self)
+        self.with_streaming_response = AsyncRaccoonAIWithStreamedResponse(self)
+
+    @property
+    @override
+    def qs(self) -> Querystring:
+        return Querystring(array_format="comma")
+
+    @property
+    @override
+    def auth_headers(self) -> dict[str, str]:
+        secret_key = self.secret_key
+        return {"raccoon-secret-key": secret_key}
+
+    @property
+    @override
+    def default_headers(self) -> dict[str, str | Omit]:
+        return {
+            **super().default_headers,
+            "X-Stainless-Async": f"async:{get_async_library()}",
+            **self._custom_headers,
+        }
+
+    def copy(
+        self,
+        *,
+        secret_key: str | None = None,
+        environment: Literal["production", "staging", "local"] | None = None,
+        base_url: str | httpx.URL | None = None,
+        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+        http_client: httpx.AsyncClient | None = None,
+        max_retries: int | NotGiven = NOT_GIVEN,
+        default_headers: Mapping[str, str] | None = None,
+        set_default_headers: Mapping[str, str] | None = None,
+        default_query: Mapping[str, object] | None = None,
+        set_default_query: Mapping[str, object] | None = None,
+        _extra_kwargs: Mapping[str, Any] = {},
+    ) -> Self:
+        """
+        Create a new client instance re-using the same options given to the current client with optional overriding.
+        """
+        if default_headers is not None and set_default_headers is not None:
+            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+        if default_query is not None and set_default_query is not None:
+            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+        headers = self._custom_headers
+        if default_headers is not None:
+            headers = {**headers, **default_headers}
+        elif set_default_headers is not None:
+            headers = set_default_headers
+
+        params = self._custom_query
+        if default_query is not None:
+            params = {**params, **default_query}
+        elif set_default_query is not None:
+            params = set_default_query
+
+        http_client = http_client or self._client
+        return self.__class__(
+            secret_key=secret_key or self.secret_key,
+            base_url=base_url or self.base_url,
+            environment=environment or self._environment,
+            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+            http_client=http_client,
+            max_retries=max_retries if is_given(max_retries) else self.max_retries,
+            default_headers=headers,
+            default_query=params,
+            **_extra_kwargs,
+        )
+
+    # Alias for `copy` for nicer inline usage, e.g.
+    # client.with_options(timeout=10).foo.create(...)
+    with_options = copy
+
+    @override
+    def _make_status_error(
+        self,
+        err_msg: str,
+        *,
+        body: object,
+        response: httpx.Response,
+    ) -> APIStatusError:
+        if response.status_code == 400:
+            return _exceptions.BadRequestError(err_msg, response=response, body=body)
+
+        if response.status_code == 401:
+            return _exceptions.AuthenticationError(err_msg, response=response, body=body)
+
+        if response.status_code == 403:
+            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
+
+        if response.status_code == 404:
+            return _exceptions.NotFoundError(err_msg, response=response, body=body)
+
+        if response.status_code == 409:
+            return _exceptions.ConflictError(err_msg, response=response, body=body)
+
+        if response.status_code == 422:
+            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
+
+        if response.status_code == 429:
+            return _exceptions.RateLimitError(err_msg, response=response, body=body)
+
+        if response.status_code >= 500:
+            return _exceptions.InternalServerError(err_msg, response=response, body=body)
+        return APIStatusError(err_msg, response=response, body=body)
+
+
+class RaccoonAIWithRawResponse:
+    def __init__(self, client: RaccoonAI) -> None:
+        self.lam = lam.LamResourceWithRawResponse(client.lam)
+        self.fleet = fleet.FleetResourceWithRawResponse(client.fleet)
+
+
+class AsyncRaccoonAIWithRawResponse:
+    def __init__(self, client: AsyncRaccoonAI) -> None:
+        self.lam = lam.AsyncLamResourceWithRawResponse(client.lam)
+        self.fleet = fleet.AsyncFleetResourceWithRawResponse(client.fleet)
+
+
+class RaccoonAIWithStreamedResponse:
+    def __init__(self, client: RaccoonAI) -> None:
+        self.lam = lam.LamResourceWithStreamingResponse(client.lam)
+        self.fleet = fleet.FleetResourceWithStreamingResponse(client.fleet)
+
+
+class AsyncRaccoonAIWithStreamedResponse:
+    def __init__(self, client: AsyncRaccoonAI) -> None:
+        self.lam = lam.AsyncLamResourceWithStreamingResponse(client.lam)
+        self.fleet = fleet.AsyncFleetResourceWithStreamingResponse(client.fleet)
+
+
+Client = RaccoonAI
+
+AsyncClient = AsyncRaccoonAI
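
A short usage sketch of the client options defined above, limited to what _client.py itself shows: environment selection against ENVIRONMENTS, the copy()/with_options alias, and the status-code-to-exception mapping. The top-level import is assumed, and no lam/fleet resource methods are called since those files' interfaces are not reproduced here.

from raccoonai import RaccoonAI  # assumed re-export of the class above
from raccoonai._exceptions import RateLimitError  # re-exported publicly in __init__.py, presumably

# environment picks a base URL from ENVIRONMENTS; passing base_url directly bypasses the map.
client = RaccoonAI(secret_key="sk-test", environment="staging")

# with_options is an alias for copy(): it returns a new configured client, the original is untouched.
quick = client.with_options(timeout=10, max_retries=0)

# _make_status_error maps HTTP statuses to typed exceptions, e.g. 429 -> RateLimitError.
try:
    ...  # call a lam/fleet method here
except RateLimitError:
    ...  # back off and retry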
raccoonai/_compat.py ADDED
@@ -0,0 +1,219 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
+from datetime import date, datetime
+from typing_extensions import Self, Literal
+
+import pydantic
+from pydantic.fields import FieldInfo
+
+from ._types import IncEx, StrBytesIntFloat
+
+_T = TypeVar("_T")
+_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
+
+# --------------- Pydantic v2 compatibility ---------------
+
+# Pyright incorrectly reports some of our functions as overriding a method when they don't
+# pyright: reportIncompatibleMethodOverride=false
+
+PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+
+# v1 re-exports
+if TYPE_CHECKING:
+
+    def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
+        ...
+
+    def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:  # noqa: ARG001
+        ...
+
+    def get_args(t: type[Any]) -> tuple[Any, ...]:  # noqa: ARG001
+        ...
+
+    def is_union(tp: type[Any] | None) -> bool:  # noqa: ARG001
+        ...
+
+    def get_origin(t: type[Any]) -> type[Any] | None:  # noqa: ARG001
+        ...
+
+    def is_literal_type(type_: type[Any]) -> bool:  # noqa: ARG001
+        ...
+
+    def is_typeddict(type_: type[Any]) -> bool:  # noqa: ARG001
+        ...
+
+else:
+    if PYDANTIC_V2:
+        from pydantic.v1.typing import (
+            get_args as get_args,
+            is_union as is_union,
+            get_origin as get_origin,
+            is_typeddict as is_typeddict,
+            is_literal_type as is_literal_type,
+        )
+        from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+    else:
+        from pydantic.typing import (
+            get_args as get_args,
+            is_union as is_union,
+            get_origin as get_origin,
+            is_typeddict as is_typeddict,
+            is_literal_type as is_literal_type,
+        )
+        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+
+
+# refactored config
+if TYPE_CHECKING:
+    from pydantic import ConfigDict as ConfigDict
+else:
+    if PYDANTIC_V2:
+        from pydantic import ConfigDict
+    else:
+        # TODO: provide an error message here?
+        ConfigDict = None
+
+
+# renamed methods / properties
+def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
+    if PYDANTIC_V2:
+        return model.model_validate(value)
+    else:
+        return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+
+
+def field_is_required(field: FieldInfo) -> bool:
+    if PYDANTIC_V2:
+        return field.is_required()
+    return field.required  # type: ignore
+
+
+def field_get_default(field: FieldInfo) -> Any:
+    value = field.get_default()
+    if PYDANTIC_V2:
+        from pydantic_core import PydanticUndefined
+
+        if value == PydanticUndefined:
+            return None
+        return value
+    return value
+
+
+def field_outer_type(field: FieldInfo) -> Any:
+    if PYDANTIC_V2:
+        return field.annotation
+    return field.outer_type_  # type: ignore
+
+
+def get_model_config(model: type[pydantic.BaseModel]) -> Any:
+    if PYDANTIC_V2:
+        return model.model_config
+    return model.__config__  # type: ignore
+
+
+def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
+    if PYDANTIC_V2:
+        return model.model_fields
+    return model.__fields__  # type: ignore
+
+
+def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
+    if PYDANTIC_V2:
+        return model.model_copy(deep=deep)
+    return model.copy(deep=deep)  # type: ignore
+
+
+def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
+    if PYDANTIC_V2:
+        return model.model_dump_json(indent=indent)
+    return model.json(indent=indent)  # type: ignore
+
+
+def model_dump(
+    model: pydantic.BaseModel,
+    *,
+    exclude: IncEx | None = None,
+    exclude_unset: bool = False,
+    exclude_defaults: bool = False,
+    warnings: bool = True,
+    mode: Literal["json", "python"] = "python",
+) -> dict[str, Any]:
+    if PYDANTIC_V2 or hasattr(model, "model_dump"):
+        return model.model_dump(
+            mode=mode,
+            exclude=exclude,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            # warnings are not supported in Pydantic v1
+            warnings=warnings if PYDANTIC_V2 else True,
+        )
+    return cast(
+        "dict[str, Any]",
+        model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+            exclude=exclude,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+        ),
+    )
+
+
+def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
+    if PYDANTIC_V2:
+        return model.model_validate(data)
+    return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
+
+
+# generic models
+if TYPE_CHECKING:
+
+    class GenericModel(pydantic.BaseModel): ...
+
+else:
+    if PYDANTIC_V2:
+        # there no longer needs to be a distinction in v2 but
+        # we still have to create our own subclass to avoid
+        # inconsistent MRO ordering errors
+        class GenericModel(pydantic.BaseModel): ...
+
+    else:
+        import pydantic.generics
+
+        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+
+
+# cached properties
+if TYPE_CHECKING:
+    cached_property = property
+
+    # we define a separate type (copied from typeshed)
+    # that represents that `cached_property` is `set`able
+    # at runtime, which differs from `@property`.
+    #
+    # this is a separate type as editors likely special case
+    # `@property` and we don't want to cause issues just to have
+    # more helpful internal types.
+
+    class typed_cached_property(Generic[_T]):
+        func: Callable[[Any], _T]
+        attrname: str | None
+
+        def __init__(self, func: Callable[[Any], _T]) -> None: ...
+
+        @overload
+        def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...
+
+        @overload
+        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ...
+
+        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
+            raise NotImplementedError()
+
+        def __set_name__(self, owner: type[Any], name: str) -> None: ...
+
+        # __set__ is not defined at runtime, but @cached_property is designed to be settable
+        def __set__(self, instance: object, value: _T) -> None: ...
+else:
+    from functools import cached_property as cached_property
+
+    typed_cached_property = cached_property
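
The helpers above give the SDK a single call surface over both Pydantic v1 and v2. A small illustration of the version-agnostic functions (model_parse, model_dump, model_json) applied to an ordinary user-defined model; note that _compat is an internal module, so importing from it is for illustration rather than a supported API.

import pydantic

from raccoonai._compat import PYDANTIC_V2, model_dump, model_json, model_parse


class Job(pydantic.BaseModel):
    id: str
    status: str = "queued"


job = model_parse(Job, {"id": "job_123"})      # model_validate() on v2, parse_obj() on v1
payload = model_dump(job, exclude_unset=True)  # model_dump() on v2, dict() on v1
print(PYDANTIC_V2, payload, model_json(job, indent=2))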
raccoonai/_constants.py ADDED
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import httpx
+
+RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
+OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"
+
+# default timeout is 1 minute
+DEFAULT_TIMEOUT = httpx.Timeout(timeout=60.0, connect=5.0)
+DEFAULT_MAX_RETRIES = 2
+DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)
+
+INITIAL_RETRY_DELAY = 0.5
+MAX_RETRY_DELAY = 8.0
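
These defaults feed the base client: requests time out after 60 seconds (5 seconds to connect) and failed requests are retried twice, with INITIAL_RETRY_DELAY and MAX_RETRY_DELAY presumably bounding the backoff between attempts (the retry loop lives in _base_client.py, which is not reproduced here). A sketch of overriding them per client through the constructor arguments shown in _client.py, again assuming the top-level re-export:

import httpx

from raccoonai import RaccoonAI  # assumed top-level re-export

client = RaccoonAI(
    secret_key="sk-test",
    timeout=httpx.Timeout(timeout=30.0, connect=5.0),  # tighter than DEFAULT_TIMEOUT (60s)
    max_retries=0,                                      # disable the 2 default retries
    http_client=httpx.Client(
        # mirrors DEFAULT_CONNECTION_LIMITS; supplying http_client means you own these settings
        limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
    ),
)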