chunkr-ai 0.1.0__py3-none-any.whl → 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. chunkr_ai/__init__.py +89 -2
  2. chunkr_ai/_base_client.py +1995 -0
  3. chunkr_ai/_client.py +402 -0
  4. chunkr_ai/_compat.py +219 -0
  5. chunkr_ai/_constants.py +14 -0
  6. chunkr_ai/_exceptions.py +108 -0
  7. chunkr_ai/_files.py +123 -0
  8. chunkr_ai/_models.py +829 -0
  9. chunkr_ai/_qs.py +150 -0
  10. chunkr_ai/_resource.py +43 -0
  11. chunkr_ai/_response.py +830 -0
  12. chunkr_ai/_streaming.py +333 -0
  13. chunkr_ai/_types.py +219 -0
  14. chunkr_ai/_utils/__init__.py +57 -0
  15. chunkr_ai/_utils/_logs.py +25 -0
  16. chunkr_ai/_utils/_proxy.py +65 -0
  17. chunkr_ai/_utils/_reflection.py +42 -0
  18. chunkr_ai/_utils/_resources_proxy.py +24 -0
  19. chunkr_ai/_utils/_streams.py +12 -0
  20. chunkr_ai/_utils/_sync.py +86 -0
  21. chunkr_ai/_utils/_transform.py +447 -0
  22. chunkr_ai/_utils/_typing.py +151 -0
  23. chunkr_ai/_utils/_utils.py +422 -0
  24. chunkr_ai/_version.py +4 -0
  25. chunkr_ai/lib/.keep +4 -0
  26. chunkr_ai/pagination.py +71 -0
  27. chunkr_ai/resources/__init__.py +33 -0
  28. chunkr_ai/resources/health.py +136 -0
  29. chunkr_ai/resources/task.py +1166 -0
  30. chunkr_ai/types/__init__.py +27 -0
  31. chunkr_ai/types/auto_generation_config.py +39 -0
  32. chunkr_ai/types/auto_generation_config_param.py +39 -0
  33. chunkr_ai/types/bounding_box.py +19 -0
  34. chunkr_ai/types/chunk_processing.py +40 -0
  35. chunkr_ai/types/chunk_processing_param.py +42 -0
  36. chunkr_ai/types/health_check_response.py +7 -0
  37. chunkr_ai/types/ignore_generation_config.py +39 -0
  38. chunkr_ai/types/ignore_generation_config_param.py +39 -0
  39. chunkr_ai/types/llm_generation_config.py +39 -0
  40. chunkr_ai/types/llm_generation_config_param.py +39 -0
  41. chunkr_ai/types/llm_processing.py +36 -0
  42. chunkr_ai/types/llm_processing_param.py +36 -0
  43. chunkr_ai/types/picture_generation_config.py +39 -0
  44. chunkr_ai/types/picture_generation_config_param.py +39 -0
  45. chunkr_ai/types/segment_processing.py +280 -0
  46. chunkr_ai/types/segment_processing_param.py +281 -0
  47. chunkr_ai/types/table_generation_config.py +39 -0
  48. chunkr_ai/types/table_generation_config_param.py +39 -0
  49. chunkr_ai/types/task.py +379 -0
  50. chunkr_ai/types/task_get_params.py +18 -0
  51. chunkr_ai/types/task_list_params.py +37 -0
  52. chunkr_ai/types/task_parse_params.py +90 -0
  53. chunkr_ai/types/task_update_params.py +90 -0
  54. chunkr_ai-0.1.0a1.dist-info/METADATA +504 -0
  55. chunkr_ai-0.1.0a1.dist-info/RECORD +58 -0
  56. {chunkr_ai-0.1.0.dist-info → chunkr_ai-0.1.0a1.dist-info}/WHEEL +1 -2
  57. chunkr_ai-0.1.0a1.dist-info/licenses/LICENSE +201 -0
  58. chunkr_ai/api/auth.py +0 -13
  59. chunkr_ai/api/chunkr.py +0 -103
  60. chunkr_ai/api/chunkr_base.py +0 -185
  61. chunkr_ai/api/configuration.py +0 -313
  62. chunkr_ai/api/decorators.py +0 -101
  63. chunkr_ai/api/misc.py +0 -139
  64. chunkr_ai/api/protocol.py +0 -14
  65. chunkr_ai/api/task_response.py +0 -208
  66. chunkr_ai/models.py +0 -55
  67. chunkr_ai-0.1.0.dist-info/METADATA +0 -268
  68. chunkr_ai-0.1.0.dist-info/RECORD +0 -16
  69. chunkr_ai-0.1.0.dist-info/licenses/LICENSE +0 -21
  70. chunkr_ai-0.1.0.dist-info/top_level.txt +0 -1
  71. /chunkr_ai/{api/__init__.py → py.typed} +0 -0
chunkr_ai/_models.py ADDED
@@ -0,0 +1,829 @@
+ from __future__ import annotations
+
+ import os
+ import inspect
+ from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
+ from datetime import date, datetime
+ from typing_extensions import (
+     List,
+     Unpack,
+     Literal,
+     ClassVar,
+     Protocol,
+     Required,
+     ParamSpec,
+     TypedDict,
+     TypeGuard,
+     final,
+     override,
+     runtime_checkable,
+ )
+
+ import pydantic
+ from pydantic.fields import FieldInfo
+
+ from ._types import (
+     Body,
+     IncEx,
+     Query,
+     ModelT,
+     Headers,
+     Timeout,
+     NotGiven,
+     AnyMapping,
+     HttpxRequestFiles,
+ )
+ from ._utils import (
+     PropertyInfo,
+     is_list,
+     is_given,
+     json_safe,
+     lru_cache,
+     is_mapping,
+     parse_date,
+     coerce_boolean,
+     parse_datetime,
+     strip_not_given,
+     extract_type_arg,
+     is_annotated_type,
+     is_type_alias_type,
+     strip_annotated_type,
+ )
+ from ._compat import (
+     PYDANTIC_V2,
+     ConfigDict,
+     GenericModel as BaseGenericModel,
+     get_args,
+     is_union,
+     parse_obj,
+     get_origin,
+     is_literal_type,
+     get_model_config,
+     get_model_fields,
+     field_get_default,
+ )
+ from ._constants import RAW_RESPONSE_HEADER
+
+ if TYPE_CHECKING:
+     from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
+
+ __all__ = ["BaseModel", "GenericModel"]
+
+ _T = TypeVar("_T")
+ _BaseModelT = TypeVar("_BaseModelT", bound="BaseModel")
+
+ P = ParamSpec("P")
+
+
+ @runtime_checkable
+ class _ConfigProtocol(Protocol):
+     allow_population_by_field_name: bool
+
+
+ class BaseModel(pydantic.BaseModel):
+     if PYDANTIC_V2:
+         model_config: ClassVar[ConfigDict] = ConfigDict(
+             extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+         )
+     else:
+
+         @property
+         @override
+         def model_fields_set(self) -> set[str]:
+             # a forwards-compat shim for pydantic v2
+             return self.__fields_set__  # type: ignore
+
+         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
+             extra: Any = pydantic.Extra.allow  # type: ignore
+
+     def to_dict(
+         self,
+         *,
+         mode: Literal["json", "python"] = "python",
+         use_api_names: bool = True,
+         exclude_unset: bool = True,
+         exclude_defaults: bool = False,
+         exclude_none: bool = False,
+         warnings: bool = True,
+     ) -> dict[str, object]:
+         """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+
+         By default, fields that were not set by the API will not be included,
+         and keys will match the API response, *not* the property names from the model.
+
+         For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
+         the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).
+
+         Args:
+             mode:
+                 If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`.
+                 If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)`
+
+             use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
+             exclude_unset: Whether to exclude fields that have not been explicitly set.
+             exclude_defaults: Whether to exclude fields that are set to their default value from the output.
+             exclude_none: Whether to exclude fields that have a value of `None` from the output.
+             warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2.
+         """
+         return self.model_dump(
+             mode=mode,
+             by_alias=use_api_names,
+             exclude_unset=exclude_unset,
+             exclude_defaults=exclude_defaults,
+             exclude_none=exclude_none,
+             warnings=warnings,
+         )
+
+     def to_json(
+         self,
+         *,
+         indent: int | None = 2,
+         use_api_names: bool = True,
+         exclude_unset: bool = True,
+         exclude_defaults: bool = False,
+         exclude_none: bool = False,
+         warnings: bool = True,
+     ) -> str:
+         """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation).
+
+         By default, fields that were not set by the API will not be included,
+         and keys will match the API response, *not* the property names from the model.
+
+         For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
+         the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).
+
+         Args:
+             indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2`
+             use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
+             exclude_unset: Whether to exclude fields that have not been explicitly set.
+             exclude_defaults: Whether to exclude fields that have the default value.
+             exclude_none: Whether to exclude fields that have a value of `None`.
+             warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2.
+         """
+         return self.model_dump_json(
+             indent=indent,
+             by_alias=use_api_names,
+             exclude_unset=exclude_unset,
+             exclude_defaults=exclude_defaults,
+             exclude_none=exclude_none,
+             warnings=warnings,
+         )
+
+     @override
+     def __str__(self) -> str:
+         # mypy complains about an invalid self arg
+         return f"{self.__repr_name__()}({self.__repr_str__(', ')})"  # type: ignore[misc]
+
+     # Override the 'construct' method in a way that supports recursive parsing without validation.
+     # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
+     @classmethod
+     @override
+     def construct(  # pyright: ignore[reportIncompatibleMethodOverride]
+         __cls: Type[ModelT],
+         _fields_set: set[str] | None = None,
+         **values: object,
+     ) -> ModelT:
+         m = __cls.__new__(__cls)
+         fields_values: dict[str, object] = {}
+
+         config = get_model_config(__cls)
+         populate_by_name = (
+             config.allow_population_by_field_name
+             if isinstance(config, _ConfigProtocol)
+             else config.get("populate_by_name")
+         )
+
+         if _fields_set is None:
+             _fields_set = set()
+
+         model_fields = get_model_fields(__cls)
+         for name, field in model_fields.items():
+             key = field.alias
+             if key is None or (key not in values and populate_by_name):
+                 key = name
+
+             if key in values:
+                 fields_values[name] = _construct_field(value=values[key], field=field, key=key)
+                 _fields_set.add(name)
+             else:
+                 fields_values[name] = field_get_default(field)
+
+         extra_field_type = _get_extra_fields_type(__cls)
+
+         _extra = {}
+         for key, value in values.items():
+             if key not in model_fields:
+                 parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
+
+                 if PYDANTIC_V2:
+                     _extra[key] = parsed
+                 else:
+                     _fields_set.add(key)
+                     fields_values[key] = parsed
+
+         object.__setattr__(m, "__dict__", fields_values)
+
+         if PYDANTIC_V2:
+             # these properties are copied from Pydantic's `model_construct()` method
+             object.__setattr__(m, "__pydantic_private__", None)
+             object.__setattr__(m, "__pydantic_extra__", _extra)
+             object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
+         else:
+             # init_private_attributes() does not exist in v2
+             m._init_private_attributes()  # type: ignore
+
+             # copied from Pydantic v1's `construct()` method
+             object.__setattr__(m, "__fields_set__", _fields_set)
+
+         return m
+
+     if not TYPE_CHECKING:
+         # type checkers incorrectly complain about this assignment
+         # because the type signatures are technically different
+         # although not in practice
+         model_construct = construct
+
+     if not PYDANTIC_V2:
+         # we define aliases for some of the new pydantic v2 methods so
+         # that we can just document these methods without having to specify
+         # a specific pydantic version as some users may not know which
+         # pydantic version they are currently using
+
+         @override
+         def model_dump(
+             self,
+             *,
+             mode: Literal["json", "python"] | str = "python",
+             include: IncEx | None = None,
+             exclude: IncEx | None = None,
+             by_alias: bool = False,
+             exclude_unset: bool = False,
+             exclude_defaults: bool = False,
+             exclude_none: bool = False,
+             round_trip: bool = False,
+             warnings: bool | Literal["none", "warn", "error"] = True,
+             context: dict[str, Any] | None = None,
+             serialize_as_any: bool = False,
+         ) -> dict[str, Any]:
+             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
+
+             Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+
+             Args:
+                 mode: The mode in which `to_python` should run.
+                     If mode is 'json', the dictionary will only contain JSON serializable types.
+                     If mode is 'python', the dictionary may contain any Python objects.
+                 include: A list of fields to include in the output.
+                 exclude: A list of fields to exclude from the output.
+                 by_alias: Whether to use the field's alias in the dictionary key if defined.
+                 exclude_unset: Whether to exclude fields that are unset or None from the output.
+                 exclude_defaults: Whether to exclude fields that are set to their default value from the output.
+                 exclude_none: Whether to exclude fields that have a value of `None` from the output.
+                 round_trip: Whether to enable serialization and deserialization round-trip support.
+                 warnings: Whether to log warnings when invalid fields are encountered.
+
+             Returns:
+                 A dictionary representation of the model.
+             """
+             if mode not in {"json", "python"}:
+                 raise ValueError("mode must be either 'json' or 'python'")
+             if round_trip != False:
+                 raise ValueError("round_trip is only supported in Pydantic v2")
+             if warnings != True:
+                 raise ValueError("warnings is only supported in Pydantic v2")
+             if context is not None:
+                 raise ValueError("context is only supported in Pydantic v2")
+             if serialize_as_any != False:
+                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+             dumped = super().dict(  # pyright: ignore[reportDeprecated]
+                 include=include,
+                 exclude=exclude,
+                 by_alias=by_alias,
+                 exclude_unset=exclude_unset,
+                 exclude_defaults=exclude_defaults,
+                 exclude_none=exclude_none,
+             )
+
+             return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+
+         @override
+         def model_dump_json(
+             self,
+             *,
+             indent: int | None = None,
+             include: IncEx | None = None,
+             exclude: IncEx | None = None,
+             by_alias: bool = False,
+             exclude_unset: bool = False,
+             exclude_defaults: bool = False,
+             exclude_none: bool = False,
+             round_trip: bool = False,
+             warnings: bool | Literal["none", "warn", "error"] = True,
+             context: dict[str, Any] | None = None,
+             serialize_as_any: bool = False,
+         ) -> str:
+             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
+
+             Generates a JSON representation of the model using Pydantic's `to_json` method.
+
+             Args:
+                 indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+                 include: Field(s) to include in the JSON output. Can take either a string or set of strings.
+                 exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings.
+                 by_alias: Whether to serialize using field aliases.
+                 exclude_unset: Whether to exclude fields that have not been explicitly set.
+                 exclude_defaults: Whether to exclude fields that have the default value.
+                 exclude_none: Whether to exclude fields that have a value of `None`.
+                 round_trip: Whether to use serialization/deserialization between JSON and class instance.
+                 warnings: Whether to show any warnings that occurred during serialization.
+
+             Returns:
+                 A JSON string representation of the model.
+             """
+             if round_trip != False:
+                 raise ValueError("round_trip is only supported in Pydantic v2")
+             if warnings != True:
+                 raise ValueError("warnings is only supported in Pydantic v2")
+             if context is not None:
+                 raise ValueError("context is only supported in Pydantic v2")
+             if serialize_as_any != False:
+                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+             return super().json(  # type: ignore[reportDeprecated]
+                 indent=indent,
+                 include=include,
+                 exclude=exclude,
+                 by_alias=by_alias,
+                 exclude_unset=exclude_unset,
+                 exclude_defaults=exclude_defaults,
+                 exclude_none=exclude_none,
+             )
+
+
+ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
+     if value is None:
+         return field_get_default(field)
+
+     if PYDANTIC_V2:
+         type_ = field.annotation
+     else:
+         type_ = cast(type, field.outer_type_)  # type: ignore
+
+     if type_ is None:
+         raise RuntimeError(f"Unexpected field type is None for {key}")
+
+     return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
+
+
+ def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
+     if not PYDANTIC_V2:
+         # TODO
+         return None
+
+     schema = cls.__pydantic_core_schema__
+     if schema["type"] == "model":
+         fields = schema["schema"]
+         if fields["type"] == "model-fields":
+             extras = fields.get("extras_schema")
+             if extras and "cls" in extras:
+                 # mypy can't narrow the type
+                 return extras["cls"]  # type: ignore[no-any-return]
+
+     return None
+
+
+ def is_basemodel(type_: type) -> bool:
+     """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`"""
+     if is_union(type_):
+         for variant in get_args(type_):
+             if is_basemodel(variant):
+                 return True
+
+         return False
+
+     return is_basemodel_type(type_)
+
+
+ def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]:
+     origin = get_origin(type_) or type_
+     if not inspect.isclass(origin):
+         return False
+     return issubclass(origin, BaseModel) or issubclass(origin, GenericModel)
+
+
+ def build(
+     base_model_cls: Callable[P, _BaseModelT],
+     *args: P.args,
+     **kwargs: P.kwargs,
+ ) -> _BaseModelT:
+     """Construct a BaseModel class without validation.
+
+     This is useful for cases where you need to instantiate a `BaseModel`
+     from an API response as this provides type-safe params which isn't supported
+     by helpers like `construct_type()`.
+
+     ```py
+     build(MyModel, my_field_a="foo", my_field_b=123)
+     ```
+     """
+     if args:
+         raise TypeError(
+             "Received positional arguments which are not supported; Keyword arguments must be used instead",
+         )
+
+     return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs))
+
+
+ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
+     """Loose coercion to the expected type with construction of nested values.
+
+     Note: the returned value from this function is not guaranteed to match the
+     given type.
+     """
+     return cast(_T, construct_type(value=value, type_=type_))
+
+
+ def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
+     """Loose coercion to the expected type with construction of nested values.
+
+     If the given value does not match the expected type then it is returned as-is.
+     """
+
+     # store a reference to the original type we were given before we extract any inner
+     # types so that we can properly resolve forward references in `TypeAliasType` annotations
+     original_type = None
+
+     # we allow `object` as the input type because otherwise, passing things like
+     # `Literal['value']` will be reported as a type error by type checkers
+     type_ = cast("type[object]", type_)
+     if is_type_alias_type(type_):
+         original_type = type_  # type: ignore[unreachable]
+         type_ = type_.__value__  # type: ignore[unreachable]
+
+     # unwrap `Annotated[T, ...]` -> `T`
+     if metadata is not None and len(metadata) > 0:
+         meta: tuple[Any, ...] = tuple(metadata)
+     elif is_annotated_type(type_):
+         meta = get_args(type_)[1:]
+         type_ = extract_type_arg(type_, 0)
+     else:
+         meta = tuple()
+
+     # we need to use the origin class for any types that are subscripted generics
+     # e.g. Dict[str, object]
+     origin = get_origin(type_) or type_
+     args = get_args(type_)
+
+     if is_union(origin):
+         try:
+             return validate_type(type_=cast("type[object]", original_type or type_), value=value)
+         except Exception:
+             pass
+
+         # if the type is a discriminated union then we want to construct the right variant
+         # in the union, even if the data doesn't match exactly, otherwise we'd break code
+         # that relies on the constructed class types, e.g.
+         #
+         # class FooType:
+         #   kind: Literal['foo']
+         #   value: str
+         #
+         # class BarType:
+         #   kind: Literal['bar']
+         #   value: int
+         #
+         # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then
+         # we'd end up constructing `FooType` when it should be `BarType`.
+         discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta)
+         if discriminator and is_mapping(value):
+             variant_value = value.get(discriminator.field_alias_from or discriminator.field_name)
+             if variant_value and isinstance(variant_value, str):
+                 variant_type = discriminator.mapping.get(variant_value)
+                 if variant_type:
+                     return construct_type(type_=variant_type, value=value)
+
+         # if the data is not valid, use the first variant that doesn't fail while deserializing
+         for variant in args:
+             try:
+                 return construct_type(value=value, type_=variant)
+             except Exception:
+                 continue
+
+         raise RuntimeError(f"Could not convert data into a valid instance of {type_}")
+
+     if origin == dict:
+         if not is_mapping(value):
+             return value
+
+         _, items_type = get_args(type_)  # Dict[_, items_type]
+         return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}
+
+     if (
+         not is_literal_type(type_)
+         and inspect.isclass(origin)
+         and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel))
+     ):
+         if is_list(value):
+             return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]
+
+         if is_mapping(value):
+             if issubclass(type_, BaseModel):
+                 return type_.construct(**value)  # type: ignore[arg-type]
+
+             return cast(Any, type_).construct(**value)
+
+     if origin == list:
+         if not is_list(value):
+             return value
+
+         inner_type = args[0]  # List[inner_type]
+         return [construct_type(value=entry, type_=inner_type) for entry in value]
+
+     if origin == float:
+         if isinstance(value, int):
+             coerced = float(value)
+             if coerced != value:
+                 return value
+             return coerced
+
+         return value
+
+     if type_ == datetime:
+         try:
+             return parse_datetime(value)  # type: ignore
+         except Exception:
+             return value
+
+     if type_ == date:
+         try:
+             return parse_date(value)  # type: ignore
+         except Exception:
+             return value
+
+     return value
+
+
+ @runtime_checkable
+ class CachedDiscriminatorType(Protocol):
+     __discriminator__: DiscriminatorDetails
+
+
+ class DiscriminatorDetails:
+     field_name: str
+     """The name of the discriminator field in the variant class, e.g.
+
+     ```py
+     class Foo(BaseModel):
+         type: Literal['foo']
+     ```
+
+     Will result in field_name='type'
+     """
+
+     field_alias_from: str | None
+     """The name of the discriminator field in the API response, e.g.
+
+     ```py
+     class Foo(BaseModel):
+         type: Literal['foo'] = Field(alias='type_from_api')
+     ```
+
+     Will result in field_alias_from='type_from_api'
+     """
+
+     mapping: dict[str, type]
+     """Mapping of discriminator value to variant type, e.g.
+
+     {'foo': FooVariant, 'bar': BarVariant}
+     """
+
+     def __init__(
+         self,
+         *,
+         mapping: dict[str, type],
+         discriminator_field: str,
+         discriminator_alias: str | None,
+     ) -> None:
+         self.mapping = mapping
+         self.field_name = discriminator_field
+         self.field_alias_from = discriminator_alias
+
+
+ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None:
+     if isinstance(union, CachedDiscriminatorType):
+         return union.__discriminator__
+
+     discriminator_field_name: str | None = None
+
+     for annotation in meta_annotations:
+         if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None:
+             discriminator_field_name = annotation.discriminator
+             break
+
+     if not discriminator_field_name:
+         return None
+
+     mapping: dict[str, type] = {}
+     discriminator_alias: str | None = None
+
+     for variant in get_args(union):
+         variant = strip_annotated_type(variant)
+         if is_basemodel_type(variant):
+             if PYDANTIC_V2:
+                 field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                 if not field:
+                     continue
+
+                 # Note: if one variant defines an alias then they all should
+                 discriminator_alias = field.get("serialization_alias")
+
+                 field_schema = field["schema"]
+
+                 if field_schema["type"] == "literal":
+                     for entry in cast("LiteralSchema", field_schema)["expected"]:
+                         if isinstance(entry, str):
+                             mapping[entry] = variant
+             else:
+                 field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                 if not field_info:
+                     continue
+
+                 # Note: if one variant defines an alias then they all should
+                 discriminator_alias = field_info.alias
+
+                 if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                     for entry in get_args(annotation):
+                         if isinstance(entry, str):
+                             mapping[entry] = variant
+
+     if not mapping:
+         return None
+
+     details = DiscriminatorDetails(
+         mapping=mapping,
+         discriminator_field=discriminator_field_name,
+         discriminator_alias=discriminator_alias,
+     )
+     cast(CachedDiscriminatorType, union).__discriminator__ = details
+     return details
+
+
+ def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None:
+     schema = model.__pydantic_core_schema__
+     if schema["type"] == "definitions":
+         schema = schema["schema"]
+
+     if schema["type"] != "model":
+         return None
+
+     schema = cast("ModelSchema", schema)
+     fields_schema = schema["schema"]
+     if fields_schema["type"] != "model-fields":
+         return None
+
+     fields_schema = cast("ModelFieldsSchema", fields_schema)
+     field = fields_schema["fields"].get(field_name)
+     if not field:
+         return None
+
+     return cast("ModelField", field)  # pyright: ignore[reportUnnecessaryCast]
+
+
+ def validate_type(*, type_: type[_T], value: object) -> _T:
+     """Strict validation that the given value matches the expected type"""
+     if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel):
+         return cast(_T, parse_obj(type_, value))
+
+     return cast(_T, _validate_non_model_type(type_=type_, value=value))
+
+
+ def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
+     """Add a pydantic config for the given type.
+
+     Note: this is a no-op on Pydantic v1.
+     """
+     setattr(typ, "__pydantic_config__", config)  # noqa: B010
+
+
+ # our use of subclassing here causes weirdness for type checkers,
+ # so we just pretend that we don't subclass
+ if TYPE_CHECKING:
+     GenericModel = BaseModel
+ else:
+
+     class GenericModel(BaseGenericModel, BaseModel):
+         pass
+
+
+ if PYDANTIC_V2:
+     from pydantic import TypeAdapter as _TypeAdapter
+
+     _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
+
+     if TYPE_CHECKING:
+         from pydantic import TypeAdapter
+     else:
+         TypeAdapter = _CachedTypeAdapter
+
+     def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
+         return TypeAdapter(type_).validate_python(value)
+
+ elif not TYPE_CHECKING:  # TODO: condition is weird
+
+     class RootModel(GenericModel, Generic[_T]):
+         """Used as a placeholder to easily convert runtime types to a Pydantic format
+         to provide validation.
+
+         For example:
+         ```py
+         validated = RootModel[int](__root__="5").__root__
+         # validated: 5
+         ```
+         """
+
+         __root__: _T
+
+     def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
+         model = _create_pydantic_model(type_).validate(value)
+         return cast(_T, model.__root__)
+
+     def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
+         return RootModel[type_]  # type: ignore
+
+
+ class FinalRequestOptionsInput(TypedDict, total=False):
+     method: Required[str]
+     url: Required[str]
+     params: Query
+     headers: Headers
+     max_retries: int
+     timeout: float | Timeout | None
+     files: HttpxRequestFiles | None
+     idempotency_key: str
+     json_data: Body
+     extra_json: AnyMapping
+     follow_redirects: bool
+
+
+ @final
+ class FinalRequestOptions(pydantic.BaseModel):
+     method: str
+     url: str
+     params: Query = {}
+     headers: Union[Headers, NotGiven] = NotGiven()
+     max_retries: Union[int, NotGiven] = NotGiven()
+     timeout: Union[float, Timeout, None, NotGiven] = NotGiven()
+     files: Union[HttpxRequestFiles, None] = None
+     idempotency_key: Union[str, None] = None
+     post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
+     follow_redirects: Union[bool, None] = None
+
+     # It should be noted that we cannot use `json` here as that would override
+     # a BaseModel method in an incompatible fashion.
+     json_data: Union[Body, None] = None
+     extra_json: Union[AnyMapping, None] = None
+
+     if PYDANTIC_V2:
+         model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
+     else:
+
+         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
+             arbitrary_types_allowed: bool = True
+
+     def get_max_retries(self, max_retries: int) -> int:
+         if isinstance(self.max_retries, NotGiven):
+             return max_retries
+         return self.max_retries
+
+     def _strip_raw_response_header(self) -> None:
+         if not is_given(self.headers):
+             return
+
+         if self.headers.get(RAW_RESPONSE_HEADER):
+             self.headers = {**self.headers}
+             self.headers.pop(RAW_RESPONSE_HEADER)
+
+     # override the `construct` method so that we can run custom transformations.
+     # this is necessary as we don't want to do any actual runtime type checking
+     # (which means we can't use validators) but we do want to ensure that `NotGiven`
+     # values are not present
+     #
+     # type ignore required because we're adding explicit types to `**values`
+     @classmethod
+     def construct(  # type: ignore
+         cls,
+         _fields_set: set[str] | None = None,
+         **values: Unpack[FinalRequestOptionsInput],
+     ) -> FinalRequestOptions:
+         kwargs: dict[str, Any] = {
+             # we unconditionally call `strip_not_given` on any value
+             # as it will just ignore any non-mapping types
+             key: strip_not_given(value)
+             for key, value in values.items()
+         }
+         if PYDANTIC_V2:
+             return super().model_construct(_fields_set, **kwargs)
+         return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+
+     if not TYPE_CHECKING:
+         # type checkers incorrectly complain about this assignment
+         model_construct = construct
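
For orientation, here is a minimal usage sketch (not part of the diff) of the helpers that the added chunkr_ai/_models.py describes above: the non-validating construct() path and the to_dict()/to_json() serializers. The Widget model and its fields are invented for illustration, and chunkr_ai._models is an internal module, so treat the import path as an assumption rather than documented public API.

```py
# Hypothetical sketch -- `Widget` and its fields are invented; `chunkr_ai._models`
# is an internal module, so this import is an assumption, not documented API.
from typing import List, Optional

from pydantic import Field

from chunkr_ai._models import BaseModel, construct_type


class Widget(BaseModel):
    widget_id: str = Field(alias="widgetId")
    size: Optional[int] = None


# construct() recursively builds the model from raw API data without validation;
# unknown keys are kept as extra fields instead of raising.
w = Widget.construct(widgetId="abc", size=5)

print(w.to_dict())                     # {'widgetId': 'abc', 'size': 5} -- API keys, unset fields dropped
print(w.to_dict(use_api_names=False))  # {'widget_id': 'abc', 'size': 5} -- Python property names
print(w.to_json())                     # the same keys, as an indented JSON string

# construct_type() applies the same loose coercion to arbitrary annotations.
widgets = construct_type(value=[{"widgetId": "x"}], type_=List[Widget])
```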