supermemory-3.7.0-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. supermemory/__init__.py +102 -0
  2. supermemory/_base_client.py +1995 -0
  3. supermemory/_client.py +683 -0
  4. supermemory/_compat.py +219 -0
  5. supermemory/_constants.py +14 -0
  6. supermemory/_exceptions.py +108 -0
  7. supermemory/_files.py +123 -0
  8. supermemory/_models.py +857 -0
  9. supermemory/_qs.py +150 -0
  10. supermemory/_resource.py +43 -0
  11. supermemory/_response.py +832 -0
  12. supermemory/_streaming.py +331 -0
  13. supermemory/_types.py +260 -0
  14. supermemory/_utils/__init__.py +64 -0
  15. supermemory/_utils/_compat.py +45 -0
  16. supermemory/_utils/_datetime_parse.py +136 -0
  17. supermemory/_utils/_logs.py +25 -0
  18. supermemory/_utils/_proxy.py +65 -0
  19. supermemory/_utils/_reflection.py +42 -0
  20. supermemory/_utils/_resources_proxy.py +24 -0
  21. supermemory/_utils/_streams.py +12 -0
  22. supermemory/_utils/_sync.py +58 -0
  23. supermemory/_utils/_transform.py +457 -0
  24. supermemory/_utils/_typing.py +156 -0
  25. supermemory/_utils/_utils.py +421 -0
  26. supermemory/_version.py +4 -0
  27. supermemory/lib/.keep +4 -0
  28. supermemory/py.typed +0 -0
  29. supermemory/resources/__init__.py +75 -0
  30. supermemory/resources/connections.py +807 -0
  31. supermemory/resources/documents.py +830 -0
  32. supermemory/resources/memories.py +830 -0
  33. supermemory/resources/search.py +657 -0
  34. supermemory/resources/settings.py +278 -0
  35. supermemory/types/__init__.py +51 -0
  36. supermemory/types/add_response.py +13 -0
  37. supermemory/types/client_add_params.py +36 -0
  38. supermemory/types/client_profile_params.py +21 -0
  39. supermemory/types/connection_create_params.py +21 -0
  40. supermemory/types/connection_create_response.py +19 -0
  41. supermemory/types/connection_delete_by_id_response.py +11 -0
  42. supermemory/types/connection_delete_by_provider_params.py +15 -0
  43. supermemory/types/connection_delete_by_provider_response.py +11 -0
  44. supermemory/types/connection_get_by_id_response.py +25 -0
  45. supermemory/types/connection_get_by_tags_params.py +15 -0
  46. supermemory/types/connection_get_by_tags_response.py +25 -0
  47. supermemory/types/connection_import_params.py +15 -0
  48. supermemory/types/connection_import_response.py +7 -0
  49. supermemory/types/connection_list_documents_params.py +15 -0
  50. supermemory/types/connection_list_documents_response.py +29 -0
  51. supermemory/types/connection_list_params.py +15 -0
  52. supermemory/types/connection_list_response.py +29 -0
  53. supermemory/types/document_add_params.py +36 -0
  54. supermemory/types/document_add_response.py +13 -0
  55. supermemory/types/document_get_response.py +103 -0
  56. supermemory/types/document_list_params.py +52 -0
  57. supermemory/types/document_list_response.py +94 -0
  58. supermemory/types/document_update_params.py +55 -0
  59. supermemory/types/document_update_response.py +13 -0
  60. supermemory/types/document_upload_file_params.py +44 -0
  61. supermemory/types/document_upload_file_response.py +13 -0
  62. supermemory/types/memory_add_params.py +36 -0
  63. supermemory/types/memory_add_response.py +13 -0
  64. supermemory/types/memory_get_response.py +103 -0
  65. supermemory/types/memory_list_params.py +52 -0
  66. supermemory/types/memory_list_response.py +94 -0
  67. supermemory/types/memory_update_params.py +55 -0
  68. supermemory/types/memory_update_response.py +13 -0
  69. supermemory/types/memory_upload_file_params.py +44 -0
  70. supermemory/types/memory_upload_file_response.py +13 -0
  71. supermemory/types/profile_response.py +35 -0
  72. supermemory/types/search_documents_params.py +93 -0
  73. supermemory/types/search_documents_response.py +60 -0
  74. supermemory/types/search_execute_params.py +93 -0
  75. supermemory/types/search_execute_response.py +60 -0
  76. supermemory/types/search_memories_params.py +75 -0
  77. supermemory/types/search_memories_response.py +123 -0
  78. supermemory/types/setting_get_response.py +43 -0
  79. supermemory/types/setting_update_params.py +44 -0
  80. supermemory/types/setting_update_response.py +51 -0
  81. supermemory-3.7.0.dist-info/METADATA +447 -0
  82. supermemory-3.7.0.dist-info/RECORD +84 -0
  83. supermemory-3.7.0.dist-info/WHEEL +4 -0
  84. supermemory-3.7.0.dist-info/licenses/LICENSE +201 -0
supermemory/_models.py ADDED
@@ -0,0 +1,857 @@
+from __future__ import annotations
+
+import os
+import inspect
+import weakref
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
+from datetime import date, datetime
+from typing_extensions import (
+    List,
+    Unpack,
+    Literal,
+    ClassVar,
+    Protocol,
+    Required,
+    ParamSpec,
+    TypedDict,
+    TypeGuard,
+    final,
+    override,
+    runtime_checkable,
+)
+
+import pydantic
+from pydantic.fields import FieldInfo
+
+from ._types import (
+    Body,
+    IncEx,
+    Query,
+    ModelT,
+    Headers,
+    Timeout,
+    NotGiven,
+    AnyMapping,
+    HttpxRequestFiles,
+)
+from ._utils import (
+    PropertyInfo,
+    is_list,
+    is_given,
+    json_safe,
+    lru_cache,
+    is_mapping,
+    parse_date,
+    coerce_boolean,
+    parse_datetime,
+    strip_not_given,
+    extract_type_arg,
+    is_annotated_type,
+    is_type_alias_type,
+    strip_annotated_type,
+)
+from ._compat import (
+    PYDANTIC_V1,
+    ConfigDict,
+    GenericModel as BaseGenericModel,
+    get_args,
+    is_union,
+    parse_obj,
+    get_origin,
+    is_literal_type,
+    get_model_config,
+    get_model_fields,
+    field_get_default,
+)
+from ._constants import RAW_RESPONSE_HEADER
+
+if TYPE_CHECKING:
+    from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
+
+__all__ = ["BaseModel", "GenericModel"]
+
+_T = TypeVar("_T")
+_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel")
+
+P = ParamSpec("P")
+
+
+@runtime_checkable
+class _ConfigProtocol(Protocol):
+    allow_population_by_field_name: bool
+
+
+class BaseModel(pydantic.BaseModel):
+    if PYDANTIC_V1:
+
+        @property
+        @override
+        def model_fields_set(self) -> set[str]:
+            # a forwards-compat shim for pydantic v2
+            return self.__fields_set__  # type: ignore
+
+        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
+            extra: Any = pydantic.Extra.allow  # type: ignore
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(
+            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+        )
+
+    def to_dict(
+        self,
+        *,
+        mode: Literal["json", "python"] = "python",
+        use_api_names: bool = True,
+        exclude_unset: bool = True,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        warnings: bool = True,
+    ) -> dict[str, object]:
+        """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+
+        By default, fields that were not set by the API will not be included,
+        and keys will match the API response, *not* the property names from the model.
+
+        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
+        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).
+
+        Args:
+            mode:
+                If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`.
+                If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)`
+
+            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
+            exclude_unset: Whether to exclude fields that have not been explicitly set.
+            exclude_defaults: Whether to exclude fields that are set to their default value from the output.
+            exclude_none: Whether to exclude fields that have a value of `None` from the output.
+            warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2.
+        """
+        return self.model_dump(
+            mode=mode,
+            by_alias=use_api_names,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            warnings=warnings,
+        )
+
+    def to_json(
+        self,
+        *,
+        indent: int | None = 2,
+        use_api_names: bool = True,
+        exclude_unset: bool = True,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        warnings: bool = True,
+    ) -> str:
+        """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation).
+
+        By default, fields that were not set by the API will not be included,
+        and keys will match the API response, *not* the property names from the model.
+
+        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
+        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).
+
+        Args:
+            indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2`
+            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
+            exclude_unset: Whether to exclude fields that have not been explicitly set.
+            exclude_defaults: Whether to exclude fields that have the default value.
+            exclude_none: Whether to exclude fields that have a value of `None`.
+            warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2.
+        """
+        return self.model_dump_json(
+            indent=indent,
+            by_alias=use_api_names,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            warnings=warnings,
+        )
+
+    @override
+    def __str__(self) -> str:
+        # mypy complains about an invalid self arg
+        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"  # type: ignore[misc]
+
+    # Override the 'construct' method in a way that supports recursive parsing without validation.
+    # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
+    @classmethod
+    @override
+    def construct(  # pyright: ignore[reportIncompatibleMethodOverride]
+        __cls: Type[ModelT],
+        _fields_set: set[str] | None = None,
+        **values: object,
+    ) -> ModelT:
+        m = __cls.__new__(__cls)
+        fields_values: dict[str, object] = {}
+
+        config = get_model_config(__cls)
+        populate_by_name = (
+            config.allow_population_by_field_name
+            if isinstance(config, _ConfigProtocol)
+            else config.get("populate_by_name")
+        )
+
+        if _fields_set is None:
+            _fields_set = set()
+
+        model_fields = get_model_fields(__cls)
+        for name, field in model_fields.items():
+            key = field.alias
+            if key is None or (key not in values and populate_by_name):
+                key = name
+
+            if key in values:
+                fields_values[name] = _construct_field(value=values[key], field=field, key=key)
+                _fields_set.add(name)
+            else:
+                fields_values[name] = field_get_default(field)
+
+        extra_field_type = _get_extra_fields_type(__cls)
+
+        _extra = {}
+        for key, value in values.items():
+            if key not in model_fields:
+                parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
+
+                if PYDANTIC_V1:
+                    _fields_set.add(key)
+                    fields_values[key] = parsed
+                else:
+                    _extra[key] = parsed
+
+        object.__setattr__(m, "__dict__", fields_values)
+
+        if PYDANTIC_V1:
+            # init_private_attributes() does not exist in v2
+            m._init_private_attributes()  # type: ignore
+
+            # copied from Pydantic v1's `construct()` method
+            object.__setattr__(m, "__fields_set__", _fields_set)
+        else:
+            # these properties are copied from Pydantic's `model_construct()` method
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
+
+        return m
+
+    if not TYPE_CHECKING:
+        # type checkers incorrectly complain about this assignment
+        # because the type signatures are technically different
+        # although not in practice
+        model_construct = construct
+
+    if PYDANTIC_V1:
+        # we define aliases for some of the new pydantic v2 methods so
+        # that we can just document these methods without having to specify
+        # a specific pydantic version as some users may not know which
+        # pydantic version they are currently using
+
+        @override
+        def model_dump(
+            self,
+            *,
+            mode: Literal["json", "python"] | str = "python",
+            include: IncEx | None = None,
+            exclude: IncEx | None = None,
+            context: Any | None = None,
+            by_alias: bool | None = None,
+            exclude_unset: bool = False,
+            exclude_defaults: bool = False,
+            exclude_none: bool = False,
+            exclude_computed_fields: bool = False,
+            round_trip: bool = False,
+            warnings: bool | Literal["none", "warn", "error"] = True,
+            fallback: Callable[[Any], Any] | None = None,
+            serialize_as_any: bool = False,
+        ) -> dict[str, Any]:
+            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
+
+            Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+
+            Args:
+                mode: The mode in which `to_python` should run.
+                    If mode is 'json', the output will only contain JSON serializable types.
+                    If mode is 'python', the output may contain non-JSON-serializable Python objects.
+                include: A set of fields to include in the output.
+                exclude: A set of fields to exclude from the output.
+                context: Additional context to pass to the serializer.
+                by_alias: Whether to use the field's alias in the dictionary key if defined.
+                exclude_unset: Whether to exclude fields that have not been explicitly set.
+                exclude_defaults: Whether to exclude fields that are set to their default value.
+                exclude_none: Whether to exclude fields that have a value of `None`.
+                exclude_computed_fields: Whether to exclude computed fields.
+                    While this can be useful for round-tripping, it is usually recommended to use the dedicated
+                    `round_trip` parameter instead.
+                round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+                warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
+                    "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
+                fallback: A function to call when an unknown value is encountered. If not provided,
+                    a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
+                serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
+
+            Returns:
+                A dictionary representation of the model.
+            """
+            if mode not in {"json", "python"}:
+                raise ValueError("mode must be either 'json' or 'python'")
+            if round_trip != False:
+                raise ValueError("round_trip is only supported in Pydantic v2")
+            if warnings != True:
+                raise ValueError("warnings is only supported in Pydantic v2")
+            if context is not None:
+                raise ValueError("context is only supported in Pydantic v2")
+            if serialize_as_any != False:
+                raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
+            if exclude_computed_fields != False:
+                raise ValueError("exclude_computed_fields is only supported in Pydantic v2")
+            dumped = super().dict(  # pyright: ignore[reportDeprecated]
+                include=include,
+                exclude=exclude,
+                by_alias=by_alias if by_alias is not None else False,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+
+            return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
+
+        @override
+        def model_dump_json(
+            self,
+            *,
+            indent: int | None = None,
+            ensure_ascii: bool = False,
+            include: IncEx | None = None,
+            exclude: IncEx | None = None,
+            context: Any | None = None,
+            by_alias: bool | None = None,
+            exclude_unset: bool = False,
+            exclude_defaults: bool = False,
+            exclude_none: bool = False,
+            exclude_computed_fields: bool = False,
+            round_trip: bool = False,
+            warnings: bool | Literal["none", "warn", "error"] = True,
+            fallback: Callable[[Any], Any] | None = None,
+            serialize_as_any: bool = False,
+        ) -> str:
+            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
+
+            Generates a JSON representation of the model using Pydantic's `to_json` method.
+
+            Args:
+                indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+                include: Field(s) to include in the JSON output. Can take either a string or set of strings.
+                exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings.
+                by_alias: Whether to serialize using field aliases.
+                exclude_unset: Whether to exclude fields that have not been explicitly set.
+                exclude_defaults: Whether to exclude fields that have the default value.
+                exclude_none: Whether to exclude fields that have a value of `None`.
+                round_trip: Whether to use serialization/deserialization between JSON and class instance.
+                warnings: Whether to show any warnings that occurred during serialization.
+
+            Returns:
+                A JSON string representation of the model.
+            """
+            if round_trip != False:
+                raise ValueError("round_trip is only supported in Pydantic v2")
+            if warnings != True:
+                raise ValueError("warnings is only supported in Pydantic v2")
+            if context is not None:
+                raise ValueError("context is only supported in Pydantic v2")
+            if serialize_as_any != False:
+                raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
+            if ensure_ascii != False:
+                raise ValueError("ensure_ascii is only supported in Pydantic v2")
+            if exclude_computed_fields != False:
+                raise ValueError("exclude_computed_fields is only supported in Pydantic v2")
+            return super().json(  # type: ignore[reportDeprecated]
+                indent=indent,
+                include=include,
+                exclude=exclude,
+                by_alias=by_alias if by_alias is not None else False,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+
+
+def _construct_field(value: object, field: FieldInfo, key: str) -> object:
+    if value is None:
+        return field_get_default(field)
+
+    if PYDANTIC_V1:
+        type_ = cast(type, field.outer_type_)  # type: ignore
+    else:
+        type_ = field.annotation  # type: ignore
+
+    if type_ is None:
+        raise RuntimeError(f"Unexpected field type is None for {key}")
+
+    return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
+
+
+def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
+    if PYDANTIC_V1:
+        # TODO
+        return None
+
+    schema = cls.__pydantic_core_schema__
+    if schema["type"] == "model":
+        fields = schema["schema"]
+        if fields["type"] == "model-fields":
+            extras = fields.get("extras_schema")
+            if extras and "cls" in extras:
+                # mypy can't narrow the type
+                return extras["cls"]  # type: ignore[no-any-return]
+
+    return None
+
+
+def is_basemodel(type_: type) -> bool:
+    """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`"""
+    if is_union(type_):
+        for variant in get_args(type_):
+            if is_basemodel(variant):
+                return True
+
+        return False
+
+    return is_basemodel_type(type_)
+
+
+def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]:
+    origin = get_origin(type_) or type_
+    if not inspect.isclass(origin):
+        return False
+    return issubclass(origin, BaseModel) or issubclass(origin, GenericModel)
+
+
+def build(
+    base_model_cls: Callable[P, _BaseModelT],
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> _BaseModelT:
+    """Construct a BaseModel class without validation.
+
+    This is useful for cases where you need to instantiate a `BaseModel`
+    from an API response as this provides type-safe params which isn't supported
+    by helpers like `construct_type()`.
+
+    ```py
+    build(MyModel, my_field_a="foo", my_field_b=123)
+    ```
+    """
+    if args:
+        raise TypeError(
+            "Received positional arguments which are not supported; Keyword arguments must be used instead",
+        )
+
+    return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs))
+
+
+def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
+    """Loose coercion to the expected type with construction of nested values.
+
+    Note: the returned value from this function is not guaranteed to match the
+    given type.
+    """
+    return cast(_T, construct_type(value=value, type_=type_))
+
+
+def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
+    """Loose coercion to the expected type with construction of nested values.
+
+    If the given value does not match the expected type then it is returned as-is.
+    """
+
+    # store a reference to the original type we were given before we extract any inner
+    # types so that we can properly resolve forward references in `TypeAliasType` annotations
+    original_type = None
+
+    # we allow `object` as the input type because otherwise, passing things like
+    # `Literal['value']` will be reported as a type error by type checkers
+    type_ = cast("type[object]", type_)
+    if is_type_alias_type(type_):
+        original_type = type_  # type: ignore[unreachable]
+        type_ = type_.__value__  # type: ignore[unreachable]
+
+    # unwrap `Annotated[T, ...]` -> `T`
+    if metadata is not None and len(metadata) > 0:
+        meta: tuple[Any, ...] = tuple(metadata)
+    elif is_annotated_type(type_):
+        meta = get_args(type_)[1:]
+        type_ = extract_type_arg(type_, 0)
+    else:
+        meta = tuple()
+
+    # we need to use the origin class for any types that are subscripted generics
+    # e.g. Dict[str, object]
+    origin = get_origin(type_) or type_
+    args = get_args(type_)
+
+    if is_union(origin):
+        try:
+            return validate_type(type_=cast("type[object]", original_type or type_), value=value)
+        except Exception:
+            pass
+
+        # if the type is a discriminated union then we want to construct the right variant
+        # in the union, even if the data doesn't match exactly, otherwise we'd break code
+        # that relies on the constructed class types, e.g.
+        #
+        # class FooType:
+        #     kind: Literal['foo']
+        #     value: str
+        #
+        # class BarType:
+        #     kind: Literal['bar']
+        #     value: int
+        #
+        # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then
+        # we'd end up constructing `FooType` when it should be `BarType`.
+        discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta)
+        if discriminator and is_mapping(value):
+            variant_value = value.get(discriminator.field_alias_from or discriminator.field_name)
+            if variant_value and isinstance(variant_value, str):
+                variant_type = discriminator.mapping.get(variant_value)
+                if variant_type:
+                    return construct_type(type_=variant_type, value=value)
+
+        # if the data is not valid, use the first variant that doesn't fail while deserializing
+        for variant in args:
+            try:
+                return construct_type(value=value, type_=variant)
+            except Exception:
+                continue
+
+        raise RuntimeError(f"Could not convert data into a valid instance of {type_}")
+
+    if origin == dict:
+        if not is_mapping(value):
+            return value
+
+        _, items_type = get_args(type_)  # Dict[_, items_type]
+        return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}
+
+    if (
+        not is_literal_type(type_)
+        and inspect.isclass(origin)
+        and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel))
+    ):
+        if is_list(value):
+            return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]
+
+        if is_mapping(value):
+            if issubclass(type_, BaseModel):
+                return type_.construct(**value)  # type: ignore[arg-type]
+
+            return cast(Any, type_).construct(**value)
+
+    if origin == list:
+        if not is_list(value):
+            return value
+
+        inner_type = args[0]  # List[inner_type]
+        return [construct_type(value=entry, type_=inner_type) for entry in value]
+
+    if origin == float:
+        if isinstance(value, int):
+            coerced = float(value)
+            if coerced != value:
+                return value
+            return coerced
+
+        return value
+
+    if type_ == datetime:
+        try:
+            return parse_datetime(value)  # type: ignore
+        except Exception:
+            return value
+
+    if type_ == date:
+        try:
+            return parse_date(value)  # type: ignore
+        except Exception:
+            return value
+
+    return value
+
+
+@runtime_checkable
+class CachedDiscriminatorType(Protocol):
+    __discriminator__: DiscriminatorDetails
+
+
+DISCRIMINATOR_CACHE: weakref.WeakKeyDictionary[type, DiscriminatorDetails] = weakref.WeakKeyDictionary()
+
+
+class DiscriminatorDetails:
+    field_name: str
+    """The name of the discriminator field in the variant class, e.g.
+
+    ```py
+    class Foo(BaseModel):
+        type: Literal['foo']
+    ```
+
+    Will result in field_name='type'
+    """
+
+    field_alias_from: str | None
+    """The name of the discriminator field in the API response, e.g.
+
+    ```py
+    class Foo(BaseModel):
+        type: Literal['foo'] = Field(alias='type_from_api')
+    ```
+
+    Will result in field_alias_from='type_from_api'
+    """
+
+    mapping: dict[str, type]
+    """Mapping of discriminator value to variant type, e.g.
+
+    {'foo': FooVariant, 'bar': BarVariant}
+    """
+
+    def __init__(
+        self,
+        *,
+        mapping: dict[str, type],
+        discriminator_field: str,
+        discriminator_alias: str | None,
+    ) -> None:
+        self.mapping = mapping
+        self.field_name = discriminator_field
+        self.field_alias_from = discriminator_alias
+
+
+def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None:
+    cached = DISCRIMINATOR_CACHE.get(union)
+    if cached is not None:
+        return cached
+
+    discriminator_field_name: str | None = None
+
+    for annotation in meta_annotations:
+        if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None:
+            discriminator_field_name = annotation.discriminator
+            break
+
+    if not discriminator_field_name:
+        return None
+
+    mapping: dict[str, type] = {}
+    discriminator_alias: str | None = None
+
+    for variant in get_args(union):
+        variant = strip_annotated_type(variant)
+        if is_basemodel_type(variant):
+            if PYDANTIC_V1:
+                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                if not field_info:
+                    continue
+
+                # Note: if one variant defines an alias then they all should
+                discriminator_alias = field_info.alias
+
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
+                        if isinstance(entry, str):
+                            mapping[entry] = variant
+            else:
+                field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                if not field:
+                    continue
+
+                # Note: if one variant defines an alias then they all should
+                discriminator_alias = field.get("serialization_alias")
+
+                field_schema = field["schema"]
+
+                if field_schema["type"] == "literal":
+                    for entry in cast("LiteralSchema", field_schema)["expected"]:
+                        if isinstance(entry, str):
+                            mapping[entry] = variant
+
+    if not mapping:
+        return None
+
+    details = DiscriminatorDetails(
+        mapping=mapping,
+        discriminator_field=discriminator_field_name,
+        discriminator_alias=discriminator_alias,
+    )
+    DISCRIMINATOR_CACHE.setdefault(union, details)
+    return details
+
+
+def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None:
+    schema = model.__pydantic_core_schema__
+    if schema["type"] == "definitions":
+        schema = schema["schema"]
+
+    if schema["type"] != "model":
+        return None
+
+    schema = cast("ModelSchema", schema)
+    fields_schema = schema["schema"]
+    if fields_schema["type"] != "model-fields":
+        return None
+
+    fields_schema = cast("ModelFieldsSchema", fields_schema)
+    field = fields_schema["fields"].get(field_name)
+    if not field:
+        return None
+
+    return cast("ModelField", field)  # pyright: ignore[reportUnnecessaryCast]
+
+
+def validate_type(*, type_: type[_T], value: object) -> _T:
+    """Strict validation that the given value matches the expected type"""
+    if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel):
+        return cast(_T, parse_obj(type_, value))
+
+    return cast(_T, _validate_non_model_type(type_=type_, value=value))
+
+
+def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
+    """Add a pydantic config for the given type.
+
+    Note: this is a no-op on Pydantic v1.
+    """
+    setattr(typ, "__pydantic_config__", config)  # noqa: B010
+
+
+# our use of subclassing here causes weirdness for type checkers,
+# so we just pretend that we don't subclass
+if TYPE_CHECKING:
+    GenericModel = BaseModel
+else:
+
+    class GenericModel(BaseGenericModel, BaseModel):
+        pass
+
+
+if not PYDANTIC_V1:
+    from pydantic import TypeAdapter as _TypeAdapter
+
+    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
+
+    if TYPE_CHECKING:
+        from pydantic import TypeAdapter
+    else:
+        TypeAdapter = _CachedTypeAdapter
+
+    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
+        return TypeAdapter(type_).validate_python(value)
+
+elif not TYPE_CHECKING:  # TODO: condition is weird
+
+    class RootModel(GenericModel, Generic[_T]):
+        """Used as a placeholder to easily convert runtime types to a Pydantic format
+        to provide validation.
+
+        For example:
+        ```py
+        validated = RootModel[int](__root__="5").__root__
+        # validated: 5
+        ```
+        """
+
+        __root__: _T
+
+    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
+        model = _create_pydantic_model(type_).validate(value)
+        return cast(_T, model.__root__)
+
+    def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
+        return RootModel[type_]  # type: ignore
+
+
+class FinalRequestOptionsInput(TypedDict, total=False):
+    method: Required[str]
+    url: Required[str]
+    params: Query
+    headers: Headers
+    max_retries: int
+    timeout: float | Timeout | None
+    files: HttpxRequestFiles | None
+    idempotency_key: str
+    json_data: Body
+    extra_json: AnyMapping
+    follow_redirects: bool
+
+
+@final
+class FinalRequestOptions(pydantic.BaseModel):
+    method: str
+    url: str
+    params: Query = {}
+    headers: Union[Headers, NotGiven] = NotGiven()
+    max_retries: Union[int, NotGiven] = NotGiven()
+    timeout: Union[float, Timeout, None, NotGiven] = NotGiven()
+    files: Union[HttpxRequestFiles, None] = None
+    idempotency_key: Union[str, None] = None
+    post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
+    follow_redirects: Union[bool, None] = None
+
+    # It should be noted that we cannot use `json` here as that would override
+    # a BaseModel method in an incompatible fashion.
+    json_data: Union[Body, None] = None
+    extra_json: Union[AnyMapping, None] = None
+
+    if PYDANTIC_V1:
+
+        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
+            arbitrary_types_allowed: bool = True
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
+
+    def get_max_retries(self, max_retries: int) -> int:
+        if isinstance(self.max_retries, NotGiven):
+            return max_retries
+        return self.max_retries
+
+    def _strip_raw_response_header(self) -> None:
+        if not is_given(self.headers):
+            return
+
+        if self.headers.get(RAW_RESPONSE_HEADER):
+            self.headers = {**self.headers}
+            self.headers.pop(RAW_RESPONSE_HEADER)
+
+    # override the `construct` method so that we can run custom transformations.
+    # this is necessary as we don't want to do any actual runtime type checking
+    # (which means we can't use validators) but we do want to ensure that `NotGiven`
+    # values are not present
+    #
+    # type ignore required because we're adding explicit types to `**values`
+    @classmethod
+    def construct(  # type: ignore
+        cls,
+        _fields_set: set[str] | None = None,
+        **values: Unpack[FinalRequestOptionsInput],
+    ) -> FinalRequestOptions:
+        kwargs: dict[str, Any] = {
+            # we unconditionally call `strip_not_given` on any value
+            # as it will just ignore any non-mapping types
+            key: strip_not_given(value)
+            for key, value in values.items()
+        }
+        if PYDANTIC_V1:
+            return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        return super().model_construct(_fields_set, **kwargs)
+
+    if not TYPE_CHECKING:
+        # type checkers incorrectly complain about this assignment
+        model_construct = construct
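
The module above is easier to follow with a short usage sketch. The block below is an editorial illustration rather than part of the wheel: it assumes Pydantic v2 and an installed `supermemory` package, imports the private `supermemory._models` module shown in this diff, and uses an invented `Item` model with a made-up `fooBar` field.

```py
from typing import Optional
from datetime import datetime

import pydantic

# Private module from the diff above, imported purely for illustration.
from supermemory._models import BaseModel, construct_type, validate_type


class Item(BaseModel):
    # The API key is "fooBar"; the Python attribute is foo_bar.
    foo_bar: bool = pydantic.Field(alias="fooBar")
    created_at: Optional[datetime] = None


# construct_type() coerces loosely: aliases are resolved, datetimes are parsed,
# unknown keys are kept as extras (extra="allow"), and mismatched data is
# returned as-is rather than raising.
item = construct_type(
    type_=Item,
    value={"fooBar": True, "created_at": "2024-03-22T18:11:19Z", "unknown": 1},
)
assert isinstance(item, Item) and isinstance(item.created_at, datetime)

# to_dict()/to_json() default to API names and to fields that were actually set.
print(item.to_dict())  # roughly {'fooBar': True, 'created_at': datetime(...), 'unknown': 1}
print(item.to_json())  # indented JSON (indent=2 by default)

# validate_type() is the strict counterpart and raises on bad data.
try:
    validate_type(type_=int, value="not a number")
except pydantic.ValidationError:
    pass
```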
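
Another, more speculative sketch: `FinalRequestOptions.construct()` overrides Pydantic's `construct` so that `NotGiven` sentinels are removed from mapping values via `strip_not_given()`, a helper defined in `supermemory/_utils` and not shown in this diff. The expected behaviour below is inferred from that helper's name and call site, and the `/v3/documents` path and `json_data` payload are invented for the example.

```py
from supermemory._types import NotGiven
from supermemory._models import FinalRequestOptions

# construct() runs strip_not_given() over every value, so the NotGiven entry
# inside json_data should be dropped before the (unvalidated) options object
# is built.
opts = FinalRequestOptions.construct(
    method="post",
    url="/v3/documents",  # hypothetical path, for illustration only
    json_data={"content": "hello", "containerTag": NotGiven()},
)
print(opts.method, opts.url)
print(opts.json_data)  # expected: {'content': 'hello'}
```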