thds.attrs-utils 1.6.20251107140602 (py3-none-any.whl)

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (36)
  1. thds/attrs_utils/__init__.py +4 -0
  2. thds/attrs_utils/cattrs/__init__.py +29 -0
  3. thds/attrs_utils/cattrs/converter.py +182 -0
  4. thds/attrs_utils/cattrs/errors.py +46 -0
  5. thds/attrs_utils/defaults.py +31 -0
  6. thds/attrs_utils/docs.py +75 -0
  7. thds/attrs_utils/empty.py +146 -0
  8. thds/attrs_utils/isinstance/__init__.py +4 -0
  9. thds/attrs_utils/isinstance/check.py +107 -0
  10. thds/attrs_utils/isinstance/registry.py +13 -0
  11. thds/attrs_utils/isinstance/util.py +106 -0
  12. thds/attrs_utils/jsonschema/__init__.py +11 -0
  13. thds/attrs_utils/jsonschema/constructors.py +90 -0
  14. thds/attrs_utils/jsonschema/jsonschema.py +326 -0
  15. thds/attrs_utils/jsonschema/str_formats.py +109 -0
  16. thds/attrs_utils/jsonschema/util.py +94 -0
  17. thds/attrs_utils/py.typed +0 -0
  18. thds/attrs_utils/random/__init__.py +3 -0
  19. thds/attrs_utils/random/attrs.py +57 -0
  20. thds/attrs_utils/random/builtin.py +103 -0
  21. thds/attrs_utils/random/collection.py +41 -0
  22. thds/attrs_utils/random/gen.py +134 -0
  23. thds/attrs_utils/random/optional.py +13 -0
  24. thds/attrs_utils/random/registry.py +30 -0
  25. thds/attrs_utils/random/tuple.py +24 -0
  26. thds/attrs_utils/random/union.py +33 -0
  27. thds/attrs_utils/random/util.py +55 -0
  28. thds/attrs_utils/recursion.py +48 -0
  29. thds/attrs_utils/registry.py +40 -0
  30. thds/attrs_utils/type_cache.py +110 -0
  31. thds/attrs_utils/type_recursion.py +168 -0
  32. thds/attrs_utils/type_utils.py +180 -0
  33. thds_attrs_utils-1.6.20251107140602.dist-info/METADATA +230 -0
  34. thds_attrs_utils-1.6.20251107140602.dist-info/RECORD +36 -0
  35. thds_attrs_utils-1.6.20251107140602.dist-info/WHEEL +5 -0
  36. thds_attrs_utils-1.6.20251107140602.dist-info/top_level.txt +1 -0
@@ -0,0 +1,4 @@
+ from thds.core import meta
+
+ __version__ = meta.get_version(__name__)
+ __commit__ = meta.read_metadata(__name__).git_commit
@@ -0,0 +1,29 @@
+ __all__ = [
+     "DEFAULT_RESTRICTED_CONVERSIONS",
+     "DEFAULT_JSON_CONVERTER",
+     "DEFAULT_STRUCTURE_HOOKS",
+     "DEFAULT_UNSTRUCTURE_HOOKS_JSON",
+     "PREJSON_UNSTRUCTURE_COLLECTION_OVERRIDES",
+     "DisallowedConversionError",
+     "default_converter",
+     "format_cattrs_classval_error",
+     "setup_converter",
+ ]
+
+ from .converter import (
+     DEFAULT_RESTRICTED_CONVERSIONS,
+     DEFAULT_STRUCTURE_HOOKS,
+     DEFAULT_UNSTRUCTURE_HOOKS_JSON,
+     PREJSON_UNSTRUCTURE_COLLECTION_OVERRIDES,
+     default_converter,
+     setup_converter,
+ )
+ from .errors import DisallowedConversionError, format_cattrs_classval_error
+
+ DEFAULT_JSON_CONVERTER = setup_converter(
+     default_converter(),
+     struct_hooks=DEFAULT_STRUCTURE_HOOKS,
+     unstruct_hooks=DEFAULT_UNSTRUCTURE_HOOKS_JSON,
+     deterministic=True,
+     strict_enums=False,
+ )
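A minimal usage sketch for the DEFAULT_JSON_CONVERTER assembled above; the Event class and the expected output are illustrative only and not part of the package:

    import datetime
    import typing as ty

    import attrs

    from thds.attrs_utils.cattrs import DEFAULT_JSON_CONVERTER


    @attrs.define
    class Event:  # hypothetical record type, for illustration only
        name: str
        tags: ty.FrozenSet[str]
        when: datetime.date


    raw = DEFAULT_JSON_CONVERTER.unstructure(
        Event("launch", frozenset({"b", "a"}), datetime.date(2024, 1, 2))
    )
    # with the hooks above, sets should come out as sorted lists and dates as ISO strings:
    # {"name": "launch", "tags": ["a", "b"], "when": "2024-01-02"}

    event = DEFAULT_JSON_CONVERTER.structure(raw, Event)  # round-trips back to an Event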
@@ -0,0 +1,182 @@
+ import datetime
+ import decimal
+ import typing as ty
+ from functools import partial
+ from typing import Any, Callable, Sequence, Tuple, Type
+
+ from cattr.converters import Converter, GenConverter
+
+ from ..type_utils import is_literal_type, is_set_type, literal_base
+ from . import errors
+
+ T = ty.TypeVar("T")
+ Struct = Callable[[Any, Type[T]], T]
+ UnStruct = Callable[[Any], T]
+ StructFactory = Callable[[Any], Struct[T]]
+
+
+ PREJSON_UNSTRUCTURE_COLLECTION_OVERRIDES = {
+     ty.Set: list,
+     ty.FrozenSet: list,
+     ty.AbstractSet: list,
+     ty.MutableSet: list,
+     ty.Sequence: list,
+     ty.DefaultDict: dict,
+     ty.OrderedDict: dict,
+     ty.Mapping: dict,
+     ty.MutableMapping: dict,
+ }
+
+
+ # hooks
+
+
+ def _date_from_isoformat(dt: ty.Union[str, datetime.date], t: Type[datetime.date]) -> datetime.date:
+     if isinstance(dt, datetime.datetime):  # check most specific type first
+         return dt.date()
+     if isinstance(dt, datetime.date):
+         return dt
+     return t.fromisoformat(str(dt))
+
+
+ def _datetime_from_isoformat(
+     dt: ty.Union[str, datetime.date], t: Type[datetime.datetime]
+ ) -> datetime.datetime:
+     if isinstance(dt, datetime.datetime):
+         return dt
+     if isinstance(dt, datetime.date):
+         return datetime.datetime(dt.year, dt.month, dt.day)
+     return t.fromisoformat(str(dt))
+
+
+ def unstructure_set(converter: Converter, items: ty.AbstractSet) -> ty.List:
+     sorted_items = sorted(items)
+     return list(map(converter.unstructure, sorted_items))
+
+
+ def _structure_as(converter: Converter, actual_type: Type[T], obj: Any, _: Any) -> T:
+     return converter.structure(obj, actual_type)
+
+
+ def structure_as(converter: Converter, actual_type: Type[T]) -> Struct[T]:
+     """Define a custom structuring function for cattrs which works by deferring to another type.
+     This is useful for some types which cattrs does not natively support, but which otherwise have
+     implementations using types that it *does* support, e.g. NewType(str) or a Union thereof."""
+     return partial(_structure_as, converter, actual_type)
+
+
+ def _structure_literal_as_base_type(converter: Converter, type_: Type) -> Struct:
+     base_type = literal_base(type_)
+     return structure_as(converter, base_type)
+
+
+ def structure_literal_as_base_type(converter: Converter) -> StructFactory:
+     return partial(_structure_literal_as_base_type, converter)
+
+
+ def _structure_restricted_conversion(
+     allowed_types: Tuple[Type, ...], parse: Callable[[Any], T], value: Any, _: Type[T]
+ ) -> T:
+     if type(value) in allowed_types:
+         return parse(value)
+     raise errors.DisallowedConversionError(type(value), _)
+
+
+ def structure_restricted_conversion(
+     allowed_types: Tuple[Type, ...], parse: Callable[[Any], T]
+ ) -> Struct[T]:
+     """This hook ensures that the more dubious behaviors of cattrs are forbidden; only values which
+     can be very unambiguously interpreted as the target type are allowed to be structured as such."""
+     return partial(_structure_restricted_conversion, allowed_types, parse)
+
+
+ # default hooks
+
+ DEFAULT_STRUCTURE_HOOKS: Sequence[Tuple[Type, Struct]] = (
+     (datetime.date, _date_from_isoformat),
+     (datetime.datetime, _datetime_from_isoformat),
+ )
+ DEFAULT_UNSTRUCTURE_HOOKS_JSON: Sequence[Tuple[Type, UnStruct]] = (
+     (datetime.date, datetime.date.isoformat),
+     (datetime.datetime, datetime.datetime.isoformat),
+     (decimal.Decimal, float),
+ )
+ DEFAULT_RESTRICTED_CONVERSIONS: Sequence[Tuple[Tuple[Type, ...], Type, UnStruct]] = (
+     ((int, str), str, str),
+     ((int, str, float, decimal.Decimal), float, float),
+     ((bool, str, int), int, int),
+     ((bool,), bool, bool),
+ )
+
+
+ # default converters
+
+
+ def setup_converter(
+     converter: Converter,
+     struct_hooks: Sequence[Tuple[Type[T], Struct[T]]] = DEFAULT_STRUCTURE_HOOKS,
+     unstruct_hooks: Sequence[Tuple[Type[T], UnStruct[T]]] = (),
+     custom_structure_as: Sequence[Tuple[Type, Type]] = (),
+     restricted_conversions: Sequence[
+         Tuple[Tuple[Type, ...], Type[T], UnStruct[T]]
+     ] = DEFAULT_RESTRICTED_CONVERSIONS,
+     deterministic: bool = True,
+     strict_enums: bool = False,
+ ) -> Converter:
+     """Performs side effects on your converter, registering various hooks.
+
+     :param converter: the cattrs Converter to update
+     :param struct_hooks: custom structuring hooks as tuples of (type, callable(value, type) -> value)
+     :param unstruct_hooks: custom unstructuring hooks as tuples of (type, callable(value) -> value)
+     :param custom_structure_as: tuples of (type1, type2). When these are passed, the converter will
+       structure values into type1 using the pre-defined behavior for type2. Useful for specifying that
+       some complex type can really be treated in a simpler way (e.g. that a union over several string
+       literal enums can just be structured as a string).
+     :param restricted_conversions: tuples of ((in_type, ...), out_type, parser(value) -> out_type).
+       When an input is to be structured into the out_type, it will only be allowed to do so if its
+       concrete type is in the in_type tuple (strict inclusion, not an `isinstance` check, to avoid
+       confusion from subtle subclass relations such as bool being a subclass of int). The default
+       includes a reasonable set of restrictions that are *not* respected by cattrs out of the box.
+     :param deterministic: when True, an attempt is made to produce deterministic output on
+       unstructuring. For example, sets will be sorted prior to being unstructured (but not after,
+       since in general unstructured values such as dicts may not be orderable). This requires all
+       types contained within sets in your data model to be orderable. Defaults to True.
+     :param strict_enums: when True, on structuring into a Literal type, an error will be raised when
+       the input value is not one of the expected values. This is the default behavior of cattrs as of
+       version 22. You may not wish to be so strict when accepting data from a source with potential
+       data quality issues, preferring perhaps to clean up any unexpected values after structuring, in
+       which case this should be left False, the default value.
+     :return: the modified converter with hooks registered.
+     """
+     for target_type, struct in struct_hooks:
+         converter.register_structure_hook(target_type, struct)
+     for target_type, unstruct in unstruct_hooks:
+         converter.register_unstructure_hook(target_type, unstruct)
+     for input_types, target_type, parser in restricted_conversions:
+         converter.register_structure_hook(
+             target_type, structure_restricted_conversion(input_types, parser)
+         )
+     for actual_type, override_type in custom_structure_as:
+         converter.register_structure_hook(actual_type, structure_as(converter, override_type))
+     if deterministic:
+         converter.register_unstructure_hook_func(is_set_type, partial(unstructure_set, converter))
+     if not strict_enums:
+         converter.register_structure_hook_factory(
+             is_literal_type, structure_literal_as_base_type(converter)
+         )
+     return converter
+
+
+ def default_converter(
+     *,
+     forbid_extra_keys: bool = True,
+     prefer_attrib_converters: bool = True,
+     unstruct_collection_overrides: ty.Mapping[
+         Type, Callable[[], ty.Collection]
+     ] = PREJSON_UNSTRUCTURE_COLLECTION_OVERRIDES,  # type: ignore [assignment]
+ ) -> Converter:
+     return GenConverter(
+         unstruct_collection_overrides=unstruct_collection_overrides,
+         prefer_attrib_converters=prefer_attrib_converters,
+         forbid_extra_keys=forbid_extra_keys,
+     )
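A sketch of how the restricted conversions registered by setup_converter behave, using only names exported above; the concrete values are illustrative:

    from thds.attrs_utils.cattrs import (
        DisallowedConversionError,
        default_converter,
        setup_converter,
    )

    conv = setup_converter(default_converter())  # defaults: restricted conversions, lenient Literals

    assert conv.structure("3.5", float) == 3.5  # str is an allowed input type for float
    try:
        conv.structure(None, str)  # bare cattrs would happily produce the string "None"
    except DisallowedConversionError as err:
        print(err)  # "Refusing to structure value of type <class 'NoneType'> to type <class 'str'>"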
@@ -0,0 +1,46 @@
+ from functools import singledispatch
+ from typing import Callable, Optional, Type
+
+ import cattrs
+ import cattrs.errors
+ import cattrs.v
+
+
+ class DisallowedConversionError(TypeError):
+     """Raised when a value is not allowed to be converted to a certain type."""
+
+     def __init__(self, value_type: Type, target_type: Type):
+         super().__init__(value_type, target_type)
+         self.value_type = value_type
+         self.target_type = target_type
+
+     def __str__(self):
+         return f"Refusing to structure value of type {self.value_type} to type {self.target_type}"
+
+
+ @singledispatch
+ def format_cattrs_exception(exc: BaseException, type_: Optional[Type]) -> str:
+     """Default formatter for cattrs exceptions. Extensible via `singledispatch` to handle other exception types."""
+     return cattrs.v.format_exception(exc, type_)
+
+
+ @format_cattrs_exception.register(DisallowedConversionError)
+ def _format_disallowed_conversion_error(exc: DisallowedConversionError, type_: Optional[Type]) -> str:
+     """Override cattrs' default exception formatting for our more strict approach to type conversion.
+     Cattrs is extremely lax - e.g. `None` is permitted for `str` (structures to `"None"`) or `bool`
+     (structures to `False`), which we prevent with custom hooks and signal with a custom exception."""
+     return f"invalid value for type, expected {exc.target_type.__name__}, got {exc.value_type.__name__}"
+
+
+ @format_cattrs_exception.register(cattrs.errors.ClassValidationError)
+ def format_cattrs_classval_error(
+     exc: cattrs.errors.ClassValidationError,
+     format_exception: Callable[[BaseException, Optional[Type]], str] = format_cattrs_exception,
+ ) -> str:
+     """Format a cattrs ClassValidationError into a human-readable string."""
+
+     sep = "\n "
+     return (
+         f"{exc.message}:{sep}"
+         f"{sep.join(reversed(cattrs.transform_error(exc, format_exception=format_exception)))}"
+     )
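A sketch of the error formatting above, assuming detailed validation is enabled (the default for recent cattrs GenConverters); the Point class is hypothetical:

    import attrs
    import cattrs.errors

    from thds.attrs_utils.cattrs import DEFAULT_JSON_CONVERTER, format_cattrs_classval_error


    @attrs.define
    class Point:  # hypothetical
        x: int
        y: int


    try:
        DEFAULT_JSON_CONVERTER.structure({"x": 1}, Point)  # "y" is missing
    except cattrs.errors.ClassValidationError as exc:
        print(format_cattrs_classval_error(exc))  # one human-readable line per underlying error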
@@ -0,0 +1,31 @@
+ import typing as ty
+
+ import attrs
+ from attr._make import Factory  # cannot import the actual class via attrs
+
+
+ def has_value_default(default: ty.Any) -> bool:
+     if isinstance(default, Factory):
+         # a takes_self Factory requires an instance to create the default; therefore
+         # there is no single, known default value to extract
+         return not default.takes_self
+     return default is not attrs.NOTHING
+
+
+ def extract_factory_default(default: ty.Any) -> ty.Any:
+     if isinstance(default, Factory):
+         assert not default.takes_self
+         return default.factory()
+
+     return default
+
+
+ def attrs_value_defaults(attrs_cls: ty.Type) -> ty.Dict[str, ty.Any]:
+     """Returns a dictionary populated only by attribute names and their default values -
+     and only for attributes which _have_ simple default values.
+     """
+     return {
+         name: extract_factory_default(field.default)
+         for name, field in attrs.fields_dict(attrs_cls).items()
+         if has_value_default(field.default)
+     }
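A sketch of attrs_value_defaults on a hypothetical class: fields without defaults are omitted, plain defaults are returned as-is, and (non-takes_self) factories are invoked:

    import typing as ty

    import attrs

    from thds.attrs_utils.defaults import attrs_value_defaults


    @attrs.define
    class Job:  # hypothetical
        name: str  # no default -> omitted
        retries: int = 3  # plain default -> returned as-is
        tags: ty.List[str] = attrs.field(factory=list)  # factory default -> factory() is called


    assert attrs_value_defaults(Job) == {"retries": 3, "tags": []}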
@@ -0,0 +1,75 @@
+ """Utils for collecting structured documentation from class docstrings"""
+
+ import inspect
+ from collections import ChainMap
+ from itertools import chain
+ from typing import Callable, Dict, List, Literal, Optional, Set, Type
+
+ from docstring_parser import Docstring, DocstringMeta, DocstringParam, Style, parse
+
+ from .type_utils import bases
+
+ DocCombineSpec = Literal["first", "join"]
+
+
+ def get_record_class_fields(cls: Type) -> Set[str]:
+     """Get the names of the attributes of a class"""
+
+     if not inspect.isclass(cls):
+         raise ValueError("Input should be a class")
+
+     return set(inspect.signature(cls).parameters.keys())
+
+
+ def record_class_docs(
+     cls: Type,
+     filter_bases: Optional[Callable[[Type], bool]] = None,
+     style: Style = Style.AUTO,
+     combine_docs: DocCombineSpec = "first",
+     join_sep: str = "\n\n",
+     require_complete: bool = False,
+ ) -> Docstring:
+     base_clss = [c for c in bases(cls, filter_bases) if c is not object]
+
+     docs = [parse(c.__doc__, style=style) for c in base_clss if c.__doc__]
+     params = dict(ChainMap(*({param.arg_name: param for param in doc.params} for doc in docs)))
+
+     if require_complete:
+         missing = get_record_class_fields(cls) - set(params.keys())
+         if missing:
+             raise ValueError(f"Missing docstring params for {cls.__name__}: {missing}")
+
+     if combine_docs == "first":
+         short_description = next((doc.short_description for doc in docs if doc.short_description), None)
+         long_description = next((doc.long_description for doc in docs if doc.long_description), None)
+     else:
+         short_description = join_sep.join(
+             doc.short_description for doc in reversed(docs) if doc.short_description
+         )
+         long_description = join_sep.join(
+             doc.long_description for doc in reversed(docs) if doc.long_description
+         )
+
+     combined_meta: Dict[Type[DocstringMeta], List[DocstringMeta]] = {
+         DocstringParam: list(params.values())
+     }
+     for doc in docs:
+         metas: Dict[Type[DocstringMeta], List[DocstringMeta]] = {}
+         for meta in doc.meta:
+             meta_type = type(meta)
+             if meta_type not in combined_meta:
+                 metas.setdefault(meta_type, []).append(meta)
+         for meta_type, meta_list in metas.items():
+             combined_meta[meta_type] = meta_list
+
+     style_: Optional[Style] = next((doc.style for doc in docs if doc.style), None)
+     combined_doc = Docstring(style_)
+     combined_doc.short_description = short_description
+     combined_doc.long_description = long_description
+     combined_doc.meta = list(chain.from_iterable(combined_meta.values()))
+
+     bad_params = {param.arg_name for param in combined_doc.params if not param.description}
+     if bad_params:
+         raise ValueError(f"Parameters {bad_params} have no description")
+
+     return combined_doc
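A sketch of record_class_docs on a small hypothetical hierarchy. It assumes type_utils.bases (not shown in this diff) yields the class itself along with its ancestors:

    import attrs

    from thds.attrs_utils.docs import record_class_docs


    @attrs.define
    class Base:  # hypothetical
        """A base record.

        :param shared: documented once, on the base class
        """

        shared: int


    @attrs.define
    class Child(Base):  # hypothetical
        """A child record.

        :param extra: documented only here
        """

        extra: str


    doc = record_class_docs(Child)
    # with combine_docs="first", the child's short description should win,
    # and params from both docstrings should be merged:
    print(doc.short_description)
    print({p.arg_name: p.description for p in doc.params})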
@@ -0,0 +1,146 @@
+ import datetime
+ import enum
+ import inspect
+ import typing as ty
+ import uuid
+ import warnings
+ from functools import partial
+
+ import attr
+
+ from . import recursion, type_recursion, type_utils
+ from .registry import Registry
+ from .type_recursion import Constructor
+
+ T = ty.TypeVar("T")
+
+
+ def const(value: T) -> Constructor[T]:
+     return lambda: value
+
+
+ def empty_optional(empty_gen, type_: ty.Type[T]) -> Constructor[ty.Optional[T]]:
+     return const(None)
+
+
+ def _construct_record(
+     type_: ty.Type[T],
+     signature: inspect.Signature,
+     defaults: ty.Mapping[str, Constructor[ty.Any]],
+     *args: ty.Any,
+     **kwargs: ty.Any,
+ ) -> T:
+     bound = signature.bind_partial(*args, **kwargs)
+     defaults_ = {k: v() for k, v in defaults.items() if k not in bound.arguments}
+     bound.arguments.update(defaults_)
+     return type_(*bound.args, **bound.kwargs)
+
+
+ def _record_constructor(
+     type_: ty.Type[T], defaults: ty.Mapping[str, Constructor[ty.Any]]
+ ) -> Constructor[T]:
+     signature = inspect.signature(type_)
+     f = partial(
+         _construct_record, type_, signature, defaults
+     )  # using partial application to allow picklability
+     try:
+         repl_signature = inspect.Signature(
+             [
+                 (
+                     inspect.Parameter(
+                         p.name, p.kind, default=defaults[p.name](), annotation=p.annotation
+                     )
+                     if p.name in defaults
+                     else p
+                 )
+                 for p in signature.parameters.values()
+             ],
+             return_annotation=signature.return_annotation,
+         )
+     except Exception as e:
+         warnings.warn(
+             f"Couldn't generate signature for {type_} with new defaults {defaults}; using original signature for documentation: {e}"
+         )
+         repl_signature = signature
+     f.__signature__ = repl_signature  # type: ignore[attr-defined]
+     # ^ allows tab completion in REPLs
+     return f
+
+
+ def empty_attrs(empty_gen, type_: ty.Type[attr.AttrsInstance]) -> Constructor[attr.AttrsInstance]:
+     fields = attr.fields(type_)
+     defaults = {f.name: empty_gen(f.type) for f in fields if f.default is attr.NOTHING}
+     # keep the original defaults and default factories
+     return _record_constructor(type_, defaults)
+
+
+ def empty_namedtuple(empty_gen, type_: ty.Type[ty.NamedTuple]) -> Constructor[ty.NamedTuple]:
+     defaults = {
+         name: empty_gen(t)
+         for name, t in type_.__annotations__.items()
+         if name not in type_._field_defaults
+     }
+     # keep the original defaults
+     return _record_constructor(type_, defaults)
+
+
+ def empty_collection(empty_gen, type_: ty.Type[ty.Collection]) -> Constructor[ty.Collection]:
+     return type_utils.concrete_constructor(type_)
+
+
+ def empty_literal(empty_gen, type_: ty.Type[T]) -> Constructor[T]:
+     return const(ty.get_args(type_)[0])
+
+
+ def empty_enum(empty_gen, type_: ty.Type[enum.Enum]) -> Constructor[enum.Enum]:
+     return const(next(iter(type_)))
+
+
+ def empty_union(empty_gen, type_: ty.Type[T]) -> Constructor[T]:
+     return empty_gen(ty.get_args(type_)[0])  # construct the first type in the union
+
+
+ def _construct_tuple(
+     type_: ty.Callable[..., ty.Tuple], defaults: ty.Sequence[Constructor[ty.Any]], *args: ty.Any
+ ) -> ty.Tuple:
+     return type_([*args, *(f() for f in defaults[len(args) :])])
+
+
+ def empty_tuple(empty_gen, type_: ty.Type[ty.Tuple]) -> Constructor[ty.Tuple]:
+     args = ty.get_args(type_)
+     base = type_utils.concrete_constructor(type_)
+     defaults = [empty_gen(arg) for arg in args]
+     return partial(_construct_tuple, base, defaults)  # using partial application to allow picklability
+
+
+ unknown_type: "recursion.RecF[ty.Type, [], Constructor]" = recursion.value_error(
+     "Don't know how to generate an 'empty' value for type {!r}; "
+     "use {__name__}.empty_gen.register to register an empty generator",
+     TypeError,
+ )
+
+ REGISTRY: Registry[ty.Type, Constructor] = Registry(
+     [
+         (type(None), const(None)),
+         *[(type_, type_) for type_ in [int, bool, float, str, bytes, bytearray]],
+         (datetime.date, const(datetime.date.min)),
+         (datetime.datetime, const(datetime.datetime.min)),
+         (uuid.UUID, uuid.uuid4),
+     ]
+ )
+
+ empty_gen: "type_recursion.ConstructorFactory[[]]" = type_recursion.ConstructorFactory(
+     REGISTRY,
+     cached=True,
+     attrs=empty_attrs,
+     namedtuple=empty_namedtuple,
+     literal=empty_literal,
+     enum=empty_enum,
+     optional=empty_optional,
+     union=empty_union,
+     tuple=empty_tuple,
+     variadic_tuple=empty_collection,
+     mapping=empty_collection,
+     collection=empty_collection,
+     otherwise=unknown_type,
+ )
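A sketch of empty_gen on a hypothetical attrs class, assuming the ConstructorFactory (defined in type_recursion, which is not shown here) maps a type to a zero-argument Constructor as the registrations above suggest:

    import typing as ty

    import attrs

    from thds.attrs_utils.empty import empty_gen


    @attrs.define
    class Settings:  # hypothetical
        name: str
        threshold: float
        labels: ty.Tuple[str, ...] = ("default",)


    make_empty = empty_gen(Settings)  # build a Constructor for the type...
    settings = make_empty()  # ...then call it to produce a zero-valued instance
    # likely result: Settings(name="", threshold=0.0, labels=("default",))
    #   - str/float fall back to their registered constructors (str(), float())
    #   - the existing attrs default for `labels` is kept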
@@ -0,0 +1,4 @@
+ """Interface for creating a callable from a data type, which accepts a value and validates that the
+ value is an instance of the type, recursively and respecting generic type parameters"""
+
+ from .check import instancecheck, isinstance  # noqa: F401
@@ -0,0 +1,107 @@
+ from typing import Any, NamedTuple, Type, cast, get_args, get_origin
+
+ import attrs
+
+ from .. import recursion, type_utils
+ from ..type_recursion import TypeRecursion
+ from . import util
+ from .registry import ISINSTANCE_REGISTRY
+ from .util import Check
+
+ _UNKNOWN_TYPE = (
+     "Don't know how to check for instances of {!r}; use "
+     f"{__name__}.instancecheck.register to register an instance check"
+ )
+
+ unknown_type: "recursion.RecF[Type, [], Check]" = recursion.value_error(_UNKNOWN_TYPE, TypeError)
+
+
+ def simple_isinstance(instancecheck, type_: Type):
+     return util.simple_isinstance(type_)
+
+
+ def check_literal(instancecheck, type_: Type):
+     return util.check_in_values(get_args(type_))
+
+
+ def check_attrs(instancecheck, type_: Type[attrs.AttrsInstance]):
+     # this _should_ typecheck according to my understanding of attrs.AttrsInstance, but it does not
+     fields = attrs.fields(type_)  # type: ignore [misc]
+     names = tuple(f.name for f in fields)
+     types = (f.type for f in fields)
+     return util.check_attrs(
+         util.simple_isinstance(type_),
+         names,
+         *map(instancecheck, types),
+     )
+
+
+ def check_namedtuple(instancecheck, type_: Type[NamedTuple]):
+     field_types = tuple(type_.__annotations__[name] for name in type_._fields)
+     return util.check_typed_tuple(util.simple_isinstance(type_), *map(instancecheck, field_types))
+
+
+ def check_mapping(instancecheck, type_: Type):
+     args = get_args(type_)
+     if len(args) != 2:
+         return unknown_type(instancecheck, type_)
+     kt, vt = args
+     k_check = instancecheck(kt)
+     v_check = instancecheck(vt)
+     org = cast(Type, get_origin(type_))
+     return util.check_mapping(util.simple_isinstance(org), k_check, v_check)
+
+
+ def check_tuple(instancecheck, type_: Type):
+     org = cast(Type, get_origin(type_))
+     args = get_args(type_)
+     return util.check_typed_tuple(util.simple_isinstance(org), *map(instancecheck, args))
+
+
+ def check_variadic_tuple(instancecheck, type_: Type):
+     org = cast(Type, get_origin(type_))
+     args = get_args(type_)
+     v_check = instancecheck(args[0])
+     return util.check_collection(util.simple_isinstance(org), v_check)
+
+
+ def check_collection(instancecheck, type_: Type):
+     args = get_args(type_)
+     if len(args) != 1:
+         return unknown_type(instancecheck, type_)
+     org = cast(Type, get_origin(type_))
+     v_check = instancecheck(args[0])
+     return util.check_collection(util.simple_isinstance(org), v_check)
+
+
+ def check_union(instancecheck, type_: Type):
+     return util.check_any(*map(instancecheck, get_args(type_)))
+
+
+ def check_any(instancecheck, type_: Type):
+     if isinstance(type_, type):
+         # simple concrete type
+         return util.simple_isinstance(type_)
+     return unknown_type(instancecheck, type_)
+
+
+ instancecheck: "TypeRecursion[[], Check]" = TypeRecursion(
+     ISINSTANCE_REGISTRY,
+     # just use isinstance on builtins - we don't want to check bytes/str as collections, for instance.
+     # also an optimization to put this check first since these are the most common types.
+     first=(type_utils.is_builtin_type, simple_isinstance),
+     literal=check_literal,
+     enum=simple_isinstance,
+     attrs=check_attrs,
+     namedtuple=check_namedtuple,
+     union=check_union,
+     mapping=check_mapping,
+     collection=check_collection,
+     tuple=check_tuple,
+     variadic_tuple=check_variadic_tuple,
+     otherwise=check_any,
+ )
+
+
+ def isinstance(obj: Any, type_: Type) -> bool:
+     return instancecheck(type_)(obj)
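A sketch of the recursive isinstance defined above, assuming the helpers in isinstance/util.py and type_utils (not shown in this diff) implement the element-wise checks their names suggest:

    import typing as ty

    from thds.attrs_utils.isinstance import instancecheck, isinstance as deep_isinstance

    assert deep_isinstance({"a": [1, 2]}, ty.Dict[str, ty.List[int]])
    assert not deep_isinstance({"a": ["1", "2"]}, ty.Dict[str, ty.List[int]])

    # or build the Check once and reuse it
    is_str_list = instancecheck(ty.List[str])
    assert is_str_list(["x", "y"])
    assert not is_str_list(["x", 1])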
@@ -0,0 +1,13 @@
+ from typing import Any, Type
+
+ from ..registry import Registry
+ from .util import Check
+
+
+ def true(value: Any) -> bool:
+     return True
+
+
+ ISINSTANCE_REGISTRY: Registry[Type, Check] = Registry(
+     {Any: true, object: true}  # type: ignore [dict-item]
+ )