thds.attrs-utils 1.6.20251103154246 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thds/attrs_utils/__init__.py +4 -0
- thds/attrs_utils/cattrs/__init__.py +29 -0
- thds/attrs_utils/cattrs/converter.py +182 -0
- thds/attrs_utils/cattrs/errors.py +46 -0
- thds/attrs_utils/defaults.py +31 -0
- thds/attrs_utils/docs.py +75 -0
- thds/attrs_utils/empty.py +146 -0
- thds/attrs_utils/isinstance/__init__.py +4 -0
- thds/attrs_utils/isinstance/check.py +107 -0
- thds/attrs_utils/isinstance/registry.py +13 -0
- thds/attrs_utils/isinstance/util.py +106 -0
- thds/attrs_utils/jsonschema/__init__.py +11 -0
- thds/attrs_utils/jsonschema/constructors.py +90 -0
- thds/attrs_utils/jsonschema/jsonschema.py +326 -0
- thds/attrs_utils/jsonschema/str_formats.py +109 -0
- thds/attrs_utils/jsonschema/util.py +94 -0
- thds/attrs_utils/py.typed +0 -0
- thds/attrs_utils/random/__init__.py +3 -0
- thds/attrs_utils/random/attrs.py +57 -0
- thds/attrs_utils/random/builtin.py +103 -0
- thds/attrs_utils/random/collection.py +41 -0
- thds/attrs_utils/random/gen.py +134 -0
- thds/attrs_utils/random/optional.py +13 -0
- thds/attrs_utils/random/registry.py +30 -0
- thds/attrs_utils/random/tuple.py +24 -0
- thds/attrs_utils/random/union.py +33 -0
- thds/attrs_utils/random/util.py +55 -0
- thds/attrs_utils/recursion.py +48 -0
- thds/attrs_utils/registry.py +40 -0
- thds/attrs_utils/type_cache.py +110 -0
- thds/attrs_utils/type_recursion.py +168 -0
- thds/attrs_utils/type_utils.py +180 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/METADATA +230 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/RECORD +36 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/WHEEL +5 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/top_level.txt +1 -0
thds/attrs_utils/jsonschema/util.py
@@ -0,0 +1,94 @@
+import functools
+import typing as ty
+from collections import ChainMap, defaultdict
+
+from ..type_cache import TypeCache
+from .constructors import INTEGER, NUMBER, TYPE
+
+ENUM, DEFAULT, ANYOF = (
+    "enum",
+    "default",
+    "anyOf",
+)
+DEFS, REF = "$defs", "$ref"
+TITLE = "title"
+
+JSONSchema = ty.Dict[str, ty.Any]
+JSON = ty.Union[int, float, bool, str, None, ty.List["JSON"], ty.Dict[str, "JSON"]]
+ToJSON = ty.Callable[[ty.Any], JSON]
+
+REF_TEMPLATE = f"#/{DEFS}/{{}}"
+
+
+class JSONSchemaTypeCache(TypeCache[JSONSchema]):
+    def to_defs(self):
+        return {self.type_names[type_id]: schema for type_id, schema in self.schemas.items()}
+
+    def to_ref(self, type_: ty.Type):
+        return {REF: REF_TEMPLATE.format(self.name_of(type_))}
+
+
+def check_cache(func):
+    """Decorator to cause a jsonschema generator to cache full schema definitions and return jsonschema
+    refs to the generated schemas. Useful for complex/custom types with names so as not to duplicate
+    sub-schemas throughout the main schema"""
+
+    @functools.wraps(func)
+    def new_func(gen_jsonschema, type_: ty.Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON):
+        if type_ not in type_cache:
+            type_name = type_cache.name_of(type_)
+            schema = {TITLE: type_name, **func(gen_jsonschema, type_, type_cache, serializer)}
+            type_cache[type_] = schema
+
+        return type_cache.to_ref(type_)
+
+    return new_func
+
+
+def _flatten_anyof_schemas(schemas: ty.Iterable[JSONSchema]) -> ty.Iterator[JSONSchema]:
+    for schema in schemas:
+        if ANYOF in schema and len(schema) == 1:
+            yield from _flatten_anyof_schemas(schema[ANYOF])
+        else:
+            yield schema
+
+
+def _merge_schemas_anyof(schemas: ty.Iterable[JSONSchema]) -> JSONSchema:
+    enum_values = []
+    schemas_by_type = defaultdict(list)
+    other_schemas = []
+    number = "_"
+    for schema in _flatten_anyof_schemas(schemas):
+        if REF in schema:
+            other_schemas.append(schema)
+        elif ENUM in schema and len(schema) == 1:
+            # enums can simply be combined
+            enum_values.extend(schema[ENUM])
+        elif TYPE in schema:
+            types = [schema[TYPE]] if isinstance(schema[TYPE], str) else schema[TYPE]
+            for t in types:
+                # integer and number share keywords; combine into the same list
+                if t in (INTEGER, NUMBER):
+                    t = number
+                schemas_by_type[t].append(schema)
+        else:
+            other_schemas.append(schema)
+
+    # any type with only one schema can be keyword-merged with all other such schemas
+    simple = []
+    complex = []
+    for _type, schemas in schemas_by_type.items():
+        if len(schemas) == 1:
+            simple.append(schemas[0].copy())
+        else:
+            # we'll transfer these unchanged
+            complex.extend(schemas)
+
+    simple_types = [schema.pop(TYPE) for schema in simple]
+    simple_type = simple_types[0] if len(simple_types) == 1 else simple_types
+    multi_type_schema = {TYPE: simple_type, **ChainMap(*simple)} if simple else None
+    enum_schema = {ENUM: enum_values} if enum_values else None
+    all_schemas = [schema for schema in (enum_schema, multi_type_schema) if schema is not None]
+    all_schemas.extend(complex)
+    all_schemas.extend(other_schemas)
+    return all_schemas[0] if len(all_schemas) == 1 else {ANYOF: all_schemas}

File without changes

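A minimal sketch of what the anyOf merging above produces, assuming this hunk is thds/attrs_utils/jsonschema/util.py and that INTEGER, NUMBER and TYPE in jsonschema/constructors.py are the standard JSON Schema keyword strings; the expected value is read off the code above, not taken from the package's tests or docs.

    from thds.attrs_utils.jsonschema.util import _merge_schemas_anyof

    merged = _merge_schemas_anyof(
        [
            {"enum": ["a"]},
            {"enum": ["b"]},
            {"type": "integer", "minimum": 0},
            {"type": "string"},
        ]
    )
    # single-keyword enum schemas are combined, and types that appear in only one
    # schema are keyword-merged into a single multi-type schema:
    # {"anyOf": [{"enum": ["a", "b"]}, {"type": ["integer", "string"], "minimum": 0}]}
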
thds/attrs_utils/random/attrs.py
@@ -0,0 +1,57 @@
+from typing import Callable, Dict, Sequence, Type, TypeVar
+
+import attrs
+
+from ..registry import Registry
+from ..type_utils import is_namedtuple_type
+from .util import Gen, T
+
+AT = TypeVar("AT", bound=attrs.AttrsInstance)
+
+
+class GenRecordByFieldNameRegistry(Registry[Type, Dict[str, Gen]]):
+    # this class def only exists because of the mypy error "Type variable is unbound"
+    pass
+
+
+CUSTOM_ATTRS_BY_FIELD_REGISTRY = GenRecordByFieldNameRegistry()
+
+
+def random_attrs(
+    constructor: Callable[..., AT], arg_gens: Sequence[Gen[T]], kwarg_gens: Dict[str, Gen[T]]
+) -> AT:
+    return constructor(*(gen() for gen in arg_gens), **{name: gen() for name, gen in kwarg_gens.items()})
+
+
+def _register_random_gen_by_field(type_: Type[T], **gens: Gen):
+    if attrs.has(type_):
+        fields = attrs.fields(type_)  # type: ignore
+        names = [f.name for f in fields]
+    elif is_namedtuple_type(type_):
+        names = type_._fields  # type: ignore [attr-defined]
+    else:
+        raise TypeError(f"Don't know how to interpret {type_} as a record type")
+
+    unknown_names = set(gens).difference(names)
+    assert not unknown_names, f"Unknown fields: {unknown_names}"
+
+    CUSTOM_ATTRS_BY_FIELD_REGISTRY.register(type_, gens)
+
+
+def register_random_gen_by_field(**gens: Gen):
+    """Register random generators for the fields of a record type by specifying random generators by name
+
+    Example:
+
+        from typing import NamedTuple
+
+        @register_random_gen_by_field(foo=lambda: 42)
+        class Bar(NamedTuple):
+            foo: int
+    """
+
+    def decorator(type_: Type[AT]) -> Type[AT]:
+        _register_random_gen_by_field(type_, **gens)
+        return type_
+
+    return decorator

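A usage sketch for the decorator above (assuming this hunk is thds/attrs_utils/random/attrs.py); the class and generators are illustrative, not taken from the package:

    import attrs

    from thds.attrs_utils.random.attrs import register_random_gen_by_field

    @register_random_gen_by_field(age=lambda: 42)
    @attrs.define
    class User:
        name: str
        age: int

    # the override for `age` is recorded in CUSTOM_ATTRS_BY_FIELD_REGISTRY; fields
    # without an override (here `name`) are later generated from their type annotations.
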
thds/attrs_utils/random/builtin.py
@@ -0,0 +1,103 @@
+import datetime
+import random
+import string
+from functools import partial
+
+from .util import Gen
+
+MIN_INT = -(2**32)
+MAX_INT = 2**32 - 1
+MIN_FLOAT = float(MIN_INT)
+MAX_FLOAT = float(MAX_INT)
+MIN_DATE = datetime.date(1970, 1, 1)
+MAX_DATE = datetime.date.today()
+STR_MAX_LEN = 32
+
+
+def random_null():
+    return None
+
+
+def random_int(lo: int = MIN_INT, hi: int = MAX_INT) -> int:
+    return random.randint(lo, hi)
+
+
+def random_int_gen(lo: int = MIN_INT, hi: int = MAX_INT) -> Gen[int]:
+    return partial(random.randint, lo, hi)
+
+
+def random_float(lo: float = MIN_FLOAT, hi: float = MAX_FLOAT) -> float:
+    return random.uniform(lo, hi)
+
+
+def random_float_gen(lo: float = MIN_FLOAT, hi: float = MAX_FLOAT) -> Gen[float]:
+    return partial(random.uniform, lo, hi)
+
+
+default_str_len_gen = random_int_gen(0, STR_MAX_LEN)
+
+
+def random_str(len_gen: Gen[int] = default_str_len_gen, chars: str = string.printable) -> str:
+    return "".join(random.choices(chars, k=len_gen()))
+
+
+def random_str_gen(len_gen: Gen[int] = default_str_len_gen, chars: str = string.printable) -> Gen[str]:
+    return partial(random_str, len_gen, chars)
+
+
+def random_bool(true_rate: float = 0.5) -> bool:
+    return random.random() < true_rate
+
+
+def random_bool_gen(true_rate: float = 0.5) -> Gen[bool]:
+    return partial(random_bool, true_rate)
+
+
+default_date_offset_gen = random_int_gen(0, (MAX_DATE - MIN_DATE).days)
+
+
+def random_date(
+    earliest: datetime.date = MIN_DATE,
+    offset: Gen[int] = default_date_offset_gen,
+) -> datetime.date:
+    return earliest + datetime.timedelta(days=offset())
+
+
+def random_date_gen(
+    earliest: datetime.date,
+    offset: Gen[int] = default_date_offset_gen,
+) -> Gen[datetime.date]:
+    return partial(random_date, earliest, offset)
+
+
+def random_date_gen_from_range(earliest: datetime.date, latest: datetime.date):
+    return random_date_gen(earliest, random_int_gen(0, (latest - earliest).days))
+
+
+default_datetime_offset_gen = random_float_gen(0.0, (MAX_DATE - MIN_DATE).total_seconds())
+
+
+def random_datetime(
+    earliest: datetime.date = MIN_DATE,
+    offset: Gen[float] = default_datetime_offset_gen,
+) -> datetime.date:
+    return earliest + datetime.timedelta(seconds=offset())
+
+
+def random_datetime_gen(
+    earliest: datetime.datetime,
+    offset: Gen[float] = default_datetime_offset_gen,
+) -> Gen[datetime.date]:
+    return partial(random_datetime, earliest, offset)
+
+
+def random_datetime_gen_from_range(earliest: datetime.datetime, latest: datetime.datetime):
+    return random_datetime_gen(earliest, random_float_gen(0.0, (latest - earliest).total_seconds()))
+
+
+def random_bytes(len_gen: Gen[int] = default_str_len_gen):
+    return bytes(random.randint(0, 255) for _ in range(len_gen()))
+
+
+def random_bytearray(len_gen: Gen[int] = default_str_len_gen):
+    return bytearray(random.randint(0, 255) for _ in range(len_gen()))

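A short sketch of composing these primitives (assuming this hunk is thds/attrs_utils/random/builtin.py):

    import datetime
    import string

    from thds.attrs_utils.random.builtin import (
        random_date_gen_from_range,
        random_int_gen,
        random_str_gen,
    )

    # a generator of 8- to 12-character lowercase identifiers
    ident_gen = random_str_gen(random_int_gen(8, 12), string.ascii_lowercase)

    # a generator of dates somewhere in 2024
    date_gen = random_date_gen_from_range(datetime.date(2024, 1, 1), datetime.date(2024, 12, 31))

    print(ident_gen(), date_gen())
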
thds/attrs_utils/random/collection.py
@@ -0,0 +1,41 @@
+import itertools
+from functools import partial
+from typing import Callable, Collection, Iterable, Mapping, Tuple, TypeVar, cast
+
+from .builtin import random_int_gen
+from .util import Gen
+
+T = TypeVar("T")
+U = TypeVar("U")
+C = TypeVar("C", bound=Collection)
+M = TypeVar("M", bound=Mapping)
+KV = Tuple[T, U]
+
+COLLECTION_MIN_LEN = 0
+COLLECTION_MAX_LEN = 32
+
+default_len_gen = random_int_gen(COLLECTION_MIN_LEN, COLLECTION_MAX_LEN)
+
+
+def random_collection(
+    constructor: Callable[[Iterable[T]], C],
+    value_gen: Gen[Iterable[T]],
+    len_gen: Gen[int] = default_len_gen,
+) -> C:
+    return constructor(itertools.islice(value_gen(), len_gen()))
+
+
+def random_collection_gen(
+    constructor: Callable[[Iterable[T]], C],
+    value_gen: Gen[Iterable[T]],
+    len_gen: Gen[int] = default_len_gen,
+) -> Gen[C]:
+    return partial(random_collection, constructor, value_gen, len_gen)
+
+
+def random_mapping_gen(
+    constructor: Callable[[Iterable[KV]], M],
+    kv_gen: Gen[Iterable[KV]],
+    len_gen: Gen[int] = default_len_gen,
+) -> Gen[M]:
+    return cast(Gen[M], partial(random_collection, constructor, kv_gen, len_gen))

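A sketch of building collection generators from element generators (assuming this hunk is thds/attrs_utils/random/collection.py; repeat_gen and juxtapose_gen appear further below in thds/attrs_utils/random/util.py):

    from thds.attrs_utils.random.builtin import random_int, random_str
    from thds.attrs_utils.random.collection import random_collection_gen, random_mapping_gen
    from thds.attrs_utils.random.util import juxtapose_gen, repeat_gen

    # lists of random ints, with lengths drawn from default_len_gen
    int_list_gen = random_collection_gen(list, repeat_gen(random_int))

    # dicts of random str keys to random int values
    str_int_dict_gen = random_mapping_gen(dict, repeat_gen(juxtapose_gen(random_str, random_int)))

    print(int_list_gen(), str_int_dict_gen())
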
thds/attrs_utils/random/gen.py
@@ -0,0 +1,134 @@
+import collections
+import enum
+from functools import partial
+from typing import DefaultDict, Iterable, Tuple, Type, cast, get_args
+
+import attr
+
+from .. import recursion, type_recursion, type_utils
+from . import attrs, collection, optional, tuple, union
+from .registry import GEN_REGISTRY
+from .util import Gen, T, U, choice_gen, juxtapose_gen, repeat_gen
+
+_UNKNOWN_TYPE = (
+    "Don't know how to generate random instances of {!r}; use "
+    f"{__name__}.random_gen.register to register a random generator"
+)
+
+unknown_type: "recursion.RecF[Type, [], Gen]" = recursion.value_error(_UNKNOWN_TYPE, TypeError)
+
+
+def gen_literal(random_gen, type_: Type[T]) -> Gen[T]:
+    values = cast(Tuple[T], get_args(type_))
+    return choice_gen(values)
+
+
+def gen_enum(random_gen, type_: Type[T]) -> Gen[T]:
+    assert issubclass(type_, enum.Enum)
+    return choice_gen(list(type_))
+
+
+def gen_attrs(random_gen, type_: Type[attr.AttrsInstance]) -> Gen[attr.AttrsInstance]:
+    fields = attr.fields(type_)  # type: ignore [arg-type,misc]
+    kw_only_fields = [f for f in fields if f.kw_only]
+    overrides = attrs.CUSTOM_ATTRS_BY_FIELD_REGISTRY.get(type_)
+
+    def random_gen_(field: attr.Attribute):
+        if overrides:
+            return overrides.get(field.name) or random_gen(field.type)
+        return random_gen(field.type)
+
+    if kw_only_fields:
+        pos_fields = [f for f in fields if not f.kw_only]
+        arg_gens = list(map(random_gen_, pos_fields))
+        kwarg_gens = {f.name: random_gen_(f) for f in kw_only_fields}
+        return partial(attrs.random_attrs, type_, arg_gens, kwarg_gens)
+    else:
+        field_gens = map(random_gen_, fields)
+        return tuple.random_namedtuple_gen(type_, *field_gens)
+
+
+def gen_namedtuple(random_gen, type_: Type[T]) -> Gen[T]:
+    field_names = type_._fields  # type: ignore [attr-defined]
+    field_types = (type_.__annotations__[name] for name in field_names)  # type: ignore [attr-defined]
+    overrides = attrs.CUSTOM_ATTRS_BY_FIELD_REGISTRY.get(type_)
+    if overrides:
+        return tuple.random_namedtuple_gen(
+            type_, *(overrides.get(name) or random_gen(t) for name, t in zip(field_names, field_types))
+        )
+    else:
+        return tuple.random_namedtuple_gen(type_, *map(random_gen, field_types))
+
+
+def gen_tuple(random_gen, type_: Type[T]) -> Gen[T]:
+    args = get_args(type_)
+    if not args:
+        raise TypeError(_UNKNOWN_TYPE.format(type_))
+    return cast(Gen[T], tuple.random_tuple_gen(*map(random_gen, args)))
+
+
+def gen_variadic_tuple(random_gen, type_: Type[T]) -> Gen[T]:
+    args = get_args(type_)
+    cons = type_utils.concrete_constructor(type_)
+    v_gen = random_gen(args[0])
+    return cast(Gen[T], collection.random_collection_gen(cons, repeat_gen(v_gen)))
+
+
+def gen_optional(random_gen, type_: Type[T]) -> Gen[T]:
+    # more specialized than the union case just below
+    return optional.random_optional_gen(
+        random_gen(type_utils.unwrap_optional(type_))
+    )  # type: ignore [return-value]
+
+
+def gen_union(random_gen, type_: Type[T]) -> Gen[T]:
+    return union.random_uniform_union_gen(*map(random_gen, get_args(type_)))
+
+
+def _construct_defaultdict(d: DefaultDict[T, U], kvs: Iterable[Tuple[T, U]]) -> DefaultDict[T, U]:
+    for k, v in kvs:
+        d[k] = v
+    return d
+
+
+def gen_mapping(random_gen, type_: Type[collection.M]) -> Gen[collection.M]:
+    # more specific than the collection case below
+    args = get_args(type_)
+    if len(args) != 2:
+        raise TypeError(_UNKNOWN_TYPE.format(type_))
+    kt, vt = args
+    v_gen = random_gen(vt)
+    kv_gen = juxtapose_gen(random_gen(kt), v_gen)
+
+    cons = (
+        partial(_construct_defaultdict, collections.defaultdict(v_gen))
+        if type_utils.get_origin(type_) is type_utils.get_origin(DefaultDict)
+        else type_utils.concrete_constructor(type_)  # type: ignore [arg-type]
+    )
+    return collection.random_mapping_gen(cons, repeat_gen(kv_gen))  # type: ignore [arg-type]
+
+
+def gen_collection(random_gen, type_: Type[collection.C]) -> Gen[collection.C]:
+    # most generic collection case
+    args = get_args(type_)
+    if len(args) != 1:
+        raise TypeError(_UNKNOWN_TYPE.format(type_))
+    cons = type_utils.concrete_constructor(type_)
+    v_gen = random_gen(args[0])
+    return cast(Gen[collection.C], collection.random_collection_gen(cons, repeat_gen(v_gen)))
+
+
+random_gen: "type_recursion.ConstructorFactory[[]]" = type_recursion.ConstructorFactory(
+    GEN_REGISTRY,
+    attrs=gen_attrs,
+    namedtuple=gen_namedtuple,
+    literal=gen_literal,
+    enum=gen_enum,
+    optional=gen_optional,
+    union=gen_union,
+    tuple=gen_tuple,
+    variadic_tuple=gen_variadic_tuple,
+    mapping=gen_mapping,
+    collection=gen_collection,
+    otherwise=unknown_type,
+)

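An end-to-end sketch of the type-driven generator above, assuming this hunk is thds/attrs_utils/random/gen.py and assuming a ConstructorFactory instance is called with a type and returns a Gen, as the internal random_gen(field.type) calls suggest (ConstructorFactory itself lives in type_recursion.py, which is not shown here):

    import enum
    from typing import List, Optional

    import attrs

    from thds.attrs_utils.random.gen import random_gen

    class Color(enum.Enum):
        RED = "red"
        BLUE = "blue"

    @attrs.define
    class Pet:
        name: str
        color: Color
        nicknames: List[str]
        age: Optional[int] = None

    pet_gen = random_gen(Pet)  # a Gen[Pet] built from the attrs field types
    print(pet_gen())           # a fresh random Pet on every call
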
thds/attrs_utils/random/optional.py
@@ -0,0 +1,13 @@
+from functools import partial
+from typing import Optional
+
+from .builtin import random_bool
+from .util import Gen, T
+
+
+def random_optional(gen: Gen[T], nonnull: Gen[bool] = random_bool) -> Optional[T]:
+    return gen() if nonnull() else None
+
+
+def random_optional_gen(gen: Gen[T], nonnull: Gen[bool] = random_bool) -> Gen[Optional[T]]:
+    return partial(random_optional, gen, nonnull)

thds/attrs_utils/random/registry.py
@@ -0,0 +1,30 @@
+import datetime
+from typing import Type
+
+from ..registry import Registry
+from . import builtin
+from .util import Gen, T
+
+
+class GenTypeRegistry(Registry[Type[T], Gen[T]]):
+    # This class def only exists because of the mypy error "Type variable is unbound".
+    # We use the typevar here however to assert that the registered generators generate instances of the
+    # types they're registered to
+    pass
+
+
+# we define the registry here before passing it to the recursion so that the recursion can reference it
+# after it's already registered implementations in the various type-specific sibling modules to this one
+GEN_REGISTRY: GenTypeRegistry = GenTypeRegistry(
+    [
+        (type(None), builtin.random_null),
+        (int, builtin.random_int),
+        (bool, builtin.random_bool),
+        (float, builtin.random_float),
+        (str, builtin.random_str),
+        (bytes, builtin.random_bytes),
+        (bytearray, builtin.random_bytearray),
+        (datetime.date, builtin.random_date),
+        (datetime.datetime, builtin.random_datetime),
+    ]
+)

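A sketch of registering a generator for a type the registry doesn't cover (assuming this hunk is thds/attrs_utils/random/registry.py); Registry.register, shown at the bottom of this diff, supports both a direct and a decorator form:

    import random
    import uuid

    from thds.attrs_utils.random.registry import GEN_REGISTRY

    # direct form: UUIDs will now be generated by uuid.uuid4
    GEN_REGISTRY.register(uuid.UUID, uuid.uuid4)

    # decorator form
    @GEN_REGISTRY.register(complex)
    def random_complex() -> complex:
        return complex(random.uniform(-1, 1), random.uniform(-1, 1))
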
thds/attrs_utils/random/tuple.py
@@ -0,0 +1,24 @@
+from functools import partial
+from typing import Callable, Sequence, Tuple, TypeVar
+
+from .util import Gen, T
+
+N = TypeVar("N")
+
+
+def random_tuple(gens: Sequence[Gen[T]]) -> Tuple[T, ...]:
+    # can't type this very well for heterogeneous tuples without going a little crazy.
+    # most of the time it's being generated from types in an automated way so it's not a worry.
+    return tuple(gen() for gen in gens)
+
+
+def random_tuple_gen(*gens: Gen[T]) -> Gen[Tuple[T, ...]]:
+    return partial(random_tuple, gens)
+
+
+def random_namedtuple(constructor: Callable[..., N], gens: Sequence[Gen[T]]) -> N:
+    return constructor(*random_tuple(gens))
+
+
+def random_namedtuple_gen(constructor: Callable[..., N], *gens: Gen[T]) -> Gen[N]:
+    return partial(random_namedtuple, constructor, gens)

thds/attrs_utils/random/union.py
@@ -0,0 +1,33 @@
+import random
+from functools import partial
+from itertools import accumulate
+from operator import add
+from typing import Sequence, Tuple
+
+from .util import Gen, T
+
+
+def random_weighted_union(gens: Sequence[Gen[T]], cum_weights: Sequence[float]):
+    """optimized to use the cum_weights arg of random.choices"""
+    gen = random.choices(gens, cum_weights=cum_weights, k=1)[0]
+    return gen()
+
+
+def random_weighted_union_gen(*weighted_gens: Tuple[Gen[T], float]) -> Gen[T]:
+    # can't type this very well for heterogeneous tuples without going a little crazy.
+    # most of the time it's being generated from types in an automated way so it's not a worry.
+    gens, weights = zip(*weighted_gens)
+    total = sum(weights)
+    cum_weights = tuple(w / total for w in accumulate(weights, add))
+    return partial(random_weighted_union, gens, cum_weights)
+
+
+def random_uniform_union(gens: Sequence[Gen[T]]) -> T:
+    gen = random.choice(gens)
+    return gen()
+
+
+def random_uniform_union_gen(*gens: Gen[T]) -> Gen[T]:
+    # can't type this very well for heterogeneous tuples without going a little crazy.
+    # most of the time it's being generated from types in an automated way so it's not a worry.
+    return partial(random_uniform_union, gens)

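A sketch of the weighted union generator (assuming this hunk is thds/attrs_utils/random/union.py); each argument is a (generator, weight) pair:

    from thds.attrs_utils.random.builtin import random_int, random_str
    from thds.attrs_utils.random.union import random_weighted_union_gen

    # draw an int about 90% of the time and a str about 10% of the time
    int_or_str = random_weighted_union_gen((random_int, 0.9), (random_str, 0.1))
    print([int_or_str() for _ in range(5)])
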
thds/attrs_utils/random/util.py
@@ -0,0 +1,55 @@
+import random
+from functools import partial
+from itertools import accumulate
+from operator import add
+from typing import Callable, Iterator, Mapping, Optional, Sequence, Tuple, TypeVar, Union
+
+T = TypeVar("T")
+U = TypeVar("U")
+
+Gen = Callable[[], T]
+
+
+def repeat(gen: Gen[T]) -> Iterator[T]:
+    while True:
+        yield gen()
+
+
+def repeat_gen(gen: Gen[T]) -> Gen[Iterator[T]]:
+    return partial(repeat, gen)
+
+
+def juxtapose(gen1: Gen[T], gen2: Gen[U]) -> Tuple[T, U]:
+    # e.g. for key-value pairs
+    return gen1(), gen2()
+
+
+def juxtapose_gen(gen1: Gen[T], gen2: Gen[U]) -> Gen[Tuple[T, U]]:
+    return partial(juxtapose, gen1, gen2)
+
+
+def either(gen1: Gen[T], gen2: Gen[U], choose1: Gen[bool]) -> Union[T, U]:
+    return gen1() if choose1() else gen2()
+
+
+def either_gen(gen1: Gen[T], gen2: Gen[U], choose1: Gen[bool]) -> Gen[Union[T, U]]:
+    return partial(either, gen1, gen2, choose1)
+
+
+def choice(values: Sequence[T], cum_weights: Optional[Sequence[float]] = None) -> T:
+    """optimized to use the cum_weights arg of random.choices"""
+    return (
+        random.choice(values)
+        if cum_weights is None
+        else random.choices(values, cum_weights=cum_weights, k=1)[0]
+    )
+
+
+def choice_gen(values: Union[Sequence[T], Mapping[T, float]]) -> Gen[T]:
+    if isinstance(values, Mapping):
+        values_, weights = zip(*values.items())
+        total = sum(weights)
+        cum_weights = tuple(w / total for w in accumulate(weights, add))
+        return partial(choice, values_, cum_weights)
+    else:
+        return partial(choice, values)

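A sketch of choice_gen's two input forms (assuming this hunk is thds/attrs_utils/random/util.py): a plain sequence gives a uniform choice, a mapping of value to weight gives a weighted choice:

    from thds.attrs_utils.random.util import choice_gen

    uniform_suit = choice_gen(["hearts", "spades", "clubs", "diamonds"])
    biased_coin = choice_gen({"heads": 0.75, "tails": 0.25})

    print(uniform_suit(), biased_coin())
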
thds/attrs_utils/recursion.py
@@ -0,0 +1,48 @@
+from functools import partial
+from typing import Any, Callable, Generic, Sequence, Tuple, Type, TypeVar
+
+from typing_extensions import Concatenate, ParamSpec
+
+T = TypeVar("T", contravariant=True)
+U = TypeVar("U", covariant=True)
+Params = ParamSpec("Params")
+
+Predicate = Callable[[T], bool]
+F = Callable[Concatenate[T, Params], U]
+RecF = Callable[Concatenate[F, T, Params], U]
+
+
+class StructuredRecursion(Generic[T, Params, U]):
+    def __init__(
+        self,
+        guarded_recursions: Sequence[Tuple[Predicate[T], RecF[T, Params, U]]],
+        fallback: RecF[T, Params, U],
+    ):
+        self.guarded_recursions = list(guarded_recursions)
+        self.fallback = fallback
+
+    def __call__(self, obj: T, *args: Params.args, **kwargs: Params.kwargs) -> U:
+        for predicate, recurse in self.guarded_recursions:
+            if predicate(obj):
+                return recurse(self, obj, *args, **kwargs)
+        else:
+            return self.fallback(self, obj, *args, **kwargs)
+
+
+def _value_error(
+    msg: str,
+    exc_type: Type[Exception],
+    # placeholder for the recursive function in case you wish to specify this explicitly as one of the
+    # recursions; type doesn't matter
+    f: Any,
+    obj: T,
+    *args,
+    **kwargs,
+):
+    raise exc_type(msg.format(obj))
+
+
+def value_error(msg: str, exc_type: Type[Exception] = ValueError) -> RecF[T, Params, U]:
+    """Helper to be passed as the `fallback` of a `StructuredRecursion` in case there is no natural
+    fallback/default implementation and an input fails to satisfy any of the predicates"""
+    return partial(_value_error, msg, exc_type)

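A sketch of StructuredRecursion with a value_error fallback (assuming this hunk is thds/attrs_utils/recursion.py); the leaf-counting recursion here is illustrative only:

    from thds.attrs_utils.recursion import StructuredRecursion, value_error

    # count the leaves of arbitrarily nested lists/tuples of ints
    count_leaves = StructuredRecursion(
        [
            (lambda o: isinstance(o, (list, tuple)), lambda rec, o: sum(rec(x) for x in o)),
            (lambda o: isinstance(o, int), lambda rec, o: 1),
        ],
        fallback=value_error("can't count leaves of {!r}"),
    )

    assert count_leaves([1, [2, 3], (4,)]) == 4
    # count_leaves("nope")  # would raise ValueError: can't count leaves of 'nope'
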
thds/attrs_utils/registry.py
@@ -0,0 +1,40 @@
+from functools import partial, wraps
+from typing import Callable, Dict, Type, TypeVar, cast, overload
+
+T = TypeVar("T")
+U = TypeVar("U")
+_D = TypeVar("_D")
+_Type = TypeVar("_Type", bound=Type)
+
+_MISSING = object()
+
+
+class Registry(Dict[T, U]):
+    @overload
+    def register(self, key: T) -> Callable[[U], U]: ...
+
+    @overload
+    def register(self, key: T, value: U) -> U: ...
+
+    def register(self, key: T, value=_MISSING):
+        if value is _MISSING:
+
+            def decorator(value: U) -> U:
+                self[key] = value
+                return value
+
+            return decorator
+        else:
+            self[key] = value
+            return value
+
+    def cache(self, func: Callable[[T], U]) -> Callable[[T], U]:
+        cached = partial(_check_cache, self, func)
+        return wraps(func)(cached)  # type: ignore [return-value]
+
+
+def _check_cache(cache: Dict[T, U], func: Callable[[T], U], key: T) -> U:
+    value = cache.get(key, cast(U, _MISSING))
+    if value is _MISSING:
+        value = cache[key] = func(key)
+    return value

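A sketch of the Registry API above (assuming this hunk is thds/attrs_utils/registry.py): register works as a direct call or a decorator, and cache memoizes a one-argument function using the registry as its cache.

    from typing import Callable

    from thds.attrs_utils.registry import Registry

    handlers: Registry[str, Callable[[str], bytes]] = Registry()

    # direct form: the value is stored under the key and returned
    handlers.register("utf8", lambda s: s.encode("utf-8"))

    # decorator form: the decorated function is stored under the key
    @handlers.register("ascii")
    def encode_ascii(s: str) -> bytes:
        return s.encode("ascii")

    # cache(): memoize a one-argument function, with the registry as the cache
    squares: Registry[int, int] = Registry()

    @squares.cache
    def square(n: int) -> int:
        return n * n

    assert square(4) == 16 and squares[4] == 16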