thds.attrs-utils 1.6.20251103154246__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thds/attrs_utils/__init__.py +4 -0
- thds/attrs_utils/cattrs/__init__.py +29 -0
- thds/attrs_utils/cattrs/converter.py +182 -0
- thds/attrs_utils/cattrs/errors.py +46 -0
- thds/attrs_utils/defaults.py +31 -0
- thds/attrs_utils/docs.py +75 -0
- thds/attrs_utils/empty.py +146 -0
- thds/attrs_utils/isinstance/__init__.py +4 -0
- thds/attrs_utils/isinstance/check.py +107 -0
- thds/attrs_utils/isinstance/registry.py +13 -0
- thds/attrs_utils/isinstance/util.py +106 -0
- thds/attrs_utils/jsonschema/__init__.py +11 -0
- thds/attrs_utils/jsonschema/constructors.py +90 -0
- thds/attrs_utils/jsonschema/jsonschema.py +326 -0
- thds/attrs_utils/jsonschema/str_formats.py +109 -0
- thds/attrs_utils/jsonschema/util.py +94 -0
- thds/attrs_utils/py.typed +0 -0
- thds/attrs_utils/random/__init__.py +3 -0
- thds/attrs_utils/random/attrs.py +57 -0
- thds/attrs_utils/random/builtin.py +103 -0
- thds/attrs_utils/random/collection.py +41 -0
- thds/attrs_utils/random/gen.py +134 -0
- thds/attrs_utils/random/optional.py +13 -0
- thds/attrs_utils/random/registry.py +30 -0
- thds/attrs_utils/random/tuple.py +24 -0
- thds/attrs_utils/random/union.py +33 -0
- thds/attrs_utils/random/util.py +55 -0
- thds/attrs_utils/recursion.py +48 -0
- thds/attrs_utils/registry.py +40 -0
- thds/attrs_utils/type_cache.py +110 -0
- thds/attrs_utils/type_recursion.py +168 -0
- thds/attrs_utils/type_utils.py +180 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/METADATA +230 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/RECORD +36 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/WHEEL +5 -0
- thds_attrs_utils-1.6.20251103154246.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
from functools import lru_cache, partial, reduce
|
|
2
|
+
from operator import contains
|
|
3
|
+
from typing import Any, Callable, Collection, Iterable, Mapping, Optional, Sequence, Type, TypeVar
|
|
4
|
+
|
|
5
|
+
T = TypeVar("T")
U = TypeVar("U")

# A Check is a predicate over arbitrary values: it answers whether a value
# conforms to some runtime constraint.
Check = Callable[[Any], bool]


def check_in_values(values: Collection) -> Check:
    """Build a Check that passes iff the value is a member of `values`."""
    allowed = frozenset(values)
    return partial(contains, allowed)


def _isinstance(type_: Type, value: Any) -> bool:
    # argument-flipped isinstance, suitable for partial application on the type
    return isinstance(value, type_)


def simple_isinstance(type_: Type) -> Check:
    """Build a Check equivalent to `isinstance(value, type_)`."""
    return partial(_isinstance, type_)


def all_values(check: Check, iter_: Optional[Callable[[Any], Iterable]], values: Iterable) -> bool:
    """True iff every element of `values` (optionally transformed by `iter_`) passes `check`."""
    elements = values if iter_ is None else iter_(values)
    return all(map(check, elements))


@lru_cache(None)
def check_all_values(check: Check, iter_: Optional[Callable[[Any], Iterable]] = None) -> Check:
    """Lift a per-element Check into a Check over every element of an iterable."""
    return partial(all_values, check, iter_)


def both(check1: Check, check2: Check, value: Any) -> bool:
    """Conjunction of two checks; `check2` is consulted only when `check1` passes."""
    first = check1(value)
    return first and check2(value)


@lru_cache(None)
def check_both(check1: Check, check2: Check) -> Check:
    """Combine two Checks with logical AND."""
    return partial(both, check1, check2)


@lru_cache(None)
def check_all(*checks: Check) -> Check:
    """Combine any number of Checks with logical AND."""
    return reduce(check_both, checks)


def either(check1: Check, check2: Check, value: Any) -> bool:
    """Disjunction of two checks; `check2` is consulted only when `check1` fails."""
    first = check1(value)
    return first or check2(value)


@lru_cache(None)
def check_either(check1: Check, check2: Check) -> Check:
    """Combine two Checks with logical OR."""
    return partial(either, check1, check2)


@lru_cache(None)
def check_any(*checks: Check) -> Check:
    """Combine any number of Checks with logical OR."""
    return reduce(check_either, checks)


def tuple_(checks: Sequence[Check], values: Sequence) -> bool:
    """True iff `values` has exactly one value per check and each value passes its check."""
    if len(checks) != len(values):
        return False
    return all(chk(val) for chk, val in zip(checks, values))


def typed_tuple(
    instancecheck: Check,
    checks: Sequence[Check],
    iter_: Optional[Callable[[Any], Iterable]],
    values: Sequence,
) -> bool:
    """Like `tuple_`, but first requires `instancecheck` to pass on the container itself."""
    outer = instancecheck(values)
    if not outer:
        return outer
    elements = values if iter_ is None else tuple(iter_(values))
    return tuple_(checks, elements)


@lru_cache(None)
def check_tuple(*checks: Check) -> Check:
    """Build a positional, fixed-length Check over a sequence of values."""
    return partial(tuple_, checks)


@lru_cache(None)
def check_typed_tuple(
    instancecheck: Check, *checks: Check, iter_: Optional[Callable[[Any], Iterable]] = None
) -> Check:
    """Build a Check combining a container type check with per-position element checks."""
    return partial(typed_tuple, instancecheck, checks, iter_)


def _attrs(names: Iterable[str], obj) -> Iterable:
    # extract the named attributes of obj, in order
    return (getattr(obj, attr_name) for attr_name in names)


@lru_cache(None)
def check_attrs(instancecheck: Check, names: Sequence[str], *checks: Check) -> Check:
    """Build a Check validating an object's type and each named attribute in turn."""
    return check_typed_tuple(instancecheck, *checks, iter_=partial(_attrs, names))


def _items(mapping: Mapping):
    # module-level function (not a lambda) so repeated combinator calls share
    # one `iter_` object and hit the lru_cache above
    return mapping.items()


@lru_cache(None)
def check_mapping(instancecheck: Check, k_check: Check, v_check: Check) -> Check:
    """Build a Check validating a mapping's type plus every key and value."""
    return check_both(instancecheck, check_all_values(check_tuple(k_check, v_check), _items))


@lru_cache(None)
def check_collection(instancecheck: Check, v_check: Check) -> Check:
    """Build a Check validating a collection's type plus every element."""
    return check_both(instancecheck, check_all_values(v_check))
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import typing as ty
|
|
2
|
+
|
|
3
|
+
OBJECT, STRING, INTEGER, NUMBER, ARRAY, BOOLEAN, NULL = (
    "object",
    "string",
    "integer",
    "number",
    "array",
    "boolean",
    "null",
)
TYPE = "type"
TITLE, DESCRIPTION = "title", "description"

# Sentinel distinguishing "argument not supplied" from any real value (None included).
_missing = object()


##################################
# Jsonschema object constructors #
##################################


def null():
    """Schema matching only JSON null."""
    return {TYPE: NULL}


def boolean():
    """Schema matching JSON booleans."""
    return {TYPE: BOOLEAN}


def string(minLength=_missing, maxLength=_missing, pattern=_missing, format=_missing):
    """Schema matching JSON strings, with optional length/pattern/format constraints."""
    # locals() here contains exactly the keyword parameters above
    return _supplied_kwargs(locals(), {TYPE: STRING})


def integer(
    minimum=_missing,
    maximum=_missing,
    exclusiveMinimum=_missing,
    exclusiveMaximum=_missing,
):
    """Schema matching JSON integers, with optional range constraints."""
    return _numeric(
        INTEGER,
        minimum=minimum,
        maximum=maximum,
        exclusiveMinimum=exclusiveMinimum,
        exclusiveMaximum=exclusiveMaximum,
    )


def number(
    minimum=_missing,
    maximum=_missing,
    exclusiveMinimum=_missing,
    exclusiveMaximum=_missing,
):
    """Schema matching JSON numbers, with optional range constraints."""
    return _numeric(
        NUMBER,
        minimum=minimum,
        maximum=maximum,
        exclusiveMinimum=exclusiveMinimum,
        exclusiveMaximum=exclusiveMaximum,
    )


def array(
    minItems=_missing,
    maxItems=_missing,
    uniqueItems=_missing,
    prefixItems=_missing,
    items=_missing,
):
    """Schema matching JSON arrays, with optional length/uniqueness/item constraints."""
    return _supplied_kwargs(locals(), {TYPE: ARRAY})


def object_(
    properties=_missing,
    patternProperties=_missing,
    required=_missing,
    minProperties=_missing,
    maxProperties=_missing,
    additionalProperties=_missing,
):
    """Schema matching JSON objects, with optional property constraints."""
    return _supplied_kwargs(locals(), {TYPE: OBJECT})


def _numeric(
    type: str,
    minimum=_missing,
    maximum=_missing,
    exclusiveMinimum=_missing,
    exclusiveMaximum=_missing,
):
    """Shared constructor for integer/number schemas.

    `type` deliberately shadows the builtin: it becomes the "type" key of the schema.
    """
    return _supplied_kwargs(locals())


def _supplied_kwargs(kwargs, namespace: ty.Optional[ty.Dict[str, ty.Any]] = None):
    """Drop unsupplied (sentinel) arguments; merge the rest into `namespace` if given."""
    supplied = {k: v for k, v in kwargs.items() if v is not _missing}
    if namespace is None:
        return supplied
    namespace.update(supplied)
    return namespace
|
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import enum
|
|
3
|
+
import importlib
|
|
4
|
+
import uuid
|
|
5
|
+
from types import ModuleType
|
|
6
|
+
from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union
|
|
7
|
+
from warnings import warn
|
|
8
|
+
|
|
9
|
+
import attr
|
|
10
|
+
from typing_inspect import get_args, get_origin, is_optional_type
|
|
11
|
+
|
|
12
|
+
from thds.core import scope
|
|
13
|
+
from thds.core.stack_context import StackContext
|
|
14
|
+
|
|
15
|
+
from ..cattrs import DEFAULT_JSON_CONVERTER
|
|
16
|
+
from ..recursion import RecF, value_error
|
|
17
|
+
from ..registry import Registry
|
|
18
|
+
from ..type_recursion import TypeRecursion
|
|
19
|
+
from ..type_utils import UNIQUE_COLLECTION_TYPES, newtype_base, typename
|
|
20
|
+
from .constructors import array, boolean, integer, null, number, object_, string
|
|
21
|
+
from .str_formats import DEFAULT_FORMAT_CHECKS, string_pattern_for
|
|
22
|
+
from .util import (
|
|
23
|
+
DEFS,
|
|
24
|
+
JSON,
|
|
25
|
+
TITLE,
|
|
26
|
+
JSONSchema,
|
|
27
|
+
JSONSchemaTypeCache,
|
|
28
|
+
ToJSON,
|
|
29
|
+
_merge_schemas_anyof,
|
|
30
|
+
check_cache,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
# Jsonschema keywords / constant values used when emitting schema documents.
ENUM = "enum"
DEFAULT = "default"
SCHEMA, BASE_URI = "$schema", "$id"
DESCRIPTION = "description"
# Values for the jsonschema "format" keyword on strings.
DATE, DATETIME, TIME, UUID = "date", "date-time", "time", "uuid"
DRAFT_7_SCHEMA_URL = "http://json-schema.org/draft-07/schema"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
### Recursively available context
# These two knobs are set by `to_jsonschema` and read deep inside the recursive
# generators (see gen_jsonschema_attrs), rather than threaded through every call.
# When True, Optional attrs attributes with no explicit default get a JSON Schema
# default of null.
OptionalNullDefaults = StackContext("OptionalNullDefaults", True)
# arguably this is the wrong default; consult with Hawthorn and Spencer about changing it
AllAttributesRequired = StackContext("AllAttributesRequired", False)
# for creating an output schema; when serializing attrs objects, all
# attributes are guaranteed to be present, and the only way to express
# this with JSONSchema is to call them 'required'.
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
#################
|
|
51
|
+
# Main function #
|
|
52
|
+
#################
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@scope.bound
def to_jsonschema(
    type_: Type,
    to_json: ToJSON = DEFAULT_JSON_CONVERTER.unstructure,
    as_ref: bool = False,
    modules: Optional[
        Union[
            Collection[ModuleType],
            Mapping[str, Union[ModuleType, Collection[ModuleType]]],
        ]
    ] = None,
    base_uri: Optional[str] = None,
    optional_null_defaults: bool = True,
    all_attributes_required: bool = False,
) -> JSONSchema:
    """Generate a jsonschema for a python type. Handles most useful builtin types and `attrs` classes.

    :param type_: the type to generate a schema for. Can be an `attrs` class, builtin python type,
        annotation type from `typing` or `typing_extensions`.
    :param to_json: callable taking an arbitrary python object and returning a json-serializable python
        object. The default is a `cattrs` converter's 'unstructure' method. This is used for serializing
        literals in the form of default values and arguments to `typing_extensions.Literal`.
    :param as_ref: if True, put the explicit schema for `type_` in the "$defs" section of the schema, and
        reference it at the top level using a "$ref". If False, generate no "$defs" section for `type_`,
        instead including all of its fields at the top level of the schema.
    :param modules: collection of module objects or mapping of `str` to module. These are used for
        looking up names of types that would otherwise be anonymous, such as `typing.NewType` and
        `typing_extensions.Literal` instances. Each such nameable type gets a section in the "$defs"
        section of the schema. If not supplied, the module of `type_` is dynamically imported and its name
        used to qualify any types found there. When a mapping is passed for `modules`, the keys of the
        mapping are used to qualify the names of types found in the associated module. When a collection of
        modules is passed, type names are qualified by the names of their respective modules.
    :param base_uri: sets the base URI of the schema (manifests as the `$id` keyword)
    :param optional_null_defaults: If an attrs attribute is Optional, then the JSON Schema default value is null.
    :param all_attributes_required: For output schemas; all data serialized from attrs classes
        is guaranteed to have all object keys present recursively.
    :return: a json-serializable python dict representing the generated jsonschema
    """
    # Make the two boolean knobs recursively visible to the generators below;
    # @scope.bound unwinds these StackContext settings on exit.
    scope.enter(AllAttributesRequired.set(all_attributes_required))
    scope.enter(OptionalNullDefaults.set(optional_null_defaults))
    named_modules: Mapping[str, Union[ModuleType, Collection[ModuleType]]]
    if modules is None:
        # fall back to the defining module of type_, when it has one
        module_name = getattr(type_, "__module__", None)
        if module_name is not None:
            named_modules = {module_name: importlib.import_module(module_name)}
        else:
            named_modules = {}
    elif not isinstance(modules, Mapping):
        # a bare collection of modules: qualify names by each module's own name
        named_modules = {module.__name__: module for module in modules}
    else:
        named_modules = modules

    type_cache = JSONSchemaTypeCache(**named_modules)
    # generating the top-level schema also fills type_cache with named sub-schemas
    ref = gen_jsonschema(type_, type_cache, to_json)
    schema = {SCHEMA: DRAFT_7_SCHEMA_URL}
    if base_uri:
        schema[BASE_URI] = base_uri
    if as_ref:
        schema.update(ref)
    else:
        # inline the schema for type_ at the top level instead of referencing it
        schema[TITLE] = type_cache.name_of(type_)
        schema.update(type_cache.pop(type_))
    if type_cache:
        schema[DEFS] = type_cache.to_defs()

    return schema
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def jsonschema_validator(
    schema: JSONSchema,
    use_default: bool = True,
    formats: Optional[Mapping[str, Callable[[str], bool]]] = None,
) -> Callable[[JSON], JSON]:
    """Convenience method for constructing a validator for a given jsonschema.

    :param schema: the jsonschema to validate against
    :param use_default: if True (default), the validator returns instances with default values filled in.
        See the corresponding argument of `fastjsonschema.compile` for more information
    :param formats: mapping of format name to predicate indicating whether a given string satisfies the
        format specification. If not passed, a default, extensible set of format predicates is used,
        encompassing some common types. This set may be extended by using the `register_format_check`
        decorator.
    :return: a callable accepting json-serializable python objects and returning them optionally with
        defaults populated. The callable raises an exception if the input doesn't match the jsonschema.
        See `fastjsonschema.compile` for more information
    :raises ImportError: when the optional `fastjsonschema` dependency is not installed
    """
    # fastjsonschema is an optional dependency; import lazily so the rest of the
    # module works without it, and fail with installation guidance.
    try:
        import fastjsonschema
    except ImportError:
        raise ImportError(
            f"fastjsonschema is required to use {jsonschema_validator.__name__} but is not installed; "
            "include the 'jsonschema' extra of this library to install a compatible version"
        )

    if formats is None:
        # copy so later register_format_check calls don't mutate this validator's mapping
        formats = DEFAULT_FORMAT_CHECKS.copy()

    return fastjsonschema.compile(schema, formats=formats, use_default=use_default)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
#################################
|
|
156
|
+
# Jsonschema generation by type #
|
|
157
|
+
#################################
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
@check_cache
def gen_jsonschema_newtype(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate a schema for a `typing.NewType` by recursing into its base type."""
    # we don't use the default implementation from type_recursion because we want to allow the naming
    # cache to pick up the new name to make the intended semantics of the resulting schema clearer
    return gen_jsonschema(newtype_base(type_), type_cache, serializer)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
@check_cache
def gen_jsonschema_attrs(
    gen_jsonschema,
    type_: Type,
    type_cache: JSONSchemaTypeCache,
    serializer: ToJSON,
):
    """Generate an "object" schema for an attrs class, one property per attribute.

    Note that there are two recursively available input parameters that can only be set via StackContext.

    See above for notes on OptionalNullDefaults and AllAttributesRequired.
    """
    optional_null_defaults = OptionalNullDefaults()
    all_attributes_required = AllAttributesRequired()

    # resolve string annotations so at.type below is a real type object
    type_ = attr.resolve_types(type_)
    attrs = attr.fields(type_)
    properties: Dict[str, Any] = {}
    required: List[str] = []

    for at in attrs:
        # case where we should add a null default value
        null_default = (
            optional_null_defaults and is_optional_type(at.type) and at.default is attr.NOTHING
        )

        if at.type is None:
            warn(
                f"No type annotation for field {at.name} of attrs class {typename(type_)}; using empty schema"
            )
            attr_schema = {}
        else:
            attr_schema = gen_jsonschema(at.type, type_cache, serializer)
        # an attribute is "required" when the caller demands it globally, or when it
        # has no default of any kind (explicit, factory, or synthesized null)
        if all_attributes_required or (at.default is attr.NOTHING and not null_default):
            required.append(at.name)
        else:
            if isinstance(at.default, attr.Factory):  # type: ignore
                if at.default.takes_self:  # type: ignore
                    # the factory needs an instance we don't have, so no default is emitted
                    warn(
                        "Can't define a default value for an attrs field with a factory where "
                        f"takes_self=True; occurred for field {at.name!r} on class {type_!r}"
                    )
                else:
                    # call the factory once to obtain a serializable default value
                    attr_schema[DEFAULT] = serializer(at.default.factory())  # type: ignore
            else:
                default = None if null_default else at.default
                attr_schema[DEFAULT] = serializer(default)

        properties[at.name] = attr_schema  # type: ignore

    return object_(properties=properties, required=required, additionalProperties=False)
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
@check_cache
def gen_jsonschema_literal(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate an "enum" schema for a typing Literal from its serialized argument values."""
    return _gen_jsonschema_enum(get_args(type_), type_, serializer, "literal")
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
@check_cache
def gen_jsonschema_enum(
    gen_jsonschema, type_: Type[enum.Enum], type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate an "enum" schema for an `enum.Enum`, serializing each member's value."""
    return _gen_jsonschema_enum([v.value for v in type_], type_, serializer, "enum")


def _gen_jsonschema_enum(base_values, type_: Type[enum.Enum], serializer: ToJSON, kind: str):
    """Serialize `base_values` and wrap them in an "enum" schema.

    :param kind: a label ("enum"/"literal") used only for error messages.
    :raises TypeError: when any value can't be serialized; chained to the original error.
    """
    values = []
    for i, value in enumerate(base_values):
        try:
            values.append(serializer(value))
        except Exception as exc:
            # chain the original failure so the root cause remains visible to callers
            raise TypeError(
                f"Can't serialize value {value!r} at index {i:d} of {kind} type {type_!r}"
            ) from exc
    return {ENUM: values}
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def gen_jsonschema_union(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate a schema for a Union by merging the member schemas under "anyOf"."""
    member_schemas = [gen_jsonschema(member, type_cache, serializer) for member in get_args(type_)]
    return _merge_schemas_anyof(member_schemas)
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def gen_jsonschema_mapping(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate an "object" schema for Mapping[K, V]: keys constrained by a regex, values by a schema."""
    args = get_args(type_)
    if len(args) != 2:
        # unparameterized / unexpectedly-parameterized mappings are unsupported
        return unknown_type_for_jsonschema(gen_jsonschema, type_, type_cache, serializer)
    key_type, value_type = args
    value_schema = gen_jsonschema(value_type, type_cache, serializer)
    # JSON object keys are always strings, so the key type is expressed as a string pattern
    key_pattern = string_pattern_for(key_type, serializer)
    return object_(patternProperties={key_pattern: value_schema})
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def gen_jsonschema_collection(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate an "array" schema for a homogeneous collection type (list, set, ...)."""
    args = get_args(type_)
    if len(args) > 1:
        # more than one type parameter: not a homogeneous collection we understand
        return unknown_type_for_jsonschema(gen_jsonschema, type_, type_cache, serializer)
    item_schema = gen_jsonschema(args[0], type_cache, serializer) if args else {}
    if get_origin(type_) in UNIQUE_COLLECTION_TYPES:
        # set-like collections: elements must additionally be unique
        return array(items=item_schema, uniqueItems=True)
    return array(items=item_schema)
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def gen_jsonschema_tuple(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate a fixed-length "array" schema for Tuple[X, Y, ...] using prefixItems."""
    args = get_args(type_)
    items = [gen_jsonschema(t, type_cache, serializer) for t in args]
    # items=False forbids any elements beyond the fixed prefix
    return array(prefixItems=items, items=False)
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def gen_jsonschema_variadic_tuple(
    gen_jsonschema, type_: Type, type_cache: JSONSchemaTypeCache, serializer: ToJSON
):
    """Generate a schema for Tuple[X, ...] by treating it as List[X]."""
    args = get_args(type_)
    list_type: Type = List[args[0]]  # type: ignore [valid-type]
    # the original dropped this value (no `return`), yielding None instead of a schema
    return gen_jsonschema_collection(gen_jsonschema, list_type, type_cache, serializer)
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
# Base schemas for exact builtin/stdlib types; the TypeRecursion below consults
# this registry before falling back to the structural generators.
GEN_JSONSCHEMA_REGISTRY: Registry[Type, JSONSchema] = Registry(
    {
        type(None): null(),
        str: string(),
        bool: boolean(),
        int: integer(),
        float: number(),
        # dates use the "date" format (the original erroneously used "date-time",
        # leaving the registered "date" format check unused)
        datetime.date: string(format=DATE),
        datetime.datetime: string(format=DATETIME),
        datetime.time: string(format=TIME),
        uuid.UUID: string(format=UUID),
    }
)
|
|
307
|
+
|
|
308
|
+
# Fallback handler: raises TypeError for any type with no registered/derivable schema.
unknown_type_for_jsonschema: "RecF[Type, [JSONSchemaTypeCache, ToJSON], JSONSchema]" = value_error(
    "Don't know how to interpret type {!r} as jsonschema",
    TypeError,
)

# Main recursive generator: dispatches on the structural kind of the type encountered.
gen_jsonschema: "TypeRecursion[[JSONSchemaTypeCache, ToJSON], JSONSchema]" = TypeRecursion(
    GEN_JSONSCHEMA_REGISTRY,
    cached=False,  # no caching by type since behavior depends on to_json, type_cache args
    attrs=gen_jsonschema_attrs,
    literal=gen_jsonschema_literal,
    enum=gen_jsonschema_enum,
    newtype=gen_jsonschema_newtype,
    union=gen_jsonschema_union,
    mapping=gen_jsonschema_mapping,
    tuple=gen_jsonschema_tuple,
    variadic_tuple=gen_jsonschema_variadic_tuple,
    collection=gen_jsonschema_collection,
    otherwise=unknown_type_for_jsonschema,
)
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import json
|
|
3
|
+
import re
|
|
4
|
+
import typing as ty
|
|
5
|
+
import uuid
|
|
6
|
+
from typing import Type, get_args
|
|
7
|
+
|
|
8
|
+
from ..recursion import RecF, value_error
|
|
9
|
+
from ..registry import Registry
|
|
10
|
+
from ..type_recursion import TypeRecursion
|
|
11
|
+
from .util import ToJSON
|
|
12
|
+
|
|
13
|
+
DATE, DATETIME, TIME, UUID = "date", "date-time", "time", "uuid"
|
|
14
|
+
|
|
15
|
+
_null_pattern = "^null$"
|
|
16
|
+
_str_pattern = r"^.*$"
|
|
17
|
+
_bool_pattern = r"^true|false$"
|
|
18
|
+
_int_pattern = r"^[+-]?[0-9]+$"
|
|
19
|
+
_float_pattern = r"^[-+]?([0-9]*\.?[0-9]+|[0-9]+)([eE][-+]?[0-9]+)?$"
|
|
20
|
+
_date_pattern = r"^[0-9]{4}-(0[1-9]|1[1-2])-(0[1-9]|1[0-9]|2[0-9]|3[0-1])$"
|
|
21
|
+
_time_pattern = r"^([0-1][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\.[0-9]{1,6})?([+-][0-5][0-9]:[0-5][0-9])?$"
|
|
22
|
+
_datetime_pattern = rf"^{_date_pattern[1:-1]}T{_time_pattern[1:-1]}$"
|
|
23
|
+
_uuid_pattern = r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
|
24
|
+
|
|
25
|
+
_uuid_re = re.compile(_uuid_pattern)
|
|
26
|
+
|
|
27
|
+
DEFAULT_FORMAT_CHECKS: ty.Dict[str, ty.Callable[[str], bool]] = {}
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
###################################################################
|
|
31
|
+
# String format checkers for use with jsonschema 'format' keyword #
|
|
32
|
+
###################################################################
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def register_format_check(name: str):
|
|
36
|
+
def decorator(func: ty.Callable[[str], bool]) -> ty.Callable[[str], bool]:
|
|
37
|
+
DEFAULT_FORMAT_CHECKS[name] = func
|
|
38
|
+
return func
|
|
39
|
+
|
|
40
|
+
return decorator
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _is_datetime_string(s: str, cls: ty.Union[ty.Type[datetime.date], ty.Type[datetime.time]]) -> bool:
|
|
44
|
+
try:
|
|
45
|
+
cls.fromisoformat(s)
|
|
46
|
+
except Exception:
|
|
47
|
+
return False
|
|
48
|
+
return True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@register_format_check(DATE)
|
|
52
|
+
def is_date_string(s: str) -> bool:
|
|
53
|
+
return _is_datetime_string(s, datetime.date)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@register_format_check(DATETIME)
|
|
57
|
+
def is_datetime_string(s: str) -> bool:
|
|
58
|
+
return _is_datetime_string(s, datetime.datetime)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@register_format_check(TIME)
|
|
62
|
+
def is_time_string(s: str) -> bool:
|
|
63
|
+
return _is_datetime_string(s, datetime.time)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@register_format_check(UUID)
|
|
67
|
+
def is_uuid_string(s: str) -> bool:
|
|
68
|
+
return isinstance(s, str) and bool(_uuid_re.fullmatch(s))
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def string_pattern_for_literal(string_pattern_for, type_: Type, serializer: ToJSON) -> str:
    """Regex matching the JSON-serialized string form of any of the Literal's values."""

    def as_string(serialized):
        # strings are embedded as-is; everything else via its compact JSON rendering
        if isinstance(serialized, str):
            return serialized
        return json.dumps(serialized, indent="")

    alternatives = [re.escape(as_string(serializer(arg))) for arg in get_args(type_)]
    return rf"^({'|'.join(alternatives)})$"
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def string_pattern_for_union(string_pattern_for, type_: Type, serializer: ToJSON) -> str:
    """Regex matching the string pattern of any member type of the Union."""
    member_patterns = [string_pattern_for(member, serializer) for member in get_args(type_)]
    return rf"^({'|'.join(member_patterns)})$"
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
# Base string-pattern registry keyed by exact python type; consulted by
# string_pattern_for before the literal/union handlers and the TypeError fallback.
JSON_STRING_PATTERN_REGISTRY = Registry(
    {
        type(None): _null_pattern,
        # `_str_pattern` was defined but never registered, so plain-`str` keys fell
        # through to the TypeError fallback; register it (backward-compatible fix)
        str: _str_pattern,
        bool: _bool_pattern,
        int: _int_pattern,
        float: _float_pattern,
        datetime.date: _date_pattern,
        datetime.datetime: _datetime_pattern,
        uuid.UUID: _uuid_pattern,
    }
)
|
|
97
|
+
|
|
98
|
+
# Fallback handler: raises TypeError for types with no registered string pattern.
unknown_type_for_json_str_pattern: "RecF[Type, [ToJSON], str]" = value_error(
    "Can't determine a format regex for type {!r} embedded as a string in json; "
    f"register one with {__name__}.string_pattern_for.register()",
    TypeError,
)

# Recursive resolver from a python type to a regex for its JSON-string form
# (used e.g. for mapping-key patterns in generated schemas).
string_pattern_for: "TypeRecursion[[ToJSON], str]" = TypeRecursion(
    JSON_STRING_PATTERN_REGISTRY,
    literal=string_pattern_for_literal,
    union=string_pattern_for_union,
    otherwise=unknown_type_for_json_str_pattern,
)
|