jentic-openapi-datamodels 1.0.0a12__py3-none-any.whl → 1.0.0a13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jentic/apitools/openapi/datamodels/low/context.py +21 -0
- jentic/apitools/openapi/datamodels/low/extractors.py +126 -0
- jentic/apitools/openapi/datamodels/low/fields.py +45 -0
- jentic/apitools/openapi/datamodels/low/model_builder.py +113 -0
- jentic/apitools/openapi/datamodels/low/py.typed +0 -0
- jentic/apitools/openapi/datamodels/low/sources.py +89 -0
- jentic/apitools/openapi/datamodels/low/v30/__init__.py +0 -28
- jentic/apitools/openapi/datamodels/low/v30/discriminator.py +53 -78
- jentic/apitools/openapi/datamodels/low/v30/external_documentation.py +47 -61
- jentic/apitools/openapi/datamodels/low/v30/oauth_flow.py +54 -123
- jentic/apitools/openapi/datamodels/low/v30/oauth_flows.py +102 -151
- jentic/apitools/openapi/datamodels/low/v30/reference.py +43 -44
- jentic/apitools/openapi/datamodels/low/v30/schema.py +316 -607
- jentic/apitools/openapi/datamodels/low/v30/security_requirement.py +82 -72
- jentic/apitools/openapi/datamodels/low/v30/security_scheme.py +94 -286
- jentic/apitools/openapi/datamodels/low/v30/tag.py +88 -119
- jentic/apitools/openapi/datamodels/low/v30/xml.py +46 -120
- jentic_openapi_datamodels-1.0.0a13.dist-info/METADATA +211 -0
- jentic_openapi_datamodels-1.0.0a13.dist-info/RECORD +23 -0
- jentic/apitools/openapi/datamodels/low/v30/specification_object.py +0 -217
- jentic_openapi_datamodels-1.0.0a12.dist-info/METADATA +0 -52
- jentic_openapi_datamodels-1.0.0a12.dist-info/RECORD +0 -18
- /jentic/apitools/openapi/datamodels/low/{v30/py.typed → __init__.py} +0 -0
- {jentic_openapi_datamodels-1.0.0a12.dist-info → jentic_openapi_datamodels-1.0.0a13.dist-info}/WHEEL +0 -0
- {jentic_openapi_datamodels-1.0.0a12.dist-info → jentic_openapi_datamodels-1.0.0a13.dist-info}/licenses/LICENSE +0 -0
- {jentic_openapi_datamodels-1.0.0a12.dist-info → jentic_openapi_datamodels-1.0.0a13.dist-info}/licenses/NOTICE +0 -0

jentic/apitools/openapi/datamodels/low/context.py

@@ -0,0 +1,21 @@
+from dataclasses import dataclass, field
+
+from ruamel.yaml import YAML
+from ruamel.yaml.constructor import RoundTripConstructor
+
+
+__all__ = ["Context"]
+
+
+@dataclass(frozen=True, slots=True)
+class Context:
+    """
+    Context for parsing OpenAPI documents.
+
+    Contains configuration and dependencies used during parsing operations.
+
+    Attributes:
+        yaml_constructor: The YAML constructor used to deserialize YAML nodes into Python objects
+    """
+
+    yaml_constructor: RoundTripConstructor = field(default=YAML(typ="rt", pure=True).constructor)
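
A minimal usage sketch (not part of the diff, assuming the module paths listed above): the default constructor comes from a pure round-trip ruamel YAML instance, and a caller can supply its own.

```python
from ruamel.yaml import YAML

from jentic.apitools.openapi.datamodels.low.context import Context

default_ctx = Context()  # defaults to YAML(typ="rt", pure=True).constructor

custom_yaml = YAML(typ="rt", pure=True)  # a caller-owned YAML instance
custom_ctx = Context(yaml_constructor=custom_yaml.constructor)
```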

jentic/apitools/openapi/datamodels/low/extractors.py

@@ -0,0 +1,126 @@
+from ruamel import yaml
+
+from jentic.apitools.openapi.datamodels.low.context import Context
+from jentic.apitools.openapi.datamodels.low.fields import fixed_fields
+from jentic.apitools.openapi.datamodels.low.sources import KeySource, ValueSource, YAMLValue
+
+
+__all__ = ["extract_extension_fields", "extract_unknown_fields"]
+
+
+def extract_extension_fields(
+    node: yaml.MappingNode, context: Context | None = None
+) -> dict[KeySource[str], ValueSource[YAMLValue]]:
+    """
+    Extract OpenAPI specification extension fields from a YAML mapping node.
+
+    Specification extension fields are any fields that start with "x-" and allow
+    users to add custom properties to OpenAPI definitions.
+
+    Args:
+        node: The YAML mapping node to extract extension fields from
+        context: Optional parsing context. If None, a default context will be created.
+
+    Returns:
+        A dictionary mapping extension field names to their values, or empty dict if no extension fields found
+
+    Example:
+        Given YAML like:
+            name: id
+            x-custom: value
+            x-internal: true
+
+        Returns:
+            {
+                KeySource(value="x-custom", key_node=...): ValueSource(value="value", value_node=...),
+                KeySource(value="x-internal", key_node=...): ValueSource(value=True, value_node=...)
+            }
+    """
+    if not isinstance(node, yaml.MappingNode):
+        return {}
+
+    if context is None:
+        context = Context()
+
+    extensions: dict[KeySource[str], ValueSource[YAMLValue]] = {}
+
+    for key_node, value_node in node.value:
+        # Construct the key as a Python object (should be a string)
+        py_key = context.yaml_constructor.construct_yaml_str(key_node)
+
+        # Check if it's an extension (starts with "x-")
+        if isinstance(py_key, str) and py_key.startswith("x-"):
+            # Construct the actual Python value from the YAML node
+            py_value = context.yaml_constructor.construct_object(value_node, deep=True)
+
+            key_ref = KeySource[str](value=py_key, key_node=key_node)
+            value_ref = ValueSource[YAMLValue](value=py_value, value_node=value_node)
+            extensions[key_ref] = value_ref
+
+    return extensions
+
+
+def extract_unknown_fields(
+    node: yaml.MappingNode, dataclass_type: type, context: Context | None = None
+) -> dict[KeySource[str], ValueSource[YAMLValue]]:
+    """
+    Extract unknown fields from a YAML mapping node.
+
+    Unknown fields are fields that are not part of the OpenAPI specification
+    (not fixed fields of the dataclass) and are not extensions (don't start with "x-").
+    These are typically typos or fields from a different specification version.
+
+    Args:
+        node: The YAML mapping node to extract unknown fields from
+        dataclass_type: The dataclass type to get valid field names from
+        context: Optional parsing context. If None, a default context will be created.
+
+    Returns:
+        A dictionary mapping unknown field names to their values, or empty dict if no unknown fields found
+
+    Example:
+        Given YAML like:
+            name: id
+            namspace: http://example.com  # typo - should be "namespace"
+            customField: value  # unknown field
+            x-custom: value  # extension - not unknown
+
+        With dataclass_type=XML (which has fixed fields: name, namespace, prefix, attribute, wrapped)
+        Returns:
+            {
+                KeySource(value="namspace", key_node=...): ValueSource(value="http://example.com", value_node=...),
+                KeySource(value="customField", key_node=...): ValueSource(value="value", value_node=...)
+            }
+    """
+    if not isinstance(node, yaml.MappingNode):
+        return {}
+
+    if context is None:
+        context = Context()
+
+    # Get valid YAML field names from the dataclass (considering yaml_name metadata)
+    _fixed_fields = fixed_fields(dataclass_type)
+    yaml_field_names = {
+        field.metadata.get("yaml_name", fname) for fname, field in _fixed_fields.items()
+    }
+
+    unknown_fields: dict[KeySource[str], ValueSource[YAMLValue]] = {}
+
+    for key_node, value_node in node.value:
+        # Construct the key as a Python object (should be a string)
+        py_key = context.yaml_constructor.construct_yaml_str(key_node)
+
+        # Check if it's an unknown field (not in valid YAML field names and not an extension)
+        if (
+            isinstance(py_key, str)
+            and py_key not in yaml_field_names
+            and not py_key.startswith("x-")
+        ):
+            # Construct the actual Python value from the YAML node
+            py_value = context.yaml_constructor.construct_object(value_node, deep=True)
+
+            key_ref = KeySource[str](value=py_key, key_node=key_node)
+            value_ref = ValueSource[YAMLValue](value=py_value, value_node=value_node)
+            unknown_fields[key_ref] = value_ref
+
+    return unknown_fields
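
A short sketch of how the extractors might be called on a composed node (the YAML snippet and printed format are illustrative, not from the package):

```python
from ruamel.yaml import YAML

from jentic.apitools.openapi.datamodels.low.extractors import extract_extension_fields

yaml = YAML(typ="rt", pure=True)
root = yaml.compose("name: id\nx-custom: value\nx-internal: true\n")

# Keys and values come back wrapped in KeySource/ValueSource, so the original
# YAML nodes (and their line/column marks) stay available for diagnostics.
for key, value in extract_extension_fields(root).items():
    print(f"{key.value} = {value.value!r} (line {key.key_node.start_mark.line + 1})")
```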

jentic/apitools/openapi/datamodels/low/fields.py

@@ -0,0 +1,45 @@
+from dataclasses import field, fields
+
+
+__all__ = ["fixed_field", "fixed_fields", "patterned_field", "patterned_fields"]
+
+
+def fixed_field(default=None, metadata=None):
+    """Mark a field as a fixed OpenAPI specification field."""
+    return field(default=default, metadata={**(metadata or {}), "fixed_field": True})
+
+
+def fixed_fields(dataclass_type):
+    """
+    Get all fixed specification fields from a dataclass.
+
+    Args:
+        dataclass_type: The dataclass type to inspect
+
+    Returns:
+        A dictionary mapping field names to field objects for all fields marked with fixed_field()
+    """
+    return {f.name: f for f in fields(dataclass_type) if f.metadata.get("fixed_field")}
+
+
+def patterned_field(default=None, metadata=None):
+    """
+    Mark a field as containing OpenAPI patterned fields.
+
+    Patterned fields have dynamic names that follow a specific pattern (e.g., security scheme names,
+    path patterns, callback expressions, HTTP status codes).
+    """
+    return field(default=default, metadata={**(metadata or {}), "patterned_field": True})
+
+
+def patterned_fields(dataclass_type):
+    """
+    Get all patterned fields from a dataclass.
+
+    Args:
+        dataclass_type: The dataclass type to inspect
+
+    Returns:
+        A dictionary mapping field names to field objects for all fields marked with patterned_field()
+    """
+    return {f.name: f for f in fields(dataclass_type) if f.metadata.get("patterned_field")}
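
A sketch of how these markers could be used. `Info` here is a hypothetical dataclass written for illustration, not one shipped in the wheel:

```python
from dataclasses import dataclass

from ruamel import yaml

from jentic.apitools.openapi.datamodels.low.fields import fixed_field, fixed_fields
from jentic.apitools.openapi.datamodels.low.sources import FieldSource


@dataclass(frozen=True, slots=True)
class Info:  # hypothetical model following the package's field conventions
    root_node: yaml.Node
    title: FieldSource[str] | None = fixed_field()
    version: FieldSource[str] | None = fixed_field()
    terms_of_service: FieldSource[str] | None = fixed_field(metadata={"yaml_name": "termsOfService"})


# Only fields created via fixed_field() are reported; root_node is not one of them.
print(sorted(fixed_fields(Info)))  # ['terms_of_service', 'title', 'version']
```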

jentic/apitools/openapi/datamodels/low/model_builder.py

@@ -0,0 +1,113 @@
+from dataclasses import fields
+from typing import Any, TypeVar, cast, get_args
+
+from ruamel import yaml
+
+from jentic.apitools.openapi.datamodels.low.context import Context
+from jentic.apitools.openapi.datamodels.low.extractors import extract_extension_fields
+from jentic.apitools.openapi.datamodels.low.fields import fixed_fields
+from jentic.apitools.openapi.datamodels.low.sources import (
+    FieldSource,
+    KeySource,
+    ValueSource,
+    YAMLInvalidValue,
+)
+
+
+__all__ = ["build_model"]
+
+
+T = TypeVar("T")
+
+
+def build_model(
+    root: yaml.Node, dataclass_type: type[T], *, context: Context | None = None
+) -> T | ValueSource[YAMLInvalidValue]:
+    """
+    Generic builder for OpenAPI low model.
+
+    Builds any dataclass that follows the pattern:
+    - Has a required `root_node: yaml.Node` field
+    - Has an optional `extensions: dict[...]` field
+    - Has spec fields marked with `fixed_field()`
+
+    Args:
+        root: The YAML node to parse (should be a MappingNode)
+        dataclass_type: The dataclass type to build
+        context: Optional parsing context. If None, a default context will be created.
+
+    Returns:
+        An instance of dataclass_type if the node is valid, or a ValueSource containing
+        the invalid data if the root is not a MappingNode (preserving the invalid data
+        and its source location for validation).
+
+    Example:
+        xml = build_model(root_node, XML, context=context)
+    """
+    # Initialize context once at the beginning
+    if context is None:
+        context = Context()
+
+    if not isinstance(root, yaml.MappingNode):
+        # Preserve invalid root data instead of returning None
+        value = context.yaml_constructor.construct_object(root, deep=True)
+        return ValueSource(value=value, value_node=root)
+
+    # Get fixed specification fields for this dataclass type
+    _fixed_fields = fixed_fields(dataclass_type)
+
+    # Build YAML name to Python field name mapping
+    yaml_to_field = {
+        field.metadata.get("yaml_name", fname): fname for fname, field in _fixed_fields.items()
+    }
+
+    # Extract field values in a single pass (non-recursive, single layer only)
+    field_values: dict[str, Any] = {}
+    for key_node, value_node in root.value:
+        key = context.yaml_constructor.construct_yaml_str(key_node)
+
+        # Map YAML key to Python field name
+        field_name = yaml_to_field.get(key)
+        if field_name:
+            field = _fixed_fields[field_name]
+            field_type_args = set(get_args(field.type))
+
+            if field_type_args & {FieldSource[str], FieldSource[bool], FieldSource[int]}:
+                value = context.yaml_constructor.construct_object(value_node, deep=True)
+                field_values[field_name] = FieldSource(
+                    value=value, key_node=key_node, value_node=value_node
+                )
+            elif field_type_args & {FieldSource[dict[KeySource[str], ValueSource[str]]]}:
+                # Handle dict with KeySource/ValueSource wrapping
+                if isinstance(value_node, yaml.MappingNode):
+                    mapping_dict: dict[KeySource[str], ValueSource[str]] = {}
+                    for map_key_node, map_value_node in value_node.value:
+                        map_key = context.yaml_constructor.construct_yaml_str(map_key_node)
+                        map_value = context.yaml_constructor.construct_object(
+                            map_value_node, deep=True
+                        )
+                        mapping_dict[KeySource(value=map_key, key_node=map_key_node)] = ValueSource(
+                            value=map_value, value_node=map_value_node
+                        )
+                    field_values[field_name] = FieldSource(
+                        value=mapping_dict, key_node=key_node, value_node=value_node
+                    )
+                else:
+                    # Not a mapping - preserve as-is for validation
+                    value = context.yaml_constructor.construct_object(value_node, deep=True)
+                    field_values[field_name] = FieldSource(
+                        value=value, key_node=key_node, value_node=value_node
+                    )
+
+    # Build and return the dataclass instance
+    # Conditionally include extensions field if dataclass supports it
+    # Cast to Any to work around generic type constraints
+    has_extensions = any(f.name == "extensions" for f in fields(cast(Any, dataclass_type)))
+    return cast(
+        T,
+        dataclass_type(
+            root_node=root,  # type: ignore[call-arg]
+            **field_values,
+            **({"extensions": extract_extension_fields(root, context)} if has_extensions else {}),
+        ),
+    )
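
A usage sketch of the generic builder with a made-up dataclass that follows the documented pattern (`ExternalDocs` and its fields are assumptions for illustration, not the package's real model):

```python
from dataclasses import dataclass

from ruamel import yaml
from ruamel.yaml import YAML

from jentic.apitools.openapi.datamodels.low.fields import fixed_field
from jentic.apitools.openapi.datamodels.low.model_builder import build_model
from jentic.apitools.openapi.datamodels.low.sources import FieldSource, KeySource, ValueSource, YAMLValue


@dataclass(frozen=True, slots=True)
class ExternalDocs:  # hypothetical model following the build_model pattern
    root_node: yaml.Node
    url: FieldSource[str] | None = fixed_field()
    description: FieldSource[str] | None = fixed_field()
    extensions: dict[KeySource[str], ValueSource[YAMLValue]] | None = None


root = YAML(typ="rt", pure=True).compose("url: https://example.com\nx-status: draft\n")
docs = build_model(root, ExternalDocs)

assert docs.url.value == "https://example.com"  # scalar wrapped in a FieldSource
assert any(k.value == "x-status" for k in docs.extensions)  # "x-" keys collected separately
```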

jentic/apitools/openapi/datamodels/low/py.typed

File without changes

jentic/apitools/openapi/datamodels/low/sources.py

@@ -0,0 +1,89 @@
+from dataclasses import dataclass
+from typing import Generic, TypeAlias, TypeVar
+
+from ruamel import yaml
+from ruamel.yaml.comments import CommentedMap, CommentedSeq
+
+
+__all__ = ["FieldSource", "KeySource", "ValueSource", "YAMLValue", "YAMLInvalidValue"]
+
+# Type alias for any deserialized YAML value (including mappings)
+YAMLValue: TypeAlias = str | int | float | bool | None | CommentedSeq | CommentedMap
+
+# Type alias for invalid YAML values (subset of YAMLValue, excludes CommentedMap)
+# Used when builders receive non-mapping nodes to preserve data for validation
+YAMLInvalidValue: TypeAlias = str | int | float | bool | None | CommentedSeq
+
+T = TypeVar("T")
+
+
+@dataclass(frozen=True, slots=True)
+class FieldSource(Generic[T]):
+    """
+    A field value with associated YAML source location information.
+
+    Used for fixed OpenAPI specification fields to track both the key and value
+    nodes in the original YAML source, enabling precise error reporting.
+
+    Automatically unwraps ValueSource instances passed as the value parameter,
+    allowing child builders to return ValueSource for invalid data while keeping
+    parent code clean.
+
+    Attributes:
+        value: The actual field value of type T
+        key_node: The YAML node containing the field name/key
+        value_node: The YAML node containing the field value (can be None for keys without values)
+    """
+
+    value: T
+    key_node: yaml.Node
+    value_node: yaml.Node | None = None
+
+    def __post_init__(self) -> None:
+        """
+        Auto-unwrap ValueSource if passed as value parameter.
+
+        This allows child builders to return ValueSource[YAMLInvalidValue] for invalid root nodes
+        while parent code can transparently wrap the result in FieldSource without
+        special handling.
+
+        Note: We don't need to update value_node since parent and child work with
+        the same YAML node.
+        """
+        if isinstance(self.value, ValueSource):
+            # Extract the raw value from ValueSource wrapper
+            object.__setattr__(self, "value", self.value.value)
+
+
+@dataclass(frozen=True, slots=True)
+class KeySource(Generic[T]):
+    """
+    A dictionary key with associated YAML source location information.
+
+    Used in extension and unknown field dictionaries to track where keys
+    appear in the original YAML source.
+
+    Attributes:
+        value: The key value of type T
+        key_node: The YAML node that holds this key
+    """
+
+    value: T
+    key_node: yaml.Node
+
+
+@dataclass(frozen=True, slots=True)
+class ValueSource(Generic[T]):
+    """
+    A dictionary value / array item with associated YAML source location information.
+
+    Used in extension and unknown field dictionaries to track where values
+    appear in the original YAML source.
+
+    Attributes:
+        value: The value of type T
+        value_node: The YAML node that holds this value
+    """
+
+    value: T
+    value_node: yaml.Node
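
A small sketch of the auto-unwrap behaviour described in `FieldSource.__post_init__` (the YAML snippet is illustrative):

```python
from ruamel.yaml import YAML

from jentic.apitools.openapi.datamodels.low.sources import FieldSource, ValueSource

root = YAML(typ="rt", pure=True).compose("answer: 42")
key_node, value_node = root.value[0]

# A child builder may return a ValueSource when it gets an invalid (non-mapping)
# node; __post_init__ unwraps it so the parent always stores the raw value.
wrapped = ValueSource(value=42, value_node=value_node)
field_src = FieldSource(value=wrapped, key_node=key_node, value_node=value_node)
assert field_src.value == 42
```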

jentic/apitools/openapi/datamodels/low/v30/__init__.py

@@ -1,28 +0,0 @@
-"""OpenAPI 3.0.x low-level models."""
-
-from .discriminator import Discriminator
-from .external_documentation import ExternalDocumentation
-from .oauth_flow import OAuthFlow
-from .oauth_flows import OAuthFlows
-from .reference import Reference
-from .schema import Schema
-from .security_requirement import SecurityRequirement
-from .security_scheme import SecurityScheme
-from .specification_object import SpecificationObject
-from .tag import Tag
-from .xml import XML
-
-
-__all__ = [
-    "Discriminator",
-    "ExternalDocumentation",
-    "OAuthFlow",
-    "OAuthFlows",
-    "Reference",
-    "Schema",
-    "SecurityRequirement",
-    "SecurityScheme",
-    "SpecificationObject",
-    "Tag",
-    "XML",
-]

jentic/apitools/openapi/datamodels/low/v30/discriminator.py

(Several removed lines of the old module were truncated by the diff viewer and are shown blank below.)

@@ -1,91 +1,66 @@
-
-OpenAPI 3.0.4 Discriminator Object model.
+from dataclasses import dataclass
 
-
-a discriminator object gives a hint about the expected schema.
-"""
+from ruamel import yaml
 
-from
+from jentic.apitools.openapi.datamodels.low.context import Context
+from jentic.apitools.openapi.datamodels.low.fields import fixed_field
+from jentic.apitools.openapi.datamodels.low.model_builder import build_model
+from jentic.apitools.openapi.datamodels.low.sources import (
+    FieldSource,
+    KeySource,
+    ValueSource,
+    YAMLInvalidValue,
+)
 
-from jentic.apitools.openapi.datamodels.low.v30.specification_object import SpecificationObject
 
+__all__ = ["Discriminator", "build"]
 
-__all__ = ["Discriminator"]
 
+@dataclass(frozen=True, slots=True)
+class Discriminator:
+    """
+    Discriminator Object representation for OpenAPI 3.0.
+
+    When request bodies or response payloads may be one of a number of different schemas,
+    a discriminator object can be used to aid in serialization, deserialization, and validation.
+
+    Note: In OpenAPI 3.0.x, the Discriminator Object does not support Specification Extensions.
+    This changes in OpenAPI 3.1.x where extensions are permitted.
 
-
+    Attributes:
+        root_node: The top-level node representing the entire Discriminator object in the original source file
+        property_name: REQUIRED. The name of the property in the payload that will hold the discriminator value
+        mapping: An optional mapping of discriminator values to schema names or references
     """
-    Represents a Discriminator Object from OpenAPI 3.0.4.
 
-
-
+    root_node: yaml.Node
+    property_name: FieldSource[str] | None = fixed_field(metadata={"yaml_name": "propertyName"})
+    mapping: FieldSource[dict[KeySource[str], ValueSource[str]]] | None = fixed_field()
 
-    Supports specification extensions (x-* fields).
 
-
-
-
-    ... "propertyName": "petType"
-    ... })
-    >>> disc.property_name
-    'petType'
-
-    >>> # With mapping
-    >>> disc = Discriminator({
-    ... "propertyName": "petType",
-    ... "mapping": {
-    ... "dog": "#/components/schemas/Dog",
-    ... "cat": "#/components/schemas/Cat",
-    ... "lizard": "https://example.com/schemas/Lizard.json"
-    ... }
-    ... })
-    >>> disc.property_name
-    'petType'
-    >>> disc.mapping["dog"]
-    '#/components/schemas/Dog'
+def build(
+    root: yaml.Node, context: Context | None = None
+) -> Discriminator | ValueSource[YAMLInvalidValue]:
     """
+    Build a Discriminator object from a YAML node.
+
+    Preserves all source data as-is, regardless of type. This is a low-level/plumbing
+    model that provides complete source fidelity for inspection and validation.
 
-
-
-
-
-
-
-
-
-
-
-
-    ""
-
-
-
-
-        """Set the property name."""
-        if value is None:
-            self.pop("propertyName", None)
-        else:
-            self["propertyName"] = value
-
-    @property
-    def mapping(self) -> dict[str, str]:
-        """
-        Mapping between payload values and schema names/references.
-
-        Maps discriminator property values to schema names or references.
-        When absent, the value is expected to match a schema name.
-
-        Returns:
-            Dictionary mapping values to schema references (empty dict if not present)
-        """
-        return self.get("mapping", {})
-
-    @mapping.setter
-    def mapping(self, value: Mapping[str, str] | None) -> None:
-        """Set the mapping."""
-        if value is None:
-            self.pop("mapping", None)
-        else:
-            # Convert to plain dict once at storage time
-            self["mapping"] = dict(value) if isinstance(value, Mapping) else value
+    Args:
+        root: The YAML node to parse (should be a MappingNode)
+        context: Optional parsing context. If None, a default context will be created.
+
+    Returns:
+        A Discriminator object if the node is valid, or a ValueSource containing
+        the invalid data if the root is not a MappingNode (preserving the invalid data
+        and its source location for validation).
+
+    Example:
+        from ruamel.yaml import YAML
+        yaml = YAML()
+        root = yaml.compose("propertyName: petType\\nmapping:\\n dog: Dog\\n cat: Cat")
+        discriminator = build(root)
+        assert discriminator.property_name.value == 'petType'
+    """
+    return build_model(root, Discriminator, context=context)
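
A hedged usage sketch of the new builder, showing the source-location access that the FieldSource/KeySource/ValueSource wrappers are meant to enable (the YAML snippet and printed messages are illustrative only):

```python
from ruamel.yaml import YAML

from jentic.apitools.openapi.datamodels.low.v30.discriminator import build

yaml = YAML(typ="rt", pure=True)
root = yaml.compose(
    "propertyName: petType\n"
    "mapping:\n"
    "  dog: '#/components/schemas/Dog'\n"
)
disc = build(root)

# Values stay paired with their YAML nodes, so a validator can point at exact lines.
mark = disc.property_name.value_node.start_mark
print(f"propertyName value defined at line {mark.line + 1}, column {mark.column + 1}")

for key, target in disc.mapping.value.items():
    print(f"{key.value} -> {target.value}")
```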