robotframework-openapitools 1.0.0b2__py3-none-any.whl → 1.0.0b4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- OpenApiDriver/openapi_executors.py +8 -8
- OpenApiDriver/openapi_reader.py +12 -13
- OpenApiDriver/openapidriver.libspec +4 -41
- OpenApiLibCore/__init__.py +0 -2
- OpenApiLibCore/annotations.py +8 -1
- OpenApiLibCore/data_generation/__init__.py +0 -2
- OpenApiLibCore/data_generation/body_data_generation.py +52 -71
- OpenApiLibCore/data_generation/data_generation_core.py +82 -62
- OpenApiLibCore/data_invalidation.py +37 -20
- OpenApiLibCore/dto_base.py +20 -86
- OpenApiLibCore/localized_faker.py +88 -0
- OpenApiLibCore/models.py +715 -0
- OpenApiLibCore/openapi_libcore.libspec +47 -283
- OpenApiLibCore/openapi_libcore.py +20 -46
- OpenApiLibCore/parameter_utils.py +23 -17
- OpenApiLibCore/path_functions.py +5 -4
- OpenApiLibCore/protocols.py +7 -5
- OpenApiLibCore/request_data.py +67 -102
- OpenApiLibCore/resource_relations.py +2 -3
- OpenApiLibCore/validation.py +49 -161
- OpenApiLibCore/value_utils.py +46 -358
- openapi_libgen/__init__.py +0 -46
- openapi_libgen/command_line.py +7 -19
- openapi_libgen/generator.py +84 -0
- openapi_libgen/spec_parser.py +44 -111
- {robotframework_openapitools-1.0.0b2.dist-info → robotframework_openapitools-1.0.0b4.dist-info}/METADATA +98 -1
- robotframework_openapitools-1.0.0b4.dist-info/RECORD +40 -0
- robotframework_openapitools-1.0.0b2.dist-info/RECORD +0 -37
- {robotframework_openapitools-1.0.0b2.dist-info → robotframework_openapitools-1.0.0b4.dist-info}/LICENSE +0 -0
- {robotframework_openapitools-1.0.0b2.dist-info → robotframework_openapitools-1.0.0b4.dist-info}/WHEEL +0 -0
- {robotframework_openapitools-1.0.0b2.dist-info → robotframework_openapitools-1.0.0b4.dist-info}/entry_points.txt +0 -0
OpenApiLibCore/data_generation/data_generation_core.py
CHANGED
@@ -16,13 +16,20 @@ from OpenApiLibCore.dto_base import (
     Dto,
     PropertyValueConstraint,
     ResourceRelation,
-    resolve_schema,
 )
 from OpenApiLibCore.dto_utils import DefaultDto
+from OpenApiLibCore.models import (
+    ObjectSchema,
+    OpenApiObject,
+    OperationObject,
+    ParameterObject,
+    RequestBodyObject,
+    UnionTypeSchema,
+)
 from OpenApiLibCore.parameter_utils import get_safe_name_for_oas_name
 from OpenApiLibCore.protocols import GetDtoClassType, GetIdPropertyNameType
 from OpenApiLibCore.request_data import RequestData
-from OpenApiLibCore.value_utils import IGNORE
+from OpenApiLibCore.value_utils import IGNORE

 from .body_data_generation import (
     get_json_data_for_dto_class as _get_json_data_for_dto_class,
@@ -34,7 +41,7 @@ def get_request_data(
     method: str,
     get_dto_class: GetDtoClassType,
     get_id_property_name: GetIdPropertyNameType,
-    openapi_spec:
+    openapi_spec: OpenApiObject,
 ) -> RequestData:
     method = method.lower()
     dto_cls_name = get_dto_cls_name(path=path, method=method)
@@ -43,21 +50,24 @@ def get_request_data(
     spec_path = pf.get_parametrized_path(path=path, openapi_spec=openapi_spec)
     dto_class = get_dto_class(path=spec_path, method=method)
     try:
-
-
+        path_item = openapi_spec.paths[spec_path]
+        operation_spec = getattr(path_item, method)
+        if operation_spec is None:
+            raise AttributeError
+    except AttributeError:
         logger.info(
             f"method '{method}' not supported on '{spec_path}, using empty spec."
         )
-
+        operation_spec = OperationObject(operationId="")

     parameters, params, headers = get_request_parameters(
-        dto_class=dto_class, method_spec=
+        dto_class=dto_class, method_spec=operation_spec
     )
-    if
+    if operation_spec.requestBody is None:
         dto_instance = _get_dto_instance_for_empty_body(
             dto_class=dto_class,
             dto_cls_name=dto_cls_name,
-            method_spec=
+            method_spec=operation_spec,
         )
         return RequestData(
             dto=dto_instance,
@@ -67,25 +77,46 @@ def get_request_data(
             has_body=False,
         )

-    headers.update({"content-type": get_content_type(
+    headers.update({"content-type": get_content_type(operation_spec.requestBody)})
+
+    body_schema = operation_spec.requestBody
+    media_type_dict = body_schema.content
+    supported_types = [v for k, v in media_type_dict.items() if "json" in k]
+    supported_schemas = [t.schema_ for t in supported_types if t.schema_ is not None]
+
+    if not supported_schemas:
+        raise ValueError(f"No supported content schema found: {media_type_dict}")
+
+    if len(supported_schemas) > 1:
+        logger.warn(
+            f"Multiple JSON media types defined for requestBody, using the first candidate {media_type_dict}"
+        )
+
+    schema = supported_schemas[0]
+
+    if isinstance(schema, UnionTypeSchema):
+        resolved_schemas = schema.resolved_schemas
+        schema = choice(resolved_schemas)
+
+    if not isinstance(schema, ObjectSchema):
+        raise ValueError(f"Selected schema is not an object schema: {schema}")

-    content_schema = resolve_schema(get_content_schema(body_spec))
     dto_data = _get_json_data_for_dto_class(
-        schema=
+        schema=schema,
         dto_class=dto_class,
         get_id_property_name=get_id_property_name,
-        operation_id=
+        operation_id=operation_spec.operationId,
     )
     dto_instance = _get_dto_instance_from_dto_data(
-
+        object_schema=schema,
         dto_class=dto_class,
         dto_data=dto_data,
-        method_spec=
+        method_spec=operation_spec,
         dto_cls_name=dto_cls_name,
     )
     return RequestData(
         dto=dto_instance,
-
+        body_schema=schema,
         parameters=parameters,
         params=params,
         headers=headers,
@@ -95,13 +126,14 @@ def get_request_data(
 def _get_dto_instance_for_empty_body(
     dto_class: type[Dto],
     dto_cls_name: str,
-    method_spec:
+    method_spec: OperationObject,
 ) -> Dto:
     if dto_class == DefaultDto:
         dto_instance: Dto = DefaultDto()
     else:
+        cls_name = method_spec.operationId if method_spec.operationId else dto_cls_name
         dto_class = make_dataclass(
-            cls_name=
+            cls_name=cls_name,
             fields=[],
             bases=(dto_class,),
         )
@@ -110,10 +142,10 @@ def _get_dto_instance_for_empty_body(


 def _get_dto_instance_from_dto_data(
-
+    object_schema: ObjectSchema,
     dto_class: type[Dto],
     dto_data: JSON,
-    method_spec:
+    method_spec: OperationObject,
     dto_cls_name: str,
 ) -> Dto:
     if not isinstance(dto_data, (dict, list)):
@@ -122,48 +154,41 @@ def _get_dto_instance_from_dto_data(
     if isinstance(dto_data, list):
         raise NotImplementedError

-    fields = get_fields_from_dto_data(
+    fields = get_fields_from_dto_data(object_schema, dto_data)
+    cls_name = method_spec.operationId if method_spec.operationId else dto_cls_name
     dto_class_ = make_dataclass(
-        cls_name=
+        cls_name=cls_name,
         fields=fields,
         bases=(dto_class,),
     )
-    dto_data = {get_safe_key(key): value for key, value in dto_data.items()}
+    # dto_data = {get_safe_key(key): value for key, value in dto_data.items()}
+    dto_data = {
+        get_safe_name_for_oas_name(key): value for key, value in dto_data.items()
+    }
     return cast(Dto, dto_class_(**dto_data))


 def get_fields_from_dto_data(
-
-) -> list[tuple[str, type[
+    object_schema: ObjectSchema, dto_data: dict[str, JSON]
+) -> list[tuple[str, type[object], Field[object]]]:
     """Get a dataclasses fields list based on the content_schema and dto_data."""
-    fields: list[tuple[str, type[
+    fields: list[tuple[str, type[object], Field[object]]] = []
+
     for key, value in dto_data.items():
-
-        safe_key =
-        metadata = {"original_property_name": key}
-        if key in
+        # safe_key = get_safe_key(key)
+        safe_key = get_safe_name_for_oas_name(key)
+        # metadata = {"original_property_name": key}
+        if key in object_schema.required:
             # The fields list is used to create a dataclass, so non-default fields
             # must go before fields with a default
-            field_ = cast(Field[Any], field(
+            field_ = cast(Field[Any], field())  # pylint: disable=invalid-field-call
             fields.insert(0, (safe_key, type(value), field_))
         else:
-            field_ = cast(Field[Any], field(default=None
+            field_ = cast(Field[Any], field(default=None))  # pylint: disable=invalid-field-call
             fields.append((safe_key, type(value), field_))
     return fields


-def get_safe_key(key: str) -> str:
-    """
-    Helper function to convert a valid JSON property name to a string that can be used
-    as a Python variable or function / method name.
-    """
-    key = key.replace("-", "_")
-    key = key.replace("@", "_")
-    if key[0].isdigit():
-        key = f"_{key}"
-    return key
-
-
 def get_dto_cls_name(path: str, method: str) -> str:
     method = method.capitalize()
     path = path.translate({ord(i): None for i in "{}"})
@@ -173,20 +198,13 @@ def get_dto_cls_name(path: str, method: str) -> str:
     return result


-def
-    """Get the content schema from the requestBody spec."""
-    content_type = get_content_type(body_spec)
-    content_schema = body_spec["content"][content_type]["schema"]
-    return resolve_schema(content_schema)
-
-
-def get_content_type(body_spec: dict[str, Any]) -> str:
+def get_content_type(body_spec: RequestBodyObject) -> str:
     """Get and validate the first supported content type from the requested body spec

     Should be application/json like content type,
     e.g "application/json;charset=utf-8" or "application/merge-patch+json"
     """
-    content_types: list[str] = body_spec
+    content_types: list[str] = list(body_spec.content.keys())
     json_regex = r"application/([a-z\-]+\+)?json(;\s?charset=(.+))?"
     for content_type in content_types:
        if re.search(json_regex, content_type):
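As a quick aside on the `get_content_type` hunk above: the JSON content-type regex is unchanged between versions, only the source of the candidate strings moved from a raw dict to `body_spec.content`. A standalone sketch of what that regex accepts (the candidate list below is illustrative, not taken from the package):

```python
import re

# Same pattern as in get_content_type: plain, suffixed ("+json") and
# charset-qualified JSON content types all match.
json_regex = r"application/([a-z\-]+\+)?json(;\s?charset=(.+))?"

# Hypothetical keys of a requestBody "content" mapping.
candidates = [
    "application/json",
    "application/json;charset=utf-8",
    "application/merge-patch+json",
    "text/plain",
]

# Keeps the first three candidates; "text/plain" is rejected.
print([c for c in candidates if re.search(json_regex, c)])
```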
@@ -200,30 +218,29 @@ def get_content_type(body_spec: dict[str, Any]) -> str:


 def get_request_parameters(
-    dto_class: Dto | type[Dto], method_spec:
-) -> tuple[list[
+    dto_class: Dto | type[Dto], method_spec: OperationObject
+) -> tuple[list[ParameterObject], dict[str, Any], dict[str, str]]:
     """Get the methods parameter spec and params and headers with valid data."""
-    parameters = method_spec.
+    parameters = method_spec.parameters if method_spec.parameters else []
     parameter_relations = dto_class.get_parameter_relations()
-    query_params = [p for p in parameters if p.
-    header_params = [p for p in parameters if p.
+    query_params = [p for p in parameters if p.in_ == "query"]
+    header_params = [p for p in parameters if p.in_ == "header"]
     params = get_parameter_data(query_params, parameter_relations)
     headers = get_parameter_data(header_params, parameter_relations)
     return parameters, params, headers


 def get_parameter_data(
-    parameters: list[
+    parameters: list[ParameterObject],
     parameter_relations: list[ResourceRelation],
 ) -> dict[str, str]:
     """Generate a valid list of key-value pairs for all parameters."""
     result: dict[str, str] = {}
     value: Any = None
     for parameter in parameters:
-        parameter_name = parameter
+        parameter_name = parameter.name
         # register the oas_name
         _ = get_safe_name_for_oas_name(parameter_name)
-        parameter_schema = resolve_schema(parameter["schema"])
         relations = [
             r for r in parameter_relations if r.property_name == parameter_name
         ]
@@ -235,6 +252,9 @@ def get_parameter_data(
                 continue
             result[parameter_name] = value
             continue
-
+
+        if parameter.schema_ is None:
+            continue
+        value = parameter.schema_.get_valid_value()
         result[parameter_name] = value
     return result
OpenApiLibCore/data_invalidation.py
CHANGED
@@ -11,6 +11,7 @@ from requests import Response
 from robot.api import logger
 from robot.libraries.BuiltIn import BuiltIn

+from OpenApiLibCore.annotations import JSON
 from OpenApiLibCore.dto_base import (
     NOT_SET,
     Dto,
@@ -18,15 +19,15 @@ from OpenApiLibCore.dto_base import (
     PathPropertiesConstraint,
     PropertyValueConstraint,
     UniquePropertyValueConstraint,
-    resolve_schema,
 )
+from OpenApiLibCore.models import ParameterObject, UnionTypeSchema
 from OpenApiLibCore.request_data import RequestData
-from OpenApiLibCore.value_utils import IGNORE, get_invalid_value
+from OpenApiLibCore.value_utils import IGNORE, get_invalid_value

 run_keyword = BuiltIn().run_keyword


-def
+def get_invalid_body_data(
     url: str,
     method: str,
     status_code: int,
@@ -39,12 +40,12 @@ def get_invalid_json_data(
         r for r in data_relations if not isinstance(r, PathPropertiesConstraint)
     ]
     if not data_relations:
-        if
+        if request_data.body_schema is None:
             raise ValueError(
-                "Failed to invalidate:
+                "Failed to invalidate: request_data does not contain a body_schema."
             )
         json_data = request_data.dto.get_invalidated_data(
-            schema=request_data.
+            schema=request_data.body_schema,
             status_code=status_code,
             invalid_property_default_code=invalid_property_default_response,
         )
@@ -62,8 +63,12 @@ def get_invalid_json_data(
             run_keyword("ensure_in_use", url, resource_relation)
             json_data = request_data.dto.as_dict()
         else:
+            if request_data.body_schema is None:
+                raise ValueError(
+                    "Failed to invalidate: request_data does not contain a body_schema."
+                )
             json_data = request_data.dto.get_invalidated_data(
-                schema=request_data.
+                schema=request_data.body_schema,
                 status_code=status_code,
                 invalid_property_default_code=invalid_property_default_response,
             )
@@ -72,7 +77,7 @@ def get_invalid_json_data(

 def get_invalidated_parameters(
     status_code: int, request_data: RequestData, invalid_property_default_response: int
-) -> tuple[dict[str,
+) -> tuple[dict[str, JSON], dict[str, JSON]]:
     if not request_data.parameters:
         raise ValueError("No params or headers to invalidate.")

@@ -115,7 +120,7 @@ def get_invalidated_parameters(

     # Dto mappings may contain generic mappings for properties that are not present
     # in this specific schema
-    request_data_parameter_names = [p.
+    request_data_parameter_names = [p.name for p in request_data.parameters]
     additional_relation_property_names = {
         n for n in relation_property_names if n not in request_data_parameter_names
     }
@@ -140,7 +145,7 @@ def get_invalidated_parameters(
         [parameter_data] = [
             data
             for data in request_data.parameters
-            if data
+            if data.name == parameter_to_invalidate
         ]
     except Exception:
         raise ValueError(
@@ -186,7 +191,14 @@ def get_invalidated_parameters(
     else:
         valid_value = headers[parameter_to_invalidate]

-    value_schema =
+    value_schema = parameter_data.schema_
+    if value_schema is None:
+        raise ValueError(f"No schema defined for parameter: {parameter_data}.")
+
+    if isinstance(value_schema, UnionTypeSchema):
+        # FIXME: extra handling may be needed in case of values_from_constraint
+        value_schema = choice(value_schema.resolved_schemas)
+
     invalid_value = get_invalid_value(
         value_schema=value_schema,
         current_value=valid_value,
@@ -204,11 +216,11 @@ def get_invalidated_parameters(

 def ensure_parameter_in_parameters(
     parameter_to_invalidate: str,
-    params: dict[str,
-    headers: dict[str,
-    parameter_data:
-    values_from_constraint: list[
-) -> tuple[dict[str,
+    params: dict[str, JSON],
+    headers: dict[str, JSON],
+    parameter_data: ParameterObject,
+    values_from_constraint: list[JSON],
+) -> tuple[dict[str, JSON], dict[str, JSON]]:
     """
     Returns the params, headers tuple with parameter_to_invalidate with a valid
     value to params or headers if not originally present.
@@ -220,15 +232,20 @@ def ensure_parameter_in_parameters(
     if values_from_constraint:
         valid_value = choice(values_from_constraint)
     else:
-
-
+        value_schema = parameter_data.schema_
+        if value_schema is None:
+            raise ValueError(f"No schema defined for parameter: {parameter_data}.")
+
+        if isinstance(value_schema, UnionTypeSchema):
+            value_schema = choice(value_schema.resolved_schemas)
+        valid_value = value_schema.get_valid_value()
     if (
-        parameter_data
+        parameter_data.in_ == "query"
         and parameter_to_invalidate not in params.keys()
     ):
         params[parameter_to_invalidate] = valid_value
     if (
-        parameter_data
+        parameter_data.in_ == "header"
         and parameter_to_invalidate not in headers.keys()
     ):
         headers[parameter_to_invalidate] = str(valid_value)
OpenApiLibCore/dto_base.py
CHANGED
@@ -5,7 +5,6 @@ test and constraints / restrictions on properties of the resources.
 """

 from abc import ABC
-from copy import deepcopy
 from dataclasses import dataclass, fields
 from random import choice, shuffle
 from typing import Any
@@ -14,77 +13,13 @@ from uuid import uuid4
 from robot.api import logger

 from OpenApiLibCore import value_utils
+from OpenApiLibCore.models import NullSchema, ObjectSchema, UnionTypeSchema
+from OpenApiLibCore.parameter_utils import get_oas_name_from_safe_name

 NOT_SET = object()
 SENTINEL = object()


-def resolve_schema(schema: dict[str, Any]) -> dict[str, Any]:
-    """
-    Helper function to resolve allOf, anyOf and oneOf instances in a schema.
-
-    The schemas are used to generate values for headers, query parameters and json
-    bodies to be able to make requests.
-    """
-    # Schema is mutable, so deepcopy to prevent mutation of original schema argument
-    resolved_schema = deepcopy(schema)
-
-    # allOf / anyOf / oneOf may be nested, so recursively resolve the dict-typed values
-    for key, value in resolved_schema.items():
-        if isinstance(value, dict):
-            resolved_schema[key] = resolve_schema(value)
-
-    # When handling allOf there should no duplicate keys, so the schema parts can
-    # just be merged after resolving the individual parts
-    if schema_parts := resolved_schema.pop("allOf", None):
-        for schema_part in schema_parts:
-            resolved_part = resolve_schema(schema_part)
-            resolved_schema = merge_schemas(resolved_schema, resolved_part)
-    # Handling anyOf and oneOf requires extra logic to deal with the "type" information.
-    # Some properties / parameters may be of different types and each type may have its
-    # own restrictions e.g. a parameter that accepts an enum value (string) or an
-    # integer value within a certain range.
-    # Since the library needs all this information for different purposes, the
-    # schema_parts cannot be merged, so a helper property / key "types" is introduced.
-    any_of = resolved_schema.pop("anyOf", [])
-    one_of = resolved_schema.pop("oneOf", [])
-    schema_parts = any_of if any_of else one_of
-
-    for schema_part in schema_parts:
-        resolved_part = resolve_schema(schema_part)
-        if isinstance(resolved_part, dict) and "type" in resolved_part.keys():
-            if "types" in resolved_schema.keys():
-                resolved_schema["types"].append(resolved_part)
-            else:
-                resolved_schema["types"] = [resolved_part]
-        else:
-            resolved_schema = merge_schemas(resolved_schema, resolved_part)
-
-    return resolved_schema
-
-
-def merge_schemas(first: dict[str, Any], second: dict[str, Any]) -> dict[str, Any]:
-    """Helper method to merge two schemas, recursively."""
-    merged_schema = deepcopy(first)
-    for key, value in second.items():
-        # for existing keys, merge dict and list values, leave others unchanged
-        if key in merged_schema.keys():
-            if isinstance(value, dict):
-                # if the key holds a dict, merge the values (e.g. 'properties')
-                merged_schema[key].update(value)
-            elif isinstance(value, list):
-                # if the key holds a list, extend the values (e.g. 'required')
-                merged_schema[key].extend(value)
-            elif value != merged_schema[key]:
-                logger.debug(
-                    f"key '{key}' with value '{merged_schema[key]}'"
-                    f" not updated to '{value}'"
-                )
-        else:
-            merged_schema[key] = value
-    return merged_schema
-
-
 class ResourceRelation(ABC):
     """ABC for all resource relations or restrictions within the API."""

@@ -197,14 +132,14 @@ class Dto(ABC):

     def get_invalidated_data(
         self,
-        schema:
+        schema: ObjectSchema,
         status_code: int,
         invalid_property_default_code: int,
     ) -> dict[str, Any]:
         """Return a data set with one of the properties set to an invalid value or type."""
         properties: dict[str, Any] = self.as_dict()

-        schema = resolve_schema(schema)
+        # schema = resolve_schema(schema)

         relations = self.get_relations_for_error_code(error_code=status_code)
         # filter PathProperyConstraints since in that case no data can be invalidated
@@ -212,9 +147,9 @@ class Dto(ABC):
             r for r in relations if not isinstance(r, PathPropertiesConstraint)
         ]
         property_names = [r.property_name for r in relations]
-        if status_code == invalid_property_default_code
+        if status_code == invalid_property_default_code:
             # add all properties defined in the schema, including optional properties
-            property_names.extend((schema
+            property_names.extend((schema.properties.root.keys()))
         if not property_names:
             raise ValueError(
                 f"No property can be invalidated to cause status_code {status_code}"
@@ -256,31 +191,30 @@ class Dto(ABC):
             )
             return properties

-        value_schema = schema
-
-
-
-
-
-
-
-
-
-
-            value_schema = choice(value_schemas)
+        value_schema = schema.properties.root[property_name]
+        if isinstance(value_schema, UnionTypeSchema):
+            # Filter "type": "null" from the possible types since this indicates an
+            # optional / nullable property that can only be invalidated by sending
+            # invalid data of a non-null type
+            non_null_schemas = [
+                s
+                for s in value_schema.resolved_schemas
+                if not isinstance(s, NullSchema)
+            ]
+            value_schema = choice(non_null_schemas)

         # there may not be a current_value when invalidating an optional property
         current_value = properties.get(property_name, SENTINEL)
         if current_value is SENTINEL:
             # the current_value isn't very relevant as long as the type is correct
             # so no logic to handle Relations / objects / arrays here
-            property_type = value_schema
+            property_type = value_schema.type
             if property_type == "object":
                 current_value = {}
             elif property_type == "array":
                 current_value = []
             else:
-                current_value =
+                current_value = value_schema.get_valid_value()

         values_from_constraint = [
             r.values[0]
@@ -311,7 +245,7 @@ class Dto(ABC):
             field_name = field.name
             if field_name not in self.__dict__:
                 continue
-            original_name =
+            original_name = get_oas_name_from_safe_name(field_name)
             result[original_name] = getattr(self, field_name)

         return result
OpenApiLibCore/localized_faker.py
ADDED
@@ -0,0 +1,88 @@
+import datetime
+from typing import Callable
+
+import faker
+
+
+def fake_string(string_format: str) -> str:
+    """
+    Generate a random string based on the provided format if the format is supported.
+    """
+    # format names may contain -, which is invalid in Python naming
+    string_format = string_format.replace("-", "_")
+    fake_generator = getattr(FAKE, string_format, FAKE.uuid)
+    value: str = fake_generator()
+    if isinstance(value, datetime.datetime):
+        return value.strftime("%Y-%m-%dT%H:%M:%SZ")
+    return value
+
+
+class LocalizedFaker:
+    """Class to support setting a locale post-init."""
+
+    # pylint: disable=missing-function-docstring
+    def __init__(self) -> None:
+        self.fake = faker.Faker()
+
+    def set_locale(self, locale: str | list[str]) -> None:
+        """Update the fake attribute with a Faker instance with the provided locale."""
+        self.fake = faker.Faker(locale)
+
+    @property
+    def date(self) -> Callable[[], str]:
+        return self.fake.date
+
+    @property
+    def date_time(self) -> Callable[[], datetime.datetime]:
+        return self.fake.date_time
+
+    @property
+    def password(self) -> Callable[[], str]:
+        return self.fake.password
+
+    @property
+    def binary(self) -> Callable[[], bytes]:
+        return self.fake.binary
+
+    @property
+    def email(self) -> Callable[[], str]:
+        return self.fake.safe_email
+
+    @property
+    def uuid(self) -> Callable[[], str]:
+        return self.fake.uuid4
+
+    @property
+    def uri(self) -> Callable[[], str]:
+        return self.fake.uri
+
+    @property
+    def url(self) -> Callable[[], str]:
+        return self.fake.url
+
+    @property
+    def hostname(self) -> Callable[[], str]:
+        return self.fake.hostname
+
+    @property
+    def ipv4(self) -> Callable[[], str]:
+        return self.fake.ipv4
+
+    @property
+    def ipv6(self) -> Callable[[], str]:
+        return self.fake.ipv6
+
+    @property
+    def name(self) -> Callable[[], str]:
+        return self.fake.name
+
+    @property
+    def text(self) -> Callable[[], str]:
+        return self.fake.text
+
+    @property
+    def description(self) -> Callable[[], str]:
+        return self.fake.text
+
+
+FAKE = LocalizedFaker()
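The new localized_faker.py module exposes a single module-level `FAKE` instance plus the `fake_string` helper that maps OAS string formats onto Faker providers. A minimal usage sketch based on the code above (the locale and format values are only examples):

```python
from OpenApiLibCore.localized_faker import FAKE, fake_string

# Any Faker locale string (or list of locales) can be set after import.
FAKE.set_locale("nl_NL")

# "date-time" is normalized to "date_time", resolved via getattr(FAKE, ...)
# and, because Faker returns a datetime, formatted as an ISO-8601 string.
print(fake_string("date-time"))

# Unknown formats fall back to FAKE.uuid, so a uuid4 string is returned.
print(fake_string("no-such-format"))
```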