robotframework-openapitools 0.1.2__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
The only change to this module between 0.1.2 and 0.2.0 is in merge_schemas (line 80): keys that cannot be merged are now logged at debug level instead of warning level; the rest of the file is unchanged and shown as context.

@@ -1,314 +1,314 @@
  """
  Module holding the (base) classes that can be used by the user of the OpenApiLibCore
  to implement custom mappings for dependencies between resources in the API under
  test and constraints / restrictions on properties of the resources.
  """

  from abc import ABC
  from copy import deepcopy
  from dataclasses import dataclass, fields
  from logging import getLogger
  from random import choice, shuffle
  from typing import Any, Dict, List, Optional, Union
  from uuid import uuid4

  from OpenApiLibCore import value_utils

  logger = getLogger(__name__)

  NOT_SET = object()
  SENTINEL = object()


  def resolve_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
      """
      Helper function to resolve allOf, anyOf and oneOf instances in a schema.

      The schemas are used to generate values for headers, query parameters and json
      bodies to be able to make requests.
      """
      # Schema is mutable, so deepcopy to prevent mutation of original schema argument
      resolved_schema = deepcopy(schema)

      # allOf / anyOf / oneOf may be nested, so recursively resolve the dict-typed values
      for key, value in resolved_schema.items():
          if isinstance(value, dict):
              resolved_schema[key] = resolve_schema(value)

      # When handling allOf there should be no duplicate keys, so the schema parts can
      # just be merged after resolving the individual parts
      if schema_parts := resolved_schema.pop("allOf", None):
          for schema_part in schema_parts:
              resolved_part = resolve_schema(schema_part)
              resolved_schema = merge_schemas(resolved_schema, resolved_part)
      # Handling anyOf and oneOf requires extra logic to deal with the "type" information.
      # Some properties / parameters may be of different types and each type may have its
      # own restrictions, e.g. a parameter that accepts an enum value (string) or an
      # integer value within a certain range.
      # Since the library needs all this information for different purposes, the
      # schema_parts cannot be merged, so a helper property / key "types" is introduced.
      any_of = resolved_schema.pop("anyOf", [])
      one_of = resolved_schema.pop("oneOf", [])
      schema_parts = any_of if any_of else one_of

      for schema_part in schema_parts:
          resolved_part = resolve_schema(schema_part)
          if isinstance(resolved_part, dict) and "type" in resolved_part.keys():
              if "types" in resolved_schema.keys():
                  resolved_schema["types"].append(resolved_part)
              else:
                  resolved_schema["types"] = [resolved_part]
          else:
              resolved_schema = merge_schemas(resolved_schema, resolved_part)

      return resolved_schema


  def merge_schemas(first: Dict[str, Any], second: Dict[str, Any]) -> Dict[str, Any]:
      """Helper function to merge two schemas, recursively."""
      merged_schema = deepcopy(first)
      for key, value in second.items():
          # for existing keys, merge dict and list values, leave others unchanged
          if key in merged_schema.keys():
              if isinstance(value, dict):
                  # if the key holds a dict, merge the values (e.g. 'properties')
                  merged_schema[key].update(value)
              elif isinstance(value, list):
                  # if the key holds a list, extend the values (e.g. 'required')
                  merged_schema[key].extend(value)
              else:
-                 logger.warning(
+                 logger.debug(
                      f"key '{key}' with value '{merged_schema[key]}' not "
                      f"updated to '{value}'"
                  )
          else:
              merged_schema[key] = value
      return merged_schema


  class ResourceRelation(ABC):  # pylint: disable=too-few-public-methods
      """ABC for all resource relations or restrictions within the API."""

      property_name: str
      error_code: int


  @dataclass
  class PathPropertiesConstraint(ResourceRelation):
      """The resolved path for the endpoint."""

      path: str
      property_name: str = "id"
      error_code: int = 404


  @dataclass
  class PropertyValueConstraint(ResourceRelation):
      """The allowed values for property_name."""

      property_name: str
      values: List[Any]
      invalid_value: Any = NOT_SET
      invalid_value_error_code: int = 422
      error_code: int = 422


  @dataclass
  class IdDependency(ResourceRelation):
      """The path where a valid id for the property_name can be gotten (using GET)."""

      property_name: str
      get_path: str
      operation_id: Optional[str] = None
      error_code: int = 422


  @dataclass
  class IdReference(ResourceRelation):
      """The path where a resource that needs this resource's id can be created (using POST)."""

      property_name: str
      post_path: str
      error_code: int = 422


  @dataclass
  class UniquePropertyValueConstraint(ResourceRelation):
      """The value of the property must be unique within the resource scope."""

      property_name: str
      value: Any
      error_code: int = 422


  Relation = Union[
      IdDependency,
      IdReference,
      PathPropertiesConstraint,
      PropertyValueConstraint,
      UniquePropertyValueConstraint,
  ]


  @dataclass
  class Dto(ABC):
      """Base class for the Dto class."""

      @staticmethod
      def get_parameter_relations() -> List[Relation]:
          """Return the list of Relations for the header and query parameters."""
          return []

      def get_parameter_relations_for_error_code(self, error_code: int) -> List[Relation]:
          """Return the list of Relations associated with the given error_code."""
          relations: List[Relation] = [
              r
              for r in self.get_parameter_relations()
              if r.error_code == error_code
              or (
                  getattr(r, "invalid_value_error_code", None) == error_code
                  and getattr(r, "invalid_value", None) != NOT_SET
              )
          ]
          return relations

      @staticmethod
      def get_relations() -> List[Relation]:
          """Return the list of Relations for the (json) body."""
          return []

      def get_relations_for_error_code(self, error_code: int) -> List[Relation]:
          """Return the list of Relations associated with the given error_code."""
          relations: List[Relation] = [
              r
              for r in self.get_relations()
              if r.error_code == error_code
              or (
                  getattr(r, "invalid_value_error_code", None) == error_code
                  and getattr(r, "invalid_value", None) != NOT_SET
              )
          ]
          return relations

      def get_invalidated_data(
          self,
          schema: Dict[str, Any],
          status_code: int,
          invalid_property_default_code: int,
      ) -> Dict[str, Any]:
          """Return a data set with one of the properties set to an invalid value or type."""
          properties: Dict[str, Any] = self.as_dict()

          schema = resolve_schema(schema)

          relations = self.get_relations_for_error_code(error_code=status_code)
          # filter PathPropertiesConstraints since in that case no data can be invalidated
          relations = [
              r for r in relations if not isinstance(r, PathPropertiesConstraint)
          ]
          property_names = [r.property_name for r in relations]
          if status_code == invalid_property_default_code:
              # add all properties defined in the schema, including optional properties
              property_names.extend((schema["properties"].keys()))
              # remove duplicates
              property_names = list(set(property_names))
          if not property_names:
              raise ValueError(
                  f"No property can be invalidated to cause status_code {status_code}"
              )
          # shuffle the property_names so different properties on the Dto are invalidated
          # when rerunning the test
          shuffle(property_names)
          for property_name in property_names:
              # if possible, invalidate a constraint but send otherwise valid data
              id_dependencies = [
                  r
                  for r in relations
                  if isinstance(r, IdDependency) and r.property_name == property_name
              ]
              if id_dependencies:
                  invalid_value = uuid4().hex
                  logger.debug(
                      f"Breaking IdDependency for status_code {status_code}: replacing "
                      f"{properties[property_name]} with {invalid_value}"
                  )
                  properties[property_name] = invalid_value
                  return properties

              invalid_value_from_constraint = [
                  r.invalid_value
                  for r in relations
                  if isinstance(r, PropertyValueConstraint)
                  and r.property_name == property_name
                  and r.invalid_value_error_code == status_code
              ]
              if (
                  invalid_value_from_constraint
                  and invalid_value_from_constraint[0] is not NOT_SET
              ):
                  properties[property_name] = invalid_value_from_constraint[0]
                  logger.debug(
                      f"Using invalid_value {invalid_value_from_constraint[0]} to "
                      f"invalidate property {property_name}"
                  )
                  return properties

              value_schema = schema["properties"][property_name]
              value_schema = resolve_schema(value_schema)

              # Filter "type": "null" from the possible types since this indicates an
              # optional / nullable property that can only be invalidated by sending
              # invalid data of a non-null type
              if value_schemas := value_schema.get("types"):
                  if len(value_schemas) > 1:
                      value_schemas = [
                          schema for schema in value_schemas if schema["type"] != "null"
                      ]
                  value_schema = choice(value_schemas)

              # there may not be a current_value when invalidating an optional property
              current_value = properties.get(property_name, SENTINEL)
              if current_value is SENTINEL:
                  # the current_value isn't very relevant as long as the type is correct
                  # so no logic to handle Relations / objects / arrays here
                  property_type = value_schema["type"]
                  if property_type == "object":
                      current_value = {}
                  elif property_type == "array":
                      current_value = []
                  else:
                      current_value = value_utils.get_valid_value(value_schema)

              values_from_constraint = [
                  r.values[0]
                  for r in relations
                  if isinstance(r, PropertyValueConstraint)
                  and r.property_name == property_name
              ]

              invalid_value = value_utils.get_invalid_value(
                  value_schema=value_schema,
                  current_value=current_value,
                  values_from_constraint=values_from_constraint,
              )
              properties[property_name] = invalid_value
              logger.debug(
                  f"Property {property_name} changed to {invalid_value} (received from "
                  f"get_invalid_value)"
              )
              return properties
          logger.warning("get_invalidated_data returned unchanged properties")
          return properties  # pragma: no cover

      def as_dict(self) -> Dict[Any, Any]:
          """Return the dict representation of the Dto."""
          result = {}

          for field in fields(self):
              field_name = field.name
              if field_name not in self.__dict__:
                  continue
              original_name = field.metadata["original_property_name"]
              result[original_name] = getattr(self, field_name)

          return result
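For reference, the module shown above defines the building blocks (the ResourceRelation subclasses and the Dto base class) that users of OpenApiLibCore can combine to express custom mappings for their API. The sketch below is a minimal, hypothetical illustration of that pattern and is not code from either package version: the EmployeeDto class, the paths, property names and status codes are invented, and it assumes the classes are importable from OpenApiLibCore (the diff does not show the file name or the package's exports).

# Hypothetical custom-mapping sketch; the class name, paths, property names and
# status codes are invented, and the import path is an assumption.
from dataclasses import dataclass
from typing import List

from OpenApiLibCore import Dto, IdDependency, PropertyValueConstraint, Relation


@dataclass
class EmployeeDto(Dto):
    """Relations for a hypothetical /employees resource."""

    @staticmethod
    def get_relations() -> List[Relation]:
        return [
            # a valid wagegroup_id must be obtained via a GET on /wagegroups
            IdDependency(
                property_name="wagegroup_id",
                get_path="/wagegroups",
            ),
            # only these dates are accepted; sending the invalid_value should
            # trigger a 403 instead of the default 422
            PropertyValueConstraint(
                property_name="date_of_birth",
                values=["1970-07-07", "1980-08-08", "1990-09-09"],
                invalid_value="2020-02-20",
                invalid_value_error_code=403,
            ),
        ]

With such a mapping, get_relations_for_error_code(422) returns both relations (their default error_code is 422), and get_invalidated_data can use them to deliberately break a request body: an IdDependency is invalidated by substituting a random uuid4 hex, and a PropertyValueConstraint by its invalid_value when the requested status code matches invalid_value_error_code.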