schemathesis 3.29.1__py3-none-any.whl → 3.30.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- schemathesis/__init__.py +3 -3
- schemathesis/_compat.py +2 -2
- schemathesis/_dependency_versions.py +1 -3
- schemathesis/_hypothesis.py +6 -0
- schemathesis/_lazy_import.py +1 -0
- schemathesis/_override.py +1 -0
- schemathesis/_rate_limiter.py +2 -1
- schemathesis/_xml.py +1 -0
- schemathesis/auths.py +4 -2
- schemathesis/checks.py +8 -5
- schemathesis/cli/__init__.py +8 -1
- schemathesis/cli/callbacks.py +3 -4
- schemathesis/cli/cassettes.py +6 -4
- schemathesis/cli/constants.py +2 -0
- schemathesis/cli/context.py +3 -0
- schemathesis/cli/debug.py +2 -1
- schemathesis/cli/handlers.py +1 -1
- schemathesis/cli/options.py +1 -0
- schemathesis/cli/output/default.py +50 -22
- schemathesis/cli/output/short.py +21 -10
- schemathesis/cli/sanitization.py +1 -0
- schemathesis/code_samples.py +1 -0
- schemathesis/constants.py +1 -0
- schemathesis/contrib/openapi/__init__.py +1 -1
- schemathesis/contrib/openapi/fill_missing_examples.py +2 -0
- schemathesis/contrib/openapi/formats/uuid.py +2 -1
- schemathesis/contrib/unique_data.py +2 -1
- schemathesis/exceptions.py +40 -26
- schemathesis/experimental/__init__.py +14 -0
- schemathesis/extra/_aiohttp.py +1 -0
- schemathesis/extra/_server.py +1 -0
- schemathesis/extra/pytest_plugin.py +13 -24
- schemathesis/failures.py +32 -3
- schemathesis/filters.py +2 -1
- schemathesis/fixups/__init__.py +1 -0
- schemathesis/fixups/fast_api.py +2 -2
- schemathesis/fixups/utf8_bom.py +1 -2
- schemathesis/generation/__init__.py +2 -1
- schemathesis/hooks.py +3 -1
- schemathesis/internal/copy.py +19 -3
- schemathesis/internal/deprecation.py +1 -1
- schemathesis/internal/jsonschema.py +2 -1
- schemathesis/internal/result.py +1 -1
- schemathesis/internal/transformation.py +1 -0
- schemathesis/lazy.py +3 -2
- schemathesis/loaders.py +4 -2
- schemathesis/models.py +20 -5
- schemathesis/parameters.py +1 -0
- schemathesis/runner/__init__.py +1 -1
- schemathesis/runner/events.py +21 -4
- schemathesis/runner/impl/core.py +61 -33
- schemathesis/runner/impl/solo.py +2 -1
- schemathesis/runner/impl/threadpool.py +4 -0
- schemathesis/runner/probes.py +1 -1
- schemathesis/runner/serialization.py +1 -1
- schemathesis/sanitization.py +2 -0
- schemathesis/schemas.py +1 -4
- schemathesis/service/ci.py +1 -0
- schemathesis/service/client.py +7 -7
- schemathesis/service/events.py +2 -1
- schemathesis/service/extensions.py +5 -5
- schemathesis/service/hosts.py +1 -0
- schemathesis/service/metadata.py +2 -1
- schemathesis/service/models.py +2 -1
- schemathesis/service/report.py +3 -3
- schemathesis/service/serialization.py +54 -23
- schemathesis/service/usage.py +1 -0
- schemathesis/specs/graphql/_cache.py +1 -1
- schemathesis/specs/graphql/loaders.py +1 -1
- schemathesis/specs/graphql/nodes.py +1 -0
- schemathesis/specs/graphql/scalars.py +2 -2
- schemathesis/specs/graphql/schemas.py +7 -7
- schemathesis/specs/graphql/validation.py +1 -2
- schemathesis/specs/openapi/_hypothesis.py +17 -11
- schemathesis/specs/openapi/checks.py +102 -9
- schemathesis/specs/openapi/converter.py +2 -1
- schemathesis/specs/openapi/definitions.py +2 -1
- schemathesis/specs/openapi/examples.py +7 -9
- schemathesis/specs/openapi/expressions/__init__.py +29 -2
- schemathesis/specs/openapi/expressions/context.py +1 -1
- schemathesis/specs/openapi/expressions/extractors.py +23 -0
- schemathesis/specs/openapi/expressions/lexer.py +19 -18
- schemathesis/specs/openapi/expressions/nodes.py +24 -4
- schemathesis/specs/openapi/expressions/parser.py +26 -5
- schemathesis/specs/openapi/filters.py +1 -0
- schemathesis/specs/openapi/links.py +35 -7
- schemathesis/specs/openapi/loaders.py +13 -11
- schemathesis/specs/openapi/negative/__init__.py +2 -1
- schemathesis/specs/openapi/negative/mutations.py +1 -0
- schemathesis/specs/openapi/parameters.py +1 -0
- schemathesis/specs/openapi/schemas.py +27 -38
- schemathesis/specs/openapi/security.py +1 -0
- schemathesis/specs/openapi/serialization.py +1 -0
- schemathesis/specs/openapi/stateful/__init__.py +159 -70
- schemathesis/specs/openapi/stateful/statistic.py +198 -0
- schemathesis/specs/openapi/stateful/types.py +13 -0
- schemathesis/specs/openapi/utils.py +1 -0
- schemathesis/specs/openapi/validation.py +1 -0
- schemathesis/stateful/__init__.py +4 -2
- schemathesis/stateful/config.py +66 -0
- schemathesis/stateful/context.py +93 -0
- schemathesis/stateful/events.py +209 -0
- schemathesis/stateful/runner.py +233 -0
- schemathesis/stateful/sink.py +68 -0
- schemathesis/stateful/state_machine.py +39 -22
- schemathesis/stateful/statistic.py +20 -0
- schemathesis/stateful/validation.py +66 -0
- schemathesis/targets.py +1 -0
- schemathesis/throttling.py +23 -3
- schemathesis/transports/__init__.py +28 -10
- schemathesis/transports/auth.py +1 -0
- schemathesis/transports/content_types.py +1 -1
- schemathesis/transports/headers.py +2 -1
- schemathesis/transports/responses.py +6 -4
- schemathesis/types.py +1 -0
- schemathesis/utils.py +1 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/METADATA +1 -1
- schemathesis-3.30.0.dist-info/RECORD +150 -0
- schemathesis/specs/openapi/stateful/links.py +0 -94
- schemathesis-3.29.1.dist-info/RECORD +0 -141
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/WHEEL +0 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,4 +1,5 @@
 from __future__ import annotations
+
 import string
 import time
 from base64 import b64encode
@@ -9,28 +10,29 @@ from typing import Any, Callable, Dict, Iterable, Optional
 from urllib.parse import quote_plus
 from weakref import WeakKeyDictionary
 
-from hypothesis import
+from hypothesis import reject
+from hypothesis import strategies as st
 from hypothesis_jsonschema import from_schema
 from requests.auth import _basic_auth_str
 from requests.structures import CaseInsensitiveDict
 from requests.utils import to_key_val_list
 
+from ... import auths, serializers
 from ..._hypothesis import prepare_urlencoded
 from ...constants import NOT_SET
-from
-from ... import auths, serializers
+from ...exceptions import BodyInGetRequestError, SerializationNotPossible
 from ...generation import DataGenerationMethod, GenerationConfig
-from ...internal.copy import fast_deepcopy
-from ...exceptions import SerializationNotPossible, BodyInGetRequestError
 from ...hooks import HookContext, HookDispatcher, apply_to_all_dispatchers
+from ...internal.copy import fast_deepcopy
 from ...internal.validation import is_illegal_surrogate
 from ...models import APIOperation, Case, cant_serialize
+from ...serializers import Binary
 from ...transports.content_types import parse_content_type
 from ...transports.headers import has_invalid_characters, is_latin_1_encodable
 from ...types import NotSet
-from ...serializers import Binary
 from ...utils import compose, skip
 from .constants import LOCATION_TO_CONTAINER
+from .formats import STRING_FORMATS
 from .media_types import MEDIA_TYPES
 from .negative import negative_schema
 from .negative.utils import can_negate
@@ -360,11 +362,15 @@ def get_parameters_strategy(
     if operation in _PARAMETER_STRATEGIES_CACHE and nested_cache_key in _PARAMETER_STRATEGIES_CACHE[operation]:
         return _PARAMETER_STRATEGIES_CACHE[operation][nested_cache_key]
     schema = parameters_to_json_schema(operation, parameters)
-    if
-
-
-
-
+    if location == "path":
+        if not operation.schema.validate_schema:
+            # If schema validation is disabled, we try to generate data even if the parameter definition
+            # contains errors.
+            # In this case, we know that the `required` keyword should always be `True`.
+            schema["required"] = list(schema["properties"])
+        for prop in schema.get("properties", {}).values():
+            if prop.get("type") == "string":
+                prop.setdefault("minLength", 1)
     schema = operation.schema.prepare_schema(schema)
     for name in exclude:
         # Values from `exclude` are not necessarily valid for the schema - they come from user-defined examples
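The new "path" branch above can be read as a standalone sketch (illustrative only, not part of the diff; the schema literal is made up):

    # Path parameters are always required, even when an unvalidated definition omits `required`,
    # and empty strings are ruled out so `/users/{user_id}` never collapses into `/users/`.
    schema = {"properties": {"user_id": {"type": "string"}}}
    schema["required"] = list(schema["properties"])  # -> ["user_id"]
    for prop in schema.get("properties", {}).values():
        if prop.get("type") == "string":
            prop.setdefault("minLength", 1)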
@@ -1,4 +1,6 @@
 from __future__ import annotations
+
+from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, Generator, NoReturn
 
 from ... import failures
@@ -6,15 +8,17 @@ from ...exceptions import (
     get_headers_error,
     get_malformed_media_type_error,
     get_missing_content_type_error,
+    get_negative_rejection_error,
     get_response_type_error,
     get_status_code_error,
+    get_use_after_free_error,
 )
 from ...transports.content_types import parse_content_type
 from .utils import expand_status_code
 
 if TYPE_CHECKING:
-    from ...transports.responses import GenericResponse
     from ...models import Case
+    from ...transports.responses import GenericResponse
 
 
 def status_code_conformance(response: GenericResponse, case: Case) -> bool | None:
@@ -30,7 +34,7 @@ def status_code_conformance(response: GenericResponse, case: Case) -> bool | Non
     if response.status_code not in allowed_status_codes:
         defined_status_codes = list(map(str, responses))
         responses_list = ", ".join(defined_status_codes)
-        exc_class = get_status_code_error(response.status_code)
+        exc_class = get_status_code_error(case.operation.verbose_name, response.status_code)
         raise exc_class(
             failures.UndefinedStatusCode.title,
             context=failures.UndefinedStatusCode(
@@ -59,7 +63,7 @@ def content_type_conformance(response: GenericResponse, case: Case) -> bool | No
     content_type = response.headers.get("Content-Type")
     if not content_type:
         formatted_content_types = [f"\n- `{content_type}`" for content_type in documented_content_types]
-        raise get_missing_content_type_error()(
+        raise get_missing_content_type_error(case.operation.verbose_name)(
             failures.MissingContentType.title,
             context=failures.MissingContentType(
                 message=f"The following media types are documented in the schema:{''.join(formatted_content_types)}",
@@ -70,14 +74,16 @@ def content_type_conformance(response: GenericResponse, case: Case) -> bool | No
         try:
             expected_main, expected_sub = parse_content_type(option)
         except ValueError as exc:
-            _reraise_malformed_media_type(exc, "Schema", option, option)
+            _reraise_malformed_media_type(case, exc, "Schema", option, option)
         try:
            received_main, received_sub = parse_content_type(content_type)
        except ValueError as exc:
-            _reraise_malformed_media_type(exc, "Response", content_type, option)
+            _reraise_malformed_media_type(case, exc, "Response", content_type, option)
         if (expected_main, expected_sub) == (received_main, received_sub):
             return None
-    exc_class = get_response_type_error(
+    exc_class = get_response_type_error(
+        case.operation.verbose_name, f"{expected_main}_{expected_sub}", f"{received_main}_{received_sub}"
+    )
     raise exc_class(
         failures.UndefinedContentType.title,
         context=failures.UndefinedContentType(
@@ -88,9 +94,9 @@ def content_type_conformance(response: GenericResponse, case: Case) -> bool | No
     )
 
 
-def _reraise_malformed_media_type(exc: ValueError, location: str, actual: str, defined: str) -> NoReturn:
+def _reraise_malformed_media_type(case: Case, exc: ValueError, location: str, actual: str, defined: str) -> NoReturn:
     message = f"Media type for {location} is incorrect\n\nReceived: {actual}\nDocumented: {defined}"
-    raise get_malformed_media_type_error(message)(
+    raise get_malformed_media_type_error(case.operation.verbose_name, message)(
         failures.MalformedMediaType.title,
         context=failures.MalformedMediaType(message=message, actual=actual, defined=defined),
     ) from exc
@@ -114,7 +120,7 @@ def response_headers_conformance(response: GenericResponse, case: Case) -> bool
         return None
     formatted_headers = [f"\n- `{header}`" for header in missing_headers]
     message = f"The following required headers are missing from the response:{''.join(formatted_headers)}"
-    exc_class = get_headers_error(message)
+    exc_class = get_headers_error(case.operation.verbose_name, message)
     raise exc_class(
         failures.MissingHeaders.title,
         context=failures.MissingHeaders(message=message, missing_headers=missing_headers),
@@ -127,3 +133,90 @@ def response_schema_conformance(response: GenericResponse, case: Case) -> bool |
     if not isinstance(case.operation.schema, BaseOpenAPISchema):
         return True
     return case.operation.validate_response(response)
+
+
+def negative_data_rejection(response: GenericResponse, case: Case) -> bool | None:
+    from .schemas import BaseOpenAPISchema
+
+    if not isinstance(case.operation.schema, BaseOpenAPISchema):
+        return True
+    if case.data_generation_method and case.data_generation_method.is_negative and 200 <= response.status_code < 300:
+        exc_class = get_negative_rejection_error(case.operation.verbose_name, response.status_code)
+        raise exc_class(
+            failures.AcceptedNegativeData.title,
+            context=failures.AcceptedNegativeData(message="Negative data was not rejected as expected by the API"),
+        )
+    return None
+
+
+def use_after_free(response: GenericResponse, original: Case) -> bool | None:
+    from ...transports.responses import get_reason
+    from .schemas import BaseOpenAPISchema
+
+    if not isinstance(original.operation.schema, BaseOpenAPISchema):
+        return True
+    if response.status_code == 404 or not original.source:
+        return None
+    response = original.source.response
+    case = original.source.case
+    while True:
+        # Find the most recent successful DELETE call that corresponds to the current operation
+        if case.operation.method.lower() == "delete" and 200 <= response.status_code < 300:
+            if _is_prefix_operation(
+                ResourcePath(case.path, case.path_parameters or {}),
+                ResourcePath(original.path, original.path_parameters or {}),
+            ):
+                free = f"{case.operation.method.upper()} {case.formatted_path}"
+                usage = f"{original.operation.method} {original.formatted_path}"
+                exc_class = get_use_after_free_error(case.operation.verbose_name)
+                reason = get_reason(response.status_code)
+                message = (
+                    "The API did not return a `HTTP 404 Not Found` response "
+                    f"(got `HTTP {response.status_code} {reason}`) for a resource that was previously deleted.\n\nThe resource was deleted with `{free}`"
+                )
+                raise exc_class(
+                    failures.UseAfterFree.title,
+                    context=failures.UseAfterFree(
+                        message=message,
+                        free=free,
+                        usage=usage,
+                    ),
+                )
+        if case.source is None:
+            break
+        response = case.source.response
+        case = case.source.case
+    return None
+
+
+@dataclass
+class ResourcePath:
+    """A path to a resource with variables."""
+
+    value: str
+    variables: dict[str, str]
+
+    __slots__ = ("value", "variables")
+
+    def get(self, key: str) -> str:
+        return self.variables[key.lstrip("{").rstrip("}")]
+
+
+def _is_prefix_operation(lhs: ResourcePath, rhs: ResourcePath) -> bool:
+    lhs_parts = lhs.value.rstrip("/").split("/")
+    rhs_parts = rhs.value.rstrip("/").split("/")
+
+    # Left has more parts, can't be a prefix
+    if len(lhs_parts) > len(rhs_parts):
+        return False
+
+    for left, right in zip(lhs_parts, rhs_parts):
+        if left.startswith("{") and right.startswith("{"):
+            if str(lhs.get(left)) != str(rhs.get(right)):
+                return False
+        elif left != right and left.rstrip("s") != right.rstrip("s"):
+            # Parts don't match, not a prefix
+            return False
+
+    # If we've reached this point, the LHS path is a prefix of the RHS path
+    return True
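For orientation, the prefix heuristic behind the new `use_after_free` check behaves roughly as follows (illustrative values only; assumes this hunk belongs to schemathesis/specs/openapi/checks.py from the file list above):

    from schemathesis.specs.openapi.checks import ResourcePath, _is_prefix_operation

    _is_prefix_operation(
        ResourcePath("/users/{user_id}", {"user_id": "42"}),  # path of an earlier, successful DELETE
        ResourcePath("/users/{id}/orders", {"id": "42"}),     # path of the current call
    )
    # -> True: the DELETE path is a prefix of the current path and the ids match,
    # so a non-404 response to the current call is reported as "use after free".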
@@ -1,9 +1,10 @@
 from __future__ import annotations
+
 from itertools import chain
 from typing import Any, Callable
 
-from ...internal.jsonschema import traverse_schema
 from ...internal.copy import fast_deepcopy
+from ...internal.jsonschema import traverse_schema
 
 
 def to_json_schema(
@@ -10,15 +10,14 @@ import requests
 from hypothesis.strategies import SearchStrategy
 from hypothesis_jsonschema import from_schema
 
+from ..._hypothesis import get_single_example
 from ...constants import DEFAULT_RESPONSE_TIMEOUT
 from ...models import APIOperation, Case
-from ..._hypothesis import get_single_example
 from ._hypothesis import get_case_strategy, get_default_format_strategies
-from .formats import STRING_FORMATS
 from .constants import LOCATION_TO_CONTAINER
+from .formats import STRING_FORMATS
 from .parameters import OpenAPIBody, OpenAPIParameter
 
-
 if TYPE_CHECKING:
     from ...generation import GenerationConfig
 
@@ -43,9 +42,7 @@ class BodyExample:
 Example = Union[ParameterExample, BodyExample]
 
 
-def get_strategies_from_examples(
-    operation: APIOperation[OpenAPIParameter, Case], examples_field: str = "examples"
-) -> list[SearchStrategy[Case]]:
+def get_strategies_from_examples(operation: APIOperation[OpenAPIParameter, Case]) -> list[SearchStrategy[Case]]:
     """Build a set of strategies that generate test cases based on explicit examples in the schema."""
     maps = {}
     for location, container in LOCATION_TO_CONTAINER.items():
@@ -183,7 +180,7 @@ def extract_inner_examples(
 ) -> Generator[Any, None, None]:
     """Extract exact examples values from the `examples` dictionary."""
     for name, example in examples.items():
-        if "$ref" in unresolved_definition[name]:
+        if "$ref" in unresolved_definition[name] and "value" not in example and "externalValue" not in example:
             # The example here is a resolved example and should be yielded as is
             yield example
         if isinstance(example, dict):
@@ -214,8 +211,9 @@ def extract_from_schemas(operation: APIOperation[OpenAPIParameter, Case]) -> Gen
     for alternative in operation.body:
         alternative = cast(OpenAPIBody, alternative)
         schema = alternative.as_json_schema(operation)
-        for
-
+        for example_field, examples_field in (("example", "examples"), ("x-example", "x-examples")):
+            for value in extract_from_schema(operation, schema, example_field, examples_field):
+                yield BodyExample(value=value, media_type=alternative.media_type)
 
 
 def extract_from_schema(
@@ -3,14 +3,19 @@
 https://swagger.io/docs/specification/links/#runtime-expressions
 """
 
+from __future__ import annotations
+
+import json
 from typing import Any
 
 from . import lexer, nodes, parser
 from .context import ExpressionContext
 
 
-def evaluate(expr: Any, context: ExpressionContext) -> str:
+def evaluate(expr: Any, context: ExpressionContext, evaluate_nested: bool = False) -> Any:
     """Evaluate runtime expression in context."""
+    if isinstance(expr, (dict, list)) and evaluate_nested:
+        return _evaluate_nested(expr, context)
     if not isinstance(expr, str):
         # Can be a non-string constant
         return expr
@@ -18,4 +23,26 @@ def evaluate(expr: Any, context: ExpressionContext) -> str:
     if len(parts) == 1:
         return parts[0]  # keep the return type the same as the internal value type
     # otherwise, concatenate into a string
-    return "".join(
+    return "".join(str(part) for part in parts if part is not None)
+
+
+def _evaluate_nested(expr: dict[str, Any] | list, context: ExpressionContext) -> Any:
+    if isinstance(expr, dict):
+        return {
+            _evaluate_object_key(key, context): evaluate(value, context, evaluate_nested=True)
+            for key, value in expr.items()
+        }
+    return [evaluate(item, context, evaluate_nested=True) for item in expr]
+
+
+def _evaluate_object_key(key: str, context: ExpressionContext) -> Any:
+    evaluated = evaluate(key, context)
+    if isinstance(evaluated, str):
+        return evaluated
+    if isinstance(evaluated, bool):
+        return "true" if evaluated else "false"
+    if isinstance(evaluated, (int, float)):
+        return str(evaluated)
+    if evaluated is None:
+        return "null"
+    return json.dumps(evaluated)
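With the new `evaluate_nested` flag, whole mappings and lists of runtime expressions are resolved recursively. A hedged sketch of the effect (the link body and response values are made up):

    # A link parameter defined as a mapping, e.g.
    #     {"userId": "$response.body#/id", "source": "link"}
    # passed through `evaluate(..., evaluate_nested=True)` against a captured response
    # whose body was {"id": 42} resolves to
    #     {"userId": 42, "source": "link"}
    # with non-string keys normalized by `_evaluate_object_key`.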
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+import re
+from dataclasses import dataclass
+
+
+@dataclass
+class Extractor:
+    def extract(self, value: str) -> str | None:
+        raise NotImplementedError
+
+
+@dataclass
+class RegexExtractor(Extractor):
+    """Extract value via a regex."""
+
+    value: re.Pattern
+
+    def extract(self, value: str) -> str | None:
+        match = self.value.search(value)
+        if match is None:
+            return None
+        return match.group(1)
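A minimal usage sketch for the new `RegexExtractor` (pattern and inputs are made up; the import path follows the new file listed above):

    import re

    from schemathesis.specs.openapi.expressions.extractors import RegexExtractor

    extractor = RegexExtractor(re.compile(r"/users/(\d+)"))
    extractor.extract("/users/42")  # -> "42" (first capturing group)
    extractor.extract("/orders/7")  # -> None (no match)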
@@ -20,33 +20,34 @@ class Token:
     """Lexical token that may occur in a runtime expression."""
 
     value: str
+    end: int
     type_: TokenType
 
     # Helpers for cleaner instantiation
 
     @classmethod
-    def variable(cls, value: str) -> "Token":
-        return cls(value, TokenType.VARIABLE)
+    def variable(cls, value: str, end: int) -> "Token":
+        return cls(value, end, TokenType.VARIABLE)
 
     @classmethod
-    def string(cls, value: str) -> "Token":
-        return cls(value, TokenType.STRING)
+    def string(cls, value: str, end: int) -> "Token":
+        return cls(value, end, TokenType.STRING)
 
     @classmethod
-    def pointer(cls, value: str) -> "Token":
-        return cls(value, TokenType.POINTER)
+    def pointer(cls, value: str, end: int) -> "Token":
+        return cls(value, end, TokenType.POINTER)
 
     @classmethod
-    def lbracket(cls) -> "Token":
-        return cls("{", TokenType.LBRACKET)
+    def lbracket(cls, end: int) -> "Token":
+        return cls("{", end, TokenType.LBRACKET)
 
     @classmethod
-    def rbracket(cls) -> "Token":
-        return cls("}", TokenType.RBRACKET)
+    def rbracket(cls, end: int) -> "Token":
+        return cls("}", end, TokenType.RBRACKET)
 
     @classmethod
-    def dot(cls) -> "Token":
-        return cls(".", TokenType.DOT)
+    def dot(cls, end: int) -> "Token":
+        return cls(".", end, TokenType.DOT)
 
     # Helpers for simpler type comparison
 
@@ -103,15 +104,15 @@ def tokenize(expression: str) -> TokenGenerator:
         if current_symbol() == "$":
             start = cursor
             move_until(lambda: is_eol() or current_symbol() in stop_symbols)
-            yield Token.variable(expression[start:cursor])
+            yield Token.variable(expression[start:cursor], cursor - 1)
         elif current_symbol() == ".":
-            yield Token.dot()
+            yield Token.dot(cursor)
             move()
         elif current_symbol() == "{":
-            yield Token.lbracket()
+            yield Token.lbracket(cursor)
             move()
         elif current_symbol() == "}":
-            yield Token.rbracket()
+            yield Token.rbracket(cursor)
             move()
         elif current_symbol() == "#":
             start = cursor
@@ -126,8 +127,8 @@ def tokenize(expression: str) -> TokenGenerator:
             # `ID_{$response.body#/foo}_{$response.body#/bar}`
             # Which is much easier if we treat `}` as a closing bracket of an embedded runtime expression
             move_until(lambda: is_eol() or current_symbol() == "}")
-            yield Token.pointer(expression[start:cursor])
+            yield Token.pointer(expression[start:cursor], cursor - 1)
         else:
             start = cursor
             move_until(lambda: is_eol() or current_symbol() in stop_symbols)
-            yield Token.string(expression[start:cursor])
+            yield Token.string(expression[start:cursor], cursor - 1)
@@ -1,6 +1,7 @@
 """Expression nodes description and evaluation logic."""
 
 from __future__ import annotations
+
 from dataclasses import dataclass
 from enum import Enum, unique
 from typing import Any
@@ -9,6 +10,7 @@ from requests.structures import CaseInsensitiveDict
 
 from .. import references
 from .context import ExpressionContext
+from .extractors import Extractor
 
 
 @dataclass
@@ -74,6 +76,7 @@ class NonBodyRequest(Node):
 
     location: str
     parameter: str
+    extractor: Extractor | None = None
 
     def evaluate(self, context: ExpressionContext) -> str:
         container: dict | CaseInsensitiveDict = {
@@ -83,7 +86,12 @@
         }[self.location] or {}
         if self.location == "header":
             container = CaseInsensitiveDict(container)
-
+        value = container.get(self.parameter)
+        if value is None:
+            return ""
+        if self.extractor is not None:
+            return self.extractor.extract(value) or ""
+        return value
 
 
 @dataclass
@@ -96,7 +104,10 @@ class BodyRequest(Node):
         document = context.case.body
         if self.pointer is None:
             return document
-
+        resolved = references.resolve_pointer(document, self.pointer[1:])
+        if resolved is references.UNRESOLVABLE:
+            return None
+        return resolved
 
 
 @dataclass
@@ -104,9 +115,15 @@ class HeaderResponse(Node):
     """A node for `$response.header` expressions."""
 
     parameter: str
+    extractor: Extractor | None = None
 
     def evaluate(self, context: ExpressionContext) -> str:
-
+        value = context.response.headers.get(self.parameter)
+        if value is None:
+            return ""
+        if self.extractor is not None:
+            return self.extractor.extract(value) or ""
+        return value
 
 
 @dataclass
@@ -125,4 +142,7 @@ class BodyResponse(Node):
         if self.pointer is None:
             # We need the parsed document - data will be serialized before sending to the application
             return document
-
+        resolved = references.resolve_pointer(document, self.pointer[1:])
+        if resolved is references.UNRESOLVABLE:
+            return None
+        return resolved
@@ -1,8 +1,10 @@
 from __future__ import annotations
+
+import re
 from functools import lru_cache
 from typing import Generator
 
-from . import lexer, nodes
+from . import extractors, lexer, nodes
 from .errors import RuntimeExpressionError, UnknownToken
 
 
@@ -53,7 +55,8 @@ def _parse_request(tokens: lexer.TokenGenerator, expr: str) -> nodes.BodyRequest
     if location.value in ("query", "path", "header"):
         skip_dot(tokens, f"$request.{location.value}")
         parameter = take_string(tokens, expr)
-
+        extractor = take_extractor(tokens, expr, parameter.end)
+        return nodes.NonBodyRequest(location.value, parameter.value, extractor)
     if location.value == "body":
         try:
             token = next(tokens)
@@ -70,7 +73,8 @@ def _parse_response(tokens: lexer.TokenGenerator, expr: str) -> nodes.HeaderResp
     if location.value == "header":
         skip_dot(tokens, f"$response.{location.value}")
         parameter = take_string(tokens, expr)
-
+        extractor = take_extractor(tokens, expr, parameter.end)
+        return nodes.HeaderResponse(parameter.value, extractor=extractor)
     if location.value == "body":
         try:
             token = next(tokens)
@@ -87,8 +91,25 @@ def skip_dot(tokens: lexer.TokenGenerator, name: str) -> None:
         raise RuntimeExpressionError(f"`{name}` expression should be followed by a dot (`.`). Got: {token.value}")
 
 
-def take_string(tokens: lexer.TokenGenerator, expr: str) ->
+def take_string(tokens: lexer.TokenGenerator, expr: str) -> lexer.Token:
     parameter = next(tokens)
     if not parameter.is_string:
         raise RuntimeExpressionError(f"Invalid expression: {expr}")
-    return parameter
+    return parameter
+
+
+def take_extractor(tokens: lexer.TokenGenerator, expr: str, current_end: int) -> extractors.Extractor | None:
+    rest = expr[current_end + 1 :]
+    if not rest or rest.startswith("}"):
+        return None
+    extractor = next(tokens)
+    if not extractor.value.startswith("#regex:"):
+        raise RuntimeExpressionError(f"Invalid extractor: {expr}")
+    pattern = extractor.value[len("#regex:") :]
+    try:
+        compiled = re.compile(pattern)
+    except re.error as exc:
+        raise RuntimeExpressionError(f"Invalid regex extractor: {exc}") from None
+    if compiled.groups != 1:
+        raise RuntimeExpressionError("Regex extractor should have exactly one capturing group")
+    return extractors.RegexExtractor(compiled)
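Together with the lexer and node changes above, `take_extractor` adds an optional `#regex:` suffix to header, query, and path references in runtime expressions. A hedged sketch of the resulting syntax (expression and values are made up; the pattern must contain exactly one capturing group):

    # `$response.header.Location#regex:/users/(\d+)` parses into
    # nodes.HeaderResponse("Location", extractor=extractors.RegexExtractor(re.compile(r"/users/(\d+)"))),
    # so evaluating it against a response with `Location: /users/42` yields "42".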