schemathesis 3.31.1__py3-none-any.whl → 3.32.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- schemathesis/checks.py +4 -0
- schemathesis/cli/__init__.py +11 -4
- schemathesis/cli/cassettes.py +1 -8
- schemathesis/cli/constants.py +6 -2
- schemathesis/cli/output/default.py +18 -7
- schemathesis/exceptions.py +7 -1
- schemathesis/filters.py +104 -5
- schemathesis/internal/copy.py +3 -0
- schemathesis/internal/extensions.py +27 -0
- schemathesis/lazy.py +10 -3
- schemathesis/runner/__init__.py +9 -8
- schemathesis/runner/events.py +22 -1
- schemathesis/runner/impl/core.py +71 -60
- schemathesis/runner/impl/solo.py +1 -9
- schemathesis/runner/impl/threadpool.py +2 -8
- schemathesis/runner/probes.py +10 -9
- schemathesis/runner/serialization.py +73 -1
- schemathesis/schemas.py +17 -30
- schemathesis/service/serialization.py +3 -124
- schemathesis/specs/openapi/_hypothesis.py +2 -0
- schemathesis/specs/openapi/loaders.py +12 -10
- schemathesis/specs/openapi/schemas.py +31 -25
- schemathesis/specs/openapi/stateful/__init__.py +16 -3
- schemathesis/stateful/config.py +20 -2
- schemathesis/stateful/context.py +15 -1
- schemathesis/stateful/events.py +50 -4
- schemathesis/stateful/runner.py +50 -11
- schemathesis/stateful/sink.py +1 -1
- schemathesis/stateful/state_machine.py +5 -3
- schemathesis/stateful/validation.py +38 -18
- schemathesis/targets.py +32 -1
- schemathesis/transports/__init__.py +13 -1
- schemathesis/transports/auth.py +22 -1
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.1.dist-info}/METADATA +2 -1
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.1.dist-info}/RECORD +38 -38
- schemathesis/specs/openapi/filters.py +0 -50
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.1.dist-info}/WHEEL +0 -0
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.1.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.1.dist-info}/licenses/LICENSE +0 -0
schemathesis/checks.py
CHANGED
```diff
@@ -39,6 +39,10 @@ def not_a_server_error(response: GenericResponse, case: Case) -> bool | None:
     return None


+def _make_max_response_time_failure_message(elapsed_time: float, max_response_time: int) -> str:
+    return f"Actual: {elapsed_time:.2f}ms\nLimit: {max_response_time}.00ms"
+
+
 DEFAULT_CHECKS: tuple[CheckFunction, ...] = (not_a_server_error,)
 OPTIONAL_CHECKS = (
     status_code_conformance,
```
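The new helper centralizes the failure text for the `max_response_time` check. A minimal sketch of what it produces (the input values are illustrative):

```python
def _make_max_response_time_failure_message(elapsed_time: float, max_response_time: int) -> str:
    # Same body as the helper added above in schemathesis/checks.py
    return f"Actual: {elapsed_time:.2f}ms\nLimit: {max_response_time}.00ms"

print(_make_max_response_time_failure_message(1532.4871, 500))
# Actual: 1532.49ms
# Limit: 500.00ms
```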
schemathesis/cli/__init__.py
CHANGED
```diff
@@ -47,6 +47,7 @@ from ..specs.graphql import loaders as gql_loaders
 from ..specs.openapi import loaders as oas_loaders
 from ..stateful import Stateful
 from ..targets import Target
+from ..transports import RequestConfig
 from ..transports.auth import get_requests_auth
 from ..types import Filter, PathLike, RequestCert
 from . import callbacks, cassettes, output
@@ -850,7 +851,7 @@ def run(
     _hypothesis_suppress_health_check: list[hypothesis.HealthCheck] | None = None
     if hypothesis_suppress_health_check is not None:
         _hypothesis_suppress_health_check = [
-
+            entry for health_check in hypothesis_suppress_health_check for entry in health_check.as_hypothesis()
         ]

     if contrib_unique_data:
@@ -1073,6 +1074,7 @@ class LoaderConfig:
     wait_for_schema: float | None
     rate_limit: str | None
     output_config: OutputConfig
+    generation_config: generation.GenerationConfig
     # Network request parameters
     auth: tuple[str, str] | None
     auth_type: str | None
@@ -1151,6 +1153,7 @@
         tag=tag or None,
         operation_id=operation_id or None,
         output_config=output_config,
+        generation_config=generation_config,
     )
     schema = load_schema(config)
     yield from runner.from_schema(
@@ -1179,9 +1182,12 @@
         generation_config=generation_config,
         probe_config=probes.ProbeConfig(
             base_url=config.base_url,
-
-
-
+            request=RequestConfig(
+                timeout=request_timeout,
+                tls_verify=config.request_tls_verify,
+                proxy=config.request_proxy,
+                cert=config.request_cert,
+            ),
             auth=config.auth,
             auth_type=config.auth_type,
             headers=config.headers,
@@ -1297,6 +1303,7 @@ def get_loader_kwargs(loader: Callable, config: LoaderConfig) -> dict[str, Any]:
         "data_generation_methods": config.data_generation_methods,
         "rate_limit": config.rate_limit,
         "output_config": config.output_config,
+        "generation_config": config.generation_config,
     }
     if loader not in (oas_loaders.from_path, oas_loaders.from_dict):
         kwargs["headers"] = config.headers
```
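The previously separate `request_timeout` / `request_tls_verify` / `request_proxy` / `request_cert` arguments are now grouped into a single `RequestConfig` handed to the API probes. A minimal sketch of the new grouping, assuming `RequestConfig` is importable from `schemathesis.transports` as the relative import above suggests (the values are illustrative):

```python
from schemathesis.transports import RequestConfig  # import path assumed from `..transports`

request_config = RequestConfig(
    timeout=5,         # replaces the separate `request_timeout` argument
    tls_verify=True,   # replaces `request_tls_verify`
    proxy=None,        # replaces `request_proxy`
    cert=None,         # replaces `request_cert`
)
```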
schemathesis/cli/cassettes.py
CHANGED
```diff
@@ -23,7 +23,6 @@ if TYPE_CHECKING:
     import click
     import requests

-    from ..generation import DataGenerationMethod
     from ..models import Request, Response
     from ..runner.serialization import SerializedCheck, SerializedInteraction
     from .context import ExecutionContext
@@ -90,10 +89,6 @@ class CassetteWriter(EventHandler):
                     seed=seed,
                     correlation_id=event.correlation_id,
                     thread_id=event.thread_id,
-                    # NOTE: For backward compatibility reasons AfterExecution stores a list of data generation methods
-                    # The list always contains one element - the method that was actually used for generation
-                    # This will change in the future
-                    data_generation_method=event.data_generation_method[0],
                     interactions=event.result.interactions,
                 )
             )
@@ -105,7 +100,6 @@ class CassetteWriter(EventHandler):
                     # Correlation ID is not used in stateful testing
                     correlation_id="",
                     thread_id=event.thread_id,
-                    data_generation_method=event.data_generation_method[0],
                     interactions=event.result.interactions,
                 )
             )
@@ -132,7 +126,6 @@ class Process:
     seed: int
     correlation_id: str
     thread_id: int
-    data_generation_method: DataGenerationMethod
     interactions: list[SerializedInteraction]


@@ -239,7 +232,7 @@ http_interactions:"""
   seed: '{item.seed}'
   thread_id: {item.thread_id}
   correlation_id: '{item.correlation_id}'
-  data_generation_method: '{
+  data_generation_method: '{interaction.data_generation_method.value}'
   elapsed: '{interaction.response.elapsed}'
   recorded_at: '{interaction.recorded_at}'
   checks:
```
schemathesis/cli/constants.py
CHANGED
```diff
@@ -42,11 +42,15 @@ class HealthCheck(IntEnum):
     filter_too_much = 2
     too_slow = 3
     large_base_example = 7
+    all = 8

-    def as_hypothesis(self) -> hypothesis.HealthCheck:
+    def as_hypothesis(self) -> list[hypothesis.HealthCheck]:
         from hypothesis import HealthCheck

-
+        if self.name == "all":
+            return list(HealthCheck)
+
+        return [HealthCheck[self.name]]


 @unique
```
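`HealthCheck` gains an `all` member, and `as_hypothesis` now returns a list so that `all` can expand to every Hypothesis health check. A minimal sketch, assuming the enum is importable from `schemathesis.cli.constants` (the module shown in this hunk):

```python
import hypothesis

from schemathesis.cli.constants import HealthCheck  # import path assumed from the file path

# `all` expands to the full set of Hypothesis health checks
assert HealthCheck.all.as_hypothesis() == list(hypothesis.HealthCheck)
# Individual members now come back wrapped in a single-element list
assert HealthCheck.too_slow.as_hypothesis() == [hypothesis.HealthCheck.too_slow]
```

The list return type is what lets the `run` command in `schemathesis/cli/__init__.py` flatten the suppressed health checks with the nested comprehension shown above.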
schemathesis/cli/output/default.py
CHANGED
```diff
@@ -143,7 +143,9 @@ def display_hypothesis_output(hypothesis_output: list[str]) -> None:

 def display_errors(context: ExecutionContext, event: events.Finished) -> None:
     """Display all errors in the test run."""
-
+    probes = context.probes or []
+    has_probe_errors = any(probe.outcome == ProbeOutcome.ERROR for probe in probes)
+    if not event.has_errors and not has_probe_errors:
         return

     display_section_name("ERRORS")
@@ -160,6 +162,12 @@ def display_errors(context: ExecutionContext, event: events.Finished) -> None:
         should_display_full_traceback_message |= display_single_error(context, result)
     if event.generic_errors:
         display_generic_errors(context, event.generic_errors)
+    if has_probe_errors:
+        display_section_name("API Probe errors", "_", fg="red")
+        for probe in probes:
+            if probe.error is not None:
+                error = SerializedError.from_exception(probe.error)
+                _display_error(context, error)
     if should_display_full_traceback_message and not context.show_trace:
         click.secho(
             "\nAdd this option to your command line parameters to see full tracebacks: --show-trace",
@@ -848,6 +856,10 @@ def handle_finished(context: ExecutionContext, event: events.Finished) -> None:

 def handle_interrupted(context: ExecutionContext, event: events.Interrupted) -> None:
     click.echo()
+    _handle_interrupted(context)
+
+
+def _handle_interrupted(context: ExecutionContext) -> None:
     context.is_interrupted = True
     display_section_name("KeyboardInterrupt", "!", bold=False)

@@ -863,12 +875,11 @@ def handle_stateful_event(context: ExecutionContext, event: events.StatefulEvent) -> None:
     if not experimental.STATEFUL_ONLY.is_enabled:
         click.echo()
         click.secho("Stateful tests\n", bold=True)
-    elif (
-
-
-
-
-        display_execution_result(context, event.data.status.value)
+    elif isinstance(event.data, stateful_events.ScenarioFinished) and not event.data.is_final:
+        if event.data.status == stateful_events.ScenarioStatus.INTERRUPTED:
+            _handle_interrupted(context)
+        elif event.data.status != stateful_events.ScenarioStatus.REJECTED:
+            display_execution_result(context, event.data.status.value)
     elif isinstance(event.data, stateful_events.RunFinished):
         click.echo()
         # It is initialized in `RunStarted`
```
schemathesis/exceptions.py
CHANGED
```diff
@@ -520,6 +520,12 @@ def remove_ssl_line_number(text: str) -> str:
     return re.sub(r"\(_ssl\.c:\d+\)", "", text)


+def _clean_inner_request_message(message: Any) -> str:
+    if isinstance(message, str) and message.startswith("HTTPConnectionPool"):
+        return re.sub(r"HTTPConnectionPool\(.+?\): ", "", message).rstrip(".")
+    return str(message)
+
+
 def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
     from requests.exceptions import ChunkedEncodingError, ConnectionError, SSLError
     from urllib3.exceptions import MaxRetryError
@@ -542,7 +548,7 @@ def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
             reason = f"Max retries exceeded with url: {inner.url}"
             extra = [reason.strip()]
         else:
-            extra = [" ".join(map(
+            extra = [" ".join(map(_clean_inner_request_message, inner.args))]
     elif isinstance(exc, ChunkedEncodingError):
         message = "Connection broken. The server declared chunked encoding but sent an invalid chunk"
         extra = [str(exc.args[0].args[1])]
```
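The new `_clean_inner_request_message` strips urllib3's `HTTPConnectionPool(...)` prefix from nested connection errors before they reach the user. A sketch of the effect (the error text is illustrative):

```python
import re
from typing import Any

def _clean_inner_request_message(message: Any) -> str:
    # Copied from the hunk above
    if isinstance(message, str) and message.startswith("HTTPConnectionPool"):
        return re.sub(r"HTTPConnectionPool\(.+?\): ", "", message).rstrip(".")
    return str(message)

raw = "HTTPConnectionPool(host='127.0.0.1', port=8080): Max retries exceeded with url: /api."
print(_clean_inner_request_message(raw))
# Max retries exceeded with url: /api
```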
schemathesis/filters.py
CHANGED
```diff
@@ -8,6 +8,8 @@ from functools import partial
 from types import SimpleNamespace
 from typing import TYPE_CHECKING, Callable, List, Protocol, Union

+from .types import NotSet, Filter as FilterType
+
 from .exceptions import UsageError

 if TYPE_CHECKING:
@@ -58,7 +60,12 @@
     def for_regex(cls, attribute: str, regex: RegexValue) -> Matcher:
         """Matcher that checks whether the specified attribute has the provided regex."""
         if isinstance(regex, str):
-
+            flags: re.RegexFlag | int
+            if attribute == "method":
+                flags = re.IGNORECASE
+            else:
+                flags = 0
+            regex = re.compile(regex, flags=flags)
         func = partial(by_regex, attribute=attribute, regex=regex)
         label = f"{attribute}_regex={repr(regex)}"
         return cls(func, label=label, _hash=hash(label))
@@ -71,6 +78,8 @@
 def get_operation_attribute(operation: APIOperation, attribute: str) -> str | list[str] | None:
     if attribute == "tag":
         return operation.tags
+    if attribute == "operation_id":
+        return operation.definition.raw.get("operationId")
     # Just uppercase `method`
     value = getattr(operation, attribute)
     if attribute == "method":
@@ -101,8 +110,8 @@ def by_regex(ctx: HasAPIOperation, attribute: str, regex: re.Pattern) -> bool:
     if value is None:
         return False
     if isinstance(value, list):
-        return any(bool(regex.
-    return bool(regex.
+        return any(bool(regex.search(entry)) for entry in value)
+    return bool(regex.search(value))


 @dataclass(repr=False, frozen=True)
@@ -111,6 +120,8 @@ class Filter:

     matchers: tuple[Matcher, ...]

+    __slots__ = ("matchers",)
+
     def __repr__(self) -> str:
         inner = " && ".join(matcher.label for matcher in self.matchers)
         return f"<{self.__class__.__name__}: [{inner}]>"
@@ -127,8 +138,14 @@
 class FilterSet:
     """Combines multiple filters to apply inclusion and exclusion rules on API operations."""

-    _includes: set[Filter]
-    _excludes: set[Filter]
+    _includes: set[Filter]
+    _excludes: set[Filter]
+
+    __slots__ = ("_includes", "_excludes")
+
+    def __init__(self) -> None:
+        self._includes = set()
+        self._excludes = set()

     def apply_to(self, operations: list[APIOperation]) -> list[APIOperation]:
         """Get a filtered list of the given operations that match the filters."""
@@ -168,6 +185,8 @@
         path_regex: RegexValue | None = None,
         tag: FilterValue | None = None,
         tag_regex: RegexValue | None = None,
+        operation_id: FilterValue | None = None,
+        operation_id_regex: RegexValue | None = None,
     ) -> None:
         """Add a new INCLUDE filter."""
         self._add_filter(
@@ -181,6 +200,8 @@
             path_regex=path_regex,
             tag=tag,
             tag_regex=tag_regex,
+            operation_id=operation_id,
+            operation_id_regex=operation_id_regex,
         )

     def exclude(
@@ -195,6 +216,8 @@
         path_regex: RegexValue | None = None,
         tag: FilterValue | None = None,
         tag_regex: RegexValue | None = None,
+        operation_id: FilterValue | None = None,
+        operation_id_regex: RegexValue | None = None,
     ) -> None:
         """Add a new EXCLUDE filter."""
         self._add_filter(
@@ -208,6 +231,8 @@
             path_regex=path_regex,
             tag=tag,
             tag_regex=tag_regex,
+            operation_id=operation_id,
+            operation_id_regex=operation_id_regex,
         )

     def _add_filter(
@@ -223,6 +248,8 @@
         path_regex: RegexValue | None = None,
         tag: FilterValue | None = None,
         tag_regex: RegexValue | None = None,
+        operation_id: FilterValue | None = None,
+        operation_id_regex: RegexValue | None = None,
     ) -> None:
         matchers = []
         if func is not None:
@@ -232,6 +259,7 @@
             ("method", method, method_regex),
             ("path", path, path_regex),
             ("tag", tag, tag_regex),
+            ("operation_id", operation_id, operation_id_regex),
         ):
             if expected is not None and regex is not None:
                 # To match anything the regex should match the expected value, hence passing them together is useless
@@ -295,3 +323,74 @@ def attach_filter_chain(
     proxy.__name__ = attribute

     setattr(target, attribute, proxy)
+
+
+def filter_set_from_components(
+    *,
+    include: bool,
+    method: FilterType | None = None,
+    endpoint: FilterType | None = None,
+    tag: FilterType | None = None,
+    operation_id: FilterType | None = None,
+    skip_deprecated_operations: bool | None | NotSet = None,
+    parent: FilterSet | None = None,
+) -> FilterSet:
+    def _is_defined(x: FilterType | None) -> bool:
+        return x is not None and not isinstance(x, NotSet)
+
+    def _is_deprecated(ctx: HasAPIOperation) -> bool:
+        return ctx.operation.definition.raw.get("deprecated") is True
+
+    def _prepare_filter(filter_: FilterType | None) -> RegexValue | None:
+        if filter_ is None or isinstance(filter_, NotSet):
+            return None
+        if isinstance(filter_, str):
+            return filter_
+        return "|".join(f"({f})" for f in filter_)
+
+    new = FilterSet()
+
+    if _is_defined(method) or _is_defined(endpoint) or _is_defined(tag) or _is_defined(operation_id):
+        new._add_filter(
+            include,
+            method_regex=_prepare_filter(method),
+            path_regex=_prepare_filter(endpoint),
+            tag_regex=_prepare_filter(tag),
+            operation_id_regex=_prepare_filter(operation_id),
+        )
+    if skip_deprecated_operations is True and not any(
+        matcher.label == _is_deprecated.__name__ for exclude_ in new._excludes for matcher in exclude_.matchers
+    ):
+        new.exclude(func=_is_deprecated)
+    # Merge with the parent filter set
+    if parent is not None:
+        for include_ in parent._includes:
+            matchers = include_.matchers
+            ids = []
+            for idx, matcher in enumerate(matchers):
+                label = matcher.label
+                if (
+                    (not isinstance(method, NotSet) and label.startswith("method_regex="))
+                    or (not isinstance(endpoint, NotSet) and label.startswith("path_regex="))
+                    or (not isinstance(tag, NotSet) and matcher.label.startswith("tag_regex="))
+                    or (not isinstance(operation_id, NotSet) and matcher.label.startswith("operation_id_regex="))
+                ):
+                    ids.append(idx)
+            if ids:
+                matchers = tuple(matcher for idx, matcher in enumerate(matchers) if idx not in ids)
+            if matchers:
+                if new._includes:
+                    existing = new._includes.pop()
+                    matchers = existing.matchers + matchers
+                new._includes.add(Filter(matchers=matchers))
+        for exclude_ in parent._excludes:
+            matchers = exclude_.matchers
+            ids = []
+            for idx, matcher in enumerate(exclude_.matchers):
+                if skip_deprecated_operations is False and matcher.label == _is_deprecated.__name__:
+                    ids.append(idx)
+            if ids:
+                matchers = tuple(matcher for idx, matcher in enumerate(matchers) if idx not in ids)
+            if matchers:
+                new._excludes.add(exclude_)
+    return new
```
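Two behavioral changes are visible here: `method` regexes are now compiled with `re.IGNORECASE`, and operations can be filtered by `operationId`. A minimal sketch, assuming `FilterSet` is importable from `schemathesis.filters` and `operations` stands in for a loaded list of `APIOperation` objects:

```python
from schemathesis.filters import FilterSet

filter_set = FilterSet()
# New in this release: select operations by their `operationId`
filter_set.include(operation_id_regex="^(create|update)User$")
# Method regexes are compiled with re.IGNORECASE, so "post" also matches "POST"
filter_set.exclude(method_regex="post")

selected = filter_set.apply_to(operations)  # `operations`: a list of APIOperation
```

`filter_set_from_components` is the glue used by `lazy.py` below: it converts the legacy `method`/`endpoint`/`tag`/`operation_id` keyword arguments into such a `FilterSet` and merges it with a parent schema's filters.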
schemathesis/internal/extensions.py
CHANGED
```diff
@@ -0,0 +1,27 @@
+import os
+from typing import Any, Callable
+
+
+class ExtensionLoadingError(ImportError):
+    """Raised when an extension cannot be loaded."""
+
+
+def import_extension(path: str) -> Any:
+    try:
+        module, item = path.rsplit(".", 1)
+        imported = __import__(module, fromlist=[item])
+        return getattr(imported, item)
+    except ValueError as exc:
+        raise ExtensionLoadingError(f"Invalid path: {path}") from exc
+    except (ImportError, AttributeError) as exc:
+        raise ExtensionLoadingError(f"Could not import {path}") from exc
+
+
+def extensible(env_var: str) -> Callable[[Any], Any]:
+    def decorator(item: Any) -> Any:
+        path = os.getenv(env_var)
+        if path is not None:
+            return import_extension(path)
+        return item
+
+    return decorator
```
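The `extensible` decorator swaps a default implementation for one named by an environment variable at import time. A minimal sketch of its behavior; the env var name and replacement module below are hypothetical, not part of schemathesis:

```python
from schemathesis.internal.extensions import extensible

@extensible("MYAPP_GREETER")  # hypothetical variable name
def greet() -> str:
    return "default"

# With MYAPP_GREETER unset, `greet` is the function defined above.
# With MYAPP_GREETER="mypackage.custom.greet", the decorator imports and
# returns `mypackage.custom.greet` instead, raising ExtensionLoadingError
# if the dotted path cannot be imported.
```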
schemathesis/lazy.py
CHANGED
```diff
@@ -19,6 +19,7 @@ from .auths import AuthStorage
 from .code_samples import CodeSampleStyle
 from .constants import FLAKY_FAILURE_MESSAGE, NOT_SET
 from .exceptions import CheckFailed, OperationSchemaError, SkipTest, get_grouped_exception
+from .filters import filter_set_from_components
 from .generation import DataGenerationMethodInput, GenerationConfig
 from .hooks import HookDispatcher, HookScope
 from .internal.output import OutputConfig
@@ -341,18 +342,24 @@ def get_schema(
     schema = request.getfixturevalue(name)
     if not isinstance(schema, BaseSchema):
         raise ValueError(f"The given schema must be an instance of BaseSchema, got: {type(schema)}")
-
-
+
+    filter_set = filter_set_from_components(
+        include=True,
         method=method,
         endpoint=endpoint,
         tag=tag,
         operation_id=operation_id,
+        skip_deprecated_operations=skip_deprecated_operations,
+        parent=schema.filter_set,
+    )
+    return schema.clone(
+        base_url=base_url,
+        filter_set=filter_set,
         app=app,
         test_function=test_function,
         hooks=schema.hooks.merge(hooks),
         auth=auth,
         validate_schema=validate_schema,
-        skip_deprecated_operations=skip_deprecated_operations,
         data_generation_methods=data_generation_methods,
         generation_config=generation_config,
         output_config=output_config,
```
schemathesis/runner/__init__.py
CHANGED
```diff
@@ -19,6 +19,7 @@ from ..loaders import load_app
 from ..specs.graphql import loaders as gql_loaders
 from ..specs.openapi import loaders as oas_loaders
 from ..targets import DEFAULT_TARGETS, Target
+from ..transports import RequestConfig
 from ..transports.auth import get_requests_auth
 from ..types import Filter, NotSet, RawAuth, RequestCert
 from .probes import ProbeConfig
@@ -373,6 +374,12 @@ def from_schema(

     hypothesis_settings = hypothesis_settings or hypothesis.settings(deadline=DEFAULT_DEADLINE)
     generation_config = generation_config or GenerationConfig()
+    request_config = RequestConfig(
+        timeout=request_timeout,
+        tls_verify=request_tls_verify,
+        proxy=request_proxy,
+        cert=request_cert,
+    )

     # Use the same seed for all tests unless `derandomize=True` is used
     if seed is None and not hypothesis_settings.derandomize:
@@ -394,10 +401,7 @@
         headers=headers,
         seed=seed,
         workers_num=workers_num,
-
-        request_tls_verify=request_tls_verify,
-        request_proxy=request_proxy,
-        request_cert=request_cert,
+        request_config=request_config,
         exit_first=exit_first,
         max_failures=max_failures,
         started_at=started_at,
@@ -473,10 +477,7 @@
         override=override,
         headers=headers,
         seed=seed,
-
-        request_tls_verify=request_tls_verify,
-        request_proxy=request_proxy,
-        request_cert=request_cert,
+        request_config=request_config,
         exit_first=exit_first,
         max_failures=max_failures,
         started_at=started_at,
```
schemathesis/runner/events.py
CHANGED
```diff
@@ -9,7 +9,8 @@ from typing import TYPE_CHECKING, Any
 from ..exceptions import RuntimeErrorType, SchemaError, SchemaErrorType, format_exception
 from ..generation import DataGenerationMethod
 from ..internal.datetime import current_datetime
-from ..internal.result import Result
+from ..internal.result import Err, Ok, Result
+from ..service.models import AnalysisSuccess
 from .serialization import SerializedError, SerializedTestResult

 if TYPE_CHECKING:
@@ -105,6 +106,23 @@ class BeforeAnalysis(ExecutionEvent):
 class AfterAnalysis(ExecutionEvent):
     analysis: Result[AnalysisResult, Exception] | None

+    def _serialize(self) -> dict[str, Any]:
+        data = {}
+        if isinstance(self.analysis, Ok):
+            result = self.analysis.ok()
+            if isinstance(result, AnalysisSuccess):
+                data["analysis_id"] = result.id
+            else:
+                data["error"] = result.message
+        elif isinstance(self.analysis, Err):
+            data["error"] = format_exception(self.analysis.err())
+        return data
+
+    def asdict(self, **kwargs: Any) -> dict[str, Any]:
+        data = self._serialize()
+        data["event_type"] = self.__class__.__name__
+        return data
+

 class CurrentOperationMixin:
     method: str
@@ -296,6 +314,9 @@ class StatefulEvent(ExecutionEvent):

     __slots__ = ("data",)

+    def asdict(self, **kwargs: Any) -> dict[str, Any]:
+        return {"data": self.data.asdict(**kwargs), "event_type": self.__class__.__name__}
+

 @dataclass
 class AfterStatefulExecution(ExecutionEvent):
```
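`AfterAnalysis` and `StatefulEvent` now serialize themselves with an `event_type` discriminator. A minimal sketch of how a consumer might route on it, assuming `ExecutionEvent` is importable from `schemathesis.runner.events` and exposes `asdict` on the base class, as the overrides above imply:

```python
from schemathesis.runner import events

def dispatch(event: events.ExecutionEvent) -> None:
    payload = event.asdict()
    if payload["event_type"] == "AfterAnalysis":
        # Per `_serialize`: contains "analysis_id" on success or "error" on failure
        ...
    elif payload["event_type"] == "StatefulEvent":
        # "data" holds the serialized inner stateful sub-event
        ...
```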
|