schemathesis 3.31.1__py3-none-any.whl → 3.32.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- schemathesis/checks.py +4 -0
- schemathesis/cli/__init__.py +11 -4
- schemathesis/cli/cassettes.py +1 -8
- schemathesis/cli/constants.py +6 -2
- schemathesis/cli/output/default.py +18 -7
- schemathesis/exceptions.py +7 -1
- schemathesis/internal/copy.py +3 -0
- schemathesis/internal/extensions.py +27 -0
- schemathesis/runner/__init__.py +9 -8
- schemathesis/runner/impl/core.py +71 -60
- schemathesis/runner/impl/solo.py +1 -9
- schemathesis/runner/impl/threadpool.py +2 -8
- schemathesis/runner/probes.py +10 -9
- schemathesis/runner/serialization.py +3 -0
- schemathesis/specs/openapi/_hypothesis.py +2 -0
- schemathesis/specs/openapi/schemas.py +2 -2
- schemathesis/specs/openapi/stateful/__init__.py +16 -3
- schemathesis/stateful/config.py +20 -2
- schemathesis/stateful/context.py +15 -1
- schemathesis/stateful/events.py +2 -2
- schemathesis/stateful/runner.py +50 -11
- schemathesis/stateful/sink.py +1 -1
- schemathesis/stateful/state_machine.py +5 -3
- schemathesis/stateful/validation.py +38 -18
- schemathesis/targets.py +32 -1
- schemathesis/transports/__init__.py +13 -1
- schemathesis/transports/auth.py +22 -1
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.0.dist-info}/METADATA +2 -1
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.0.dist-info}/RECORD +32 -31
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.0.dist-info}/WHEEL +0 -0
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.0.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.31.1.dist-info → schemathesis-3.32.0.dist-info}/licenses/LICENSE +0 -0
schemathesis/checks.py
CHANGED

@@ -39,6 +39,10 @@ def not_a_server_error(response: GenericResponse, case: Case) -> bool | None:
     return None


+def _make_max_response_time_failure_message(elapsed_time: float, max_response_time: int) -> str:
+    return f"Actual: {elapsed_time:.2f}ms\nLimit: {max_response_time}.00ms"
+
+
 DEFAULT_CHECKS: tuple[CheckFunction, ...] = (not_a_server_error,)
 OPTIONAL_CHECKS = (
     status_code_conformance,
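The new helper extracts the `max_response_time` failure message that was previously built inline in `runner/impl/core.py` (see that file below). A minimal sketch of the resulting message, reusing the exact f-string from the diff; the sample numbers are invented for illustration:

def _make_max_response_time_failure_message(elapsed_time: float, max_response_time: int) -> str:
    # Same formatting as the diff above: both values are rendered in milliseconds.
    return f"Actual: {elapsed_time:.2f}ms\nLimit: {max_response_time}.00ms"


# Hypothetical values: a 347.2 ms response against a 300 ms limit.
print(_make_max_response_time_failure_message(347.2, 300))
# Actual: 347.20ms
# Limit: 300.00ms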
schemathesis/cli/__init__.py
CHANGED

@@ -47,6 +47,7 @@ from ..specs.graphql import loaders as gql_loaders
 from ..specs.openapi import loaders as oas_loaders
 from ..stateful import Stateful
 from ..targets import Target
+from ..transports import RequestConfig
 from ..transports.auth import get_requests_auth
 from ..types import Filter, PathLike, RequestCert
 from . import callbacks, cassettes, output

@@ -850,7 +851,7 @@ def run(
     _hypothesis_suppress_health_check: list[hypothesis.HealthCheck] | None = None
     if hypothesis_suppress_health_check is not None:
         _hypothesis_suppress_health_check = [
-
+            entry for health_check in hypothesis_suppress_health_check for entry in health_check.as_hypothesis()
         ]

     if contrib_unique_data:

@@ -1073,6 +1074,7 @@ class LoaderConfig:
     wait_for_schema: float | None
     rate_limit: str | None
     output_config: OutputConfig
+    generation_config: generation.GenerationConfig
     # Network request parameters
     auth: tuple[str, str] | None
     auth_type: str | None

@@ -1151,6 +1153,7 @@ def into_event_stream(
         tag=tag or None,
         operation_id=operation_id or None,
         output_config=output_config,
+        generation_config=generation_config,
     )
     schema = load_schema(config)
     yield from runner.from_schema(

@@ -1179,9 +1182,12 @@ def into_event_stream(
         generation_config=generation_config,
         probe_config=probes.ProbeConfig(
             base_url=config.base_url,
-
-
-
+            request=RequestConfig(
+                timeout=request_timeout,
+                tls_verify=config.request_tls_verify,
+                proxy=config.request_proxy,
+                cert=config.request_cert,
+            ),
             auth=config.auth,
             auth_type=config.auth_type,
             headers=config.headers,

@@ -1297,6 +1303,7 @@ def get_loader_kwargs(loader: Callable, config: LoaderConfig) -> dict[str, Any]:
        "data_generation_methods": config.data_generation_methods,
        "rate_limit": config.rate_limit,
        "output_config": config.output_config,
+       "generation_config": config.generation_config,
    }
    if loader not in (oas_loaders.from_path, oas_loaders.from_dict):
        kwargs["headers"] = config.headers
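The four scattered network settings (`request_timeout`, `request_tls_verify`, `request_proxy`, `request_cert`) are now folded into a single `RequestConfig` object (defined in `schemathesis/transports/__init__.py`, also touched in this release) and handed to the API probes. A minimal sketch of what such a container could look like, inferred only from the constructor calls and the `prepared_timeout` / `tls_verify` / `proxy` / `cert` attributes used elsewhere in this diff; the real class may differ:

from dataclasses import dataclass
from typing import Any, Optional, Union


@dataclass
class RequestConfig:
    # Sketch based solely on how the object is built and read in this diff.
    timeout: Optional[int] = None          # milliseconds, as accepted by `--request-timeout`
    tls_verify: Union[bool, str] = True
    proxy: Optional[str] = None
    cert: Optional[Any] = None

    @property
    def prepared_timeout(self) -> Optional[float]:
        # `_network_test` passes this straight to `requests`, which expects seconds.
        return None if self.timeout is None else self.timeout / 1000


# One object instead of four keyword arguments, as passed to ProbeConfig above.
config = RequestConfig(timeout=5000, tls_verify=False)
print(config.prepared_timeout)  # 5.0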
schemathesis/cli/cassettes.py
CHANGED

@@ -23,7 +23,6 @@ if TYPE_CHECKING:
     import click
     import requests

-    from ..generation import DataGenerationMethod
     from ..models import Request, Response
     from ..runner.serialization import SerializedCheck, SerializedInteraction
     from .context import ExecutionContext

@@ -90,10 +89,6 @@ class CassetteWriter(EventHandler):
                 seed=seed,
                 correlation_id=event.correlation_id,
                 thread_id=event.thread_id,
-                # NOTE: For backward compatibility reasons AfterExecution stores a list of data generation methods
-                # The list always contains one element - the method that was actually used for generation
-                # This will change in the future
-                data_generation_method=event.data_generation_method[0],
                 interactions=event.result.interactions,
             )
         )

@@ -105,7 +100,6 @@ class CassetteWriter(EventHandler):
                 # Correlation ID is not used in stateful testing
                 correlation_id="",
                 thread_id=event.thread_id,
-                data_generation_method=event.data_generation_method[0],
                 interactions=event.result.interactions,
             )
         )

@@ -132,7 +126,6 @@ class Process:
     seed: int
     correlation_id: str
     thread_id: int
-    data_generation_method: DataGenerationMethod
     interactions: list[SerializedInteraction]


@@ -239,7 +232,7 @@ http_interactions:"""
 seed: '{item.seed}'
 thread_id: {item.thread_id}
 correlation_id: '{item.correlation_id}'
-data_generation_method: '{
+data_generation_method: '{interaction.data_generation_method.value}'
 elapsed: '{interaction.response.elapsed}'
 recorded_at: '{interaction.recorded_at}'
 checks:
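With `data_generation_method` dropped from the per-test `Process` record, the cassette now records the generation method on each interaction instead (note the switch from `item.…` to `interaction.…` in the YAML template). A small sketch of reading that per-interaction field back, assuming `http_interactions` remains a list of recorded exchanges; the field names other than `data_generation_method` are illustrative:

import yaml  # PyYAML

# Hypothetical, trimmed-down cassette mimicking the template in the diff above.
cassette_text = """
http_interactions:
- id: '0'
  data_generation_method: 'positive'
- id: '1'
  data_generation_method: 'negative'
"""

cassette = yaml.safe_load(cassette_text)
for interaction in cassette["http_interactions"]:
    # Each interaction now carries its own generation method.
    print(interaction["id"], interaction["data_generation_method"])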
schemathesis/cli/constants.py
CHANGED

@@ -42,11 +42,15 @@ class HealthCheck(IntEnum):
     filter_too_much = 2
     too_slow = 3
     large_base_example = 7
+    all = 8

-    def as_hypothesis(self) -> hypothesis.HealthCheck:
+    def as_hypothesis(self) -> list[hypothesis.HealthCheck]:
         from hypothesis import HealthCheck

-
+        if self.name == "all":
+            return list(HealthCheck)
+
+        return [HealthCheck[self.name]]


 @unique
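`as_hypothesis()` now returns a list, and the new `all` member expands to every Hypothesis health check; this is what the flattening comprehension in `cli/__init__.py` above consumes. A self-contained sketch of that behaviour, using a stand-in enum with only the members shown in the diff:

from __future__ import annotations

from enum import IntEnum, unique

import hypothesis


@unique
class CLIHealthCheck(IntEnum):
    # Stand-in mirroring schemathesis.cli.constants.HealthCheck from the diff above.
    filter_too_much = 2
    too_slow = 3
    large_base_example = 7
    all = 8

    def as_hypothesis(self) -> list[hypothesis.HealthCheck]:
        if self.name == "all":
            return list(hypothesis.HealthCheck)
        return [hypothesis.HealthCheck[self.name]]


selected = [CLIHealthCheck.all]
# Same flattening as in `cli/__init__.py`: one CLI value may map to many Hypothesis values.
suppressed = [entry for health_check in selected for entry in health_check.as_hypothesis()]
settings = hypothesis.settings(suppress_health_check=suppressed)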
schemathesis/cli/output/default.py
CHANGED

@@ -143,7 +143,9 @@ def display_hypothesis_output(hypothesis_output: list[str]) -> None:

 def display_errors(context: ExecutionContext, event: events.Finished) -> None:
     """Display all errors in the test run."""
-
+    probes = context.probes or []
+    has_probe_errors = any(probe.outcome == ProbeOutcome.ERROR for probe in probes)
+    if not event.has_errors and not has_probe_errors:
         return

     display_section_name("ERRORS")

@@ -160,6 +162,12 @@ def display_errors(context: ExecutionContext, event: events.Finished) -> None:
         should_display_full_traceback_message |= display_single_error(context, result)
     if event.generic_errors:
         display_generic_errors(context, event.generic_errors)
+    if has_probe_errors:
+        display_section_name("API Probe errors", "_", fg="red")
+        for probe in probes:
+            if probe.error is not None:
+                error = SerializedError.from_exception(probe.error)
+                _display_error(context, error)
     if should_display_full_traceback_message and not context.show_trace:
         click.secho(
             "\nAdd this option to your command line parameters to see full tracebacks: --show-trace",

@@ -848,6 +856,10 @@ def handle_finished(context: ExecutionContext, event: events.Finished) -> None:

 def handle_interrupted(context: ExecutionContext, event: events.Interrupted) -> None:
     click.echo()
+    _handle_interrupted(context)
+
+
+def _handle_interrupted(context: ExecutionContext) -> None:
     context.is_interrupted = True
     display_section_name("KeyboardInterrupt", "!", bold=False)

@@ -863,12 +875,11 @@ def handle_stateful_event(context: ExecutionContext, event: events.StatefulEvent
         if not experimental.STATEFUL_ONLY.is_enabled:
             click.echo()
             click.secho("Stateful tests\n", bold=True)
-    elif (
-
-
-
-
-        display_execution_result(context, event.data.status.value)
+    elif isinstance(event.data, stateful_events.ScenarioFinished) and not event.data.is_final:
+        if event.data.status == stateful_events.ScenarioStatus.INTERRUPTED:
+            _handle_interrupted(context)
+        elif event.data.status != stateful_events.ScenarioStatus.REJECTED:
+            display_execution_result(context, event.data.status.value)
     elif isinstance(event.data, stateful_events.RunFinished):
         click.echo()
         # It is initialized in `RunStarted`
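Error output now also covers failed API probes: if any probe ended with `ProbeOutcome.ERROR`, the ERRORS section is shown even when no test errored, followed by a dedicated "API Probe errors" block. A self-contained sketch of the gating logic, using stand-in types in place of the real probe and context classes:

from dataclasses import dataclass
from enum import Enum
from typing import List, Optional


class ProbeOutcome(str, Enum):
    # Stand-in for the runner's probe outcome enum; member values are assumptions.
    SUCCESS = "success"
    ERROR = "error"


@dataclass
class ProbeRun:
    # Stand-in for a finished probe record: an outcome plus the captured exception, if any.
    outcome: ProbeOutcome
    error: Optional[Exception] = None


def should_display_errors(has_test_errors: bool, probes: Optional[List[ProbeRun]]) -> bool:
    # Mirrors the new gate in display_errors(): probe errors alone are enough to show the section.
    probes = probes or []
    has_probe_errors = any(probe.outcome == ProbeOutcome.ERROR for probe in probes)
    return has_test_errors or has_probe_errors


runs = [ProbeRun(ProbeOutcome.ERROR, error=ConnectionError("probe failed"))]
print(should_display_errors(has_test_errors=False, probes=runs))  # True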
schemathesis/exceptions.py
CHANGED

@@ -520,6 +520,12 @@ def remove_ssl_line_number(text: str) -> str:
     return re.sub(r"\(_ssl\.c:\d+\)", "", text)


+def _clean_inner_request_message(message: Any) -> str:
+    if isinstance(message, str) and message.startswith("HTTPConnectionPool"):
+        return re.sub(r"HTTPConnectionPool\(.+?\): ", "", message).rstrip(".")
+    return str(message)
+
+
 def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
     from requests.exceptions import ChunkedEncodingError, ConnectionError, SSLError
     from urllib3.exceptions import MaxRetryError

@@ -542,7 +548,7 @@ def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
             reason = f"Max retries exceeded with url: {inner.url}"
             extra = [reason.strip()]
         else:
-            extra = [" ".join(map(
+            extra = [" ".join(map(_clean_inner_request_message, inner.args))]
     elif isinstance(exc, ChunkedEncodingError):
         message = "Connection broken. The server declared chunked encoding but sent an invalid chunk"
         extra = [str(exc.args[0].args[1])]
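The new `_clean_inner_request_message` strips the noisy `HTTPConnectionPool(...)` prefix that urllib3 puts into connection-error messages before they are shown to the user. A quick standalone sketch of the same regex applied to an invented error string:

import re
from typing import Any


def _clean_inner_request_message(message: Any) -> str:
    # Same logic as the diff above: drop the urllib3 connection-pool prefix and a trailing dot.
    if isinstance(message, str) and message.startswith("HTTPConnectionPool"):
        return re.sub(r"HTTPConnectionPool\(.+?\): ", "", message).rstrip(".")
    return str(message)


# Hypothetical urllib3-style message.
raw = "HTTPConnectionPool(host='127.0.0.1', port=8080): Max retries exceeded with url: /api."
print(_clean_inner_request_message(raw))
# Max retries exceeded with url: /api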
schemathesis/internal/extensions.py
CHANGED

@@ -0,0 +1,27 @@
+import os
+from typing import Any, Callable
+
+
+class ExtensionLoadingError(ImportError):
+    """Raised when an extension cannot be loaded."""
+
+
+def import_extension(path: str) -> Any:
+    try:
+        module, item = path.rsplit(".", 1)
+        imported = __import__(module, fromlist=[item])
+        return getattr(imported, item)
+    except ValueError as exc:
+        raise ExtensionLoadingError(f"Invalid path: {path}") from exc
+    except (ImportError, AttributeError) as exc:
+        raise ExtensionLoadingError(f"Could not import {path}") from exc
+
+
+def extensible(env_var: str) -> Callable[[Any], Any]:
+    def decorator(item: Any) -> Any:
+        path = os.getenv(env_var)
+        if path is not None:
+            return import_extension(path)
+        return item
+
+    return decorator
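The new `extensible` decorator lets an environment variable point at a dotted path whose target replaces the decorated object at import time. A short usage sketch; the environment variable name and the replacement target here are hypothetical, not something this diff defines:

import os
from typing import Any, Callable


def import_extension(path: str) -> Any:
    # Trimmed copy of the helper from the diff above (error handling omitted).
    module, item = path.rsplit(".", 1)
    return getattr(__import__(module, fromlist=[item]), item)


def extensible(env_var: str) -> Callable[[Any], Any]:
    def decorator(item: Any) -> Any:
        path = os.getenv(env_var)
        if path is not None:
            return import_extension(path)
        return item

    return decorator


# Hypothetical extension point: if MYAPP_GREETING names a dotted path, that object
# is used instead of the default implementation below.
os.environ["MYAPP_GREETING"] = "string.capwords"


@extensible("MYAPP_GREETING")
def greeting(text: str) -> str:
    return text.upper()


print(greeting("hello world"))  # "Hello World" - replaced by string.capwords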
schemathesis/runner/__init__.py
CHANGED

@@ -19,6 +19,7 @@ from ..loaders import load_app
 from ..specs.graphql import loaders as gql_loaders
 from ..specs.openapi import loaders as oas_loaders
 from ..targets import DEFAULT_TARGETS, Target
+from ..transports import RequestConfig
 from ..transports.auth import get_requests_auth
 from ..types import Filter, NotSet, RawAuth, RequestCert
 from .probes import ProbeConfig

@@ -373,6 +374,12 @@ def from_schema(

     hypothesis_settings = hypothesis_settings or hypothesis.settings(deadline=DEFAULT_DEADLINE)
     generation_config = generation_config or GenerationConfig()
+    request_config = RequestConfig(
+        timeout=request_timeout,
+        tls_verify=request_tls_verify,
+        proxy=request_proxy,
+        cert=request_cert,
+    )

     # Use the same seed for all tests unless `derandomize=True` is used
     if seed is None and not hypothesis_settings.derandomize:

@@ -394,10 +401,7 @@ def from_schema(
             headers=headers,
             seed=seed,
             workers_num=workers_num,
-
-            request_tls_verify=request_tls_verify,
-            request_proxy=request_proxy,
-            request_cert=request_cert,
+            request_config=request_config,
             exit_first=exit_first,
             max_failures=max_failures,
             started_at=started_at,

@@ -473,10 +477,7 @@ def from_schema(
             override=override,
             headers=headers,
             seed=seed,
-
-            request_tls_verify=request_tls_verify,
-            request_proxy=request_proxy,
-            request_cert=request_cert,
+            request_config=request_config,
             exit_first=exit_first,
             max_failures=max_failures,
             started_at=started_at,
schemathesis/runner/impl/core.py
CHANGED

@@ -20,7 +20,7 @@ from hypothesis.errors import HypothesisException, InvalidArgument
 from hypothesis_jsonschema._canonicalise import HypothesisRefResolutionError
 from jsonschema.exceptions import SchemaError as JsonSchemaError
 from jsonschema.exceptions import ValidationError
-from requests.auth import HTTPDigestAuth
+from requests.auth import HTTPDigestAuth
 from urllib3.exceptions import InsecureRequestWarning

 from ... import experimental, failures, hooks

@@ -33,6 +33,7 @@ from ..._hypothesis import (
 )
 from ..._override import CaseOverride
 from ...auths import unregister as unregister_auth
+from ...checks import _make_max_response_time_failure_message
 from ...constants import (
     DEFAULT_STATEFUL_RECURSION_LIMIT,
     RECURSIVE_REFERENCE_ERROR_MESSAGE,

@@ -68,8 +69,9 @@ from ...stateful import Feedback, Stateful
 from ...stateful import events as stateful_events
 from ...stateful import runner as stateful_runner
 from ...targets import Target, TargetContext
-from ...transports import
-from ...
+from ...transports import RequestConfig, RequestsTransport
+from ...transports.auth import get_requests_auth, prepare_wsgi_headers
+from ...types import RawAuth
 from ...utils import capture_hypothesis_output
 from .. import probes
 from ..serialization import SerializedTestResult

@@ -92,11 +94,11 @@ class BaseRunner:
     hypothesis_settings: hypothesis.settings
     generation_config: GenerationConfig
     probe_config: probes.ProbeConfig
+    request_config: RequestConfig = field(default_factory=RequestConfig)
     override: CaseOverride | None = None
     auth: RawAuth | None = None
     auth_type: str | None = None
     headers: dict[str, Any] | None = None
-    request_timeout: int | None = None
     store_interactions: bool = False
     seed: int | None = None
     exit_first: bool = False

@@ -109,6 +111,7 @@ class BaseRunner:
     count_links: bool = True
     service_client: ServiceClient | None = None
     _failures_counter: int = 0
+    _is_stopping_due_to_failure_limit: bool = False

     def execute(self) -> EventStream:
         """Common logic for all runners."""

@@ -197,13 +200,20 @@
             warnings.simplefilter("ignore", InsecureRequestWarning)
             if not experimental.STATEFUL_ONLY.is_enabled:
                 yield from self._execute(results, stop_event)
-
+            if not self._is_stopping_due_to_failure_limit:
+                yield from self._run_stateful_tests(results)
         except KeyboardInterrupt:
             yield events.Interrupted()

         yield _finish()

     def _should_stop(self, event: events.ExecutionEvent) -> bool:
+        result = self.__should_stop(event)
+        if result:
+            self._is_stopping_due_to_failure_limit = True
+        return result
+
+    def __should_stop(self, event: events.ExecutionEvent) -> bool:
         if _should_count_towards_stop(event):
             if self.exit_first:
                 return True

@@ -224,14 +234,25 @@
             method="",
             path="",
             verbose_name="Stateful tests",
+            seed=self.seed,
             data_generation_method=self.schema.data_generation_methods,
         )
+        headers = self.headers or {}
+        if isinstance(self.schema.transport, RequestsTransport):
+            auth = get_requests_auth(self.auth, self.auth_type)
+        else:
+            auth = None
+            headers = prepare_wsgi_headers(headers, self.auth, self.auth_type)
         config = stateful_runner.StatefulTestRunnerConfig(
             checks=tuple(self.checks),
-            headers=
+            headers=headers,
             hypothesis_settings=self.hypothesis_settings,
             exit_first=self.exit_first,
-
+            max_failures=None if self.max_failures is None else self.max_failures - self._failures_counter,
+            request=self.request_config,
+            auth=auth,
+            seed=self.seed,
+            override=self.override,
         )
         state_machine = self.schema.as_state_machine()
         runner = state_machine.runner(config=config)

@@ -249,7 +270,7 @@
         if isinstance(state_machine.schema.transport, RequestsTransport):

             def on_step_finished(event: stateful_events.StepFinished) -> None:
-                if event.response is not None:
+                if event.response is not None and event.status is not None:
                     response = cast(requests.Response, event.response)
                     result.store_requests_response(
                         status=from_step_status(event.status),

@@ -261,13 +282,15 @@
         else:

             def on_step_finished(event: stateful_events.StepFinished) -> None:
-
+                from ...transports.responses import WSGIResponse
+
+                if event.response is not None and event.status is not None:
                     response = cast(WSGIResponse, event.response)
                     result.store_wsgi_response(
                         status=from_step_status(event.status),
                         case=event.case,
                         response=response,
-                        headers=
+                        headers=headers,
                         elapsed=response.elapsed.total_seconds(),
                         checks=event.checks,
                     )

@@ -286,6 +309,7 @@
                 on_step_finished(stateful_event)
             elif isinstance(stateful_event, stateful_events.Errored):
                 status = Status.error
+                result.add_error(stateful_event.exception)
             yield events.StatefulEvent(data=stateful_event)
         results.append(result)
         yield events.AfterStatefulExecution(

@@ -506,6 +530,26 @@ def run_test(
     errors: list[Exception] = []
     test_start_time = time.monotonic()
     setup_hypothesis_database_key(test, operation)
+
+    def _on_flaky(exc: Exception) -> Status:
+        if isinstance(exc.__cause__, hypothesis.errors.DeadlineExceeded):
+            status = Status.error
+            result.add_error(DeadlineExceeded.from_exc(exc.__cause__))
+        elif isinstance(exc, hypothesis.errors.FlakyFailure) and any(
+            isinstance(subexc, hypothesis.errors.DeadlineExceeded) for subexc in exc.exceptions
+        ):
+            for sub_exc in exc.exceptions:
+                if isinstance(sub_exc, hypothesis.errors.DeadlineExceeded):
+                    result.add_error(DeadlineExceeded.from_exc(sub_exc))
+            status = Status.error
+        elif errors:
+            status = Status.error
+            add_errors(result, errors)
+        else:
+            status = Status.failure
+            result.mark_flaky()
+        return status
+
     try:
         with catch_warnings(record=True) as warnings, capture_hypothesis_output() as hypothesis_output:
             test(

@@ -535,6 +579,9 @@
         result.mark_errored()
         for error in deduplicate_errors(errors):
             result.add_error(error)
+    except hypothesis.errors.FlakyFailure as exc:
+        # Hypothesis >= 6.108.0
+        status = _on_flaky(exc)
     except MultipleFailures:
         # Schemathesis may detect multiple errors that come from different check results
         # They raise different "grouped" exceptions

@@ -544,15 +591,7 @@
         else:
             status = Status.failure
     except hypothesis.errors.Flaky as exc:
-
-            status = Status.error
-            result.add_error(DeadlineExceeded.from_exc(exc.__cause__))
-        elif errors:
-            status = Status.error
-            add_errors(result, errors)
-        else:
-            status = Status.failure
-            result.mark_flaky()
+        status = _on_flaky(exc)
     except hypothesis.errors.Unsatisfiable:
         # We need more clear error message here
         status = Status.error

@@ -789,7 +828,7 @@ def run_checks(

     if max_response_time:
         if elapsed_time > max_response_time:
-            message =
+            message = _make_max_response_time_failure_message(elapsed_time, max_response_time)
             errors.append(AssertionError(message))
             result.add_failure(
                 "max_response_time",

@@ -869,10 +908,7 @@ def network_test(
     targets: Iterable[Target],
     result: TestResult,
     session: requests.Session,
-
-    request_tls_verify: bool,
-    request_proxy: str | None,
-    request_cert: RequestCert | None,
+    request_config: RequestConfig,
     store_interactions: bool,
     headers: dict[str, Any] | None,
     feedback: Feedback | None,

@@ -888,20 +924,16 @@
        headers = headers or {}
        if "user-agent" not in {header.lower() for header in headers}:
            headers["User-Agent"] = USER_AGENT
-       timeout = prepare_timeout(request_timeout)
        if not dry_run:
            args = (
                checks,
                targets,
                result,
                session,
-
+               request_config,
                store_interactions,
                headers,
                feedback,
-               request_tls_verify,
-               request_proxy,
-               request_cert,
                max_response_time,
            )
            response = _network_test(case, *args)

@@ -914,13 +946,10 @@ def _network_test(
     targets: Iterable[Target],
     result: TestResult,
     session: requests.Session,
-
+    request_config: RequestConfig,
     store_interactions: bool,
     headers: dict[str, Any] | None,
     feedback: Feedback | None,
-    request_tls_verify: bool,
-    request_proxy: str | None,
-    request_cert: RequestCert | None,
     max_response_time: int | None,
 ) -> requests.Response:
     check_results: list[Check] = []

@@ -929,19 +958,21 @@
         kwargs: dict[str, Any] = {
             "session": session,
             "headers": headers,
-            "timeout":
-            "verify":
-            "cert":
+            "timeout": request_config.prepared_timeout,
+            "verify": request_config.tls_verify,
+            "cert": request_config.cert,
         }
-        if
-            kwargs["proxies"] = {"all":
+        if request_config.proxy is not None:
+            kwargs["proxies"] = {"all": request_config.proxy}
         hooks.dispatch("process_call_kwargs", hook_context, case, kwargs)
         response = case.call(**kwargs)
     except CheckFailed as exc:
         check_name = "request_timeout"
         requests_kwargs = RequestsTransport().serialize_case(case, base_url=case.get_full_base_url(), headers=headers)
         request = requests.Request(**requests_kwargs).prepare()
-        elapsed = cast(
+        elapsed = cast(
+            float, request_config.prepared_timeout
+        )  # It is defined and not empty, since the exception happened
         check_result = result.add_failure(
             check_name, case, None, elapsed, f"Response timed out after {1000 * elapsed:.2f}ms", exc.context, request
         )

@@ -997,7 +1028,7 @@ def wsgi_test(
     with ErrorCollector(errors):
         _force_data_generation_method(data_generation_methods, case)
         result.mark_executed()
-        headers =
+        headers = prepare_wsgi_headers(headers, auth, auth_type)
         if not dry_run:
             args = (
                 checks,

@@ -1055,26 +1086,6 @@ def _wsgi_test(
     return response


-def _prepare_wsgi_headers(
-    headers: dict[str, Any] | None, auth: RawAuth | None, auth_type: str | None
-) -> dict[str, Any]:
-    headers = headers or {}
-    if "user-agent" not in {header.lower() for header in headers}:
-        headers["User-Agent"] = USER_AGENT
-    wsgi_auth = get_wsgi_auth(auth, auth_type)
-    if wsgi_auth:
-        headers["Authorization"] = wsgi_auth
-    return headers
-
-
-def get_wsgi_auth(auth: RawAuth | None, auth_type: str | None) -> str | None:
-    if auth:
-        if auth_type == "digest":
-            raise ValueError("Digest auth is not supported for WSGI apps")
-        return _basic_auth_str(*auth)
-    return None
-
-
 def asgi_test(
     case: Case,
     checks: Iterable[CheckFunction],
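The duplicated `Flaky` handling is now factored into a local `_on_flaky` helper, and the runner additionally catches `hypothesis.errors.FlakyFailure`, the exception-group variant introduced in Hypothesis 6.108.0 whose `.exceptions` list may contain `DeadlineExceeded` instances. A self-contained sketch of the same classification, using stand-in exception classes instead of the real Hypothesis and Schemathesis types:

from enum import Enum
from typing import List


class Status(str, Enum):
    error = "error"
    failure = "failure"


class DeadlineExceeded(Exception):
    """Stand-in for hypothesis.errors.DeadlineExceeded."""


class FlakyFailure(Exception):
    """Stand-in for hypothesis.errors.FlakyFailure: carries the underlying exceptions."""

    def __init__(self, exceptions: List[BaseException]) -> None:
        super().__init__("flaky failure")
        self.exceptions = exceptions


def classify_flaky(exc: Exception, collected_errors: List[Exception]) -> Status:
    # Mirrors the branching of the new `_on_flaky` helper in the diff above.
    if isinstance(exc.__cause__, DeadlineExceeded):
        return Status.error
    if isinstance(exc, FlakyFailure) and any(isinstance(sub, DeadlineExceeded) for sub in exc.exceptions):
        return Status.error
    if collected_errors:
        return Status.error
    # No deadline problem and no collected errors: report the operation as flaky.
    return Status.failure


print(classify_flaky(FlakyFailure([DeadlineExceeded()]), []))  # Status.error
print(classify_flaky(FlakyFailure([AssertionError()]), []))    # Status.failure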
schemathesis/runner/impl/solo.py
CHANGED

@@ -6,7 +6,6 @@ from typing import Generator

 from ...models import TestResultSet
 from ...transports.auth import get_requests_auth
-from ...types import RequestCert
 from .. import events
 from .core import BaseRunner, asgi_test, get_session, network_test, wsgi_test


@@ -15,10 +14,6 @@ from .core import BaseRunner, asgi_test, get_session, network_test, wsgi_test
 class SingleThreadRunner(BaseRunner):
     """Fast runner that runs tests sequentially in the main thread."""

-    request_tls_verify: bool | str = True
-    request_proxy: str | None = None
-    request_cert: RequestCert | None = None
-
     def _execute(
         self, results: TestResultSet, stop_event: threading.Event
     ) -> Generator[events.ExecutionEvent, None, None]:

@@ -42,10 +37,7 @@ class SingleThreadRunner(BaseRunner):
                 results=results,
                 session=session,
                 headers=self.headers,
-
-                request_tls_verify=self.request_tls_verify,
-                request_proxy=self.request_proxy,
-                request_cert=self.request_cert,
+                request_config=self.request_config,
                 store_interactions=self.store_interactions,
                 dry_run=self.dry_run,
             )
schemathesis/runner/impl/threadpool.py
CHANGED

@@ -19,7 +19,7 @@ from ...models import CheckFunction, TestResultSet
 from ...stateful import Feedback, Stateful
 from ...targets import Target
 from ...transports.auth import get_requests_auth
-from ...types import RawAuth
+from ...types import RawAuth
 from ...utils import capture_hypothesis_output
 from .. import events
 from .core import BaseRunner, asgi_test, get_session, handle_schema_error, network_test, run_test, wsgi_test

@@ -227,9 +227,6 @@ class ThreadPoolRunner(BaseRunner):
     """Spread different tests among multiple worker threads."""

     workers_num: int = 2
-    request_tls_verify: bool | str = True
-    request_proxy: str | None = None
-    request_cert: RequestCert | None = None

     def _execute(
         self, results: TestResultSet, stop_event: threading.Event

@@ -333,10 +330,7 @@ class ThreadPoolRunner(BaseRunner):
             "stateful_recursion_limit": self.stateful_recursion_limit,
             "data_generation_methods": self.schema.data_generation_methods,
             "kwargs": {
-                "
-                "request_tls_verify": self.request_tls_verify,
-                "request_proxy": self.request_proxy,
-                "request_cert": self.request_cert,
+                "request_config": self.request_config,
                 "store_interactions": self.store_interactions,
                 "max_response_time": self.max_response_time,
                 "dry_run": self.dry_run,