schemathesis 3.29.2__py3-none-any.whl → 3.30.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- schemathesis/__init__.py +3 -3
- schemathesis/_compat.py +2 -2
- schemathesis/_dependency_versions.py +1 -3
- schemathesis/_hypothesis.py +6 -0
- schemathesis/_lazy_import.py +1 -0
- schemathesis/_override.py +1 -0
- schemathesis/_rate_limiter.py +2 -1
- schemathesis/_xml.py +1 -0
- schemathesis/auths.py +4 -2
- schemathesis/checks.py +8 -5
- schemathesis/cli/__init__.py +8 -1
- schemathesis/cli/callbacks.py +3 -4
- schemathesis/cli/cassettes.py +6 -4
- schemathesis/cli/constants.py +2 -0
- schemathesis/cli/context.py +3 -0
- schemathesis/cli/debug.py +2 -1
- schemathesis/cli/handlers.py +1 -1
- schemathesis/cli/options.py +1 -0
- schemathesis/cli/output/default.py +50 -22
- schemathesis/cli/output/short.py +21 -10
- schemathesis/cli/sanitization.py +1 -0
- schemathesis/code_samples.py +1 -0
- schemathesis/constants.py +1 -0
- schemathesis/contrib/openapi/__init__.py +1 -1
- schemathesis/contrib/openapi/fill_missing_examples.py +2 -0
- schemathesis/contrib/openapi/formats/uuid.py +2 -1
- schemathesis/contrib/unique_data.py +2 -1
- schemathesis/exceptions.py +40 -26
- schemathesis/experimental/__init__.py +14 -0
- schemathesis/extra/_aiohttp.py +1 -0
- schemathesis/extra/_server.py +1 -0
- schemathesis/extra/pytest_plugin.py +13 -24
- schemathesis/failures.py +32 -3
- schemathesis/filters.py +2 -1
- schemathesis/fixups/__init__.py +1 -0
- schemathesis/fixups/fast_api.py +2 -2
- schemathesis/fixups/utf8_bom.py +1 -2
- schemathesis/generation/__init__.py +2 -1
- schemathesis/hooks.py +3 -1
- schemathesis/internal/copy.py +19 -3
- schemathesis/internal/deprecation.py +1 -1
- schemathesis/internal/jsonschema.py +2 -1
- schemathesis/internal/result.py +1 -1
- schemathesis/internal/transformation.py +1 -0
- schemathesis/lazy.py +3 -2
- schemathesis/loaders.py +4 -2
- schemathesis/models.py +20 -5
- schemathesis/parameters.py +1 -0
- schemathesis/runner/__init__.py +1 -1
- schemathesis/runner/events.py +21 -4
- schemathesis/runner/impl/core.py +61 -33
- schemathesis/runner/impl/solo.py +2 -1
- schemathesis/runner/impl/threadpool.py +4 -0
- schemathesis/runner/probes.py +1 -1
- schemathesis/runner/serialization.py +1 -1
- schemathesis/sanitization.py +2 -0
- schemathesis/schemas.py +1 -4
- schemathesis/service/ci.py +1 -0
- schemathesis/service/client.py +7 -7
- schemathesis/service/events.py +2 -1
- schemathesis/service/extensions.py +5 -5
- schemathesis/service/hosts.py +1 -0
- schemathesis/service/metadata.py +2 -1
- schemathesis/service/models.py +2 -1
- schemathesis/service/report.py +3 -3
- schemathesis/service/serialization.py +54 -23
- schemathesis/service/usage.py +1 -0
- schemathesis/specs/graphql/_cache.py +1 -1
- schemathesis/specs/graphql/loaders.py +1 -1
- schemathesis/specs/graphql/nodes.py +1 -0
- schemathesis/specs/graphql/scalars.py +2 -2
- schemathesis/specs/graphql/schemas.py +7 -7
- schemathesis/specs/graphql/validation.py +1 -2
- schemathesis/specs/openapi/_hypothesis.py +17 -11
- schemathesis/specs/openapi/checks.py +102 -9
- schemathesis/specs/openapi/converter.py +2 -1
- schemathesis/specs/openapi/definitions.py +2 -1
- schemathesis/specs/openapi/examples.py +7 -9
- schemathesis/specs/openapi/expressions/__init__.py +29 -2
- schemathesis/specs/openapi/expressions/context.py +1 -1
- schemathesis/specs/openapi/expressions/extractors.py +23 -0
- schemathesis/specs/openapi/expressions/lexer.py +19 -18
- schemathesis/specs/openapi/expressions/nodes.py +24 -4
- schemathesis/specs/openapi/expressions/parser.py +26 -5
- schemathesis/specs/openapi/filters.py +1 -0
- schemathesis/specs/openapi/links.py +35 -7
- schemathesis/specs/openapi/loaders.py +13 -11
- schemathesis/specs/openapi/negative/__init__.py +2 -1
- schemathesis/specs/openapi/negative/mutations.py +1 -0
- schemathesis/specs/openapi/parameters.py +1 -0
- schemathesis/specs/openapi/schemas.py +27 -38
- schemathesis/specs/openapi/security.py +1 -0
- schemathesis/specs/openapi/serialization.py +1 -0
- schemathesis/specs/openapi/stateful/__init__.py +159 -70
- schemathesis/specs/openapi/stateful/statistic.py +198 -0
- schemathesis/specs/openapi/stateful/types.py +13 -0
- schemathesis/specs/openapi/utils.py +1 -0
- schemathesis/specs/openapi/validation.py +1 -0
- schemathesis/stateful/__init__.py +4 -2
- schemathesis/stateful/config.py +66 -0
- schemathesis/stateful/context.py +93 -0
- schemathesis/stateful/events.py +209 -0
- schemathesis/stateful/runner.py +233 -0
- schemathesis/stateful/sink.py +68 -0
- schemathesis/stateful/state_machine.py +39 -22
- schemathesis/stateful/statistic.py +20 -0
- schemathesis/stateful/validation.py +66 -0
- schemathesis/targets.py +1 -0
- schemathesis/throttling.py +23 -3
- schemathesis/transports/__init__.py +28 -10
- schemathesis/transports/auth.py +1 -0
- schemathesis/transports/content_types.py +1 -1
- schemathesis/transports/headers.py +2 -1
- schemathesis/transports/responses.py +6 -4
- schemathesis/types.py +1 -0
- schemathesis/utils.py +1 -0
- {schemathesis-3.29.2.dist-info → schemathesis-3.30.0.dist-info}/METADATA +1 -1
- schemathesis-3.30.0.dist-info/RECORD +150 -0
- schemathesis/specs/openapi/stateful/links.py +0 -92
- schemathesis-3.29.2.dist-info/RECORD +0 -141
- {schemathesis-3.29.2.dist-info → schemathesis-3.30.0.dist-info}/WHEEL +0 -0
- {schemathesis-3.29.2.dist-info → schemathesis-3.30.0.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.29.2.dist-info → schemathesis-3.30.0.dist-info}/licenses/LICENSE +0 -0
schemathesis/stateful/runner.py ADDED
@@ -0,0 +1,233 @@
+from __future__ import annotations
+
+import queue
+import threading
+from contextlib import contextmanager
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any, Generator, Iterator, Type
+
+from hypothesis.control import current_build_context
+from hypothesis.errors import Flaky
+
+from ..exceptions import CheckFailed
+from . import events
+from .config import StatefulTestRunnerConfig
+from .context import RunnerContext
+from .validation import validate_response
+
+if TYPE_CHECKING:
+    from ..models import Case, CheckFunction
+    from ..transports.responses import GenericResponse
+    from .state_machine import APIStateMachine, Direction, StepResult
+
+EVENT_QUEUE_TIMEOUT = 0.01
+
+
+@dataclass
+class StatefulTestRunner:
+    """Stateful test runner for the given state machine.
+
+    By default, the test runner executes the state machine in a loop until there are no new failures are found.
+    The loop is executed in a separate thread for better control over the execution and reporting.
+    """
+
+    # State machine class to use
+    state_machine: Type[APIStateMachine]
+    # Test runner configuration that defines the runtime behavior
+    config: StatefulTestRunnerConfig = field(default_factory=StatefulTestRunnerConfig)
+    # Event to stop the execution
+    stop_event: threading.Event = field(default_factory=threading.Event)
+    # Queue to communicate with the state machine execution
+    event_queue: queue.Queue = field(default_factory=queue.Queue)
+
+    def execute(self) -> Iterator[events.StatefulEvent]:
+        """Execute a test run for a state machine."""
+        self.stop_event.clear()
+
+        yield events.RunStarted(state_machine=self.state_machine)
+
+        runner_thread = threading.Thread(
+            target=_execute_state_machine_loop,
+            kwargs={
+                "state_machine": self.state_machine,
+                "event_queue": self.event_queue,
+                "config": self.config,
+                "stop_event": self.stop_event,
+            },
+        )
+        run_status = events.RunStatus.SUCCESS
+
+        with thread_manager(runner_thread):
+            try:
+                while True:
+                    try:
+                        event = self.event_queue.get(timeout=EVENT_QUEUE_TIMEOUT)
+                        # Set the run status based on the suite status
+                        # ERROR & INTERRPUTED statuses are terminal, therefore they should not be overridden
+                        if isinstance(event, events.SuiteFinished):
+                            if event.status == events.SuiteStatus.FAILURE:
+                                run_status = events.RunStatus.FAILURE
+                            elif event.status == events.SuiteStatus.ERROR:
+                                run_status = events.RunStatus.ERROR
+                            elif event.status == events.SuiteStatus.INTERRUPTED:
+                                run_status = events.RunStatus.INTERRUPTED
+                        yield event
+                    except queue.Empty:
+                        if not runner_thread.is_alive():
+                            break
+            except KeyboardInterrupt:
+                # Immediately notify the runner thread to stop, even though that the event will be set below in `finally`
+                self.stop()
+                run_status = events.RunStatus.INTERRUPTED
+                yield events.Interrupted()
+            finally:
+                self.stop()
+
+        yield events.RunFinished(status=run_status)
+
+    def stop(self) -> None:
+        """Stop the execution of the state machine."""
+        self.stop_event.set()
+
+
+@contextmanager
+def thread_manager(thread: threading.Thread) -> Generator[None, None, None]:
+    thread.start()
+    try:
+        yield
+    finally:
+        thread.join()
+
+
+def _execute_state_machine_loop(
+    *,
+    state_machine: Type[APIStateMachine],
+    event_queue: queue.Queue,
+    config: StatefulTestRunnerConfig,
+    stop_event: threading.Event,
+) -> None:
+    """Execute the state machine testing loop."""
+    from hypothesis import reporting
+
+    from ..transports import RequestsTransport, prepare_timeout
+
+    ctx = RunnerContext()
+
+    call_kwargs: dict[str, Any] = {"headers": config.headers}
+    if isinstance(state_machine.schema.transport, RequestsTransport):
+        call_kwargs["timeout"] = prepare_timeout(config.request_timeout)
+
+    class InstrumentedStateMachine(state_machine):  # type: ignore[valid-type,misc]
+        """State machine with additional hooks for emitting events."""
+
+        def setup(self) -> None:
+            build_ctx = current_build_context()
+            event_queue.put(events.ScenarioStarted(is_final=build_ctx.is_final))
+            super().setup()
+
+        def get_call_kwargs(self, case: Case) -> dict[str, Any]:
+            return call_kwargs
+
+        def step(self, case: Case, previous: tuple[StepResult, Direction] | None = None) -> StepResult:
+            # Checking the stop event once inside `step` is sufficient as it is called frequently
+            # The idea is to stop the execution as soon as possible
+            if stop_event.is_set():
+                raise KeyboardInterrupt
+            event_queue.put(events.StepStarted())
+            ctx.reset_step()
+            try:
+                result = super().step(case, previous)
+            except CheckFailed:
+                ctx.step_failed()
+                raise
+            except Exception:
+                ctx.step_errored()
+                raise
+            finally:
+                transition_id: events.TransitionId | None
+                if previous is not None:
+                    transition = previous[1]
+                    transition_id = events.TransitionId(
+                        name=transition.name,
+                        status_code=transition.status_code,
+                        source=transition.operation.verbose_name,
+                    )
+                else:
+                    transition_id = None
+                response: events.ResponseData | None
+                if ctx.current_response is not None:
+                    response = events.ResponseData(
+                        status_code=ctx.current_response.status_code,
+                        elapsed=ctx.current_response.elapsed.total_seconds(),
+                    )
+                else:
+                    response = None
+                event_queue.put(
+                    events.StepFinished(
+                        status=ctx.current_step_status,
+                        transition_id=transition_id,
+                        target=case.operation.verbose_name,
+                        response=response,
+                    )
+                )
+            return result
+
+        def validate_response(
+            self, response: GenericResponse, case: Case, additional_checks: tuple[CheckFunction, ...] = ()
+        ) -> None:
+            ctx.current_response = response
+            validate_response(response, case, ctx, config.checks, additional_checks)
+
+        def teardown(self) -> None:
+            build_ctx = current_build_context()
+            event_queue.put(
+                events.ScenarioFinished(
+                    status=ctx.current_scenario_status,
+                    is_final=build_ctx.is_final,
+                )
+            )
+            super().teardown()
+
+    while True:
+        # This loop is running until no new failures are found in a single iteration
+        event_queue.put(events.SuiteStarted())
+        if stop_event.is_set():
+            event_queue.put(events.SuiteFinished(status=events.SuiteStatus.INTERRUPTED, failures=[]))
+            break
+        suite_status = events.SuiteStatus.SUCCESS
+        try:
+            with reporting.with_reporter(lambda _: None):  # type: ignore
+                InstrumentedStateMachine.run(settings=config.hypothesis_settings)
+        except KeyboardInterrupt:
+            # Raised in the state machine when the stop event is set or it is raised by the user's code
+            # that is placed in the base class of the state machine.
+            # Therefore, set the stop event to cover the latter case
+            stop_event.set()
+            suite_status = events.SuiteStatus.INTERRUPTED
+            break
+        except CheckFailed as exc:
+            # When a check fails, the state machine is stopped
+            # The failure is already sent to the queue by the state machine
+            # Here we need to either exit or re-run the state machine with this failure marked as known
+            suite_status = events.SuiteStatus.FAILURE
+            if config.exit_first:
+                break
+            ctx.mark_as_seen_in_run(exc)
+            continue
+        except Flaky:
+            suite_status = events.SuiteStatus.FAILURE
+            if config.exit_first:
+                break
+            # Mark all failures in this suite as seen to prevent them being re-discovered
+            ctx.mark_current_suite_as_seen_in_run()
+            continue
+        except Exception as exc:
+            # Any other exception is an inner error and the test run should be stopped
+            suite_status = events.SuiteStatus.ERROR
+            event_queue.put(events.Errored(exception=exc))
+            break
+        finally:
+            event_queue.put(events.SuiteFinished(status=suite_status, failures=ctx.failures_for_suite))
+            ctx.reset()
+        # Exit on the first successful state machine execution
+        break
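Note: the runner above emits events in a predictable order for each run: RunStarted, then one or more suites (SuiteStarted followed by SuiteFinished), each suite consisting of scenarios (ScenarioStarted and ScenarioFinished) built from individual steps (StepStarted and StepFinished), and finally RunFinished; Interrupted and Errored appear only when the run is cancelled or an internal error occurs.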
schemathesis/stateful/sink.py ADDED
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING
+
+from . import events
+from .statistic import TransitionStats
+
+if TYPE_CHECKING:
+    from ..models import Check
+
+
+@dataclass
+class AverageResponseTime:
+    """Average response time for a given status code.
+
+    Stored as a sum of all response times and a count of responses.
+    """
+
+    total: float
+    count: int
+
+    __slots__ = ("total", "count")
+
+    def __init__(self) -> None:
+        self.total = 0.0
+        self.count = 0
+
+
+@dataclass
+class StateMachineSink:
+    """Collects events and stores data about the state machine execution."""
+
+    transitions: TransitionStats
+    response_times: dict[str, dict[int, AverageResponseTime]] = field(default_factory=dict)
+    steps: dict[events.StepStatus, int] = field(default_factory=lambda: {status: 0 for status in events.StepStatus})
+    scenarios: dict[events.ScenarioStatus, int] = field(
+        default_factory=lambda: {status: 0 for status in events.ScenarioStatus}
+    )
+    suites: dict[events.SuiteStatus, int] = field(default_factory=lambda: {status: 0 for status in events.SuiteStatus})
+    failures: list[Check] = field(default_factory=list)
+    start_time: float | None = None
+    end_time: float | None = None
+
+    def consume(self, event: events.StatefulEvent) -> None:
+        self.transitions.consume(event)
+        if isinstance(event, events.RunStarted):
+            self.start_time = event.timestamp
+        elif isinstance(event, events.StepFinished):
+            self.steps[event.status] += 1
+            responses = self.response_times.setdefault(event.target, {})
+            if event.response is not None:
+                average = responses.setdefault(event.response.status_code, AverageResponseTime())
+                average.total += event.response.elapsed
+                average.count += 1
+        elif isinstance(event, events.ScenarioFinished):
+            self.scenarios[event.status] += 1
+        elif isinstance(event, events.SuiteFinished):
+            self.suites[event.status] += 1
+            self.failures.extend(event.failures)
+        elif isinstance(event, events.RunFinished):
+            self.end_time = event.timestamp
+
+    @property
+    def duration(self) -> float | None:
+        if self.start_time is not None and self.end_time is not None:
+            return self.end_time - self.start_time
+        return None
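Note: AverageResponseTime stores a running sum and count rather than the mean itself, so the mean is derived as total / count when reporting. For example, three responses of 0.2 s, 0.3 s and 0.4 s accumulate to total = 0.9 and count = 3, i.e. an average of 0.3 s for that operation and status code.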
schemathesis/stateful/state_machine.py CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
-import time
 import re
+import time
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, ClassVar
 
@@ -12,6 +12,9 @@ from .._dependency_versions import HYPOTHESIS_HAS_STATEFUL_NAMING_IMPROVEMENTS
 from ..constants import NO_LINKS_ERROR_MESSAGE, NOT_SET
 from ..exceptions import UsageError
 from ..models import APIOperation, Case, CheckFunction
+from .runner import StatefulTestRunner, StatefulTestRunnerConfig
+from .sink import StateMachineSink
+from .statistic import TransitionStats
 
 if TYPE_CHECKING:
     import hypothesis
@@ -30,7 +33,7 @@ class StepResult:
     elapsed: float
 
 
-def 
+def _normalize_name(name: str) -> str:
     return re.sub(r"\W|^(?=\d)", "_", name).replace("__", "_")
 
 
@@ -45,6 +48,8 @@ class APIStateMachine(RuleBasedStateMachine):
     # attribute will be renamed in the future
     bundles: ClassVar[dict[str, CaseInsensitiveDict]]  # type: ignore
     schema: BaseSchema
+    # A template for transition statistics that can be filled with data from the state machine during its execution
+    _transition_stats_template: ClassVar[TransitionStats]
 
     def __init__(self) -> None:
         try:
@@ -60,18 +65,26 @@ class APIStateMachine(RuleBasedStateMachine):
             # State machines suppose to be reproducible, hence it is OK to get kwargs here
            kwargs = self.get_call_kwargs(value)
            return _print_case(value, kwargs)
-        if isinstance(value, tuple) and len(value) == 2:
-            result, direction = value
-            wrapper = _DirectionWrapper(direction)
-            return super()._pretty_print((result, wrapper))  # type: ignore
        return super()._pretty_print(value)  # type: ignore
 
     if HYPOTHESIS_HAS_STATEFUL_NAMING_IMPROVEMENTS:
 
         def _new_name(self, target: str) -> str:
-            target = 
+            target = _normalize_name(target)
             return super()._new_name(target)  # type: ignore
 
+    def _get_target_for_result(self, result: StepResult) -> str | None:
+        raise NotImplementedError
+
+    def _add_result_to_targets(self, targets: tuple[str, ...], result: StepResult) -> None:
+        target = self._get_target_for_result(result)
+        if target is not None:
+            super()._add_result_to_targets((target,), result)
+
+    @classmethod
+    def format_rules(cls) -> str:
+        raise NotImplementedError
+
     @classmethod
     def run(cls, *, settings: hypothesis.settings | None = None) -> None:
         """Run state machine as a test."""
@@ -79,6 +92,18 @@ class APIStateMachine(RuleBasedStateMachine):
 
         return run_state_machine_as_test(cls, settings=settings)
 
+    @classmethod
+    def runner(cls, *, config: StatefulTestRunnerConfig | None = None) -> StatefulTestRunner:
+        """Create a runner for this state machine."""
+        from .runner import StatefulTestRunnerConfig
+
+        return StatefulTestRunner(cls, config=config or StatefulTestRunnerConfig())
+
+    @classmethod
+    def sink(cls) -> StateMachineSink:
+        """Create a sink to collect events into."""
+        return StateMachineSink(transitions=cls._transition_stats_template.copy())
+
     def setup(self) -> None:
         """Hook method that runs unconditionally in the beginning of each test scenario.
 
@@ -94,12 +119,14 @@ class APIStateMachine(RuleBasedStateMachine):
     def transform(self, result: StepResult, direction: Direction, case: Case) -> Case:
         raise NotImplementedError
 
-    def _step(self, case: Case, previous:
+    def _step(self, case: Case, previous: StepResult | None = None, link: Direction | None = None) -> StepResult:
         # This method is a proxy that is used under the hood during the state machine initialization.
         # The whole point of having it is to make it possible to override `step`; otherwise, custom "step" is ignored.
         # It happens because, at the point of initialization, the final class is not yet created.
         __tracebackhide__ = True
-
+        if previous is not None and link is not None:
+            return self.step(case, (previous, link))
+        return self.step(case, None)
 
     def step(self, case: Case, previous: tuple[StepResult, Direction] | None = None) -> StepResult:
         """A single state machine step.
@@ -110,6 +137,8 @@ class APIStateMachine(RuleBasedStateMachine):
         Schemathesis prepares data, makes a call and validates the received response.
         It is the most high-level point to extend the testing process. You probably don't need it in most cases.
         """
+        from ..specs.openapi.checks import use_after_free
+
         __tracebackhide__ = True
         if previous is not None:
             result, direction = previous
@@ -120,7 +149,7 @@ class APIStateMachine(RuleBasedStateMachine):
         response = self.call(case, **kwargs)
         elapsed = time.monotonic() - start
         self.after_call(response, case)
-        self.validate_response(response, case)
+        self.validate_response(response, case, additional_checks=(use_after_free,))
         return self.store_result(response, case, elapsed)
 
     def before_call(self, case: Case) -> None:
@@ -274,15 +303,3 @@ class Direction:
 
     def set_data(self, case: Case, elapsed: float, **kwargs: Any) -> None:
         raise NotImplementedError
-
-
-@dataclass(repr=False)
-class _DirectionWrapper:
-    """Purely to avoid modification of `Direction.__repr__`."""
-
-    direction: Direction
-
-    def __repr__(self) -> str:
-        path = self.direction.operation.path
-        method = self.direction.operation.method.upper()
-        return f"state.schema['{path}']['{method}'].links['{self.direction.status_code}']['{self.direction.name}']"
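The new runner() and sink() class methods are the public entry points that tie the additions above together. A minimal usage sketch (hypothetical; the schema URL and local server are placeholders, and it assumes the schema-derived state machine provides the transition statistics template):

import schemathesis

# Load a schema and derive the link-based state machine from it
schema = schemathesis.from_uri("http://127.0.0.1:8000/openapi.json")
StateMachine = schema.as_state_machine()

runner = StateMachine.runner()  # StatefulTestRunner with the default StatefulTestRunnerConfig
sink = StateMachine.sink()      # StateMachineSink built from the transition statistics template

for event in runner.execute():  # events are yielded while the state machine runs in a background thread
    sink.consume(event)

print(sink.steps, sink.scenarios, sink.duration)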
schemathesis/stateful/statistic.py ADDED
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from . import events
+
+
+@dataclass
+class TransitionStats:
+    """Statistic for transitions in a state machine."""
+
+    def consume(self, event: events.StatefulEvent) -> None:
+        raise NotImplementedError
+
+    def copy(self) -> TransitionStats:
+        """Create a copy of the statistic."""
+        raise NotImplementedError
+
+    def to_formatted_table(self, width: int) -> str:
+        raise NotImplementedError
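Note: this module only defines the abstract interface; judging by the file list above, a concrete implementation is added in schemathesis/specs/openapi/stateful/statistic.py (+198 lines), which presumably supplies the _transition_stats_template used by APIStateMachine.sink() above.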
schemathesis/stateful/validation.py ADDED
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from ..exceptions import CheckFailed, get_grouped_exception
+from .context import RunnerContext
+
+if TYPE_CHECKING:
+    from ..failures import FailureContext
+    from ..models import Case, CheckFunction
+    from ..transports.responses import GenericResponse
+
+
+def validate_response(
+    response: GenericResponse,
+    case: Case,
+    failures: RunnerContext,
+    checks: tuple[CheckFunction, ...],
+    additional_checks: tuple[CheckFunction, ...] = (),
+) -> None:
+    """Validate the response against the provided checks."""
+    from .._compat import MultipleFailures
+    from ..models import Check, Status
+
+    exceptions: list[CheckFailed | AssertionError] = []
+
+    def _on_failure(exc: CheckFailed | AssertionError, message: str, context: FailureContext | None) -> None:
+        exceptions.append(exc)
+        if failures.is_seen_in_suite(exc):
+            return
+        failures.add_failed_check(
+            Check(
+                name=name,
+                value=Status.failure,
+                response=response,
+                elapsed=response.elapsed.total_seconds(),
+                example=copied_case,
+                message=message,
+                context=context,
+                request=None,
+            )
+        )
+        failures.mark_as_seen_in_suite(exc)
+
+    for check in checks + additional_checks:
+        name = check.__name__
+        copied_case = case.partial_deepcopy()
+        try:
+            check(response, copied_case)
+        except CheckFailed as exc:
+            if failures.is_seen_in_run(exc):
+                continue
+            _on_failure(exc, str(exc), exc.context)
+        except AssertionError as exc:
+            if failures.is_seen_in_run(exc):
+                continue
+            _on_failure(exc, str(exc) or f"Custom check failed: `{name}`", None)
+        except MultipleFailures as exc:
+            for subexc in exc.exceptions:
+                if failures.is_seen_in_run(subexc):
+                    continue
+                _on_failure(subexc, str(subexc), subexc.context)
+
+    # Raise a grouped exception so Hypothesis can properly deduplicate it against the other failures
+    if exceptions:
+        raise get_grouped_exception(case.operation.verbose_name, *exceptions)(causes=tuple(exceptions))
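Note: the is_seen_in_suite / is_seen_in_run bookkeeping on RunnerContext is what lets the loop in runner.py above re-run the state machine after a failure without reporting the same problem twice, while raising the grouped CheckFailed keeps Hypothesis's own deduplication of failing examples working.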
schemathesis/targets.py CHANGED
schemathesis/throttling.py CHANGED
@@ -1,11 +1,12 @@
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
 
+from ._dependency_versions import IS_PYRATE_LIMITER_ABOVE_3
 from .exceptions import UsageError
 
-
 if TYPE_CHECKING:
-    from pyrate_limiter import Limiter
+    from pyrate_limiter import Duration, Limiter
 
 
 def parse_units(rate: str) -> tuple[int, int]:
@@ -33,9 +34,28 @@ def invalid_rate(value: str) -> UsageError:
     )
 
 
+def _get_max_delay(value: int, unit: Duration) -> int:
+    from pyrate_limiter import Duration
+
+    if unit == Duration.SECOND:
+        multiplier = 1
+    elif unit == Duration.MINUTE:
+        multiplier = 60
+    elif unit == Duration.HOUR:
+        multiplier = 60 * 60
+    else:
+        multiplier = 60 * 60 * 24
+    # Delay is in milliseconds + `pyrate_limiter` adds 50ms on top.
+    # Hence adding 100 covers this
+    return value * multiplier * 1000 + 100
+
+
 def build_limiter(rate: str) -> Limiter:
     from ._rate_limiter import Limiter, Rate
 
     limit, interval = parse_units(rate)
     rate = Rate(limit, interval)
-
+    kwargs = {}
+    if IS_PYRATE_LIMITER_ABOVE_3:
+        kwargs["max_delay"] = _get_max_delay(limit, interval)
+    return Limiter(rate, **kwargs)
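Worked example of the new code path: a rate limit of five requests per minute parses to limit=5 with a one-minute interval, so _get_max_delay(5, Duration.MINUTE) returns 5 * 60 * 1000 + 100 = 300100 milliseconds, which build_limiter passes as max_delay when pyrate_limiter 3.x is installed.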
schemathesis/transports/__init__.py CHANGED
@@ -2,16 +2,16 @@ from __future__ import annotations
 
 import base64
 import time
-from inspect import iscoroutinefunction
 from contextlib import contextmanager
 from dataclasses import dataclass
 from datetime import timedelta
-from 
+from inspect import iscoroutinefunction
+from typing import TYPE_CHECKING, Any, Generator, Optional, Protocol, TypeVar, cast
 from urllib.parse import urlparse
 
 from .. import failures
 from .._dependency_versions import IS_WERKZEUG_ABOVE_3
-from ..constants import DEFAULT_RESPONSE_TIMEOUT
+from ..constants import DEFAULT_RESPONSE_TIMEOUT, NOT_SET
 from ..exceptions import get_timeout_error
 from ..serializers import SerializerContext
 from ..types import Cookies, NotSet
@@ -88,12 +88,17 @@ class RequestsTransport:
         cookies: dict[str, Any] | None = None,
     ) -> dict[str, Any]:
         final_headers = case._get_headers(headers)
-
+        media_type: Optional[str]
+        if case.body is not NOT_SET and case.media_type is None:
+            media_type = case.operation._get_default_media_type()
+        else:
+            media_type = case.media_type
+        if media_type and media_type != "multipart/form-data" and not isinstance(case.body, NotSet):
             # `requests` will handle multipart form headers with the proper `boundary` value.
             if "content-type" not in final_headers:
-                final_headers["Content-Type"] = 
+                final_headers["Content-Type"] = media_type
         url = case._get_url(base_url)
-        serializer = case._get_serializer()
+        serializer = case._get_serializer(media_type)
         if serializer is not None and not isinstance(case.body, NotSet):
             context = SerializerContext(case=case)
             extra = serializer.as_requests(context, case._get_body())
@@ -169,7 +174,7 @@ class RequestsTransport:
             timeout = 1000 * data["timeout"]  # It is defined and not empty, since the exception happened
             code_message = case._get_code_message(case.operation.schema.code_sample_style, request, verify=verify)
             message = f"The server failed to respond within the specified limit of {timeout:.2f}ms"
-            raise get_timeout_error(timeout)(
+            raise get_timeout_error(case.operation.verbose_name, timeout)(
                 f"\n\n1. {failures.RequestTimeout.title}\n\n{message}\n\n{code_message}",
                 context=failures.RequestTimeout(message=message, timeout=timeout),
             ) from None
@@ -186,6 +191,14 @@ def _merge_dict_to(data: dict[str, Any], data_key: str, new: dict[str, Any]) ->
     data[data_key] = original
 
 
+def prepare_timeout(timeout: int | None) -> float | None:
+    """Request timeout is in milliseconds, but `requests` uses seconds."""
+    output: int | float | None = timeout
+    if timeout is not None:
+        output = timeout / 1000
+    return output
+
+
 def validate_vanilla_requests_kwargs(data: dict[str, Any]) -> None:
     """Check arguments for `requests.Session.request`.
 
@@ -240,11 +253,16 @@ class WSGITransport:
         cookies: dict[str, Any] | None = None,
     ) -> dict[str, Any]:
         final_headers = case._get_headers(headers)
-
+        media_type: Optional[str]
+        if case.body is not NOT_SET and case.media_type is None:
+            media_type = case.operation._get_default_media_type()
+        else:
+            media_type = case.media_type
+        if media_type and not isinstance(case.body, NotSet):
            # If we need to send a payload, then the Content-Type header should be set
-            final_headers["Content-Type"] = 
+            final_headers["Content-Type"] = media_type
         extra: dict[str, Any]
-        serializer = case._get_serializer()
+        serializer = case._get_serializer(media_type)
         if serializer is not None and not isinstance(case.body, NotSet):
             context = SerializerContext(case=case)
             extra = serializer.as_werkzeug(context, case._get_body())
schemathesis/transports/auth.py CHANGED