schemathesis 3.29.2__py3-none-any.whl → 3.30.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. schemathesis/__init__.py +3 -3
  2. schemathesis/_compat.py +2 -2
  3. schemathesis/_dependency_versions.py +1 -3
  4. schemathesis/_hypothesis.py +6 -0
  5. schemathesis/_lazy_import.py +1 -0
  6. schemathesis/_override.py +1 -0
  7. schemathesis/_rate_limiter.py +2 -1
  8. schemathesis/_xml.py +1 -0
  9. schemathesis/auths.py +4 -2
  10. schemathesis/checks.py +8 -5
  11. schemathesis/cli/__init__.py +28 -1
  12. schemathesis/cli/callbacks.py +3 -4
  13. schemathesis/cli/cassettes.py +6 -4
  14. schemathesis/cli/constants.py +2 -0
  15. schemathesis/cli/context.py +5 -0
  16. schemathesis/cli/debug.py +2 -1
  17. schemathesis/cli/handlers.py +1 -1
  18. schemathesis/cli/junitxml.py +5 -4
  19. schemathesis/cli/options.py +1 -0
  20. schemathesis/cli/output/default.py +56 -24
  21. schemathesis/cli/output/short.py +21 -10
  22. schemathesis/cli/sanitization.py +1 -0
  23. schemathesis/code_samples.py +1 -0
  24. schemathesis/constants.py +1 -0
  25. schemathesis/contrib/openapi/__init__.py +1 -1
  26. schemathesis/contrib/openapi/fill_missing_examples.py +2 -0
  27. schemathesis/contrib/openapi/formats/uuid.py +2 -1
  28. schemathesis/contrib/unique_data.py +2 -1
  29. schemathesis/exceptions.py +42 -61
  30. schemathesis/experimental/__init__.py +14 -0
  31. schemathesis/extra/_aiohttp.py +1 -0
  32. schemathesis/extra/_server.py +1 -0
  33. schemathesis/extra/pytest_plugin.py +13 -24
  34. schemathesis/failures.py +42 -8
  35. schemathesis/filters.py +2 -1
  36. schemathesis/fixups/__init__.py +1 -0
  37. schemathesis/fixups/fast_api.py +2 -2
  38. schemathesis/fixups/utf8_bom.py +1 -2
  39. schemathesis/generation/__init__.py +2 -1
  40. schemathesis/hooks.py +3 -1
  41. schemathesis/internal/copy.py +19 -3
  42. schemathesis/internal/deprecation.py +1 -1
  43. schemathesis/internal/jsonschema.py +2 -1
  44. schemathesis/internal/output.py +68 -0
  45. schemathesis/internal/result.py +1 -1
  46. schemathesis/internal/transformation.py +1 -0
  47. schemathesis/lazy.py +11 -2
  48. schemathesis/loaders.py +4 -2
  49. schemathesis/models.py +22 -7
  50. schemathesis/parameters.py +1 -0
  51. schemathesis/runner/__init__.py +1 -1
  52. schemathesis/runner/events.py +22 -4
  53. schemathesis/runner/impl/core.py +69 -33
  54. schemathesis/runner/impl/solo.py +2 -1
  55. schemathesis/runner/impl/threadpool.py +4 -0
  56. schemathesis/runner/probes.py +1 -1
  57. schemathesis/runner/serialization.py +1 -1
  58. schemathesis/sanitization.py +2 -0
  59. schemathesis/schemas.py +7 -4
  60. schemathesis/service/ci.py +1 -0
  61. schemathesis/service/client.py +7 -7
  62. schemathesis/service/events.py +2 -1
  63. schemathesis/service/extensions.py +5 -5
  64. schemathesis/service/hosts.py +1 -0
  65. schemathesis/service/metadata.py +2 -1
  66. schemathesis/service/models.py +2 -1
  67. schemathesis/service/report.py +3 -3
  68. schemathesis/service/serialization.py +62 -23
  69. schemathesis/service/usage.py +1 -0
  70. schemathesis/specs/graphql/_cache.py +1 -1
  71. schemathesis/specs/graphql/loaders.py +17 -1
  72. schemathesis/specs/graphql/nodes.py +1 -0
  73. schemathesis/specs/graphql/scalars.py +2 -2
  74. schemathesis/specs/graphql/schemas.py +7 -7
  75. schemathesis/specs/graphql/validation.py +1 -2
  76. schemathesis/specs/openapi/_hypothesis.py +17 -11
  77. schemathesis/specs/openapi/checks.py +102 -9
  78. schemathesis/specs/openapi/converter.py +2 -1
  79. schemathesis/specs/openapi/definitions.py +2 -1
  80. schemathesis/specs/openapi/examples.py +7 -9
  81. schemathesis/specs/openapi/expressions/__init__.py +29 -2
  82. schemathesis/specs/openapi/expressions/context.py +1 -1
  83. schemathesis/specs/openapi/expressions/extractors.py +23 -0
  84. schemathesis/specs/openapi/expressions/lexer.py +19 -18
  85. schemathesis/specs/openapi/expressions/nodes.py +24 -4
  86. schemathesis/specs/openapi/expressions/parser.py +26 -5
  87. schemathesis/specs/openapi/filters.py +1 -0
  88. schemathesis/specs/openapi/links.py +35 -7
  89. schemathesis/specs/openapi/loaders.py +31 -11
  90. schemathesis/specs/openapi/negative/__init__.py +2 -1
  91. schemathesis/specs/openapi/negative/mutations.py +1 -0
  92. schemathesis/specs/openapi/parameters.py +1 -0
  93. schemathesis/specs/openapi/schemas.py +28 -39
  94. schemathesis/specs/openapi/security.py +1 -0
  95. schemathesis/specs/openapi/serialization.py +1 -0
  96. schemathesis/specs/openapi/stateful/__init__.py +159 -70
  97. schemathesis/specs/openapi/stateful/statistic.py +198 -0
  98. schemathesis/specs/openapi/stateful/types.py +13 -0
  99. schemathesis/specs/openapi/utils.py +1 -0
  100. schemathesis/specs/openapi/validation.py +1 -0
  101. schemathesis/stateful/__init__.py +4 -2
  102. schemathesis/stateful/config.py +66 -0
  103. schemathesis/stateful/context.py +103 -0
  104. schemathesis/stateful/events.py +215 -0
  105. schemathesis/stateful/runner.py +238 -0
  106. schemathesis/stateful/sink.py +68 -0
  107. schemathesis/stateful/state_machine.py +39 -22
  108. schemathesis/stateful/statistic.py +20 -0
  109. schemathesis/stateful/validation.py +66 -0
  110. schemathesis/targets.py +1 -0
  111. schemathesis/throttling.py +23 -3
  112. schemathesis/transports/__init__.py +28 -10
  113. schemathesis/transports/auth.py +1 -0
  114. schemathesis/transports/content_types.py +1 -1
  115. schemathesis/transports/headers.py +2 -1
  116. schemathesis/transports/responses.py +6 -4
  117. schemathesis/types.py +1 -0
  118. schemathesis/utils.py +1 -0
  119. {schemathesis-3.29.2.dist-info → schemathesis-3.30.1.dist-info}/METADATA +3 -3
  120. schemathesis-3.30.1.dist-info/RECORD +151 -0
  121. schemathesis/specs/openapi/stateful/links.py +0 -92
  122. schemathesis-3.29.2.dist-info/RECORD +0 -141
  123. {schemathesis-3.29.2.dist-info → schemathesis-3.30.1.dist-info}/WHEEL +0 -0
  124. {schemathesis-3.29.2.dist-info → schemathesis-3.30.1.dist-info}/entry_points.txt +0 -0
  125. {schemathesis-3.29.2.dist-info → schemathesis-3.30.1.dist-info}/licenses/LICENSE +0 -0
schemathesis/stateful/runner.py ADDED
@@ -0,0 +1,238 @@
+ from __future__ import annotations
+
+ import queue
+ import threading
+ from contextlib import contextmanager
+ from dataclasses import dataclass, field
+ from typing import TYPE_CHECKING, Any, Generator, Iterator, Type, cast
+
+ from hypothesis.control import current_build_context
+ from hypothesis.errors import Flaky
+
+ from ..exceptions import CheckFailed
+ from . import events
+ from .config import StatefulTestRunnerConfig
+ from .context import RunnerContext
+ from .validation import validate_response
+
+ if TYPE_CHECKING:
+     from ..models import Case, CheckFunction
+     from ..transports.responses import GenericResponse
+     from .state_machine import APIStateMachine, Direction, StepResult
+
+ EVENT_QUEUE_TIMEOUT = 0.01
+
+
+ @dataclass
+ class StatefulTestRunner:
+     """Stateful test runner for the given state machine.
+
+     By default, the test runner executes the state machine in a loop until no new failures are found.
+     The loop is executed in a separate thread for better control over the execution and reporting.
+     """
+
+     # State machine class to use
+     state_machine: Type[APIStateMachine]
+     # Test runner configuration that defines the runtime behavior
+     config: StatefulTestRunnerConfig = field(default_factory=StatefulTestRunnerConfig)
+     # Event to stop the execution
+     stop_event: threading.Event = field(default_factory=threading.Event)
+     # Queue to communicate with the state machine execution
+     event_queue: queue.Queue = field(default_factory=queue.Queue)
+
+     def execute(self) -> Iterator[events.StatefulEvent]:
+         """Execute a test run for a state machine."""
+         self.stop_event.clear()
+
+         yield events.RunStarted(state_machine=self.state_machine)
+
+         runner_thread = threading.Thread(
+             target=_execute_state_machine_loop,
+             kwargs={
+                 "state_machine": self.state_machine,
+                 "event_queue": self.event_queue,
+                 "config": self.config,
+                 "stop_event": self.stop_event,
+             },
+         )
+         run_status = events.RunStatus.SUCCESS
+
+         with thread_manager(runner_thread):
+             try:
+                 while True:
+                     try:
+                         event = self.event_queue.get(timeout=EVENT_QUEUE_TIMEOUT)
+                         # Set the run status based on the suite status
+                         # ERROR & INTERRUPTED statuses are terminal, therefore they should not be overridden
+                         if isinstance(event, events.SuiteFinished):
+                             if event.status == events.SuiteStatus.FAILURE:
+                                 run_status = events.RunStatus.FAILURE
+                             elif event.status == events.SuiteStatus.ERROR:
+                                 run_status = events.RunStatus.ERROR
+                             elif event.status == events.SuiteStatus.INTERRUPTED:
+                                 run_status = events.RunStatus.INTERRUPTED
+                         yield event
+                     except queue.Empty:
+                         if not runner_thread.is_alive():
+                             break
+             except KeyboardInterrupt:
+                 # Immediately notify the runner thread to stop, even though the event will be set below in `finally`
+                 self.stop()
+                 run_status = events.RunStatus.INTERRUPTED
+                 yield events.Interrupted()
+             finally:
+                 self.stop()
+
+         yield events.RunFinished(status=run_status)
+
+     def stop(self) -> None:
+         """Stop the execution of the state machine."""
+         self.stop_event.set()
+
+
+ @contextmanager
+ def thread_manager(thread: threading.Thread) -> Generator[None, None, None]:
+     thread.start()
+     try:
+         yield
+     finally:
+         thread.join()
+
+
+ def _execute_state_machine_loop(
+     *,
+     state_machine: Type[APIStateMachine],
+     event_queue: queue.Queue,
+     config: StatefulTestRunnerConfig,
+     stop_event: threading.Event,
+ ) -> None:
+     """Execute the state machine testing loop."""
+     from hypothesis import reporting
+
+     from ..transports import RequestsTransport, prepare_timeout
+
+     ctx = RunnerContext()
+
+     call_kwargs: dict[str, Any] = {"headers": config.headers}
+     if isinstance(state_machine.schema.transport, RequestsTransport):
+         call_kwargs["timeout"] = prepare_timeout(config.request_timeout)
+
+     class InstrumentedStateMachine(state_machine):  # type: ignore[valid-type,misc]
+         """State machine with additional hooks for emitting events."""
+
+         def setup(self) -> None:
+             build_ctx = current_build_context()
+             event_queue.put(events.ScenarioStarted(is_final=build_ctx.is_final))
+             super().setup()
+
+         def get_call_kwargs(self, case: Case) -> dict[str, Any]:
+             return call_kwargs
+
+         def step(self, case: Case, previous: tuple[StepResult, Direction] | None = None) -> StepResult:
+             # Checking the stop event once inside `step` is sufficient as it is called frequently
+             # The idea is to stop the execution as soon as possible
+             if stop_event.is_set():
+                 raise KeyboardInterrupt
+             event_queue.put(events.StepStarted())
+             try:
+                 result = super().step(case, previous)
+                 ctx.step_succeeded()
+             except CheckFailed:
+                 ctx.step_failed()
+                 raise
+             except Exception:
+                 ctx.step_errored()
+                 raise
+             except KeyboardInterrupt:
+                 ctx.step_interrupted()
+                 raise
+             finally:
+                 transition_id: events.TransitionId | None
+                 if previous is not None:
+                     transition = previous[1]
+                     transition_id = events.TransitionId(
+                         name=transition.name,
+                         status_code=transition.status_code,
+                         source=transition.operation.verbose_name,
+                     )
+                 else:
+                     transition_id = None
+                 response: events.ResponseData | None
+                 if ctx.current_response is not None:
+                     response = events.ResponseData(
+                         status_code=ctx.current_response.status_code,
+                         elapsed=ctx.current_response.elapsed.total_seconds(),
+                     )
+                 else:
+                     response = None
+                 status = cast(events.StepStatus, ctx.current_step_status)
+                 event_queue.put(
+                     events.StepFinished(
+                         status=status,
+                         transition_id=transition_id,
+                         target=case.operation.verbose_name,
+                         response=response,
+                     )
+                 )
+             return result
+
+         def validate_response(
+             self, response: GenericResponse, case: Case, additional_checks: tuple[CheckFunction, ...] = ()
+         ) -> None:
+             ctx.current_response = response
+             validate_response(response, case, ctx, config.checks, additional_checks)
+
+         def teardown(self) -> None:
+             build_ctx = current_build_context()
+             event_queue.put(
+                 events.ScenarioFinished(
+                     status=ctx.current_scenario_status,
+                     is_final=build_ctx.is_final,
+                 )
+             )
+             ctx.reset_step()
+             super().teardown()
+
+     while True:
+         # This loop is running until no new failures are found in a single iteration
+         event_queue.put(events.SuiteStarted())
+         if stop_event.is_set():
+             event_queue.put(events.SuiteFinished(status=events.SuiteStatus.INTERRUPTED, failures=[]))
+             break
+         suite_status = events.SuiteStatus.SUCCESS
+         try:
+             with reporting.with_reporter(lambda _: None):  # type: ignore
+                 InstrumentedStateMachine.run(settings=config.hypothesis_settings)
+         except KeyboardInterrupt:
+             # Raised in the state machine when the stop event is set or it is raised by the user's code
+             # that is placed in the base class of the state machine.
+             # Therefore, set the stop event to cover the latter case
+             stop_event.set()
+             suite_status = events.SuiteStatus.INTERRUPTED
+             break
+         except CheckFailed as exc:
+             # When a check fails, the state machine is stopped
+             # The failure is already sent to the queue by the state machine
+             # Here we need to either exit or re-run the state machine with this failure marked as known
+             suite_status = events.SuiteStatus.FAILURE
+             if config.exit_first:
+                 break
+             ctx.mark_as_seen_in_run(exc)
+             continue
+         except Flaky:
+             suite_status = events.SuiteStatus.FAILURE
+             if config.exit_first:
+                 break
+             # Mark all failures in this suite as seen to prevent them from being re-discovered
+             ctx.mark_current_suite_as_seen_in_run()
+             continue
+         except Exception as exc:
+             # Any other exception is an inner error and the test run should be stopped
+             suite_status = events.SuiteStatus.ERROR
+             event_queue.put(events.Errored(exception=exc))
+             break
+         finally:
+             event_queue.put(events.SuiteFinished(status=suite_status, failures=ctx.failures_for_suite))
+             ctx.reset()
+         # Exit on the first successful state machine execution
+         break
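The runner above reports progress only through the event stream returned by `execute()`. A minimal consumption sketch, not part of this diff, where `BookingStateMachine` is a hypothetical `APIStateMachine` subclass created elsewhere (e.g. from a loaded schema):

from schemathesis.stateful import events
from schemathesis.stateful.runner import StatefulTestRunner

# `BookingStateMachine` is assumed to exist; it is not defined in this package diff.
runner = StatefulTestRunner(BookingStateMachine)
for event in runner.execute():
    # `SuiteFinished` carries the failures collected during one state machine run.
    if isinstance(event, events.SuiteFinished) and event.status == events.SuiteStatus.FAILURE:
        print(f"Suite failed with {len(event.failures)} failure(s)")
    elif isinstance(event, events.RunFinished):
        print(f"Run finished with status {event.status}")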
schemathesis/stateful/sink.py ADDED
@@ -0,0 +1,68 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass, field
+ from typing import TYPE_CHECKING
+
+ from . import events
+ from .statistic import TransitionStats
+
+ if TYPE_CHECKING:
+     from ..models import Check
+
+
+ @dataclass
+ class AverageResponseTime:
+     """Average response time for a given status code.
+
+     Stored as a sum of all response times and a count of responses.
+     """
+
+     total: float
+     count: int
+
+     __slots__ = ("total", "count")
+
+     def __init__(self) -> None:
+         self.total = 0.0
+         self.count = 0
+
+
+ @dataclass
+ class StateMachineSink:
+     """Collects events and stores data about the state machine execution."""
+
+     transitions: TransitionStats
+     response_times: dict[str, dict[int, AverageResponseTime]] = field(default_factory=dict)
+     steps: dict[events.StepStatus, int] = field(default_factory=lambda: {status: 0 for status in events.StepStatus})
+     scenarios: dict[events.ScenarioStatus, int] = field(
+         default_factory=lambda: {status: 0 for status in events.ScenarioStatus}
+     )
+     suites: dict[events.SuiteStatus, int] = field(default_factory=lambda: {status: 0 for status in events.SuiteStatus})
+     failures: list[Check] = field(default_factory=list)
+     start_time: float | None = None
+     end_time: float | None = None
+
+     def consume(self, event: events.StatefulEvent) -> None:
+         self.transitions.consume(event)
+         if isinstance(event, events.RunStarted):
+             self.start_time = event.timestamp
+         elif isinstance(event, events.StepFinished):
+             self.steps[event.status] += 1
+             responses = self.response_times.setdefault(event.target, {})
+             if event.response is not None:
+                 average = responses.setdefault(event.response.status_code, AverageResponseTime())
+                 average.total += event.response.elapsed
+                 average.count += 1
+         elif isinstance(event, events.ScenarioFinished):
+             self.scenarios[event.status] += 1
+         elif isinstance(event, events.SuiteFinished):
+             self.suites[event.status] += 1
+             self.failures.extend(event.failures)
+         elif isinstance(event, events.RunFinished):
+             self.end_time = event.timestamp
+
+     @property
+     def duration(self) -> float | None:
+         if self.start_time is not None and self.end_time is not None:
+             return self.end_time - self.start_time
+         return None
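`AverageResponseTime` deliberately stores only a running sum and a count, so means are derived on read. A small illustrative helper, not part of the package, that computes mean response times from a sink:

def mean_response_times(sink: StateMachineSink) -> dict[str, dict[int, float]]:
    # Mean response time in seconds per operation and status code,
    # derived from the sum/count pairs accumulated by the sink.
    return {
        target: {code: avg.total / avg.count for code, avg in by_code.items() if avg.count}
        for target, by_code in sink.response_times.items()
    }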
schemathesis/stateful/state_machine.py CHANGED
@@ -1,7 +1,7 @@
  from __future__ import annotations

- import time
  import re
+ import time
  from dataclasses import dataclass
  from typing import TYPE_CHECKING, Any, ClassVar

@@ -12,6 +12,9 @@ from .._dependency_versions import HYPOTHESIS_HAS_STATEFUL_NAMING_IMPROVEMENTS
  from ..constants import NO_LINKS_ERROR_MESSAGE, NOT_SET
  from ..exceptions import UsageError
  from ..models import APIOperation, Case, CheckFunction
+ from .runner import StatefulTestRunner, StatefulTestRunnerConfig
+ from .sink import StateMachineSink
+ from .statistic import TransitionStats

  if TYPE_CHECKING:
      import hypothesis
@@ -30,7 +33,7 @@ class StepResult:
      elapsed: float


- def _operation_name_to_identifier(name: str) -> str:
+ def _normalize_name(name: str) -> str:
      return re.sub(r"\W|^(?=\d)", "_", name).replace("__", "_")

@@ -45,6 +48,8 @@ class APIStateMachine(RuleBasedStateMachine):
      # attribute will be renamed in the future
      bundles: ClassVar[dict[str, CaseInsensitiveDict]]  # type: ignore
      schema: BaseSchema
+     # A template for transition statistics that can be filled with data from the state machine during its execution
+     _transition_stats_template: ClassVar[TransitionStats]

      def __init__(self) -> None:
          try:
@@ -60,18 +65,26 @@ class APIStateMachine(RuleBasedStateMachine):
              # State machines are supposed to be reproducible, hence it is OK to get kwargs here
              kwargs = self.get_call_kwargs(value)
              return _print_case(value, kwargs)
-         if isinstance(value, tuple) and len(value) == 2:
-             result, direction = value
-             wrapper = _DirectionWrapper(direction)
-             return super()._pretty_print((result, wrapper))  # type: ignore
          return super()._pretty_print(value)  # type: ignore

      if HYPOTHESIS_HAS_STATEFUL_NAMING_IMPROVEMENTS:

          def _new_name(self, target: str) -> str:
-             target = _operation_name_to_identifier(target)
+             target = _normalize_name(target)
              return super()._new_name(target)  # type: ignore

+     def _get_target_for_result(self, result: StepResult) -> str | None:
+         raise NotImplementedError
+
+     def _add_result_to_targets(self, targets: tuple[str, ...], result: StepResult) -> None:
+         target = self._get_target_for_result(result)
+         if target is not None:
+             super()._add_result_to_targets((target,), result)
+
+     @classmethod
+     def format_rules(cls) -> str:
+         raise NotImplementedError
+
      @classmethod
      def run(cls, *, settings: hypothesis.settings | None = None) -> None:
          """Run state machine as a test."""
@@ -79,6 +92,18 @@ class APIStateMachine(RuleBasedStateMachine):

          return run_state_machine_as_test(cls, settings=settings)

+     @classmethod
+     def runner(cls, *, config: StatefulTestRunnerConfig | None = None) -> StatefulTestRunner:
+         """Create a runner for this state machine."""
+         from .runner import StatefulTestRunnerConfig
+
+         return StatefulTestRunner(cls, config=config or StatefulTestRunnerConfig())
+
+     @classmethod
+     def sink(cls) -> StateMachineSink:
+         """Create a sink to collect events into."""
+         return StateMachineSink(transitions=cls._transition_stats_template.copy())
+
      def setup(self) -> None:
          """Hook method that runs unconditionally in the beginning of each test scenario.

@@ -94,12 +119,14 @@ class APIStateMachine(RuleBasedStateMachine):
      def transform(self, result: StepResult, direction: Direction, case: Case) -> Case:
          raise NotImplementedError

-     def _step(self, case: Case, previous: tuple[StepResult, Direction] | None = None) -> StepResult:
+     def _step(self, case: Case, previous: StepResult | None = None, link: Direction | None = None) -> StepResult:
          # This method is a proxy that is used under the hood during the state machine initialization.
          # The whole point of having it is to make it possible to override `step`; otherwise, custom "step" is ignored.
          # It happens because, at the point of initialization, the final class is not yet created.
          __tracebackhide__ = True
-         return self.step(case, previous)
+         if previous is not None and link is not None:
+             return self.step(case, (previous, link))
+         return self.step(case, None)

      def step(self, case: Case, previous: tuple[StepResult, Direction] | None = None) -> StepResult:
          """A single state machine step.
@@ -110,6 +137,8 @@ class APIStateMachine(RuleBasedStateMachine):
          Schemathesis prepares data, makes a call and validates the received response.
          It is the most high-level point to extend the testing process. You probably don't need it in most cases.
          """
+         from ..specs.openapi.checks import use_after_free
+
          __tracebackhide__ = True
          if previous is not None:
              result, direction = previous
@@ -120,7 +149,7 @@ class APIStateMachine(RuleBasedStateMachine):
          response = self.call(case, **kwargs)
          elapsed = time.monotonic() - start
          self.after_call(response, case)
-         self.validate_response(response, case)
+         self.validate_response(response, case, additional_checks=(use_after_free,))
          return self.store_result(response, case, elapsed)

      def before_call(self, case: Case) -> None:
@@ -274,15 +303,3 @@ class Direction:

      def set_data(self, case: Case, elapsed: float, **kwargs: Any) -> None:
          raise NotImplementedError
-
-
- @dataclass(repr=False)
- class _DirectionWrapper:
-     """Purely to avoid modification of `Direction.__repr__`."""
-
-     direction: Direction
-
-     def __repr__(self) -> str:
-         path = self.direction.operation.path
-         method = self.direction.operation.method.upper()
-         return f"state.schema['{path}']['{method}'].links['{self.direction.status_code}']['{self.direction.name}']"
schemathesis/stateful/statistic.py ADDED
@@ -0,0 +1,20 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+
+ from . import events
+
+
+ @dataclass
+ class TransitionStats:
+     """Statistic for transitions in a state machine."""
+
+     def consume(self, event: events.StatefulEvent) -> None:
+         raise NotImplementedError
+
+     def copy(self) -> TransitionStats:
+         """Create a copy of the statistic."""
+         raise NotImplementedError
+
+     def to_formatted_table(self, width: int) -> str:
+         raise NotImplementedError
schemathesis/stateful/validation.py ADDED
@@ -0,0 +1,66 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING
+
+ from ..exceptions import CheckFailed, get_grouped_exception
+ from .context import RunnerContext
+
+ if TYPE_CHECKING:
+     from ..failures import FailureContext
+     from ..models import Case, CheckFunction
+     from ..transports.responses import GenericResponse
+
+
+ def validate_response(
+     response: GenericResponse,
+     case: Case,
+     failures: RunnerContext,
+     checks: tuple[CheckFunction, ...],
+     additional_checks: tuple[CheckFunction, ...] = (),
+ ) -> None:
+     """Validate the response against the provided checks."""
+     from .._compat import MultipleFailures
+     from ..models import Check, Status
+
+     exceptions: list[CheckFailed | AssertionError] = []
+
+     def _on_failure(exc: CheckFailed | AssertionError, message: str, context: FailureContext | None) -> None:
+         exceptions.append(exc)
+         if failures.is_seen_in_suite(exc):
+             return
+         failures.add_failed_check(
+             Check(
+                 name=name,
+                 value=Status.failure,
+                 response=response,
+                 elapsed=response.elapsed.total_seconds(),
+                 example=copied_case,
+                 message=message,
+                 context=context,
+                 request=None,
+             )
+         )
+         failures.mark_as_seen_in_suite(exc)
+
+     for check in checks + additional_checks:
+         name = check.__name__
+         copied_case = case.partial_deepcopy()
+         try:
+             check(response, copied_case)
+         except CheckFailed as exc:
+             if failures.is_seen_in_run(exc):
+                 continue
+             _on_failure(exc, str(exc), exc.context)
+         except AssertionError as exc:
+             if failures.is_seen_in_run(exc):
+                 continue
+             _on_failure(exc, str(exc) or f"Custom check failed: `{name}`", None)
+         except MultipleFailures as exc:
+             for subexc in exc.exceptions:
+                 if failures.is_seen_in_run(subexc):
+                     continue
+                 _on_failure(subexc, str(subexc), subexc.context)
+
+     # Raise a grouped exception so Hypothesis can properly deduplicate it against the other failures
+     if exceptions:
+         raise get_grouped_exception(case.operation.verbose_name, *exceptions)(causes=tuple(exceptions))
schemathesis/targets.py CHANGED
@@ -1,4 +1,5 @@
  from __future__ import annotations
+
  from dataclasses import dataclass
  from typing import TYPE_CHECKING, Callable

schemathesis/throttling.py CHANGED
@@ -1,11 +1,12 @@
  from __future__ import annotations
+
  from typing import TYPE_CHECKING

+ from ._dependency_versions import IS_PYRATE_LIMITER_ABOVE_3
  from .exceptions import UsageError

-
  if TYPE_CHECKING:
-     from pyrate_limiter import Limiter
+     from pyrate_limiter import Duration, Limiter


  def parse_units(rate: str) -> tuple[int, int]:
@@ -33,9 +34,28 @@ def invalid_rate(value: str) -> UsageError:
      )


+ def _get_max_delay(value: int, unit: Duration) -> int:
+     from pyrate_limiter import Duration
+
+     if unit == Duration.SECOND:
+         multiplier = 1
+     elif unit == Duration.MINUTE:
+         multiplier = 60
+     elif unit == Duration.HOUR:
+         multiplier = 60 * 60
+     else:
+         multiplier = 60 * 60 * 24
+     # Delay is in milliseconds + `pyrate_limiter` adds 50ms on top.
+     # Hence adding 100 covers this
+     return value * multiplier * 1000 + 100
+
+
  def build_limiter(rate: str) -> Limiter:
      from ._rate_limiter import Limiter, Rate

      limit, interval = parse_units(rate)
      rate = Rate(limit, interval)
-     return Limiter(rate)
+     kwargs = {}
+     if IS_PYRATE_LIMITER_ABOVE_3:
+         kwargs["max_delay"] = _get_max_delay(limit, interval)
+     return Limiter(rate, **kwargs)
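For reference, the new delay budget is a direct function of the parsed rate. With a limit of 5 requests per minute:

_get_max_delay(5, Duration.MINUTE)  # 5 * 60 * 1000 + 100 == 300100 ms

The extra 100 ms covers the roughly 50 ms that `pyrate_limiter` adds on top, per the comment in the helper above.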
schemathesis/transports/__init__.py CHANGED
@@ -2,16 +2,16 @@ from __future__ import annotations

  import base64
  import time
- from inspect import iscoroutinefunction
  from contextlib import contextmanager
  from dataclasses import dataclass
  from datetime import timedelta
- from typing import TYPE_CHECKING, Any, Generator, Protocol, TypeVar, cast
+ from inspect import iscoroutinefunction
+ from typing import TYPE_CHECKING, Any, Generator, Optional, Protocol, TypeVar, cast
  from urllib.parse import urlparse

  from .. import failures
  from .._dependency_versions import IS_WERKZEUG_ABOVE_3
- from ..constants import DEFAULT_RESPONSE_TIMEOUT
+ from ..constants import DEFAULT_RESPONSE_TIMEOUT, NOT_SET
  from ..exceptions import get_timeout_error
  from ..serializers import SerializerContext
  from ..types import Cookies, NotSet
@@ -88,12 +88,17 @@ class RequestsTransport:
          cookies: dict[str, Any] | None = None,
      ) -> dict[str, Any]:
          final_headers = case._get_headers(headers)
-         if case.media_type and case.media_type != "multipart/form-data" and not isinstance(case.body, NotSet):
+         media_type: Optional[str]
+         if case.body is not NOT_SET and case.media_type is None:
+             media_type = case.operation._get_default_media_type()
+         else:
+             media_type = case.media_type
+         if media_type and media_type != "multipart/form-data" and not isinstance(case.body, NotSet):
              # `requests` will handle multipart form headers with the proper `boundary` value.
              if "content-type" not in final_headers:
-                 final_headers["Content-Type"] = case.media_type
+                 final_headers["Content-Type"] = media_type
          url = case._get_url(base_url)
-         serializer = case._get_serializer()
+         serializer = case._get_serializer(media_type)
          if serializer is not None and not isinstance(case.body, NotSet):
              context = SerializerContext(case=case)
              extra = serializer.as_requests(context, case._get_body())
@@ -169,7 +174,7 @@ class RequestsTransport:
              timeout = 1000 * data["timeout"]  # It is defined and not empty, since the exception happened
              code_message = case._get_code_message(case.operation.schema.code_sample_style, request, verify=verify)
              message = f"The server failed to respond within the specified limit of {timeout:.2f}ms"
-             raise get_timeout_error(timeout)(
+             raise get_timeout_error(case.operation.verbose_name, timeout)(
                  f"\n\n1. {failures.RequestTimeout.title}\n\n{message}\n\n{code_message}",
                  context=failures.RequestTimeout(message=message, timeout=timeout),
              ) from None
@@ -186,6 +191,14 @@ def _merge_dict_to(data: dict[str, Any], data_key: str, new: dict[str, Any]) ->
      data[data_key] = original


+ def prepare_timeout(timeout: int | None) -> float | None:
+     """Request timeout is in milliseconds, but `requests` uses seconds."""
+     output: int | float | None = timeout
+     if timeout is not None:
+         output = timeout / 1000
+     return output
+
+
  def validate_vanilla_requests_kwargs(data: dict[str, Any]) -> None:
      """Check arguments for `requests.Session.request`.

@@ -240,11 +253,16 @@ class WSGITransport:
          cookies: dict[str, Any] | None = None,
      ) -> dict[str, Any]:
          final_headers = case._get_headers(headers)
-         if case.media_type and not isinstance(case.body, NotSet):
+         media_type: Optional[str]
+         if case.body is not NOT_SET and case.media_type is None:
+             media_type = case.operation._get_default_media_type()
+         else:
+             media_type = case.media_type
+         if media_type and not isinstance(case.body, NotSet):
              # If we need to send a payload, then the Content-Type header should be set
-             final_headers["Content-Type"] = case.media_type
+             final_headers["Content-Type"] = media_type
          extra: dict[str, Any]
-         serializer = case._get_serializer()
+         serializer = case._get_serializer(media_type)
          if serializer is not None and not isinstance(case.body, NotSet):
              context = SerializerContext(case=case)
              extra = serializer.as_werkzeug(context, case._get_body())
schemathesis/transports/auth.py CHANGED
@@ -1,4 +1,5 @@
  from __future__ import annotations
+
  from typing import TYPE_CHECKING

  from ..types import RawAuth
schemathesis/transports/content_types.py CHANGED
@@ -1,5 +1,5 @@
  from functools import lru_cache
- from typing import Tuple, Generator
+ from typing import Generator, Tuple


  def _parseparam(s: str) -> Generator[str, None, None]: