schemathesis 4.0.0a10__py3-none-any.whl → 4.0.0a12__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (111)
  1. schemathesis/__init__.py +29 -30
  2. schemathesis/auths.py +65 -24
  3. schemathesis/checks.py +73 -39
  4. schemathesis/cli/commands/__init__.py +51 -3
  5. schemathesis/cli/commands/data.py +10 -0
  6. schemathesis/cli/commands/run/__init__.py +163 -274
  7. schemathesis/cli/commands/run/context.py +8 -4
  8. schemathesis/cli/commands/run/events.py +11 -1
  9. schemathesis/cli/commands/run/executor.py +70 -78
  10. schemathesis/cli/commands/run/filters.py +15 -165
  11. schemathesis/cli/commands/run/handlers/cassettes.py +105 -104
  12. schemathesis/cli/commands/run/handlers/junitxml.py +5 -4
  13. schemathesis/cli/commands/run/handlers/output.py +195 -121
  14. schemathesis/cli/commands/run/loaders.py +35 -50
  15. schemathesis/cli/commands/run/validation.py +52 -162
  16. schemathesis/cli/core.py +5 -3
  17. schemathesis/cli/ext/fs.py +7 -5
  18. schemathesis/cli/ext/options.py +0 -21
  19. schemathesis/config/__init__.py +189 -0
  20. schemathesis/config/_auth.py +51 -0
  21. schemathesis/config/_checks.py +268 -0
  22. schemathesis/config/_diff_base.py +99 -0
  23. schemathesis/config/_env.py +21 -0
  24. schemathesis/config/_error.py +156 -0
  25. schemathesis/config/_generation.py +149 -0
  26. schemathesis/config/_health_check.py +24 -0
  27. schemathesis/config/_operations.py +327 -0
  28. schemathesis/config/_output.py +171 -0
  29. schemathesis/config/_parameters.py +19 -0
  30. schemathesis/config/_phases.py +187 -0
  31. schemathesis/config/_projects.py +523 -0
  32. schemathesis/config/_rate_limit.py +17 -0
  33. schemathesis/config/_report.py +120 -0
  34. schemathesis/config/_validator.py +9 -0
  35. schemathesis/config/_warnings.py +25 -0
  36. schemathesis/config/schema.json +885 -0
  37. schemathesis/core/__init__.py +2 -0
  38. schemathesis/core/compat.py +16 -9
  39. schemathesis/core/errors.py +24 -4
  40. schemathesis/core/failures.py +6 -7
  41. schemathesis/core/hooks.py +20 -0
  42. schemathesis/core/output/__init__.py +14 -37
  43. schemathesis/core/output/sanitization.py +3 -146
  44. schemathesis/core/transport.py +36 -1
  45. schemathesis/core/validation.py +16 -0
  46. schemathesis/engine/__init__.py +2 -4
  47. schemathesis/engine/context.py +42 -43
  48. schemathesis/engine/core.py +7 -5
  49. schemathesis/engine/errors.py +60 -1
  50. schemathesis/engine/events.py +10 -2
  51. schemathesis/engine/phases/__init__.py +10 -0
  52. schemathesis/engine/phases/probes.py +11 -8
  53. schemathesis/engine/phases/stateful/__init__.py +2 -1
  54. schemathesis/engine/phases/stateful/_executor.py +104 -46
  55. schemathesis/engine/phases/stateful/context.py +2 -2
  56. schemathesis/engine/phases/unit/__init__.py +23 -15
  57. schemathesis/engine/phases/unit/_executor.py +110 -21
  58. schemathesis/engine/phases/unit/_pool.py +1 -1
  59. schemathesis/errors.py +2 -0
  60. schemathesis/filters.py +2 -3
  61. schemathesis/generation/__init__.py +5 -33
  62. schemathesis/generation/case.py +6 -3
  63. schemathesis/generation/coverage.py +154 -124
  64. schemathesis/generation/hypothesis/builder.py +70 -20
  65. schemathesis/generation/meta.py +3 -3
  66. schemathesis/generation/metrics.py +93 -0
  67. schemathesis/generation/modes.py +0 -8
  68. schemathesis/generation/overrides.py +37 -1
  69. schemathesis/generation/stateful/__init__.py +4 -0
  70. schemathesis/generation/stateful/state_machine.py +9 -1
  71. schemathesis/graphql/loaders.py +159 -16
  72. schemathesis/hooks.py +62 -35
  73. schemathesis/openapi/checks.py +12 -8
  74. schemathesis/openapi/generation/filters.py +10 -8
  75. schemathesis/openapi/loaders.py +142 -17
  76. schemathesis/pytest/lazy.py +2 -5
  77. schemathesis/pytest/loaders.py +24 -0
  78. schemathesis/pytest/plugin.py +33 -2
  79. schemathesis/schemas.py +21 -66
  80. schemathesis/specs/graphql/scalars.py +37 -3
  81. schemathesis/specs/graphql/schemas.py +23 -18
  82. schemathesis/specs/openapi/_hypothesis.py +26 -28
  83. schemathesis/specs/openapi/checks.py +37 -36
  84. schemathesis/specs/openapi/examples.py +4 -3
  85. schemathesis/specs/openapi/formats.py +32 -5
  86. schemathesis/specs/openapi/media_types.py +44 -1
  87. schemathesis/specs/openapi/negative/__init__.py +2 -2
  88. schemathesis/specs/openapi/patterns.py +46 -16
  89. schemathesis/specs/openapi/references.py +2 -3
  90. schemathesis/specs/openapi/schemas.py +19 -22
  91. schemathesis/specs/openapi/stateful/__init__.py +12 -6
  92. schemathesis/transport/__init__.py +54 -16
  93. schemathesis/transport/prepare.py +38 -13
  94. schemathesis/transport/requests.py +12 -9
  95. schemathesis/transport/wsgi.py +11 -12
  96. {schemathesis-4.0.0a10.dist-info → schemathesis-4.0.0a12.dist-info}/METADATA +50 -97
  97. schemathesis-4.0.0a12.dist-info/RECORD +164 -0
  98. schemathesis/cli/commands/run/checks.py +0 -79
  99. schemathesis/cli/commands/run/hypothesis.py +0 -78
  100. schemathesis/cli/commands/run/reports.py +0 -72
  101. schemathesis/cli/hooks.py +0 -36
  102. schemathesis/contrib/__init__.py +0 -9
  103. schemathesis/contrib/openapi/__init__.py +0 -9
  104. schemathesis/contrib/openapi/fill_missing_examples.py +0 -20
  105. schemathesis/engine/config.py +0 -59
  106. schemathesis/experimental/__init__.py +0 -72
  107. schemathesis/generation/targets.py +0 -69
  108. schemathesis-4.0.0a10.dist-info/RECORD +0 -153
  109. {schemathesis-4.0.0a10.dist-info → schemathesis-4.0.0a12.dist-info}/WHEEL +0 -0
  110. {schemathesis-4.0.0a10.dist-info → schemathesis-4.0.0a12.dist-info}/entry_points.txt +0 -0
  111. {schemathesis-4.0.0a10.dist-info → schemathesis-4.0.0a12.dist-info}/licenses/LICENSE +0 -0
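The hunks that follow repeatedly swap the old `config.execution.*` attribute access for per-operation and per-phase lookups on the project configuration added under the new `schemathesis/config/` package. As a reading aid, here is a minimal, hypothetical sketch of that lookup pattern; it uses only accessor names that appear verbatim in the hunks below (`get_hypothesis_settings`, `generation_for`, `checks_config_for`, `headers_for`, `auth_for`, `get_transport_kwargs`), while the `engine` and `operation` arguments and the helper name itself are assumptions made for illustration, not a documented API.

def resolve_for_operation(engine, operation, phase_name="fuzzing"):
    # Per-phase Hypothesis settings and generation options; phase names seen in
    # the hunks below are "examples", "coverage", "fuzzing", and "stateful".
    settings = engine.config.get_hypothesis_settings(operation=operation, phase=phase_name)
    generation = engine.config.generation_for(operation=operation, phase=phase_name)
    # Per-operation checks configuration, headers, auth, and transport kwargs.
    checks_config = engine.config.checks_config_for(operation=operation, phase=phase_name)
    headers = engine.config.headers_for(operation=operation)
    auth = engine.config.auth_for(operation=operation)
    transport_kwargs = engine.get_transport_kwargs(operation=operation)
    return settings, generation, checks_config, headers, auth, transport_kwargs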
schemathesis/engine/phases/stateful/_executor.py CHANGED
@@ -1,16 +1,19 @@
-from __future__ import annotations
+from __future__ import annotations  # noqa: I001

 import queue
 import time
 import unittest
-from dataclasses import replace
+from dataclasses import dataclass
 from typing import Any
 from warnings import catch_warnings

 import hypothesis
+import requests
 from hypothesis.control import current_build_context
 from hypothesis.errors import Flaky, Unsatisfiable
 from hypothesis.stateful import Rule
+from requests.exceptions import ChunkedEncodingError
+from requests.structures import CaseInsensitiveDict

 from schemathesis.checks import CheckContext, CheckFunction, run_checks
 from schemathesis.core.failures import Failure, FailureGroup
@@ -18,18 +21,26 @@ from schemathesis.core.transport import Response
 from schemathesis.engine import Status, events
 from schemathesis.engine.context import EngineContext
 from schemathesis.engine.control import ExecutionControl
+from schemathesis.engine.errors import (
+    TestingState,
+    UnrecoverableNetworkError,
+    clear_hypothesis_notes,
+    is_unrecoverable_network_error,
+)
 from schemathesis.engine.phases import PhaseName
 from schemathesis.engine.phases.stateful.context import StatefulContext
 from schemathesis.engine.recorder import ScenarioRecorder
+from schemathesis.generation import overrides
 from schemathesis.generation.case import Case
 from schemathesis.generation.hypothesis.reporting import ignore_hypothesis_output
+from schemathesis.generation.stateful import STATEFUL_TESTS_LABEL
 from schemathesis.generation.stateful.state_machine import (
     DEFAULT_STATE_MACHINE_SETTINGS,
     APIStateMachine,
     StepInput,
     StepOutput,
 )
-from schemathesis.generation.targets import TargetMetricCollector
+from schemathesis.generation.metrics import MetricCollector


 def _get_hypothesis_settings_kwargs_override(settings: hypothesis.settings) -> dict[str, Any]:
@@ -47,6 +58,17 @@ def _get_hypothesis_settings_kwargs_override(settings: hypothesis.settings) -> dict[str, Any]:
     return kwargs


+@dataclass
+class CachedCheckContextData:
+    override: Any
+    auth: Any
+    headers: Any
+    config: Any
+    transport_kwargs: Any
+
+    __slots__ = ("override", "auth", "headers", "config", "transport_kwargs")
+
+
 def execute_state_machine_loop(
     *,
     state_machine: type[APIStateMachine],
@@ -54,21 +76,16 @@
     engine: EngineContext,
 ) -> None:
     """Execute the state machine testing loop."""
-    kwargs = _get_hypothesis_settings_kwargs_override(engine.config.execution.hypothesis_settings)
-    if kwargs:
-        config = replace(
-            engine.config,
-            execution=replace(
-                engine.config.execution,
-                hypothesis_settings=hypothesis.settings(engine.config.execution.hypothesis_settings, **kwargs),
-            ),
-        )
-    else:
-        config = engine.config
+    configured_hypothesis_settings = engine.config.get_hypothesis_settings(phase="stateful")
+    kwargs = _get_hypothesis_settings_kwargs_override(configured_hypothesis_settings)
+    hypothesis_settings = hypothesis.settings(configured_hypothesis_settings, **kwargs)
+    generation = engine.config.generation_for(phase="stateful")

-    ctx = StatefulContext(metric_collector=TargetMetricCollector(targets=config.execution.targets))
+    ctx = StatefulContext(metric_collector=MetricCollector(metrics=generation.maximize))
+    state = TestingState()

-    transport_kwargs = engine.transport_kwargs
+    # Caches for validate_response to avoid repeated config lookups per operation
+    _check_context_cache: dict[str, CachedCheckContextData] = {}

     class _InstrumentedStateMachine(state_machine):  # type: ignore[valid-type,misc]
         """State machine with additional hooks for emitting events."""
@@ -78,23 +95,22 @@
             self._start_time = time.monotonic()
             self._scenario_id = scenario_started.id
             event_queue.put(scenario_started)
-            self._check_ctx = engine.get_check_context(self.recorder)

         def get_call_kwargs(self, case: Case) -> dict[str, Any]:
-            return transport_kwargs
+            return engine.get_transport_kwargs(operation=case.operation)

         def _repr_step(self, rule: Rule, data: dict, result: StepOutput) -> str:
             return ""

-        if config.override is not None:
-
-            def before_call(self, case: Case) -> None:
-                for location, entry in config.override.for_operation(case.operation).items():  # type: ignore[union-attr]
-                    if entry:
-                        container = getattr(case, location) or {}
-                        container.update(entry)
-                        setattr(case, location, container)
-                return super().before_call(case)
+        def before_call(self, case: Case) -> None:
+            override = overrides.for_operation(engine.config, operation=case.operation)
+            for location in ("query", "headers", "cookies", "path_parameters"):
+                entry = getattr(override, location)
+                if entry:
+                    container = getattr(case, location) or {}
+                    container.update(entry)
+                    setattr(case, location, container)
+            return super().before_call(case)

         def step(self, input: StepInput) -> StepOutput | None:
             # Checking the stop event once inside `step` is sufficient as it is called frequently
@@ -102,7 +118,7 @@
             if engine.has_to_stop:
                 raise KeyboardInterrupt
             try:
-                if config.execution.unique_inputs:
+                if generation.unique_inputs:
                     cached = ctx.get_step_outcome(input.case)
                     if isinstance(cached, BaseException):
                         raise cached
@@ -111,13 +127,27 @@
                 result = super().step(input)
                 ctx.step_succeeded()
             except FailureGroup as exc:
-                if config.execution.unique_inputs:
+                if generation.unique_inputs:
                     for failure in exc.exceptions:
                         ctx.store_step_outcome(input.case, failure)
                 ctx.step_failed()
                 raise
             except Exception as exc:
-                if config.execution.unique_inputs:
+                if isinstance(exc, (requests.ConnectionError, ChunkedEncodingError)) and is_unrecoverable_network_error(
+                    exc
+                ):
+                    transport_kwargs = engine.get_transport_kwargs(operation=input.case.operation)
+                    if exc.request is not None:
+                        headers = {key: value[0] for key, value in exc.request.headers.items()}
+                    else:
+                        headers = {**dict(input.case.headers or {}), **transport_kwargs.get("headers", {})}
+                    verify = transport_kwargs.get("verify", True)
+                    state.unrecoverable_network_error = UnrecoverableNetworkError(
+                        error=exc,
+                        code_sample=input.case.as_curl_command(headers=headers, verify=verify),
+                    )
+
+                if generation.unique_inputs:
                     ctx.store_step_outcome(input.case, exc)
                 ctx.step_errored()
                 raise
@@ -125,11 +155,11 @@
                 ctx.step_interrupted()
                 raise
             except BaseException as exc:
-                if config.execution.unique_inputs:
+                if generation.unique_inputs:
                     ctx.store_step_outcome(input.case, exc)
                 raise exc
             else:
-                if config.execution.unique_inputs:
+                if generation.unique_inputs:
                     ctx.store_step_outcome(input.case, None)
                 return result

@@ -139,12 +169,34 @@
             self.recorder.record_response(case_id=case.id, response=response)
             ctx.collect_metric(case, response)
             ctx.current_response = response
+
+            label = case.operation.label
+            cached = _check_context_cache.get(label)
+            if cached is None:
+                headers = engine.config.headers_for(operation=case.operation)
+                cached = CachedCheckContextData(
+                    override=overrides.for_operation(engine.config, operation=case.operation),
+                    auth=engine.config.auth_for(operation=case.operation),
+                    headers=CaseInsensitiveDict(headers) if headers else None,
+                    config=engine.config.checks_config_for(operation=case.operation, phase="stateful"),
+                    transport_kwargs=engine.get_transport_kwargs(operation=case.operation),
+                )
+                _check_context_cache[label] = cached
+
+            check_ctx = CheckContext(
+                override=cached.override,
+                auth=cached.auth,
+                headers=cached.headers,
+                config=cached.config,
+                transport_kwargs=cached.transport_kwargs,
+                recorder=self.recorder,
+            )
             validate_response(
                 response=response,
                 case=case,
                 stateful_ctx=ctx,
-                check_ctx=self._check_ctx,
-                checks=config.execution.checks,
+                check_ctx=check_ctx,
+                checks=check_ctx._checks,
                 control=engine.control,
                 recorder=self.recorder,
                 additional_checks=additional_checks,
@@ -169,7 +221,7 @@
             ctx.reset_scenario()
             super().teardown()

-    seed = config.execution.seed
+    seed = engine.config.seed

     while True:
         # This loop is running until no new failures are found in a single iteration
@@ -187,16 +239,13 @@
             )
             break
         suite_status = Status.SUCCESS
-        if seed is not None:
-            InstrumentedStateMachine = hypothesis.seed(seed)(_InstrumentedStateMachine)
-            # Predictably change the seed to avoid re-running the same sequences if tests fail
-            # yet have reproducible results
-            seed += 1
-        else:
-            InstrumentedStateMachine = _InstrumentedStateMachine
+        InstrumentedStateMachine = hypothesis.seed(seed)(_InstrumentedStateMachine)
+        # Predictably change the seed to avoid re-running the same sequences if tests fail
+        # yet have reproducible results
+        seed += 1
         try:
             with catch_warnings(), ignore_hypothesis_output():  # type: ignore
-                InstrumentedStateMachine.run(settings=config.execution.hypothesis_settings)
+                InstrumentedStateMachine.run(settings=hypothesis_settings)
         except KeyboardInterrupt:
             # Raised in the state machine when the stop event is set or it is raised by the user's code
             # that is placed in the base class of the state machine.
@@ -220,7 +269,7 @@
                     ctx.mark_as_seen_in_run(failure)
                 continue
         except Flaky:
-            suite_status = Status.FAILURE
+            # Ignore flakiness
             if engine.has_reached_the_failure_limit:
                 break  # type: ignore[unreachable]
             # Mark all failures in this suite as seen to prevent them being re-discovered
@@ -230,15 +279,24 @@
             if isinstance(exc, Unsatisfiable) and ctx.completed_scenarios > 0:
                 # Sometimes Hypothesis randomly gives up on generating some complex cases. However, if we know that
                 # values are possible to generate based on the previous observations, we retry the generation
-                if ctx.completed_scenarios >= config.execution.hypothesis_settings.max_examples:
+                if ctx.completed_scenarios >= hypothesis_settings.max_examples:
                     # Avoid infinite restarts
                     break
                 continue
+            clear_hypothesis_notes(exc)
             # Any other exception is an inner error and the test run should be stopped
             suite_status = Status.ERROR
+            code_sample: str | None = None
+            if state.unrecoverable_network_error is not None:
+                exc = state.unrecoverable_network_error.error
+                code_sample = state.unrecoverable_network_error.code_sample
             event_queue.put(
                 events.NonFatalError(
-                    error=exc, phase=PhaseName.STATEFUL_TESTING, label="Stateful tests", related_to_operation=False
+                    error=exc,
+                    phase=PhaseName.STATEFUL_TESTING,
+                    label=STATEFUL_TESTS_LABEL,
+                    related_to_operation=False,
+                    code_sample=code_sample,
                 )
             )
             break
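Both the stateful executor above and the unit-test executor further down capture "unrecoverable" network failures together with a curl reproduction command, so the final error report can show how to reproduce a server crash. Below is a hedged sketch of that pattern, assuming only the names visible in the hunks (`TestingState`, `UnrecoverableNetworkError`, `is_unrecoverable_network_error`, `Case.as_curl_command`); the helper function itself is illustrative, not part of the package, and it deliberately skips the prepared-request header handling the real code performs.

from __future__ import annotations

from typing import Any

import requests
from requests.exceptions import ChunkedEncodingError

from schemathesis.engine.errors import (
    TestingState,
    UnrecoverableNetworkError,
    is_unrecoverable_network_error,
)
from schemathesis.generation.case import Case


def record_network_crash(state: TestingState, case: Case, exc: Exception, transport_kwargs: dict[str, Any]) -> None:
    # Only remember connection-level failures the engine considers unrecoverable,
    # e.g. when the server has crashed and refuses further connections.
    if not isinstance(exc, (requests.ConnectionError, ChunkedEncodingError)):
        return
    if not is_unrecoverable_network_error(exc):
        return
    # Build a curl command from the generated case plus the headers the transport would send.
    headers = {**dict(case.headers or {}), **transport_kwargs.get("headers", {})}
    state.unrecoverable_network_error = UnrecoverableNetworkError(
        error=exc,
        code_sample=case.as_curl_command(headers=headers, verify=transport_kwargs.get("verify", True)),
    )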
schemathesis/engine/phases/stateful/context.py CHANGED
@@ -7,7 +7,7 @@ from schemathesis.core.failures import Failure
 from schemathesis.core.transport import Response
 from schemathesis.engine import Status
 from schemathesis.generation.case import Case
-from schemathesis.generation.targets import TargetMetricCollector
+from schemathesis.generation.metrics import MetricCollector


 @dataclass
@@ -27,7 +27,7 @@ class StatefulContext:
     # The total number of completed test scenario
     completed_scenarios: int = 0
     # Metrics collector for targeted testing
-    metric_collector: TargetMetricCollector = field(default_factory=TargetMetricCollector)
+    metric_collector: MetricCollector = field(default_factory=MetricCollector)
     step_outcomes: dict[int, BaseException | None] = field(default_factory=dict)

     @property
schemathesis/engine/phases/unit/__init__.py CHANGED
@@ -16,6 +16,7 @@ from schemathesis.core.result import Ok
 from schemathesis.engine import Status, events
 from schemathesis.engine.phases import PhaseName, PhaseSkipReason
 from schemathesis.engine.recorder import ScenarioRecorder
+from schemathesis.generation import overrides
 from schemathesis.generation.hypothesis.builder import HypothesisTestConfig, HypothesisTestMode
 from schemathesis.generation.hypothesis.reporting import ignore_hypothesis_output

@@ -42,7 +43,6 @@ def execute(engine: EngineContext, phase: Phase) -> events.EventGenerator:
     else:
         mode = HypothesisTestMode.FUZZING
     producer = TaskProducer(engine)
-    workers_num = engine.config.execution.workers_num

     suite_started = events.SuiteStarted(phase=phase.name)

@@ -53,7 +53,7 @@ def execute(engine: EngineContext, phase: Phase) -> events.EventGenerator:

     try:
         with WorkerPool(
-            workers_num=workers_num,
+            workers_num=engine.config.workers,
             producer=producer,
             worker_factory=worker_task,
             ctx=engine,
@@ -160,16 +160,24 @@ def worker_task(

         if isinstance(result, Ok):
             operation = result.ok()
-            as_strategy_kwargs = get_strategy_kwargs(ctx, operation)
+            phases = ctx.config.phases_for(operation=operation)
+            # Skip tests if this phase is disabled
+            if (
+                (phase == PhaseName.EXAMPLES and not phases.examples.enabled)
+                or (phase == PhaseName.FUZZING and not phases.fuzzing.enabled)
+                or (phase == PhaseName.COVERAGE and not phases.coverage.enabled)
+            ):
+                continue
+            as_strategy_kwargs = get_strategy_kwargs(ctx, operation=operation)
             try:
                 test_function = create_test(
                     operation=operation,
                     test_func=test_func,
                     config=HypothesisTestConfig(
                         modes=[mode],
-                        settings=ctx.config.execution.hypothesis_settings,
-                        seed=ctx.config.execution.seed,
-                        generation=ctx.config.execution.generation,
+                        settings=ctx.config.get_hypothesis_settings(operation=operation, phase=phase.name),
+                        seed=ctx.config.seed,
+                        project=ctx.config,
                         as_strategy_kwargs=as_strategy_kwargs,
                     ),
                 )
@@ -191,14 +199,14 @@ def worker_task(
             events_queue.put(events.Interrupted(phase=phase))


-def get_strategy_kwargs(ctx: EngineContext, operation: APIOperation) -> dict[str, Any]:
+def get_strategy_kwargs(ctx: EngineContext, *, operation: APIOperation) -> dict[str, Any]:
     kwargs = {}
-    if ctx.config.override is not None:
-        for location, entry in ctx.config.override.for_operation(operation).items():
-            if entry:
-                kwargs[location] = entry
-    if ctx.config.network.headers:
-        kwargs["headers"] = {
-            key: value for key, value in ctx.config.network.headers.items() if key.lower() != "user-agent"
-        }
+    override = overrides.for_operation(ctx.config, operation=operation)
+    for location in ("query", "headers", "cookies", "path_parameters"):
+        entry = getattr(override, location)
+        if entry:
+            kwargs[location] = entry
+    headers = ctx.config.headers_for(operation=operation)
+    if headers:
+        kwargs["headers"] = {key: value for key, value in headers.items() if key.lower() != "user-agent"}
     return kwargs
schemathesis/engine/phases/unit/_executor.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
 import time
 import unittest
 import uuid
-from typing import TYPE_CHECKING, Callable, Iterable
+from typing import TYPE_CHECKING, Any, Callable
 from warnings import WarningMessage, catch_warnings

 import requests
@@ -11,8 +11,11 @@ from hypothesis.errors import InvalidArgument
 from hypothesis_jsonschema._canonicalise import HypothesisRefResolutionError
 from jsonschema.exceptions import SchemaError as JsonSchemaError
 from jsonschema.exceptions import ValidationError
+from requests.exceptions import ChunkedEncodingError
+from requests.structures import CaseInsensitiveDict

-from schemathesis.checks import CheckContext, CheckFunction, run_checks
+from schemathesis.checks import CheckContext, run_checks
+from schemathesis.config._generation import GenerationConfig
 from schemathesis.core.compat import BaseExceptionGroup
 from schemathesis.core.control import SkipTest
 from schemathesis.core.errors import (
@@ -31,13 +34,17 @@ from schemathesis.engine import Status, events
 from schemathesis.engine.context import EngineContext
 from schemathesis.engine.errors import (
     DeadlineExceeded,
+    TestingState,
     UnexpectedError,
+    UnrecoverableNetworkError,
     UnsupportedRecursiveReference,
+    clear_hypothesis_notes,
     deduplicate_errors,
+    is_unrecoverable_network_error,
 )
 from schemathesis.engine.phases import PhaseName
 from schemathesis.engine.recorder import ScenarioRecorder
-from schemathesis.generation import targets
+from schemathesis.generation import metrics, overrides
 from schemathesis.generation.case import Case
 from schemathesis.generation.hypothesis.builder import (
     InvalidHeadersExampleMark,
@@ -68,9 +75,12 @@ def run_test(
     skip_reason = None
     test_start_time = time.monotonic()
     recorder = ScenarioRecorder(label=operation.label)
+    state = TestingState()

-    def non_fatal_error(error: Exception) -> events.NonFatalError:
-        return events.NonFatalError(error=error, phase=phase, label=operation.label, related_to_operation=True)
+    def non_fatal_error(error: Exception, code_sample: str | None = None) -> events.NonFatalError:
+        return events.NonFatalError(
+            error=error, phase=phase, label=operation.label, related_to_operation=True, code_sample=code_sample
+        )

     def scenario_finished(status: Status) -> events.ScenarioFinished:
         return events.ScenarioFinished(
@@ -85,10 +95,38 @@ def run_test(
             is_final=False,
         )

+    phase_name = phase.value.lower()
+    assert phase_name in ("examples", "coverage", "fuzzing", "stateful")
+
+    operation_config = ctx.config.operations.get_for_operation(operation)
+    continue_on_failure = operation_config.continue_on_failure or ctx.config.continue_on_failure or False
+    generation = ctx.config.generation_for(operation=operation, phase=phase_name)
+    override = overrides.for_operation(ctx.config, operation=operation)
+    auth = ctx.config.auth_for(operation=operation)
+    headers = ctx.config.headers_for(operation=operation)
+    transport_kwargs = ctx.get_transport_kwargs(operation=operation)
+    check_ctx = CheckContext(
+        override=override,
+        auth=auth,
+        headers=CaseInsensitiveDict(headers) if headers else None,
+        config=ctx.config.checks_config_for(operation=operation, phase=phase_name),
+        transport_kwargs=transport_kwargs,
+        recorder=recorder,
+    )
+
     try:
         setup_hypothesis_database_key(test_function, operation)
         with catch_warnings(record=True) as warnings, ignore_hypothesis_output():
-            test_function(ctx=ctx, errors=errors, recorder=recorder)
+            test_function(
+                ctx=ctx,
+                state=state,
+                errors=errors,
+                check_ctx=check_ctx,
+                recorder=recorder,
+                generation=generation,
+                transport_kwargs=transport_kwargs,
+                continue_on_failure=continue_on_failure,
+            )
         # Test body was not executed at all - Hypothesis did not generate any tests, but there is no error
         status = Status.SUCCESS
     except (SkipTest, unittest.case.SkipTest) as exc:
@@ -147,6 +185,7 @@ def run_test(
                 exc,
                 path=operation.path,
                 method=operation.method,
+                config=ctx.config.output,
             )
         )
     except HypothesisRefResolutionError:
@@ -168,6 +207,7 @@ def run_test(
         yield non_fatal_error(InvalidRegexPattern.from_schema_error(exc, from_examples=False))
     except Exception as exc:
         status = Status.ERROR
+        clear_hypothesis_notes(exc)
         # Likely a YAML parsing issue. E.g. `00:00:00.00` (without quotes) is parsed as float `0.0`
         if str(exc) == "first argument must be string or compiled pattern":
             yield non_fatal_error(
@@ -177,10 +217,13 @@ def run_test(
                 )
             )
         else:
-            yield non_fatal_error(exc)
+            code_sample: str | None = None
+            if state.unrecoverable_network_error is not None and state.unrecoverable_network_error.error is exc:
+                code_sample = state.unrecoverable_network_error.code_sample
+            yield non_fatal_error(exc, code_sample=code_sample)
     if (
         status == Status.SUCCESS
-        and ctx.config.execution.continue_on_failure
+        and continue_on_failure
         and any(check.status == Status.FAILURE for checks in recorder.checks.values() for check in checks)
     ):
         status = Status.FAILURE
@@ -237,28 +280,68 @@ def get_invalid_regular_expression_message(warnings: list[WarningMessage]) -> st


 def cached_test_func(f: Callable) -> Callable:
-    def wrapped(*, ctx: EngineContext, case: Case, errors: list[Exception], recorder: ScenarioRecorder) -> None:
+    def wrapped(
+        *,
+        ctx: EngineContext,
+        state: TestingState,
+        case: Case,
+        errors: list[Exception],
+        check_ctx: CheckContext,
+        recorder: ScenarioRecorder,
+        generation: GenerationConfig,
+        transport_kwargs: dict[str, Any],
+        continue_on_failure: bool,
+    ) -> None:
        try:
            if ctx.has_to_stop:
                raise KeyboardInterrupt
-            if ctx.config.execution.unique_inputs:
+            if generation.unique_inputs:
                cached = ctx.get_cached_outcome(case)
                if isinstance(cached, BaseException):
                    raise cached
                elif cached is None:
                    return None
                try:
-                    f(ctx=ctx, case=case, recorder=recorder)
+                    f(
+                        case=case,
+                        check_ctx=check_ctx,
+                        recorder=recorder,
+                        generation=generation,
+                        transport_kwargs=transport_kwargs,
+                        continue_on_failure=continue_on_failure,
+                    )
                except BaseException as exc:
                    ctx.cache_outcome(case, exc)
                    raise
                else:
                    ctx.cache_outcome(case, None)
            else:
-                f(ctx=ctx, case=case, recorder=recorder)
+                f(
+                    case=case,
+                    check_ctx=check_ctx,
+                    recorder=recorder,
+                    generation=generation,
+                    transport_kwargs=transport_kwargs,
+                    continue_on_failure=continue_on_failure,
+                )
        except (KeyboardInterrupt, Failure):
            raise
        except Exception as exc:
+            if isinstance(exc, (requests.ConnectionError, ChunkedEncodingError)) and is_unrecoverable_network_error(
+                exc
+            ):
+                # Server likely has crashed and does not accept any connections at all
+                # Don't report these error - only the original crash should be reported
+                if exc.request is not None:
+                    headers = {key: value[0] for key, value in exc.request.headers.items()}
+                else:
+                    headers = {**dict(case.headers or {}), **transport_kwargs.get("headers", {})}
+                verify = transport_kwargs.get("verify", True)
+                state.unrecoverable_network_error = UnrecoverableNetworkError(
+                    error=exc,
+                    code_sample=case.as_curl_command(headers=headers, verify=verify),
+                )
+                raise
            errors.append(exc)
            raise UnexpectedError from None

@@ -268,24 +351,31 @@ def cached_test_func(f: Callable) -> Callable:


 @cached_test_func
-def test_func(*, ctx: EngineContext, case: Case, recorder: ScenarioRecorder) -> None:
+def test_func(
+    *,
+    case: Case,
+    check_ctx: CheckContext,
+    recorder: ScenarioRecorder,
+    generation: GenerationConfig,
+    transport_kwargs: dict[str, Any],
+    continue_on_failure: bool,
+) -> None:
     recorder.record_case(parent_id=None, transition=None, case=case)
     try:
-        response = case.call(**ctx.transport_kwargs)
-    except (requests.Timeout, requests.ConnectionError) as error:
+        response = case.call(**transport_kwargs)
+    except (requests.Timeout, requests.ConnectionError, ChunkedEncodingError) as error:
         if isinstance(error.request, requests.Request):
             recorder.record_request(case_id=case.id, request=error.request.prepare())
         elif isinstance(error.request, requests.PreparedRequest):
             recorder.record_request(case_id=case.id, request=error.request)
         raise
     recorder.record_response(case_id=case.id, response=response)
-    targets.run(ctx.config.execution.targets, case=case, response=response)
+    metrics.maximize(generation.maximize, case=case, response=response)
     validate_response(
         case=case,
-        ctx=ctx.get_check_context(recorder),
-        checks=ctx.config.execution.checks,
+        ctx=check_ctx,
         response=response,
-        continue_on_failure=ctx.config.execution.continue_on_failure,
+        continue_on_failure=continue_on_failure,
         recorder=recorder,
     )

@@ -294,7 +384,6 @@ def validate_response(
     *,
     case: Case,
     ctx: CheckContext,
-    checks: Iterable[CheckFunction],
     response: Response,
     continue_on_failure: bool,
     recorder: ScenarioRecorder,
@@ -318,7 +407,7 @@
         case=case,
         response=response,
         ctx=ctx,
-        checks=checks,
+        checks=ctx._checks,
         on_failure=on_failure,
         on_success=on_success,
     )
schemathesis/engine/phases/unit/_pool.py CHANGED
@@ -18,7 +18,7 @@ class TaskProducer:
     """Produces test tasks for workers to execute."""

     def __init__(self, ctx: EngineContext) -> None:
-        self.operations = ctx.schema.get_all_operations(generation_config=ctx.config.execution.generation)
+        self.operations = ctx.schema.get_all_operations()
         self.lock = threading.Lock()

     def next_operation(self) -> Result | None:
schemathesis/errors.py CHANGED
@@ -1,6 +1,7 @@
 """Public Schemathesis errors."""

 from schemathesis.core.errors import (
+    HookError,
     IncorrectUsage,
     InternalError,
     InvalidHeadersExample,
@@ -21,6 +22,7 @@ from schemathesis.core.errors import (
 )

 __all__ = [
+    "HookError",
     "IncorrectUsage",
     "InternalError",
     "InvalidHeadersExample",
schemathesis/filters.py CHANGED
@@ -150,9 +150,8 @@ class FilterSet:
     def clone(self) -> FilterSet:
         return FilterSet(_includes=self._includes.copy(), _excludes=self._excludes.copy())

-    def apply_to(self, operations: list[APIOperation]) -> list[APIOperation]:
-        """Get a filtered list of the given operations that match the filters."""
-        return [operation for operation in operations if self.match(SimpleNamespace(operation=operation))]
+    def applies_to(self, operation: APIOperation) -> bool:
+        return self.match(SimpleNamespace(operation=operation))

     def match(self, ctx: HasAPIOperation) -> bool:
         """Determines whether the given operation should be included based on the defined filters.