schemathesis 4.2.1__py3-none-any.whl → 4.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. schemathesis/config/__init__.py +8 -1
  2. schemathesis/config/_phases.py +14 -3
  3. schemathesis/config/schema.json +2 -1
  4. schemathesis/core/jsonschema/bundler.py +4 -3
  5. schemathesis/core/jsonschema/references.py +185 -85
  6. schemathesis/core/transforms.py +14 -6
  7. schemathesis/engine/context.py +35 -2
  8. schemathesis/generation/hypothesis/__init__.py +3 -1
  9. schemathesis/specs/openapi/adapter/parameters.py +3 -3
  10. schemathesis/specs/openapi/adapter/protocol.py +2 -0
  11. schemathesis/specs/openapi/adapter/responses.py +29 -7
  12. schemathesis/specs/openapi/adapter/v2.py +2 -0
  13. schemathesis/specs/openapi/adapter/v3_0.py +2 -0
  14. schemathesis/specs/openapi/adapter/v3_1.py +2 -0
  15. schemathesis/specs/openapi/examples.py +92 -50
  16. schemathesis/specs/openapi/stateful/dependencies/__init__.py +88 -0
  17. schemathesis/specs/openapi/stateful/dependencies/inputs.py +182 -0
  18. schemathesis/specs/openapi/stateful/dependencies/models.py +270 -0
  19. schemathesis/specs/openapi/stateful/dependencies/naming.py +168 -0
  20. schemathesis/specs/openapi/stateful/dependencies/outputs.py +34 -0
  21. schemathesis/specs/openapi/stateful/dependencies/resources.py +270 -0
  22. schemathesis/specs/openapi/stateful/dependencies/schemas.py +343 -0
  23. schemathesis/specs/openapi/stateful/inference.py +2 -1
  24. {schemathesis-4.2.1.dist-info → schemathesis-4.3.0.dist-info}/METADATA +1 -1
  25. {schemathesis-4.2.1.dist-info → schemathesis-4.3.0.dist-info}/RECORD +28 -21
  26. {schemathesis-4.2.1.dist-info → schemathesis-4.3.0.dist-info}/WHEEL +0 -0
  27. {schemathesis-4.2.1.dist-info → schemathesis-4.3.0.dist-info}/entry_points.txt +0 -0
  28. {schemathesis-4.2.1.dist-info → schemathesis-4.3.0.dist-info}/licenses/LICENSE +0 -0
@@ -19,7 +19,13 @@ from schemathesis.config._error import ConfigError
19
19
  from schemathesis.config._generation import GenerationConfig
20
20
  from schemathesis.config._health_check import HealthCheck
21
21
  from schemathesis.config._output import OutputConfig, SanitizationConfig, TruncationConfig
22
- from schemathesis.config._phases import CoveragePhaseConfig, PhaseConfig, PhasesConfig, StatefulPhaseConfig
22
+ from schemathesis.config._phases import (
23
+ CoveragePhaseConfig,
24
+ InferenceAlgorithm,
25
+ PhaseConfig,
26
+ PhasesConfig,
27
+ StatefulPhaseConfig,
28
+ )
23
29
  from schemathesis.config._projects import ProjectConfig, ProjectsConfig, SchemathesisWarning, get_workers_count
24
30
  from schemathesis.config._report import DEFAULT_REPORT_DIRECTORY, ReportConfig, ReportFormat, ReportsConfig
25
31
 
@@ -44,6 +50,7 @@ __all__ = [
44
50
  "PhasesConfig",
45
51
  "CoveragePhaseConfig",
46
52
  "StatefulPhaseConfig",
53
+ "InferenceAlgorithm",
47
54
  "ProjectsConfig",
48
55
  "ProjectConfig",
49
56
  "get_workers_count",
@@ -1,6 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from dataclasses import dataclass
4
+ from enum import Enum
4
5
  from typing import Any
5
6
 
6
7
  from schemathesis.config._checks import ChecksConfig
@@ -125,9 +126,14 @@ class CoveragePhaseConfig(DiffBase):
125
126
  )
126
127
 
127
128
 
129
+ class InferenceAlgorithm(str, Enum):
130
+ LOCATION_HEADERS = "location-headers"
131
+ DEPENDENCY_ANALYSIS = "dependency-analysis"
132
+
133
+
128
134
  @dataclass(repr=False)
129
135
  class InferenceConfig(DiffBase):
130
- algorithms: list[str]
136
+ algorithms: list[InferenceAlgorithm]
131
137
 
132
138
  __slots__ = ("algorithms",)
133
139
 
@@ -136,12 +142,14 @@ class InferenceConfig(DiffBase):
136
142
  *,
137
143
  algorithms: list[str] | None = None,
138
144
  ) -> None:
139
- self.algorithms = algorithms if algorithms is not None else ["location-headers"]
145
+ self.algorithms = (
146
+ [InferenceAlgorithm(a) for a in algorithms] if algorithms is not None else list(InferenceAlgorithm)
147
+ )
140
148
 
141
149
  @classmethod
142
150
  def from_dict(cls, data: dict[str, Any]) -> InferenceConfig:
143
151
  return cls(
144
- algorithms=data.get("algorithms", ["location-headers"]),
152
+ algorithms=data.get("algorithms", list(InferenceAlgorithm)),
145
153
  )
146
154
 
147
155
  @property
@@ -149,6 +157,9 @@ class InferenceConfig(DiffBase):
149
157
  """Inference is enabled if any algorithms are configured."""
150
158
  return bool(self.algorithms)
151
159
 
160
+ def is_algorithm_enabled(self, algorithm: InferenceAlgorithm) -> bool:
161
+ return algorithm in self.algorithms
162
+
152
163
 
153
164
  @dataclass(repr=False)
154
165
  class StatefulPhaseConfig(DiffBase):
@@ -325,7 +325,8 @@
325
325
  "items": {
326
326
  "type": "string",
327
327
  "enum": [
328
- "location-headers"
328
+ "location-headers",
329
+ "dependency-analysis"
329
330
  ]
330
331
  },
331
332
  "uniqueItems": true
@@ -90,15 +90,16 @@ class Bundler:
90
90
  # In the future, it **should** be handled by `hypothesis-jsonschema` instead.
91
91
  cloned = deepclone(resolved_schema)
92
92
  remaining_references = sanitize(cloned)
93
- if remaining_references:
93
+ if reference in remaining_references:
94
94
  # This schema is either infinitely recursive or the sanitization logic misses it, in any
95
95
  # event, we give up here
96
96
  raise InfiniteRecursiveReference(reference)
97
97
 
98
98
  result = {key: _bundle_recursive(value) for key, value in current.items() if key != "$ref"}
99
99
  # Recursive references need `$ref` to be in them, which is only possible with `dict`
100
- assert isinstance(cloned, dict)
101
- result.update(cloned)
100
+ bundled_clone = _bundle_recursive(cloned)
101
+ assert isinstance(bundled_clone, dict)
102
+ result.update(bundled_clone)
102
103
  return result
103
104
  elif resolved_uri not in visited:
104
105
  # Bundle only new schemas
@@ -1,122 +1,222 @@
1
1
  from __future__ import annotations
2
2
 
3
- from schemathesis.core.jsonschema.keywords import ALL_KEYWORDS
4
- from schemathesis.core.jsonschema.types import JsonSchema, JsonSchemaObject, get_type
3
+ from typing import Any
5
4
 
5
+ from schemathesis.core.jsonschema.types import JsonSchema, JsonSchemaObject
6
6
 
7
- def sanitize(schema: JsonSchema) -> set[str]:
8
- """Remove optional parts of the schema that contain references.
9
7
 
10
- It covers only the most popular cases, as removing all optional parts is complicated.
11
- We might fall back to filtering out invalid cases in the future.
12
- """
8
+ def sanitize(schema: JsonSchema) -> set[str]:
9
+ """Remove $ref from optional locations."""
13
10
  if isinstance(schema, bool):
14
11
  return set()
15
12
 
16
- stack = [schema]
13
+ stack: list[JsonSchema] = [schema]
14
+
17
15
  while stack:
18
16
  current = stack.pop()
19
- if isinstance(current, dict):
20
- # Optional properties
21
- if "properties" in current:
22
- properties = current["properties"]
23
- required = current.get("required", [])
24
- for name, value in list(properties.items()):
25
- if isinstance(value, dict):
26
- if name not in required and _has_references(value):
27
- del properties[name]
28
- elif _find_single_reference_combinators(value):
29
- properties.pop(name, None)
30
- else:
31
- stack.append(value)
32
-
33
- # Optional items
34
- if "items" in current:
35
- _sanitize_items(current)
36
- # Not required additional properties
37
- if "additionalProperties" in current:
38
- _sanitize_additional_properties(current)
39
- for k in _find_single_reference_combinators(current):
40
- del current[k]
17
+ if not isinstance(current, dict):
18
+ continue
19
+
20
+ _sanitize_combinators(current)
21
+
22
+ _sanitize_properties(current)
23
+
24
+ if "items" in current:
25
+ _sanitize_items(current)
26
+
27
+ if "prefixItems" in current:
28
+ _sanitize_prefix_items(current)
29
+
30
+ if "additionalProperties" in current:
31
+ _sanitize_additional_properties(current)
32
+
33
+ if "additionalItems" in current:
34
+ _sanitize_additional_items(current)
35
+
36
+ for value in current.values():
37
+ if isinstance(value, dict):
38
+ stack.append(value)
39
+ elif isinstance(value, list):
40
+ for item in value:
41
+ if isinstance(item, dict):
42
+ stack.append(item)
41
43
 
42
44
  remaining: set[str] = set()
43
45
  _collect_all_references(schema, remaining)
44
46
  return remaining
45
47
 
46
48
 
47
- def _collect_all_references(schema: JsonSchema | list[JsonSchema], remaining: set[str]) -> None:
48
- """Recursively collect all $ref present in the schema."""
49
- if isinstance(schema, dict):
50
- reference = schema.get("$ref")
51
- if isinstance(reference, str):
52
- remaining.add(reference)
53
- for value in schema.values():
54
- _collect_all_references(value, remaining)
55
- elif isinstance(schema, list):
56
- for item in schema:
57
- _collect_all_references(item, remaining)
49
+ def _sanitize_combinators(schema: JsonSchemaObject) -> None:
50
+ """Sanitize anyOf/oneOf/allOf."""
51
+ for combinator_key in ("anyOf", "oneOf"):
52
+ variants = schema.get(combinator_key)
53
+ if not isinstance(variants, list):
54
+ continue
58
55
 
56
+ flattened = _flatten_combinator(variants, combinator_key)
59
57
 
60
- def _has_references_in_items(items: list[JsonSchema]) -> bool:
61
- return any("$ref" in item for item in items if isinstance(item, dict))
58
+ cleaned = [variant for variant in flattened if not _has_ref(variant)]
62
59
 
60
+ # Only update if we have non-$ref variants
61
+ if cleaned:
62
+ # At least one alternative remains, which narrows the constraints
63
+ schema[combinator_key] = cleaned
64
+ elif not flattened:
65
+ schema.pop(combinator_key, None)
63
66
 
64
- def _has_references(schema: JsonSchemaObject) -> bool:
65
- if "$ref" in schema:
67
+ all_of = schema.get("allOf")
68
+ if isinstance(all_of, list):
69
+ flattened = _flatten_combinator(all_of, "allOf")
70
+
71
+ cleaned = [variant for variant in flattened if not _is_empty(variant)]
72
+ if cleaned:
73
+ schema["allOf"] = cleaned
74
+ else:
75
+ schema.pop("allOf", None)
76
+
77
+
78
+ def _flatten_combinator(variants: list, key: str) -> list:
79
+ """Flatten nested same-type combinators."""
80
+ result = []
81
+ for variant in variants:
82
+ if isinstance(variant, dict) and key in variant and isinstance(variant[key], list):
83
+ result.extend(variant[key])
84
+ else:
85
+ result.append(variant)
86
+ return result
87
+
88
+
89
+ def _is_empty(schema: JsonSchema) -> bool:
90
+ """Check if schema accepts anything."""
91
+ if schema is True:
66
92
  return True
67
- items = schema.get("items")
68
- return (isinstance(items, dict) and "$ref" in items) or isinstance(items, list) and _has_references_in_items(items)
69
93
 
94
+ if not isinstance(schema, dict):
95
+ return False
70
96
 
71
- def _is_optional_schema(schema: JsonSchema) -> bool:
72
- # Whether this schema could be dropped from a list of schemas
73
- if isinstance(schema, bool):
97
+ if not schema:
74
98
  return True
75
- type_ = get_type(schema)
76
- if type_ == ["object"]:
77
- # Empty object is valid for this schema -> could be dropped
78
- return schema.get("required", []) == [] and schema.get("minProperties", 0) == 0
79
- # Has at least one keyword -> should not be removed
80
- return not any(k in ALL_KEYWORDS for k in schema)
81
-
82
-
83
- def _find_single_reference_combinators(schema: JsonSchemaObject) -> list[str]:
84
- # Schema example:
85
- # {
86
- # "type": "object",
87
- # "properties": {
88
- # "parent": {
89
- # "allOf": [{"$ref": "#/components/schemas/User"}]
90
- # }
91
- # }
92
- # }
93
- found = []
94
- for keyword in ("allOf", "oneOf", "anyOf"):
95
- combinator = schema.get(keyword)
96
- if combinator is not None:
97
- optionals = [subschema for subschema in combinator if not _is_optional_schema(subschema)]
98
- # NOTE: The first schema is not bool, hence it is safe to pass it to `_has_references`
99
- if len(optionals) == 1 and _has_references(optionals[0]):
100
- found.append(keyword)
101
- return found
99
+
100
+ # Only non-validating keywords
101
+ NON_VALIDATING = {
102
+ "$id",
103
+ "$schema",
104
+ "$defs",
105
+ "definitions",
106
+ "title",
107
+ "description",
108
+ "default",
109
+ "examples",
110
+ "example",
111
+ "$comment",
112
+ "deprecated",
113
+ "readOnly",
114
+ "writeOnly",
115
+ }
116
+
117
+ return all(key in NON_VALIDATING for key in schema.keys())
118
+
119
+
120
+ def _sanitize_properties(schema: JsonSchemaObject) -> None:
121
+ """Remove OPTIONAL property schemas if they have $ref."""
122
+ if "properties" not in schema:
123
+ return
124
+
125
+ properties = schema["properties"]
126
+ if not isinstance(properties, dict):
127
+ return
128
+
129
+ required = schema.get("required", [])
130
+
131
+ for name, subschema in list(properties.items()):
132
+ if not _has_ref(subschema):
133
+ continue
134
+
135
+ if name not in required:
136
+ del properties[name]
102
137
 
103
138
 
104
139
  def _sanitize_items(schema: JsonSchemaObject) -> None:
140
+ """Convert to empty array ONLY if minItems allows it."""
105
141
  items = schema["items"]
142
+
143
+ has_ref = False
144
+ if isinstance(items, dict):
145
+ has_ref = _has_ref(items)
146
+ elif isinstance(items, list):
147
+ has_ref = any(_has_ref(item) for item in items)
148
+
149
+ if not has_ref:
150
+ return
151
+
106
152
  min_items = schema.get("minItems", 0)
107
- if not min_items:
108
- if isinstance(items, dict) and ("$ref" in items or _find_single_reference_combinators(items)):
109
- _convert_to_empty_array(schema)
110
- if isinstance(items, list) and _has_references_in_items(items):
111
- _convert_to_empty_array(schema)
153
+
154
+ if min_items == 0:
155
+ _convert_to_empty_array(schema)
156
+
157
+
158
+ def _sanitize_prefix_items(schema: JsonSchemaObject) -> None:
159
+ """Same logic as items."""
160
+ prefix_items = schema["prefixItems"]
161
+
162
+ if not isinstance(prefix_items, list):
163
+ return
164
+
165
+ if not any(_has_ref(item) for item in prefix_items):
166
+ return
167
+
168
+ min_items = schema.get("minItems", 0)
169
+
170
+ if min_items == 0:
171
+ _convert_to_empty_array(schema)
112
172
 
113
173
 
114
174
  def _convert_to_empty_array(schema: JsonSchemaObject) -> None:
115
- del schema["items"]
175
+ schema.pop("items", None)
176
+ schema.pop("prefixItems", None)
116
177
  schema["maxItems"] = 0
178
+ schema["minItems"] = 0
117
179
 
118
180
 
119
181
  def _sanitize_additional_properties(schema: JsonSchemaObject) -> None:
120
- additional_properties = schema["additionalProperties"]
121
- if isinstance(additional_properties, dict) and "$ref" in additional_properties:
182
+ additional = schema["additionalProperties"]
183
+ if _has_ref(additional):
122
184
  schema["additionalProperties"] = False
185
+
186
+
187
+ def _sanitize_additional_items(schema: JsonSchemaObject) -> None:
188
+ additional = schema["additionalItems"]
189
+ if _has_ref(additional):
190
+ schema["additionalItems"] = False
191
+
192
+
193
+ def _has_ref(schema: Any) -> bool:
194
+ """Check if schema contains $ref at any level."""
195
+ if not isinstance(schema, dict):
196
+ return False
197
+
198
+ if "$ref" in schema:
199
+ return True
200
+ for value in schema.values():
201
+ if isinstance(value, dict):
202
+ if _has_ref(value):
203
+ return True
204
+ elif isinstance(value, list):
205
+ for item in value:
206
+ if isinstance(item, dict) and _has_ref(item):
207
+ return True
208
+
209
+ return False
210
+
211
+
212
+ def _collect_all_references(schema: JsonSchema | list[JsonSchema], remaining: set[str]) -> None:
213
+ """Collect all remaining $ref."""
214
+ if isinstance(schema, dict):
215
+ ref = schema.get("$ref")
216
+ if isinstance(ref, str):
217
+ remaining.add(ref)
218
+ for value in schema.values():
219
+ _collect_all_references(value, remaining)
220
+ elif isinstance(schema, list):
221
+ for item in schema:
222
+ _collect_all_references(item, remaining)
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import Any, Callable, Dict, List, Mapping, TypeVar, Union, overload
3
+ from typing import Any, Callable, Dict, Iterator, List, Mapping, TypeVar, Union, overload
4
4
 
5
5
  T = TypeVar("T")
6
6
 
@@ -106,6 +106,18 @@ class Unresolvable: ...
106
106
  UNRESOLVABLE = Unresolvable()
107
107
 
108
108
 
109
+ def encode_pointer(pointer: str) -> str:
110
+ return pointer.replace("~", "~0").replace("/", "~1")
111
+
112
+
113
+ def decode_pointer(value: str) -> str:
114
+ return value.replace("~1", "/").replace("~0", "~")
115
+
116
+
117
+ def iter_decoded_pointer_segments(pointer: str) -> Iterator[str]:
118
+ return map(decode_pointer, pointer.split("/")[1:])
119
+
120
+
109
121
  def resolve_pointer(document: Any, pointer: str) -> dict | list | str | int | float | None | Unresolvable:
110
122
  """Implementation is adapted from Rust's `serde-json` crate.
111
123
 
@@ -116,12 +128,8 @@ def resolve_pointer(document: Any, pointer: str) -> dict | list | str | int | fl
116
128
  if not pointer.startswith("/"):
117
129
  return UNRESOLVABLE
118
130
 
119
- def replace(value: str) -> str:
120
- return value.replace("~1", "/").replace("~0", "~")
121
-
122
- tokens = map(replace, pointer.split("/")[1:])
123
131
  target = document
124
- for token in tokens:
132
+ for token in iter_decoded_pointer_segments(pointer):
125
133
  if isinstance(target, dict):
126
134
  target = target.get(token, UNRESOLVABLE)
127
135
  if target is UNRESOLVABLE:
@@ -4,12 +4,13 @@ import time
4
4
  from dataclasses import dataclass
5
5
  from typing import TYPE_CHECKING, Any
6
6
 
7
- from schemathesis.config import ProjectConfig
7
+ from schemathesis.config import InferenceAlgorithm, ProjectConfig
8
8
  from schemathesis.core import NOT_SET, NotSet
9
9
  from schemathesis.engine.control import ExecutionControl
10
10
  from schemathesis.engine.observations import Observations
11
11
  from schemathesis.generation.case import Case
12
12
  from schemathesis.schemas import APIOperation, BaseSchema
13
+ from schemathesis.specs.openapi.stateful import dependencies
13
14
 
14
15
  if TYPE_CHECKING:
15
16
  import threading
@@ -85,9 +86,10 @@ class EngineContext:
85
86
 
86
87
  def inject_links(self) -> int:
87
88
  """Inject inferred OpenAPI links into API operations based on collected observations."""
89
+ from schemathesis.specs.openapi.schemas import BaseOpenAPISchema
90
+
88
91
  injected = 0
89
92
  if self.observations is not None and self.observations.location_headers:
90
- from schemathesis.specs.openapi.schemas import BaseOpenAPISchema
91
93
  from schemathesis.specs.openapi.stateful.inference import LinkInferencer
92
94
 
93
95
  assert isinstance(self.schema, BaseOpenAPISchema)
@@ -96,6 +98,24 @@ class EngineContext:
96
98
  inferencer = LinkInferencer.from_schema(self.schema)
97
99
  for operation, entries in self.observations.location_headers.items():
98
100
  injected += inferencer.inject_links(operation.responses, entries)
101
+ if (
102
+ isinstance(self.schema, BaseOpenAPISchema)
103
+ and self.schema.config.phases.stateful.enabled
104
+ and self.schema.config.phases.stateful.inference.is_algorithm_enabled(
105
+ InferenceAlgorithm.DEPENDENCY_ANALYSIS
106
+ )
107
+ ):
108
+ graph = dependencies.analyze(self.schema)
109
+ for response_links in graph.iter_links():
110
+ operation = self.schema.get_operation_by_reference(response_links.producer_operation_ref)
111
+ response = operation.responses.get(response_links.status_code)
112
+ links = response.definition.setdefault(self.schema.adapter.links_keyword, {})
113
+
114
+ for link_name, definition in response_links.links.items():
115
+ # Find unique name if collision exists
116
+ final_name = _resolve_link_name_collision(link_name, links)
117
+ links[final_name] = definition.to_openapi()
118
+ injected += 1
99
119
 
100
120
  return injected
101
121
 
@@ -151,3 +171,16 @@ class EngineContext:
151
171
  kwargs["proxies"] = {"all": proxy}
152
172
  self._transport_kwargs_cache[key] = kwargs
153
173
  return kwargs
174
+
175
+
176
+ def _resolve_link_name_collision(proposed_name: str, existing_links: dict[str, Any]) -> str:
177
+ if proposed_name not in existing_links:
178
+ return proposed_name
179
+
180
+ # Name collision - find next available suffix
181
+ suffix = 0
182
+ while True:
183
+ candidate = f"{proposed_name}_{suffix}"
184
+ if candidate not in existing_links:
185
+ return candidate
186
+ suffix += 1
@@ -90,7 +90,9 @@ def setup() -> None:
90
90
  url, resolved = resolver.resolve(ref)
91
91
  resolver.push_scope(url)
92
92
  try:
93
- return merged([s, _resolve_all_refs(deepclone(resolved), resolver=resolver)]) # type: ignore
93
+ return merged(
94
+ [_resolve_all_refs(s, resolver=resolver), _resolve_all_refs(deepclone(resolved), resolver=resolver)]
95
+ ) # type: ignore
94
96
  finally:
95
97
  resolver.pop_scope()
96
98
 
@@ -324,7 +324,7 @@ def iter_parameters_v2(
324
324
  _, param = maybe_resolve(param, resolver, "")
325
325
  if param.get("in") == ParameterLocation.BODY:
326
326
  if "$ref" in param["schema"]:
327
- resource_name = _get_resource_name(param["schema"]["$ref"])
327
+ resource_name = resource_name_from_ref(param["schema"]["$ref"])
328
328
  for media_type in body_media_types:
329
329
  yield OpenApiBody.from_definition(
330
330
  definition=parameter,
@@ -375,7 +375,7 @@ def iter_parameters_v3(
375
375
  if isinstance(schema, dict):
376
376
  content = dict(content)
377
377
  if "$ref" in schema:
378
- resource_name = _get_resource_name(schema["$ref"])
378
+ resource_name = resource_name_from_ref(schema["$ref"])
379
379
  try:
380
380
  to_bundle = cast(dict[str, Any], schema)
381
381
  bundled = bundler.bundle(to_bundle, resolver, inline_recursive=True)
@@ -391,7 +391,7 @@ def iter_parameters_v3(
391
391
  )
392
392
 
393
393
 
394
- def _get_resource_name(reference: str) -> str:
394
+ def resource_name_from_ref(reference: str) -> str:
395
395
  return reference.rsplit("/", maxsplit=1)[1]
396
396
 
397
397
 
@@ -10,6 +10,7 @@ if TYPE_CHECKING:
10
10
  from schemathesis.core.jsonschema.types import JsonSchema
11
11
 
12
12
  IterResponseExamples = Callable[[Mapping[str, Any], str], Iterator[tuple[str, object]]]
13
+ ExtractRawResponseSchema = Callable[[Mapping[str, Any]], Union["JsonSchema", None]]
13
14
  ExtractResponseSchema = Callable[[Mapping[str, Any], "RefResolver", str, str], Union["JsonSchema", None]]
14
15
  ExtractHeaderSchema = Callable[[Mapping[str, Any], "RefResolver", str, str], "JsonSchema"]
15
16
  ExtractParameterSchema = Callable[[Mapping[str, Any]], "JsonSchema"]
@@ -41,6 +42,7 @@ class SpecificationAdapter(Protocol):
41
42
  # Function to extract schema from parameter definition
42
43
  extract_parameter_schema: ExtractParameterSchema
43
44
  # Function to extract response schema from specification
45
+ extract_raw_response_schema: ExtractRawResponseSchema
44
46
  extract_response_schema: ExtractResponseSchema
45
47
  # Function to extract header schema from specification
46
48
  extract_header_schema: ExtractHeaderSchema
@@ -48,6 +48,10 @@ class OpenApiResponse:
48
48
  assert not isinstance(self._schema, NotSet)
49
49
  return self._schema
50
50
 
51
+ def get_raw_schema(self) -> JsonSchema | None:
52
+ """Raw and unresolved response schema."""
53
+ return self.adapter.extract_raw_response_schema(self.definition)
54
+
51
55
  @property
52
56
  def validator(self) -> Validator | None:
53
57
  """JSON Schema validator for this response."""
@@ -118,6 +122,9 @@ class OpenApiResponses:
118
122
  def items(self) -> ItemsView[str, OpenApiResponse]:
119
123
  return self._inner.items()
120
124
 
125
+ def get(self, key: str) -> OpenApiResponse | None:
126
+ return self._inner.get(key)
127
+
121
128
  def add(self, status_code: str, definition: dict[str, Any]) -> OpenApiResponse:
122
129
  instance = OpenApiResponse(
123
130
  status_code=status_code,
@@ -153,12 +160,16 @@ class OpenApiResponses:
153
160
  # The default response has the lowest priority
154
161
  return responses.get("default")
155
162
 
156
- def iter_examples(self) -> Iterator[tuple[str, object]]:
157
- """Iterate over all examples for all responses."""
163
+ def iter_successful_responses(self) -> Iterator[OpenApiResponse]:
164
+ """Iterate over all response definitions for successful responses."""
158
165
  for response in self._inner.values():
159
- # Check only 2xx responses
160
166
  if response.status_code.startswith("2"):
161
- yield from response.iter_examples()
167
+ yield response
168
+
169
+ def iter_examples(self) -> Iterator[tuple[str, object]]:
170
+ """Iterate over all examples for all responses."""
171
+ for response in self.iter_successful_responses():
172
+ yield from response.iter_examples()
162
173
 
163
174
 
164
175
  def _iter_resolved_responses(
@@ -178,20 +189,31 @@ def _iter_resolved_responses(
178
189
  def extract_response_schema_v2(
179
190
  response: Mapping[str, Any], resolver: RefResolver, scope: str, nullable_keyword: str
180
191
  ) -> JsonSchema | None:
181
- schema = response.get("schema")
192
+ schema = extract_raw_response_schema_v2(response)
182
193
  if schema is not None:
183
194
  return _prepare_schema(schema, resolver, scope, nullable_keyword)
184
195
  return None
185
196
 
186
197
 
198
+ def extract_raw_response_schema_v2(response: Mapping[str, Any]) -> JsonSchema | None:
199
+ return response.get("schema")
200
+
201
+
187
202
  def extract_response_schema_v3(
188
203
  response: Mapping[str, Any], resolver: RefResolver, scope: str, nullable_keyword: str
189
204
  ) -> JsonSchema | None:
205
+ schema = extract_raw_response_schema_v3(response)
206
+ if schema is not None:
207
+ return _prepare_schema(schema, resolver, scope, nullable_keyword)
208
+ return None
209
+
210
+
211
+ def extract_raw_response_schema_v3(response: Mapping[str, Any]) -> JsonSchema | None:
190
212
  options = iter(response.get("content", {}).values())
191
213
  media_type = next(options, None)
192
214
  # "schema" is an optional key in the `MediaType` object
193
- if media_type and "schema" in media_type:
194
- return _prepare_schema(media_type["schema"], resolver, scope, nullable_keyword)
215
+ if media_type is not None:
216
+ return media_type.get("schema")
195
217
  return None
196
218
 
197
219