schemathesis 4.2.2__py3-none-any.whl → 4.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- schemathesis/config/__init__.py +8 -1
- schemathesis/config/_phases.py +14 -3
- schemathesis/config/schema.json +2 -1
- schemathesis/core/jsonschema/bundler.py +3 -2
- schemathesis/core/transforms.py +14 -6
- schemathesis/engine/context.py +35 -2
- schemathesis/generation/hypothesis/__init__.py +3 -1
- schemathesis/specs/openapi/adapter/parameters.py +3 -3
- schemathesis/specs/openapi/adapter/protocol.py +2 -0
- schemathesis/specs/openapi/adapter/responses.py +29 -7
- schemathesis/specs/openapi/adapter/v2.py +2 -0
- schemathesis/specs/openapi/adapter/v3_0.py +2 -0
- schemathesis/specs/openapi/adapter/v3_1.py +2 -0
- schemathesis/specs/openapi/stateful/dependencies/__init__.py +88 -0
- schemathesis/specs/openapi/stateful/dependencies/inputs.py +182 -0
- schemathesis/specs/openapi/stateful/dependencies/models.py +270 -0
- schemathesis/specs/openapi/stateful/dependencies/naming.py +168 -0
- schemathesis/specs/openapi/stateful/dependencies/outputs.py +34 -0
- schemathesis/specs/openapi/stateful/dependencies/resources.py +270 -0
- schemathesis/specs/openapi/stateful/dependencies/schemas.py +343 -0
- schemathesis/specs/openapi/stateful/inference.py +2 -1
- {schemathesis-4.2.2.dist-info → schemathesis-4.3.0.dist-info}/METADATA +1 -1
- {schemathesis-4.2.2.dist-info → schemathesis-4.3.0.dist-info}/RECORD +26 -19
- {schemathesis-4.2.2.dist-info → schemathesis-4.3.0.dist-info}/WHEEL +0 -0
- {schemathesis-4.2.2.dist-info → schemathesis-4.3.0.dist-info}/entry_points.txt +0 -0
- {schemathesis-4.2.2.dist-info → schemathesis-4.3.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,270 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from dataclasses import dataclass
|
4
|
+
from typing import TYPE_CHECKING, Any, Iterator, Mapping, cast
|
5
|
+
|
6
|
+
from schemathesis.core.errors import InfiniteRecursiveReference
|
7
|
+
from schemathesis.core.jsonschema.bundler import BundleError
|
8
|
+
from schemathesis.specs.openapi.adapter.parameters import resource_name_from_ref
|
9
|
+
from schemathesis.specs.openapi.adapter.references import maybe_resolve
|
10
|
+
from schemathesis.specs.openapi.stateful.dependencies import naming
|
11
|
+
from schemathesis.specs.openapi.stateful.dependencies.models import (
|
12
|
+
CanonicalizationCache,
|
13
|
+
Cardinality,
|
14
|
+
DefinitionSource,
|
15
|
+
ResourceDefinition,
|
16
|
+
ResourceMap,
|
17
|
+
)
|
18
|
+
from schemathesis.specs.openapi.stateful.dependencies.naming import from_path
|
19
|
+
from schemathesis.specs.openapi.stateful.dependencies.schemas import (
|
20
|
+
ROOT_POINTER,
|
21
|
+
canonicalize,
|
22
|
+
try_unwrap_composition,
|
23
|
+
unwrap_schema,
|
24
|
+
)
|
25
|
+
|
26
|
+
if TYPE_CHECKING:
|
27
|
+
from schemathesis.core.compat import RefResolver
|
28
|
+
from schemathesis.schemas import APIOperation
|
29
|
+
from schemathesis.specs.openapi.adapter.responses import OpenApiResponse
|
30
|
+
|
31
|
+
|
32
|
+
@dataclass
class ExtractedResource:
    """How a resource was extracted from a response.

    Pairs the discovered resource definition with its location inside the
    response body and whether it appears as a single object or an array.
    """

    # The discovered resource definition (shared with the global registry)
    resource: ResourceDefinition
    # Where in response body (JSON pointer)
    pointer: str
    # Is this a single resource or an array?
    cardinality: Cardinality

    __slots__ = ("resource", "pointer", "cardinality")
|
43
|
+
|
44
|
+
|
45
|
+
def extract_resources_from_responses(
    *,
    operation: APIOperation,
    resources: ResourceMap,
    updated_resources: set[str],
    resolver: RefResolver,
    canonicalization_cache: CanonicalizationCache,
) -> Iterator[tuple[OpenApiResponse, ExtractedResource]]:
    """Extract resource definitions from operation's successful responses.

    Walks every successful (2xx) response of *operation* and delegates the
    per-response work - unwrapping pagination wrappers, handling
    `allOf` / `oneOf` / `anyOf` composition, and determining cardinality -
    to `iter_resources_from_response`. The shared resource registry is
    updated as resources are discovered.
    """
    for response in operation.responses.iter_successful_responses():
        found = iter_resources_from_response(
            path=operation.path,
            response=response,
            resources=resources,
            updated_resources=updated_resources,
            resolver=resolver,
            canonicalization_cache=canonicalization_cache,
        )
        for item in found:
            yield response, item
|
69
|
+
|
70
|
+
|
71
|
+
def iter_resources_from_response(
    *,
    path: str,
    response: OpenApiResponse,
    resources: ResourceMap,
    updated_resources: set[str],
    resolver: RefResolver,
    canonicalization_cache: CanonicalizationCache,
) -> Iterator[ExtractedResource]:
    """Yield resources extracted from a single response definition.

    Handles boolean schemas, `$ref` resolution, composition unwrapping, and
    `allOf` canonicalization (cached per top-level `$ref`), then detects
    wrapper patterns and extracts the resource with its cardinality.
    """
    schema = response.get_raw_schema()

    if isinstance(schema, bool):
        # Boolean schemas carry no structure; name the resource from the URL path instead
        boolean_resource = _resource_from_boolean_schema(path=path, resources=resources)
        if boolean_resource is not None:
            yield boolean_resource
        return None
    elif not isinstance(schema, dict):
        # Ignore invalid schemas
        return None

    # Capture the top-level `$ref` before resolving - it carries naming information
    parent_ref = schema.get("$ref")
    _, resolved = maybe_resolve(schema, resolver, "")

    resolved = try_unwrap_composition(resolved, resolver)

    if "allOf" in resolved:
        # Canonicalization is expensive - reuse cached results keyed by the top-level `$ref`
        if parent_ref is not None and parent_ref in canonicalization_cache:
            canonicalized = canonicalization_cache[parent_ref]
        else:
            try:
                canonicalized = canonicalize(cast(dict, resolved), resolver)
            except (InfiniteRecursiveReference, BundleError):
                # Canonicalization is best-effort; fall back to the resolved form
                canonicalized = resolved
            if parent_ref is not None:
                canonicalization_cache[parent_ref] = canonicalized
    else:
        canonicalized = resolved

    # Detect wrapper pattern and navigate to data
    unwrapped = unwrap_schema(schema=canonicalized, path=path, parent_ref=parent_ref, resolver=resolver)

    # Recover $ref lost during allOf canonicalization
    recovered_ref = None
    if unwrapped.pointer != ROOT_POINTER and "allOf" in resolved:
        recovered_ref = _recover_ref_from_allof(
            branches=resolved["allOf"],
            pointer=unwrapped.pointer,
            resolver=resolver,
        )

    # Extract resource and determine cardinality; prefer the most specific ref for naming
    result = _extract_resource_and_cardinality(
        schema=unwrapped.schema,
        path=path,
        resources=resources,
        updated_resources=updated_resources,
        resolver=resolver,
        parent_ref=recovered_ref or unwrapped.ref or parent_ref,
    )

    if result is not None:
        resource, cardinality = result
        yield ExtractedResource(resource=resource, cardinality=cardinality, pointer=unwrapped.pointer)
|
134
|
+
|
135
|
+
|
136
|
+
def _recover_ref_from_allof(*, branches: list[dict], pointer: str, resolver: RefResolver) -> str | None:
    """Recover original $ref from allOf branches after canonicalization.

    Canonicalization inlines all $refs, losing resource name information.
    This searches original allOf branches to find which one defined the
    property at the given pointer.

    Returns the recovered reference, or `None` when no branch defines the
    target property via a `$ref` (directly or in its `items`).
    """
    # Parse pointer segments (e.g., "/data" -> ["data"])
    # NOTE(review): segments are not JSON-pointer-unescaped (~0/~1) - assumed
    # property names here never contain "/" or "~"; confirm against producers.
    segments = [s for s in pointer.strip("/").split("/") if s]
    if not segments:
        # Root or empty pointer - nothing to recover (previously raised IndexError)
        return None
    target = segments[-1]

    # Search each branch for the property
    for branch in branches:
        _, resolved_branch = maybe_resolve(branch, resolver, "")
        properties = resolved_branch.get("properties", {})

        # Check if this branch defines the target property
        if target in properties:
            # Navigate to property in original (unresolved) branch
            original_properties = branch.get("properties", {})
            if target in original_properties:
                prop_schema = original_properties[target]
                # Extract $ref from property or its items
                return prop_schema.get("$ref") or prop_schema.get("items", {}).get("$ref")

    return None
|
161
|
+
|
162
|
+
|
163
|
+
def _resource_from_boolean_schema(*, path: str, resources: ResourceMap) -> ExtractedResource | None:
    """Build a placeholder resource for a boolean (structureless) response schema.

    The resource name is derived from the URL path; returns `None` when no
    name can be derived. An existing registry entry is reused untouched -
    an inferred resource already has at least one field, and a boolean
    schema must not overwrite that.
    """
    derived = from_path(path)
    if derived is None:
        return None
    registered = resources.get(derived)
    if registered is None:
        registered = ResourceDefinition.without_properties(derived)
        resources[derived] = registered
    return ExtractedResource(resource=registered, cardinality=Cardinality.ONE, pointer=ROOT_POINTER)
|
173
|
+
|
174
|
+
|
175
|
+
def _extract_resource_and_cardinality(
    *,
    schema: Mapping[str, Any],
    path: str,
    resources: ResourceMap,
    updated_resources: set[str],
    resolver: RefResolver,
    parent_ref: str | None = None,
) -> tuple[ResourceDefinition, Cardinality] | None:
    """Extract resource from schema and determine cardinality.

    Array schemas contribute their `items` schema with `Cardinality.MANY`;
    everything else is treated as a single object. Returns `None` when no
    resource can be extracted.
    """
    target = schema
    naming_ref = parent_ref
    cardinality = Cardinality.ONE

    # Arrays yield MANY of whatever their `items` describe
    if schema.get("type") == "array" or "items" in schema:
        items = schema.get("items")
        if not isinstance(items, dict):
            # Missing or tuple-style `items` - nothing usable to extract
            return None
        # Resolve items if it's a $ref; prefer the items $ref for naming
        _, target = maybe_resolve(items, resolver, "")
        naming_ref = items.get("$ref") or parent_ref
        cardinality = Cardinality.MANY

    resource = _extract_resource_from_schema(
        schema=target,
        path=path,
        resources=resources,
        updated_resources=updated_resources,
        resolver=resolver,
        parent_ref=naming_ref,
    )
    if resource is None:
        return None
    return resource, cardinality
|
224
|
+
|
225
|
+
|
226
|
+
def _extract_resource_from_schema(
    *,
    schema: Mapping[str, Any],
    path: str,
    resources: ResourceMap,
    updated_resources: set[str],
    resolver: RefResolver,
    parent_ref: str | None = None,
) -> ResourceDefinition | None:
    """Extract resource definition from a schema.

    Naming priority: the schema's own `$ref`, then `parent_ref`, then the URL
    path. Registers the resource in `resources`, upgrading an existing entry
    in place only when this schema is a strictly better definition source.
    Returns `None` when no resource name can be derived.
    """
    resource_name: str | None = None

    ref = schema.get("$ref")
    if ref is not None:
        resource_name = resource_name_from_ref(ref)
    elif parent_ref is not None:
        resource_name = resource_name_from_ref(parent_ref)
    else:
        resource_name = naming.from_path(path)

    if resource_name is None:
        return None

    resource = resources.get(resource_name)

    # Only inspect the schema if we might improve on what is already registered
    if resource is None or resource.source < DefinitionSource.SCHEMA_WITH_PROPERTIES:
        _, resolved = maybe_resolve(schema, resolver, "")

        properties = resolved.get("properties")
        if properties:
            fields = list(properties)
            source = DefinitionSource.SCHEMA_WITH_PROPERTIES
        else:
            fields = []
            source = DefinitionSource.SCHEMA_WITHOUT_PROPERTIES
        if resource is not None:
            # Upgrade in place only when this source strictly outranks the recorded one
            if resource.source < source:
                resource.source = source
                resource.fields = fields
                updated_resources.add(resource_name)
        else:
            resource = ResourceDefinition(name=resource_name, fields=fields, source=source)
            resources[resource_name] = resource

    return resource
|
@@ -0,0 +1,343 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from dataclasses import dataclass
|
4
|
+
from typing import TYPE_CHECKING, Any, Mapping
|
5
|
+
|
6
|
+
from schemathesis.core.jsonschema import ALL_KEYWORDS
|
7
|
+
from schemathesis.core.jsonschema.bundler import BUNDLE_STORAGE_KEY, bundle
|
8
|
+
from schemathesis.specs.openapi.adapter.parameters import resource_name_from_ref
|
9
|
+
from schemathesis.specs.openapi.adapter.references import maybe_resolve
|
10
|
+
from schemathesis.specs.openapi.stateful.dependencies import naming
|
11
|
+
|
12
|
+
if TYPE_CHECKING:
|
13
|
+
from schemathesis.core.compat import RefResolver
|
14
|
+
|
15
|
+
ROOT_POINTER = "/"
|
16
|
+
|
17
|
+
|
18
|
+
def canonicalize(schema: dict[str, Any], resolver: RefResolver) -> Mapping[str, Any]:
    """Transform the input schema into its canonical-ish form.

    May propagate bundling errors (`InfiniteRecursiveReference`, `BundleError`);
    callers are expected to fall back to the non-canonical schema.
    """
    from hypothesis_jsonschema._canonicalise import canonicalish
    from hypothesis_jsonschema._resolve import resolve_all_refs

    # Canonicalisation in `hypothesis_jsonschema` requires all references to be resolvable and non-recursive
    # On the Schemathesis side bundling solves this problem
    bundled = bundle(schema, resolver, inline_recursive=True)
    canonicalized = canonicalish(bundled)
    resolved = resolve_all_refs(canonicalized)
    # Drop the storage key injected by `bundle` - it is an implementation detail
    resolved.pop(BUNDLE_STORAGE_KEY, None)
    return resolved
|
30
|
+
|
31
|
+
|
32
|
+
def try_unwrap_composition(schema: Mapping[str, Any], resolver: RefResolver) -> Mapping[str, Any]:
    """Unwrap oneOf/anyOf if we can safely extract a single schema."""
    composition_key = next((key for key in ("anyOf", "oneOf") if key in schema), None)
    if composition_key is None:
        return schema

    alternatives = schema[composition_key]
    if not isinstance(alternatives, list):
        # Malformed composition - leave the schema untouched
        return schema

    # Keep only alternatives that describe structured data
    interesting = _filter_composition_alternatives(alternatives, resolver)
    if not interesting:
        # Nothing worth unwrapping - return the original schema
        return schema

    # One candidate: unwrap it. Several: pick the first for now
    # TODO: Support multiple alternatives
    return interesting[0]
|
63
|
+
|
64
|
+
|
65
|
+
def try_unwrap_all_of(schema: Mapping[str, Any]) -> Mapping[str, Any]:
    """Unwrap `allOf` when exactly one branch carries interesting structure."""
    branches = schema.get("allOf")
    if not isinstance(branches, list):
        return schema

    interesting = [sub for sub in branches if isinstance(sub, dict) and _is_interesting_schema(sub)]

    # Only unwrap when the choice is unambiguous
    return interesting[0] if len(interesting) == 1 else schema
|
79
|
+
|
80
|
+
|
81
|
+
def _filter_composition_alternatives(alternatives: list[dict], resolver: RefResolver) -> list[dict]:
    """Filter oneOf/anyOf alternatives to keep only interesting schemas."""
    return [
        candidate
        for candidate in alternatives
        # Resolve to inspect structure, but keep the original form (with `$ref`)
        # so naming information survives for later extraction
        if _is_interesting_schema(maybe_resolve(candidate, resolver, "")[1])
    ]
|
93
|
+
|
94
|
+
|
95
|
+
def _is_interesting_schema(schema: Mapping[str, Any]) -> bool:
|
96
|
+
"""Check if a schema represents interesting structured data."""
|
97
|
+
# Has $ref - definitely interesting (references a named schema)
|
98
|
+
if "$ref" in schema:
|
99
|
+
return True
|
100
|
+
|
101
|
+
ty = schema.get("type")
|
102
|
+
|
103
|
+
# Primitives are not interesting
|
104
|
+
if ty in {"string", "number", "integer", "boolean", "null"}:
|
105
|
+
return False
|
106
|
+
|
107
|
+
# Arrays - check items
|
108
|
+
if ty == "array":
|
109
|
+
items = schema.get("items")
|
110
|
+
if not isinstance(items, dict):
|
111
|
+
return False
|
112
|
+
# Recursively check if items are interesting
|
113
|
+
return _is_interesting_schema(items)
|
114
|
+
|
115
|
+
# allOf/anyOf/oneOf - interesting (composition)
|
116
|
+
if any(key in schema for key in ["allOf", "anyOf", "oneOf"]):
|
117
|
+
return True
|
118
|
+
|
119
|
+
# Objects (or untyped) - check if they have any keywords
|
120
|
+
return bool(set(schema).intersection(ALL_KEYWORDS))
|
121
|
+
|
122
|
+
|
123
|
+
@dataclass
class UnwrappedSchema:
    """Result of wrapper pattern detection.

    Records the sub-schema that holds the actual resource, where it sits in
    the response body, and the `$ref` (if any) that named it.
    """

    # JSON pointer to the resource within the response body ("/" = root)
    pointer: str
    # The schema located at `pointer`
    schema: Mapping[str, Any]
    # `$ref` that pointed at the resource schema, when one was present
    ref: str | None

    __slots__ = ("pointer", "schema", "ref")
|
132
|
+
|
133
|
+
|
134
|
+
def unwrap_schema(
    schema: Mapping[str, Any], path: str, parent_ref: str | None, resolver: RefResolver
) -> UnwrappedSchema:
    """Locate the actual resource schema inside a (possibly wrapped) response schema.

    Tries, in order: array at the root, HAL `_embedded` wrapper, pagination
    wrapper, externally tagged object; otherwise the schema itself is the
    resource. The returned pointer records where the resource sits in the
    response body.
    """
    # Array at root
    if schema.get("type") == "array":
        return UnwrappedSchema(pointer="/", schema=schema, ref=None)

    properties = schema.get("properties", {})

    # HAL _embedded (Spring-specific)
    hal_field = _detect_hal_embedded(schema)
    if hal_field:
        embedded_schema = properties["_embedded"]
        _, resolved_embedded = maybe_resolve(embedded_schema, resolver, "")
        resource_schema = resolved_embedded.get("properties", {}).get(hal_field, {})
        _, resolved_resource = maybe_resolve(resource_schema, resolver, "")

        return UnwrappedSchema(
            pointer=f"/_embedded/{hal_field}", schema=resolved_resource, ref=resource_schema.get("$ref")
        )

    # Pagination wrapper
    array_field = _is_pagination_wrapper(schema=schema, path=path, parent_ref=parent_ref, resolver=resolver)
    if array_field:
        array_schema = properties[array_field]
        _, resolved_array = maybe_resolve(array_schema, resolver, "")

        return UnwrappedSchema(pointer=f"/{array_field}", schema=resolved_array, ref=array_schema.get("$ref"))

    # External tag
    external_tag = _detect_externally_tagged_pattern(schema, path)
    if external_tag:
        tagged_schema = properties[external_tag]
        _, resolved_tagged = maybe_resolve(tagged_schema, resolver, "")

        # Unwrap a single-branch allOf first, then take the most specific `$ref` available
        resolved = try_unwrap_all_of(resolved_tagged)
        ref = resolved.get("$ref") or resolved_tagged.get("$ref") or tagged_schema.get("$ref")

        _, resolved = maybe_resolve(resolved, resolver, "")
        return UnwrappedSchema(pointer=f"/{external_tag}", schema=resolved, ref=ref)

    # No wrapper - single object at root
    return UnwrappedSchema(pointer="/", schema=schema, ref=schema.get("$ref"))
|
177
|
+
|
178
|
+
|
179
|
+
def _detect_hal_embedded(schema: Mapping[str, Any]) -> str | None:
|
180
|
+
"""Detect HAL _embedded pattern.
|
181
|
+
|
182
|
+
Spring Data REST uses: {_embedded: {users: [...]}}
|
183
|
+
"""
|
184
|
+
properties = schema.get("properties", {})
|
185
|
+
embedded = properties.get("_embedded")
|
186
|
+
|
187
|
+
if not isinstance(embedded, dict):
|
188
|
+
return None
|
189
|
+
|
190
|
+
embedded_properties = embedded.get("properties", {})
|
191
|
+
|
192
|
+
# Find array properties in _embedded
|
193
|
+
for name, subschema in embedded_properties.items():
|
194
|
+
if isinstance(subschema, dict) and subschema.get("type") == "array":
|
195
|
+
# Found array in _embedded
|
196
|
+
return name
|
197
|
+
|
198
|
+
return None
|
199
|
+
|
200
|
+
|
201
|
+
def _is_pagination_wrapper(
    schema: Mapping[str, Any], path: str, parent_ref: str | None, resolver: RefResolver
) -> str | None:
    """Detect if schema is a pagination wrapper.

    A wrapper has exactly one non-metadata array property whose name either
    matches a common data-field name (or the resource name derived from
    `parent_ref` / the URL path), accompanied by pagination-style metadata.
    Returns the array property's name, or `None` when the schema is not a wrapper.
    """
    properties = schema.get("properties", {})

    if not properties:
        return None

    # These never count as the data field
    metadata_fields = frozenset(["links", "errors"])

    # Find array properties
    arrays = []
    for name, subschema in properties.items():
        if name in metadata_fields:
            continue
        if isinstance(subschema, dict):
            # Resolve `$ref` properties before inspecting their type
            _, subschema = maybe_resolve(subschema, resolver, "")
            if subschema.get("type") == "array":
                arrays.append(name)

    # Must have exactly one array property
    if len(arrays) != 1:
        return None

    array_field = arrays[0]

    # Check if array field name matches common patterns
    common_data_fields = {"data", "items", "results", "value", "content", "elements", "records", "list"}

    if parent_ref:
        # e.g. "ListUsersResponse" -> "users" also counts as a data field
        resource_name = resource_name_from_ref(parent_ref)
        resource_name = naming.strip_affixes(resource_name, ["get", "create", "list", "delete"], ["response"])
        common_data_fields.add(resource_name.lower())

    if array_field.lower() not in common_data_fields:
        # Check if field name matches resource-specific pattern
        # Example: path="/items/runner-groups" -> resource="RunnerGroup" -> "runner_groups"
        resource_name_from_path = naming.from_path(path)
        if resource_name_from_path is None:
            return None

        candidate = naming.to_plural(naming.to_snake_case(resource_name_from_path))
        if array_field.lower() != candidate:
            # Field name doesn't match resource pattern
            return None

    # Check for pagination metadata indicators
    others = [p for p in properties if p != array_field]

    # NOTE(review): property names are normalized below by stripping "_" and "-",
    # so entries here that contain underscores (e.g. "total_count") appear
    # unreachable - "totalcount" already covers them; confirm before pruning.
    pagination_indicators = {
        "count",
        "total",
        "totalcount",
        "total_count",
        "totalelements",
        "total_elements",
        "page",
        "pagenumber",
        "page_number",
        "currentpage",
        "current_page",
        "next",
        "previous",
        "prev",
        "nextpage",
        "prevpage",
        "nextpageurl",
        "prevpageurl",
        "next_page_url",
        "prev_page_url",
        "next_page_token",
        "nextpagetoken",
        "cursor",
        "nextcursor",
        "next_cursor",
        "nextlink",
        "next_link",
        "endcursor",
        "hasmore",
        "has_more",
        "hasnextpage",
        "haspreviouspage",
        "pagesize",
        "page_size",
        "perpage",
        "per_page",
        "limit",
        "size",
        "pageinfo",
        "page_info",
        "pagination",
        "links",
        "meta",
    }

    # Check if any other property looks like pagination metadata
    has_pagination_metadata = any(
        prop.lower().replace("_", "").replace("-", "") in pagination_indicators for prop in others
    )

    # Either there is pagination metadata or the wrapper has just items + some other field which is likely an unrecognized metadata
    if has_pagination_metadata or len(properties) <= 2:
        return array_field

    return None
|
307
|
+
|
308
|
+
|
309
|
+
def _detect_externally_tagged_pattern(schema: Mapping[str, Any], path: str) -> str | None:
|
310
|
+
"""Detect externally tagged resource pattern.
|
311
|
+
|
312
|
+
Pattern: {ResourceName: [...]} or {resourceName: [...]}
|
313
|
+
|
314
|
+
Examples:
|
315
|
+
- GET /merchants -> {"Merchants": [...]}
|
316
|
+
- GET /users -> {"Users": [...]} or {"users": [...]}
|
317
|
+
|
318
|
+
"""
|
319
|
+
properties = schema.get("properties", {})
|
320
|
+
|
321
|
+
if not properties:
|
322
|
+
return None
|
323
|
+
|
324
|
+
resource_name = naming.from_path(path)
|
325
|
+
|
326
|
+
if not resource_name:
|
327
|
+
return None
|
328
|
+
|
329
|
+
possible_names = {
|
330
|
+
# "merchant"
|
331
|
+
resource_name.lower(),
|
332
|
+
# "merchants"
|
333
|
+
naming.to_plural(resource_name.lower()),
|
334
|
+
}
|
335
|
+
|
336
|
+
for name, subschema in properties.items():
|
337
|
+
if name.lower() not in possible_names:
|
338
|
+
continue
|
339
|
+
|
340
|
+
if isinstance(subschema, dict):
|
341
|
+
return name
|
342
|
+
|
343
|
+
return None
|
@@ -21,6 +21,7 @@ from werkzeug.exceptions import MethodNotAllowed, NotFound
|
|
21
21
|
from werkzeug.routing import Map, MapAdapter, Rule
|
22
22
|
|
23
23
|
from schemathesis.core.adapter import ResponsesContainer
|
24
|
+
from schemathesis.core.transforms import encode_pointer
|
24
25
|
|
25
26
|
if TYPE_CHECKING:
|
26
27
|
from schemathesis.engine.observations import LocationHeaderEntry
|
@@ -95,7 +96,7 @@ class LinkInferencer:
|
|
95
96
|
if operation_id:
|
96
97
|
operation = OperationById(operation_id, method=method, path=path)
|
97
98
|
else:
|
98
|
-
encoded_path = path
|
99
|
+
encoded_path = encode_pointer(path)
|
99
100
|
operation = OperationByRef(f"#/paths/{encoded_path}/{method}", method=method, path=path)
|
100
101
|
|
101
102
|
operations.append(operation)
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: schemathesis
|
3
|
-
Version: 4.2.2
|
3
|
+
Version: 4.3.0
|
4
4
|
Summary: Property-based testing framework for Open API and GraphQL based apps
|
5
5
|
Project-URL: Documentation, https://schemathesis.readthedocs.io/en/stable/
|
6
6
|
Project-URL: Changelog, https://github.com/schemathesis/schemathesis/blob/master/CHANGELOG.md
|