schemathesis 4.2.1__py3-none-any.whl → 4.2.2__py3-none-any.whl

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in those registries.
@@ -90,7 +90,7 @@ class Bundler:
90
90
  # In the future, it **should** be handled by `hypothesis-jsonschema` instead.
91
91
  cloned = deepclone(resolved_schema)
92
92
  remaining_references = sanitize(cloned)
93
- if remaining_references:
93
+ if reference in remaining_references:
94
94
  # This schema is either infinitely recursive or the sanitization logic misses it, in any
95
95
  # event, we give up here
96
96
  raise InfiniteRecursiveReference(reference)
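
The change above narrows when `InfiniteRecursiveReference` is raised: previously any reference left over after sanitization triggered it, even one unrelated to the reference being resolved; now it fires only when that specific reference survives. A minimal standalone sketch of the difference (illustrative values, not the library's code):

```python
# Illustrative values only; in the library the set comes from sanitize() on the cloned schema.
reference = "#/components/schemas/User"
remaining_references = {"#/components/schemas/Pet"}  # unrelated leftover

old_check = bool(remaining_references)           # True  -> would have raised
new_check = reference in remaining_references    # False -> resolution proceeds
```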
@@ -1,122 +1,222 @@
1
1
  from __future__ import annotations
2
2
 
3
- from schemathesis.core.jsonschema.keywords import ALL_KEYWORDS
4
- from schemathesis.core.jsonschema.types import JsonSchema, JsonSchemaObject, get_type
3
+ from typing import Any
5
4
 
5
+ from schemathesis.core.jsonschema.types import JsonSchema, JsonSchemaObject
6
6
 
7
- def sanitize(schema: JsonSchema) -> set[str]:
8
- """Remove optional parts of the schema that contain references.
9
7
 
10
- It covers only the most popular cases, as removing all optional parts is complicated.
11
- We might fall back to filtering out invalid cases in the future.
12
- """
8
+ def sanitize(schema: JsonSchema) -> set[str]:
9
+ """Remove $ref from optional locations."""
13
10
  if isinstance(schema, bool):
14
11
  return set()
15
12
 
16
- stack = [schema]
13
+ stack: list[JsonSchema] = [schema]
14
+
17
15
  while stack:
18
16
  current = stack.pop()
19
- if isinstance(current, dict):
20
- # Optional properties
21
- if "properties" in current:
22
- properties = current["properties"]
23
- required = current.get("required", [])
24
- for name, value in list(properties.items()):
25
- if isinstance(value, dict):
26
- if name not in required and _has_references(value):
27
- del properties[name]
28
- elif _find_single_reference_combinators(value):
29
- properties.pop(name, None)
30
- else:
31
- stack.append(value)
32
-
33
- # Optional items
34
- if "items" in current:
35
- _sanitize_items(current)
36
- # Not required additional properties
37
- if "additionalProperties" in current:
38
- _sanitize_additional_properties(current)
39
- for k in _find_single_reference_combinators(current):
40
- del current[k]
17
+ if not isinstance(current, dict):
18
+ continue
19
+
20
+ _sanitize_combinators(current)
21
+
22
+ _sanitize_properties(current)
23
+
24
+ if "items" in current:
25
+ _sanitize_items(current)
26
+
27
+ if "prefixItems" in current:
28
+ _sanitize_prefix_items(current)
29
+
30
+ if "additionalProperties" in current:
31
+ _sanitize_additional_properties(current)
32
+
33
+ if "additionalItems" in current:
34
+ _sanitize_additional_items(current)
35
+
36
+ for value in current.values():
37
+ if isinstance(value, dict):
38
+ stack.append(value)
39
+ elif isinstance(value, list):
40
+ for item in value:
41
+ if isinstance(item, dict):
42
+ stack.append(item)
41
43
 
42
44
  remaining: set[str] = set()
43
45
  _collect_all_references(schema, remaining)
44
46
  return remaining
45
47
 
46
48
 
47
- def _collect_all_references(schema: JsonSchema | list[JsonSchema], remaining: set[str]) -> None:
48
- """Recursively collect all $ref present in the schema."""
49
- if isinstance(schema, dict):
50
- reference = schema.get("$ref")
51
- if isinstance(reference, str):
52
- remaining.add(reference)
53
- for value in schema.values():
54
- _collect_all_references(value, remaining)
55
- elif isinstance(schema, list):
56
- for item in schema:
57
- _collect_all_references(item, remaining)
49
+ def _sanitize_combinators(schema: JsonSchemaObject) -> None:
50
+ """Sanitize anyOf/oneOf/allOf."""
51
+ for combinator_key in ("anyOf", "oneOf"):
52
+ variants = schema.get(combinator_key)
53
+ if not isinstance(variants, list):
54
+ continue
58
55
 
56
+ flattened = _flatten_combinator(variants, combinator_key)
59
57
 
60
- def _has_references_in_items(items: list[JsonSchema]) -> bool:
61
- return any("$ref" in item for item in items if isinstance(item, dict))
58
+ cleaned = [variant for variant in flattened if not _has_ref(variant)]
62
59
 
60
+ # Only update if we have non-$ref variants
61
+ if cleaned:
62
+ # At least one alternative remains, which narrows the constraints
63
+ schema[combinator_key] = cleaned
64
+ elif not flattened:
65
+ schema.pop(combinator_key, None)
63
66
 
64
- def _has_references(schema: JsonSchemaObject) -> bool:
65
- if "$ref" in schema:
67
+ all_of = schema.get("allOf")
68
+ if isinstance(all_of, list):
69
+ flattened = _flatten_combinator(all_of, "allOf")
70
+
71
+ cleaned = [variant for variant in flattened if not _is_empty(variant)]
72
+ if cleaned:
73
+ schema["allOf"] = cleaned
74
+ else:
75
+ schema.pop("allOf", None)
76
+
77
+
78
+ def _flatten_combinator(variants: list, key: str) -> list:
79
+ """Flatten nested same-type combinators."""
80
+ result = []
81
+ for variant in variants:
82
+ if isinstance(variant, dict) and key in variant and isinstance(variant[key], list):
83
+ result.extend(variant[key])
84
+ else:
85
+ result.append(variant)
86
+ return result
87
+
88
+
89
+ def _is_empty(schema: JsonSchema) -> bool:
90
+ """Check if schema accepts anything."""
91
+ if schema is True:
66
92
  return True
67
- items = schema.get("items")
68
- return (isinstance(items, dict) and "$ref" in items) or isinstance(items, list) and _has_references_in_items(items)
69
93
 
94
+ if not isinstance(schema, dict):
95
+ return False
70
96
 
71
- def _is_optional_schema(schema: JsonSchema) -> bool:
72
- # Whether this schema could be dropped from a list of schemas
73
- if isinstance(schema, bool):
97
+ if not schema:
74
98
  return True
75
- type_ = get_type(schema)
76
- if type_ == ["object"]:
77
- # Empty object is valid for this schema -> could be dropped
78
- return schema.get("required", []) == [] and schema.get("minProperties", 0) == 0
79
- # Has at least one keyword -> should not be removed
80
- return not any(k in ALL_KEYWORDS for k in schema)
81
-
82
-
83
- def _find_single_reference_combinators(schema: JsonSchemaObject) -> list[str]:
84
- # Schema example:
85
- # {
86
- # "type": "object",
87
- # "properties": {
88
- # "parent": {
89
- # "allOf": [{"$ref": "#/components/schemas/User"}]
90
- # }
91
- # }
92
- # }
93
- found = []
94
- for keyword in ("allOf", "oneOf", "anyOf"):
95
- combinator = schema.get(keyword)
96
- if combinator is not None:
97
- optionals = [subschema for subschema in combinator if not _is_optional_schema(subschema)]
98
- # NOTE: The first schema is not bool, hence it is safe to pass it to `_has_references`
99
- if len(optionals) == 1 and _has_references(optionals[0]):
100
- found.append(keyword)
101
- return found
99
+
100
+ # Only non-validating keywords
101
+ NON_VALIDATING = {
102
+ "$id",
103
+ "$schema",
104
+ "$defs",
105
+ "definitions",
106
+ "title",
107
+ "description",
108
+ "default",
109
+ "examples",
110
+ "example",
111
+ "$comment",
112
+ "deprecated",
113
+ "readOnly",
114
+ "writeOnly",
115
+ }
116
+
117
+ return all(key in NON_VALIDATING for key in schema.keys())
118
+
119
+
120
+ def _sanitize_properties(schema: JsonSchemaObject) -> None:
121
+ """Remove OPTIONAL property schemas if they have $ref."""
122
+ if "properties" not in schema:
123
+ return
124
+
125
+ properties = schema["properties"]
126
+ if not isinstance(properties, dict):
127
+ return
128
+
129
+ required = schema.get("required", [])
130
+
131
+ for name, subschema in list(properties.items()):
132
+ if not _has_ref(subschema):
133
+ continue
134
+
135
+ if name not in required:
136
+ del properties[name]
102
137
 
103
138
 
104
139
  def _sanitize_items(schema: JsonSchemaObject) -> None:
140
+ """Convert to empty array ONLY if minItems allows it."""
105
141
  items = schema["items"]
142
+
143
+ has_ref = False
144
+ if isinstance(items, dict):
145
+ has_ref = _has_ref(items)
146
+ elif isinstance(items, list):
147
+ has_ref = any(_has_ref(item) for item in items)
148
+
149
+ if not has_ref:
150
+ return
151
+
106
152
  min_items = schema.get("minItems", 0)
107
- if not min_items:
108
- if isinstance(items, dict) and ("$ref" in items or _find_single_reference_combinators(items)):
109
- _convert_to_empty_array(schema)
110
- if isinstance(items, list) and _has_references_in_items(items):
111
- _convert_to_empty_array(schema)
153
+
154
+ if min_items == 0:
155
+ _convert_to_empty_array(schema)
156
+
157
+
158
+ def _sanitize_prefix_items(schema: JsonSchemaObject) -> None:
159
+ """Same logic as items."""
160
+ prefix_items = schema["prefixItems"]
161
+
162
+ if not isinstance(prefix_items, list):
163
+ return
164
+
165
+ if not any(_has_ref(item) for item in prefix_items):
166
+ return
167
+
168
+ min_items = schema.get("minItems", 0)
169
+
170
+ if min_items == 0:
171
+ _convert_to_empty_array(schema)
112
172
 
113
173
 
114
174
  def _convert_to_empty_array(schema: JsonSchemaObject) -> None:
115
- del schema["items"]
175
+ schema.pop("items", None)
176
+ schema.pop("prefixItems", None)
116
177
  schema["maxItems"] = 0
178
+ schema["minItems"] = 0
117
179
 
118
180
 
119
181
  def _sanitize_additional_properties(schema: JsonSchemaObject) -> None:
120
- additional_properties = schema["additionalProperties"]
121
- if isinstance(additional_properties, dict) and "$ref" in additional_properties:
182
+ additional = schema["additionalProperties"]
183
+ if _has_ref(additional):
122
184
  schema["additionalProperties"] = False
185
+
186
+
187
+ def _sanitize_additional_items(schema: JsonSchemaObject) -> None:
188
+ additional = schema["additionalItems"]
189
+ if _has_ref(additional):
190
+ schema["additionalItems"] = False
191
+
192
+
193
+ def _has_ref(schema: Any) -> bool:
194
+ """Check if schema contains $ref at any level."""
195
+ if not isinstance(schema, dict):
196
+ return False
197
+
198
+ if "$ref" in schema:
199
+ return True
200
+ for value in schema.values():
201
+ if isinstance(value, dict):
202
+ if _has_ref(value):
203
+ return True
204
+ elif isinstance(value, list):
205
+ for item in value:
206
+ if isinstance(item, dict) and _has_ref(item):
207
+ return True
208
+
209
+ return False
210
+
211
+
212
+ def _collect_all_references(schema: JsonSchema | list[JsonSchema], remaining: set[str]) -> None:
213
+ """Collect all remaining $ref."""
214
+ if isinstance(schema, dict):
215
+ ref = schema.get("$ref")
216
+ if isinstance(ref, str):
217
+ remaining.add(ref)
218
+ for value in schema.values():
219
+ _collect_all_references(value, remaining)
220
+ elif isinstance(schema, list):
221
+ for item in schema:
222
+ _collect_all_references(item, remaining)
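
Based on the rewritten module above, `sanitize` mutates the schema in place, dropping `$ref` occurrences from optional locations (non-required properties, `items`/`prefixItems` when `minItems` is 0, `additionalProperties`, and so on) and returning whichever references could not be removed. A small usage sketch, assuming the module is importable under the path listed in RECORD:

```python
# Sketch assuming the import path from RECORD (schemathesis/core/jsonschema/references.py).
from schemathesis.core.jsonschema.references import sanitize

schema = {
    "type": "object",
    "properties": {
        "parent": {"$ref": "#/components/schemas/Node"},  # optional -> droppable
        "id": {"$ref": "#/components/schemas/Id"},        # required -> must stay
    },
    "required": ["id"],
}

remaining = sanitize(schema)  # mutates `schema` in place
assert "parent" not in schema["properties"]
assert remaining == {"#/components/schemas/Id"}
```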
@@ -109,15 +109,19 @@ def extract_top_level(
109
109
  assert isinstance(operation.schema, BaseOpenAPISchema)
110
110
 
111
111
  responses = list(operation.responses.iter_examples())
112
- seen_references: set[str] = set()
113
112
  for parameter in operation.iter_parameters():
114
113
  if "schema" in parameter.definition:
115
114
  schema = parameter.definition["schema"]
116
115
  resolver = RefResolver.from_schema(schema)
117
- seen_references.clear()
116
+ reference_path: tuple[str, ...] = ()
118
117
  definitions = [
119
118
  parameter.definition,
120
- *_expand_subschemas(schema=schema, resolver=resolver, seen_references=seen_references),
119
+ *[
120
+ expanded_schema
121
+ for expanded_schema, _ in _expand_subschemas(
122
+ schema=schema, resolver=resolver, reference_path=reference_path
123
+ )
124
+ ],
121
125
  ]
122
126
  else:
123
127
  definitions = [parameter.definition]
@@ -138,10 +142,15 @@ def extract_top_level(
138
142
  if "schema" in parameter.definition:
139
143
  schema = parameter.definition["schema"]
140
144
  resolver = RefResolver.from_schema(schema)
141
- seen_references.clear()
142
- for expanded in _expand_subschemas(schema=schema, resolver=resolver, seen_references=seen_references):
143
- if isinstance(expanded, dict) and parameter.adapter.examples_container_keyword in expanded:
144
- for value in expanded[parameter.adapter.examples_container_keyword]:
145
+ reference_path = ()
146
+ for expanded_schema, _ in _expand_subschemas(
147
+ schema=schema, resolver=resolver, reference_path=reference_path
148
+ ):
149
+ if (
150
+ isinstance(expanded_schema, dict)
151
+ and parameter.adapter.examples_container_keyword in expanded_schema
152
+ ):
153
+ for value in expanded_schema[parameter.adapter.examples_container_keyword]:
145
154
  yield ParameterExample(
146
155
  container=parameter.location.container_name, name=parameter.name, value=value
147
156
  )
@@ -152,10 +161,15 @@ def extract_top_level(
152
161
  if "schema" in body.definition:
153
162
  schema = body.definition["schema"]
154
163
  resolver = RefResolver.from_schema(schema)
155
- seen_references.clear()
164
+ reference_path = ()
156
165
  definitions = [
157
166
  body.definition,
158
- *_expand_subschemas(schema=schema, resolver=resolver, seen_references=seen_references),
167
+ *[
168
+ expanded_schema
169
+ for expanded_schema, _ in _expand_subschemas(
170
+ schema=schema, resolver=resolver, reference_path=reference_path
171
+ )
172
+ ],
159
173
  ]
160
174
  else:
161
175
  definitions = [body.definition]
@@ -172,58 +186,76 @@ def extract_top_level(
172
186
  if "schema" in body.definition:
173
187
  schema = body.definition["schema"]
174
188
  resolver = RefResolver.from_schema(schema)
175
- seen_references.clear()
176
- for expanded in _expand_subschemas(schema=schema, resolver=resolver, seen_references=seen_references):
177
- if isinstance(expanded, dict) and body.adapter.examples_container_keyword in expanded:
178
- for value in expanded[body.adapter.examples_container_keyword]:
189
+ reference_path = ()
190
+ for expanded_schema, _ in _expand_subschemas(
191
+ schema=schema, resolver=resolver, reference_path=reference_path
192
+ ):
193
+ if isinstance(expanded_schema, dict) and body.adapter.examples_container_keyword in expanded_schema:
194
+ for value in expanded_schema[body.adapter.examples_container_keyword]:
179
195
  yield BodyExample(value=value, media_type=body.media_type)
180
196
 
181
197
 
182
198
  @overload
183
199
  def _resolve_bundled(
184
- schema: dict[str, Any], resolver: RefResolver, seen_references: set[str]
185
- ) -> dict[str, Any]: ... # pragma: no cover
200
+ schema: dict[str, Any], resolver: RefResolver, reference_path: tuple[str, ...]
201
+ ) -> tuple[dict[str, Any], tuple[str, ...]]: ...
186
202
 
187
203
 
188
204
  @overload
189
- def _resolve_bundled(schema: bool, resolver: RefResolver, seen_references: set[str]) -> bool: ... # pragma: no cover
205
+ def _resolve_bundled(
206
+ schema: bool, resolver: RefResolver, reference_path: tuple[str, ...]
207
+ ) -> tuple[bool, tuple[str, ...]]: ...
190
208
 
191
209
 
192
210
  def _resolve_bundled(
193
- schema: dict[str, Any] | bool, resolver: RefResolver, seen_references: set[str]
194
- ) -> dict[str, Any] | bool:
211
+ schema: dict[str, Any] | bool, resolver: RefResolver, reference_path: tuple[str, ...]
212
+ ) -> tuple[dict[str, Any] | bool, tuple[str, ...]]:
213
+ """Resolve $ref if present."""
195
214
  if isinstance(schema, dict):
196
215
  reference = schema.get("$ref")
197
216
  if isinstance(reference, str):
198
- if reference in seen_references:
217
+ # Check if this reference is already in the current path
218
+ if reference in reference_path:
199
219
  # Try to remove recursive references to avoid infinite recursion
200
220
  remaining_references = references.sanitize(schema)
201
221
  if reference in remaining_references:
202
222
  raise InfiniteRecursiveReference(reference)
203
- seen_references.add(reference)
223
+
224
+ new_path = reference_path + (reference,)
225
+
204
226
  try:
205
- _, schema = resolver.resolve(schema["$ref"])
227
+ _, resolved_schema = resolver.resolve(reference)
206
228
  except RefResolutionError as exc:
207
229
  raise UnresolvableReference(reference) from exc
208
- return schema
230
+
231
+ return resolved_schema, new_path
232
+
233
+ return schema, reference_path
209
234
 
210
235
 
211
236
  def _expand_subschemas(
212
- *, schema: dict[str, Any] | bool, resolver: RefResolver, seen_references: set[str]
213
- ) -> Generator[dict[str, Any] | bool, None, None]:
214
- schema = _resolve_bundled(schema, resolver, seen_references)
215
- yield schema
237
+ *, schema: dict[str, Any] | bool, resolver: RefResolver, reference_path: tuple[str, ...]
238
+ ) -> Generator[tuple[dict[str, Any] | bool, tuple[str, ...]], None, None]:
239
+ """Expand schema and all its subschemas."""
240
+ schema, current_path = _resolve_bundled(schema, resolver, reference_path)
241
+ yield (schema, current_path)
242
+
216
243
  if isinstance(schema, dict):
244
+ # For anyOf/oneOf, yield each alternative with the same path
217
245
  for key in ("anyOf", "oneOf"):
218
246
  if key in schema:
219
247
  for subschema in schema[key]:
220
- yield subschema
248
+ # Each alternative starts with the current path
249
+ yield (subschema, current_path)
250
+
251
+ # For allOf, merge all alternatives
221
252
  if "allOf" in schema:
222
253
  subschema = deepclone(schema["allOf"][0])
223
- subschema = _resolve_bundled(subschema, resolver, seen_references)
254
+ subschema, _ = _resolve_bundled(subschema, resolver, current_path)
255
+
224
256
  for sub in schema["allOf"][1:]:
225
257
  if isinstance(sub, dict):
226
- sub = _resolve_bundled(sub, resolver, seen_references)
258
+ sub, _ = _resolve_bundled(sub, resolver, current_path)
227
259
  for key, value in sub.items():
228
260
  if key == "properties":
229
261
  subschema.setdefault("properties", {}).update(value)
@@ -235,7 +267,8 @@ def _expand_subschemas(
235
267
  subschema.setdefault("examples", []).append(value)
236
268
  else:
237
269
  subschema[key] = value
238
- yield subschema
270
+
271
+ yield (subschema, current_path)
239
272
 
240
273
 
241
274
  def extract_inner_examples(examples: dict[str, Any] | list, schema: BaseOpenAPISchema) -> Generator[Any, None, None]:
@@ -269,13 +302,12 @@ def extract_from_schemas(
269
302
  operation: APIOperation[OpenApiParameter, OpenApiResponses, OpenApiSecurityParameters],
270
303
  ) -> Generator[Example, None, None]:
271
304
  """Extract examples from parameters' schema definitions."""
272
- seen_references: set[str] = set()
273
305
  for parameter in operation.iter_parameters():
274
306
  schema = parameter.optimized_schema
275
307
  if isinstance(schema, bool):
276
308
  continue
277
309
  resolver = RefResolver.from_schema(schema)
278
- seen_references.clear()
310
+ reference_path: tuple[str, ...] = ()
279
311
  bundle_storage = schema.get(BUNDLE_STORAGE_KEY)
280
312
  for value in extract_from_schema(
281
313
  operation=operation,
@@ -283,7 +315,7 @@ def extract_from_schemas(
283
315
  example_keyword=parameter.adapter.example_keyword,
284
316
  examples_container_keyword=parameter.adapter.examples_container_keyword,
285
317
  resolver=resolver,
286
- seen_references=seen_references,
318
+ reference_path=reference_path,
287
319
  bundle_storage=bundle_storage,
288
320
  ):
289
321
  yield ParameterExample(container=parameter.location.container_name, name=parameter.name, value=value)
@@ -295,14 +327,14 @@ def extract_from_schemas(
295
327
  resolver = RefResolver.from_schema(schema)
296
328
  bundle_storage = schema.get(BUNDLE_STORAGE_KEY)
297
329
  for example_keyword, examples_container_keyword in (("example", "examples"), ("x-example", "x-examples")):
298
- seen_references.clear()
330
+ reference_path = ()
299
331
  for value in extract_from_schema(
300
332
  operation=operation,
301
333
  schema=schema,
302
334
  example_keyword=example_keyword,
303
335
  examples_container_keyword=examples_container_keyword,
304
336
  resolver=resolver,
305
- seen_references=seen_references,
337
+ reference_path=reference_path,
306
338
  bundle_storage=bundle_storage,
307
339
  ):
308
340
  yield BodyExample(value=value, media_type=body.media_type)
@@ -315,49 +347,57 @@ def extract_from_schema(
315
347
  example_keyword: str,
316
348
  examples_container_keyword: str,
317
349
  resolver: RefResolver,
318
- seen_references: set[str],
350
+ reference_path: tuple[str, ...],
319
351
  bundle_storage: dict[str, Any] | None,
320
352
  ) -> Generator[Any, None, None]:
321
353
  """Extract all examples from a single schema definition."""
322
354
  # This implementation supports only `properties` and `items`
323
- schema = _resolve_bundled(schema, resolver, seen_references)
355
+ schema, current_path = _resolve_bundled(schema, resolver, reference_path)
356
+
324
357
  if "properties" in schema:
325
358
  variants = {}
326
359
  required = schema.get("required", [])
327
360
  to_generate: dict[str, Any] = {}
361
+
328
362
  for name, subschema in schema["properties"].items():
329
363
  values = []
330
- for subsubschema in _expand_subschemas(
331
- schema=subschema, resolver=resolver, seen_references=seen_references
364
+ for expanded_schema, expanded_path in _expand_subschemas(
365
+ schema=subschema, resolver=resolver, reference_path=current_path
332
366
  ):
333
- if isinstance(subsubschema, bool):
334
- to_generate[name] = subsubschema
367
+ if isinstance(expanded_schema, bool):
368
+ to_generate[name] = expanded_schema
335
369
  continue
336
- if example_keyword in subsubschema:
337
- values.append(subsubschema[example_keyword])
338
- if examples_container_keyword in subsubschema and isinstance(
339
- subsubschema[examples_container_keyword], list
370
+
371
+ if example_keyword in expanded_schema:
372
+ values.append(expanded_schema[example_keyword])
373
+
374
+ if examples_container_keyword in expanded_schema and isinstance(
375
+ expanded_schema[examples_container_keyword], list
340
376
  ):
341
377
  # These are JSON Schema examples, which is an array of values
342
- values.extend(subsubschema[examples_container_keyword])
378
+ values.extend(expanded_schema[examples_container_keyword])
379
+
343
380
  # Check nested examples as well
344
381
  values.extend(
345
382
  extract_from_schema(
346
383
  operation=operation,
347
- schema=subsubschema,
384
+ schema=expanded_schema,
348
385
  example_keyword=example_keyword,
349
386
  examples_container_keyword=examples_container_keyword,
350
387
  resolver=resolver,
351
- seen_references=seen_references,
388
+ reference_path=expanded_path,
352
389
  bundle_storage=bundle_storage,
353
390
  )
354
391
  )
392
+
355
393
  if not values:
356
394
  if name in required:
357
395
  # Defer generation to only generate these variants if at least one property has examples
358
- to_generate[name] = subsubschema
396
+ to_generate[name] = expanded_schema
359
397
  continue
398
+
360
399
  variants[name] = values
400
+
361
401
  if variants:
362
402
  config = operation.schema.config.generation_for(operation=operation, phase="examples")
363
403
  for name, subschema in to_generate.items():
@@ -369,6 +409,7 @@ def extract_from_schema(
369
409
  subschema[BUNDLE_STORAGE_KEY] = bundle_storage
370
410
  generated = _generate_single_example(subschema, config)
371
411
  variants[name] = [generated]
412
+
372
413
  # Calculate the maximum number of examples any property has
373
414
  total_combos = max(len(examples) for examples in variants.values())
374
415
  # Evenly distribute examples by cycling through them
@@ -377,6 +418,7 @@ def extract_from_schema(
377
418
  name: next(islice(cycle(property_variants), idx, None))
378
419
  for name, property_variants in variants.items()
379
420
  }
421
+
380
422
  elif "items" in schema and isinstance(schema["items"], dict):
381
423
  # Each inner value should be wrapped in an array
382
424
  for value in extract_from_schema(
@@ -385,7 +427,7 @@ def extract_from_schema(
385
427
  example_keyword=example_keyword,
386
428
  examples_container_keyword=examples_container_keyword,
387
429
  resolver=resolver,
388
- seen_references=seen_references,
430
+ reference_path=current_path,
389
431
  bundle_storage=bundle_storage,
390
432
  ):
391
433
  yield [value]
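
The examples extraction now threads an immutable `reference_path` tuple through `_resolve_bundled` and `_expand_subschemas` instead of clearing and reusing one shared `seen_references` set. Each branch of the expansion extends its own path, so two sibling subschemas pointing at the same target are no longer mistaken for recursion; only a reference that reappears on its own resolution path is. A standalone sketch of the idea (hypothetical names, not the library code):

```python
# Per-branch path tracking: a reference is recursive only if it repeats on its own path.
def resolve(ref: str, graph: dict[str, list[str]], path: tuple[str, ...] = ()) -> None:
    if ref in path:
        raise RecursionError(f"recursive reference: {ref}")
    for child in graph.get(ref, []):
        # Each branch gets its own extended copy of the path.
        resolve(child, graph, path + (ref,))

# "Diamond": A and B both reference Shared, but never on the same path -> fine.
resolve("Root", {"Root": ["A", "B"], "A": ["Shared"], "B": ["Shared"], "Shared": []})

# Genuine cycle: Node references itself -> still detected.
try:
    resolve("Root", {"Root": ["Node"], "Node": ["Node"]})
except RecursionError:
    pass
```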
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: schemathesis
3
- Version: 4.2.1
3
+ Version: 4.2.2
4
4
  Summary: Property-based testing framework for Open API and GraphQL based apps
5
5
  Project-URL: Documentation, https://schemathesis.readthedocs.io/en/stable/
6
6
  Project-URL: Changelog, https://github.com/schemathesis/schemathesis/blob/master/CHANGELOG.md
@@ -69,9 +69,9 @@ schemathesis/core/transport.py,sha256=LQcamAkFqJ0HuXQzepevAq2MCJW-uq5Nm-HE9yc7HM
69
69
  schemathesis/core/validation.py,sha256=b0USkKzkWvdz3jOW1JXYc_TfYshfKZeP7xAUnMqcNoc,2303
70
70
  schemathesis/core/version.py,sha256=dOBUWrY3-uA2NQXJp9z7EtZgkR6jYeLg8sMhQCL1mcI,205
71
71
  schemathesis/core/jsonschema/__init__.py,sha256=gBZGsXIpK2EFfcp8x0b69dqzWAm2OeZHepKImkkLvoE,320
72
- schemathesis/core/jsonschema/bundler.py,sha256=ECNAHrXl5nh52crm5Qu1QvRWVV2Vv9gU8H722oKA7k0,7711
72
+ schemathesis/core/jsonschema/bundler.py,sha256=IWZqKb0PqYSKtFW5ncItqHg5o1uVM0yS7vgbBg6pD0c,7724
73
73
  schemathesis/core/jsonschema/keywords.py,sha256=pjseXTfH9OItNs_Qq6ubkhNWQOrxTnwHmrP_jxrHeJU,631
74
- schemathesis/core/jsonschema/references.py,sha256=uB7-DGYhLFqbIAvuO-IDc9xSatoGw54FOYn4xE4qE6A,4667
74
+ schemathesis/core/jsonschema/references.py,sha256=c2Q4IKWUbwENNtkbFaqf8r3LLZu6GFE5YLnYQlg5tPg,6069
75
75
  schemathesis/core/jsonschema/types.py,sha256=C7f9g8yKFuoxC5_0YNIh8QAyGU0-tj8pzTMfMDjjjVM,1248
76
76
  schemathesis/core/output/__init__.py,sha256=SiHqONFskXl73AtP5dV29L14nZoKo7B-IeG52KZB32M,1446
77
77
  schemathesis/core/output/sanitization.py,sha256=Ev3tae8dVwsYd7yVb2_1VBFYs92WFsQ4Eu1fGaymItE,2013
@@ -133,7 +133,7 @@ schemathesis/specs/openapi/_hypothesis.py,sha256=g5476s_ArzheWKHHlOfKwx46tqoiehP
133
133
  schemathesis/specs/openapi/checks.py,sha256=YYV6j6idyw2ubY4sLp-avs2OVEkAWeIihjT0xiV1RRA,30669
134
134
  schemathesis/specs/openapi/converter.py,sha256=4a6-8STT5snF7B-t6IsOIGdK5rV16oNqsdvWL7VFf2M,6472
135
135
  schemathesis/specs/openapi/definitions.py,sha256=8htclglV3fW6JPBqs59lgM4LnA25Mm9IptXBPb_qUT0,93949
136
- schemathesis/specs/openapi/examples.py,sha256=UquzOwy5QhmpHjFXv_QnZGpWZTU-N4CkQ1PQOxACIb8,22981
136
+ schemathesis/specs/openapi/examples.py,sha256=xddFhKUQFmh5wdHrSWPBAcaby6C3Jtvuo_hsVbRGN1w,24006
137
137
  schemathesis/specs/openapi/formats.py,sha256=4tYRdckauHxkJCmOhmdwDq_eOpHPaKloi89lzMPbPzw,3975
138
138
  schemathesis/specs/openapi/media_types.py,sha256=F5M6TKl0s6Z5X8mZpPsWDEdPBvxclKRcUOc41eEwKbo,2472
139
139
  schemathesis/specs/openapi/patterns.py,sha256=GqPZEXMRdWENQxanWjBOalIZ2MQUjuxk21kmdiI703E,18027
@@ -172,8 +172,8 @@ schemathesis/transport/prepare.py,sha256=erYXRaxpQokIDzaIuvt_csHcw72iHfCyNq8VNEz
172
172
  schemathesis/transport/requests.py,sha256=wriRI9fprTplE_qEZLEz1TerX6GwkE3pwr6ZnU2o6vQ,10648
173
173
  schemathesis/transport/serialization.py,sha256=GwO6OAVTmL1JyKw7HiZ256tjV4CbrRbhQN0ep1uaZwI,11157
174
174
  schemathesis/transport/wsgi.py,sha256=kQtasFre6pjdJWRKwLA_Qb-RyQHCFNpaey9ubzlFWKI,5907
175
- schemathesis-4.2.1.dist-info/METADATA,sha256=66gKdf-zvd-gHePTQubZs_QnlCvuuC3fELYv4hHK8NY,8540
176
- schemathesis-4.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
177
- schemathesis-4.2.1.dist-info/entry_points.txt,sha256=hiK3un-xfgPdwj9uj16YVDtTNpO128bmk0U82SMv8ZQ,152
178
- schemathesis-4.2.1.dist-info/licenses/LICENSE,sha256=2Ve4J8v5jMQAWrT7r1nf3bI8Vflk3rZVQefiF2zpxwg,1121
179
- schemathesis-4.2.1.dist-info/RECORD,,
175
+ schemathesis-4.2.2.dist-info/METADATA,sha256=x3D71YX9YO81-h_n1Emw98R4jxq6NoL33XbYe3ntWNc,8540
176
+ schemathesis-4.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
177
+ schemathesis-4.2.2.dist-info/entry_points.txt,sha256=hiK3un-xfgPdwj9uj16YVDtTNpO128bmk0U82SMv8ZQ,152
178
+ schemathesis-4.2.2.dist-info/licenses/LICENSE,sha256=2Ve4J8v5jMQAWrT7r1nf3bI8Vflk3rZVQefiF2zpxwg,1121
179
+ schemathesis-4.2.2.dist-info/RECORD,,