schemathesis 4.2.0__py3-none-any.whl → 4.2.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -119,102 +119,84 @@ def get_command_representation() -> str:
 
 
  def vcr_writer(output: TextOutput, config: ProjectConfig, queue: Queue) -> None:
- """Write YAML to a file in an incremental manner.
-
- This implementation doesn't use `pyyaml` package and composes YAML manually as string due to the following reasons:
- - It is much faster. The string-based approach gives only ~2.5% time overhead when `yaml.CDumper` has ~11.2%;
- - Implementation complexity. We have a quite simple format where almost all values are strings, and it is much
- simpler to implement it with string composition rather than with adjusting `yaml.Serializer` to emit explicit
- types. Another point is that with `pyyaml` we need to emit events and handle some low-level details like
- providing tags, anchors to have incremental writing, with primitive types it is much simpler.
- """
+ """Write YAML to a file in an incremental manner."""
  current_id = 1
 
- def format_header_values(values: list[str]) -> str:
- return "\n".join(f" - {json.dumps(v)}" for v in values)
+ def write_header_values(stream: IO, values: list[str]) -> None:
+ for v in values:
+ stream.write(f" - {json.dumps(v)}\n")
 
  if config.output.sanitization.enabled:
-
- def format_headers(headers: dict[str, list[str]]) -> str:
- headers = deepclone(headers)
- sanitize_value(headers, config=config.output.sanitization)
- return "\n".join(f' "{name}":\n{format_header_values(values)}' for name, values in headers.items())
-
+ sanitization_keys = config.output.sanitization.keys_to_sanitize
+ sensitive_markers = config.output.sanitization.sensitive_markers
+ replacement = config.output.sanitization.replacement
+
+ def write_headers(stream: IO, headers: dict[str, list[str]]) -> None:
+ for name, values in headers.items():
+ lower_name = name.lower()
+ stream.write(f' "{name}":\n')
+
+ # Sanitize inline if needed
+ if lower_name in sanitization_keys or any(marker in lower_name for marker in sensitive_markers):
+ stream.write(f" - {json.dumps(replacement)}\n")
+ else:
+ write_header_values(stream, values)
  else:
 
- def format_headers(headers: dict[str, list[str]]) -> str:
- return "\n".join(f' "{name}":\n{format_header_values(values)}' for name, values in headers.items())
-
- def format_check_message(message: str | None) -> str:
- return "~" if message is None else f"{message!r}"
+ def write_headers(stream: IO, headers: dict[str, list[str]]) -> None:
+ for name, values in headers.items():
+ stream.write(f' "{name}":\n')
+ write_header_values(stream, values)
 
- def format_checks(checks: list[CheckNode]) -> str:
+ def write_checks(stream: IO, checks: list[CheckNode]) -> None:
  if not checks:
- return "\n checks: []"
- items = "\n".join(
- f" - name: '{check.name}'\n status: '{check.status.name.upper()}'\n message: {format_check_message(check.failure_info.failure.title if check.failure_info else None)}"
- for check in checks
- )
- return f"""
- checks:
- {items}"""
+ stream.write("\n checks: []")
+ return
+
+ stream.write("\n checks:\n")
+ for check in checks:
+ message = check.failure_info.failure.title if check.failure_info else None
+ message_str = "~" if message is None else repr(message)
+ stream.write(
+ f" - name: '{check.name}'\n"
+ f" status: '{check.status.name.upper()}'\n"
+ f" message: {message_str}\n"
+ )
 
  if config.reports.preserve_bytes:
 
- def format_request_body(output: IO, request: Request) -> None:
+ def write_request_body(stream: IO, request: Request) -> None:
  if request.encoded_body is not None:
- output.write(
- f"""
- body:
- encoding: 'utf-8'
- base64_string: '{request.encoded_body}'"""
- )
+ stream.write(f"\n body:\n encoding: 'utf-8'\n base64_string: '{request.encoded_body}'")
 
- def format_response_body(output: IO, response: Response) -> None:
+ def write_response_body(stream: IO, response: Response) -> None:
  if response.encoded_body is not None:
- output.write(
- f""" body:
- encoding: '{response.encoding}'
- base64_string: '{response.encoded_body}'"""
+ stream.write(
+ f" body:\n encoding: '{response.encoding}'\n base64_string: '{response.encoded_body}'"
  )
-
  else:
 
- def format_request_body(output: IO, request: Request) -> None:
+ def write_request_body(stream: IO, request: Request) -> None:
  if request.body is not None:
  string = request.body.decode("utf8", "replace")
- output.write(
- """
- body:
- encoding: 'utf-8'
- string: """
- )
- write_double_quoted(output, string)
+ stream.write("\n body:\n encoding: 'utf-8'\n string: ")
+ write_double_quoted(stream, string)
 
- def format_response_body(output: IO, response: Response) -> None:
+ def write_response_body(stream: IO, response: Response) -> None:
  if response.content is not None:
  encoding = response.encoding or "utf8"
  string = response.content.decode(encoding, "replace")
- output.write(
- f""" body:
- encoding: '{encoding}'
- string: """
- )
- write_double_quoted(output, string)
+ stream.write(f" body:\n encoding: '{encoding}'\n string: ")
+ write_double_quoted(stream, string)
 
  with open_text_output(output) as stream:
  while True:
  item = queue.get()
- if isinstance(item, Initialize):
- stream.write(
- f"""command: '{get_command_representation()}'
- recorded_with: 'Schemathesis {SCHEMATHESIS_VERSION}'
- seed: {item.seed}
- http_interactions:"""
- )
- elif isinstance(item, Process):
+ if isinstance(item, Process):
  for case_id, interaction in item.recorder.interactions.items():
  case = item.recorder.cases[case_id]
+
+ # Determine status and checks
  if interaction.response is not None:
  if case_id in item.recorder.checks:
  checks = item.recorder.checks[case_id]
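Note: the header-writing hunk above replaces the deep-clone-and-scrub `format_headers` with a per-header decision made while writing: the configured key set, substring markers, and replacement value are looked up once, and each header name is checked as it is streamed. A minimal sketch of that check in isolation, with illustrative stand-ins for the `config.output.sanitization` values (not the real Schemathesis defaults):

```python
# Hypothetical stand-ins for config.output.sanitization; the real values are configurable
KEYS_TO_SANITIZE = frozenset({"authorization", "cookie", "x-api-key"})
SENSITIVE_MARKERS = ("token", "secret", "password")
REPLACEMENT = "[Filtered]"


def sanitized_values(name: str, values: list[str]) -> list[str]:
    """Decide what to write for a single header without copying the whole mapping."""
    lower_name = name.lower()
    if lower_name in KEYS_TO_SANITIZE or any(marker in lower_name for marker in SENSITIVE_MARKERS):
        return [REPLACEMENT]
    return values


assert sanitized_values("Authorization", ["Bearer abc"]) == ["[Filtered]"]
assert sanitized_values("X-Auth-Token", ["xyz"]) == ["[Filtered]"]
assert sanitized_values("Accept", ["application/json"]) == ["application/json"]
```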
@@ -224,101 +206,93 @@ http_interactions:"""
 status = check.status
 break
 else:
- # NOTE: Checks recording could be skipped if Schemathesis start skipping just
- # discovered failures in order to get past them and potentially discover more failures
 checks = []
 status = Status.SKIP
 else:
 checks = []
 status = Status.ERROR
- # Body payloads are handled via separate `stream.write` calls to avoid some allocations
- stream.write(
- f"""\n- id: '{case_id}'
- status: '{status.name}'"""
- )
+
+ # Write interaction header
+ stream.write(f"\n- id: '{case_id}'\n status: '{status.name}'")
+
+ # Write metadata if present
 meta = case.value.meta
 if meta is not None:
- # Start metadata block
- stream.write(f"""
- generation:
- time: {meta.generation.time}
- mode: {meta.generation.mode.value}
- components:""")
-
- # Write components
+ stream.write(
+ f"\n generation:\n"
+ f" time: {meta.generation.time}\n"
+ f" mode: {meta.generation.mode.value}\n"
+ f" components:"
+ )
+
 for kind, info in meta.components.items():
- stream.write(f"""
- {kind.value}:
- mode: '{info.mode.value}'""")
- # Write phase info
- stream.write("\n phase:")
- stream.write(f"\n name: '{meta.phase.name.value}'")
- stream.write("\n data: ")
-
- # Write phase-specific data
+ stream.write(f"\n {kind.value}:\n mode: '{info.mode.value}'")
+
+ stream.write(f"\n phase:\n name: '{meta.phase.name.value}'\n data: ")
+
 if isinstance(meta.phase.data, CoveragePhaseData):
- stream.write("""
- description: """)
+ stream.write("\n description: ")
 write_double_quoted(stream, meta.phase.data.description)
- stream.write("""
- location: """)
+ stream.write("\n location: ")
 write_double_quoted(stream, meta.phase.data.location)
- stream.write("""
- parameter: """)
+ stream.write("\n parameter: ")
 if meta.phase.data.parameter is not None:
 write_double_quoted(stream, meta.phase.data.parameter)
 else:
 stream.write("null")
- stream.write("""
- parameter_location: """)
+ stream.write("\n parameter_location: ")
 if meta.phase.data.parameter_location is not None:
 write_double_quoted(stream, meta.phase.data.parameter_location)
 else:
 stream.write("null")
 else:
- # Empty objects for these phases
 stream.write("{}")
 else:
- stream.write("null")
+ stream.write("\n metadata: null")
 
+ # Sanitize URL if needed
 if config.output.sanitization.enabled:
 uri = sanitize_url(interaction.request.uri, config=config.output.sanitization)
 else:
 uri = interaction.request.uri
+
 recorded_at = datetime.datetime.fromtimestamp(
 interaction.timestamp, datetime.timezone.utc
 ).isoformat()
+
+ stream.write(f"\n recorded_at: '{recorded_at}'")
+ write_checks(stream, checks)
 stream.write(
- f"""
- recorded_at: '{recorded_at}'{format_checks(checks)}
- request:
- uri: '{uri}'
- method: '{interaction.request.method}'
- headers:
- {format_headers(interaction.request.headers)}"""
+ f"\n request:\n uri: '{uri}'\n method: '{interaction.request.method}'\n headers:\n"
 )
- format_request_body(stream, interaction.request)
+ write_headers(stream, interaction.request.headers)
+ write_request_body(stream, interaction.request)
+
+ # Write response
 if interaction.response is not None:
 stream.write(
- f"""
- response:
- status:
- code: '{interaction.response.status_code}'
- message: {json.dumps(interaction.response.message)}
- elapsed: '{interaction.response.elapsed}'
- headers:
- {format_headers(interaction.response.headers)}
- """
- )
- format_response_body(stream, interaction.response)
- stream.write(
- f"""
- http_version: '{interaction.response.http_version}'"""
+ f"\n response:\n"
+ f" status:\n"
+ f" code: '{interaction.response.status_code}'\n"
+ f" message: {json.dumps(interaction.response.message)}\n"
+ f" elapsed: '{interaction.response.elapsed}'\n"
+ f" headers:\n"
 )
+ write_headers(stream, interaction.response.headers)
+ stream.write("\n")
+ write_response_body(stream, interaction.response)
+ stream.write(f"\n http_version: '{interaction.response.http_version}'")
 else:
- stream.write("""
- response: null""")
+ stream.write("\n response: null")
+
 current_id += 1
+ elif isinstance(item, Initialize):
+ stream.write(
+ f"command: '{get_command_representation()}'\n"
+ f"recorded_with: 'Schemathesis {SCHEMATHESIS_VERSION}'\n"
+ f"seed: {item.seed}\n"
+ f"http_interactions:"
+ )
 else:
 break
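Both cassette hunks follow the same refactor: the `format_*` helpers that built strings are replaced by `write_*` helpers that append directly to the output stream, and the frequent `Process` branch is now checked before `Initialize`. The removed docstring carries the rationale for composing YAML by hand at all: the string-based approach measured roughly 2.5% time overhead versus about 11.2% for `yaml.CDumper`, and the format is simple enough that manual composition beats driving `pyyaml`'s incremental emitter. A self-contained sketch of that overall shape, where the `Interaction` record and the `None` sentinel are illustrative rather than the real Schemathesis types:

```python
import io
import json
from dataclasses import dataclass, field
from queue import Queue
from typing import IO


@dataclass
class Interaction:
    """Illustrative stand-in for one recorded HTTP exchange."""

    case_id: str
    status: str
    headers: dict[str, list[str]] = field(default_factory=dict)


def write_headers(stream: IO, headers: dict[str, list[str]]) -> None:
    # YAML is composed manually; json.dumps yields a double-quoted string,
    # which is also a valid YAML scalar.
    for name, values in headers.items():
        stream.write(f'      "{name}":\n')
        for value in values:
            stream.write(f"        - {json.dumps(value)}\n")


def writer(stream: IO, queue: "Queue[Interaction | None]") -> None:
    stream.write("http_interactions:\n")
    while True:
        item = queue.get()
        if item is None:  # Sentinel: the producer is done
            break
        # One incremental append per interaction; nothing is buffered document-wide
        stream.write(f"- id: '{item.case_id}'\n  status: '{item.status}'\n  headers:\n")
        write_headers(stream, item.headers)


buffer = io.StringIO()
q: "Queue[Interaction | None]" = Queue()
q.put(Interaction("1", "SUCCESS", {"Content-Type": ["application/json"]}))
q.put(None)
writer(buffer, q)
print(buffer.getvalue())
```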
 
@@ -17,7 +17,7 @@ def get_type(schema: JsonSchema, *, _check_type: bool = False) -> list[str]:
 return [ty]
 if ty is ANY_TYPE:
 return list(ty)
- return list(ty)
+ return [t for t in ALL_TYPES if t in ty]
 
 
  def _get_type(schema: JsonSchema) -> list[str]:
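The new return value filters the input against a fixed order of JSON Schema type names instead of echoing the container it received, so callers get a deterministic list even when `type` arrives as a set or an arbitrarily ordered list. A small illustration, assuming a canonical tuple like the one below (the actual `ALL_TYPES` constant is defined elsewhere in Schemathesis and its exact order may differ):

```python
# Assumed ordering for illustration only
ALL_TYPES = ("null", "boolean", "integer", "number", "string", "array", "object")


def ordered_types(ty: set[str] | list[str]) -> list[str]:
    # Deterministic output regardless of the input container's iteration order
    return [t for t in ALL_TYPES if t in ty]


assert ordered_types({"string", "integer"}) == ["integer", "string"]
assert ordered_types(["object", "null"]) == ["null", "object"]
```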
@@ -1,4 +1,8 @@
- from typing import Any
+ from __future__ import annotations
+
+ import json
+ from functools import lru_cache
+ from typing import Any, Literal
 
 
  def setup() -> None:
@@ -9,6 +13,8 @@ def setup() -> None:
 from hypothesis.strategies._internal import collections, core
 from hypothesis.vendor import pretty
 from hypothesis_jsonschema import _canonicalise, _from_schema, _resolve
+ from hypothesis_jsonschema._canonicalise import SCHEMA_KEYS, SCHEMA_OBJECT_KEYS, merged
+ from hypothesis_jsonschema._resolve import LocalResolver
 
 from schemathesis.core import INTERNAL_BUFFER_SIZE
  from schemathesis.core.jsonschema.types import _get_type
@@ -35,11 +41,79 @@ def setup() -> None:
 # depending on the schema size (~300 seconds -> 4.5 seconds in one of the benchmarks)
 return None
 
+ class CacheableSchema:
+ """Cache schema by its JSON representation.
+
+ Canonicalisation is not required as schemas with the same JSON representation
+ will have the same validator.
+ """
+
+ __slots__ = ("schema", "encoded")
+
+ def __init__(self, schema: dict[str, Any]) -> None:
+ self.schema = schema
+ self.encoded = hash(json.dumps(schema, sort_keys=True))
+
+ def __eq__(self, other: "CacheableSchema") -> bool: # type: ignore
+ return self.encoded == other.encoded
+
+ def __hash__(self) -> int:
+ return self.encoded
+
+ SCHEMA_KEYS = frozenset(SCHEMA_KEYS)
+ SCHEMA_OBJECT_KEYS = frozenset(SCHEMA_OBJECT_KEYS)
+
+ @lru_cache()
+ def get_resolver(cache_key: CacheableSchema) -> LocalResolver:
+ """LRU resolver cache."""
+ return LocalResolver.from_schema(cache_key.schema)
+
+ def resolve_all_refs(
+ schema: Literal[True, False] | dict[str, Any],
+ *,
+ resolver: LocalResolver | None = None,
+ ) -> dict[str, Any]:
+ if schema is True:
+ return {}
+ if schema is False:
+ return {"not": {}}
+ if not schema:
+ return schema
+ if resolver is None:
+ resolver = get_resolver(CacheableSchema(schema))
+
+ _resolve_all_refs = resolve_all_refs
+
+ if "$ref" in schema:
+ s = dict(schema)
+ ref = s.pop("$ref")
+ url, resolved = resolver.resolve(ref)
+ resolver.push_scope(url)
+ try:
+ return merged([s, _resolve_all_refs(deepclone(resolved), resolver=resolver)]) # type: ignore
+ finally:
+ resolver.pop_scope()
+
+ for key, value in schema.items():
+ if key in SCHEMA_KEYS:
+ if isinstance(value, list):
+ schema[key] = [_resolve_all_refs(v, resolver=resolver) if isinstance(v, dict) else v for v in value]
+ elif isinstance(value, dict):
+ schema[key] = _resolve_all_refs(value, resolver=resolver)
+ if key in SCHEMA_OBJECT_KEYS:
+ schema[key] = {
+ k: _resolve_all_refs(v, resolver=resolver) if isinstance(v, dict) else v for k, v in value.items()
+ }
+ return schema
+
 root_core.RepresentationPrinter = RepresentationPrinter # type: ignore
 _resolve.deepcopy = deepclone # type: ignore
+ _resolve.resolve_all_refs = resolve_all_refs # type: ignore
 _from_schema.deepcopy = deepclone # type: ignore
 _from_schema.get_type = _get_type # type: ignore
+ _from_schema.resolve_all_refs = resolve_all_refs # type: ignore
 _canonicalise.get_type = _get_type # type: ignore
+ _canonicalise.CacheableSchema = CacheableSchema # type: ignore
 root_core.BUFFER_SIZE = INTERNAL_BUFFER_SIZE # type: ignore
 engine.BUFFER_SIZE = INTERNAL_BUFFER_SIZE
  collections.BUFFER_SIZE = INTERNAL_BUFFER_SIZE # type: ignore
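`CacheableSchema` exists because a plain `dict` schema is unhashable and therefore cannot key `functools.lru_cache` directly: hashing (and, in this diff, equality) is derived from the sorted JSON dump, so schemas with the same JSON representation share one cached resolver. A generic sketch of the pattern, independent of the `hypothesis_jsonschema` internals patched above:

```python
from __future__ import annotations

import json
from functools import lru_cache
from typing import Any


class HashableSchema:
    """Hashable wrapper so dict-based schemas can key an LRU cache."""

    __slots__ = ("schema", "_hash")

    def __init__(self, schema: dict[str, Any]) -> None:
        self.schema = schema
        # Equal JSON representations produce equal hashes
        self._hash = hash(json.dumps(schema, sort_keys=True))

    def __eq__(self, other: object) -> bool:
        # Mirrors the diff: equality is keyed on the hash, which is enough for cache lookups
        return isinstance(other, HashableSchema) and self._hash == other._hash

    def __hash__(self) -> int:
        return self._hash


@lru_cache(maxsize=None)
def build_resolver(key: HashableSchema) -> str:
    # Stand-in for the expensive LocalResolver.from_schema(...) call
    return f"resolver for {sorted(key.schema)}"


first = build_resolver(HashableSchema({"type": "object", "properties": {}}))
second = build_resolver(HashableSchema({"properties": {}, "type": "object"}))
assert first is second  # Same JSON representation -> same cached resolver
```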
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: schemathesis
- Version: 4.2.0
+ Version: 4.2.1
 Summary: Property-based testing framework for Open API and GraphQL based apps
 Project-URL: Documentation, https://schemathesis.readthedocs.io/en/stable/
 Project-URL: Changelog, https://github.com/schemathesis/schemathesis/blob/master/CHANGELOG.md
@@ -21,7 +21,7 @@ schemathesis/cli/commands/run/loaders.py,sha256=6j0ez7wduAUYbUT28ELKxMf-dYEWr_67
 schemathesis/cli/commands/run/validation.py,sha256=DQaMiBLN2tYT9hONvv8xnyPvNXZH768UlOdUxTd5kZs,9193
 schemathesis/cli/commands/run/handlers/__init__.py,sha256=TPZ3KdGi8m0fjlN0GjA31MAXXn1qI7uU4FtiDwroXZI,1915
 schemathesis/cli/commands/run/handlers/base.py,sha256=qUtDvtr3F6were_BznfnaPpMibGJMnQ5CA9aEzcIUBc,1306
- schemathesis/cli/commands/run/handlers/cassettes.py,sha256=Px1-xBw5t6tg8rzYNM-VBTpe6qvbUu_RsrYBG_RWGt8,19501
+ schemathesis/cli/commands/run/handlers/cassettes.py,sha256=LzvQp--Ub5MXF7etet7fQD0Ufloh1R0j2X1o9dT8Z4k,19253
 schemathesis/cli/commands/run/handlers/junitxml.py,sha256=qiFvM4-SlM67sep003SkLqPslzaEb4nOm3bkzw-DO-Q,2602
 schemathesis/cli/commands/run/handlers/output.py,sha256=TwK82zNpIZ7Q76ggTp8gcW2clzrw0WBmHFJMcvYL1nE,63927
 schemathesis/cli/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -72,7 +72,7 @@ schemathesis/core/jsonschema/__init__.py,sha256=gBZGsXIpK2EFfcp8x0b69dqzWAm2OeZH
 schemathesis/core/jsonschema/bundler.py,sha256=ECNAHrXl5nh52crm5Qu1QvRWVV2Vv9gU8H722oKA7k0,7711
 schemathesis/core/jsonschema/keywords.py,sha256=pjseXTfH9OItNs_Qq6ubkhNWQOrxTnwHmrP_jxrHeJU,631
 schemathesis/core/jsonschema/references.py,sha256=uB7-DGYhLFqbIAvuO-IDc9xSatoGw54FOYn4xE4qE6A,4667
- schemathesis/core/jsonschema/types.py,sha256=_B1Q6pFzLqjuLU6mR1JiHsRbuz_pnSmN5HNnbeZJtT0,1223
+ schemathesis/core/jsonschema/types.py,sha256=C7f9g8yKFuoxC5_0YNIh8QAyGU0-tj8pzTMfMDjjjVM,1248
 schemathesis/core/output/__init__.py,sha256=SiHqONFskXl73AtP5dV29L14nZoKo7B-IeG52KZB32M,1446
 schemathesis/core/output/sanitization.py,sha256=Ev3tae8dVwsYd7yVb2_1VBFYs92WFsQ4Eu1fGaymItE,2013
 schemathesis/engine/__init__.py,sha256=QaFE-FinaTAaarteADo2RRMJ-Sz6hZB9TzD5KjMinIA,706
@@ -98,7 +98,7 @@ schemathesis/generation/meta.py,sha256=tXhUZBEdpQMn68uMx1SW8Vv59Uf6Wl6yzs-VB9lu_
 schemathesis/generation/metrics.py,sha256=cZU5HdeAMcLFEDnTbNE56NuNq4P0N4ew-g1NEz5-kt4,2836
 schemathesis/generation/modes.py,sha256=Q1fhjWr3zxabU5qdtLvKfpMFZJAwlW9pnxgenjeXTyU,481
 schemathesis/generation/overrides.py,sha256=xI2djHsa42fzP32xpxgxO52INixKagf5DjDAWJYswM8,3890
- schemathesis/generation/hypothesis/__init__.py,sha256=j_lKp7loYRe63TCzIx5yozyt0Ub1ilSsXEP-2zfJ0Ok,2144
+ schemathesis/generation/hypothesis/__init__.py,sha256=Dfdz6_Wa7ez6GxrMBxnWQnLaNjPUweTxfi01jiFqso4,4900
 schemathesis/generation/hypothesis/builder.py,sha256=ZdY68aDGeZLLtIld288KF_O6ardFKZdFMBogwltTx2o,38362
 schemathesis/generation/hypothesis/examples.py,sha256=6eGaKUEC3elmKsaqfKj1sLvM8EHc-PWT4NRBq4NI0Rs,1409
 schemathesis/generation/hypothesis/given.py,sha256=sTZR1of6XaHAPWtHx2_WLlZ50M8D5Rjux0GmWkWjDq4,2337
@@ -172,8 +172,8 @@ schemathesis/transport/prepare.py,sha256=erYXRaxpQokIDzaIuvt_csHcw72iHfCyNq8VNEz
 schemathesis/transport/requests.py,sha256=wriRI9fprTplE_qEZLEz1TerX6GwkE3pwr6ZnU2o6vQ,10648
 schemathesis/transport/serialization.py,sha256=GwO6OAVTmL1JyKw7HiZ256tjV4CbrRbhQN0ep1uaZwI,11157
 schemathesis/transport/wsgi.py,sha256=kQtasFre6pjdJWRKwLA_Qb-RyQHCFNpaey9ubzlFWKI,5907
- schemathesis-4.2.0.dist-info/METADATA,sha256=kkIlvuG4NKfAwk6kzHRslKYeYaaFhfSYciy9777c4L0,8540
- schemathesis-4.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- schemathesis-4.2.0.dist-info/entry_points.txt,sha256=hiK3un-xfgPdwj9uj16YVDtTNpO128bmk0U82SMv8ZQ,152
- schemathesis-4.2.0.dist-info/licenses/LICENSE,sha256=2Ve4J8v5jMQAWrT7r1nf3bI8Vflk3rZVQefiF2zpxwg,1121
- schemathesis-4.2.0.dist-info/RECORD,,
+ schemathesis-4.2.1.dist-info/METADATA,sha256=66gKdf-zvd-gHePTQubZs_QnlCvuuC3fELYv4hHK8NY,8540
+ schemathesis-4.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ schemathesis-4.2.1.dist-info/entry_points.txt,sha256=hiK3un-xfgPdwj9uj16YVDtTNpO128bmk0U82SMv8ZQ,152
+ schemathesis-4.2.1.dist-info/licenses/LICENSE,sha256=2Ve4J8v5jMQAWrT7r1nf3bI8Vflk3rZVQefiF2zpxwg,1121
+ schemathesis-4.2.1.dist-info/RECORD,,