schemathesis 4.0.0a3__py3-none-any.whl → 4.0.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. schemathesis/cli/__init__.py +3 -3
  2. schemathesis/cli/commands/run/__init__.py +148 -94
  3. schemathesis/cli/commands/run/context.py +72 -2
  4. schemathesis/cli/commands/run/executor.py +32 -12
  5. schemathesis/cli/commands/run/filters.py +1 -0
  6. schemathesis/cli/commands/run/handlers/cassettes.py +27 -46
  7. schemathesis/cli/commands/run/handlers/junitxml.py +1 -1
  8. schemathesis/cli/commands/run/handlers/output.py +72 -16
  9. schemathesis/cli/commands/run/hypothesis.py +30 -19
  10. schemathesis/cli/commands/run/reports.py +72 -0
  11. schemathesis/cli/commands/run/validation.py +18 -12
  12. schemathesis/cli/ext/groups.py +42 -13
  13. schemathesis/cli/ext/options.py +15 -8
  14. schemathesis/core/errors.py +79 -11
  15. schemathesis/core/failures.py +2 -1
  16. schemathesis/core/transforms.py +1 -1
  17. schemathesis/engine/errors.py +8 -3
  18. schemathesis/engine/phases/stateful/_executor.py +1 -1
  19. schemathesis/engine/phases/unit/__init__.py +2 -3
  20. schemathesis/engine/phases/unit/_executor.py +16 -13
  21. schemathesis/errors.py +6 -2
  22. schemathesis/filters.py +8 -0
  23. schemathesis/generation/coverage.py +6 -1
  24. schemathesis/generation/stateful/state_machine.py +49 -3
  25. schemathesis/pytest/lazy.py +2 -3
  26. schemathesis/pytest/plugin.py +2 -3
  27. schemathesis/schemas.py +1 -1
  28. schemathesis/specs/openapi/checks.py +27 -10
  29. schemathesis/specs/openapi/expressions/__init__.py +22 -6
  30. schemathesis/specs/openapi/expressions/nodes.py +15 -21
  31. schemathesis/specs/openapi/expressions/parser.py +1 -1
  32. schemathesis/specs/openapi/parameters.py +0 -2
  33. schemathesis/specs/openapi/schemas.py +13 -13
  34. schemathesis/specs/openapi/stateful/__init__.py +96 -23
  35. schemathesis/specs/openapi/{links.py → stateful/links.py} +60 -16
  36. {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a4.dist-info}/METADATA +1 -1
  37. {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a4.dist-info}/RECORD +40 -39
  38. {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a4.dist-info}/WHEEL +0 -0
  39. {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a4.dist-info}/entry_points.txt +0 -0
  40. {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a4.dist-info}/licenses/LICENSE +0 -0
@@ -10,10 +10,11 @@ from schemathesis.cli.commands.run.context import ExecutionContext
  from schemathesis.cli.commands.run.events import LoadingFinished, LoadingStarted
  from schemathesis.cli.commands.run.handlers import display_handler_error
  from schemathesis.cli.commands.run.handlers.base import EventHandler
- from schemathesis.cli.commands.run.handlers.cassettes import CassetteConfig, CassetteWriter
+ from schemathesis.cli.commands.run.handlers.cassettes import CassetteWriter
  from schemathesis.cli.commands.run.handlers.junitxml import JunitXMLHandler
  from schemathesis.cli.commands.run.handlers.output import OutputHandler
  from schemathesis.cli.commands.run.loaders import AutodetectConfig, load_schema
+ from schemathesis.cli.commands.run.reports import ReportConfig, ReportFormat
  from schemathesis.cli.ext.fs import open_file
  from schemathesis.core.errors import LoaderError
  from schemathesis.core.output import OutputConfig
@@ -43,8 +44,7 @@ class RunConfig:
      wait_for_schema: float | None
      rate_limit: str | None
      output: OutputConfig
-     junit_xml: click.utils.LazyFile | None
-     cassette: CassetteConfig | None
+     report: ReportConfig | None
      args: list[str]
      params: dict[str, Any]

@@ -93,27 +93,47 @@ def into_event_stream(config: RunConfig) -> EventGenerator:
          yield FatalError(exception=exc)


- def _execute(event_stream: EventGenerator, config: RunConfig) -> None:
+ def initialize_handlers(config: RunConfig) -> list[EventHandler]:
+     """Create event handlers based on run configuration."""
      handlers: list[EventHandler] = []
-     if config.junit_xml is not None:
-         open_file(config.junit_xml)
-         handlers.append(JunitXMLHandler(config.junit_xml))
-     if config.cassette is not None:
-         open_file(config.cassette.path)
-         handlers.append(CassetteWriter(config=config.cassette))
+
+     if config.report is not None:
+         if ReportFormat.JUNIT in config.report.formats:
+             path = config.report.get_path(ReportFormat.JUNIT)
+             open_file(path)
+             handlers.append(JunitXMLHandler(path))
+
+         for format in (ReportFormat.VCR, ReportFormat.HAR):
+             if format in config.report.formats:
+                 path = config.report.get_path(format)
+                 open_file(path)
+                 handlers.append(
+                     CassetteWriter(
+                         format=format,
+                         path=path,
+                         sanitize_output=config.report.sanitize_output,
+                         preserve_bytes=config.report.preserve_bytes,
+                     )
+                 )
+
      for custom_handler in CUSTOM_HANDLERS:
          handlers.append(custom_handler(*config.args, **config.params))
+
      handlers.append(
          OutputHandler(
              workers_num=config.engine.execution.workers_num,
              seed=config.engine.execution.seed,
              rate_limit=config.rate_limit,
              wait_for_schema=config.wait_for_schema,
-             cassette_config=config.cassette,
-             junit_xml_file=config.junit_xml.name if config.junit_xml is not None else None,
+             report_config=config.report,
          )
      )

+     return handlers
+
+
+ def _execute(event_stream: EventGenerator, config: RunConfig) -> None:
+     handlers = initialize_handlers(config)
      ctx = ExecutionContext(output_config=config.output, seed=config.engine.execution.seed)

      def shutdown() -> None:
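
Note: handler construction is now driven entirely by `ReportConfig` instead of the separate `junit_xml`/`cassette` options. A minimal sketch of the configuration object this branch consumes (the chosen formats are illustrative, not defaults for every run):

```python
# Illustrative sketch only - initialize_handlers() above receives this via RunConfig.report.
from schemathesis.cli.commands.run.reports import ReportConfig, ReportFormat

report = ReportConfig(formats=[ReportFormat.JUNIT, ReportFormat.VCR])

# With no explicit paths, reports default to ./schemathesis-report/junit.xml and vcr.yaml,
# producing one JunitXMLHandler and one CassetteWriter in the loop above.
for fmt in report.formats:
    print(fmt.value, "->", report.get_path(fmt).name)
```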
@@ -27,6 +27,7 @@ def _with_filter(*, by: str, mode: Literal["include", "exclude"], modifier: Lite
          help=help_text,
          type=str,
          multiple=modifier is None,
+         hidden=True,
      )


@@ -1,7 +1,6 @@
  from __future__ import annotations

  import datetime
- import enum
  import json
  import sys
  import threading
@@ -11,11 +10,12 @@ from queue import Queue
  from typing import IO, Callable, Iterator
  from urllib.parse import parse_qsl, urlparse

- import click
  import harfile
+ from click.utils import LazyFile

  from schemathesis.cli.commands.run.context import ExecutionContext
  from schemathesis.cli.commands.run.handlers.base import EventHandler
+ from schemathesis.cli.commands.run.reports import ReportFormat
  from schemathesis.core.output.sanitization import sanitize_url, sanitize_value
  from schemathesis.core.transforms import deepclone
  from schemathesis.core.transport import Response
@@ -28,43 +28,26 @@ from schemathesis.generation.meta import CoveragePhaseData
  WRITER_WORKER_JOIN_TIMEOUT = 1


- class CassetteFormat(str, enum.Enum):
-     """Type of the cassette."""
-
-     VCR = "vcr"
-     HAR = "har"
-
-     @classmethod
-     def from_str(cls, value: str) -> CassetteFormat:
-         try:
-             return cls[value.upper()]
-         except KeyError:
-             available_formats = ", ".join(cls)
-             raise ValueError(
-                 f"Invalid value for cassette format: {value}. Available formats: {available_formats}"
-             ) from None
-
-
- @dataclass
- class CassetteConfig:
-     path: click.utils.LazyFile
-     format: CassetteFormat = CassetteFormat.VCR
-     preserve_exact_body_bytes: bool = False
-     sanitize_output: bool = True
-
-
  @dataclass
  class CassetteWriter(EventHandler):
      """Write network interactions to a cassette."""

-     config: CassetteConfig
+     format: ReportFormat
+     path: LazyFile
+     sanitize_output: bool = True
+     preserve_bytes: bool = False
      queue: Queue = field(default_factory=Queue)
      worker: threading.Thread = field(init=False)

      def __post_init__(self) -> None:
-         kwargs = {"config": self.config, "queue": self.queue}
+         kwargs = {
+             "path": self.path,
+             "sanitize_output": self.sanitize_output,
+             "preserve_bytes": self.preserve_bytes,
+             "queue": self.queue,
+         }
          writer: Callable
-         if self.config.format == CassetteFormat.HAR:
+         if self.format == ReportFormat.HAR:
              writer = har_writer
          else:
              writer = vcr_writer
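
The old `CassetteConfig` fields map one-to-one onto the new flat `CassetteWriter` arguments. A hedged sketch of that mapping (`make_cassette_writer` is a hypothetical helper for illustration; the CLI does this inside `initialize_handlers`):

```python
# Sketch of the CassetteConfig -> CassetteWriter field mapping introduced here.
from click.utils import LazyFile

from schemathesis.cli.commands.run.handlers.cassettes import CassetteWriter
from schemathesis.cli.commands.run.reports import ReportFormat


def make_cassette_writer(path: LazyFile, har: bool = False) -> CassetteWriter:
    return CassetteWriter(
        format=ReportFormat.HAR if har else ReportFormat.VCR,  # was CassetteConfig.format
        path=path,                                             # was CassetteConfig.path
        sanitize_output=True,                                  # was CassetteConfig.sanitize_output
        preserve_bytes=False,                                  # was CassetteConfig.preserve_exact_body_bytes
    )
```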
@@ -120,7 +103,7 @@ def get_command_representation() -> str:
      return f"st {args}"


- def vcr_writer(config: CassetteConfig, queue: Queue) -> None:
+ def vcr_writer(path: LazyFile, sanitize_output: bool, preserve_bytes: bool, queue: Queue) -> None:
      """Write YAML to a file in an incremental manner.

      This implementation doesn't use `pyyaml` package and composes YAML manually as string due to the following reasons:
@@ -131,12 +114,12 @@ def vcr_writer(config: CassetteConfig, queue: Queue) -> None:
      providing tags, anchors to have incremental writing, with primitive types it is much simpler.
      """
      current_id = 1
-     stream = config.path.open()
+     stream = path.open()

      def format_header_values(values: list[str]) -> str:
          return "\n".join(f" - {json.dumps(v)}" for v in values)

-     if config.sanitize_output:
+     if sanitize_output:

          def format_headers(headers: dict[str, list[str]]) -> str:
              headers = deepclone(headers)
@@ -162,7 +145,7 @@ def vcr_writer(config: CassetteConfig, queue: Queue) -> None:
  checks:
  {items}"""

-     if config.preserve_exact_body_bytes:
+     if preserve_bytes:

          def format_request_body(output: IO, request: Request) -> None:
              if request.encoded_body is not None:
@@ -283,7 +266,7 @@ http_interactions:"""
                  else:
                      stream.write("null")

-                 if config.sanitize_output:
+                 if sanitize_output:
                      uri = sanitize_url(interaction.request.uri)
                  else:
                      uri = interaction.request.uri
@@ -321,7 +304,7 @@ http_interactions:"""
                  current_id += 1
          else:
              break
-     config.path.close()
+     path.close()


  def write_double_quoted(stream: IO, text: str | None) -> None:
@@ -367,13 +350,13 @@ def write_double_quoted(stream: IO, text: str | None) -> None:
      stream.write('"')


- def har_writer(config: CassetteConfig, queue: Queue) -> None:
-     with harfile.open(config.path) as har:
+ def har_writer(path: LazyFile, sanitize_output: bool, preserve_bytes: bool, queue: Queue) -> None:
+     with harfile.open(path) as har:
          while True:
              item = queue.get()
              if isinstance(item, Process):
                  for interaction in item.recorder.interactions.values():
-                     if config.sanitize_output:
+                     if sanitize_output:
                          uri = sanitize_url(interaction.request.uri)
                      else:
                          uri = interaction.request.uri
@@ -382,7 +365,7 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                          post_data = harfile.PostData(
                              mimeType=interaction.request.headers.get("Content-Type", [""])[0],
                              text=interaction.request.encoded_body
-                             if config.preserve_exact_body_bytes
+                             if preserve_bytes
                              else interaction.request.body.decode("utf-8", "replace"),
                          )
                      else:
@@ -393,16 +376,14 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                              size=interaction.response.body_size or 0,
                              mimeType=content_type,
                              text=interaction.response.encoded_body
-                             if config.preserve_exact_body_bytes
+                             if preserve_bytes
                              else interaction.response.content.decode("utf-8", "replace")
                              if interaction.response.content is not None
                              else None,
-                             encoding="base64"
-                             if interaction.response.content is not None and config.preserve_exact_body_bytes
-                             else None,
+                             encoding="base64" if interaction.response.content is not None and preserve_bytes else None,
                          )
                          http_version = f"HTTP/{interaction.response.http_version}"
-                         if config.sanitize_output:
+                         if sanitize_output:
                              headers = deepclone(interaction.response.headers)
                              sanitize_value(headers)
                          else:
@@ -424,7 +405,7 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                          time = 0
                          http_version = ""

-                     if config.sanitize_output:
+                     if sanitize_output:
                          headers = deepclone(interaction.request.headers)
                          sanitize_value(headers)
                      else:
@@ -25,7 +25,7 @@ class JunitXMLHandler(EventHandler):
              test_case.elapsed_sec += event.elapsed_time
              if event.status == Status.FAILURE:
                  add_failure(test_case, ctx.statistic.failures[label].values(), ctx)
-             elif event.status == Status.SKIP:
+             elif event.status == Status.SKIP and event.skip_reason is not None:
                  test_case.add_skipped_info(output=event.skip_reason)
          elif isinstance(event, events.NonFatalError):
              test_case = self.get_or_create_test_case(event.label)
@@ -1,8 +1,10 @@
  from __future__ import annotations

  import os
+ import textwrap
  import time
  from dataclasses import dataclass, field
+ from json.decoder import JSONDecodeError
  from types import GeneratorType
  from typing import TYPE_CHECKING, Any, Generator, Iterable

@@ -11,11 +13,12 @@ import click
  from schemathesis.cli.commands.run.context import ExecutionContext, GroupedFailures
  from schemathesis.cli.commands.run.events import LoadingFinished, LoadingStarted
  from schemathesis.cli.commands.run.handlers.base import EventHandler
- from schemathesis.cli.commands.run.handlers.cassettes import CassetteConfig
+ from schemathesis.cli.commands.run.reports import ReportConfig, ReportFormat
  from schemathesis.cli.constants import ISSUE_TRACKER_URL
  from schemathesis.cli.core import get_terminal_width
  from schemathesis.core.errors import LoaderError, LoaderErrorKind, format_exception, split_traceback
  from schemathesis.core.failures import MessageBlock, Severity, format_failures
+ from schemathesis.core.output import prepare_response_payload
  from schemathesis.core.result import Err, Ok
  from schemathesis.core.version import SCHEMATHESIS_VERSION
  from schemathesis.engine import Status, events
@@ -32,6 +35,8 @@ if TYPE_CHECKING:
      from rich.progress import Progress, TaskID
      from rich.text import Text

+     from schemathesis.generation.stateful.state_machine import ExtractionFailure
+
  IO_ENCODING = os.getenv("PYTHONIOENCODING", "utf-8")
  DISCORD_LINK = "https://discord.gg/R9ASRAmHnA"

@@ -100,7 +105,7 @@ def display_failures_for_single_test(ctx: ExecutionContext, label: str, checks:


  VERIFY_URL_SUGGESTION = "Verify that the URL points directly to the Open API schema or GraphQL endpoint"
- DISABLE_SSL_SUGGESTION = f"Bypass SSL verification with {bold('`--request-tls-verify=false`')}."
+ DISABLE_SSL_SUGGESTION = f"Bypass SSL verification with {bold('`--tls-verify=false`')}."
  LOADER_ERROR_SUGGESTIONS = {
      # SSL-specific connection issue
      LoaderErrorKind.CONNECTION_SSL: DISABLE_SSL_SUGGESTION,
@@ -379,6 +384,7 @@ class UnitTestProgressManager:

      def __init__(
          self,
+         *,
          console: Console,
          title: str,
          total: int,
@@ -437,6 +443,7 @@ class UnitTestProgressManager:
              Status.FAILURE: 0,
              Status.SKIP: 0,
              Status.ERROR: 0,
+             Status.INTERRUPTED: 0,
          }
          self._update_stats_display()

@@ -453,8 +460,8 @@ class UnitTestProgressManager:
              parts.append(f"❌ {self.stats[Status.FAILURE]:{width}d} failed")
          if self.stats[Status.ERROR]:
              parts.append(f"🚫 {self.stats[Status.ERROR]:{width}d} errors")
-         if self.stats[Status.SKIP]:
-             parts.append(f"⏭️ {self.stats[Status.SKIP]:{width}d} skipped")
+         if self.stats[Status.SKIP] or self.stats[Status.INTERRUPTED]:
+             parts.append(f"⏭️ {self.stats[Status.SKIP] + self.stats[Status.INTERRUPTED]:{width}d} skipped")
          return " ".join(parts)

      def _update_stats_display(self) -> None:
@@ -789,8 +796,7 @@ class OutputHandler(EventHandler):

      statistic: ApiStatistic | None = None
      skip_reasons: list[str] = field(default_factory=list)
-     cassette_config: CassetteConfig | None = None
-     junit_xml_file: str | None = None
+     report_config: ReportConfig | None = None
      warnings: WarningData = field(default_factory=WarningData)
      errors: list[events.NonFatalError] = field(default_factory=list)
      phases: dict[PhaseName, tuple[Status, PhaseSkipReason | None]] = field(
@@ -865,7 +871,8 @@ class OutputHandler(EventHandler):

          table.add_row("Base URL:", event.base_url)
          table.add_row("Specification:", event.specification.name)
-         table.add_row("Operations:", str(event.statistic.operations.total))
+         statistic = event.statistic.operations
+         table.add_row("Operations:", f"{statistic.selected} selected / {statistic.total} total")

          message = Padding(table, BLOCK_PADDING)
          self.console.print(message)
@@ -892,7 +899,7 @@ class OutputHandler(EventHandler):
          self.unit_tests_manager = UnitTestProgressManager(
              console=self.console,
              title="Unit tests",
-             total=self.statistic.operations.total,
+             total=self.statistic.operations.selected,
          )
          self.unit_tests_manager.start()

@@ -1165,6 +1172,54 @@ class OutputHandler(EventHandler):
          )
          click.echo()

+     def display_stateful_failures(self, ctx: ExecutionContext) -> None:
+         display_section_name("Stateful tests")
+
+         click.echo("\nFailed to extract data from response:")
+
+         grouped: dict[str, list[ExtractionFailure]] = {}
+         for failure in ctx.statistic.extraction_failures:
+             grouped.setdefault(failure.id, []).append(failure)
+
+         for idx, (transition_id, failures) in enumerate(grouped.items(), 1):
+             for failure in failures:
+                 click.echo(f"\n {idx}. Test Case ID: {failure.case_id}\n")
+                 click.echo(f" {transition_id}")
+
+                 indent = " "
+                 if failure.error:
+                     if isinstance(failure.error, JSONDecodeError):
+                         click.echo(f"\n{indent}Failed to parse JSON from response")
+                     else:
+                         click.echo(f"\n{indent}{failure.error.__class__.__name__}: {failure.error}")
+                 else:
+                     description = (
+                         f"\n{indent}Could not resolve parameter `{failure.parameter_name}` via `{failure.expression}`"
+                     )
+                     prefix = "$response.body"
+                     if failure.expression.startswith(prefix):
+                         description += f"\n{indent}Path `{failure.expression[len(prefix) :]}` not found in response"
+                     click.echo(description)
+
+                 click.echo()
+
+                 for case, response in reversed(failure.history):
+                     curl = case.as_curl_command(headers=dict(response.request.headers), verify=response.verify)
+                     click.echo(f"{indent}[{response.status_code}] {curl}")
+
+                 response = failure.response
+
+                 if response.content is None or not response.content:
+                     click.echo(f"\n{indent}<EMPTY>")
+                 else:
+                     try:
+                         payload = prepare_response_payload(response.text, config=ctx.output_config)
+                         click.echo(textwrap.indent(f"\n{payload}", prefix=indent))
+                     except UnicodeDecodeError:
+                         click.echo(f"\n{indent}<BINARY>")
+
+                 click.echo()
+
      def display_api_operations(self, ctx: ExecutionContext) -> None:
          assert self.statistic is not None
          click.echo(_style("API Operations:", bold=True))
@@ -1313,14 +1368,13 @@ class OutputHandler(EventHandler):
          display_section_name(message, fg=color)

      def display_reports(self) -> None:
-         reports = []
-         if self.cassette_config is not None:
-             format_name = self.cassette_config.format.name.upper()
-             reports.append((format_name, self.cassette_config.path.name))
-         if self.junit_xml_file is not None:
-             reports.append(("JUnit XML", self.junit_xml_file))
-
-         if reports:
+         if self.report_config is not None:
+             reports = [
+                 (format.value.upper(), self.report_config.get_path(format).name)
+                 for format in ReportFormat
+                 if format in self.report_config.formats
+             ]
+
              click.echo(_style("Reports:", bold=True))
              for report_type, path in reports:
                  click.echo(_style(f" - {report_type}: {path}"))
@@ -1356,6 +1410,8 @@ class OutputHandler(EventHandler):
              self.display_warnings()
          if GLOBAL_EXPERIMENTS.enabled:
              self.display_experiments()
+         if ctx.statistic.extraction_failures:
+             self.display_stateful_failures(ctx)
          display_section_name("SUMMARY")
          click.echo()

@@ -1,6 +1,6 @@
  from __future__ import annotations

- from enum import IntEnum, unique
+ from enum import Enum, unique
  from typing import TYPE_CHECKING, Any

  import click
@@ -16,12 +16,11 @@ HYPOTHESIS_IN_MEMORY_DATABASE_IDENTIFIER = ":memory:"


  @unique
- class Phase(IntEnum):
-     explicit = 0  #: controls whether explicit examples are run.
-     reuse = 1  #: controls whether previous examples will be reused.
-     generate = 2  #: controls whether new examples will be generated.
-     target = 3  #: controls whether examples will be mutated for targeting.
-     shrink = 4  #: controls whether examples will be shrunk.
+ class Phase(str, Enum):
+     explicit = "explicit"  #: controls whether explicit examples are run.
+     reuse = "reuse"  #: controls whether previous examples will be reused.
+     generate = "generate"  #: controls whether new examples will be generated.
+     target = "target"  #: controls whether examples will be mutated for targeting.
      # The `explain` phase is not supported

      def as_hypothesis(self) -> hypothesis.Phase:
@@ -30,20 +29,23 @@ class Phase(IntEnum):
          return Phase[self.name]

      @staticmethod
-     def filter_from_all(variants: list[Phase]) -> list[hypothesis.Phase]:
+     def filter_from_all(variants: list[Phase], no_shrink: bool) -> list[hypothesis.Phase]:
          from hypothesis import Phase

-         return list(set(Phase) - {Phase.explain} - set(variants))
+         phases = set(Phase) - {Phase.explain} - set(variants)
+         if no_shrink:
+             return list(phases - {Phase.shrink})
+         return list(phases)


  @unique
- class HealthCheck(IntEnum):
+ class HealthCheck(str, Enum):
      # We remove not relevant checks
-     data_too_large = 1
-     filter_too_much = 2
-     too_slow = 3
-     large_base_example = 7
-     all = 8
+     data_too_large = "data_too_large"
+     filter_too_much = "filter_too_much"
+     too_slow = "too_slow"
+     large_base_example = "large_base_example"
+     all = "all"

      def as_hypothesis(self) -> list[hypothesis.HealthCheck]:
          from hypothesis import HealthCheck
@@ -64,14 +66,23 @@ def prepare_health_checks(


  def prepare_phases(
-     hypothesis_phases: list[Phase] | None, hypothesis_no_phases: list[Phase] | None
+     hypothesis_phases: list[Phase] | None,
+     hypothesis_no_phases: list[Phase] | None,
+     no_shrink: bool = False,
  ) -> list[hypothesis.Phase] | None:
+     from hypothesis import Phase as HypothesisPhase
+
      if hypothesis_phases is not None and hypothesis_no_phases is not None:
          raise click.UsageError(PHASES_INVALID_USAGE_MESSAGE)
      if hypothesis_phases:
-         return [phase.as_hypothesis() for phase in hypothesis_phases]
-     if hypothesis_no_phases:
-         return Phase.filter_from_all(hypothesis_no_phases)
+         phases = [phase.as_hypothesis() for phase in hypothesis_phases]
+         if not no_shrink:
+             phases.append(HypothesisPhase.shrink)
+         return phases
+     elif hypothesis_no_phases:
+         return Phase.filter_from_all(hypothesis_no_phases, no_shrink)
+     elif no_shrink:
+         return Phase.filter_from_all([], no_shrink)
      return None

@@ -0,0 +1,72 @@
+ from __future__ import annotations
+
+ from enum import Enum
+ from pathlib import Path
+
+ from click.utils import LazyFile
+
+ DEFAULT_REPORT_DIRECTORY = Path("./schemathesis-report")
+
+
+ class ReportFormat(str, Enum):
+     """Available report formats."""
+
+     JUNIT = "junit"
+     VCR = "vcr"
+     HAR = "har"
+
+     @property
+     def extension(self) -> str:
+         """File extension for this format."""
+         return {
+             self.JUNIT: "xml",
+             self.VCR: "yaml",
+             self.HAR: "json",
+         }[self]
+
+
+ class ReportConfig:
+     """Configuration for test report generation."""
+
+     __slots__ = (
+         "formats",
+         "directory",
+         "junit_path",
+         "vcr_path",
+         "har_path",
+         "preserve_bytes",
+         "sanitize_output",
+     )
+
+     def __init__(
+         self,
+         formats: list[ReportFormat] | None = None,
+         directory: Path = DEFAULT_REPORT_DIRECTORY,
+         *,
+         junit_path: LazyFile | None = None,
+         vcr_path: LazyFile | None = None,
+         har_path: LazyFile | None = None,
+         preserve_bytes: bool = False,
+         sanitize_output: bool = True,
+     ) -> None:
+         self.formats = formats or []
+         # Auto-enable formats when paths are specified
+         if junit_path and ReportFormat.JUNIT not in self.formats:
+             self.formats.append(ReportFormat.JUNIT)
+         if vcr_path and ReportFormat.VCR not in self.formats:
+             self.formats.append(ReportFormat.VCR)
+         if har_path and ReportFormat.HAR not in self.formats:
+             self.formats.append(ReportFormat.HAR)
+         self.directory = directory
+         self.junit_path = junit_path
+         self.vcr_path = vcr_path
+         self.har_path = har_path
+         self.preserve_bytes = preserve_bytes
+         self.sanitize_output = sanitize_output
+
+     def get_path(self, format: ReportFormat) -> LazyFile:
+         """Get the final path for a specific format."""
+         custom_path = getattr(self, f"{format.value}_path")
+         if custom_path is not None:
+             return custom_path
+         return LazyFile(self.directory / f"{format.value}.{format.extension}", mode="w", encoding="utf-8")
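
Two behaviors worth noting in this new module: passing an explicit per-format path auto-enables that format, and `get_path` otherwise falls back to `<directory>/<format>.<extension>`. A small sketch (file names are placeholders):

```python
# Sketch of ReportConfig behavior as defined above.
from pathlib import Path

from click.utils import LazyFile

from schemathesis.cli.commands.run.reports import ReportConfig, ReportFormat

config = ReportConfig(
    formats=[ReportFormat.HAR],
    directory=Path("reports"),
    junit_path=LazyFile("ci-junit.xml", mode="w", encoding="utf-8"),
)

assert ReportFormat.JUNIT in config.formats               # auto-enabled by junit_path
assert config.get_path(ReportFormat.JUNIT).name == "ci-junit.xml"
print(config.get_path(ReportFormat.HAR).name)             # reports/har.json by default
```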