schemathesis 3.21.2__py3-none-any.whl → 3.22.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- schemathesis/__init__.py +1 -1
- schemathesis/_compat.py +2 -18
- schemathesis/_dependency_versions.py +1 -6
- schemathesis/_hypothesis.py +15 -12
- schemathesis/_lazy_import.py +3 -2
- schemathesis/_xml.py +12 -11
- schemathesis/auths.py +88 -81
- schemathesis/checks.py +4 -4
- schemathesis/cli/__init__.py +202 -171
- schemathesis/cli/callbacks.py +29 -32
- schemathesis/cli/cassettes.py +25 -25
- schemathesis/cli/context.py +18 -12
- schemathesis/cli/junitxml.py +2 -2
- schemathesis/cli/options.py +10 -11
- schemathesis/cli/output/default.py +64 -34
- schemathesis/code_samples.py +10 -10
- schemathesis/constants.py +1 -1
- schemathesis/contrib/unique_data.py +2 -2
- schemathesis/exceptions.py +55 -42
- schemathesis/extra/_aiohttp.py +2 -2
- schemathesis/extra/_flask.py +2 -2
- schemathesis/extra/_server.py +3 -2
- schemathesis/extra/pytest_plugin.py +10 -10
- schemathesis/failures.py +16 -16
- schemathesis/filters.py +40 -41
- schemathesis/fixups/__init__.py +4 -3
- schemathesis/fixups/fast_api.py +5 -4
- schemathesis/generation/__init__.py +16 -4
- schemathesis/hooks.py +25 -25
- schemathesis/internal/jsonschema.py +4 -3
- schemathesis/internal/transformation.py +3 -2
- schemathesis/lazy.py +39 -31
- schemathesis/loaders.py +8 -8
- schemathesis/models.py +128 -126
- schemathesis/parameters.py +6 -5
- schemathesis/runner/__init__.py +107 -81
- schemathesis/runner/events.py +37 -26
- schemathesis/runner/impl/core.py +86 -81
- schemathesis/runner/impl/solo.py +19 -15
- schemathesis/runner/impl/threadpool.py +40 -22
- schemathesis/runner/serialization.py +67 -40
- schemathesis/sanitization.py +18 -20
- schemathesis/schemas.py +83 -72
- schemathesis/serializers.py +39 -30
- schemathesis/service/ci.py +20 -21
- schemathesis/service/client.py +29 -9
- schemathesis/service/constants.py +1 -0
- schemathesis/service/events.py +2 -2
- schemathesis/service/hosts.py +8 -7
- schemathesis/service/metadata.py +5 -0
- schemathesis/service/models.py +22 -4
- schemathesis/service/report.py +15 -15
- schemathesis/service/serialization.py +23 -27
- schemathesis/service/usage.py +8 -7
- schemathesis/specs/graphql/loaders.py +31 -24
- schemathesis/specs/graphql/nodes.py +3 -2
- schemathesis/specs/graphql/scalars.py +26 -2
- schemathesis/specs/graphql/schemas.py +38 -34
- schemathesis/specs/openapi/_hypothesis.py +62 -44
- schemathesis/specs/openapi/checks.py +10 -10
- schemathesis/specs/openapi/converter.py +10 -9
- schemathesis/specs/openapi/definitions.py +2 -2
- schemathesis/specs/openapi/examples.py +22 -21
- schemathesis/specs/openapi/expressions/nodes.py +5 -4
- schemathesis/specs/openapi/expressions/parser.py +7 -6
- schemathesis/specs/openapi/filters.py +6 -6
- schemathesis/specs/openapi/formats.py +2 -2
- schemathesis/specs/openapi/links.py +19 -21
- schemathesis/specs/openapi/loaders.py +133 -78
- schemathesis/specs/openapi/negative/__init__.py +16 -11
- schemathesis/specs/openapi/negative/mutations.py +11 -10
- schemathesis/specs/openapi/parameters.py +20 -19
- schemathesis/specs/openapi/references.py +21 -20
- schemathesis/specs/openapi/schemas.py +97 -84
- schemathesis/specs/openapi/security.py +25 -24
- schemathesis/specs/openapi/serialization.py +20 -23
- schemathesis/specs/openapi/stateful/__init__.py +12 -11
- schemathesis/specs/openapi/stateful/links.py +7 -7
- schemathesis/specs/openapi/utils.py +4 -3
- schemathesis/specs/openapi/validation.py +3 -2
- schemathesis/stateful/__init__.py +15 -16
- schemathesis/stateful/state_machine.py +9 -9
- schemathesis/targets.py +3 -3
- schemathesis/throttling.py +2 -2
- schemathesis/transports/auth.py +2 -2
- schemathesis/transports/content_types.py +5 -0
- schemathesis/transports/headers.py +3 -2
- schemathesis/transports/responses.py +1 -1
- schemathesis/utils.py +7 -10
- {schemathesis-3.21.2.dist-info → schemathesis-3.22.1.dist-info}/METADATA +12 -13
- schemathesis-3.22.1.dist-info/RECORD +130 -0
- schemathesis-3.21.2.dist-info/RECORD +0 -130
- {schemathesis-3.21.2.dist-info → schemathesis-3.22.1.dist-info}/WHEEL +0 -0
- {schemathesis-3.21.2.dist-info → schemathesis-3.22.1.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.21.2.dist-info → schemathesis-3.22.1.dist-info}/licenses/LICENSE +0 -0
schemathesis/cli/output/default.py CHANGED
@@ -7,12 +7,12 @@ import textwrap
 import time
 from itertools import groupby
 from queue import Queue
-from typing import Any,
+from typing import Any, Generator, cast

 import click
+from importlib import metadata

 from ... import service
-from ..._compat import metadata
 from ...code_samples import CodeSampleStyle
 from ...constants import (
     DISCORD_LINK,
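The first hunk swaps the `schemathesis._compat.metadata` shim for the standard library's `importlib.metadata`, available since Python 3.8. For orientation, a minimal stand-alone sketch of the kind of version lookup this enables (package names are illustrative, not taken from the diff):

    # Minimal sketch: resolve installed package versions via the standard library.
    from importlib import metadata

    def collect_versions(*packages: str) -> dict[str, str]:
        versions = {}
        for package in packages:
            try:
                versions[package] = metadata.version(package)
            except metadata.PackageNotFoundError:
                # The distribution is not installed in the current environment
                versions[package] = "not installed"
        return versions

    print(collect_versions("schemathesis", "hypothesis"))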
@@ -48,7 +48,7 @@ def display_section_name(title: str, separator: str = "=", **kwargs: Any) -> Non
     click.secho(message, **kwargs)


-def display_subsection(result: SerializedTestResult, color:
+def display_subsection(result: SerializedTestResult, color: str | None = "red") -> None:
     display_section_name(result.verbose_name, "_", fg=color)


@@ -87,7 +87,7 @@ def display_summary(event: events.Finished) -> None:
     display_section_name(message, fg=color)


-def get_summary_message_parts(event: events.Finished) ->
+def get_summary_message_parts(event: events.Finished) -> list[str]:
     parts = []
     passed = event.passed_count
     if passed:
@@ -104,7 +104,7 @@ def get_summary_message_parts(event: events.Finished) -> List[str]:
     return parts


-def get_summary_output(event: events.Finished) ->
+def get_summary_output(event: events.Finished) -> tuple[str, str]:
     parts = get_summary_message_parts(event)
     if not parts:
         message = "Empty test suite"
@@ -120,7 +120,7 @@ def get_summary_output(event: events.Finished) -> Tuple[str, str]:
     return message, color


-def display_hypothesis_output(hypothesis_output:
+def display_hypothesis_output(hypothesis_output: list[str]) -> None:
     """Show falsifying examples from Hypothesis output if there are any."""
     if hypothesis_output:
         display_section_name("HYPOTHESIS OUTPUT")
@@ -147,9 +147,9 @@ def display_errors(context: ExecutionContext, event: events.Finished) -> None:
         should_display_full_traceback_message |= display_single_error(context, result)
     if event.generic_errors:
         display_generic_errors(context, event.generic_errors)
-    if should_display_full_traceback_message and not context.
+    if should_display_full_traceback_message and not context.show_trace:
         click.secho(
-            "\nAdd this option to your command line parameters to see full tracebacks: --show-
+            "\nAdd this option to your command line parameters to see full tracebacks: --show-trace",
             fg="red",
         )
         click.secho(
@@ -166,7 +166,7 @@ def display_single_error(context: ExecutionContext, result: SerializedTestResult
     return should_display_full_traceback_message


-def display_generic_errors(context: ExecutionContext, errors:
+def display_generic_errors(context: ExecutionContext, errors: list[SerializedError]) -> None:
     for error in errors:
         display_section_name(error.title or "Generic error", "_", fg="red")
         _display_error(context, error)
@@ -181,6 +181,7 @@ def display_full_traceback_message(error: SerializedError) -> bool:
             "requests.exceptions",
             "SerializationNotPossible",
             "hypothesis.errors.FailedHealthCheck",
+            "hypothesis.errors.InvalidArgument: Scalar ",
         )
     )

@@ -207,8 +208,10 @@ RUNTIME_ERROR_SUGGESTIONS = {
     ),
     RuntimeErrorType.HYPOTHESIS_UNSATISFIABLE: "Examine the schema for inconsistencies and consider simplifying it.",
     RuntimeErrorType.SCHEMA_BODY_IN_GET_REQUEST: DISABLE_SCHEMA_VALIDATION_SUGGESTION,
-    RuntimeErrorType.SCHEMA_INVALID_REGULAR_EXPRESSION: "Ensure your regex is compatible with Python's syntax
+    RuntimeErrorType.SCHEMA_INVALID_REGULAR_EXPRESSION: "Ensure your regex is compatible with Python's syntax.\n"
     "For guidance, visit: https://docs.python.org/3/library/re.html",
+    RuntimeErrorType.HYPOTHESIS_UNSUPPORTED_GRAPHQL_SCALAR: "Define a custom strategy for it.\n"
+    "For guidance, visit: https://schemathesis.readthedocs.io/en/stable/graphql.html#custom-scalars",
     RuntimeErrorType.HYPOTHESIS_HEALTH_CHECK_DATA_TOO_LARGE: _format_health_check_suggestion("data_too_large"),
     RuntimeErrorType.HYPOTHESIS_HEALTH_CHECK_FILTER_TOO_MUCH: _format_health_check_suggestion("filter_too_much"),
     RuntimeErrorType.HYPOTHESIS_HEALTH_CHECK_TOO_SLOW: _format_health_check_suggestion("too_slow"),
@@ -231,7 +234,7 @@ def _display_error(context: ExecutionContext, error: SerializedError) -> bool:
     click.echo(error.exception)
     if error.extras:
         extras = error.extras
-    elif context.
+    elif context.show_trace:
         extras = _split_traceback(error.exception_with_traceback)
     else:
         extras = []
@@ -310,13 +313,11 @@ def display_failures_for_single_test(context: ExecutionContext, result: Serializ
     click.echo(
         f"\n{bold('Reproduce with')}: \n\n {code_sample}\n",
     )
-    if result.seed is not None:
-        click.secho(f"Or add this option to your command line parameters: --hypothesis-seed={result.seed}")


 def group_by_case(
-    checks:
-) -> Generator[
+    checks: list[SerializedCheck], code_sample_style: CodeSampleStyle
+) -> Generator[tuple[str, Generator[SerializedCheck, None, None]], None, None]:
     checks = deduplicate_failures(checks)
     checks = sorted(checks, key=lambda c: _by_unique_code_sample(c, code_sample_style))
     yield from groupby(checks, lambda c: _by_unique_code_sample(c, code_sample_style))
@@ -398,6 +399,9 @@ def display_statistic(context: ExecutionContext, event: events.Finished) -> None
            f"\n{bold('Note')}: Use the '{SCHEMATHESIS_TEST_CASE_HEADER}' header to correlate test case ids "
            "from failure messages with server logs for debugging."
        )
+    if context.seed is not None:
+        seed_option = f"`--hypothesis-seed={context.seed}`"
+        click.secho(f"\n{bold('Note')}: To replicate these test failures, rerun with {bold(seed_option)}")

     if context.report is not None and not context.is_interrupted:
         if isinstance(context.report, FileReportContext):
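The added block prints a note suggesting `--hypothesis-seed` for replaying failures; a fixed seed makes Hypothesis' data generation deterministic. A small, self-contained illustration of the underlying mechanism in plain Hypothesis (independent of Schemathesis, shown only for context):

    # Sketch: pinning Hypothesis' randomness so a failing run can be replayed.
    from hypothesis import given, seed, strategies as st

    @seed(42)  # re-running with the same seed regenerates the same examples
    @given(st.integers())
    def test_numbers(value: int) -> None:
        assert isinstance(value, int)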
@@ -462,31 +466,52 @@ def display_report_metadata(meta: service.Metadata) -> None:
     click.secho(f"Compressed report size: {meta.size / 1024.:,.0f} KB", bold=True)


-def
+def display_service_unauthorized(hostname: str) -> None:
+    click.secho("\nTo authenticate:")
+    click.secho(f"1. Retrieve your token from {bold(hostname)}")
+    click.secho(f"2. Execute {bold('`st auth login <TOKEN>`')}")
+    env_var = bold(f"`{service.TOKEN_ENV_VAR}`")
+    click.secho(
+        f"\nAs an alternative, supply the token directly "
+        f"using the {bold('`--schemathesis-io-token`')} option "
+        f"or the {env_var} environment variable."
+    )
+    click.echo("\nFor more information, please visit: https://schemathesis.readthedocs.io/en/stable/service.html")
+
+
+def display_service_error(event: service.Error, message_prefix: str = "") -> None:
     """Show information about an error during communication with Schemathesis.io."""
     from requests import RequestException, HTTPError, Response

     if isinstance(event.exception, HTTPError):
         response = cast(Response, event.exception.response)
         status_code = response.status_code
-        click.secho(f"Schemathesis.io responded with HTTP {status_code}", fg="red")
         if 500 <= status_code <= 599:
+            click.secho(f"Schemathesis.io responded with HTTP {status_code}", fg="red")
             # Server error, should be resolved soon
             click.secho(
-                "
+                "\nIt is likely that we are already notified about the issue and working on a fix\n"
                 "Please, try again in 30 minutes",
                 fg="red",
             )
         elif status_code == 401:
             # Likely an invalid token
-            click.
-
-            "See https://schemathesis.readthedocs.io/en/stable/service.html for more details",
-            fg="red",
-            )
+            click.echo("Your CLI is not authenticated.")
+            display_service_unauthorized("schemathesis.io")
         else:
-
-
+            try:
+                data = response.json()
+                detail = data["detail"]
+                click.secho(f"{message_prefix}{detail}", fg="red")
+            except Exception:
+                # Other client-side errors are likely caused by a bug on the CLI side
+                click.secho(
+                    "We apologize for the inconvenience. This appears to be an internal issue.\n"
+                    "Please, consider reporting the following details to our issue "
+                    f"tracker:\n\n  {ISSUE_TRACKER_URL}\n\nResponse: {response.text!r}\n"
+                    f"Headers: {response.headers!r}",
+                    fg="red",
+                )
     elif isinstance(event.exception, RequestException):
         ask_to_report(event, report_to_issues=False)
     else:
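For other client-side status codes, the new code prefers the server-supplied `detail` field and falls back to a generic report when the body is not the expected JSON. The same defensive pattern in isolation (a sketch using `requests`; only the `detail` key mirrors the diff):

    # Sketch: prefer a structured error message, fall back to the raw payload.
    import requests

    def describe_error(response: requests.Response) -> str:
        try:
            return str(response.json()["detail"])  # expected error shape
        except Exception:
            # Non-JSON body or missing key: report what was actually received
            return f"Unexpected response ({response.status_code}): {response.text!r}"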
@@ -506,7 +531,7 @@ def ask_to_report(event: service.Error, report_to_issues: bool = True, extra: st
     else:
         response = ""
     if report_to_issues:
-        ask = f"Please, consider reporting the
+        ask = f"Please, consider reporting the following details to our issue tracker:\n\n {ISSUE_TRACKER_URL}\n\n"
     else:
         ask = ""
     click.secho(
@@ -539,7 +564,7 @@ def create_spinner(repetitions: int) -> Generator[str, None, None]:
             yield ch


-def display_checks_statistics(total:
+def display_checks_statistics(total: dict[str, dict[str | Status, int]]) -> None:
     padding = 20
     col1_len = max(map(len, total.keys())) + padding
     col2_len = len(str(max(total.values(), key=lambda v: v["total"])["total"])) * 2 + padding
@@ -550,7 +575,7 @@ def display_checks_statistics(total: Dict[str, Dict[Union[str, Status], int]]) -
         display_check_result(check_name, results, template)


-def display_check_result(check_name: str, results:
+def display_check_result(check_name: str, results: dict[str | Status, int], template: str) -> None:
     """Show results of single check execution."""
     if Status.failure in results:
         verdict = "FAILED"
@@ -591,18 +616,18 @@ def should_skip_suggestion(context: ExecutionContext, event: events.InternalErro
     return event.subtype == SchemaErrorType.CONNECTION_OTHER and context.wait_for_schema is not None


-def _split_traceback(traceback: str) ->
+def _split_traceback(traceback: str) -> list[str]:
     return [entry for entry in traceback.splitlines() if entry]


-def _display_extras(extras:
+def _display_extras(extras: list[str]) -> None:
     if extras:
         click.echo()
     for extra in extras:
         click.secho(f"    {extra}")


-def _maybe_display_tip(suggestion:
+def _maybe_display_tip(suggestion: str | None) -> None:
     # Display suggestion if any
     if suggestion is not None:
         click.secho(f"\n{click.style('Tip:', bold=True, fg='green')} {suggestion}")
@@ -614,7 +639,7 @@ def display_internal_error(context: ExecutionContext, event: events.InternalErro
     click.secho(event.message)
     if event.type == InternalErrorType.SCHEMA:
         extras = event.extras
-    elif context.
+    elif context.show_trace:
         extras = _split_traceback(event.exception_with_traceback)
     else:
         extras = [event.exception]
@@ -622,18 +647,19 @@ def display_internal_error(context: ExecutionContext, event: events.InternalErro
     if not should_skip_suggestion(context, event):
         if event.type == InternalErrorType.SCHEMA and isinstance(event.subtype, SchemaErrorType):
             suggestion = SCHEMA_ERROR_SUGGESTIONS.get(event.subtype)
-        elif context.
+        elif context.show_trace:
             suggestion = (
                 f"Please consider reporting the traceback above to our issue tracker:\n\n {ISSUE_TRACKER_URL}."
             )
         else:
-            suggestion = f"To see full tracebacks, add {bold('`--show-
+            suggestion = f"To see full tracebacks, add {bold('`--show-trace`')} to your CLI options"
         _maybe_display_tip(suggestion)


 def handle_initialized(context: ExecutionContext, event: events.Initialized) -> None:
     """Display information about the test session."""
     context.operations_count = cast(int, event.operations_count)  # INVARIANT: should not be `None`
+    context.seed = event.seed
     display_section_name("Schemathesis test session starts")
     if context.verbosity > 0:
         versions = (
@@ -651,10 +677,14 @@ def handle_initialized(context: ExecutionContext, event: events.Initialized) ->
     click.secho(f"Schema location: {event.location}", bold=True)
     click.secho(f"Base URL: {event.base_url}", bold=True)
     click.secho(f"Specification version: {event.specification_name}", bold=True)
+    if context.seed is not None:
+        click.secho(f"Random seed: {context.seed}", bold=True)
     click.secho(f"Workers: {context.workers_num}", bold=True)
     if context.rate_limit is not None:
         click.secho(f"Rate limit: {context.rate_limit}", bold=True)
     click.secho(f"Collected API operations: {context.operations_count}", bold=True)
+    links_count = cast(int, event.links_count)
+    click.secho(f"Collected API links: {links_count}", bold=True)
     if isinstance(context.report, ServiceReportContext):
         click.secho("Report to Schemathesis.io: ENABLED", bold=True)
     if context.operations_count >= 1:
schemathesis/code_samples.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 from enum import Enum
 from functools import lru_cache
 from shlex import quote
-from typing import
+from typing import TYPE_CHECKING

 from .constants import SCHEMATHESIS_TEST_CASE_HEADER
 from .types import Headers
@@ -11,7 +11,7 @@ if TYPE_CHECKING:
     from requests.structures import CaseInsensitiveDict


-@lru_cache
+@lru_cache
 def get_excluded_headers() -> CaseInsensitiveDict:
     from requests.structures import CaseInsensitiveDict
     from requests.utils import default_headers
@@ -43,11 +43,11 @@ class CodeSampleStyle(str, Enum):
         }[self]

     @classmethod
-    def default(cls) ->
+    def default(cls) -> CodeSampleStyle:
         return cls.curl

     @classmethod
-    def from_str(cls, value: str) ->
+    def from_str(cls, value: str) -> CodeSampleStyle:
         try:
             return cls[value]
         except KeyError:
@@ -61,10 +61,10 @@ class CodeSampleStyle(str, Enum):
         *,
         method: str,
         url: str,
-        body:
-        headers:
+        body: str | bytes | None,
+        headers: Headers | None,
         verify: bool,
-        extra_headers:
+        extra_headers: Headers | None = None,
     ) -> str:
         """Generate a code snippet for making HTTP requests."""
         handlers = {
@@ -76,7 +76,7 @@ class CodeSampleStyle(str, Enum):
         )


-def _filter_headers(headers:
+def _filter_headers(headers: Headers | None, extra: Headers | None = None) -> Headers:
     headers = headers.copy() if headers else {}
     if extra is not None:
         for key, value in extra.items():
@@ -89,7 +89,7 @@ def _generate_curl(
     *,
     method: str,
     url: str,
-    body:
+    body: str | bytes | None,
     headers: Headers,
     verify: bool,
 ) -> str:
@@ -111,7 +111,7 @@ def _generate_requests(
     *,
     method: str,
     url: str,
-    body:
+    body: str | bytes | None,
     headers: Headers,
     verify: bool,
 ) -> str:
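Most of the churn in this file, as elsewhere in the release, is the switch from `typing.Optional`/`Union` aliases to PEP 604 unions and built-in generics; `from __future__ import annotations` (already at the top of the module, per the first hunk header) keeps these annotations usable on interpreters older than 3.10 because they are not evaluated at runtime. A minimal illustration of the equivalence (not taken from the package):

    from __future__ import annotations  # annotations become lazy, so the new syntax parses on older interpreters

    # Before: def sample(body: Optional[Union[str, bytes]], headers: Optional[Dict[str, str]] = None) -> List[str]
    def sample(body: str | bytes | None, headers: dict[str, str] | None = None) -> list[str]:
        lines: list[str] = []
        if body is not None:
            lines.append(body if isinstance(body, str) else body.decode())
        for key, value in (headers or {}).items():
            lines.append(f"{key}: {value}")
        return lines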
schemathesis/constants.py CHANGED

schemathesis/contrib/unique_data.py CHANGED
@@ -16,10 +16,10 @@ def uninstall() -> None:
     unregister(before_generate_case)


-def before_generate_case(context: HookContext, strategy: st.SearchStrategy[
+def before_generate_case(context: HookContext, strategy: st.SearchStrategy[Case]) -> st.SearchStrategy[Case]:
     seen = set()

-    def is_not_seen(case:
+    def is_not_seen(case: Case) -> bool:
         # Calculate hash just once as it is costly
         hashed = hash(case)
         if hashed not in seen:
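The hook above deduplicates generated test cases by hashing each one and filtering repeats out of the Hypothesis strategy. The same idea reduced to plain Hypothesis, as a simplified sketch (not the package's code):

    # Simplified sketch: strategy-level deduplication via `SearchStrategy.filter`.
    from hypothesis import strategies as st

    def unique_only(strategy: st.SearchStrategy) -> st.SearchStrategy:
        seen: set[int] = set()

        def is_not_seen(value) -> bool:
            hashed = hash(value)  # hash once; set membership checks are cheap afterwards
            if hashed not in seen:
                seen.add(hashed)
                return True
            return False

        return strategy.filter(is_not_seen)

    # Repeated draws are rejected; Hypothesis retries, and may give up if nothing new can be generated.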
schemathesis/exceptions.py CHANGED
@@ -7,7 +7,7 @@ from dataclasses import dataclass, field
 from hashlib import sha1
 from json import JSONDecodeError
 from types import TracebackType
-from typing import TYPE_CHECKING, Any, Callable,
+from typing import TYPE_CHECKING, Any, Callable, Generator, NoReturn

 from .constants import SERIALIZERS_SUGGESTION_MESSAGE
 from .failures import FailureContext
@@ -23,14 +23,14 @@ class CheckFailed(AssertionError):
     """Custom error type to distinguish from arbitrary AssertionError that may happen in the dependent libraries."""

     __module__ = "builtins"
-    context:
-    causes:
+    context: FailureContext | None
+    causes: tuple[CheckFailed | AssertionError, ...] | None

     def __init__(
         self,
         *args: Any,
-        context:
-        causes:
+        context: FailureContext | None = None,
+        causes: tuple[CheckFailed | AssertionError, ...] | None = None,
     ):
         super().__init__(*args)
         self.context = context
@@ -38,8 +38,8 @@ class CheckFailed(AssertionError):


 def make_unique_by_key(
-    check_name: str, check_message:
-) ->
+    check_name: str, check_message: str | None, context: FailureContext | None
+) -> tuple[str | None, ...]:
     """A key to distinguish different failed checks.

     It is not only based on `FailureContext`, because the end-user may raise plain `AssertionError` in their custom
@@ -51,8 +51,8 @@ def make_unique_by_key(


 def deduplicate_failed_checks(
-    checks:
-) -> Generator[
+    checks: list[CheckFailed | AssertionError]
+) -> Generator[CheckFailed | AssertionError, None, None]:
     """Keep only unique failed checks."""
     seen = set()
     for check in checks:
@@ -66,10 +66,10 @@ def deduplicate_failed_checks(
             seen.add(key)


-CACHE:
+CACHE: dict[str | int, type[CheckFailed]] = {}


-def get_exception(name: str) ->
+def get_exception(name: str) -> type[CheckFailed]:
     """Create a new exception class with provided name or fetch one from the cache."""
     if name in CACHE:
         exception_class = CACHE[name]
@@ -81,14 +81,14 @@ def get_exception(name: str) -> Type[CheckFailed]:
     return exception_class


-def _get_hashed_exception(prefix: str, message: str) ->
+def _get_hashed_exception(prefix: str, message: str) -> type[CheckFailed]:
     """Give different exceptions for different error messages."""
     messages_digest = sha1(message.encode("utf-8")).hexdigest()
     name = f"{prefix}{messages_digest}"
     return get_exception(name)


-def get_grouped_exception(prefix: str, *exceptions: AssertionError) ->
+def get_grouped_exception(prefix: str, *exceptions: AssertionError) -> type[CheckFailed]:
     # The prefix is needed to distinguish multiple operations with the same error messages
     # that are coming from different operations
     messages = [exception.args[0] for exception in exceptions]
@@ -96,50 +96,50 @@ def get_grouped_exception(prefix: str, *exceptions: AssertionError) -> Type[Chec
     return _get_hashed_exception("GroupedException", f"{prefix}{message}")


-def get_server_error(status_code: int) ->
+def get_server_error(status_code: int) -> type[CheckFailed]:
     """Return new exception for the Internal Server Error cases."""
     name = f"ServerError{status_code}"
     return get_exception(name)


-def get_status_code_error(status_code: int) ->
+def get_status_code_error(status_code: int) -> type[CheckFailed]:
     """Return new exception for an unexpected status code."""
     name = f"StatusCodeError{status_code}"
     return get_exception(name)


-def get_response_type_error(expected: str, received: str) ->
+def get_response_type_error(expected: str, received: str) -> type[CheckFailed]:
     """Return new exception for an unexpected response type."""
     name = f"SchemaValidationError{expected}_{received}"
     return get_exception(name)


-def get_malformed_media_type_error(media_type: str) ->
+def get_malformed_media_type_error(media_type: str) -> type[CheckFailed]:
     name = f"MalformedMediaType{media_type}"
     return get_exception(name)


-def get_missing_content_type_error() ->
+def get_missing_content_type_error() -> type[CheckFailed]:
     """Return new exception for a missing Content-Type header."""
     return get_exception("MissingContentTypeError")


-def get_schema_validation_error(exception: ValidationError) ->
+def get_schema_validation_error(exception: ValidationError) -> type[CheckFailed]:
     """Return new exception for schema validation error."""
     return _get_hashed_exception("SchemaValidationError", str(exception))


-def get_response_parsing_error(exception: JSONDecodeError) ->
+def get_response_parsing_error(exception: JSONDecodeError) -> type[CheckFailed]:
     """Return new exception for response parsing error."""
     return _get_hashed_exception("ResponseParsingError", str(exception))


-def get_headers_error(message: str) ->
+def get_headers_error(message: str) -> type[CheckFailed]:
     """Return new exception for missing headers."""
     return _get_hashed_exception("MissingHeadersError", message)


-def get_timeout_error(deadline:
+def get_timeout_error(deadline: float | int) -> type[CheckFailed]:
     """Request took too long."""
     return _get_hashed_exception("TimeoutError", str(deadline))

@@ -152,15 +152,15 @@ class OperationSchemaError(Exception):
     """Schema associated with an API operation contains an error."""

     __module__ = "builtins"
-    message:
-    path:
-    method:
-    full_path:
+    message: str | None = None
+    path: str | None = None
+    method: str | None = None
+    full_path: str | None = None

     @classmethod
     def from_jsonschema_error(
-        cls, error: ValidationError, path:
-    ) ->
+        cls, error: ValidationError, path: str | None, method: str | None, full_path: str | None
+    ) -> OperationSchemaError:
         if error.absolute_path:
             part = error.absolute_path[-1]
             if isinstance(part, int) and len(error.absolute_path) > 1:
@@ -170,7 +170,7 @@ class OperationSchemaError(Exception):
             message = f"Invalid `{part}` definition"
         else:
             message = "Invalid schema definition"
-        error_path = " -> ".join(
+        error_path = " -> ".join(str(entry) for entry in error.path) or "[root]"
         message += f"\n\nLocation:\n {error_path}"
         instance = truncated_json(error.instance)
         message += f"\n\nProblematic definition:\n{instance}"
@@ -185,8 +185,8 @@ class OperationSchemaError(Exception):

     @classmethod
     def from_reference_resolution_error(
-        cls, error: RefResolutionError, path:
-    ) ->
+        cls, error: RefResolutionError, path: str | None, method: str | None, full_path: str | None
+    ) -> OperationSchemaError:
         message = "Unresolvable JSON pointer in the schema"
         # Get the pointer value from "Unresolvable JSON pointer: 'components/UnknownParameter'"
         pointer = str(error).split(": ", 1)[-1]
@@ -217,7 +217,7 @@ class InvalidRegularExpression(OperationSchemaError):
     __module__ = "builtins"

     @classmethod
-    def from_hypothesis_jsonschema_message(cls, message: str) ->
+    def from_hypothesis_jsonschema_message(cls, message: str) -> InvalidRegularExpression:
         match = re.search(r"pattern='(.*?)'.*?\((.*?)\)", message)
         if match:
             message = f"Invalid regular expression. Pattern `{match.group(1)}` is not recognized - `{match.group(2)}`"
@@ -263,7 +263,7 @@ class DeadlineExceeded(Exception):
     __module__ = "builtins"

     @classmethod
-    def from_exc(cls, exc: hypothesis.errors.DeadlineExceeded) ->
+    def from_exc(cls, exc: hypothesis.errors.DeadlineExceeded) -> DeadlineExceeded:
         runtime = exc.runtime.total_seconds() * 1000
         deadline = exc.deadline.total_seconds() * 1000
         return cls(
@@ -281,6 +281,7 @@ class RuntimeErrorType(str, enum.Enum):
     # Hypothesis issues
     HYPOTHESIS_DEADLINE_EXCEEDED = "hypothesis_deadline_exceeded"
     HYPOTHESIS_UNSATISFIABLE = "hypothesis_unsatisfiable"
+    HYPOTHESIS_UNSUPPORTED_GRAPHQL_SCALAR = "hypothesis_unsupported_graphql_scalar"
     HYPOTHESIS_HEALTH_CHECK_DATA_TOO_LARGE = "hypothesis_health_check_data_too_large"
     HYPOTHESIS_HEALTH_CHECK_FILTER_TOO_MUCH = "hypothesis_health_check_filter_too_much"
     HYPOTHESIS_HEALTH_CHECK_TOO_SLOW = "hypothesis_health_check_too_slow"
@@ -290,6 +291,9 @@ class RuntimeErrorType(str, enum.Enum):
     SCHEMA_INVALID_REGULAR_EXPRESSION = "schema_invalid_regular_expression"
     SCHEMA_GENERIC = "schema_generic"

+    SERIALIZATION_NOT_POSSIBLE = "serialization_not_possible"
+    SERIALIZATION_UNBOUNDED_PREFIX = "serialization_unbounded_prefix"
+
     # Unclassified
     UNCLASSIFIED = "unclassified"

@@ -308,6 +312,7 @@ class SchemaErrorType(str, enum.Enum):
     HTTP_FORBIDDEN = "http_forbidden"

     # Content decoding issues
+    SYNTAX_ERROR = "syntax_error"
     UNEXPECTED_CONTENT_TYPE = "unexpected_content_type"
     YAML_NUMERIC_STATUS_CODES = "yaml_numeric_status_codes"
     YAML_NON_STRING_KEYS = "yaml_non_string_keys"
@@ -317,6 +322,9 @@ class SchemaErrorType(str, enum.Enum):
     OPEN_API_UNSPECIFIED_VERSION = "open_api_unspecified_version"
     OPEN_API_UNSUPPORTED_VERSION = "open_api_unsupported_version"

+    # GraphQL validation
+    GRAPHQL_INVALID_SCHEMA = "graphql_invalid_schema"
+
     # Unclassified
     UNCLASSIFIED = "unclassified"

@@ -327,9 +335,9 @@ class SchemaError(RuntimeError):

     type: SchemaErrorType
     message: str
-    url:
-    response:
-    extras:
+    url: str | None = None
+    response: GenericResponse | None = None
+    extras: list[str] = field(default_factory=list)

     def __str__(self) -> str:
         return self.message
@@ -398,11 +406,11 @@ class SerializationNotPossible(SerializationError):
     __module__ = "builtins"

     @classmethod
-    def from_media_types(cls, *media_types: str) ->
+    def from_media_types(cls, *media_types: str) -> SerializationNotPossible:
         return cls(SERIALIZATION_NOT_POSSIBLE_MESSAGE.format(", ".join(media_types)))

     @classmethod
-    def for_media_type(cls, media_type: str) ->
+    def for_media_type(cls, media_type: str) -> SerializationNotPossible:
         return cls(SERIALIZATION_FOR_TYPE_IS_NOT_POSSIBLE_MESSAGE.format(media_type))


@@ -430,7 +438,7 @@ def format_exception(error: Exception, include_traceback: bool = False) -> str:
     return "".join(lines).strip()


-def extract_nth_traceback(trace:
+def extract_nth_traceback(trace: TracebackType | None, n: int) -> TracebackType | None:
     depth = 0
     while depth < n and trace is not None:
         trace = trace.tb_next
@@ -442,8 +450,9 @@ def remove_ssl_line_number(text: str) -> str:
     return re.sub(r"\(_ssl\.c:\d+\)", "", text)


-def extract_requests_exception_details(exc: RequestException) ->
+def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
     from requests.exceptions import SSLError, ConnectionError, ChunkedEncodingError
+    from urllib3.exceptions import MaxRetryError

     if isinstance(exc, SSLError):
         message = "SSL verification problem"
@@ -451,8 +460,12 @@ def extract_requests_exception_details(exc: RequestException) -> Tuple[str, List
         extra = [remove_ssl_line_number(reason)]
     elif isinstance(exc, ConnectionError):
         message = "Connection failed"
-
-
+        inner = exc.args[0]
+        if isinstance(inner, MaxRetryError) and inner.reason is not None:
+            _, reason = inner.reason.args[0].split(":", maxsplit=1)
+            extra = [reason.strip()]
+        else:
+            extra = [" ".join(map(str, inner.args))]
     elif isinstance(exc, ChunkedEncodingError):
         message = "Connection broken. The server declared chunked encoding but sent an invalid chunk"
         extra = [str(exc.args[0].args[1])]