schemathesis 3.25.6__py3-none-any.whl → 3.39.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. schemathesis/__init__.py +6 -6
  2. schemathesis/_compat.py +2 -2
  3. schemathesis/_dependency_versions.py +4 -2
  4. schemathesis/_hypothesis.py +369 -56
  5. schemathesis/_lazy_import.py +1 -0
  6. schemathesis/_override.py +5 -4
  7. schemathesis/_patches.py +21 -0
  8. schemathesis/_rate_limiter.py +7 -0
  9. schemathesis/_xml.py +75 -22
  10. schemathesis/auths.py +78 -16
  11. schemathesis/checks.py +21 -9
  12. schemathesis/cli/__init__.py +783 -432
  13. schemathesis/cli/__main__.py +4 -0
  14. schemathesis/cli/callbacks.py +58 -13
  15. schemathesis/cli/cassettes.py +233 -47
  16. schemathesis/cli/constants.py +8 -2
  17. schemathesis/cli/context.py +22 -5
  18. schemathesis/cli/debug.py +2 -1
  19. schemathesis/cli/handlers.py +4 -1
  20. schemathesis/cli/junitxml.py +103 -22
  21. schemathesis/cli/options.py +15 -4
  22. schemathesis/cli/output/default.py +258 -112
  23. schemathesis/cli/output/short.py +23 -8
  24. schemathesis/cli/reporting.py +79 -0
  25. schemathesis/cli/sanitization.py +6 -0
  26. schemathesis/code_samples.py +5 -3
  27. schemathesis/constants.py +1 -0
  28. schemathesis/contrib/openapi/__init__.py +1 -1
  29. schemathesis/contrib/openapi/fill_missing_examples.py +3 -1
  30. schemathesis/contrib/openapi/formats/uuid.py +2 -1
  31. schemathesis/contrib/unique_data.py +3 -3
  32. schemathesis/exceptions.py +76 -65
  33. schemathesis/experimental/__init__.py +35 -0
  34. schemathesis/extra/_aiohttp.py +1 -0
  35. schemathesis/extra/_flask.py +4 -1
  36. schemathesis/extra/_server.py +1 -0
  37. schemathesis/extra/pytest_plugin.py +17 -25
  38. schemathesis/failures.py +77 -9
  39. schemathesis/filters.py +185 -8
  40. schemathesis/fixups/__init__.py +1 -0
  41. schemathesis/fixups/fast_api.py +2 -2
  42. schemathesis/fixups/utf8_bom.py +1 -2
  43. schemathesis/generation/__init__.py +20 -36
  44. schemathesis/generation/_hypothesis.py +59 -0
  45. schemathesis/generation/_methods.py +44 -0
  46. schemathesis/generation/coverage.py +931 -0
  47. schemathesis/graphql.py +0 -1
  48. schemathesis/hooks.py +89 -12
  49. schemathesis/internal/checks.py +84 -0
  50. schemathesis/internal/copy.py +22 -3
  51. schemathesis/internal/deprecation.py +6 -2
  52. schemathesis/internal/diff.py +15 -0
  53. schemathesis/internal/extensions.py +27 -0
  54. schemathesis/internal/jsonschema.py +2 -1
  55. schemathesis/internal/output.py +68 -0
  56. schemathesis/internal/result.py +1 -1
  57. schemathesis/internal/transformation.py +11 -0
  58. schemathesis/lazy.py +138 -25
  59. schemathesis/loaders.py +7 -5
  60. schemathesis/models.py +318 -211
  61. schemathesis/parameters.py +4 -0
  62. schemathesis/runner/__init__.py +50 -15
  63. schemathesis/runner/events.py +65 -5
  64. schemathesis/runner/impl/context.py +104 -0
  65. schemathesis/runner/impl/core.py +388 -177
  66. schemathesis/runner/impl/solo.py +19 -29
  67. schemathesis/runner/impl/threadpool.py +70 -79
  68. schemathesis/runner/probes.py +11 -9
  69. schemathesis/runner/serialization.py +150 -17
  70. schemathesis/sanitization.py +5 -1
  71. schemathesis/schemas.py +170 -102
  72. schemathesis/serializers.py +7 -2
  73. schemathesis/service/ci.py +1 -0
  74. schemathesis/service/client.py +39 -6
  75. schemathesis/service/events.py +5 -1
  76. schemathesis/service/extensions.py +224 -0
  77. schemathesis/service/hosts.py +6 -2
  78. schemathesis/service/metadata.py +25 -0
  79. schemathesis/service/models.py +211 -2
  80. schemathesis/service/report.py +6 -6
  81. schemathesis/service/serialization.py +45 -71
  82. schemathesis/service/usage.py +1 -0
  83. schemathesis/specs/graphql/_cache.py +26 -0
  84. schemathesis/specs/graphql/loaders.py +25 -5
  85. schemathesis/specs/graphql/nodes.py +1 -0
  86. schemathesis/specs/graphql/scalars.py +2 -2
  87. schemathesis/specs/graphql/schemas.py +130 -100
  88. schemathesis/specs/graphql/validation.py +1 -2
  89. schemathesis/specs/openapi/__init__.py +1 -0
  90. schemathesis/specs/openapi/_cache.py +123 -0
  91. schemathesis/specs/openapi/_hypothesis.py +78 -60
  92. schemathesis/specs/openapi/checks.py +504 -25
  93. schemathesis/specs/openapi/converter.py +31 -4
  94. schemathesis/specs/openapi/definitions.py +10 -17
  95. schemathesis/specs/openapi/examples.py +126 -12
  96. schemathesis/specs/openapi/expressions/__init__.py +37 -2
  97. schemathesis/specs/openapi/expressions/context.py +1 -1
  98. schemathesis/specs/openapi/expressions/extractors.py +26 -0
  99. schemathesis/specs/openapi/expressions/lexer.py +20 -18
  100. schemathesis/specs/openapi/expressions/nodes.py +29 -6
  101. schemathesis/specs/openapi/expressions/parser.py +26 -5
  102. schemathesis/specs/openapi/formats.py +44 -0
  103. schemathesis/specs/openapi/links.py +125 -42
  104. schemathesis/specs/openapi/loaders.py +77 -36
  105. schemathesis/specs/openapi/media_types.py +34 -0
  106. schemathesis/specs/openapi/negative/__init__.py +6 -3
  107. schemathesis/specs/openapi/negative/mutations.py +21 -6
  108. schemathesis/specs/openapi/parameters.py +39 -25
  109. schemathesis/specs/openapi/patterns.py +137 -0
  110. schemathesis/specs/openapi/references.py +37 -7
  111. schemathesis/specs/openapi/schemas.py +360 -241
  112. schemathesis/specs/openapi/security.py +25 -7
  113. schemathesis/specs/openapi/serialization.py +1 -0
  114. schemathesis/specs/openapi/stateful/__init__.py +198 -70
  115. schemathesis/specs/openapi/stateful/statistic.py +198 -0
  116. schemathesis/specs/openapi/stateful/types.py +14 -0
  117. schemathesis/specs/openapi/utils.py +6 -1
  118. schemathesis/specs/openapi/validation.py +1 -0
  119. schemathesis/stateful/__init__.py +35 -21
  120. schemathesis/stateful/config.py +97 -0
  121. schemathesis/stateful/context.py +135 -0
  122. schemathesis/stateful/events.py +274 -0
  123. schemathesis/stateful/runner.py +309 -0
  124. schemathesis/stateful/sink.py +68 -0
  125. schemathesis/stateful/state_machine.py +67 -38
  126. schemathesis/stateful/statistic.py +22 -0
  127. schemathesis/stateful/validation.py +100 -0
  128. schemathesis/targets.py +33 -1
  129. schemathesis/throttling.py +25 -5
  130. schemathesis/transports/__init__.py +354 -0
  131. schemathesis/transports/asgi.py +7 -0
  132. schemathesis/transports/auth.py +25 -2
  133. schemathesis/transports/content_types.py +3 -1
  134. schemathesis/transports/headers.py +2 -1
  135. schemathesis/transports/responses.py +9 -4
  136. schemathesis/types.py +9 -0
  137. schemathesis/utils.py +11 -16
  138. schemathesis-3.39.7.dist-info/METADATA +293 -0
  139. schemathesis-3.39.7.dist-info/RECORD +160 -0
  140. {schemathesis-3.25.6.dist-info → schemathesis-3.39.7.dist-info}/WHEEL +1 -1
  141. schemathesis/specs/openapi/filters.py +0 -49
  142. schemathesis/specs/openapi/stateful/links.py +0 -92
  143. schemathesis-3.25.6.dist-info/METADATA +0 -356
  144. schemathesis-3.25.6.dist-info/RECORD +0 -134
  145. {schemathesis-3.25.6.dist-info → schemathesis-3.39.7.dist-info}/entry_points.txt +0 -0
  146. {schemathesis-3.25.6.dist-info → schemathesis-3.39.7.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,100 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ from ..exceptions import CheckFailed, get_grouped_exception
6
+ from ..internal.checks import CheckContext
7
+
8
+ if TYPE_CHECKING:
9
+ from ..failures import FailureContext
10
+ from ..internal.checks import CheckFunction
11
+ from ..models import Case
12
+ from ..transports.responses import GenericResponse
13
+ from .context import RunnerContext
14
+
15
+
16
def validate_response(
    *,
    response: GenericResponse,
    case: Case,
    runner_ctx: RunnerContext,
    check_ctx: CheckContext,
    checks: tuple[CheckFunction, ...],
    additional_checks: tuple[CheckFunction, ...] = (),
    max_response_time: int | None = None,
) -> None:
    """Validate the response against the provided checks.

    Runs every check (plus the optional response-time check), records passed/failed
    `Check` entries on the runner context, and finally raises a single grouped
    exception so Hypothesis can deduplicate it against other failures.

    :raises CheckFailed: grouped exception containing all new failures.
    """
    from .._compat import MultipleFailures
    from ..checks import _make_max_response_time_failure_message
    from ..failures import ResponseTimeExceeded
    from ..models import Check, Status

    exceptions: list[CheckFailed | AssertionError] = []
    check_results = runner_ctx.checks_for_step

    # NOTE: `check_name` / `checked_case` are passed explicitly instead of being read
    # from the enclosing loop scope. The previous closure-based version attributed a
    # `max_response_time` failure to whatever check ran last (wrong `name`/`example`)
    # and raised `NameError` when `checks` was empty.
    def _on_failure(
        check_name: str,
        checked_case: Case,
        exc: CheckFailed | AssertionError,
        message: str,
        context: FailureContext | None,
    ) -> None:
        exceptions.append(exc)
        # Deduplicate: record each distinct failure only once per suite
        if runner_ctx.is_seen_in_suite(exc):
            return
        failed_check = Check(
            name=check_name,
            value=Status.failure,
            response=response,
            elapsed=response.elapsed.total_seconds(),
            example=checked_case,
            message=message,
            context=context,
            request=None,
        )
        runner_ctx.add_failed_check(failed_check)
        check_results.append(failed_check)
        runner_ctx.mark_as_seen_in_suite(exc)

    def _on_passed(_name: str, _case: Case) -> None:
        passed_check = Check(
            name=_name,
            value=Status.success,
            response=response,
            elapsed=response.elapsed.total_seconds(),
            example=_case,
            request=None,
        )
        check_results.append(passed_check)

    for check in tuple(checks) + tuple(additional_checks):
        name = check.__name__
        # Work on a copy so checks cannot mutate the original case
        copied_case = case.partial_deepcopy()
        try:
            skip_check = check(check_ctx, response, copied_case)
            if not skip_check:
                _on_passed(name, copied_case)
        except CheckFailed as exc:
            if runner_ctx.is_seen_in_run(exc):
                continue
            _on_failure(name, copied_case, exc, str(exc), exc.context)
        except AssertionError as exc:
            if runner_ctx.is_seen_in_run(exc):
                continue
            _on_failure(name, copied_case, exc, str(exc) or f"Custom check failed: `{name}`", None)
        except MultipleFailures as exc:
            for subexc in exc.exceptions:
                if runner_ctx.is_seen_in_run(subexc):
                    continue
                _on_failure(name, copied_case, subexc, str(subexc), subexc.context)

    if max_response_time:
        elapsed_time = response.elapsed.total_seconds() * 1000
        if elapsed_time > max_response_time:
            message = _make_max_response_time_failure_message(elapsed_time, max_response_time)
            context = ResponseTimeExceeded(message=message, elapsed=elapsed_time, deadline=max_response_time)
            # Raise & catch to get a real traceback attached to the AssertionError
            try:
                raise AssertionError(message)
            except AssertionError as _exc:
                if not runner_ctx.is_seen_in_run(_exc):
                    # Attribute the failure to the time check itself, not to the last executed check
                    _on_failure("max_response_time", case, _exc, message, context)
        else:
            _on_passed("max_response_time", case)

    # Raise a grouped exception so Hypothesis can properly deduplicate it against the other failures
    if exceptions:
        raise get_grouped_exception(case.operation.verbose_name, *exceptions)(causes=tuple(exceptions))
schemathesis/targets.py CHANGED
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
- from dataclasses import dataclass
2
+
3
+ from dataclasses import dataclass, field
3
4
  from typing import TYPE_CHECKING, Callable
4
5
 
5
6
  if TYPE_CHECKING:
@@ -31,6 +32,37 @@ OPTIONAL_TARGETS = (response_time,)
31
32
  ALL_TARGETS: tuple[Target, ...] = DEFAULT_TARGETS + OPTIONAL_TARGETS
32
33
 
33
34
 
35
@dataclass
class TargetMetricCollector:
    """Collect multiple observations for target metrics."""

    targets: list[Target]
    observations: dict[str, list[int | float]] = field(init=False)

    def __post_init__(self) -> None:
        # One observation bucket per target, keyed by the target function's name
        self.observations = {metric.__name__: [] for metric in self.targets}

    def reset(self) -> None:
        """Reset all collected observations."""
        for metric in self.targets:
            self.observations[metric.__name__].clear()

    def store(self, case: Case, response: GenericResponse) -> None:
        """Calculate target metrics & store them."""
        ctx = TargetContext(case=case, response=response, response_time=response.elapsed.total_seconds())
        for metric in self.targets:
            self.observations[metric.__name__].append(metric(ctx))

    def maximize(self) -> None:
        """Give feedback to the Hypothesis engine, so it maximizes the aggregated metrics."""
        import hypothesis

        for metric in self.targets:
            # Aggregation is currently a plain sum of all recorded observations
            hypothesis.target(sum(self.observations[metric.__name__]), label=metric.__name__)
64
+
65
+
34
66
  def register(target: Target) -> Target:
35
67
  """Register a new testing target for schemathesis CLI.
36
68
 
@@ -1,11 +1,12 @@
1
1
  from __future__ import annotations
2
+
2
3
  from typing import TYPE_CHECKING
3
4
 
5
+ from ._dependency_versions import IS_PYRATE_LIMITER_ABOVE_3
4
6
  from .exceptions import UsageError
5
7
 
6
-
7
8
  if TYPE_CHECKING:
8
- from pyrate_limiter import Limiter
9
+ from pyrate_limiter import Duration, Limiter
9
10
 
10
11
 
11
12
  def parse_units(rate: str) -> tuple[int, int]:
@@ -33,9 +34,28 @@ def invalid_rate(value: str) -> UsageError:
33
34
  )
34
35
 
35
36
 
37
def _get_max_delay(value: int, unit: Duration) -> int:
    """Return the maximum delay (in milliseconds) the limiter may wait for `value` requests per `unit`."""
    from pyrate_limiter import Duration

    # Translate the rate unit into seconds; anything else falls back to a day
    if unit == Duration.SECOND:
        multiplier = 1
    elif unit == Duration.MINUTE:
        multiplier = 60
    elif unit == Duration.HOUR:
        multiplier = 60 * 60
    else:
        multiplier = 60 * 60 * 24
    # Delay is in milliseconds + `pyrate_limiter` adds 50ms on top.
    # Hence adding 100 covers this
    return value * multiplier * 1000 + 100
51
+
52
+
36
53
def build_limiter(rate: str) -> Limiter:
    """Create a rate limiter from a rate string understood by `parse_units`."""
    from ._rate_limiter import Limiter, Rate

    limit, interval = parse_units(rate)
    if IS_PYRATE_LIMITER_ABOVE_3:
        # Newer `pyrate_limiter` versions accept `max_delay`; older ones do not take this argument
        return Limiter(Rate(limit, interval), max_delay=_get_max_delay(limit, interval))
    return Limiter(Rate(limit, interval))
@@ -1,5 +1,359 @@
1
+ from __future__ import annotations
2
+
1
3
  import base64
4
+ import inspect
5
+ import time
6
+ from contextlib import contextmanager
7
+ from dataclasses import dataclass
8
+ from datetime import timedelta
9
+ from inspect import iscoroutinefunction
10
+ from typing import TYPE_CHECKING, Any, Generator, Protocol, TypeVar, cast
11
+ from urllib.parse import urlparse
12
+
13
+ from .. import failures
14
+ from .._dependency_versions import IS_WERKZEUG_ABOVE_3
15
+ from ..constants import DEFAULT_RESPONSE_TIMEOUT, NOT_SET
16
+ from ..exceptions import get_timeout_error
17
+ from ..serializers import SerializerContext
18
+ from ..types import Cookies, NotSet, RequestCert
19
+
20
+ if TYPE_CHECKING:
21
+ import requests
22
+ import werkzeug
23
+ from _typeshed.wsgi import WSGIApplication
24
+ from starlette_testclient._testclient import ASGI2App, ASGI3App
25
+
26
+ from ..models import Case
27
+ from .responses import WSGIResponse
28
+
29
+
30
@dataclass
class RequestConfig:
    """Transport-level settings applied to outgoing requests."""

    # Timeout in milliseconds; `None` means no explicit timeout (see `prepared_timeout`).
    timeout: int | None = None
    # TLS verification flag; presumably a string is a CA bundle path, mirroring `requests`' `verify` — TODO confirm.
    tls_verify: bool | str = True
    # Proxy URL; `None` disables proxying.
    proxy: str | None = None
    # Client-side certificate (type alias from `..types`).
    cert: RequestCert | None = None

    # IPython pretty-printing hook; intentionally a no-op.
    def _repr_pretty_(self, *args: Any, **kwargs: Any) -> None: ...

    @property
    def prepared_timeout(self) -> float | None:
        """The timeout converted to the seconds that `requests` expects."""
        return prepare_timeout(self.timeout)
2
42
 
3
43
 
4
44
def serialize_payload(payload: bytes) -> str:
    """Encode a binary payload as base64 text."""
    return base64.b64encode(payload).decode()
46
+
47
+
48
def deserialize_payload(data: str | None) -> bytes | None:
    """Decode a base64 payload produced by `serialize_payload`; `None` passes through."""
    return None if data is None else base64.b64decode(data)
52
+
53
+
54
def get(app: Any) -> Transport:
    """Get transport to send the data to the application."""
    # No app object means testing over the network via `requests`
    if app is None:
        return RequestsTransport()
    # A coroutine-based callable is assumed to be an ASGI application
    if iscoroutinefunction(app) or (
        hasattr(app, "__call__") and iscoroutinefunction(app.__call__)  # noqa: B004
    ):
        return ASGITransport(app=app)
    # Everything else is treated as a WSGI application
    return WSGITransport(app=app)
63
+
64
+
65
+ S = TypeVar("S", contravariant=True)
66
+ R = TypeVar("R", covariant=True)
67
+
68
+
69
class Transport(Protocol[S, R]):
    """Protocol for sending test cases to an application.

    `S` is the session type accepted by `send`, `R` is the response type it returns.
    """

    def serialize_case(
        self,
        case: Case,
        *,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Convert `case` into keyword arguments for the underlying client call."""
        raise NotImplementedError

    def send(
        self,
        case: Case,
        *,
        session: S | None = None,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> R:
        """Send `case` to the application and return its response."""
        raise NotImplementedError
93
+
94
+
95
class RequestsTransport:
    """Transport that sends test cases over the network with `requests`."""

    def serialize_case(
        self,
        case: Case,
        *,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Build the keyword arguments for `requests.Session.request` from `case`."""
        final_headers = case._get_headers(headers)
        media_type: str | None
        # Fall back to the operation's default media type when a body is present without one
        if case.body is not NOT_SET and case.media_type is None:
            media_type = case.operation._get_default_media_type()
        else:
            media_type = case.media_type
        if media_type and media_type != "multipart/form-data" and not isinstance(case.body, NotSet):
            # `requests` will handle multipart form headers with the proper `boundary` value.
            if "content-type" not in final_headers:
                final_headers["Content-Type"] = media_type
        url = case._get_url(base_url)
        serializer = case._get_serializer(media_type)
        if serializer is not None and not isinstance(case.body, NotSet):
            context = SerializerContext(case=case)
            extra = serializer.as_requests(context, case._get_body())
        else:
            extra = {}
        if case._auth is not None:
            extra["auth"] = case._auth
        additional_headers = extra.pop("headers", None)
        if additional_headers:
            # Additional headers, needed for the serializer
            for key, value in additional_headers.items():
                # `setdefault`: explicitly supplied headers win over serializer-provided ones
                final_headers.setdefault(key, value)
        data = {
            "method": case.method,
            "url": url,
            "cookies": case.cookies,
            "headers": final_headers,
            "params": case.query,
            **extra,
        }
        if params is not None:
            _merge_dict_to(data, "params", params)
        if cookies is not None:
            _merge_dict_to(data, "cookies", cookies)
        return data

    def send(
        self,
        case: Case,
        *,
        session: requests.Session | None = None,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> requests.Response:
        """Send `case` over the network, converting request timeouts into Schemathesis timeout errors."""
        import requests
        from urllib3.exceptions import ReadTimeoutError

        data = self.serialize_case(case, base_url=base_url, headers=headers, params=params, cookies=cookies)
        data.update(kwargs)
        data.setdefault("timeout", DEFAULT_RESPONSE_TIMEOUT / 1000)
        if session is None:
            # Only validate when we create the session ourselves; an externally supplied
            # session may be an ASGI/WSGI test client with different URL requirements
            validate_vanilla_requests_kwargs(data)
            session = requests.Session()
            close_session = True
        else:
            close_session = False
        verify = data.get("verify", True)
        try:
            with case.operation.schema.ratelimit():
                response = session.request(**data)  # type: ignore
        except (requests.Timeout, requests.ConnectionError) as exc:
            if isinstance(exc, requests.ConnectionError):
                # Only read timeouts are converted; other connection errors propagate unchanged
                if not isinstance(exc.args[0], ReadTimeoutError):
                    raise
                # `ConnectionError` does not carry a prepared request, so rebuild one
                # from the serialized data to produce the reproduction code sample
                req = requests.Request(
                    method=data["method"].upper(),
                    url=data["url"],
                    headers=data["headers"],
                    files=data.get("files"),
                    data=data.get("data") or {},
                    json=data.get("json"),
                    params=data.get("params") or {},
                    auth=data.get("auth"),
                    cookies=data["cookies"],
                    hooks=data.get("hooks"),
                )
                request = session.prepare_request(req)
            else:
                request = cast(requests.PreparedRequest, exc.request)
            timeout = 1000 * data["timeout"]  # It is defined and not empty, since the exception happened
            code_message = case._get_code_message(case.operation.schema.code_sample_style, request, verify=verify)
            message = f"The server failed to respond within the specified limit of {timeout:.2f}ms"
            raise get_timeout_error(case.operation.verbose_name, timeout)(
                f"\n\n1. {failures.RequestTimeout.title}\n\n{message}\n\n{code_message}",
                context=failures.RequestTimeout(message=message, timeout=timeout),
            ) from None
        # Stash verification status & the session on the response for later reporting
        response.verify = verify  # type: ignore[attr-defined]
        response._session = session  # type: ignore[attr-defined]
        if close_session:
            session.close()
        return response
201
+
202
+
203
+ def _merge_dict_to(data: dict[str, Any], data_key: str, new: dict[str, Any]) -> None:
204
+ original = data[data_key] or {}
205
+ for key, value in new.items():
206
+ original[key] = value
207
+ data[data_key] = original
208
+
209
+
210
def prepare_timeout(timeout: int | None) -> float | None:
    """Request timeout is in milliseconds, but `requests` uses seconds."""
    if timeout is None:
        return None
    return timeout / 1000
216
+
217
+
218
def validate_vanilla_requests_kwargs(data: dict[str, Any]) -> None:
    """Check arguments for `requests.Session.request`.

    Some arguments can be valid for cases like ASGI integration, but at the same time they won't work for the regular
    `requests` calls. In such cases we need to avoid an obscure error message, that comes from `requests`.
    """
    url = data["url"]
    if urlparse(url).netloc:
        return
    # Inspect the call stack so the hint mentions the `Case` method the user actually invoked
    method_name = next(
        (frame.function for frame in inspect.stack()[1:] if frame.function == "call_and_validate"),
        "call",
    )
    raise RuntimeError(
        "The `base_url` argument is required when specifying a schema via a file, so Schemathesis knows where to send the data. \n"
        f"Pass `base_url` either to the `schemathesis.from_*` loader or to the `Case.{method_name}`.\n"
        f"If you use the ASGI integration, please supply your test client "
        f"as the `session` argument to `call`.\nURL: {url}"
    )
238
+
239
+
240
@dataclass
class ASGITransport(RequestsTransport):
    """Transport that routes `requests`-style calls to an in-process ASGI application via a test client."""

    app: ASGI2App | ASGI3App

    def send(
        self,
        case: Case,
        *,
        session: requests.Session | None = None,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> requests.Response:
        """Send `case` to the wrapped ASGI app.

        NOTE(review): `session` is accepted for interface compatibility but is replaced by
        the ASGI test client below — confirm no caller relies on passing a session here.
        """
        from starlette_testclient import TestClient as ASGIClient

        if base_url is None:
            base_url = case.get_full_base_url()
        with ASGIClient(self.app) as client:
            # The test client acts as a `requests.Session` for the parent implementation
            return super().send(
                case, session=client, base_url=base_url, headers=headers, params=params, cookies=cookies, **kwargs
            )
263
+
264
+
265
@dataclass
class WSGITransport:
    """Transport that calls a WSGI application in-process via `werkzeug.Client`."""

    app: WSGIApplication

    def serialize_case(
        self,
        case: Case,
        *,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Build keyword arguments for `werkzeug.Client.open` from `case`.

        NOTE(review): `base_url` and `cookies` are accepted for interface compatibility
        but unused here — cookies are applied separately in `send` via `cookie_handler`.
        """
        final_headers = case._get_headers(headers)
        media_type: str | None
        # Fall back to the operation's default media type when a body is present without one
        if case.body is not NOT_SET and case.media_type is None:
            media_type = case.operation._get_default_media_type()
        else:
            media_type = case.media_type
        if media_type and not isinstance(case.body, NotSet):
            # If we need to send a payload, then the Content-Type header should be set
            final_headers["Content-Type"] = media_type
        extra: dict[str, Any]
        serializer = case._get_serializer(media_type)
        if serializer is not None and not isinstance(case.body, NotSet):
            context = SerializerContext(case=case)
            extra = serializer.as_werkzeug(context, case._get_body())
        else:
            extra = {}
        data = {
            "method": case.method,
            "path": case.operation.schema.get_full_path(case.formatted_path),
            # Convert to a regular dictionary, as we use `CaseInsensitiveDict` which is not supported by Werkzeug
            "headers": dict(final_headers),
            "query_string": case.query,
            **extra,
        }
        if params is not None:
            _merge_dict_to(data, "query_string", params)
        return data

    def send(
        self,
        case: Case,
        *,
        session: Any = None,
        base_url: str | None = None,
        headers: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        cookies: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> WSGIResponse:
        """Invoke the WSGI app and return a `WSGIResponse` enriched with request/elapsed metadata."""
        import requests
        import werkzeug

        from .responses import WSGIResponse

        # An explicit `app` kwarg (if truthy) overrides the instance's application
        application = kwargs.pop("app", self.app) or self.app
        data = self.serialize_case(case, headers=headers, params=params)
        data.update(kwargs)
        client = werkzeug.Client(application, WSGIResponse)
        # Case-level cookies first, call-level cookies take precedence
        cookies = {**(case.cookies or {}), **(cookies or {})}
        with cookie_handler(client, cookies), case.operation.schema.ratelimit():
            start = time.monotonic()
            response = client.open(**data)
            elapsed = time.monotonic() - start
        # Build an equivalent `requests` request so reproduction code samples can be generated
        requests_kwargs = RequestsTransport().serialize_case(
            case,
            base_url=case.get_full_base_url(),
            headers=headers,
            params=params,
            cookies=cookies,
        )
        response.request = requests.Request(**requests_kwargs).prepare()
        response.elapsed = timedelta(seconds=elapsed)
        return response
341
+
342
+
343
@contextmanager
def cookie_handler(client: werkzeug.Client, cookies: Cookies | None) -> Generator[None, None, None]:
    """Set cookies required for a call."""
    if not cookies:
        yield
    else:
        for key, value in cookies.items():
            # Werkzeug 3 changed the cookie API: `domain` is keyword-only instead of the first positional
            if IS_WERKZEUG_ABOVE_3:
                client.set_cookie(key=key, value=value, domain="localhost")
            else:
                client.set_cookie("localhost", key=key, value=value)
        yield
        # NOTE(review): no try/finally — cookies are not deleted if the body raises; confirm this is intentional
        for key in cookies:
            if IS_WERKZEUG_ABOVE_3:
                client.delete_cookie(key=key, domain="localhost")
            else:
                client.delete_cookie("localhost", key=key)
@@ -0,0 +1,7 @@
1
+ from inspect import iscoroutinefunction
2
+
3
+
4
def is_asgi_app(app: object) -> bool:
    """Detect whether `app` is an ASGI application, i.e. a coroutine-based callable."""
    if iscoroutinefunction(app):
        return True
    call = getattr(app, "__call__", None)  # noqa: B004
    return call is not None and iscoroutinefunction(call)
@@ -1,11 +1,14 @@
1
1
  from __future__ import annotations
2
- from typing import TYPE_CHECKING
3
2
 
4
- from ..types import RawAuth
3
+ from typing import TYPE_CHECKING, Any
4
+
5
+ from ..constants import USER_AGENT
5
6
 
6
7
  if TYPE_CHECKING:
7
8
  from requests.auth import HTTPDigestAuth
8
9
 
10
+ from ..types import RawAuth
11
+
9
12
 
10
13
def get_requests_auth(auth: RawAuth | None, auth_type: str | None) -> HTTPDigestAuth | RawAuth | None:
    """Convert raw `(username, password)` credentials into a `requests`-compatible auth object.

    Wraps the tuple in `HTTPDigestAuth` when digest auth is requested; otherwise
    returns the input unchanged.
    """
    from requests.auth import HTTPDigestAuth

    if auth and auth_type == "digest":
        return HTTPDigestAuth(*auth)
    return auth
19
+
20
+
21
def prepare_wsgi_headers(headers: dict[str, Any] | None, auth: RawAuth | None, auth_type: str | None) -> dict[str, Any]:
    """Fill in the default `User-Agent` and optional `Authorization` headers for WSGI calls."""
    headers = headers or {}
    # Case-insensitive presence check; don't clobber a user-supplied User-Agent
    if not any(name.lower() == "user-agent" for name in headers):
        headers["User-Agent"] = USER_AGENT
    authorization = get_wsgi_auth(auth, auth_type)
    if authorization:
        headers["Authorization"] = authorization
    return headers
29
+
30
+
31
def get_wsgi_auth(auth: RawAuth | None, auth_type: str | None) -> str | None:
    """Build an `Authorization` header value for WSGI test requests, or `None` without credentials."""
    from requests.auth import _basic_auth_str

    if not auth:
        return None
    if auth_type == "digest":
        raise ValueError("Digest auth is not supported for WSGI apps")
    return _basic_auth_str(*auth)
@@ -1,4 +1,5 @@
1
- from typing import Tuple, Generator
1
+ from functools import lru_cache
2
+ from typing import Generator, Tuple
2
3
 
3
4
 
4
5
  def _parseparam(s: str) -> Generator[str, None, None]:
@@ -30,6 +31,7 @@ def parse_header(line: str) -> Tuple[str, dict]:
30
31
  return key, pdict
31
32
 
32
33
 
34
+ @lru_cache
33
35
  def parse_content_type(content_type: str) -> Tuple[str, str]:
34
36
  """Parse Content Type and return main type and subtype."""
35
37
  try:
@@ -1,4 +1,5 @@
1
1
  from __future__ import annotations
2
+
2
3
  import re
3
4
  from typing import Any
4
5
 
@@ -25,8 +26,8 @@ INVALID_HEADER_RE = re.compile(r"\n(?![ \t])|\r(?![ \t\n])")
25
26
 
26
27
 
27
28
  def has_invalid_characters(name: str, value: str) -> bool:
28
- from requests.utils import check_header_validity
29
29
  from requests.exceptions import InvalidHeader
30
+ from requests.utils import check_header_validity
30
31
 
31
32
  try:
32
33
  check_header_validity((name, value))
@@ -1,20 +1,25 @@
1
1
  from __future__ import annotations
2
2
 
3
- import sys
4
3
  import json
5
- from typing import Union, TYPE_CHECKING, NoReturn, Any
6
- from .._compat import JSONMixin
4
+ import sys
5
+ from typing import TYPE_CHECKING, Any, NoReturn, Union
6
+
7
7
  from werkzeug.wrappers import Response as BaseResponse
8
8
 
9
+ from .._compat import JSONMixin
10
+
9
11
  if TYPE_CHECKING:
12
+ from datetime import timedelta
13
+
10
14
  from httpx import Response as httpxResponse
11
- from requests import Response as requestsResponse
12
15
  from requests import PreparedRequest
16
+ from requests import Response as requestsResponse
13
17
 
14
18
 
15
19
  class WSGIResponse(BaseResponse, JSONMixin):
16
20
  # We store "requests" request to build a reproduction code
17
21
  request: PreparedRequest
22
+ elapsed: timedelta
18
23
 
19
24
  def on_json_loading_failed(self, e: json.JSONDecodeError) -> NoReturn:
20
25
  # We don't need a werkzeug-specific exception when JSON parsing error happens
schemathesis/types.py CHANGED
@@ -1,8 +1,10 @@
1
+ import enum
1
2
  from pathlib import Path
2
3
  from typing import TYPE_CHECKING, Any, Callable, Dict, List, Set, Tuple, Union
3
4
 
4
5
  if TYPE_CHECKING:
5
6
  from hypothesis.strategies import SearchStrategy
7
+
6
8
  from .hooks import HookContext
7
9
 
8
10
  PathLike = Union[Path, str]
@@ -33,3 +35,10 @@ Hook = Union[
33
35
  RawAuth = Tuple[str, str]
34
36
  # Generic test with any arguments and no return
35
37
  GenericTest = Callable[..., None]
38
+
39
+
40
class Specification(str, enum.Enum):
    """Specification of the given schema."""

    # Subclassing `str` makes members compare equal to (and serialize as) their raw string values.
    OPENAPI = "openapi"
    GRAPHQL = "graphql"