schemathesis 3.18.5__py3-none-any.whl → 3.19.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. schemathesis/__init__.py +1 -3
  2. schemathesis/auths.py +218 -43
  3. schemathesis/cli/__init__.py +37 -20
  4. schemathesis/cli/callbacks.py +13 -1
  5. schemathesis/cli/cassettes.py +18 -18
  6. schemathesis/cli/context.py +25 -24
  7. schemathesis/cli/debug.py +3 -3
  8. schemathesis/cli/junitxml.py +4 -4
  9. schemathesis/cli/options.py +1 -1
  10. schemathesis/cli/output/default.py +2 -0
  11. schemathesis/constants.py +3 -3
  12. schemathesis/exceptions.py +9 -9
  13. schemathesis/extra/pytest_plugin.py +1 -1
  14. schemathesis/failures.py +65 -66
  15. schemathesis/filters.py +269 -0
  16. schemathesis/hooks.py +11 -11
  17. schemathesis/lazy.py +21 -16
  18. schemathesis/models.py +149 -107
  19. schemathesis/parameters.py +12 -7
  20. schemathesis/runner/events.py +55 -55
  21. schemathesis/runner/impl/core.py +26 -26
  22. schemathesis/runner/impl/solo.py +6 -7
  23. schemathesis/runner/impl/threadpool.py +5 -5
  24. schemathesis/runner/serialization.py +50 -50
  25. schemathesis/schemas.py +38 -23
  26. schemathesis/serializers.py +3 -3
  27. schemathesis/service/ci.py +25 -25
  28. schemathesis/service/client.py +2 -2
  29. schemathesis/service/events.py +12 -13
  30. schemathesis/service/hosts.py +4 -4
  31. schemathesis/service/metadata.py +14 -15
  32. schemathesis/service/models.py +12 -13
  33. schemathesis/service/report.py +30 -31
  34. schemathesis/service/serialization.py +2 -4
  35. schemathesis/specs/graphql/loaders.py +21 -2
  36. schemathesis/specs/graphql/schemas.py +8 -8
  37. schemathesis/specs/openapi/expressions/context.py +4 -4
  38. schemathesis/specs/openapi/expressions/lexer.py +11 -12
  39. schemathesis/specs/openapi/expressions/nodes.py +16 -16
  40. schemathesis/specs/openapi/expressions/parser.py +1 -1
  41. schemathesis/specs/openapi/links.py +15 -17
  42. schemathesis/specs/openapi/loaders.py +29 -2
  43. schemathesis/specs/openapi/negative/__init__.py +5 -5
  44. schemathesis/specs/openapi/negative/mutations.py +6 -6
  45. schemathesis/specs/openapi/parameters.py +12 -13
  46. schemathesis/specs/openapi/references.py +2 -2
  47. schemathesis/specs/openapi/schemas.py +11 -15
  48. schemathesis/specs/openapi/security.py +12 -7
  49. schemathesis/specs/openapi/stateful/links.py +4 -4
  50. schemathesis/stateful.py +19 -19
  51. schemathesis/targets.py +5 -6
  52. schemathesis/throttling.py +34 -0
  53. schemathesis/types.py +11 -13
  54. schemathesis/utils.py +2 -2
  55. {schemathesis-3.18.5.dist-info → schemathesis-3.19.1.dist-info}/METADATA +4 -3
  56. schemathesis-3.19.1.dist-info/RECORD +107 -0
  57. schemathesis-3.18.5.dist-info/RECORD +0 -105
  58. {schemathesis-3.18.5.dist-info → schemathesis-3.19.1.dist-info}/WHEEL +0 -0
  59. {schemathesis-3.18.5.dist-info → schemathesis-3.19.1.dist-info}/entry_points.txt +0 -0
  60. {schemathesis-3.18.5.dist-info → schemathesis-3.19.1.dist-info}/licenses/LICENSE +0 -0
schemathesis/serializers.py
@@ -1,9 +1,9 @@
 import binascii
 import os
+from dataclasses import dataclass
 from io import BytesIO
 from typing import TYPE_CHECKING, Any, Callable, Collection, Dict, Generator, Optional, Type
 
-import attr
 import yaml
 from typing_extensions import Protocol, runtime_checkable
 
@@ -22,14 +22,14 @@ except ImportError:
 SERIALIZERS: Dict[str, Type["Serializer"]] = {}
 
 
-@attr.s(slots=True)  # pragma: no mutate
+@dataclass
 class SerializerContext:
     """The context for serialization process.
 
     :ivar Case case: Generated example that is being processed.
     """
 
-    case: "Case" = attr.ib()  # pragma: no mutate
+    case: "Case"
 
 
 @runtime_checkable
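
The change above is the template for most of this release: `@attr.s(slots=True)` plus `attr.ib()` declarations become standard-library dataclasses. A minimal, illustrative sketch of the resulting pattern (not Schemathesis code; a plain string stands in for the real `Case` object):

from dataclasses import dataclass


@dataclass
class SerializerContextSketch:
    """Illustrative stand-in for SerializerContext after the migration."""

    case: str  # the real class stores a `Case` instance here


ctx = SerializerContextSketch(case="generated example")
print(ctx)  # SerializerContextSketch(case='generated example')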
schemathesis/service/ci.py
@@ -1,8 +1,8 @@
 import enum
 import os
+from dataclasses import asdict, dataclass
 from typing import Dict, Optional
 
-import attr
 from typing_extensions import Protocol, runtime_checkable
 
 
@@ -54,52 +54,52 @@ def detect() -> Optional[CIProvider]:
     return None
 
 
-def asdict(env: Environment) -> Dict[str, Optional[str]]:
-    data = attr.asdict(env)
+def _asdict(env: Environment) -> Dict[str, Optional[str]]:
+    data = asdict(env)
     data["provider"] = env.provider.value
     return data
 
 
-@attr.s(slots=True)
+@dataclass
 class GitHubActionsEnvironment:
     """Useful data to capture from GitHub Actions environment."""
 
     provider = CIProvider.GITHUB
     variable_name = "GITHUB_ACTIONS"
     verbose_name = "GitHub Actions"
-    asdict = asdict
+    asdict = _asdict
 
     # GitHub API URL.
     # For example, `https://api.github.com`
-    api_url: str = attr.ib()
+    api_url: str
     # The owner and repository name.
     # For example, `schemathesis/schemathesis`.
-    repository: str = attr.ib()
+    repository: str
     # The name of the person or app that initiated the workflow.
     # For example, `Stranger6667`
-    actor: str = attr.ib()
+    actor: str
     # The commit SHA that triggered the workflow.
     # For example, `e56e13224f08469841e106449f6467b769e2afca`
-    sha: str = attr.ib()
+    sha: str
     # A unique number for each workflow run within a repository.
     # For example, `1658821493`.
-    run_id: str = attr.ib()
+    run_id: str
     # The name of the workflow.
     # For example, `My test workflow`.
-    workflow: str = attr.ib()
+    workflow: str
     # The head ref or source branch of the pull request in a workflow run.
     # For example, `dd/report-ci`.
-    head_ref: Optional[str] = attr.ib()
+    head_ref: Optional[str]
     # The name of the base ref or target branch of the pull request in a workflow run.
     # For example, `main`.
-    base_ref: Optional[str] = attr.ib()
+    base_ref: Optional[str]
     # The branch or tag ref that triggered the workflow run.
     # This is only set if a branch or tag is available for the event type.
     # For example, `refs/pull/1533/merge`
-    ref: Optional[str] = attr.ib()
+    ref: Optional[str]
     # The Schemathesis GitHub Action version.
     # For example `v1.0.1`
-    action_ref: Optional[str] = attr.ib()
+    action_ref: Optional[str]
 
     @classmethod
     def is_set(cls) -> bool:
@@ -135,42 +135,42 @@ class GitHubActionsEnvironment:
         }
 
 
-@attr.s(slots=True)
+@dataclass
 class GitLabCIEnvironment:
     """Useful data to capture from GitLab CI environment."""
 
     provider = CIProvider.GITLAB
     variable_name = "GITLAB_CI"
     verbose_name = "GitLab CI"
-    asdict = asdict
+    asdict = _asdict
 
     # GitLab API URL
     # For example, `https://gitlab.com/api/v4`
-    api_v4_url: str = attr.ib()
+    api_v4_url: str
     # The ID of the current project.
     # For example, `12345678`
-    project_id: str = attr.ib()
+    project_id: str
     # The username of the user who started the job.
     # For example, `Stranger6667`
-    user_login: str = attr.ib()
+    user_login: str
     # The commit revision the project is built for.
     # For example, `e56e13224f08469841e106449f6467b769e2afca`
-    commit_sha: str = attr.ib()
+    commit_sha: str
     # NOTE: `commit_branch` and `merge_request_source_branch_name` may mean the same thing, but they are available
     # in different context. There are also a couple of `CI_BUILD_*` variables that could be used, but they are
     # not documented.
     # The commit branch name. Not available in merge request pipelines or tag pipelines.
     # For example, `dd/report-ci`.
-    commit_branch: Optional[str] = attr.ib()
+    commit_branch: Optional[str]
     # The source branch name of the merge request. Only available in merge request pipelines.
     # For example, `dd/report-ci`.
-    merge_request_source_branch_name: Optional[str] = attr.ib()
+    merge_request_source_branch_name: Optional[str]
     # The target branch name of the merge request.
     # For example, `main`.
-    merge_request_target_branch_name: Optional[str] = attr.ib()
+    merge_request_target_branch_name: Optional[str]
     # The project-level internal ID of the merge request.
     # For example, `42`.
-    merge_request_iid: Optional[str] = attr.ib()
+    merge_request_iid: Optional[str]
 
     @classmethod
     def is_set(cls) -> bool:
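
The module-level helper is renamed to `_asdict` so the stdlib `asdict` can be imported directly; it dumps the dataclass fields and then adds the class-level `provider` enum as its string value. A self-contained illustration with a made-up environment class (hypothetical, not part of the package):

import enum
from dataclasses import asdict, dataclass


class Provider(enum.Enum):
    GITHUB = "github"


@dataclass
class FakeEnvironment:
    # Class-level constant without an annotation, so `asdict` skips it,
    # mirroring how `provider` is declared on GitHubActionsEnvironment.
    provider = Provider.GITHUB
    repository: str
    actor: str


def _asdict(env):
    data = asdict(env)
    data["provider"] = env.provider.value
    return data


print(_asdict(FakeEnvironment(repository="schemathesis/schemathesis", actor="Stranger6667")))
# {'repository': 'schemathesis/schemathesis', 'actor': 'Stranger6667', 'provider': 'github'}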
schemathesis/service/client.py
@@ -1,9 +1,9 @@
 import hashlib
 import http
+from dataclasses import asdict
 from typing import Any, Optional, Union
 from urllib.parse import urljoin
 
-import attr
 import requests
 from requests.adapters import HTTPAdapter, Retry
 
@@ -51,7 +51,7 @@ class ServiceClient(requests.Session):
 
     def login(self, metadata: Metadata) -> AuthResponse:
         """Send a login request."""
-        response = self.post("/auth/cli/login/", json={"metadata": attr.asdict(metadata)})
+        response = self.post("/auth/cli/login/", json={"metadata": asdict(metadata)})
         data = response.json()
         return AuthResponse(username=data["username"])
 
schemathesis/service/events.py
@@ -1,7 +1,6 @@
+from dataclasses import dataclass
 from typing import Optional
 
-import attr
-
 from ..utils import format_exception
 from . import ci
 
@@ -17,40 +16,40 @@ class Event:
         return self.__class__.__name__.upper()
 
 
-@attr.s(slots=True)
+@dataclass
 class Metadata(Event):
     """Meta-information about the report."""
 
-    size: int = attr.ib()
-    ci_environment: Optional[ci.Environment] = attr.ib()
+    size: int
+    ci_environment: Optional[ci.Environment]
 
 
-@attr.s(slots=True)
+@dataclass
 class Completed(Event):
     """Report uploaded successfully."""
 
-    message: str = attr.ib()
-    next_url: str = attr.ib()
+    message: str
+    next_url: str
 
 
-@attr.s(slots=True)
+@dataclass
 class Error(Event):
     """Internal error inside the Schemathesis.io handler."""
 
-    exception: Exception = attr.ib()
+    exception: Exception
 
     def get_message(self, include_traceback: bool = False) -> str:
         return format_exception(self.exception, include_traceback=include_traceback)
 
 
-@attr.s(slots=True)
+@dataclass
 class Failed(Event):
     """A client-side error which should be displayed to the user."""
 
-    detail: str = attr.ib()
+    detail: str
 
 
-@attr.s(slots=True)
+@dataclass
 class Timeout(Event):
     """The handler did not finish its work in time.
 
schemathesis/service/hosts.py
@@ -1,10 +1,10 @@
 """Work with stored auth data."""
 import enum
 import tempfile
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Dict, Optional
 
-import attr
 import tomli
 import tomli_w
 
@@ -12,12 +12,12 @@ from ..types import PathLike
 from .constants import DEFAULT_HOSTNAME, DEFAULT_HOSTS_PATH, HOSTS_FORMAT_VERSION
 
 
-@attr.s(slots=True)
+@dataclass
 class HostData:
     """Stored data related to a host."""
 
-    hostname: str = attr.ib()
-    hosts_file: PathLike = attr.ib()
+    hostname: str
+    hosts_file: PathLike
 
     def load(self) -> Dict[str, Any]:
         return load(self.hosts_file).get(self.hostname, {})
schemathesis/service/metadata.py
@@ -1,42 +1,41 @@
 """Useful info to collect from CLI usage."""
 import platform
-
-import attr
+from dataclasses import dataclass, field
 
 from ..constants import __version__
 
 
-@attr.s(slots=True)
+@dataclass
 class PlatformMetadata:
     # System / OS name, e.g. "Linux" or "Windows".
-    system: str = attr.ib(factory=platform.system)
+    system: str = field(default_factory=platform.system)
     # System release, e.g. "5.14" or "NT".
-    release: str = attr.ib(factory=platform.release)
+    release: str = field(default_factory=platform.release)
     # Machine type, e.g. "i386".
-    machine: str = attr.ib(factory=platform.machine)
+    machine: str = field(default_factory=platform.machine)
 
 
-@attr.s(slots=True)
+@dataclass
 class InterpreterMetadata:
     # The Python version as "major.minor.patch".
-    version: str = attr.ib(factory=platform.python_version)
+    version: str = field(default_factory=platform.python_version)
     # Python implementation, e.g. "CPython" or "PyPy".
-    implementation: str = attr.ib(factory=platform.python_implementation)
+    implementation: str = field(default_factory=platform.python_implementation)
 
 
-@attr.s(slots=True)
+@dataclass
 class CliMetadata:
     # Schemathesis package version.
-    version: str = attr.ib(default=__version__)
+    version: str = __version__
 
 
-@attr.s(slots=True)
+@dataclass
 class Metadata:
     """CLI environment metadata."""
 
     # Information about the host platform.
-    platform: PlatformMetadata = attr.ib(factory=PlatformMetadata)
+    platform: PlatformMetadata = field(default_factory=PlatformMetadata)
     # Python interpreter info.
-    interpreter: InterpreterMetadata = attr.ib(factory=InterpreterMetadata)
+    interpreter: InterpreterMetadata = field(default_factory=InterpreterMetadata)
     # CLI info itself.
-    cli: CliMetadata = attr.ib(factory=CliMetadata)
+    cli: CliMetadata = field(default_factory=CliMetadata)
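
With `field(default_factory=...)`, the `platform.*` probes run when `Metadata()` is instantiated rather than when the module is imported, and `dataclasses.asdict` (used by the login call in client.py above) recurses into the nested dataclasses. A simplified sketch, assuming default values only:

import platform
from dataclasses import asdict, dataclass, field


@dataclass
class PlatformInfo:  # trimmed-down stand-in for PlatformMetadata
    system: str = field(default_factory=platform.system)


@dataclass
class Meta:  # trimmed-down stand-in for Metadata
    platform: PlatformInfo = field(default_factory=PlatformInfo)


print(asdict(Meta()))  # e.g. {'platform': {'system': 'Linux'}}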
schemathesis/service/models.py
@@ -1,26 +1,25 @@
+from dataclasses import dataclass
 from typing import Optional
 
-import attr
 
-
-@attr.s(slots=True)
+@dataclass
 class ApiDetails:
-    location: str = attr.ib()
-    base_url: Optional[str] = attr.ib()
+    location: str
+    base_url: Optional[str]
 
 
-@attr.s(slots=True)
+@dataclass
 class AuthResponse:
-    username: str = attr.ib()
+    username: str
 
 
-@attr.s(slots=True)
+@dataclass
 class UploadResponse:
-    message: str = attr.ib()
-    next_url: str = attr.ib()
-    correlation_id: str = attr.ib()
+    message: str
+    next_url: str
+    correlation_id: str
 
 
-@attr.s(slots=True)
+@dataclass
 class FailedUploadResponse:
-    detail: str = attr.ib()
+    detail: str
schemathesis/service/report.py
@@ -5,11 +5,11 @@ import tarfile
 import threading
 import time
 from contextlib import suppress
+from dataclasses import asdict, dataclass, field
 from io import BytesIO
 from queue import Queue
 from typing import Any, Dict, Optional
 
-import attr
 import click
 
 from ..cli.context import ExecutionContext
@@ -23,15 +23,15 @@ from .models import UploadResponse
 from .serialization import serialize_event
 
 
-@attr.s(slots=True)
+@dataclass
 class ReportWriter:
     """Schemathesis.io test run report.
 
     Simplifies adding new files to the archive.
     """
 
-    _tar: tarfile.TarFile = attr.ib()
-    _events_count: int = attr.ib(default=0)
+    _tar: tarfile.TarFile
+    _events_count: int = 0
 
     def add_json_file(self, name: str, data: Any) -> None:
         buffer = BytesIO()
@@ -63,7 +63,7 @@ class ReportWriter:
                 # The time that the test run began
                 "started_at": started_at,
                 # Metadata about CLI environment
-                "environment": attr.asdict(metadata),
+                "environment": asdict(metadata),
                 # Environment variables specific for CI providers
                 "ci": ci_environment.asdict() if ci_environment is not None else None,
                 # CLI usage statistic
@@ -80,7 +80,6 @@ class ReportWriter:
         self.add_json_file(filename, serialize_event(event))
 
 
-@attr.s(slots=True)  # pragma: no mutate
 class BaseReportHandler(EventHandler):
     in_queue: Queue
     worker: threading.Thread
@@ -96,20 +95,20 @@ class BaseReportHandler(EventHandler):
         self.worker.join(WORKER_JOIN_TIMEOUT)
 
 
-@attr.s(slots=True)  # pragma: no mutate
+@dataclass
 class ServiceReportHandler(BaseReportHandler):
-    client: ServiceClient = attr.ib()  # pragma: no mutate
-    host_data: HostData = attr.ib()  # pragma: no mutate
-    api_name: Optional[str] = attr.ib()  # pragma: no mutate
-    location: str = attr.ib()  # pragma: no mutate
-    base_url: Optional[str] = attr.ib()  # pragma: no mutate
-    started_at: str = attr.ib()  # pragma: no mutate
-    telemetry: bool = attr.ib()  # pragma: no mutate
-    out_queue: Queue = attr.ib()  # pragma: no mutate
-    in_queue: Queue = attr.ib(factory=Queue)  # pragma: no mutate
-    worker: threading.Thread = attr.ib(init=False)  # pragma: no mutate
-
-    def __attrs_post_init__(self) -> None:
+    client: ServiceClient
+    host_data: HostData
+    api_name: Optional[str]
+    location: str
+    base_url: Optional[str]
+    started_at: str
+    telemetry: bool
+    out_queue: Queue
+    in_queue: Queue = field(default_factory=Queue)
+    worker: threading.Thread = field(init=False)
+
+    def __post_init__(self) -> None:
         self.worker = threading.Thread(
             target=write_remote,
             kwargs={
@@ -191,19 +190,19 @@ def write_remote(
         out_queue.put(events.Error(exc))
 
 
-@attr.s(slots=True)  # pragma: no mutate
+@dataclass
 class FileReportHandler(BaseReportHandler):
-    file_handle: click.utils.LazyFile = attr.ib()  # pragma: no mutate
-    api_name: Optional[str] = attr.ib()  # pragma: no mutate
-    location: str = attr.ib()  # pragma: no mutate
-    base_url: Optional[str] = attr.ib()  # pragma: no mutate
-    started_at: str = attr.ib()  # pragma: no mutate
-    telemetry: bool = attr.ib()  # pragma: no mutate
-    out_queue: Queue = attr.ib()  # pragma: no mutate
-    in_queue: Queue = attr.ib(factory=Queue)  # pragma: no mutate
-    worker: threading.Thread = attr.ib(init=False)  # pragma: no mutate
-
-    def __attrs_post_init__(self) -> None:
+    file_handle: click.utils.LazyFile
+    api_name: Optional[str]
+    location: str
+    base_url: Optional[str]
+    started_at: str
+    telemetry: bool
+    out_queue: Queue
+    in_queue: Queue = field(default_factory=Queue)
+    worker: threading.Thread = field(init=False)
+
+    def __post_init__(self) -> None:
        self.worker = threading.Thread(
            target=write_file,
            kwargs={
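
The handler classes keep their non-init `worker` attribute: `attr.ib(init=False)` plus `__attrs_post_init__` maps onto `field(init=False)` plus `__post_init__` in the dataclass world. A small runnable sketch of that pattern (illustrative only, not the real ServiceReportHandler):

import threading
from dataclasses import dataclass, field
from queue import Queue


@dataclass
class HandlerSketch:
    out_queue: Queue
    in_queue: Queue = field(default_factory=Queue)
    # Excluded from __init__ and assigned in __post_init__, like the
    # worker thread in ServiceReportHandler / FileReportHandler.
    worker: threading.Thread = field(init=False)

    def __post_init__(self) -> None:
        self.worker = threading.Thread(target=self.in_queue.get, daemon=True)


handler = HandlerSketch(out_queue=Queue())
print(handler.worker.daemon)  # True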
schemathesis/service/serialization.py
@@ -1,7 +1,6 @@
+from dataclasses import asdict
 from typing import Any, Callable, Dict, List, Optional, TypeVar, cast
 
-import attr
-
 from ..models import Response
 from ..runner import events
 from ..runner.serialization import SerializedCase
@@ -70,7 +69,7 @@ def serialize_after_execution(event: events.AfterExecution) -> Optional[Dict[str
                 "response": _serialize_response(check.response) if check.response is not None else None,
                 "example": _serialize_case(check.example),
                 "message": check.message,
-                "context": attr.asdict(check.context) if check.context is not None else None,
+                "context": asdict(check.context) if check.context is not None else None,
                 "history": [
                     {"case": _serialize_case(entry.case), "response": _serialize_response(entry.response)}
                     for entry in check.history
@@ -138,7 +137,6 @@ def serialize_event(
     extra: Optional[Dict[str, Any]] = None,
 ) -> Dict[str, Optional[Dict[str, Any]]]:
     """Turn an event into JSON-serializable structure."""
-    # Due to https://github.com/python-attrs/attrs/issues/864 it is easier to implement filtration manually
     # Use the explicitly provided serializer for this event and fallback to default one if it is not provided
     serializer = {
         events.Initialized: on_initialized,
schemathesis/specs/graphql/loaders.py
@@ -5,6 +5,7 @@ import backoff
 import graphql
 import requests
 from graphql import ExecutionResult
+from pyrate_limiter import Limiter
 from starlette.applications import Starlette
 from starlette_testclient import TestClient as ASGIClient
 from werkzeug import Client
@@ -13,6 +14,7 @@ from yarl import URL
 from ...constants import DEFAULT_DATA_GENERATION_METHODS, WAIT_FOR_SCHEMA_INTERVAL, CodeSampleStyle
 from ...exceptions import HTTPError
 from ...hooks import HookContext, dispatch
+from ...throttling import build_limiter
 from ...types import DataGenerationMethodInput, PathLike
 from ...utils import WSGIResponse, prepare_data_generation_methods, require_relative_url, setup_headers
 from .schemas import GraphQLSchema
@@ -28,6 +30,7 @@ def from_path(
     base_url: Optional[str] = None,
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
+    rate_limit: Optional[str] = None,
     encoding: str = "utf8",
 ) -> GraphQLSchema:
     """Load GraphQL schema via a file from an OS path.
@@ -43,6 +46,7 @@ def from_path(
         data_generation_methods=data_generation_methods,
         code_sample_style=code_sample_style,
         location=pathlib.Path(path).absolute().as_uri(),
+        rate_limit=rate_limit,
     )
 
 
@@ -55,6 +59,7 @@ def from_url(
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
     wait_for_schema: Optional[float] = None,
+    rate_limit: Optional[str] = None,
     **kwargs: Any,
 ) -> GraphQLSchema:
     """Load GraphQL schema from the network.
@@ -68,8 +73,10 @@ def from_url(
     """
     setup_headers(kwargs)
     kwargs.setdefault("json", {"query": INTROSPECTION_QUERY})
-    if not base_url and port:
-        base_url = str(URL(url).with_port(port))
+    if port:
+        url = str(URL(url).with_port(port))
+        if not base_url:
+            base_url = url
 
     if wait_for_schema is not None:
 
@@ -94,6 +101,7 @@ def from_url(
         app=app,
         data_generation_methods=data_generation_methods,
         code_sample_style=code_sample_style,
+        rate_limit=rate_limit,
     )
 
 
@@ -105,6 +113,7 @@ def from_file(
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
     location: Optional[str] = None,
+    rate_limit: Optional[str] = None,
 ) -> GraphQLSchema:
     """Load GraphQL schema from a file descriptor or a string.
 
@@ -130,6 +139,7 @@ def from_file(
         data_generation_methods=data_generation_methods,
         code_sample_style=code_sample_style,
         location=location,
+        rate_limit=rate_limit,
     )
 
 
@@ -141,6 +151,7 @@ def from_dict(
     location: Optional[str] = None,
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
+    rate_limit: Optional[str] = None,
 ) -> GraphQLSchema:
     """Load GraphQL schema from a Python dictionary.
 
@@ -153,6 +164,9 @@ def from_dict(
     _code_sample_style = CodeSampleStyle.from_str(code_sample_style)
     hook_context = HookContext()
     dispatch("before_load_schema", hook_context, raw_schema)
+    rate_limiter: Optional[Limiter] = None
+    if rate_limit is not None:
+        rate_limiter = build_limiter(rate_limit)
     instance = GraphQLSchema(
         raw_schema,
         location=location,
@@ -160,6 +174,7 @@ def from_dict(
         app=app,
         data_generation_methods=prepare_data_generation_methods(data_generation_methods),
         code_sample_style=_code_sample_style,
+        rate_limiter=rate_limiter,
     ) # type: ignore
     dispatch("after_load_schema", hook_context, instance)
     return instance
@@ -172,6 +187,7 @@ def from_wsgi(
     base_url: Optional[str] = None,
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
+    rate_limit: Optional[str] = None,
     **kwargs: Any,
 ) -> GraphQLSchema:
     """Load GraphQL schema from a WSGI app.
@@ -194,6 +210,7 @@ def from_wsgi(
         app=app,
         data_generation_methods=data_generation_methods,
         code_sample_style=code_sample_style,
+        rate_limit=rate_limit,
     )
 
 
@@ -204,6 +221,7 @@ def from_asgi(
     base_url: Optional[str] = None,
     data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
     code_sample_style: str = CodeSampleStyle.default().name,
+    rate_limit: Optional[str] = None,
     **kwargs: Any,
 ) -> GraphQLSchema:
     """Load GraphQL schema from an ASGI app.
@@ -225,6 +243,7 @@ def from_asgi(
         app=app,
         data_generation_methods=data_generation_methods,
         code_sample_style=code_sample_style,
+        rate_limit=rate_limit,
     )
 
 
schemathesis/specs/graphql/schemas.py
@@ -1,9 +1,9 @@
 import enum
+from dataclasses import dataclass
 from enum import unique
 from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Tuple, Type, TypeVar, Union, cast
 from urllib.parse import urlsplit
 
-import attr
 import graphql
 import requests
 from hypothesis import strategies as st
@@ -31,7 +31,7 @@ class RootType(enum.Enum):
     MUTATION = enum.auto()
 
 
-@attr.s(slots=True, repr=False)  # pragma: no mutate
+@dataclass(repr=False)
 class GraphQLCase(Case):
     def as_requests_kwargs(
         self, base_url: Optional[str] = None, headers: Optional[Dict[str, str]] = None
@@ -84,19 +84,19 @@ class GraphQLCase(Case):
 C = TypeVar("C", bound=Case)
 
 
-@attr.s()
+@dataclass
 class GraphQLOperationDefinition(OperationDefinition):
-    field_name: str = attr.ib()
-    type_: graphql.GraphQLType = attr.ib()
-    root_type: RootType = attr.ib()
+    field_name: str
+    type_: graphql.GraphQLType
+    root_type: RootType
 
 
-@attr.s()  # pragma: no mutate
+@dataclass
 class GraphQLSchema(BaseSchema):
     def get_full_path(self, path: str) -> str:
         return self.base_path
 
-    @property  # pragma: no mutate
+    @property
     def verbose_name(self) -> str:
         return "GraphQL"
 