schemathesis 3.29.1__py3-none-any.whl → 3.30.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- schemathesis/__init__.py +3 -3
- schemathesis/_compat.py +2 -2
- schemathesis/_dependency_versions.py +1 -3
- schemathesis/_hypothesis.py +6 -0
- schemathesis/_lazy_import.py +1 -0
- schemathesis/_override.py +1 -0
- schemathesis/_rate_limiter.py +2 -1
- schemathesis/_xml.py +1 -0
- schemathesis/auths.py +4 -2
- schemathesis/checks.py +8 -5
- schemathesis/cli/__init__.py +8 -1
- schemathesis/cli/callbacks.py +3 -4
- schemathesis/cli/cassettes.py +6 -4
- schemathesis/cli/constants.py +2 -0
- schemathesis/cli/context.py +3 -0
- schemathesis/cli/debug.py +2 -1
- schemathesis/cli/handlers.py +1 -1
- schemathesis/cli/options.py +1 -0
- schemathesis/cli/output/default.py +50 -22
- schemathesis/cli/output/short.py +21 -10
- schemathesis/cli/sanitization.py +1 -0
- schemathesis/code_samples.py +1 -0
- schemathesis/constants.py +1 -0
- schemathesis/contrib/openapi/__init__.py +1 -1
- schemathesis/contrib/openapi/fill_missing_examples.py +2 -0
- schemathesis/contrib/openapi/formats/uuid.py +2 -1
- schemathesis/contrib/unique_data.py +2 -1
- schemathesis/exceptions.py +40 -26
- schemathesis/experimental/__init__.py +14 -0
- schemathesis/extra/_aiohttp.py +1 -0
- schemathesis/extra/_server.py +1 -0
- schemathesis/extra/pytest_plugin.py +13 -24
- schemathesis/failures.py +32 -3
- schemathesis/filters.py +2 -1
- schemathesis/fixups/__init__.py +1 -0
- schemathesis/fixups/fast_api.py +2 -2
- schemathesis/fixups/utf8_bom.py +1 -2
- schemathesis/generation/__init__.py +2 -1
- schemathesis/hooks.py +3 -1
- schemathesis/internal/copy.py +19 -3
- schemathesis/internal/deprecation.py +1 -1
- schemathesis/internal/jsonschema.py +2 -1
- schemathesis/internal/result.py +1 -1
- schemathesis/internal/transformation.py +1 -0
- schemathesis/lazy.py +3 -2
- schemathesis/loaders.py +4 -2
- schemathesis/models.py +20 -5
- schemathesis/parameters.py +1 -0
- schemathesis/runner/__init__.py +1 -1
- schemathesis/runner/events.py +21 -4
- schemathesis/runner/impl/core.py +61 -33
- schemathesis/runner/impl/solo.py +2 -1
- schemathesis/runner/impl/threadpool.py +4 -0
- schemathesis/runner/probes.py +1 -1
- schemathesis/runner/serialization.py +1 -1
- schemathesis/sanitization.py +2 -0
- schemathesis/schemas.py +1 -4
- schemathesis/service/ci.py +1 -0
- schemathesis/service/client.py +7 -7
- schemathesis/service/events.py +2 -1
- schemathesis/service/extensions.py +5 -5
- schemathesis/service/hosts.py +1 -0
- schemathesis/service/metadata.py +2 -1
- schemathesis/service/models.py +2 -1
- schemathesis/service/report.py +3 -3
- schemathesis/service/serialization.py +54 -23
- schemathesis/service/usage.py +1 -0
- schemathesis/specs/graphql/_cache.py +1 -1
- schemathesis/specs/graphql/loaders.py +1 -1
- schemathesis/specs/graphql/nodes.py +1 -0
- schemathesis/specs/graphql/scalars.py +2 -2
- schemathesis/specs/graphql/schemas.py +7 -7
- schemathesis/specs/graphql/validation.py +1 -2
- schemathesis/specs/openapi/_hypothesis.py +17 -11
- schemathesis/specs/openapi/checks.py +102 -9
- schemathesis/specs/openapi/converter.py +2 -1
- schemathesis/specs/openapi/definitions.py +2 -1
- schemathesis/specs/openapi/examples.py +7 -9
- schemathesis/specs/openapi/expressions/__init__.py +29 -2
- schemathesis/specs/openapi/expressions/context.py +1 -1
- schemathesis/specs/openapi/expressions/extractors.py +23 -0
- schemathesis/specs/openapi/expressions/lexer.py +19 -18
- schemathesis/specs/openapi/expressions/nodes.py +24 -4
- schemathesis/specs/openapi/expressions/parser.py +26 -5
- schemathesis/specs/openapi/filters.py +1 -0
- schemathesis/specs/openapi/links.py +35 -7
- schemathesis/specs/openapi/loaders.py +13 -11
- schemathesis/specs/openapi/negative/__init__.py +2 -1
- schemathesis/specs/openapi/negative/mutations.py +1 -0
- schemathesis/specs/openapi/parameters.py +1 -0
- schemathesis/specs/openapi/schemas.py +27 -38
- schemathesis/specs/openapi/security.py +1 -0
- schemathesis/specs/openapi/serialization.py +1 -0
- schemathesis/specs/openapi/stateful/__init__.py +159 -70
- schemathesis/specs/openapi/stateful/statistic.py +198 -0
- schemathesis/specs/openapi/stateful/types.py +13 -0
- schemathesis/specs/openapi/utils.py +1 -0
- schemathesis/specs/openapi/validation.py +1 -0
- schemathesis/stateful/__init__.py +4 -2
- schemathesis/stateful/config.py +66 -0
- schemathesis/stateful/context.py +93 -0
- schemathesis/stateful/events.py +209 -0
- schemathesis/stateful/runner.py +233 -0
- schemathesis/stateful/sink.py +68 -0
- schemathesis/stateful/state_machine.py +39 -22
- schemathesis/stateful/statistic.py +20 -0
- schemathesis/stateful/validation.py +66 -0
- schemathesis/targets.py +1 -0
- schemathesis/throttling.py +23 -3
- schemathesis/transports/__init__.py +28 -10
- schemathesis/transports/auth.py +1 -0
- schemathesis/transports/content_types.py +1 -1
- schemathesis/transports/headers.py +2 -1
- schemathesis/transports/responses.py +6 -4
- schemathesis/types.py +1 -0
- schemathesis/utils.py +1 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/METADATA +1 -1
- schemathesis-3.30.0.dist-info/RECORD +150 -0
- schemathesis/specs/openapi/stateful/links.py +0 -94
- schemathesis-3.29.1.dist-info/RECORD +0 -141
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/WHEEL +0 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/entry_points.txt +0 -0
- {schemathesis-3.29.1.dist-info → schemathesis-3.30.0.dist-info}/licenses/LICENSE +0 -0

schemathesis/specs/openapi/stateful/statistic.py
@@ -0,0 +1,198 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Iterator, List, Union
+
+from ....internal.copy import fast_deepcopy
+from ....stateful.statistic import TransitionStats
+from .types import AggregatedResponseCounter, LinkName, ResponseCounter, SourceName, StatusCode, TargetName
+
+if TYPE_CHECKING:
+    from ....stateful import events
+
+
+@dataclass
+class LinkSource:
+    name: str
+    responses: dict[StatusCode, dict[TargetName, dict[LinkName, ResponseCounter]]]
+    is_first: bool
+
+    __slots__ = ("name", "responses", "is_first")
+
+
+@dataclass
+class OperationResponse:
+    status_code: str
+    targets: dict[TargetName, dict[LinkName, ResponseCounter]]
+    is_last: bool
+
+    __slots__ = ("status_code", "targets", "is_last")
+
+
+@dataclass
+class Link:
+    name: str
+    target: str
+    responses: ResponseCounter
+    is_last: bool
+    is_single: bool
+
+    __slots__ = ("name", "target", "responses", "is_last", "is_single")
+
+
+StatisticEntry = Union[LinkSource, OperationResponse, Link]
+
+
+@dataclass
+class FormattedStatisticEntry:
+    line: str
+    entry: StatisticEntry
+    __slots__ = ("line", "entry")
+
+
+@dataclass
+class OpenAPILinkStats(TransitionStats):
+    """Statistics about link transitions for a state machine run."""
+
+    transitions: dict[SourceName, dict[StatusCode, dict[TargetName, dict[LinkName, ResponseCounter]]]]
+
+    roots: dict[TargetName, ResponseCounter] = field(default_factory=dict)
+
+    __slots__ = ("transitions",)
+
+    def consume(self, event: events.StatefulEvent) -> None:
+        from ....stateful import events
+
+        if isinstance(event, events.StepFinished):
+            if event.transition_id is not None:
+                transition_id = event.transition_id
+                source = self.transitions[transition_id.source]
+                transition = source[transition_id.status_code][event.target][transition_id.name]
+                if event.response is not None:
+                    key = event.response.status_code
+                else:
+                    key = None
+                counter = transition.setdefault(key, 0)
+                transition[key] = counter + 1
+            else:
+                # A start of a sequence has an empty source and does not belong to any transition
+                target = self.roots.setdefault(event.target, {})
+                if event.response is not None:
+                    key = event.response.status_code
+                else:
+                    key = None
+                counter = target.setdefault(key, 0)
+                target[key] = counter + 1
+
+    def copy(self) -> OpenAPILinkStats:
+        return self.__class__(transitions=fast_deepcopy(self.transitions))
+
+    def iter(self) -> Iterator[StatisticEntry]:
+        for source_idx, (source, responses) in enumerate(self.transitions.items()):
+            yield LinkSource(name=source, responses=responses, is_first=source_idx == 0)
+            for response_idx, (status_code, targets) in enumerate(responses.items()):
+                yield OperationResponse(
+                    status_code=status_code, targets=targets, is_last=response_idx == len(responses) - 1
+                )
+                for target_idx, (target, links) in enumerate(targets.items()):
+                    for link_idx, (link_name, link_responses) in enumerate(links.items()):
+                        yield Link(
+                            name=link_name,
+                            target=target,
+                            responses=link_responses,
+                            is_last=target_idx == len(targets) - 1 and link_idx == len(links) - 1,
+                            is_single=len(links) == 1,
+                        )
+
+    def iter_with_format(self) -> Iterator[FormattedStatisticEntry]:
+        current_response = None
+        for entry in self.iter():
+            if isinstance(entry, LinkSource):
+                if not entry.is_first:
+                    yield FormattedStatisticEntry(line=f"\n{entry.name}", entry=entry)
+                else:
+                    yield FormattedStatisticEntry(line=f"{entry.name}", entry=entry)
+            elif isinstance(entry, OperationResponse):
+                current_response = entry
+                if entry.is_last:
+                    yield FormattedStatisticEntry(line=f"└── {entry.status_code}", entry=entry)
+                else:
+                    yield FormattedStatisticEntry(line=f"├── {entry.status_code}", entry=entry)
+            else:
+                if current_response is not None and current_response.is_last:
+                    line = "    "
+                else:
+                    line = "│   "
+                if entry.is_last:
+                    line += "└"
+                else:
+                    line += "├"
+                if entry.is_single or entry.name == entry.target:
+                    line += f"── {entry.target}"
+                else:
+                    line += f"── {entry.name} -> {entry.target}"
+                yield FormattedStatisticEntry(line=line, entry=entry)
+
+    def to_formatted_table(self, width: int) -> str:
+        """Format the statistic as a table."""
+        entries = list(self.iter_with_format())
+        lines: List[str | list[str]] = [HEADER, ""]
+        column_widths = [len(column) for column in HEADER]
+        for entry in entries:
+            if isinstance(entry.entry, Link):
+                aggregated = _aggregate_responses(entry.entry.responses)
+                values = [
+                    entry.line,
+                    str(aggregated["2xx"]),
+                    str(aggregated["4xx"]),
+                    str(aggregated["5xx"]),
+                    str(aggregated["Total"]),
+                ]
+                column_widths = [max(column_widths[idx], len(column)) for idx, column in enumerate(values)]
+                lines.append(values)
+            else:
+                lines.append(entry.line)
+        used_width = sum(column_widths) + 4 * PADDING
+        max_space = width - used_width if used_width < width else 0
+        formatted_lines = []
+
+        for line in lines:
+            if isinstance(line, list):
+                formatted_line, *counters = line
+                formatted_line = formatted_line.ljust(column_widths[0] + max_space)
+
+                for column, max_width in zip(counters, column_widths[1:]):
+                    formatted_line += f"{column:>{max_width + PADDING}}"
+
+                formatted_lines.append(formatted_line)
+            else:
+                formatted_lines.append(line)
+
+        return "\n".join(formatted_lines)
+
+
+PADDING = 4
+HEADER = ["Links", "2xx", "4xx", "5xx", "Total"]
+
+
+def _aggregate_responses(responses: ResponseCounter) -> AggregatedResponseCounter:
+    """Aggregate responses by status code ranges."""
+    output: AggregatedResponseCounter = {
+        "2xx": 0,
+        # NOTE: 3xx responses are not counted
+        "4xx": 0,
+        "5xx": 0,
+        "Total": 0,
+    }
+    for status_code, count in responses.items():
+        if status_code is not None:
+            if 200 <= status_code < 300:
+                output["2xx"] += count
+                output["Total"] += count
+            elif 400 <= status_code < 500:
+                output["4xx"] += count
+                output["Total"] += count
+            elif 500 <= status_code < 600:
+                output["5xx"] += count
+                output["Total"] += count
+    return output
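
For illustration only (not part of the diff above): a minimal sketch of how the _aggregate_responses helper groups a ResponseCounter by status-code class. The counts below are made up; None keys (steps without a response) and 3xx codes are excluded from the totals, as the implementation shows.

    from schemathesis.specs.openapi.stateful.statistic import _aggregate_responses

    # Made-up counter: status code (or None when there was no response) -> number of occurrences
    responses = {200: 5, 201: 2, 404: 1, 500: 3, 301: 4, None: 2}

    # 301 and None are skipped, so they do not contribute to "Total"
    assert _aggregate_responses(responses) == {"2xx": 7, "4xx": 1, "5xx": 3, "Total": 11}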

schemathesis/specs/openapi/stateful/types.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Callable, Dict, TypedDict, Union
+
+from ....stateful.state_machine import StepResult
+
+StatusCode = str
+LinkName = str
+TargetName = str
+SourceName = str
+ResponseCounter = Dict[Union[int, None], int]
+FilterFunction = Callable[["StepResult"], bool]
+AggregatedResponseCounter = TypedDict("AggregatedResponseCounter", {"2xx": int, "4xx": int, "5xx": int, "Total": int})

schemathesis/stateful/__init__.py
@@ -1,17 +1,19 @@
 from __future__ import annotations
+
 import enum
 import json
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any, Callable, Generator
 
 from .. import GenerationConfig
-from ..exceptions import OperationSchemaError
-from ..models import APIOperation, Case
 from ..constants import NOT_SET
+from ..exceptions import OperationSchemaError
 from ..internal.result import Ok, Result
+from ..models import APIOperation, Case
 
 if TYPE_CHECKING:
     import hypothesis
+
     from ..transports.responses import GenericResponse
     from .state_machine import APIStateMachine
 
schemathesis/stateful/config.py
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    import hypothesis
+
+    from ..models import CheckFunction
+
+
+def _default_checks_factory() -> tuple[CheckFunction, ...]:
+    from ..checks import ALL_CHECKS
+    from ..specs.openapi.checks import use_after_free
+
+    return ALL_CHECKS + (use_after_free,)
+
+
+def _get_default_hypothesis_settings_kwargs() -> dict[str, Any]:
+    import hypothesis
+
+    return {"phases": (hypothesis.Phase.generate,), "deadline": None}
+
+
+def _default_hypothesis_settings_factory() -> hypothesis.settings:
+    # To avoid importing hypothesis at the module level
+    import hypothesis
+
+    return hypothesis.settings(**_get_default_hypothesis_settings_kwargs())
+
+
+@dataclass
+class StatefulTestRunnerConfig:
+    """Configuration for the stateful test runner."""
+
+    # Checks to run against each response
+    checks: tuple[CheckFunction, ...] = field(default_factory=_default_checks_factory)
+    # Hypothesis settings for state machine execution
+    hypothesis_settings: hypothesis.settings = field(default_factory=_default_hypothesis_settings_factory)
+    # Whether to stop the execution after the first failure
+    exit_first: bool = False
+    # Custom headers sent with each request
+    headers: dict[str, str] = field(default_factory=dict)
+    # Timeout for each request in milliseconds
+    request_timeout: int | None = None
+
+    def __post_init__(self) -> None:
+        import hypothesis
+
+        kwargs = _get_hypothesis_settings_kwargs_override(self.hypothesis_settings)
+        if kwargs:
+            self.hypothesis_settings = hypothesis.settings(self.hypothesis_settings, **kwargs)
+
+
+def _get_hypothesis_settings_kwargs_override(settings: hypothesis.settings) -> dict[str, Any]:
+    """Get the settings that should be overridden to match the defaults for API state machines."""
+    import hypothesis
+
+    kwargs = {}
+    hypothesis_default = hypothesis.settings()
+    state_machine_default = _default_hypothesis_settings_factory()
+    if settings.phases == hypothesis_default.phases:
+        kwargs["phases"] = state_machine_default.phases
+    if settings.deadline == hypothesis_default.deadline:
+        kwargs["deadline"] = state_machine_default.deadline
+    return kwargs
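
A hedged usage sketch (not part of the diff), based only on the fields defined above; the header and timeout values are placeholders.

    import hypothesis
    from schemathesis.stateful.config import StatefulTestRunnerConfig

    # Defaults: all built-in checks plus `use_after_free`, generate-only phases, no deadline
    config = StatefulTestRunnerConfig()

    # Values left at Hypothesis' defaults are replaced in __post_init__ with the
    # state-machine defaults (generate-only phases, deadline=None); explicit ones are kept
    config = StatefulTestRunnerConfig(
        hypothesis_settings=hypothesis.settings(max_examples=25),
        exit_first=True,
        headers={"Authorization": "Bearer <token>"},  # placeholder value
        request_timeout=5000,  # milliseconds
    )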

schemathesis/stateful/context.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import traceback
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Tuple, Type, Union
+
+from ..exceptions import CheckFailed
+from . import events
+
+if TYPE_CHECKING:
+    from ..models import Check
+    from ..transports.responses import GenericResponse
+
+FailureKey = Union[Type[CheckFailed], Tuple[str, int]]
+
+
+def _failure_cache_key(exc: CheckFailed | AssertionError) -> FailureKey:
+    """Create a key to identify unique failures."""
+    from hypothesis.internal.escalation import get_trimmed_traceback
+
+    # For CheckFailed, we already have all distinctive information about the failure, which is contained
+    # in the exception type itself.
+    if isinstance(exc, CheckFailed):
+        return exc.__class__
+
+    # Assertions come from the user's code, and we may try to group them by location
+    tb = get_trimmed_traceback(exc)
+    filename, lineno, *_ = traceback.extract_tb(tb)[-1]
+    return (filename, lineno)
+
+
+@dataclass
+class RunnerContext:
+    """Mutable context for state machine execution."""
+
+    # All seen failure keys, both grouped and individual ones
+    seen_in_run: set[FailureKey] = field(default_factory=set)
+    # Failure keys seen in the current suite
+    seen_in_suite: set[FailureKey] = field(default_factory=set)
+    # Unique failures collected in the current suite
+    failures_for_suite: list[Check] = field(default_factory=list)
+    # Status of the current step
+    current_step_status: events.StepStatus = events.StepStatus.SUCCESS
+    current_response: GenericResponse | None = None
+
+    @property
+    def current_scenario_status(self) -> events.ScenarioStatus:
+        if self.current_step_status == events.StepStatus.SUCCESS:
+            return events.ScenarioStatus.SUCCESS
+        elif self.current_step_status == events.StepStatus.FAILURE:
+            return events.ScenarioStatus.FAILURE
+        return events.ScenarioStatus.ERROR
+
+    def reset_step(self) -> None:
+        self.current_step_status = events.StepStatus.SUCCESS
+        self.current_response = None
+
+    def step_failed(self) -> None:
+        self.current_step_status = events.StepStatus.FAILURE
+
+    def step_errored(self) -> None:
+        self.current_step_status = events.StepStatus.ERROR
+
+    def mark_as_seen_in_run(self, exc: CheckFailed) -> None:
+        key = _failure_cache_key(exc)
+        self.seen_in_run.add(key)
+        causes = exc.causes or ()
+        for cause in causes:
+            key = _failure_cache_key(cause)
+            self.seen_in_run.add(key)
+
+    def mark_as_seen_in_suite(self, exc: CheckFailed | AssertionError) -> None:
+        key = _failure_cache_key(exc)
+        self.seen_in_suite.add(key)
+
+    def mark_current_suite_as_seen_in_run(self) -> None:
+        self.seen_in_run.update(self.seen_in_suite)
+
+    def is_seen_in_run(self, exc: CheckFailed | AssertionError) -> bool:
+        key = _failure_cache_key(exc)
+        return key in self.seen_in_run
+
+    def is_seen_in_suite(self, exc: CheckFailed | AssertionError) -> bool:
+        key = _failure_cache_key(exc)
+        return key in self.seen_in_suite
+
+    def add_failed_check(self, check: Check) -> None:
+        self.failures_for_suite.append(check)
+
+    def reset(self) -> None:
+        self.failures_for_suite = []
+        self.seen_in_suite.clear()
+        self.reset_step()
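
A brief sketch (not part of the diff) of the deduplication flow that these methods suggest; the failing assertion is hypothetical and stands in for a failed check.

    from schemathesis.stateful.context import RunnerContext

    ctx = RunnerContext()

    try:
        assert 1 == 2, "hypothetical failing check"
    except AssertionError as exc:
        # Record a failure only once per suite; suites are later folded into the run-wide set
        if not ctx.is_seen_in_suite(exc):
            ctx.mark_as_seen_in_suite(exc)
            ctx.step_failed()

    ctx.mark_current_suite_as_seen_in_run()
    ctx.reset()  # clears suite-level state before the next suite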
schemathesis/stateful/events.py
@@ -0,0 +1,209 @@
+from __future__ import annotations
+
+import time
+from dataclasses import dataclass
+from enum import Enum
+from typing import TYPE_CHECKING, Type
+
+if TYPE_CHECKING:
+    from ..models import Check
+    from .state_machine import APIStateMachine
+
+
+class RunStatus(str, Enum):
+    """Status of the state machine run."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+    ERROR = "error"
+    INTERRUPTED = "interrupted"
+
+
+@dataclass
+class StatefulEvent:
+    """Basic stateful test event."""
+
+    __slots__ = ("timestamp",)
+
+
+@dataclass
+class RunStarted(StatefulEvent):
+    """Before executing all scenarios."""
+
+    state_machine: Type[APIStateMachine]
+
+    __slots__ = ("state_machine", "timestamp", "started_at")
+
+    def __init__(self, *, state_machine: Type[APIStateMachine]) -> None:
+        self.state_machine = state_machine
+        self.started_at = time.time()
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class RunFinished(StatefulEvent):
+    """After executing all scenarios."""
+
+    status: RunStatus
+
+    __slots__ = ("timestamp", "status")
+
+    def __init__(self, *, status: RunStatus) -> None:
+        self.status = status
+        self.timestamp = time.monotonic()
+
+
+class SuiteStatus(str, Enum):
+    """Status of the suite execution."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+    ERROR = "error"
+    INTERRUPTED = "interrupted"
+
+
+@dataclass
+class SuiteStarted(StatefulEvent):
+    """Before executing a set of scenarios."""
+
+    __slots__ = ("timestamp",)
+
+    def __init__(self) -> None:
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class SuiteFinished(StatefulEvent):
+    """After executing a set of scenarios."""
+
+    status: SuiteStatus
+    failures: list[Check]
+
+    __slots__ = ("timestamp", "status", "failures")
+
+    def __init__(self, *, status: SuiteStatus, failures: list[Check]) -> None:
+        self.status = status
+        self.failures = failures
+        self.timestamp = time.monotonic()
+
+
+class ScenarioStatus(str, Enum):
+    """Status of a single scenario execution."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+    # TODO: Count for Hypothesis' rejected?
+    ERROR = "error"
+
+
+@dataclass
+class ScenarioStarted(StatefulEvent):
+    """Before a single state machine execution."""
+
+    # Whether this is a scenario that tries to reproduce a failure
+    is_final: bool
+
+    __slots__ = ("timestamp", "is_final")
+
+    def __init__(self, *, is_final: bool) -> None:
+        self.is_final = is_final
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class ScenarioFinished(StatefulEvent):
+    """After a single state machine execution."""
+
+    status: ScenarioStatus
+    # Whether this is a scenario that tries to reproduce a failure
+    is_final: bool
+
+    __slots__ = ("timestamp", "status", "is_final")
+
+    def __init__(self, *, status: ScenarioStatus, is_final: bool) -> None:
+        self.status = status
+        self.is_final = is_final
+        self.timestamp = time.monotonic()
+
+
+class StepStatus(str, Enum):
+    """Status of a single state machine step."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+    ERROR = "error"
+
+
+@dataclass
+class StepStarted(StatefulEvent):
+    """Before a single state machine step."""
+
+    __slots__ = ("timestamp",)
+
+    def __init__(self) -> None:
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class TransitionId:
+    """ID of the transition that was hit."""
+
+    name: str
+    # Status code as defined in the transition, i.e. may be `default`
+    status_code: str
+    source: str
+
+    __slots__ = ("name", "status_code", "source")
+
+
+@dataclass
+class ResponseData:
+    """Common data for responses."""
+
+    status_code: int
+    elapsed: float
+    __slots__ = ("status_code", "elapsed")
+
+
+@dataclass
+class StepFinished(StatefulEvent):
+    """After a single state machine step."""
+
+    status: StepStatus
+    transition_id: TransitionId | None
+    target: str
+    response: ResponseData | None
+
+    __slots__ = ("timestamp", "status", "transition_id", "target", "response")
+
+    def __init__(
+        self, *, status: StepStatus, transition_id: TransitionId | None, target: str, response: ResponseData | None
+    ) -> None:
+        self.status = status
+        self.transition_id = transition_id
+        self.target = target
+        self.response = response
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class Interrupted(StatefulEvent):
+    """The state machine execution was interrupted."""
+
+    __slots__ = ("timestamp",)
+
+    def __init__(self) -> None:
+        self.timestamp = time.monotonic()
+
+
+@dataclass
+class Errored(StatefulEvent):
+    """An error occurred during the state machine execution."""
+
+    exception: Exception
+
+    __slots__ = ("timestamp", "exception")
+
+    def __init__(self, *, exception: Exception) -> None:
+        self.exception = exception
+        self.timestamp = time.monotonic()
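
To show how these event classes fit together, a hypothetical consumer sketch (not part of the diff) that dispatches on the types defined above; the print statements are placeholders for real handling.

    from schemathesis.stateful import events

    def on_event(event: events.StatefulEvent) -> None:
        # Dispatch on the concrete event types emitted during a stateful run
        if isinstance(event, events.RunStarted):
            print(f"Run started: {event.state_machine.__name__}")
        elif isinstance(event, events.StepFinished):
            code = event.response.status_code if event.response is not None else "no response"
            print(f"{event.target}: {event.status.value} ({code})")
        elif isinstance(event, events.Errored):
            print(f"Error: {event.exception!r}")
        elif isinstance(event, events.RunFinished):
            print(f"Run finished: {event.status.value}")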