schemathesis 4.0.0a3__py3-none-any.whl → 4.0.0a5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- schemathesis/cli/__init__.py +3 -3
- schemathesis/cli/commands/run/__init__.py +159 -135
- schemathesis/cli/commands/run/checks.py +2 -3
- schemathesis/cli/commands/run/context.py +102 -19
- schemathesis/cli/commands/run/executor.py +33 -12
- schemathesis/cli/commands/run/filters.py +1 -0
- schemathesis/cli/commands/run/handlers/cassettes.py +27 -46
- schemathesis/cli/commands/run/handlers/junitxml.py +1 -1
- schemathesis/cli/commands/run/handlers/output.py +238 -102
- schemathesis/cli/commands/run/hypothesis.py +14 -41
- schemathesis/cli/commands/run/reports.py +72 -0
- schemathesis/cli/commands/run/validation.py +18 -12
- schemathesis/cli/ext/groups.py +42 -13
- schemathesis/cli/ext/options.py +15 -8
- schemathesis/core/__init__.py +7 -1
- schemathesis/core/errors.py +79 -11
- schemathesis/core/failures.py +2 -1
- schemathesis/core/transforms.py +1 -1
- schemathesis/engine/config.py +2 -2
- schemathesis/engine/core.py +11 -1
- schemathesis/engine/errors.py +8 -3
- schemathesis/engine/events.py +7 -0
- schemathesis/engine/phases/__init__.py +16 -4
- schemathesis/engine/phases/stateful/_executor.py +1 -1
- schemathesis/engine/phases/unit/__init__.py +77 -53
- schemathesis/engine/phases/unit/_executor.py +28 -23
- schemathesis/engine/phases/unit/_pool.py +8 -0
- schemathesis/errors.py +6 -2
- schemathesis/experimental/__init__.py +0 -6
- schemathesis/filters.py +8 -0
- schemathesis/generation/coverage.py +6 -1
- schemathesis/generation/hypothesis/builder.py +222 -97
- schemathesis/generation/stateful/state_machine.py +49 -3
- schemathesis/openapi/checks.py +3 -1
- schemathesis/pytest/lazy.py +43 -5
- schemathesis/pytest/plugin.py +4 -4
- schemathesis/schemas.py +1 -1
- schemathesis/specs/openapi/checks.py +28 -11
- schemathesis/specs/openapi/examples.py +2 -5
- schemathesis/specs/openapi/expressions/__init__.py +22 -6
- schemathesis/specs/openapi/expressions/nodes.py +15 -21
- schemathesis/specs/openapi/expressions/parser.py +1 -1
- schemathesis/specs/openapi/parameters.py +0 -2
- schemathesis/specs/openapi/patterns.py +24 -7
- schemathesis/specs/openapi/schemas.py +13 -13
- schemathesis/specs/openapi/serialization.py +14 -0
- schemathesis/specs/openapi/stateful/__init__.py +96 -23
- schemathesis/specs/openapi/{links.py → stateful/links.py} +60 -16
- {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a5.dist-info}/METADATA +7 -26
- {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a5.dist-info}/RECORD +53 -52
- {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a5.dist-info}/WHEEL +0 -0
- {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a5.dist-info}/entry_points.txt +0 -0
- {schemathesis-4.0.0a3.dist-info → schemathesis-4.0.0a5.dist-info}/licenses/LICENSE +0 -0
schemathesis/cli/commands/run/context.py

@@ -1,21 +1,30 @@
 from __future__ import annotations
 
 from dataclasses import dataclass, field
-from typing import Generator
+from typing import TYPE_CHECKING, Generator
 
 from schemathesis.core.failures import Failure
 from schemathesis.core.output import OutputConfig
+from schemathesis.core.result import Err, Ok
+from schemathesis.core.transforms import UNRESOLVABLE
 from schemathesis.core.transport import Response
 from schemathesis.engine import Status, events
-from schemathesis.engine.recorder import ScenarioRecorder
+from schemathesis.engine.recorder import CaseNode, ScenarioRecorder
+from schemathesis.generation.case import Case
+
+if TYPE_CHECKING:
+    from schemathesis.generation.stateful.state_machine import ExtractionFailure
 
 
 @dataclass
 class Statistic:
     """Running statistics about test execution."""
 
-    outcomes: dict[Status, int]
     failures: dict[str, dict[str, GroupedFailures]]
+    # Track first case_id where each unique failure was found
+    unique_failures_map: dict[Failure, str]
+
+    extraction_failures: set[ExtractionFailure]
 
     tested_operations: set[str]
 
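Note the new `TYPE_CHECKING` guard: `ExtractionFailure` is imported only for type checkers here, and re-imported locally inside `on_scenario_finished` below. This is the standard way to reference a type across modules without creating a runtime import cycle. A minimal sketch of the pattern (module and class names are hypothetical):

```python
from __future__ import annotations  # annotations become strings, resolved lazily

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen by mypy/pyright only; never executed, so no circular import at runtime.
    from my_other_module import HeavyType  # hypothetical module


def handle(value: HeavyType) -> None:
    # When the class is needed at runtime, import it at call time instead.
    from my_other_module import HeavyType

    assert isinstance(value, HeavyType)
```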
@@ -24,8 +33,9 @@ class Statistic:
     cases_without_checks: int
 
     __slots__ = (
-        "outcomes",
         "failures",
+        "unique_failures_map",
+        "extraction_failures",
         "tested_operations",
         "total_cases",
         "cases_with_failures",
@@ -33,19 +43,35 @@ class Statistic:
     )
 
     def __init__(self) -> None:
-        self.outcomes = {}
         self.failures = {}
+        self.unique_failures_map = {}
+        self.extraction_failures = set()
         self.tested_operations = set()
         self.total_cases = 0
         self.cases_with_failures = 0
         self.cases_without_checks = 0
 
-    def
+    def on_scenario_finished(self, recorder: ScenarioRecorder) -> None:
         """Update statistics and store failures from a new batch of checks."""
+        from schemathesis.generation.stateful.state_machine import ExtractionFailure
+
         failures = self.failures.get(recorder.label, {})
 
         self.total_cases += len(recorder.cases)
 
+        extraction_failures = set()
+
+        def collect_history(node: CaseNode, response: Response) -> list[tuple[Case, Response]]:
+            history = [(node.value, response)]
+            current = node
+            while current.parent_id is not None:
+                current_response = recorder.find_response(case_id=current.parent_id)
+                # We need a response to get there, so it should be present
+                assert current_response is not None
+                current = recorder.cases[current.parent_id]
+                history.append((current.value, current_response))
+            return history
+
         for case_id, case in recorder.cases.items():
             checks = recorder.checks.get(case_id, [])
 
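`collect_history` reconstructs the chain of requests that led to a given stateful test case by following `parent_id` links in the recorder. The traversal itself is a plain linked-list walk; a self-contained sketch with a simplified node type:

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Node:
    parent_id: str | None
    value: str  # stands in for the recorded Case


def collect_history(nodes: dict[str, Node], start_id: str) -> list[str]:
    # Walk from the given case up to the scenario root, newest first --
    # the same shape as the recorder-based helper above.
    history = []
    current = nodes[start_id]
    while True:
        history.append(current.value)
        if current.parent_id is None:
            return history
        current = nodes[current.parent_id]


nodes = {
    "root": Node(None, "POST /users"),
    "child": Node("root", "GET /users/{id}"),
}
assert collect_history(nodes, "child") == ["GET /users/{id}", "POST /users"]
```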
@@ -55,27 +81,84 @@ class Statistic:
 
             self.tested_operations.add(case.value.operation.label)
             has_failures = False
-
-
+            current_case_failures = []
+            last_failure_info = None
 
-
+            for check in checks:
                 if check.failure_info is not None:
-
-
-
-
-
-
-
-
-
+                    failure = check.failure_info.failure
+
+                    # Check if this is a new unique failure
+                    if failure not in self.unique_failures_map:
+                        last_failure_info = check.failure_info
+                        self.unique_failures_map[failure] = case_id
+                        current_case_failures.append(failure)
+                        has_failures = True
+                    else:
+                        # This failure was already seen - skip it
+                        continue
+
+            if current_case_failures:
+                assert last_failure_info is not None
+                failures[case_id] = GroupedFailures(
+                    case_id=case_id,
+                    code_sample=last_failure_info.code_sample,
+                    failures=current_case_failures,
+                    response=recorder.interactions[case_id].response,
+                )
+
             if has_failures:
                 self.cases_with_failures += 1
+
+            if case.transition is None:
+                continue
+            transition = case.transition
+            parent = recorder.cases[transition.parent_id]
+            response = recorder.find_response(case_id=parent.value.id)
+            # We need a response to get there, so it should be present
+            assert response is not None
+
+            for params in transition.parameters.values():
+                for parameter, extracted in params.items():
+                    if isinstance(extracted.value, Ok) and extracted.value.ok() is UNRESOLVABLE:
+                        history = collect_history(parent, response)
+                        extraction_failures.add(
+                            ExtractionFailure(
+                                id=transition.id,
+                                case_id=case_id,
+                                source=parent.value.operation.label,
+                                target=case.value.operation.label,
+                                parameter_name=parameter,
+                                expression=extracted.definition,
+                                history=history,
+                                response=response,
+                                error=None,
+                            )
+                        )
+                    elif isinstance(extracted.value, Err):
+                        history = collect_history(parent, response)
+                        extraction_failures.add(
+                            ExtractionFailure(
+                                id=transition.id,
+                                case_id=case_id,
+                                source=parent.value.operation.label,
+                                target=case.value.operation.label,
+                                parameter_name=parameter,
+                                expression=extracted.definition,
+                                history=history,
+                                response=response,
+                                error=extracted.value.err(),
+                            )
+                        )
+
         if failures:
             for group in failures.values():
                 group.failures = sorted(set(group.failures))
             self.failures[recorder.label] = failures
 
+        if extraction_failures:
+            self.extraction_failures.update(extraction_failures)
+
 
 @dataclass
 class GroupedFailures:
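The transition-extraction loop distinguishes two failure modes: `Ok(UNRESOLVABLE)` (the runtime expression evaluated but the value could not be resolved from the parent response) and `Err(...)` (evaluating the expression raised). A reduced sketch of that three-way branch, with stand-in result types:

```python
from __future__ import annotations

UNRESOLVABLE = object()  # stand-in for schemathesis.core.transforms.UNRESOLVABLE


class Ok:
    def __init__(self, value: object) -> None:
        self._value = value

    def ok(self) -> object:
        return self._value


class Err:
    def __init__(self, error: Exception) -> None:
        self._error = error

    def err(self) -> Exception:
        return self._error


def classify(extracted: Ok | Err) -> str:
    # Mirrors the branching inside `on_scenario_finished`.
    if isinstance(extracted, Ok) and extracted.ok() is UNRESOLVABLE:
        return "unresolvable"  # recorded as ExtractionFailure(error=None)
    if isinstance(extracted, Err):
        return "error"         # recorded as ExtractionFailure(error=...)
    return "ok"                # value extracted; no failure recorded


assert classify(Ok(UNRESOLVABLE)) == "unresolvable"
assert classify(Ok("user-42")) == "ok"
assert classify(Err(KeyError("id"))) == "error"
```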
@@ -108,7 +191,7 @@ class ExecutionContext:
 
     def on_event(self, event: events.EngineEvent) -> None:
         if isinstance(event, events.ScenarioFinished):
-            self.statistic.
+            self.statistic.on_scenario_finished(event.recorder)
         elif isinstance(event, events.NonFatalError) or (
             isinstance(event, events.PhaseFinished)
             and event.phase.is_enabled
schemathesis/cli/commands/run/executor.py

@@ -10,10 +10,11 @@ from schemathesis.cli.commands.run.context import ExecutionContext
 from schemathesis.cli.commands.run.events import LoadingFinished, LoadingStarted
 from schemathesis.cli.commands.run.handlers import display_handler_error
 from schemathesis.cli.commands.run.handlers.base import EventHandler
-from schemathesis.cli.commands.run.handlers.cassettes import
+from schemathesis.cli.commands.run.handlers.cassettes import CassetteWriter
 from schemathesis.cli.commands.run.handlers.junitxml import JunitXMLHandler
 from schemathesis.cli.commands.run.handlers.output import OutputHandler
 from schemathesis.cli.commands.run.loaders import AutodetectConfig, load_schema
+from schemathesis.cli.commands.run.reports import ReportConfig, ReportFormat
 from schemathesis.cli.ext.fs import open_file
 from schemathesis.core.errors import LoaderError
 from schemathesis.core.output import OutputConfig
@@ -43,8 +44,7 @@ class RunConfig:
     wait_for_schema: float | None
     rate_limit: str | None
     output: OutputConfig
-
-    cassette: CassetteConfig | None
+    report: ReportConfig | None
     args: list[str]
     params: dict[str, Any]
 
@@ -93,27 +93,48 @@ def into_event_stream(config: RunConfig) -> EventGenerator:
         yield FatalError(exception=exc)
 
 
-def
+def initialize_handlers(config: RunConfig) -> list[EventHandler]:
+    """Create event handlers based on run configuration."""
     handlers: list[EventHandler] = []
-
-
-
-
-
-
+
+    if config.report is not None:
+        if ReportFormat.JUNIT in config.report.formats:
+            path = config.report.get_path(ReportFormat.JUNIT)
+            open_file(path)
+            handlers.append(JunitXMLHandler(path))
+
+        for format in (ReportFormat.VCR, ReportFormat.HAR):
+            if format in config.report.formats:
+                path = config.report.get_path(format)
+                open_file(path)
+                handlers.append(
+                    CassetteWriter(
+                        format=format,
+                        path=path,
+                        sanitize_output=config.report.sanitize_output,
+                        preserve_bytes=config.report.preserve_bytes,
+                    )
+                )
+
     for custom_handler in CUSTOM_HANDLERS:
         handlers.append(custom_handler(*config.args, **config.params))
+
     handlers.append(
         OutputHandler(
             workers_num=config.engine.execution.workers_num,
             seed=config.engine.execution.seed,
             rate_limit=config.rate_limit,
             wait_for_schema=config.wait_for_schema,
-
-
+            engine_config=config.engine,
+            report_config=config.report,
         )
     )
 
+    return handlers
+
+
+def _execute(event_stream: EventGenerator, config: RunConfig) -> None:
+    handlers = initialize_handlers(config)
     ctx = ExecutionContext(output_config=config.output, seed=config.engine.execution.seed)
 
     def shutdown() -> None:
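`initialize_handlers` now derives the handler list from the unified report configuration: JUnit gets its own handler, while VCR and HAR both map to `CassetteWriter` instances. The dispatch logic in isolation (a bare `formats` set stands in for `ReportConfig`):

```python
from enum import Enum


class ReportFormat(str, Enum):
    JUNIT = "junit"
    VCR = "vcr"
    HAR = "har"


def handler_names(formats: set[ReportFormat]) -> list[str]:
    # Same shape as initialize_handlers: one JUnit handler,
    # plus one cassette writer per requested cassette format.
    handlers = []
    if ReportFormat.JUNIT in formats:
        handlers.append("JunitXMLHandler")
    for fmt in (ReportFormat.VCR, ReportFormat.HAR):
        if fmt in formats:
            handlers.append(f"CassetteWriter[{fmt.value}]")
    return handlers


assert handler_names({ReportFormat.JUNIT, ReportFormat.HAR}) == [
    "JunitXMLHandler",
    "CassetteWriter[har]",
]
```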
schemathesis/cli/commands/run/handlers/cassettes.py

@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import datetime
-import enum
 import json
 import sys
 import threading
@@ -11,11 +10,12 @@ from queue import Queue
 from typing import IO, Callable, Iterator
 from urllib.parse import parse_qsl, urlparse
 
-import click
 import harfile
+from click.utils import LazyFile
 
 from schemathesis.cli.commands.run.context import ExecutionContext
 from schemathesis.cli.commands.run.handlers.base import EventHandler
+from schemathesis.cli.commands.run.reports import ReportFormat
 from schemathesis.core.output.sanitization import sanitize_url, sanitize_value
 from schemathesis.core.transforms import deepclone
 from schemathesis.core.transport import Response
@@ -28,43 +28,26 @@ from schemathesis.generation.meta import CoveragePhaseData
 WRITER_WORKER_JOIN_TIMEOUT = 1
 
 
-class CassetteFormat(str, enum.Enum):
-    """Type of the cassette."""
-
-    VCR = "vcr"
-    HAR = "har"
-
-    @classmethod
-    def from_str(cls, value: str) -> CassetteFormat:
-        try:
-            return cls[value.upper()]
-        except KeyError:
-            available_formats = ", ".join(cls)
-            raise ValueError(
-                f"Invalid value for cassette format: {value}. Available formats: {available_formats}"
-            ) from None
-
-
-@dataclass
-class CassetteConfig:
-    path: click.utils.LazyFile
-    format: CassetteFormat = CassetteFormat.VCR
-    preserve_exact_body_bytes: bool = False
-    sanitize_output: bool = True
-
-
 @dataclass
 class CassetteWriter(EventHandler):
     """Write network interactions to a cassette."""
 
-
+    format: ReportFormat
+    path: LazyFile
+    sanitize_output: bool = True
+    preserve_bytes: bool = False
     queue: Queue = field(default_factory=Queue)
    worker: threading.Thread = field(init=False)
 
     def __post_init__(self) -> None:
-        kwargs = {
+        kwargs = {
+            "path": self.path,
+            "sanitize_output": self.sanitize_output,
+            "preserve_bytes": self.preserve_bytes,
+            "queue": self.queue,
+        }
         writer: Callable
-        if self.
+        if self.format == ReportFormat.HAR:
             writer = har_writer
         else:
             writer = vcr_writer
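`CassetteWriter` replaces the removed `CassetteConfig` indirection with plain fields and forwards them to the worker function as keyword arguments, so `vcr_writer` and `har_writer` can share one signature. Based on the fields above, construction would look roughly like this (the file name is illustrative, and these are internal modules rather than public API):

```python
from click.utils import LazyFile

from schemathesis.cli.commands.run.handlers.cassettes import CassetteWriter
from schemathesis.cli.commands.run.reports import ReportFormat

# __post_init__ picks har_writer for HAR and vcr_writer otherwise,
# then starts the background worker thread with the kwargs shown above.
writer = CassetteWriter(
    format=ReportFormat.VCR,
    path=LazyFile("cassette.yaml", mode="w"),
    sanitize_output=True,   # default: scrub sensitive headers/URLs
    preserve_bytes=False,   # default: decode bodies instead of base64
)
```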
@@ -120,7 +103,7 @@ def get_command_representation() -> str:
     return f"st {args}"
 
 
-def vcr_writer(
+def vcr_writer(path: LazyFile, sanitize_output: bool, preserve_bytes: bool, queue: Queue) -> None:
     """Write YAML to a file in an incremental manner.
 
     This implementation doesn't use `pyyaml` package and composes YAML manually as string due to the following reasons:
@@ -131,12 +114,12 @@ def vcr_writer(config: CassetteConfig, queue: Queue) -> None:
     providing tags, anchors to have incremental writing, with primitive types it is much simpler.
     """
     current_id = 1
-    stream =
+    stream = path.open()
 
     def format_header_values(values: list[str]) -> str:
         return "\n".join(f" - {json.dumps(v)}" for v in values)
 
-    if
+    if sanitize_output:
 
         def format_headers(headers: dict[str, list[str]]) -> str:
             headers = deepclone(headers)
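Both writers run on a worker thread and drain a `Queue`, so interactions are written incrementally as scenarios finish rather than serialized in one pass at exit (hence the hand-rolled YAML noted in the docstring). The producer/consumer skeleton, reduced to its essentials (item and sentinel types are stand-ins):

```python
import threading
from queue import Queue


class Shutdown:  # sentinel item telling the worker to stop
    pass


def writer_worker(path: str, queue: Queue) -> None:
    with open(path, "w") as stream:
        while True:
            item = queue.get()
            if isinstance(item, Shutdown):
                break
            # One record is appended per item; memory use stays flat.
            stream.write(f"- {item}\n")


queue: Queue = Queue()
worker = threading.Thread(target=writer_worker, args=("cassette.yaml", queue))
worker.start()
queue.put("interaction-1")
queue.put(Shutdown())
worker.join(timeout=1)  # cf. WRITER_WORKER_JOIN_TIMEOUT above
```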
@@ -162,7 +145,7 @@ def vcr_writer(config: CassetteConfig, queue: Queue) -> None:
 checks:
 {items}"""
 
-    if
+    if preserve_bytes:
 
         def format_request_body(output: IO, request: Request) -> None:
             if request.encoded_body is not None:
@@ -283,7 +266,7 @@ http_interactions:"""
                 else:
                     stream.write("null")
 
-                if
+                if sanitize_output:
                     uri = sanitize_url(interaction.request.uri)
                 else:
                     uri = interaction.request.uri
@@ -321,7 +304,7 @@ http_interactions:"""
                 current_id += 1
         else:
             break
-
+    path.close()
 
 
 def write_double_quoted(stream: IO, text: str | None) -> None:
@@ -367,13 +350,13 @@ def write_double_quoted(stream: IO, text: str | None) -> None:
     stream.write('"')
 
 
-def har_writer(
-    with harfile.open(
+def har_writer(path: LazyFile, sanitize_output: bool, preserve_bytes: bool, queue: Queue) -> None:
+    with harfile.open(path) as har:
         while True:
             item = queue.get()
             if isinstance(item, Process):
                 for interaction in item.recorder.interactions.values():
-                    if
+                    if sanitize_output:
                         uri = sanitize_url(interaction.request.uri)
                     else:
                         uri = interaction.request.uri
@@ -382,7 +365,7 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                     post_data = harfile.PostData(
                         mimeType=interaction.request.headers.get("Content-Type", [""])[0],
                         text=interaction.request.encoded_body
-                        if
+                        if preserve_bytes
                         else interaction.request.body.decode("utf-8", "replace"),
                     )
                 else:
@@ -393,16 +376,14 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                         size=interaction.response.body_size or 0,
                         mimeType=content_type,
                         text=interaction.response.encoded_body
-                        if
+                        if preserve_bytes
                         else interaction.response.content.decode("utf-8", "replace")
                         if interaction.response.content is not None
                         else None,
-                        encoding="base64"
-                        if interaction.response.content is not None and config.preserve_exact_body_bytes
-                        else None,
+                        encoding="base64" if interaction.response.content is not None and preserve_bytes else None,
                     )
                     http_version = f"HTTP/{interaction.response.http_version}"
-                    if
+                    if sanitize_output:
                         headers = deepclone(interaction.response.headers)
                         sanitize_value(headers)
                     else:
@@ -424,7 +405,7 @@ def har_writer(config: CassetteConfig, queue: Queue) -> None:
                     time = 0
                     http_version = ""
 
-                    if
+                    if sanitize_output:
                         headers = deepclone(interaction.request.headers)
                         sanitize_value(headers)
                     else:
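Throughout `har_writer`, the renamed `preserve_bytes` flag picks between emitting the already-encoded body with `encoding="base64"` and decoding it to text with replacement characters. The decision, written out as a small standalone function:

```python
from __future__ import annotations

import base64


def body_fields(content: bytes | None, preserve_bytes: bool) -> tuple[str | None, str | None]:
    """Return the (text, encoding) pair written into a HAR entry."""
    if content is None:
        return None, None
    if preserve_bytes:
        return base64.b64encode(content).decode(), "base64"
    return content.decode("utf-8", "replace"), None


assert body_fields(None, True) == (None, None)
assert body_fields(b"hi", True) == ("aGk=", "base64")
assert body_fields(b"\xff", False) == ("\ufffd", None)
```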
schemathesis/cli/commands/run/handlers/junitxml.py

@@ -25,7 +25,7 @@ class JunitXMLHandler(EventHandler):
             test_case.elapsed_sec += event.elapsed_time
             if event.status == Status.FAILURE:
                 add_failure(test_case, ctx.statistic.failures[label].values(), ctx)
-            elif event.status == Status.SKIP:
+            elif event.status == Status.SKIP and event.skip_reason is not None:
                 test_case.add_skipped_info(output=event.skip_reason)
             elif isinstance(event, events.NonFatalError):
                 test_case = self.get_or_create_test_case(event.label)