pytest-isolated 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytest_isolated/__init__.py +1 -1
- pytest_isolated/plugin.py +159 -59
- pytest_isolated/py.typed +0 -0
- {pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/METADATA +41 -36
- pytest_isolated-0.3.0.dist-info/RECORD +9 -0
- {pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/WHEEL +1 -1
- pytest_isolated-0.1.0.dist-info/RECORD +0 -8
- {pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/entry_points.txt +0 -0
- {pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/top_level.txt +0 -0
pytest_isolated/__init__.py
CHANGED
pytest_isolated/plugin.py
CHANGED

@@ -6,42 +6,77 @@ import os
import subprocess
import sys
import tempfile
+import time
from collections import OrderedDict
from pathlib import Path
-from typing import Any
+from typing import Any, Final, Literal, TypedDict, cast

import pytest

# Guard to prevent infinite recursion (parent spawns child; child must not spawn again)
-SUBPROC_ENV = "PYTEST_RUNNING_IN_SUBPROCESS"
+SUBPROC_ENV: Final = "PYTEST_RUNNING_IN_SUBPROCESS"

# Parent tells child where to write JSONL records per test call
-SUBPROC_REPORT_PATH = "PYTEST_SUBPROCESS_REPORT_PATH"
+SUBPROC_REPORT_PATH: Final = "PYTEST_SUBPROCESS_REPORT_PATH"
+
+# Arguments to exclude when forwarding options to subprocess
+_EXCLUDED_ARG_PREFIXES: Final = (
+    "--junitxml=",
+    "--html=",
+    "--result-log=",
+    "--collect-only",
+    "--setup-only",
+    "--setup-plan",
+    "-x",
+    "--exitfirst",
+    "--maxfail=",
+)
+
+# Plugin-specific options that take values and should not be forwarded
+_PLUGIN_OPTIONS_WITH_VALUE: Final = ("--isolated-timeout",)
+
+# Plugin-specific flag options that should not be forwarded
+_PLUGIN_FLAGS: Final = ("--no-isolation",)
+
+
+class _TestRecord(TypedDict, total=False):
+    """Structure for test phase results from subprocess."""
+
+    nodeid: str
+    when: Literal["setup", "call", "teardown"]
+    outcome: Literal["passed", "failed", "skipped"]
+    longrepr: str
+    duration: float
+    stdout: str
+    stderr: str
+    keywords: list[str]
+    sections: list[tuple[str, str]]
+    user_properties: list[tuple[str, Any]]
+    wasxfail: bool


def pytest_addoption(parser: pytest.Parser) -> None:
-
-    group = parser.getgroup("subprocess")
+    group = parser.getgroup("isolated")
    group.addoption(
-        "--
+        "--isolated-timeout",
        type=int,
        default=None,
-        help="Timeout in seconds for
+        help="Timeout in seconds for isolated test groups (default: 300)",
    )
    group.addoption(
-        "--no-
+        "--no-isolation",
        action="store_true",
        default=False,
        help="Disable subprocess isolation (for debugging)",
    )
    parser.addini(
-        "
+        "isolated_timeout",
        type="string",
        default="300",
-        help="Default timeout in seconds for
+        help="Default timeout in seconds for isolated test groups",
    )
    parser.addini(
-        "
+        "isolated_capture_passed",
        type="bool",
        default=False,
        help="Capture output for passed tests (default: False)",

@@ -51,8 +86,9 @@ def pytest_addoption(parser: pytest.Parser) -> None:
def pytest_configure(config: pytest.Config) -> None:
    config.addinivalue_line(
        "markers",
-        "
-        "tests with the same group run together in
+        "isolated(group=None, timeout=None): run this test in a grouped "
+        "fresh Python subprocess; tests with the same group run together in "
+        "one subprocess. timeout (seconds) overrides global --isolated-timeout.",
    )


@@ -60,17 +96,13 @@ def pytest_configure(config: pytest.Config) -> None:
# CHILD MODE: record results + captured output per test phase
# ----------------------------
def pytest_runtest_logreport(report: pytest.TestReport) -> None:
-    """
-    In the child process, write one JSON line per test phase (setup/call/teardown)
-    containing outcome, captured stdout/stderr, duration, and other metadata.
-    The parent will aggregate and re-emit this info.
-    """
+    """Write test phase results to a JSONL file when running in subprocess mode."""
    path = os.environ.get(SUBPROC_REPORT_PATH)
    if not path:
        return

    # Capture ALL phases (setup, call, teardown), not just call
-    rec = {
+    rec: _TestRecord = {
        "nodeid": report.nodeid,
        "when": report.when,  # setup, call, or teardown
        "outcome": report.outcome,  # passed/failed/skipped

@@ -94,23 +126,21 @@ def pytest_runtest_logreport(report: pytest.TestReport) -> None:
def pytest_collection_modifyitems(
    config: pytest.Config, items: list[pytest.Item]
) -> None:
-    """
-    Partition items into subprocess groups + normal items and stash on config.
-    """
    if os.environ.get(SUBPROC_ENV) == "1":
        return  # child should not do grouping

-    # If --no-
-    if config.getoption("
+    # If --no-isolation is set, treat all tests as normal (no subprocess isolation)
+    if config.getoption("no_isolation", False):
        config._subprocess_groups = OrderedDict()  # type: ignore[attr-defined]
        config._subprocess_normal_items = items  # type: ignore[attr-defined]
        return

    groups: OrderedDict[str, list[pytest.Item]] = OrderedDict()
+    group_timeouts: dict[str, int | None] = {}  # Track timeout per group
    normal: list[pytest.Item] = []

    for item in items:
-        m = item.get_closest_marker("
+        m = item.get_closest_marker("isolated")
        if not m:
            normal.append(item)
            continue

@@ -119,46 +149,51 @@ def pytest_collection_modifyitems(
        # Default grouping to module path (so you don't accidentally group everything)
        if group is None:
            group = item.nodeid.split("::")[0]
-
+
+        # Store group-specific timeout (first marker wins)
+        group_key = str(group)
+        if group_key not in group_timeouts:
+            group_timeouts[group_key] = m.kwargs.get("timeout")
+
+        groups.setdefault(group_key, []).append(item)

    config._subprocess_groups = groups  # type: ignore[attr-defined]
+    config._subprocess_group_timeouts = group_timeouts  # type: ignore[attr-defined]
    config._subprocess_normal_items = normal  # type: ignore[attr-defined]


def pytest_runtestloop(session: pytest.Session) -> int | None:
-    """
-
-
-
-    Enhanced to:
-    - Capture all test phases (setup, call, teardown)
-    - Support configurable timeouts
-    - Properly handle crashes and missing results
-    - Integrate with pytest's reporting system
+    """Execute isolated test groups in subprocesses and remaining tests in-process.
+
+    Any subprocess timeouts are caught and reported as test failures; the
+    subprocess.TimeoutExpired exception is not propagated to the caller.
    """
    if os.environ.get(SUBPROC_ENV) == "1":
        return None  # child runs the normal loop

    config = session.config
-    groups
-
+    groups = getattr(config, "_subprocess_groups", OrderedDict())
+    if not isinstance(groups, OrderedDict):
+        groups = OrderedDict()
+    group_timeouts: dict[str, int | None] = getattr(
+        config, "_subprocess_group_timeouts", {}
    )
    normal_items: list[pytest.Item] = getattr(
        config, "_subprocess_normal_items", session.items
    )

-    # Get timeout configuration
-    timeout_opt = config.getoption("
-    timeout_ini = config.getini("
-
+    # Get default timeout configuration
+    timeout_opt = config.getoption("isolated_timeout", None)
+    timeout_ini = config.getini("isolated_timeout")
+    default_timeout = timeout_opt or (int(timeout_ini) if timeout_ini else 300)

    # Get capture configuration
-    capture_passed = config.getini("
+    capture_passed = config.getini("isolated_capture_passed")

    def emit_report(
        item: pytest.Item,
-        when:
-        outcome:
+        when: Literal["setup", "call", "teardown"],
+        outcome: Literal["passed", "failed", "skipped"],
        longrepr: str = "",
        duration: float = 0.0,
        stdout: str = "",

@@ -167,10 +202,6 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
        user_properties: list[tuple[str, Any]] | None = None,
        wasxfail: bool = False,
    ) -> None:
-        """
-        Emit a synthetic report for the given item and phase.
-        Attach captured output based on outcome and configuration.
-        """
        call = pytest.CallInfo.from_call(lambda: None, when=when)
        rep = pytest.TestReport.from_item_and_call(item, call)
        rep.outcome = outcome

@@ -185,7 +216,8 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
        # For skipped tests, longrepr needs to be a tuple (path, lineno, reason)
        if outcome == "skipped" and longrepr:
            # Parse longrepr or create simple tuple
-
+            lineno = item.location[1] if item.location[1] is not None else -1
+            rep.longrepr = (str(item.fspath), lineno, longrepr)  # type: ignore[assignment]
        elif outcome == "failed" and longrepr:
            rep.longrepr = longrepr

@@ -205,6 +237,9 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
    for group_name, group_items in groups.items():
        nodeids = [it.nodeid for it in group_items]

+        # Get timeout for this group (marker timeout > global timeout)
+        group_timeout = group_timeouts.get(group_name) or default_timeout
+
        # file where the child will append JSONL records
        with tempfile.NamedTemporaryFile(
            prefix="pytest-subproc-", suffix=".jsonl", delete=False

@@ -215,12 +250,74 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
        env[SUBPROC_ENV] = "1"
        env[SUBPROC_REPORT_PATH] = report_path

-        # Run pytest in subprocess with timeout
-
+        # Run pytest in subprocess with timeout, tracking execution time
+        # Preserve rootdir and run subprocess from correct directory to ensure
+        # nodeids can be resolved
+        cmd = [sys.executable, "-m", "pytest"]
+
+        # Forward relevant pytest options to subprocess for consistency
+        # We filter out options that would interfere with subprocess execution
+        if hasattr(config, "invocation_params") and hasattr(
+            config.invocation_params, "args"
+        ):
+            forwarded_args = []
+            skip_next = False
+
+            for arg in config.invocation_params.args:
+                if skip_next:
+                    skip_next = False
+                    continue
+
+                # Skip our own plugin options
+                if arg in _PLUGIN_OPTIONS_WITH_VALUE:
+                    skip_next = True
+                    continue
+                if arg in _PLUGIN_FLAGS:
+                    continue
+
+                # Skip output/reporting options that would conflict
+                if any(arg.startswith(prefix) for prefix in _EXCLUDED_ARG_PREFIXES):
+                    continue
+                if arg in ("-x", "--exitfirst"):
+                    continue
+
+                # Skip test file paths and nodeids - we provide our own
+                if not arg.startswith("-") and ("::" in arg or arg.endswith(".py")):
+                    continue
+
+                forwarded_args.append(arg)
+
+            cmd.extend(forwarded_args)
+
+        # Pass rootdir to subprocess to ensure it uses the same project root
+        # (config.rootpath is available in pytest 7.0+, which is our minimum version)
+        if config.rootpath:
+            cmd.extend(["--rootdir", str(config.rootpath)])
+
+        # Add the test nodeids
+        cmd.extend(nodeids)
+
+        start_time = time.time()
+
+        # Determine the working directory for the subprocess
+        # Use rootpath if set, otherwise use invocation directory
+        # This ensures nodeids (which are relative to rootpath) can be resolved
+        subprocess_cwd = None
+        if config.rootpath:
+            subprocess_cwd = str(config.rootpath)
+        elif hasattr(config, "invocation_params") and hasattr(
+            config.invocation_params, "dir"
+        ):
+            subprocess_cwd = str(config.invocation_params.dir)

        try:
            proc = subprocess.run(
-                cmd,
+                cmd,
+                env=env,
+                timeout=group_timeout,
+                capture_output=False,
+                check=False,
+                cwd=subprocess_cwd,
            )
            returncode = proc.returncode
            timed_out = False

@@ -228,8 +325,10 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
            returncode = -1
            timed_out = True

+        execution_time = time.time() - start_time
+
        # Gather results from JSONL file
-        results: dict[str, dict[str,
+        results: dict[str, dict[str, _TestRecord]] = {}
        report_file = Path(report_path)
        if report_file.exists():
            with report_file.open(encoding="utf-8") as f:

@@ -237,7 +336,7 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
                    file_line = line.strip()
                    if not file_line:
                        continue
-                    rec = json.loads(file_line)
+                    rec = cast(_TestRecord, json.loads(file_line))
                    nodeid = rec["nodeid"]
                    when = rec["when"]

@@ -250,9 +349,10 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
        # Handle timeout or crash
        if timed_out:
            msg = (
-                f"Subprocess group={group_name!r} timed out after {
-                f"seconds
-                f"
+                f"Subprocess group={group_name!r} timed out after {group_timeout} "
+                f"seconds (execution time: {execution_time:.2f}s). "
+                f"Increase timeout with --isolated-timeout, isolated_timeout ini, "
+                f"or @pytest.mark.isolated(timeout=N)."
            )
            for it in group_items:
                emit_report(it, "call", "failed", longrepr=msg)

@@ -275,7 +375,7 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
            node_results = results.get(it.nodeid, {})

            # Emit setup, call, teardown in order
-            for when in ["setup", "call", "teardown"]:
+            for when in ["setup", "call", "teardown"]:  # type: ignore[assignment]
                if when not in node_results:
                    # If missing a phase, synthesize a passing one
                    if when == "call" and not node_results:

@@ -292,7 +392,7 @@ def pytest_runtestloop(session: pytest.Session) -> int | None:
                rec = node_results[when]
                emit_report(
                    it,
-                    when=when,
+                    when=when,  # type: ignore[arg-type]
                    outcome=rec["outcome"],
                    longrepr=rec.get("longrepr", ""),
                    duration=rec.get("duration", 0.0),

pytest_isolated/py.typed
ADDED
File without changes

{pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/METADATA
CHANGED

@@ -1,13 +1,12 @@
Metadata-Version: 2.4
Name: pytest-isolated
-Version: 0.
+Version: 0.3.0
Summary: Run marked pytest tests in grouped subprocesses (cross-platform).
Author: pytest-isolated contributors
-License: MIT
+License-Expression: MIT
Classifier: Development Status :: 4 - Beta
Classifier: Framework :: Pytest
Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.11

@@ -27,6 +26,9 @@ Dynamic: license-file

# pytest-isolated

+[](https://github.com/dyollb/pytest-isolated/actions/workflows/test.yml)
+[](https://pypi.org/project/pytest-isolated/)
+
A pytest plugin that runs marked tests in isolated subprocesses with intelligent grouping.

## Features

@@ -52,7 +54,7 @@ Mark tests to run in isolated subprocesses:
```python
import pytest

-@pytest.mark.
+@pytest.mark.isolated
def test_isolated():
    # Runs in a fresh subprocess
    assert True

@@ -61,16 +63,25 @@ def test_isolated():
Tests with the same group run together in one subprocess:

```python
-@pytest.mark.
+@pytest.mark.isolated(group="mygroup")
def test_one():
    shared_state.append(1)

-@pytest.mark.
+@pytest.mark.isolated(group="mygroup")
def test_two():
    # Sees state from test_one
    assert len(shared_state) == 2
```

+Set timeout per test group:
+
+```python
+@pytest.mark.isolated(timeout=30)
+def test_with_timeout():
+    # This group gets 30 second timeout (overrides global setting)
+    expensive_operation()
+```
+
Tests without an explicit group are automatically grouped by module.

## Configuration

@@ -78,30 +89,30 @@ Tests without an explicit group are automatically grouped by module.
### Command Line

```bash
-# Set
-pytest --
+# Set isolated test timeout (seconds)
+pytest --isolated-timeout=60

# Disable subprocess isolation for debugging
-pytest --no-
+pytest --no-isolation

# Combine with pytest debugger
-pytest --no-
+pytest --no-isolation --pdb
```

### pytest.ini / pyproject.toml

```ini
[pytest]
-
-
+isolated_timeout = 300
+isolated_capture_passed = false
```

Or in `pyproject.toml`:

```toml
[tool.pytest.ini_options]
-
-
+isolated_timeout = "300"
+isolated_capture_passed = false
```

## Use Cases

@@ -109,13 +120,13 @@ subprocess_capture_passed = false
### Testing Global State

```python
-@pytest.mark.
+@pytest.mark.isolated
def test_modifies_environ():
    import os
    os.environ["MY_VAR"] = "value"
    # Won't affect other tests

-@pytest.mark.
+@pytest.mark.isolated
def test_clean_environ():
    import os
    assert "MY_VAR" not in os.environ

@@ -124,13 +135,13 @@ def test_clean_environ():
### Testing Singletons

```python
-@pytest.mark.
+@pytest.mark.isolated(group="singleton_tests")
def test_singleton_init():
    from myapp import DatabaseConnection
    db = DatabaseConnection.get_instance()
    assert db is not None

-@pytest.mark.
+@pytest.mark.isolated(group="singleton_tests")
def test_singleton_reuse():
    db = DatabaseConnection.get_instance()
    # Same instance as previous test in group

@@ -139,7 +150,7 @@ def test_singleton_reuse():
### Testing Process Resources

```python
-@pytest.mark.
+@pytest.mark.isolated
def test_signal_handlers():
    import signal
    signal.signal(signal.SIGTERM, custom_handler)

@@ -151,7 +162,7 @@ def test_signal_handlers():
Failed tests automatically capture and display stdout/stderr:

```python
-@pytest.mark.
+@pytest.mark.isolated
def test_failing():
    print("Debug info")
    assert False

@@ -167,7 +178,7 @@ pytest --junitxml=report.xml --durations=10

**Fixtures**: Module/session fixtures run in each subprocess group. Cannot share fixture objects between parent and subprocess.

-**Debugging**: Use `--no-
+**Debugging**: Use `--no-isolation` to run all tests in the main process for easier debugging with `pdb` or IDE debuggers.

**Performance**: Subprocess creation adds ~100-500ms per group. Group related tests to minimize overhead.

@@ -176,7 +187,7 @@ pytest --junitxml=report.xml --durations=10
### Timeout Handling

```bash
-pytest --
+pytest --isolated-timeout=30
```

Timeout errors are clearly reported with the group name and timeout duration.

@@ -197,24 +208,18 @@ if os.environ.get("PYTEST_RUNNING_IN_SUBPROCESS") == "1":

## Troubleshooting

-**Tests timing out**: Increase timeout with `--
+**Tests timing out**: Increase timeout with `--isolated-timeout=600`

-**Missing output**: Enable capture for passed tests with `
+**Missing output**: Enable capture for passed tests with `isolated_capture_passed = true`

**Subprocess crashes**: Check for segfaults, OOM, or signal issues. Run with `-v` for details.

-## 
-
-MIT License - see LICENSE file for details.
+## Contributing

-
+1. Install pre-commit: `pip install pre-commit && pre-commit install`
+1. Run tests: `pytest tests/ -v`
+1. Open an issue before submitting PRs for new features

-
+## License

--
-- Process isolation with subprocess marker
-- Smart grouping by module or explicit group names
-- Timeout support
-- Complete test phase capture (setup/call/teardown)
-- JUnit XML and standard reporter integration
-- Comprehensive error handling and reporting
+MIT License - see LICENSE file for details.

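One README section quoted in the hunk header above checks `os.environ.get("PYTEST_RUNNING_IN_SUBPROCESS") == "1"` to detect the child process; the same variable is defined as `SUBPROC_ENV` in plugin.py. Below is a hedged `conftest.py` sketch of how a suite might branch on it; the fixture name and skip reason are illustrative, not part of the package.

```python
# conftest.py -- illustrative sketch; only the PYTEST_RUNNING_IN_SUBPROCESS variable
# name is taken from the pytest-isolated diff above.
import os

import pytest

IN_ISOLATED_CHILD = os.environ.get("PYTEST_RUNNING_IN_SUBPROCESS") == "1"


@pytest.fixture
def parent_process_only():
    # Hypothetical guard: skip setup that only makes sense in the parent run,
    # e.g. starting a shared service that child groups are expected to reuse.
    if IN_ISOLATED_CHILD:
        pytest.skip("running inside a pytest-isolated child process")
    yield
```
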
pytest_isolated-0.3.0.dist-info/RECORD
ADDED

@@ -0,0 +1,9 @@
+pytest_isolated/__init__.py,sha256=b7RfFW9uJXumJ6DTDdg-VvSUEeh-Pc2srTjCKJRxB7k,89
+pytest_isolated/plugin.py,sha256=ISfxTMyJVaddrqCFZOdBXFnC4E_Lh22gRUaNRb1Wo8I,15179
+pytest_isolated/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pytest_isolated-0.3.0.dist-info/licenses/LICENSE,sha256=WECJyowi685PZSnKcA4Tqs7jukfzbnk7iMPLnm_q4JI,1067
+pytest_isolated-0.3.0.dist-info/METADATA,sha256=CDzpvrFqvUFguTk9_Yr32h88GatvKDHCO0d7CsVC5Ug,5384
+pytest_isolated-0.3.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pytest_isolated-0.3.0.dist-info/entry_points.txt,sha256=HgRNPjIGoPBF1pkhma4UtaSwhpOVB8oZRZ0L1FcZXgk,45
+pytest_isolated-0.3.0.dist-info/top_level.txt,sha256=FAtpozhvI-YaiFoZMepi9JAm6e87mW-TM1Ovu5xLOxg,16
+pytest_isolated-0.3.0.dist-info/RECORD,,

pytest_isolated-0.1.0.dist-info/RECORD
REMOVED

@@ -1,8 +0,0 @@
-pytest_isolated/__init__.py,sha256=iqPlqv1sT1c10XcWOeYbJyRyIzeQk0Sa-OhOS14IMfg,89
-pytest_isolated/plugin.py,sha256=lLNjY5Jm5SmllySvVPRCF87fpQTnRx_RMtloKeM1m9U,11296
-pytest_isolated-0.1.0.dist-info/licenses/LICENSE,sha256=WECJyowi685PZSnKcA4Tqs7jukfzbnk7iMPLnm_q4JI,1067
-pytest_isolated-0.1.0.dist-info/METADATA,sha256=qfVoG6VGZCb-Dj5Wl12sGQH7WO-U2Gj4KcGV2H_FXzU,5131
-pytest_isolated-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-pytest_isolated-0.1.0.dist-info/entry_points.txt,sha256=HgRNPjIGoPBF1pkhma4UtaSwhpOVB8oZRZ0L1FcZXgk,45
-pytest_isolated-0.1.0.dist-info/top_level.txt,sha256=FAtpozhvI-YaiFoZMepi9JAm6e87mW-TM1Ovu5xLOxg,16
-pytest_isolated-0.1.0.dist-info/RECORD,,

{pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/entry_points.txt
File without changes

{pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/licenses/LICENSE
File without changes

{pytest_isolated-0.1.0.dist-info → pytest_isolated-0.3.0.dist-info}/top_level.txt
File without changes