pytest-isolated 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytest_isolated/__init__.py +3 -0
- pytest_isolated/plugin.py +314 -0
- pytest_isolated-0.1.0.dist-info/METADATA +220 -0
- pytest_isolated-0.1.0.dist-info/RECORD +8 -0
- pytest_isolated-0.1.0.dist-info/WHEEL +5 -0
- pytest_isolated-0.1.0.dist-info/entry_points.txt +2 -0
- pytest_isolated-0.1.0.dist-info/licenses/LICENSE +21 -0
- pytest_isolated-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,314 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import contextlib
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import tempfile
|
|
9
|
+
from collections import OrderedDict
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
import pytest
|
|
14
|
+
|
|
15
|
+
# Environment flag set to "1" in child processes to prevent infinite
# recursion: the parent spawns a child pytest run per group, and the child
# must not spawn further subprocesses of its own.
SUBPROC_ENV = "PYTEST_RUNNING_IN_SUBPROCESS"

# Environment variable through which the parent tells the child where to
# append one JSON line per test phase (setup/call/teardown).
SUBPROC_REPORT_PATH = "PYTEST_SUBPROCESS_REPORT_PATH"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def pytest_addoption(parser: pytest.Parser) -> None:
    """Register CLI flags and ini keys that control subprocess isolation.

    Adds ``--subprocess-timeout`` / ``--no-subprocess`` command-line options
    and the ``subprocess_timeout`` / ``subprocess_capture_passed`` ini keys.
    """
    cli = parser.getgroup("subprocess")
    # CLI flags: the timeout default is None so the ini value (or the
    # hard-coded 300s fallback) can take effect when the flag is absent.
    cli.addoption(
        "--subprocess-timeout",
        type=int,
        default=None,
        help="Timeout in seconds for subprocess groups (default: 300)",
    )
    cli.addoption(
        "--no-subprocess",
        action="store_true",
        default=False,
        help="Disable subprocess isolation (for debugging)",
    )
    # Ini keys, registered from a spec table: (name, type, default, help).
    ini_specs = [
        (
            "subprocess_timeout",
            "string",
            "300",
            "Default timeout in seconds for subprocess groups",
        ),
        (
            "subprocess_capture_passed",
            "bool",
            False,
            "Capture output for passed tests (default: False)",
        ),
    ]
    for name, kind, default, text in ini_specs:
        parser.addini(name, type=kind, default=default, help=text)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def pytest_configure(config: pytest.Config) -> None:
    """Declare the ``subprocess`` marker so pytest does not warn on its use."""
    marker_text = (
        "subprocess(group=None): run this test in a grouped fresh Python subprocess; "
        "tests with the same group run together in one subprocess."
    )
    config.addinivalue_line("markers", marker_text)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
# ----------------------------
|
|
60
|
+
# CHILD MODE: record results + captured output per test phase
|
|
61
|
+
# ----------------------------
|
|
62
|
+
def pytest_runtest_logreport(report: pytest.TestReport) -> None:
    """
    Child-process hook: append one JSON line per test phase.

    For every setup/call/teardown report, write a record with the outcome,
    captured stdout/stderr, duration, and metadata to the JSONL file named
    by the ``PYTEST_SUBPROCESS_REPORT_PATH`` environment variable. The
    parent process aggregates these records and re-emits them. Does nothing
    when the environment variable is unset (i.e. not running as a child).
    """
    target = os.environ.get(SUBPROC_REPORT_PATH)
    if not target:
        return

    # Every phase is recorded, not just "call" — setup/teardown failures
    # matter too. getattr guards keep this tolerant of report objects that
    # lack optional attributes.
    record: dict[str, Any] = {
        "nodeid": report.nodeid,
        "when": report.when,  # setup, call, or teardown
        "outcome": report.outcome,  # passed/failed/skipped
    }
    record["longrepr"] = str(report.longrepr) if report.longrepr else ""
    record["duration"] = getattr(report, "duration", 0.0)
    record["stdout"] = getattr(report, "capstdout", "") or ""
    record["stderr"] = getattr(report, "capstderr", "") or ""
    # Metadata the parent needs to rebuild a faithful report.
    record["keywords"] = list(report.keywords)
    record["sections"] = getattr(report, "sections", [])  # captured logs, etc.
    record["user_properties"] = getattr(report, "user_properties", [])
    record["wasxfail"] = hasattr(report, "wasxfail")

    serialized = json.dumps(record)
    with Path(target).open("a", encoding="utf-8") as sink:
        sink.write(serialized + "\n")
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# ----------------------------
|
|
92
|
+
# PARENT MODE: group marked tests
|
|
93
|
+
# ----------------------------
|
|
94
|
+
def pytest_collection_modifyitems(
    config: pytest.Config, items: list[pytest.Item]
) -> None:
    """
    Split collected items into subprocess groups and ordinary tests.

    The partition is stashed on ``config`` (``_subprocess_groups`` and
    ``_subprocess_normal_items``) for ``pytest_runtestloop`` to consume.
    """
    if os.environ.get(SUBPROC_ENV) == "1":
        # Child process: grouping already happened in the parent.
        return

    if config.getoption("no_subprocess", False):
        # --no-subprocess: run everything in-process, no isolation.
        config._subprocess_groups = OrderedDict()  # type: ignore[attr-defined]
        config._subprocess_normal_items = items  # type: ignore[attr-defined]
        return

    grouped: OrderedDict[str, list[pytest.Item]] = OrderedDict()
    in_process: list[pytest.Item] = []

    for test_item in items:
        marker = test_item.get_closest_marker("subprocess")
        if marker is None:
            in_process.append(test_item)
        else:
            key = marker.kwargs.get("group")
            if key is None:
                # Default to the module path so unmarked-group tests don't
                # all collapse into a single subprocess.
                key = test_item.nodeid.split("::")[0]
            grouped.setdefault(str(key), []).append(test_item)

    config._subprocess_groups = grouped  # type: ignore[attr-defined]
    config._subprocess_normal_items = in_process  # type: ignore[attr-defined]
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def pytest_runtestloop(session: pytest.Session) -> int | None:
    """
    Run each subprocess group in its own subprocess once;
    then run normal tests in-process.

    Enhanced to:
    - Capture all test phases (setup, call, teardown)
    - Support configurable timeouts
    - Properly handle crashes and missing results
    - Integrate with pytest's reporting system

    Returns None in child processes (so the default loop runs there);
    in the parent, returns 1 if any test failed, else 0.
    """
    if os.environ.get(SUBPROC_ENV) == "1":
        return None  # child runs the normal loop

    config = session.config
    # Partition produced by pytest_collection_modifyitems; fall back to
    # "everything is normal" if that hook never stashed anything.
    groups: OrderedDict[str, list[pytest.Item]] = getattr(
        config, "_subprocess_groups", OrderedDict()
    )
    normal_items: list[pytest.Item] = getattr(
        config, "_subprocess_normal_items", session.items
    )

    # Timeout resolution order: CLI flag, then ini value, then 300s.
    timeout_opt = config.getoption("subprocess_timeout", None)
    timeout_ini = config.getini("subprocess_timeout")
    timeout = timeout_opt or (int(timeout_ini) if timeout_ini else 300)

    # Whether passed tests should also get their output attached.
    capture_passed = config.getini("subprocess_capture_passed")

    def emit_report(
        item: pytest.Item,
        when: str,
        outcome: str,
        longrepr: str = "",
        duration: float = 0.0,
        stdout: str = "",
        stderr: str = "",
        sections: list[tuple[str, str]] | None = None,
        user_properties: list[tuple[str, Any]] | None = None,
        wasxfail: bool = False,
    ) -> None:
        """
        Emit a synthetic report for the given item and phase.
        Attach captured output based on outcome and configuration.
        """
        # Build a real TestReport via a no-op CallInfo so downstream
        # reporters (terminal, JUnit XML, ...) see a normal report object.
        call = pytest.CallInfo.from_call(lambda: None, when=when)
        rep = pytest.TestReport.from_item_and_call(item, call)
        rep.outcome = outcome
        rep.duration = duration

        if user_properties:
            rep.user_properties = user_properties

        if wasxfail:
            # NOTE(review): the original reason string is not round-tripped
            # from the child; a placeholder is used instead.
            rep.wasxfail = "reason: xfail"

        # For skipped tests, longrepr needs to be a tuple (path, lineno, reason)
        if outcome == "skipped" and longrepr:
            rep.longrepr = (str(item.fspath), item.location[1], longrepr)
        elif outcome == "failed" and longrepr:
            rep.longrepr = longrepr

        # Add captured output as sections (capstdout/capstderr are read-only)
        if outcome == "failed" or (outcome == "passed" and capture_passed):
            all_sections = list(sections) if sections else []
            if stdout:
                all_sections.append(("Captured stdout call", stdout))
            if stderr:
                all_sections.append(("Captured stderr call", stderr))
            if all_sections:
                rep.sections = all_sections

        item.ihook.pytest_runtest_logreport(report=rep)

    # Run each group in one fresh subprocess.
    for group_name, group_items in groups.items():
        nodeids = [it.nodeid for it in group_items]

        # File where the child will append JSONL records. delete=False so the
        # path survives this `with`; the parent unlinks it after reading
        # (and so the child can reopen it by name, which matters on Windows).
        with tempfile.NamedTemporaryFile(
            prefix="pytest-subproc-", suffix=".jsonl", delete=False
        ) as tf:
            report_path = tf.name

        # Recursion guard + report path are passed via the environment.
        env = os.environ.copy()
        env[SUBPROC_ENV] = "1"
        env[SUBPROC_REPORT_PATH] = report_path

        # Re-invoke pytest on just this group's node IDs.
        cmd = [sys.executable, "-m", "pytest", *nodeids]

        try:
            # Output is NOT captured here: the child's own capture machinery
            # records per-test output into the JSONL records instead.
            proc = subprocess.run(
                cmd, env=env, timeout=timeout, capture_output=False, check=False
            )
            returncode = proc.returncode
            timed_out = False
        except subprocess.TimeoutExpired:
            returncode = -1
            timed_out = True

        # Gather results from the JSONL file: results[nodeid][when] -> record.
        results: dict[str, dict[str, Any]] = {}
        report_file = Path(report_path)
        if report_file.exists():
            with report_file.open(encoding="utf-8") as f:
                for line in f:
                    file_line = line.strip()
                    if not file_line:
                        continue
                    rec = json.loads(file_line)
                    nodeid = rec["nodeid"]
                    when = rec["when"]

                    if nodeid not in results:
                        results[nodeid] = {}
                    results[nodeid][when] = rec
        # Best-effort cleanup of the temp file.
        with contextlib.suppress(OSError):
            report_file.unlink()

        # Timed-out group: mark every test in it as failed.
        if timed_out:
            msg = (
                f"Subprocess group={group_name!r} timed out after {timeout} "
                f"seconds. Increase timeout with --subprocess-timeout or "
                f"subprocess_timeout ini option."
            )
            for it in group_items:
                emit_report(it, "call", "failed", longrepr=msg)
                session.testsfailed += 1
            continue

        # No records at all: the child likely crashed before running tests.
        if not results:
            msg = (
                f"Subprocess group={group_name!r} exited with code {returncode} "
                f"and produced no per-test report. The subprocess may have "
                f"crashed during collection."
            )
            for it in group_items:
                emit_report(it, "call", "failed", longrepr=msg)
                session.testsfailed += 1
            continue

        # Re-emit per-test results into the parent, all phases in order.
        for it in group_items:
            node_results = results.get(it.nodeid, {})

            for when in ["setup", "call", "teardown"]:
                if when not in node_results:
                    # A missing individual phase is silently skipped; only a
                    # test with NO records at all is reported as failed.
                    if when == "call" and not node_results:
                        emit_report(
                            it,
                            "call",
                            "failed",
                            longrepr=f"Missing result from subprocess for {it.nodeid}",
                        )
                        session.testsfailed += 1
                    continue

                rec = node_results[when]
                emit_report(
                    it,
                    when=when,
                    outcome=rec["outcome"],
                    longrepr=rec.get("longrepr", ""),
                    duration=rec.get("duration", 0.0),
                    stdout=rec.get("stdout", ""),
                    stderr=rec.get("stderr", ""),
                    sections=rec.get("sections"),
                    user_properties=rec.get("user_properties"),
                    wasxfail=rec.get("wasxfail", False),
                )

                # Only a failed "call" phase counts toward the failure total
                # here; setup/teardown failures are reported but not counted.
                if when == "call" and rec["outcome"] == "failed":
                    session.testsfailed += 1

    # Run the unmarked tests in-process via the standard protocol.
    for idx, item in enumerate(normal_items):
        nextitem = normal_items[idx + 1] if idx + 1 < len(normal_items) else None
        item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)

    # Non-None return stops pytest's default loop from running again.
    return 1 if session.testsfailed else 0
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pytest-isolated
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Run marked pytest tests in grouped subprocesses (cross-platform).
|
|
5
|
+
Author: pytest-isolated contributors
|
|
6
|
+
License: MIT
|
|
7
|
+
Classifier: Development Status :: 4 - Beta
|
|
8
|
+
Classifier: Framework :: Pytest
|
|
9
|
+
Classifier: Intended Audience :: Developers
|
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
11
|
+
Classifier: Operating System :: OS Independent
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Topic :: Software Development :: Testing
|
|
16
|
+
Requires-Python: >=3.11
|
|
17
|
+
Description-Content-Type: text/markdown
|
|
18
|
+
License-File: LICENSE
|
|
19
|
+
Requires-Dist: pytest>=7.0
|
|
20
|
+
Provides-Extra: dev
|
|
21
|
+
Requires-Dist: pre-commit; extra == "dev"
|
|
22
|
+
Requires-Dist: build; extra == "dev"
|
|
23
|
+
Requires-Dist: ruff; extra == "dev"
|
|
24
|
+
Provides-Extra: test
|
|
25
|
+
Requires-Dist: pytest-timeout; extra == "test"
|
|
26
|
+
Dynamic: license-file
|
|
27
|
+
|
|
28
|
+
# pytest-isolated
|
|
29
|
+
|
|
30
|
+
A pytest plugin that runs marked tests in isolated subprocesses with intelligent grouping.
|
|
31
|
+
|
|
32
|
+
## Features
|
|
33
|
+
|
|
34
|
+
- Run tests in fresh Python subprocesses to prevent state pollution
|
|
35
|
+
- Group related tests to run together in the same subprocess
|
|
36
|
+
- Handles crashes, timeouts, and setup/teardown failures
|
|
37
|
+
- Captures stdout/stderr for failed tests
|
|
38
|
+
- Works with pytest reporters (JUnit XML, etc.)
|
|
39
|
+
- Configurable timeouts to prevent hanging subprocesses
|
|
40
|
+
- Cross-platform: Linux, macOS, Windows
|
|
41
|
+
|
|
42
|
+
## Installation
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
pip install pytest-isolated
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## Quick Start
|
|
49
|
+
|
|
50
|
+
Mark tests to run in isolated subprocesses:
|
|
51
|
+
|
|
52
|
+
```python
|
|
53
|
+
import pytest
|
|
54
|
+
|
|
55
|
+
@pytest.mark.subprocess
|
|
56
|
+
def test_isolated():
|
|
57
|
+
# Runs in a fresh subprocess
|
|
58
|
+
assert True
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
Tests with the same group run together in one subprocess:
|
|
62
|
+
|
|
63
|
+
```python
|
|
64
|
+
@pytest.mark.subprocess(group="mygroup")
|
|
65
|
+
def test_one():
|
|
66
|
+
shared_state.append(1)
|
|
67
|
+
|
|
68
|
+
@pytest.mark.subprocess(group="mygroup")
|
|
69
|
+
def test_two():
|
|
70
|
+
# Sees state from test_one
|
|
71
|
+
    assert len(shared_state) == 1  # element appended by test_one is still there
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
Tests without an explicit group are automatically grouped by module.
|
|
75
|
+
|
|
76
|
+
## Configuration
|
|
77
|
+
|
|
78
|
+
### Command Line
|
|
79
|
+
|
|
80
|
+
```bash
|
|
81
|
+
# Set subprocess timeout (seconds)
|
|
82
|
+
pytest --subprocess-timeout=60
|
|
83
|
+
|
|
84
|
+
# Disable subprocess isolation for debugging
|
|
85
|
+
pytest --no-subprocess
|
|
86
|
+
|
|
87
|
+
# Combine with pytest debugger
|
|
88
|
+
pytest --no-subprocess --pdb
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
### pytest.ini / pyproject.toml
|
|
92
|
+
|
|
93
|
+
```ini
|
|
94
|
+
[pytest]
|
|
95
|
+
subprocess_timeout = 300
|
|
96
|
+
subprocess_capture_passed = false
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
Or in `pyproject.toml`:
|
|
100
|
+
|
|
101
|
+
```toml
|
|
102
|
+
[tool.pytest.ini_options]
|
|
103
|
+
subprocess_timeout = "300"
|
|
104
|
+
subprocess_capture_passed = false
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
## Use Cases
|
|
108
|
+
|
|
109
|
+
### Testing Global State
|
|
110
|
+
|
|
111
|
+
```python
|
|
112
|
+
@pytest.mark.subprocess
|
|
113
|
+
def test_modifies_environ():
|
|
114
|
+
import os
|
|
115
|
+
os.environ["MY_VAR"] = "value"
|
|
116
|
+
# Won't affect other tests
|
|
117
|
+
|
|
118
|
+
@pytest.mark.subprocess
|
|
119
|
+
def test_clean_environ():
|
|
120
|
+
import os
|
|
121
|
+
assert "MY_VAR" not in os.environ
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
### Testing Singletons
|
|
125
|
+
|
|
126
|
+
```python
|
|
127
|
+
@pytest.mark.subprocess(group="singleton_tests")
|
|
128
|
+
def test_singleton_init():
|
|
129
|
+
from myapp import DatabaseConnection
|
|
130
|
+
db = DatabaseConnection.get_instance()
|
|
131
|
+
assert db is not None
|
|
132
|
+
|
|
133
|
+
@pytest.mark.subprocess(group="singleton_tests")
|
|
134
|
+
def test_singleton_reuse():
|
|
135
|
+
db = DatabaseConnection.get_instance()
|
|
136
|
+
# Same instance as previous test in group
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
### Testing Process Resources
|
|
140
|
+
|
|
141
|
+
```python
|
|
142
|
+
@pytest.mark.subprocess
|
|
143
|
+
def test_signal_handlers():
|
|
144
|
+
import signal
|
|
145
|
+
signal.signal(signal.SIGTERM, custom_handler)
|
|
146
|
+
# Won't interfere with pytest
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
## Output and Reporting
|
|
150
|
+
|
|
151
|
+
Failed tests automatically capture and display stdout/stderr:
|
|
152
|
+
|
|
153
|
+
```python
|
|
154
|
+
@pytest.mark.subprocess
|
|
155
|
+
def test_failing():
|
|
156
|
+
print("Debug info")
|
|
157
|
+
assert False
|
|
158
|
+
```
|
|
159
|
+
|
|
160
|
+
Works with standard pytest reporters:
|
|
161
|
+
|
|
162
|
+
```bash
|
|
163
|
+
pytest --junitxml=report.xml --durations=10
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
## Limitations
|
|
167
|
+
|
|
168
|
+
**Fixtures**: Module/session fixtures run in each subprocess group. Cannot share fixture objects between parent and subprocess.
|
|
169
|
+
|
|
170
|
+
**Debugging**: Use `--no-subprocess` to run all tests in the main process for easier debugging with `pdb` or IDE debuggers.
|
|
171
|
+
|
|
172
|
+
**Performance**: Subprocess creation adds ~100-500ms per group. Group related tests to minimize overhead.
|
|
173
|
+
|
|
174
|
+
## Advanced
|
|
175
|
+
|
|
176
|
+
### Timeout Handling
|
|
177
|
+
|
|
178
|
+
```bash
|
|
179
|
+
pytest --subprocess-timeout=30
|
|
180
|
+
```
|
|
181
|
+
|
|
182
|
+
Timeout errors are clearly reported with the group name and timeout duration.
|
|
183
|
+
|
|
184
|
+
### Crash Detection
|
|
185
|
+
|
|
186
|
+
If a subprocess crashes, tests in that group are marked as failed with exit code information.
|
|
187
|
+
|
|
188
|
+
### Subprocess Detection
|
|
189
|
+
|
|
190
|
+
```python
|
|
191
|
+
import os
|
|
192
|
+
|
|
193
|
+
if os.environ.get("PYTEST_RUNNING_IN_SUBPROCESS") == "1":
|
|
194
|
+
# Running in subprocess
|
|
195
|
+
pass
|
|
196
|
+
```
|
|
197
|
+
|
|
198
|
+
## Troubleshooting
|
|
199
|
+
|
|
200
|
+
**Tests timing out**: Increase timeout with `--subprocess-timeout=600`
|
|
201
|
+
|
|
202
|
+
**Missing output**: Enable capture for passed tests with `subprocess_capture_passed = true`
|
|
203
|
+
|
|
204
|
+
**Subprocess crashes**: Check for segfaults, OOM, or signal issues. Run with `-v` for details.
|
|
205
|
+
|
|
206
|
+
## License
|
|
207
|
+
|
|
208
|
+
MIT License - see LICENSE file for details.
|
|
209
|
+
|
|
210
|
+
## Changelog
|
|
211
|
+
|
|
212
|
+
### 0.1.0 (2026-01-12)
|
|
213
|
+
|
|
214
|
+
- Initial release
|
|
215
|
+
- Process isolation with subprocess marker
|
|
216
|
+
- Smart grouping by module or explicit group names
|
|
217
|
+
- Timeout support
|
|
218
|
+
- Complete test phase capture (setup/call/teardown)
|
|
219
|
+
- JUnit XML and standard reporter integration
|
|
220
|
+
- Comprehensive error handling and reporting
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
pytest_isolated/__init__.py,sha256=iqPlqv1sT1c10XcWOeYbJyRyIzeQk0Sa-OhOS14IMfg,89
|
|
2
|
+
pytest_isolated/plugin.py,sha256=lLNjY5Jm5SmllySvVPRCF87fpQTnRx_RMtloKeM1m9U,11296
|
|
3
|
+
pytest_isolated-0.1.0.dist-info/licenses/LICENSE,sha256=WECJyowi685PZSnKcA4Tqs7jukfzbnk7iMPLnm_q4JI,1067
|
|
4
|
+
pytest_isolated-0.1.0.dist-info/METADATA,sha256=qfVoG6VGZCb-Dj5Wl12sGQH7WO-U2Gj4KcGV2H_FXzU,5131
|
|
5
|
+
pytest_isolated-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
6
|
+
pytest_isolated-0.1.0.dist-info/entry_points.txt,sha256=HgRNPjIGoPBF1pkhma4UtaSwhpOVB8oZRZ0L1FcZXgk,45
|
|
7
|
+
pytest_isolated-0.1.0.dist-info/top_level.txt,sha256=FAtpozhvI-YaiFoZMepi9JAm6e87mW-TM1Ovu5xLOxg,16
|
|
8
|
+
pytest_isolated-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Bryn Lloyd
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
pytest_isolated
|