coverage-7.11.1-cp314-cp314-musllinux_1_2_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. coverage/__init__.py +40 -0
  2. coverage/__main__.py +12 -0
  3. coverage/annotate.py +114 -0
  4. coverage/bytecode.py +196 -0
  5. coverage/cmdline.py +1184 -0
  6. coverage/collector.py +486 -0
  7. coverage/config.py +731 -0
  8. coverage/context.py +74 -0
  9. coverage/control.py +1481 -0
  10. coverage/core.py +139 -0
  11. coverage/data.py +227 -0
  12. coverage/debug.py +669 -0
  13. coverage/disposition.py +59 -0
  14. coverage/env.py +135 -0
  15. coverage/exceptions.py +91 -0
  16. coverage/execfile.py +329 -0
  17. coverage/files.py +553 -0
  18. coverage/html.py +856 -0
  19. coverage/htmlfiles/coverage_html.js +733 -0
  20. coverage/htmlfiles/favicon_32.png +0 -0
  21. coverage/htmlfiles/index.html +164 -0
  22. coverage/htmlfiles/keybd_closed.png +0 -0
  23. coverage/htmlfiles/pyfile.html +149 -0
  24. coverage/htmlfiles/style.css +377 -0
  25. coverage/htmlfiles/style.scss +824 -0
  26. coverage/inorout.py +614 -0
  27. coverage/jsonreport.py +188 -0
  28. coverage/lcovreport.py +219 -0
  29. coverage/misc.py +373 -0
  30. coverage/multiproc.py +120 -0
  31. coverage/numbits.py +146 -0
  32. coverage/parser.py +1213 -0
  33. coverage/patch.py +166 -0
  34. coverage/phystokens.py +197 -0
  35. coverage/plugin.py +617 -0
  36. coverage/plugin_support.py +299 -0
  37. coverage/py.typed +1 -0
  38. coverage/python.py +269 -0
  39. coverage/pytracer.py +369 -0
  40. coverage/regions.py +127 -0
  41. coverage/report.py +298 -0
  42. coverage/report_core.py +117 -0
  43. coverage/results.py +471 -0
  44. coverage/sqldata.py +1153 -0
  45. coverage/sqlitedb.py +239 -0
  46. coverage/sysmon.py +474 -0
  47. coverage/templite.py +306 -0
  48. coverage/tomlconfig.py +210 -0
  49. coverage/tracer.cpython-314-aarch64-linux-musl.so +0 -0
  50. coverage/tracer.pyi +43 -0
  51. coverage/types.py +206 -0
  52. coverage/version.py +35 -0
  53. coverage/xmlreport.py +264 -0
  54. coverage-7.11.1.dist-info/METADATA +221 -0
  55. coverage-7.11.1.dist-info/RECORD +59 -0
  56. coverage-7.11.1.dist-info/WHEEL +5 -0
  57. coverage-7.11.1.dist-info/entry_points.txt +4 -0
  58. coverage-7.11.1.dist-info/licenses/LICENSE.txt +177 -0
  59. coverage-7.11.1.dist-info/top_level.txt +1 -0
coverage/patch.py ADDED
@@ -0,0 +1,166 @@
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+ """Invasive patches for coverage.py."""
+
+ from __future__ import annotations
+
+ import atexit
+ import contextlib
+ import os
+ import site
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, NoReturn
+
+ from coverage import env
+ from coverage.debug import NoDebugging, DevNullDebug
+ from coverage.exceptions import ConfigError, CoverageException
+
+ if TYPE_CHECKING:
+     from coverage import Coverage
+     from coverage.config import CoverageConfig
+     from coverage.types import TDebugCtl
+
+
+ def apply_patches(
+     cov: Coverage,
+     config: CoverageConfig,
+     debug: TDebugCtl,
+     *,
+     make_pth_file: bool = True,
+ ) -> None:
+     """Apply invasive patches requested by `[run] patch=`."""
+     debug = debug if debug.should("patch") else DevNullDebug()
+     for patch in sorted(set(config.patch)):
+         match patch:
+             case "_exit":
+                 _patch__exit(cov, debug)
+
+             case "execv":
+                 _patch_execv(cov, config, debug)
+
+             case "fork":
+                 _patch_fork(debug)
+
+             case "subprocess":
+                 _patch_subprocess(config, debug, make_pth_file)
+
+             case _:
+                 raise ConfigError(f"Unknown patch {patch!r}")
+
+
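Note: the patch names matched above are the values accepted by the `[run] patch=` setting in the coverage configuration file. A minimal sketch of enabling two of them in a .coveragerc (the option names come from the match arms above; check the coverage docs for the exact set supported by a given release):

    [run]
    patch =
        subprocess
        fork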
+ def _patch__exit(cov: Coverage, debug: TDebugCtl) -> None:
+     """Patch os._exit."""
+     debug.write("Patching _exit")
+
+     old_exit = os._exit
+
+     def coverage_os_exit_patch(status: int) -> NoReturn:
+         with contextlib.suppress(Exception):
+             debug.write(f"Using _exit patch with {cov = }")
+         with contextlib.suppress(Exception):
+             cov.save()
+         old_exit(status)
+
+     os._exit = coverage_os_exit_patch
+
+
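For context on why `_patch__exit` wraps `os._exit`: unlike `sys.exit()`, `os._exit()` terminates the process immediately, skipping `atexit` handlers and `finally` blocks, so collected coverage data would never be saved. A standalone sketch of that behavior:

    import atexit
    import os

    atexit.register(lambda: print("atexit ran"))  # never prints
    os._exit(0)  # exits at once; atexit handlers and finally blocks are skipped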
+ def _patch_execv(cov: Coverage, config: CoverageConfig, debug: TDebugCtl) -> None:
+     """Patch the execv family of functions."""
+     if env.WINDOWS:
+         raise CoverageException("patch=execv isn't supported yet on Windows.")
+
+     debug.write("Patching execv")
+
+     def make_execv_patch(fname: str, old_execv: Any) -> Any:
+         def coverage_execv_patch(*args: Any, **kwargs: Any) -> Any:
+             with contextlib.suppress(Exception):
+                 debug.write(f"Using execv patch for {fname} with {cov = }")
+             with contextlib.suppress(Exception):
+                 cov.save()
+
+             if fname.endswith("e"):
+                 # Assume the `env` argument is passed positionally.
+                 new_env = args[-1]
+                 # Pass our configuration in the new environment.
+                 new_env["COVERAGE_PROCESS_CONFIG"] = config.serialize()
+                 if env.TESTING:
+                     # The subprocesses need to use the same core as the main process.
+                     new_env["COVERAGE_CORE"] = os.getenv("COVERAGE_CORE")
+
+                     # When testing locally, we need to honor the pyc file location
+                     # or they get written to the .tox directories and pollute the
+                     # next run with a different core.
+                     if (cache_prefix := os.getenv("PYTHONPYCACHEPREFIX")) is not None:
+                         new_env["PYTHONPYCACHEPREFIX"] = cache_prefix
+
+                     # Without this, it fails on PyPy and Ubuntu.
+                     new_env["PATH"] = os.getenv("PATH")
+             old_execv(*args, **kwargs)
+
+         return coverage_execv_patch
+
+     # All the exec* and spawn* functions eventually call execv or execve.
+     os.execv = make_execv_patch("execv", os.execv)
+     os.execve = make_execv_patch("execve", os.execve)
+
+
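`_patch_execv` must save data before calling through, because the exec* family replaces the current process image and never returns. A minimal illustration of that stdlib behavior:

    import os
    import sys

    # Everything after a successful execv is unreachable: the running
    # Python process is replaced in-place by the new program.
    os.execv(sys.executable, [sys.executable, "-c", "print('replaced')"])
    print("never reached")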
+ def _patch_fork(debug: TDebugCtl) -> None:
+     """Ensure Coverage is properly reset after a fork."""
+     from coverage.control import _after_fork_in_child
+
+     if env.WINDOWS:
+         raise CoverageException("patch=fork isn't supported yet on Windows.")
+
+     debug.write("Patching fork")
+     os.register_at_fork(after_in_child=_after_fork_in_child)
+
+
+ def _patch_subprocess(config: CoverageConfig, debug: TDebugCtl, make_pth_file: bool) -> None:
+     """Write .pth files and set environment vars to measure subprocesses."""
+     debug.write("Patching subprocess")
+
+     if make_pth_file:
+         pth_files = create_pth_files(debug)
+
+         def delete_pth_files() -> None:
+             for p in pth_files:
+                 debug.write(f"Deleting subprocess .pth file: {str(p)!r}")
+                 p.unlink(missing_ok=True)
+
+         atexit.register(delete_pth_files)
+     assert config.config_file is not None
+     os.environ["COVERAGE_PROCESS_CONFIG"] = config.serialize()
+
+
+ # Writing .pth files is not obvious. On Windows, getsitepackages() returns two
+ # directories. A .pth file in the first will be run, but coverage isn't
+ # importable yet. We write into all the places we can, but with defensive
+ # import code.
+
+ PTH_CODE = """\
+ try:
+     import coverage
+ except:
+     pass
+ else:
+     coverage.process_startup()
+ """
+
+ PTH_TEXT = f"import sys; exec({PTH_CODE!r})\n"
+
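Python's `site` module only executes lines in a .pth file that begin with `import`, which is why PTH_CODE is packed into a single `import sys; exec(...)` line. The file written by `create_pth_files` below ends up looking roughly like this (path and pid are illustrative):

    # .../site-packages/subcover_12345.pth
    import sys; exec('try:\n    import coverage\nexcept:\n    pass\nelse:\n    coverage.process_startup()\n')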
+ def create_pth_files(debug: TDebugCtl = NoDebugging()) -> list[Path]:
+     """Create .pth files for measuring subprocesses."""
+     pth_files = []
+     for pth_dir in site.getsitepackages():
+         pth_file = Path(pth_dir) / f"subcover_{os.getpid()}.pth"
+         try:
+             if debug.should("patch"):
+                 debug.write(f"Writing subprocess .pth file: {str(pth_file)!r}")
+             pth_file.write_text(PTH_TEXT, encoding="utf-8")
+         except OSError:  # pragma: cant happen
+             continue
+         else:
+             pth_files.append(pth_file)
+     return pth_files
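A sketch of exercising `create_pth_files` directly; note it writes real files into every `site.getsitepackages()` directory, so they should be removed afterwards, mirroring the `delete_pth_files` atexit hook above:

    from coverage.patch import create_pth_files

    paths = create_pth_files()
    for p in paths:
        print(p)                   # e.g. .../site-packages/subcover_<pid>.pth
        p.unlink(missing_ok=True)  # remove the startup hook again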
coverage/phystokens.py ADDED
@@ -0,0 +1,197 @@
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+ """Better tokenizing for coverage.py."""
+
+ from __future__ import annotations
+
+ import ast
+ import io
+ import keyword
+ import re
+ import sys
+ import token
+ import tokenize
+ from collections.abc import Iterable
+
+ from coverage import env
+ from coverage.types import TLineNo, TSourceTokenLines
+
+ TokenInfos = Iterable[tokenize.TokenInfo]
+
+
+ def _phys_tokens(toks: TokenInfos) -> TokenInfos:
+     """Return all physical tokens, even line continuations.
+
+     tokenize.generate_tokens() doesn't return a token for the backslash that
+     continues lines. This wrapper provides those tokens so that we can
+     re-create a faithful representation of the original source.
+
+     Returns the same values as generate_tokens().
+
+     """
+     last_line: str | None = None
+     last_lineno = -1
+     last_ttext: str = ""
+     for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
+         if last_lineno != elineno:
+             if last_line and last_line.endswith("\\\n"):
+                 # We are at the beginning of a new line, and the last line
+                 # ended with a backslash. We probably have to inject a
+                 # backslash token into the stream. Unfortunately, there's more
+                 # to figure out. This code::
+                 #
+                 #   usage = """\
+                 #   HEY THERE
+                 #   """
+                 #
+                 # triggers this condition, but the token text is::
+                 #
+                 #   '"""\\\nHEY THERE\n"""'
+                 #
+                 # so we need to figure out if the backslash is already in the
+                 # string token or not.
+                 inject_backslash = True
+                 if last_ttext.endswith("\\"):
+                     inject_backslash = False
+                 elif ttype == token.STRING:
+                     if (  # pylint: disable=simplifiable-if-statement
+                         last_line.endswith("\\\n")
+                         and last_line.rstrip(" \\\n").endswith(last_ttext)
+                     ):
+                         # Deal with special cases like such code::
+                         #
+                         #   a = ["aaa",\      # there may be zero or more blanks between "," and "\".
+                         #        "bbb \
+                         #        ccc"]
+                         #
+                         inject_backslash = True
+                     else:
+                         # It's a multi-line string and the first line ends with
+                         # a backslash, so we don't need to inject another.
+                         inject_backslash = False
+                 elif env.PYBEHAVIOR.fstring_syntax and ttype == token.FSTRING_MIDDLE:
+                     inject_backslash = False
+                 if inject_backslash:
+                     # Figure out what column the backslash is in.
+                     ccol = len(last_line.split("\n")[-2]) - 1
+                     # Yield the token, with a fake token type.
+                     yield tokenize.TokenInfo(
+                         99999,
+                         "\\\n",
+                         (slineno, ccol),
+                         (slineno, ccol + 2),
+                         last_line,
+                     )
+         last_line = ltext
+         if ttype not in (tokenize.NEWLINE, tokenize.NL):
+             last_ttext = ttext
+         yield tokenize.TokenInfo(ttype, ttext, (slineno, scol), (elineno, ecol), ltext)
+         last_lineno = elineno
+
+
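The gap `_phys_tokens` fills can be seen directly with the stdlib tokenizer: no token covers a backslash line continuation, so naively concatenating token text would lose it. A quick demonstration:

    import io
    import tokenize

    src = "x = 1 + \\\n    2\n"
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
    # NAME 'x', OP '=', NUMBER '1', OP '+', NUMBER '2', NEWLINE ... --
    # the "\\\n" between '+' and '2' never appears as a token.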
+ def find_soft_key_lines(source: str) -> set[TLineNo]:
+     """Helper for finding lines with soft keywords, like match/case lines."""
+     soft_key_lines: set[TLineNo] = set()
+
+     for node in ast.walk(ast.parse(source)):
+         if isinstance(node, ast.Match):
+             soft_key_lines.add(node.lineno)
+             for case in node.cases:
+                 soft_key_lines.add(case.pattern.lineno)
+         elif sys.version_info >= (3, 12) and isinstance(node, ast.TypeAlias):
+             soft_key_lines.add(node.lineno)
+
+     return soft_key_lines
+
+
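`match` and `case` are soft keywords: they are ordinary identifiers except in statement position, so lexing alone can't classify them and the AST walk above is needed. A small check, assuming Python 3.10+ and `find_soft_key_lines` in scope:

    src = (
        "match = 1\n"      # line 1: 'match' is just a name here
        "match match:\n"   # line 2: a real match statement
        "    case 1:\n"    # line 3: a case pattern
        "        pass\n"
    )
    print(find_soft_key_lines(src))  # expected: {2, 3}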
+ def source_token_lines(source: str) -> TSourceTokenLines:
+     """Generate a series of lines, one for each line in `source`.
+
+     Each line is a list of pairs, each pair is a token::
+
+         [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]
+
+     Each pair has a token class, and the token text.
+
+     If you concatenate all the token texts, and then join them with newlines,
+     you should have your original `source` back, with two differences:
+     trailing white space is not preserved, and a final line with no newline
+     is indistinguishable from a final line with a newline.
+
+     """
+
+     ws_tokens = {token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL}
+     line: list[tuple[str, str]] = []
+     col = 0
+
+     source = source.expandtabs(8).replace("\r\n", "\n")
+     tokgen = generate_tokens(source)
+
+     soft_key_lines = find_soft_key_lines(source)
+
+     for ttype, ttext, (sline, scol), (_, ecol), _ in _phys_tokens(tokgen):
+         mark_start = True
+         for part in re.split("(\n)", ttext):
+             if part == "\n":
+                 yield line
+                 line = []
+                 col = 0
+                 mark_end = False
+             elif part == "":
+                 mark_end = False
+             elif ttype in ws_tokens:
+                 mark_end = False
+             else:
+                 if env.PYBEHAVIOR.fstring_syntax and ttype == token.FSTRING_MIDDLE:
+                     part = part.replace("{", "{{").replace("}", "}}")
+                     ecol = scol + len(part)
+                 if mark_start and scol > col:
+                     line.append(("ws", " " * (scol - col)))
+                     mark_start = False
+                 tok_class = tokenize.tok_name.get(ttype, "xx").lower()[:3]
+                 if ttype == token.NAME:
+                     if keyword.iskeyword(ttext):
+                         # Hard keywords are always keywords.
+                         tok_class = "key"
+                     elif keyword.issoftkeyword(ttext):
+                         # Soft keywords appear at the start of their line.
+                         if len(line) == 0:
+                             is_start_of_line = True
+                         elif (len(line) == 1) and line[0][0] == "ws":
+                             is_start_of_line = True
+                         else:
+                             is_start_of_line = False
+                         if is_start_of_line and sline in soft_key_lines:
+                             tok_class = "key"
+                 line.append((tok_class, part))
+                 mark_end = True
+             scol = 0
+             if mark_end:
+                 col = ecol
+
+     if line:
+         yield line
+
+
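A sketch of calling `source_token_lines` on a two-line function; the token classes ('key', 'nam', 'op', 'num', 'ws') are the three-letter truncations of the tokenize names, as computed above:

    src = "def hello():\n    return 42\n"
    for line in source_token_lines(src):
        print(line)
    # [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ('op', ')'), ('op', ':')]
    # [('ws', '    '), ('key', 'return'), ('ws', ' '), ('num', '42')]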
+ def generate_tokens(text: str) -> TokenInfos:
+     """A helper around `tokenize.generate_tokens`.
+
+     Originally this was used to cache the results, but it didn't seem to make
+     reporting go faster, and caused issues with using too much memory.
+
+     """
+     readline = io.StringIO(text).readline
+     return tokenize.generate_tokens(readline)
+
+
+ def source_encoding(source: bytes) -> str:
+     """Determine the encoding for `source`, according to PEP 263.
+
+     `source` is a byte string: the text of the program.
+
+     Returns a string, the name of the encoding.
+
+     """
+     readline = iter(source.splitlines(True)).__next__
+     return tokenize.detect_encoding(readline)[0]
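Finally, a quick sketch of `source_encoding`, which defers the PEP 263 parsing to `tokenize.detect_encoding`; note the stdlib normalizes aliases, e.g. latin-1 to iso-8859-1:

    print(source_encoding(b"# -*- coding: latin-1 -*-\nx = 1\n"))  # iso-8859-1
    print(source_encoding(b"x = 1\n"))  # utf-8, the default with no declaration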