coverage 7.13.0__cp312-cp312-win_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. a1_coverage.pth +1 -0
  2. coverage/__init__.py +38 -0
  3. coverage/__main__.py +12 -0
  4. coverage/annotate.py +114 -0
  5. coverage/bytecode.py +196 -0
  6. coverage/cmdline.py +1198 -0
  7. coverage/collector.py +486 -0
  8. coverage/config.py +732 -0
  9. coverage/context.py +74 -0
  10. coverage/control.py +1513 -0
  11. coverage/core.py +139 -0
  12. coverage/data.py +227 -0
  13. coverage/debug.py +669 -0
  14. coverage/disposition.py +59 -0
  15. coverage/env.py +135 -0
  16. coverage/exceptions.py +85 -0
  17. coverage/execfile.py +329 -0
  18. coverage/files.py +553 -0
  19. coverage/html.py +860 -0
  20. coverage/htmlfiles/coverage_html.js +735 -0
  21. coverage/htmlfiles/favicon_32.png +0 -0
  22. coverage/htmlfiles/index.html +199 -0
  23. coverage/htmlfiles/keybd_closed.png +0 -0
  24. coverage/htmlfiles/pyfile.html +149 -0
  25. coverage/htmlfiles/style.css +389 -0
  26. coverage/htmlfiles/style.scss +844 -0
  27. coverage/inorout.py +614 -0
  28. coverage/jsonreport.py +192 -0
  29. coverage/lcovreport.py +219 -0
  30. coverage/misc.py +373 -0
  31. coverage/multiproc.py +120 -0
  32. coverage/numbits.py +146 -0
  33. coverage/parser.py +1215 -0
  34. coverage/patch.py +118 -0
  35. coverage/phystokens.py +197 -0
  36. coverage/plugin.py +617 -0
  37. coverage/plugin_support.py +299 -0
  38. coverage/pth_file.py +16 -0
  39. coverage/py.typed +1 -0
  40. coverage/python.py +272 -0
  41. coverage/pytracer.py +369 -0
  42. coverage/regions.py +127 -0
  43. coverage/report.py +298 -0
  44. coverage/report_core.py +117 -0
  45. coverage/results.py +502 -0
  46. coverage/sqldata.py +1153 -0
  47. coverage/sqlitedb.py +239 -0
  48. coverage/sysmon.py +517 -0
  49. coverage/templite.py +318 -0
  50. coverage/tomlconfig.py +212 -0
  51. coverage/tracer.cp312-win_arm64.pyd +0 -0
  52. coverage/tracer.pyi +43 -0
  53. coverage/types.py +206 -0
  54. coverage/version.py +35 -0
  55. coverage/xmlreport.py +264 -0
  56. coverage-7.13.0.dist-info/METADATA +200 -0
  57. coverage-7.13.0.dist-info/RECORD +61 -0
  58. coverage-7.13.0.dist-info/WHEEL +5 -0
  59. coverage-7.13.0.dist-info/entry_points.txt +4 -0
  60. coverage-7.13.0.dist-info/licenses/LICENSE.txt +177 -0
  61. coverage-7.13.0.dist-info/top_level.txt +1 -0
coverage/core.py ADDED
@@ -0,0 +1,139 @@
1
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
2
+ # For details: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt
3
+
4
+ """Management of core choices."""
5
+
6
+ from __future__ import annotations
7
+
8
+ import os
9
+ import sys
10
+ from typing import Any
11
+
12
+ from coverage import env
13
+ from coverage.config import CoverageConfig
14
+ from coverage.disposition import FileDisposition
15
+ from coverage.exceptions import ConfigError
16
+ from coverage.misc import isolate_module
17
+ from coverage.pytracer import PyTracer
18
+ from coverage.sysmon import SysMonitor
19
+ from coverage.types import TDebugCtl, TFileDisposition, Tracer, TWarnFn
20
+
21
+ os = isolate_module(os)
22
+
23
+ IMPORT_ERROR: str = ""
24
+
25
+ try:
26
+ # Use the C extension code when we can, for speed.
27
+ import coverage.tracer
28
+
29
+ CTRACER_FILE: str | None = getattr(coverage.tracer, "__file__", "unknown")
30
+ except ImportError as imp_err:
31
+ # Couldn't import the C extension, maybe it isn't built.
32
+ # We still need to check the environment variable directly here,
33
+ # as this code runs before configuration is loaded.
34
+ if os.getenv("COVERAGE_CORE") == "ctrace": # pragma: part covered
35
+ # During testing, we use the COVERAGE_CORE environment variable
36
+ # to indicate that we've fiddled with the environment to test this
37
+ # fallback code. If we thought we had a C tracer, but couldn't import
38
+ # it, then exit quickly and clearly instead of dribbling confusing
39
+ # errors. I'm using sys.exit here instead of an exception because an
40
+ # exception here causes all sorts of other noise in unittest.
41
+ sys.stderr.write("*** COVERAGE_CORE is 'ctrace' but can't import CTracer!\n")
42
+ sys.exit(1)
43
+ IMPORT_ERROR = str(imp_err)
44
+ CTRACER_FILE = None
45
+
46
+
47
class Core:
    """Information about the central technology enabling execution measurement."""

    # The tracer implementation selected, and the kwargs to construct it with.
    tracer_class: type[Tracer]
    tracer_kwargs: dict[str, Any]
    # Class used to record the per-file tracing decision.
    file_disposition_class: type[TFileDisposition]
    # Capability flags of the selected core.
    supports_plugins: bool
    packed_arcs: bool
    systrace: bool

    def __init__(
        self,
        *,
        warn: TWarnFn,
        debug: TDebugCtl | None,
        config: CoverageConfig,
        dynamic_contexts: bool,
        metacov: bool,
    ) -> None:
        def _log(msg: str) -> None:
            # Send a message to the debug output, if any was provided.
            if debug:
                debug.write(msg)

        _log("in core.py")

        # Work out whether sys.monitoring is ruled out, and why.
        reason_no_sysmon = ""
        if not env.PYBEHAVIOR.pep669:
            reason_no_sysmon = "sys.monitoring isn't available in this version"
        elif config.branch and not env.PYBEHAVIOR.branch_right_left:
            reason_no_sysmon = "sys.monitoring can't measure branches in this version"
        elif dynamic_contexts:
            reason_no_sysmon = "it doesn't yet support dynamic contexts"
        else:
            for impl in ("greenlet", "eventlet", "gevent"):
                if impl in config.concurrency:
                    reason_no_sysmon = f"it doesn't support concurrency={impl}"
                    break

        core_name: str | None
        if config.timid:
            core_name = "pytrace"
            _log("core.py: Using pytrace because timid=True")
        else:
            # This could still leave core_name as None.
            core_name = config.core
            _log(f"core.py: core from config is {core_name!r}")

        # An explicit sysmon request that can't be honored falls back to the default.
        if core_name == "sysmon" and reason_no_sysmon:
            _log(f"core.py: defaulting because sysmon not usable: {reason_no_sysmon}")
            warn(f"Can't use core=sysmon: {reason_no_sysmon}, using default core", slug="no-sysmon")
            core_name = None

        if core_name is None:
            if env.SYSMON_DEFAULT and not reason_no_sysmon:
                core_name = "sysmon"
                _log("core.py: Using sysmon because SYSMON_DEFAULT is set")
            else:
                core_name = "ctrace"
                _log("core.py: Defaulting to ctrace core")

        # If we wanted the C tracer but its import failed, drop to pytrace.
        if core_name == "ctrace" and not CTRACER_FILE:
            if IMPORT_ERROR and env.SHIPPING_WHEELS:
                warn(f"Couldn't import C tracer: {IMPORT_ERROR}", slug="no-ctracer", once=True)
            core_name = "pytrace"
            _log("core.py: Falling back to pytrace because C tracer not available")

        _log(f"core.py: Using core={core_name}")

        self.tracer_kwargs = {}

        if core_name == "sysmon":
            self.tracer_class = SysMonitor
            # metacov (coverage measuring itself) needs a distinct tool id.
            self.tracer_kwargs["tool_id"] = 3 if metacov else 1
            self.file_disposition_class = FileDisposition
            self.supports_plugins = False
            self.packed_arcs = False
            self.systrace = False
        elif core_name == "ctrace":
            self.tracer_class = coverage.tracer.CTracer
            self.file_disposition_class = coverage.tracer.CFileDisposition
            self.supports_plugins = True
            self.packed_arcs = True
            self.systrace = True
        elif core_name == "pytrace":
            self.tracer_class = PyTracer
            self.file_disposition_class = FileDisposition
            self.supports_plugins = False
            self.packed_arcs = False
            self.systrace = True
        else:
            raise ConfigError(f"Unknown core value: {core_name!r}")

    def __repr__(self) -> str:
        return f"<Core tracer_class={self.tracer_class.__name__}>"
coverage/data.py ADDED
@@ -0,0 +1,227 @@
1
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
2
+ # For details: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt
3
+
4
+ """Coverage data for coverage.py.
5
+
6
+ This file had the 4.x JSON data support, which is now gone. This file still
7
+ has storage-agnostic helpers, and is kept to avoid changing too many imports.
8
+ CoverageData is now defined in sqldata.py, and imported here to keep the
9
+ imports working.
10
+
11
+ """
12
+
13
+ from __future__ import annotations
14
+
15
+ import functools
16
+ import glob
17
+ import hashlib
18
+ import os.path
19
+ from collections.abc import Iterable
20
+ from typing import Callable
21
+
22
+ from coverage.exceptions import CoverageException, NoDataError
23
+ from coverage.files import PathAliases
24
+ from coverage.misc import Hasher, file_be_gone, human_sorted, plural
25
+ from coverage.sqldata import CoverageData as CoverageData # pylint: disable=useless-import-alias
26
+
27
+
28
def line_counts(data: CoverageData, fullpath: bool = False) -> dict[str, int]:
    """Return a dict summarizing the line coverage data.

    Keys are based on the file names, and values are the number of executed
    lines. If `fullpath` is true, then the keys are the full pathnames of
    the files, otherwise they are the basenames of the files.

    Returns a dict mapping file names to counts of lines.

    """
    counts: dict[str, int] = {}
    for measured in data.measured_files():
        executed = data.lines(measured)
        # A measured file always has line data recorded.
        assert executed is not None
        key = measured if fullpath else os.path.basename(measured)
        counts[key] = len(executed)
    return counts
50
+
51
+
52
def add_data_to_hash(data: CoverageData, filename: str, hasher: Hasher) -> None:
    """Contribute `filename`'s data to the `hasher`.

    `hasher` is a `coverage.misc.Hasher` instance to be updated with
    the file's data. It should only get the results data, not the run
    data.

    """
    # Feed the results (arcs or lines) in a deterministic order.
    if data.has_arcs():
        arcs = data.arcs(filename) or []
        hasher.update(sorted(arcs))
    else:
        hasher.update(sorted_lines(data, filename))
    # The plugin responsible for the file is part of the results too.
    hasher.update(data.file_tracer(filename))
65
+
66
+
67
def combinable_files(data_file: str, data_paths: Iterable[str] | None = None) -> list[str]:
    """Make a list of data files to be combined.

    `data_file` is a path to a data file. `data_paths` is a list of files or
    directories of files.

    Returns a list of absolute file paths.
    """
    data_dir, local = os.path.split(os.path.abspath(data_file))

    found: list[str] = []
    for path in data_paths or [data_dir]:
        if os.path.isfile(path):
            found.append(os.path.abspath(path))
        elif os.path.isdir(path):
            # Parallel data files are named "<data_file>.<suffix>".
            pattern = glob.escape(os.path.join(os.path.abspath(path), local)) + ".*"
            found.extend(glob.glob(pattern))
        else:
            raise NoDataError(f"Couldn't combine from non-existent path '{path}'")

    # SQLite might have made journal files alongside our database files.
    # We never want to combine those.
    # Sorting isn't usually needed, since it shouldn't matter what order files
    # are combined, but sorting makes tests more predictable, and makes
    # debugging more understandable when things go wrong.
    return sorted(fname for fname in found if not fname.endswith("-journal"))
96
+
97
+
98
def combine_parallel_data(
    data: CoverageData,
    aliases: PathAliases | None = None,
    data_paths: Iterable[str] | None = None,
    strict: bool = False,
    keep: bool = False,
    message: Callable[[str], None] | None = None,
) -> None:
    """Combine a number of data files together.

    `data` is a CoverageData.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine. Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Unless `keep` is True every data file found and combined is then deleted
    from disk. If a file cannot be read, a warning will be issued, and the
    file will not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    `message` is a function to use for printing messages to the user.

    """
    files_to_combine = combinable_files(data.base_filename(), data_paths)

    if strict and not files_to_combine:
        raise NoDataError("No data to combine")

    if aliases is None:
        map_path = None
    else:
        # Remapping the same path over and over is wasteful; memoize it.
        map_path = functools.cache(aliases.map)

    # Content hashes of data files already combined, so that byte-identical
    # duplicate files are only combined once.
    file_hashes = set()
    combined_any = False

    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files. Skip that file.
            if data._debug.should("dataio"):
                data._debug.write(f"Skipping combining ourself: {f!r}")
            continue

        try:
            rel_file_name = os.path.relpath(f)
        except ValueError:
            # ValueError can be raised under Windows when os.getcwd() returns a
            # folder from a different drive than the drive of f, in which case
            # we print the original value of f instead of its relative path
            rel_file_name = f

        # Hash the raw bytes of the file to detect duplicates.
        # usedforsecurity=False: this is de-duplication, not cryptography.
        with open(f, "rb") as fobj:
            hasher = hashlib.new("sha3_256", usedforsecurity=False)
            hasher.update(fobj.read())
            sha = hasher.digest()
            combine_this_one = sha not in file_hashes

        delete_this_one = not keep
        if combine_this_one:
            if data._debug.should("dataio"):
                data._debug.write(f"Combining data file {f!r}")
            file_hashes.add(sha)
            try:
                new_data = CoverageData(f, debug=data._debug)
                new_data.read()
            except CoverageException as exc:
                if data._warn:
                    # The CoverageException has the file name in it, so just
                    # use the message as the warning.
                    data._warn(str(exc))
                if message:
                    message(f"Couldn't combine data file {rel_file_name}: {exc}")
                # Keep unreadable files on disk so the user can investigate.
                delete_this_one = False
            else:
                data.update(new_data, map_path=map_path)
                combined_any = True
                if message:
                    message(f"Combined data file {rel_file_name}")
        else:
            if message:
                message(f"Skipping duplicate data {rel_file_name}")

        if delete_this_one:
            if data._debug.should("dataio"):
                data._debug.write(f"Deleting data file {f!r}")
            file_be_gone(f)

    if strict and not combined_any:
        raise NoDataError("No usable data files")
200
+
201
+
202
def debug_data_file(filename: str) -> None:
    """Implementation of 'coverage debug data'.

    Print a summary of the data file at `filename`: its resolved path,
    whether it records arcs, and a per-file count of measured lines.
    """
    data = CoverageData(filename)
    # Use the resolved path of the data file, which may differ from the
    # name the caller passed in.
    filename = data.data_filename()
    # BUG FIX: this printed the literal text "path: (unknown)" because the
    # f-string had no placeholder; report the actual data file path.
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            # Note which file tracer plugin measured this file.
            line += f" [{plugin}]"
        print(line)
222
+
223
+
224
def sorted_lines(data: CoverageData, filename: str) -> list[int]:
    """Get the sorted lines for a file, for tests."""
    recorded = data.lines(filename)
    if recorded is None:
        return []
    return sorted(recorded)