coverage-7.11.1-cp314-cp314-musllinux_1_2_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. coverage/__init__.py +40 -0
  2. coverage/__main__.py +12 -0
  3. coverage/annotate.py +114 -0
  4. coverage/bytecode.py +196 -0
  5. coverage/cmdline.py +1184 -0
  6. coverage/collector.py +486 -0
  7. coverage/config.py +731 -0
  8. coverage/context.py +74 -0
  9. coverage/control.py +1481 -0
  10. coverage/core.py +139 -0
  11. coverage/data.py +227 -0
  12. coverage/debug.py +669 -0
  13. coverage/disposition.py +59 -0
  14. coverage/env.py +135 -0
  15. coverage/exceptions.py +91 -0
  16. coverage/execfile.py +329 -0
  17. coverage/files.py +553 -0
  18. coverage/html.py +856 -0
  19. coverage/htmlfiles/coverage_html.js +733 -0
  20. coverage/htmlfiles/favicon_32.png +0 -0
  21. coverage/htmlfiles/index.html +164 -0
  22. coverage/htmlfiles/keybd_closed.png +0 -0
  23. coverage/htmlfiles/pyfile.html +149 -0
  24. coverage/htmlfiles/style.css +377 -0
  25. coverage/htmlfiles/style.scss +824 -0
  26. coverage/inorout.py +614 -0
  27. coverage/jsonreport.py +188 -0
  28. coverage/lcovreport.py +219 -0
  29. coverage/misc.py +373 -0
  30. coverage/multiproc.py +120 -0
  31. coverage/numbits.py +146 -0
  32. coverage/parser.py +1213 -0
  33. coverage/patch.py +166 -0
  34. coverage/phystokens.py +197 -0
  35. coverage/plugin.py +617 -0
  36. coverage/plugin_support.py +299 -0
  37. coverage/py.typed +1 -0
  38. coverage/python.py +269 -0
  39. coverage/pytracer.py +369 -0
  40. coverage/regions.py +127 -0
  41. coverage/report.py +298 -0
  42. coverage/report_core.py +117 -0
  43. coverage/results.py +471 -0
  44. coverage/sqldata.py +1153 -0
  45. coverage/sqlitedb.py +239 -0
  46. coverage/sysmon.py +474 -0
  47. coverage/templite.py +306 -0
  48. coverage/tomlconfig.py +210 -0
  49. coverage/tracer.cpython-314-aarch64-linux-musl.so +0 -0
  50. coverage/tracer.pyi +43 -0
  51. coverage/types.py +206 -0
  52. coverage/version.py +35 -0
  53. coverage/xmlreport.py +264 -0
  54. coverage-7.11.1.dist-info/METADATA +221 -0
  55. coverage-7.11.1.dist-info/RECORD +59 -0
  56. coverage-7.11.1.dist-info/WHEEL +5 -0
  57. coverage-7.11.1.dist-info/entry_points.txt +4 -0
  58. coverage-7.11.1.dist-info/licenses/LICENSE.txt +177 -0
  59. coverage-7.11.1.dist-info/top_level.txt +1 -0
coverage/jsonreport.py ADDED
@@ -0,0 +1,188 @@
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+ """Json reporting for coverage.py"""
+
+ from __future__ import annotations
+
+ import datetime
+ import json
+ import sys
+ from collections.abc import Iterable
+ from typing import IO, TYPE_CHECKING, Any
+
+ from coverage import __version__
+ from coverage.report_core import get_analysis_to_report
+ from coverage.results import Analysis, AnalysisNarrower, Numbers
+ from coverage.types import TLineNo, TMorf
+
+ if TYPE_CHECKING:
+     from coverage import Coverage
+     from coverage.data import CoverageData
+     from coverage.plugin import FileReporter
+
+
+ # A type for data that can be JSON-serialized.
+ JsonObj = dict[str, Any]
+
+ # "Version 1" had no format number at all.
+ # 2: add the meta.format field.
+ # 3: add region information (functions, classes)
+ FORMAT_VERSION = 3
+
+
+ class JsonReporter:
+     """A reporter for writing JSON coverage results."""
+
+     report_type = "JSON report"
+
+     def __init__(self, coverage: Coverage) -> None:
+         self.coverage = coverage
+         self.config = self.coverage.config
+         self.total = Numbers(self.config.precision)
+         self.report_data: JsonObj = {}
+
+     def make_summary(self, nums: Numbers) -> JsonObj:
+         """Create a dict summarizing `nums`."""
+         return {
+             "covered_lines": nums.n_executed,
+             "num_statements": nums.n_statements,
+             "percent_covered": nums.pc_covered,
+             "percent_covered_display": nums.pc_covered_str,
+             "missing_lines": nums.n_missing,
+             "excluded_lines": nums.n_excluded,
+         }
+
+     def make_branch_summary(self, nums: Numbers) -> JsonObj:
+         """Create a dict summarizing the branch info in `nums`."""
+         return {
+             "num_branches": nums.n_branches,
+             "num_partial_branches": nums.n_partial_branches,
+             "covered_branches": nums.n_executed_branches,
+             "missing_branches": nums.n_missing_branches,
+         }
+
+     def report(self, morfs: Iterable[TMorf] | None, outfile: IO[str]) -> float:
+         """Generate a json report for `morfs`.
+
+         `morfs` is a list of modules or file names.
+
+         `outfile` is a file object to write the json to.
+
+         """
+         outfile = outfile or sys.stdout
+         coverage_data = self.coverage.get_data()
+         coverage_data.set_query_contexts(self.config.report_contexts)
+         self.report_data["meta"] = {
+             "format": FORMAT_VERSION,
+             "version": __version__,
+             "timestamp": datetime.datetime.now().isoformat(),
+             "branch_coverage": coverage_data.has_arcs(),
+             "show_contexts": self.config.json_show_contexts,
+         }
+
+         measured_files = {}
+         for file_reporter, analysis in get_analysis_to_report(self.coverage, morfs):
+             measured_files[file_reporter.relative_filename()] = self.report_one_file(
+                 coverage_data,
+                 analysis,
+                 file_reporter,
+             )
+
+         self.report_data["files"] = measured_files
+         self.report_data["totals"] = self.make_summary(self.total)
+
+         if coverage_data.has_arcs():
+             self.report_data["totals"].update(self.make_branch_summary(self.total))
+
+         json.dump(
+             self.report_data,
+             outfile,
+             indent=(4 if self.config.json_pretty_print else None),
+         )
+
+         return self.total.n_statements and self.total.pc_covered
+
+     def report_one_file(
+         self, coverage_data: CoverageData, analysis: Analysis, file_reporter: FileReporter
+     ) -> JsonObj:
+         """Extract the relevant report data for a single file."""
+         nums = analysis.numbers
+         self.total += nums
+         summary = self.make_summary(nums)
+         reported_file: JsonObj = {
+             "executed_lines": sorted(analysis.executed),
+             "summary": summary,
+             "missing_lines": sorted(analysis.missing),
+             "excluded_lines": sorted(analysis.excluded),
+         }
+         if self.config.json_show_contexts:
+             reported_file["contexts"] = coverage_data.contexts_by_lineno(analysis.filename)
+         if coverage_data.has_arcs():
+             summary.update(self.make_branch_summary(nums))
+             reported_file["executed_branches"] = list(
+                 _convert_branch_arcs(analysis.executed_branch_arcs()),
+             )
+             reported_file["missing_branches"] = list(
+                 _convert_branch_arcs(analysis.missing_branch_arcs()),
+             )
+
+         num_lines = len(file_reporter.source().splitlines())
+         regions = file_reporter.code_regions()
+         for noun, plural in file_reporter.code_region_kinds():
+             outside_lines = set(range(1, num_lines + 1))
+             for region in regions:
+                 if region.kind != noun:
+                     continue
+                 outside_lines -= region.lines
+
+             narrower = AnalysisNarrower(analysis)
+             narrower.add_regions(r.lines for r in regions if r.kind == noun)
+             narrower.add_regions([outside_lines])
+
+             reported_file[plural] = region_data = {}
+             for region in regions:
+                 if region.kind != noun:
+                     continue
+                 region_data[region.name] = self.make_region_data(
+                     coverage_data,
+                     narrower.narrow(region.lines),
+                 )
+
+             region_data[""] = self.make_region_data(
+                 coverage_data,
+                 narrower.narrow(outside_lines),
+             )
+         return reported_file
+
+     def make_region_data(self, coverage_data: CoverageData, narrowed_analysis: Analysis) -> JsonObj:
+         """Create the data object for one region of a file."""
+         narrowed_nums = narrowed_analysis.numbers
+         narrowed_summary = self.make_summary(narrowed_nums)
+         this_region = {
+             "executed_lines": sorted(narrowed_analysis.executed),
+             "summary": narrowed_summary,
+             "missing_lines": sorted(narrowed_analysis.missing),
+             "excluded_lines": sorted(narrowed_analysis.excluded),
+         }
+         if self.config.json_show_contexts:
+             contexts = coverage_data.contexts_by_lineno(narrowed_analysis.filename)
+             this_region["contexts"] = contexts
+         if coverage_data.has_arcs():
+             narrowed_summary.update(self.make_branch_summary(narrowed_nums))
+             this_region["executed_branches"] = list(
+                 _convert_branch_arcs(narrowed_analysis.executed_branch_arcs()),
+             )
+             this_region["missing_branches"] = list(
+                 _convert_branch_arcs(narrowed_analysis.missing_branch_arcs()),
+             )
+         return this_region
+
+
+ def _convert_branch_arcs(
+     branch_arcs: dict[TLineNo, list[TLineNo]],
+ ) -> Iterable[tuple[TLineNo, TLineNo]]:
+     """Convert branch arcs to a list of two-element tuples."""
+     for source, targets in branch_arcs.items():
+         for target in targets:
+             yield source, target
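
Note: JsonReporter is normally driven through coverage.py's public API or the `coverage json` command rather than instantiated directly. The following is a minimal usage sketch, assuming coverage data has already been collected into a .coverage file and using the documented Coverage.json_report() entry point; the keys named in the comments mirror the fields built by make_summary() and report() above.

    # Sketch: generate the JSON report implemented by coverage/jsonreport.py.
    import json

    import coverage

    cov = coverage.Coverage()   # reads configuration (precision, json_pretty_print, ...)
    cov.load()                  # load the existing .coverage data file
    percent = cov.json_report(outfile="coverage.json")  # returns total percent covered

    with open("coverage.json") as f:
        report = json.load(f)

    # Top-level keys written by JsonReporter.report():
    #   report["meta"]   -> {"format": 3, "version": ..., "timestamp": ..., ...}
    #   report["files"]  -> per-file dicts with "executed_lines", "summary", ...
    #   report["totals"] -> the make_summary() dict (plus branch fields when arcs exist)
    print(percent, sorted(report))

The command-line equivalent is `coverage json -o coverage.json`.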
coverage/lcovreport.py ADDED
@@ -0,0 +1,219 @@
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+ """LCOV reporting for coverage.py."""
+
+ from __future__ import annotations
+
+ import base64
+ import hashlib
+ import sys
+ from collections.abc import Iterable
+ from typing import IO, TYPE_CHECKING
+
+ from coverage.plugin import FileReporter
+ from coverage.report_core import get_analysis_to_report
+ from coverage.results import Analysis, AnalysisNarrower, Numbers
+ from coverage.types import TMorf
+
+ if TYPE_CHECKING:
+     from coverage import Coverage
+
+
+ def line_hash(line: str) -> str:
+     """Produce a hash of a source line for use in the LCOV file."""
+     # The LCOV file format optionally allows each line to be MD5ed as a
+     # fingerprint of the file. This is not a security use. Some security
+     # scanners raise alarms about the use of MD5 here, but it is a false
+     # positive. This is not a security concern.
+     # The unusual encoding of the MD5 hash, as a base64 sequence with the
+     # trailing = signs stripped, is specified by the LCOV file format.
+     hashed = hashlib.md5(line.encode("utf-8"), usedforsecurity=False).digest()
+     return base64.b64encode(hashed).decode("ascii").rstrip("=")
+
+
+ def lcov_lines(
+     analysis: Analysis,
+     lines: list[int],
+     source_lines: list[str],
+     outfile: IO[str],
+ ) -> None:
+     """Emit line coverage records for an analyzed file."""
+     hash_suffix = ""
+     for line in lines:
+         if source_lines:
+             hash_suffix = "," + line_hash(source_lines[line - 1])
+         # Q: can we get info about the number of times a statement is
+         # executed? If so, that should be recorded here.
+         hit = int(line not in analysis.missing)
+         outfile.write(f"DA:{line},{hit}{hash_suffix}\n")
+
+     if analysis.numbers.n_statements > 0:
+         outfile.write(f"LF:{analysis.numbers.n_statements}\n")
+         outfile.write(f"LH:{analysis.numbers.n_executed}\n")
+
+
+ def lcov_functions(
+     fr: FileReporter,
+     file_analysis: Analysis,
+     outfile: IO[str],
+ ) -> None:
+     """Emit function coverage records for an analyzed file."""
+     # lcov 2.2 introduces a new format for function coverage records.
+     # We continue to generate the old format because we don't know what
+     # version of the lcov tools will be used to read this report.
+
+     # "and region.lines" below avoids a crash due to a bug in PyPy 3.8
+     # where, for whatever reason, when collecting data in --branch mode,
+     # top-level functions have an empty lines array. Instead we just don't
+     # emit function records for those.
+
+     # suppressions because of https://github.com/pylint-dev/pylint/issues/9923
+     functions = [
+         (
+             min(region.start, min(region.lines)),  # pylint: disable=nested-min-max
+             max(region.start, max(region.lines)),  # pylint: disable=nested-min-max
+             region,
+         )
+         for region in fr.code_regions()
+         if region.kind == "function" and region.lines
+     ]
+     if not functions:
+         return
+
+     narrower = AnalysisNarrower(file_analysis)
+     narrower.add_regions(r.lines for _, _, r in functions)
+
+     functions.sort()
+     functions_hit = 0
+     for first_line, last_line, region in functions:
+         # A function counts as having been executed if any of it has been
+         # executed.
+         analysis = narrower.narrow(region.lines)
+         hit = int(analysis.numbers.n_executed > 0)
+         functions_hit += hit
+
+         outfile.write(f"FN:{first_line},{last_line},{region.name}\n")
+         outfile.write(f"FNDA:{hit},{region.name}\n")
+
+     outfile.write(f"FNF:{len(functions)}\n")
+     outfile.write(f"FNH:{functions_hit}\n")
+
+
+ def lcov_arcs(
+     fr: FileReporter,
+     analysis: Analysis,
+     lines: list[int],
+     outfile: IO[str],
+ ) -> None:
+     """Emit branch coverage records for an analyzed file."""
+     branch_stats = analysis.branch_stats()
+     executed_arcs = analysis.executed_branch_arcs()
+     missing_arcs = analysis.missing_branch_arcs()
+
+     for line in lines:
+         if line not in branch_stats:
+             continue
+
+         # This is only one of several possible ways to map our sets of executed
+         # and not-executed arcs to BRDA codes. It seems to produce reasonable
+         # results when fed through genhtml.
+         _, taken = branch_stats[line]
+
+         if taken == 0:
+             # When _none_ of the out arcs from 'line' were executed,
+             # it can mean the line always raised an exception.
+             assert len(executed_arcs[line]) == 0
+             destinations = [(dst, "-") for dst in missing_arcs[line]]
+         else:
+             # Q: can we get counts of the number of times each arc was executed?
+             # branch_stats has "total" and "taken" counts for each branch,
+             # but it doesn't have "taken" broken down by destination.
+             destinations = [(dst, "1") for dst in executed_arcs[line]]
+             destinations.extend((dst, "0") for dst in missing_arcs[line])
+
+         # Sort exit arcs after normal arcs. Exit arcs typically come from
+         # an if statement, at the end of a function, with no else clause.
+         # This structure reads like you're jumping to the end of the function
+         # when the conditional expression is false, so it should be presented
+         # as the second alternative for the branch, after the alternative that
+         # enters the if clause.
+         destinations.sort(key=lambda d: (d[0] < 0, d))
+
+         for dst, hit in destinations:
+             branch = fr.arc_description(line, dst)
+             outfile.write(f"BRDA:{line},0,{branch},{hit}\n")
+
+     # Summary of the branch coverage.
+     brf = sum(t for t, k in branch_stats.values())
+     brh = brf - sum(t - k for t, k in branch_stats.values())
+     if brf > 0:
+         outfile.write(f"BRF:{brf}\n")
+         outfile.write(f"BRH:{brh}\n")
+
+
+ class LcovReporter:
+     """A reporter for writing LCOV coverage reports."""
+
+     report_type = "LCOV report"
+
+     def __init__(self, coverage: Coverage) -> None:
+         self.coverage = coverage
+         self.config = coverage.config
+         self.total = Numbers(self.coverage.config.precision)
+
+     def report(self, morfs: Iterable[TMorf] | None, outfile: IO[str]) -> float:
+         """Renders the full lcov report.
+
+         `morfs` is a list of modules or filenames
+
+         outfile is the file object to write the file into.
+         """
+
+         self.coverage.get_data()
+         outfile = outfile or sys.stdout
+
+         # ensure file records are sorted by the _relative_ filename, not the full path
+         to_report = [
+             (fr.relative_filename(), fr, analysis)
+             for fr, analysis in get_analysis_to_report(self.coverage, morfs)
+         ]
+         to_report.sort()
+
+         for fname, fr, analysis in to_report:
+             self.total += analysis.numbers
+             self.lcov_file(fname, fr, analysis, outfile)
+
+         return self.total.n_statements and self.total.pc_covered
+
+     def lcov_file(
+         self,
+         rel_fname: str,
+         fr: FileReporter,
+         analysis: Analysis,
+         outfile: IO[str],
+     ) -> None:
+         """Produces the lcov data for a single file.
+
+         This currently supports both line and branch coverage,
+         however function coverage is not supported.
+         """
+
+         if analysis.numbers.n_statements == 0:
+             if self.config.skip_empty:
+                 return
+
+         outfile.write(f"SF:{rel_fname}\n")
+
+         lines = sorted(analysis.statements)
+         if self.config.lcov_line_checksums:
+             source_lines = fr.source().splitlines()
+         else:
+             source_lines = []
+
+         lcov_lines(analysis, lines, source_lines, outfile)
+         lcov_functions(fr, analysis, outfile)
+         if analysis.has_arcs:
+             lcov_arcs(fr, analysis, lines, outfile)
+
+         outfile.write("end_of_record\n")