coverage 7.6.7-cp311-cp311-win_amd64.whl → 7.11.1-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. coverage/__init__.py +2 -0
  2. coverage/__main__.py +2 -0
  3. coverage/annotate.py +1 -2
  4. coverage/bytecode.py +177 -3
  5. coverage/cmdline.py +329 -154
  6. coverage/collector.py +31 -42
  7. coverage/config.py +166 -62
  8. coverage/context.py +4 -5
  9. coverage/control.py +164 -85
  10. coverage/core.py +70 -33
  11. coverage/data.py +3 -4
  12. coverage/debug.py +112 -56
  13. coverage/disposition.py +1 -0
  14. coverage/env.py +65 -55
  15. coverage/exceptions.py +35 -7
  16. coverage/execfile.py +18 -13
  17. coverage/files.py +23 -18
  18. coverage/html.py +134 -88
  19. coverage/htmlfiles/style.css +42 -2
  20. coverage/htmlfiles/style.scss +65 -1
  21. coverage/inorout.py +61 -44
  22. coverage/jsonreport.py +17 -8
  23. coverage/lcovreport.py +16 -20
  24. coverage/misc.py +50 -46
  25. coverage/multiproc.py +12 -7
  26. coverage/numbits.py +3 -4
  27. coverage/parser.py +193 -269
  28. coverage/patch.py +166 -0
  29. coverage/phystokens.py +24 -25
  30. coverage/plugin.py +13 -13
  31. coverage/plugin_support.py +36 -35
  32. coverage/python.py +9 -13
  33. coverage/pytracer.py +40 -33
  34. coverage/regions.py +2 -1
  35. coverage/report.py +59 -43
  36. coverage/report_core.py +6 -9
  37. coverage/results.py +118 -66
  38. coverage/sqldata.py +260 -210
  39. coverage/sqlitedb.py +33 -25
  40. coverage/sysmon.py +195 -157
  41. coverage/templite.py +6 -6
  42. coverage/tomlconfig.py +12 -12
  43. coverage/tracer.cp311-win_amd64.pyd +0 -0
  44. coverage/tracer.pyi +2 -0
  45. coverage/types.py +25 -22
  46. coverage/version.py +3 -18
  47. coverage/xmlreport.py +16 -13
  48. {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/METADATA +40 -18
  49. coverage-7.11.1.dist-info/RECORD +59 -0
  50. {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/WHEEL +1 -1
  51. coverage-7.6.7.dist-info/RECORD +0 -58
  52. {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/entry_points.txt +0 -0
  53. {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info/licenses}/LICENSE.txt +0 -0
  54. {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/top_level.txt +0 -0
coverage/inorout.py CHANGED
@@ -15,51 +15,54 @@ import re
  import sys
  import sysconfig
  import traceback
-
- from types import FrameType, ModuleType
- from typing import (
-     cast, Any, TYPE_CHECKING,
- )
  from collections.abc import Iterable
+ from types import FrameType, ModuleType
+ from typing import TYPE_CHECKING, Any, cast

  from coverage import env
  from coverage.disposition import FileDisposition, disposition_init
- from coverage.exceptions import CoverageException, PluginError
- from coverage.files import TreeMatcher, GlobMatcher, ModuleMatcher
- from coverage.files import prep_patterns, find_python_files, canonical_filename
+ from coverage.exceptions import ConfigError, CoverageException, PluginError
+ from coverage.files import (
+     GlobMatcher,
+     ModuleMatcher,
+     TreeMatcher,
+     canonical_filename,
+     find_python_files,
+     prep_patterns,
+ )
  from coverage.misc import isolate_module, sys_modules_saved
  from coverage.python import source_for_file, source_for_morf
- from coverage.types import TFileDisposition, TMorf, TWarnFn, TDebugCtl
+ from coverage.types import TDebugCtl, TFileDisposition, TMorf, TWarnFn

  if TYPE_CHECKING:
      from coverage.config import CoverageConfig
      from coverage.plugin_support import Plugins


- # Pypy has some unusual stuff in the "stdlib". Consider those locations
- # when deciding where the stdlib is. These modules are not used for anything,
- # they are modules importable from the pypy lib directories, so that we can
- # find those directories.
  modules_we_happen_to_have: list[ModuleType] = [
-     inspect, itertools, os, platform, re, sysconfig, traceback,
+     inspect,
+     itertools,
+     os,
+     platform,
+     re,
+     sysconfig,
+     traceback,
  ]

  if env.PYPY:
-     try:
-         import _structseq
-         modules_we_happen_to_have.append(_structseq)
-     except ImportError:
-         pass
+     # Pypy has some unusual stuff in the "stdlib". Consider those locations
+     # when deciding where the stdlib is. These modules are not used for anything,
+     # they are modules importable from the pypy lib directories, so that we can
+     # find those directories.
+     import _pypy_irc_topic  # pylint: disable=import-error
+     import _structseq  # pylint: disable=import-error

-     try:
-         import _pypy_irc_topic
-         modules_we_happen_to_have.append(_pypy_irc_topic)
-     except ImportError:
-         pass
+     modules_we_happen_to_have.extend([_structseq, _pypy_irc_topic])


  os = isolate_module(os)

+
  def canonical_path(morf: TMorf, directory: bool = False) -> str:
      """Return the canonical path of the module or file `morf`.

@@ -74,7 +77,7 @@ def canonical_path(morf: TMorf, directory: bool = False) -> str:
      return morf_path


- def name_for_module(filename: str, frame: FrameType | None) -> str:
+ def name_for_module(filename: str, frame: FrameType | None) -> str | None:
      """Get the name of the module for a filename and frame.

      For configurability's sake, we allow __main__ modules to be matched by
@@ -87,7 +90,7 @@ def name_for_module(filename: str, frame: FrameType | None) -> str:

      """
      module_globals = frame.f_globals if frame is not None else {}
-     dunder_name: str = module_globals.get("__name__", None)
+     dunder_name: str | None = module_globals.get("__name__", None)

      if isinstance(dunder_name, str) and dunder_name != "__main__":
          # This is the usual case: an imported module.
@@ -191,14 +194,23 @@ class InOrOut:
          self.debug = debug
          self.include_namespace_packages = include_namespace_packages

-         self.source: list[str] = []
          self.source_pkgs: list[str] = []
          self.source_pkgs.extend(config.source_pkgs)
+         self.source_dirs: list[str] = []
+         self.source_dirs.extend(config.source_dirs)
          for src in config.source or []:
              if os.path.isdir(src):
-                 self.source.append(canonical_filename(src))
+                 self.source_dirs.append(src)
              else:
                  self.source_pkgs.append(src)
+
+         # Canonicalize everything in `source_dirs`.
+         # Also confirm that they actually are directories.
+         for i, src in enumerate(self.source_dirs):
+             if not os.path.isdir(src):
+                 raise ConfigError(f"Source dir is not a directory: {src!r}")
+             self.source_dirs[i] = canonical_filename(src)
+
          self.source_pkgs_unmatched = self.source_pkgs[:]

          self.include = prep_patterns(config.run_include)
@@ -233,10 +245,10 @@ class InOrOut:
          self.pylib_match = None
          self.include_match = self.omit_match = None

-         if self.source or self.source_pkgs:
+         if self.source_dirs or self.source_pkgs:
              against = []
-             if self.source:
-                 self.source_match = TreeMatcher(self.source, "source")
+             if self.source_dirs:
+                 self.source_match = TreeMatcher(self.source_dirs, "source")
                  against.append(f"trees {self.source_match!r}")
              if self.source_pkgs:
                  self.source_pkgs_match = ModuleMatcher(self.source_pkgs, "source_pkgs")
@@ -285,7 +297,7 @@ class InOrOut:
              )
              self.source_in_third_paths.add(pathdir)

-         for src in self.source:
+         for src in self.source_dirs:
              if self.third_match.match(src):
                  _debug(f"Source in third-party: source directory {src!r}")
                  self.source_in_third_paths.add(src)
@@ -410,7 +422,7 @@ class InOrOut:
          extra = ""
          ok = False
          if self.source_pkgs_match:
-             if self.source_pkgs_match.match(modulename):
+             if isinstance(modulename, str) and self.source_pkgs_match.match(modulename):
                  ok = True
                  if modulename in self.source_pkgs_unmatched:
                      self.source_pkgs_unmatched.remove(modulename)
@@ -457,12 +469,12 @@ class InOrOut:
      def warn_conflicting_settings(self) -> None:
          """Warn if there are settings that conflict."""
          if self.include:
-             if self.source or self.source_pkgs:
+             if self.source_dirs or self.source_pkgs:
                  self.warn("--include is ignored because --source is set", slug="include-ignored")

      def warn_already_imported_files(self) -> None:
          """Warn if files have already been imported that we will be measuring."""
-         if self.include or self.source or self.source_pkgs:
+         if self.include or self.source_dirs or self.source_pkgs:
              warned = set()
              for mod in list(sys.modules.values()):
                  filename = getattr(mod, "__file__", None)
@@ -488,7 +500,8 @@ class InOrOut:
                  elif self.debug and self.debug.should("trace"):
                      self.debug.write(
                          "Didn't trace already imported file {!r}: {}".format(
-                             disp.original_filename, disp.reason,
+                             disp.original_filename,
+                             disp.reason,
                          ),
                      )

@@ -529,13 +542,12 @@ class InOrOut:
          Yields pairs: file path, and responsible plug-in name.
          """
          for pkg in self.source_pkgs:
-             if (pkg not in sys.modules or
-                     not module_has_file(sys.modules[pkg])):
+             if pkg not in sys.modules or not module_has_file(sys.modules[pkg]):
                  continue
              pkg_file = source_for_file(cast(str, sys.modules[pkg].__file__))
              yield from self._find_executable_files(canonical_path(pkg_file))

-         for src in self.source:
+         for src in self.source_dirs:
              yield from self._find_executable_files(src)

      def _find_plugin_files(self, src_dir: str) -> Iterable[tuple[str, str]]:
@@ -555,8 +567,8 @@ class InOrOut:

          """
          py_files = (
-             (py_file, None) for py_file in
-             find_python_files(src_dir, self.include_namespace_packages)
+             (py_file, None)
+             for py_file in find_python_files(src_dir, self.include_namespace_packages)
          )
          plugin_files = self._find_plugin_files(src_dir)

@@ -581,9 +593,14 @@ class InOrOut:
          ]

          matcher_names = [
-             "source_match", "source_pkgs_match",
-             "include_match", "omit_match",
-             "cover_match", "pylib_match", "third_match", "source_in_third_match",
+             "source_match",
+             "source_pkgs_match",
+             "include_match",
+             "omit_match",
+             "cover_match",
+             "pylib_match",
+             "third_match",
+             "source_in_third_match",
          ]

          for matcher_name in matcher_names:
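
The inorout.py changes above split the old single `self.source` list into `source_dirs` and `source_pkgs`: `source_dirs` is fed from the new `source_dirs` config option plus any `[run] source` entry that exists as a directory, and a configured source dir that is not actually a directory now fails fast with `ConfigError`. A minimal standalone sketch of the classification rule (not the library's code):

    import os.path

    def classify_sources(source: list[str]) -> tuple[list[str], list[str]]:
        """Split `source` entries into (directories, package names)."""
        dirs: list[str] = []
        pkgs: list[str] = []
        for src in source:
            # An entry that exists as a directory is a source dir;
            # anything else is assumed to be an importable package name.
            (dirs if os.path.isdir(src) else pkgs).append(src)
        return dirs, pkgs

    # classify_sources(["src", "mypkg"]) == (["src"], ["mypkg"]) when ./src exists.
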
coverage/jsonreport.py CHANGED
@@ -8,14 +8,13 @@ from __future__ import annotations
  import datetime
  import json
  import sys
-
  from collections.abc import Iterable
- from typing import Any, IO, TYPE_CHECKING
+ from typing import IO, TYPE_CHECKING, Any

  from coverage import __version__
  from coverage.report_core import get_analysis_to_report
- from coverage.results import Analysis, Numbers
- from coverage.types import TMorf, TLineNo
+ from coverage.results import Analysis, AnalysisNarrower, Numbers
+ from coverage.types import TLineNo, TMorf

  if TYPE_CHECKING:
      from coverage import Coverage
@@ -31,6 +30,7 @@ JsonObj = dict[str, Any]
  # 3: add region information (functions, classes)
  FORMAT_VERSION = 3

+
  class JsonReporter:
      """A reporter for writing JSON coverage results."""

@@ -128,21 +128,30 @@
          )

          num_lines = len(file_reporter.source().splitlines())
+         regions = file_reporter.code_regions()
          for noun, plural in file_reporter.code_region_kinds():
-             reported_file[plural] = region_data = {}
              outside_lines = set(range(1, num_lines + 1))
-             for region in file_reporter.code_regions():
+             for region in regions:
                  if region.kind != noun:
                      continue
                  outside_lines -= region.lines
+
+             narrower = AnalysisNarrower(analysis)
+             narrower.add_regions(r.lines for r in regions if r.kind == noun)
+             narrower.add_regions([outside_lines])
+
+             reported_file[plural] = region_data = {}
+             for region in regions:
+                 if region.kind != noun:
+                     continue
                  region_data[region.name] = self.make_region_data(
                      coverage_data,
-                     analysis.narrow(region.lines),
+                     narrower.narrow(region.lines),
                  )

              region_data[""] = self.make_region_data(
                  coverage_data,
-                 analysis.narrow(outside_lines),
+                 narrower.narrow(outside_lines),
              )
          return reported_file
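The jsonreport.py hunk above replaces repeated `analysis.narrow(...)` calls with one `AnalysisNarrower` (imported from coverage/results.py) that is primed with every region's line set before any narrowing is done. A toy analogue of that pattern, illustrative only (the real narrower returns `Analysis` objects, not tuples):

    class ToyNarrower:
        """Register all region line sets up front, then answer narrow() queries."""

        def __init__(self, executed: set[int], statements: set[int]) -> None:
            self.executed = executed
            self.statements = statements
            self.regions: list[frozenset[int]] = []

        def add_regions(self, regions) -> None:
            # Remember the line sets we will later be asked about.
            self.regions.extend(frozenset(r) for r in regions)

        def narrow(self, lines) -> tuple[int, int]:
            # (executed, statements) counts restricted to `lines`.
            lines = frozenset(lines)
            return len(self.executed & lines), len(self.statements & lines)

    narrower = ToyNarrower(executed={1, 2, 5}, statements={1, 2, 3, 5, 8})
    narrower.add_regions([{1, 2, 3}, {5, 8}])
    print(narrower.narrow({1, 2, 3}))  # (2, 3): two of three statements ran
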
coverage/lcovreport.py CHANGED
@@ -8,13 +8,12 @@ from __future__ import annotations
  import base64
  import hashlib
  import sys
-
- from typing import IO, TYPE_CHECKING
  from collections.abc import Iterable
+ from typing import IO, TYPE_CHECKING

  from coverage.plugin import FileReporter
  from coverage.report_core import get_analysis_to_report
- from coverage.results import Analysis, Numbers
+ from coverage.results import Analysis, AnalysisNarrower, Numbers
  from coverage.types import TMorf

  if TYPE_CHECKING:
@@ -43,7 +42,7 @@ def lcov_lines(
      hash_suffix = ""
      for line in lines:
          if source_lines:
-             hash_suffix = "," + line_hash(source_lines[line-1])
+             hash_suffix = "," + line_hash(source_lines[line - 1])
          # Q: can we get info about the number of times a statement is
          # executed? If so, that should be recorded here.
          hit = int(line not in analysis.missing)
@@ -71,21 +70,26 @@ def lcov_functions(

      # suppressions because of https://github.com/pylint-dev/pylint/issues/9923
      functions = [
-         (min(region.start, min(region.lines)), #pylint: disable=nested-min-max
-          max(region.start, max(region.lines)), #pylint: disable=nested-min-max
-          region)
+         (
+             min(region.start, min(region.lines)),  # pylint: disable=nested-min-max
+             max(region.start, max(region.lines)),  # pylint: disable=nested-min-max
+             region,
+         )
          for region in fr.code_regions()
          if region.kind == "function" and region.lines
      ]
      if not functions:
          return

+     narrower = AnalysisNarrower(file_analysis)
+     narrower.add_regions(r.lines for _, _, r in functions)
+
      functions.sort()
      functions_hit = 0
      for first_line, last_line, region in functions:
          # A function counts as having been executed if any of it has been
          # executed.
-         analysis = file_analysis.narrow(region.lines)
+         analysis = narrower.narrow(region.lines)
          hit = int(analysis.numbers.n_executed > 0)
          functions_hit += hit

@@ -118,23 +122,15 @@ def lcov_arcs(

          if taken == 0:
              # When _none_ of the out arcs from 'line' were executed,
-             # this probably means 'line' was never executed at all.
-             # Cross-check with the line stats.
+             # it can mean the line always raised an exception.
              assert len(executed_arcs[line]) == 0
-             assert line in analysis.missing
-             destinations = [
-                 (dst, "-") for dst in missing_arcs[line]
-             ]
+             destinations = [(dst, "-") for dst in missing_arcs[line]]
          else:
              # Q: can we get counts of the number of times each arc was executed?
              # branch_stats has "total" and "taken" counts for each branch,
              # but it doesn't have "taken" broken down by destination.
-             destinations = [
-                 (dst, "1") for dst in executed_arcs[line]
-             ]
-             destinations.extend(
-                 (dst, "0") for dst in missing_arcs[line]
-             )
+             destinations = [(dst, "1") for dst in executed_arcs[line]]
+             destinations.extend((dst, "0") for dst in missing_arcs[line])

          # Sort exit arcs after normal arcs. Exit arcs typically come from
          # an if statement, at the end of a function, with no else clause.
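
The lcovreport.py change applies the same `AnalysisNarrower` batching, priming the narrower with every function region before the per-function loop. The hit rule itself is unchanged: a function counts as executed if any line in its region ran. A self-contained sketch of that rule (names are illustrative):

    def functions_hit(regions: list[set[int]], executed: set[int]) -> int:
        """Count regions with at least one executed line (LCOV's FNH number)."""
        return sum(1 for lines in regions if lines & executed)

    print(functions_hit([{2, 3}, {7, 8, 9}], executed={2, 11}))  # 1
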
coverage/misc.py CHANGED
@@ -18,20 +18,16 @@ import os.path
  import re
  import sys
  import types
-
- from types import ModuleType
- from typing import (
-     Any, NoReturn, TypeVar,
- )
  from collections.abc import Iterable, Iterator, Mapping, Sequence
-
- from coverage.exceptions import CoverageException
- from coverage.types import TArc
+ from types import ModuleType
+ from typing import Any, NoReturn, TypeVar

  # In 6.0, the exceptions moved from misc.py to exceptions.py. But a number of
  # other packages were importing the exceptions from misc, so import them here.
  # pylint: disable=unused-wildcard-import
- from coverage.exceptions import * # pylint: disable=wildcard-import
+ from coverage.exceptions import *  # pylint: disable=wildcard-import
+ from coverage.exceptions import CoverageException
+ from coverage.types import TArc

  ISOLATED_MODULES: dict[ModuleType, ModuleType] = {}

@@ -54,11 +50,13 @@ def isolate_module(mod: ModuleType) -> ModuleType:
          setattr(new_mod, name, value)
      return ISOLATED_MODULES[mod]

+
  os = isolate_module(os)


  class SysModuleSaver:
      """Saves the contents of sys.modules, and removes new modules later."""
+
      def __init__(self) -> None:
          self.old_modules = set(sys.modules)

@@ -111,9 +109,9 @@ def nice_pair(pair: TArc) -> str:
      """
      start, end = pair
      if start == end:
-         return "%d" % start
+         return f"{start}"
      else:
-         return "%d-%d" % (start, end)
+         return f"{start}-{end}"


  def bool_or_none(b: Any) -> bool | None:
@@ -158,37 +156,39 @@ def ensure_dir_for_file(path: str) -> None:

  class Hasher:
      """Hashes Python data for fingerprinting."""
+
      def __init__(self) -> None:
          self.hash = hashlib.new("sha3_256", usedforsecurity=False)

      def update(self, v: Any) -> None:
          """Add `v` to the hash, recursively if needed."""
          self.hash.update(str(type(v)).encode("utf-8"))
-         if isinstance(v, str):
-             self.hash.update(v.encode("utf-8"))
-         elif isinstance(v, bytes):
-             self.hash.update(v)
-         elif v is None:
-             pass
-         elif isinstance(v, (int, float)):
-             self.hash.update(str(v).encode("utf-8"))
-         elif isinstance(v, (tuple, list)):
-             for e in v:
-                 self.update(e)
-         elif isinstance(v, dict):
-             keys = v.keys()
-             for k in sorted(keys):
-                 self.update(k)
-                 self.update(v[k])
-         else:
-             for k in dir(v):
-                 if k.startswith("__"):
-                     continue
-                 a = getattr(v, k)
-                 if inspect.isroutine(a):
-                     continue
-                 self.update(k)
-                 self.update(a)
+         match v:
+             case None:
+                 pass
+             case str():
+                 self.hash.update(v.encode("utf-8"))
+             case bytes():
+                 self.hash.update(v)
+             case int() | float():
+                 self.hash.update(str(v).encode("utf-8"))
+             case tuple() | list():
+                 for e in v:
+                     self.update(e)
+             case dict():
+                 keys = v.keys()
+                 for k in sorted(keys):
+                     self.update(k)
+                     self.update(v[k])
+             case _:
+                 for k in dir(v):
+                     if k.startswith("__"):
+                         continue
+                     a = getattr(v, k)
+                     if inspect.isroutine(a):
+                         continue
+                     self.update(k)
+                     self.update(a)
          self.hash.update(b".")

      def hexdigest(self) -> str:
@@ -218,6 +218,7 @@ class DefaultValue:
      and Sphinx output.

      """
+
      def __init__(self, display_as: str) -> None:
          self.display_as = display_as

@@ -244,13 +245,13 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:
      dollar_pattern = r"""(?x)   # Use extended regex syntax
          \$                      # A dollar sign,
          (?:                     # then
-             (?P<dollar>\$) |        # a dollar sign, or
-             (?P<word1>\w+) |        # a plain word, or
-             {                       # a {-wrapped
-                 (?P<word2>\w+)          # word,
-                 (?:
-                     (?P<strict>\?) |        # with a strict marker
-                     -(?P<defval>[^}]*)      # or a default value
+             (?P<dollar> \$ ) |      # a dollar sign, or
+             (?P<word1> \w+ ) |      # a plain word, or
+             \{                      # a {-wrapped
+                 (?P<word2> \w+ )        # word,
+                 (?:                     # either
+                     (?P<strict> \? ) |      # with a strict marker
+                     -(?P<defval> [^}]* )    # or a default value
                  )?                      # maybe.
              }
          )
@@ -261,7 +262,7 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:
      def dollar_replace(match: re.Match[str]) -> str:
          """Called for each $replacement."""
          # Only one of the dollar_groups will have matched, just get its text.
-         word = next(g for g in match.group(*dollar_groups) if g) # pragma: always breaks
+         word = next(g for g in match.group(*dollar_groups) if g)  # pragma: always breaks
          if word == "$":
              return "$"
          elif word in variables:
@@ -277,8 +278,7 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:


  def format_local_datetime(dt: datetime.datetime) -> str:
-     """Return a string with local timezone representing the date.
-     """
+     """Return a string with local timezone representing the date."""
      return dt.astimezone().strftime("%Y-%m-%d %H:%M %z")


@@ -311,6 +311,7 @@ def _human_key(s: str) -> tuple[list[str | int], str]:
      The original string is appended as a last value to ensure the
      key is unique enough so that "x1y" and "x001y" can be distinguished.
      """
+
      def tryint(s: str) -> str | int:
          """If `s` is a number, return an int, else `s` unchanged."""
          try:
@@ -320,6 +321,7 @@ def _human_key(s: str) -> tuple[list[str | int], str]:

      return ([tryint(c) for c in re.split(r"(\d+)", s)], s)

+
  def human_sorted(strings: Iterable[str]) -> list[str]:
      """Sort the given iterable of strings the way that humans expect.

@@ -330,8 +332,10 @@ def human_sorted(strings: Iterable[str]) -> list[str]:
      """
      return sorted(strings, key=_human_key)

+
  SortableItem = TypeVar("SortableItem", bound=Sequence[Any])

+
  def human_sorted_items(
      items: Iterable[SortableItem],
      reverse: bool = False,
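
The `substitute_variables` hunks above only respace the verbose regex; the syntax it accepts is unchanged: `$WORD` and `${WORD}` substitute, `${WORD-default}` falls back to a default, `${WORD?}` is the strict form that raises if `WORD` is undefined, and `$$` is a literal dollar sign. A quick usage example (values are illustrative):

    from coverage.misc import substitute_variables

    env = {"HOME": "/home/ned"}
    print(substitute_variables("dir = $HOME/files", env))         # dir = /home/ned/files
    print(substitute_variables("cost: $$5, ${TIER-basic}", env))  # cost: $5, basic
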
coverage/multiproc.py CHANGED
@@ -11,7 +11,6 @@ import os
  import os.path
  import sys
  import traceback
-
  from typing import Any

  from coverage.debug import DebugControl
@@ -22,16 +21,18 @@ PATCHED_MARKER = "_coverage$patched"


  OriginalProcess = multiprocessing.process.BaseProcess
- original_bootstrap = OriginalProcess._bootstrap # type: ignore[attr-defined]
+ original_bootstrap = OriginalProcess._bootstrap  # type: ignore[attr-defined]
+

- class ProcessWithCoverage(OriginalProcess): # pylint: disable=abstract-method
+ class ProcessWithCoverage(OriginalProcess):  # pylint: disable=abstract-method
      """A replacement for multiprocess.Process that starts coverage."""

-     def _bootstrap(self, *args, **kwargs): # type: ignore[no-untyped-def]
+     def _bootstrap(self, *args, **kwargs):  # type: ignore[no-untyped-def]
          """Wrapper around _bootstrap to start coverage."""
          debug: DebugControl | None = None
          try:
-             from coverage import Coverage # avoid circular import
+             from coverage import Coverage  # avoid circular import
+
              cov = Coverage(data_suffix=True, auto_data=True)
              cov._warn_preimported_source = False
              cov.start()
@@ -61,8 +62,10 @@ class ProcessWithCoverage(OriginalProcess): # pylint: disable=abstract-m
              if debug:
                  debug.write("Saved multiprocessing data")

+
  class Stowaway:
      """An object to pickle, so when it is unpickled, it can apply the monkey-patch."""
+
      def __init__(self, rcfile: str) -> None:
          self.rcfile = rcfile

@@ -86,7 +89,7 @@ def patch_multiprocessing(rcfile: str) -> None:
      if hasattr(multiprocessing, PATCHED_MARKER):
          return

-     OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap # type: ignore[attr-defined]
+     OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap  # type: ignore[attr-defined]

      # Set the value in ProcessWithCoverage that will be pickled into the child
      # process.
@@ -94,16 +97,18 @@ def patch_multiprocessing(rcfile: str) -> None:

      # When spawning processes rather than forking them, we have no state in the
      # new process. We sneak in there with a Stowaway: we stuff one of our own
-     # objects into the data that gets pickled and sent to the sub-process. When
+     # objects into the data that gets pickled and sent to the subprocess. When
      # the Stowaway is unpickled, its __setstate__ method is called, which
      # re-applies the monkey-patch.
      # Windows only spawns, so this is needed to keep Windows working.
      try:
          from multiprocessing import spawn
+
          original_get_preparation_data = spawn.get_preparation_data
      except (ImportError, AttributeError):
          pass
      else:
+
          def get_preparation_data_with_stowaway(name: str) -> dict[str, Any]:
              """Get the original preparation data, and also insert our stowaway."""
              d = original_get_preparation_data(name)
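
The Stowaway comment block in the last hunk describes the whole spawn-mode trick: an extra object rides along in the data pickled for the child process, and unpickling it re-applies the monkey-patch there. A toy illustration of the mechanism (not coverage.py's code):

    import pickle

    class Stowaway:
        """Toy stand-in for coverage's Stowaway."""

        def __init__(self, rcfile: str) -> None:
            self.rcfile = rcfile

        def __setstate__(self, state: dict) -> None:
            # Runs in the unpickling (child) process: the hook where the
            # real Stowaway re-applies the coverage monkey-patch.
            self.__dict__.update(state)
            print(f"re-applying patch using {self.rcfile!r}")

    blob = pickle.dumps(Stowaway(".coveragerc"))  # parent: tucked into prep data
    pickle.loads(blob)                            # child: __setstate__ fires here
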
coverage/numbits.py CHANGED
@@ -18,9 +18,8 @@ from __future__ import annotations

  import json
  import sqlite3
-
- from itertools import zip_longest
  from collections.abc import Iterable
+ from itertools import zip_longest


  def nums_to_numbits(nums: Iterable[int]) -> bytes:
@@ -39,7 +38,7 @@ def nums_to_numbits(nums: Iterable[int]) -> bytes:
          return b""
      b = bytearray(nbytes)
      for num in nums:
-         b[num//8] |= 1 << num % 8
+         b[num // 8] |= 1 << num % 8
      return bytes(b)


@@ -59,7 +58,7 @@ def numbits_to_nums(numbits: bytes) -> list[int]:
      nums = []
      for byte_i, byte in enumerate(numbits):
          for bit_i in range(8):
-             if (byte & (1 << bit_i)):
+             if byte & (1 << bit_i):
                  nums.append(byte_i * 8 + bit_i)
      return nums
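
A worked example of the packing these numbits.py hunks touch: each line number sets one bit, eight numbers per byte. This is `nums_to_numbits` condensed, without the empty-input guard:

    def pack(nums: list[int]) -> bytes:
        b = bytearray(max(nums) // 8 + 1)
        for num in nums:
            b[num // 8] |= 1 << num % 8
        return bytes(b)

    print(pack([1, 3, 9]).hex())  # "0a02": bits 1 and 3 in byte 0, bit 1 in byte 1
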