coverage-7.6.7-cp311-cp311-win_amd64.whl → coverage-7.11.1-cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- coverage/__init__.py +2 -0
- coverage/__main__.py +2 -0
- coverage/annotate.py +1 -2
- coverage/bytecode.py +177 -3
- coverage/cmdline.py +329 -154
- coverage/collector.py +31 -42
- coverage/config.py +166 -62
- coverage/context.py +4 -5
- coverage/control.py +164 -85
- coverage/core.py +70 -33
- coverage/data.py +3 -4
- coverage/debug.py +112 -56
- coverage/disposition.py +1 -0
- coverage/env.py +65 -55
- coverage/exceptions.py +35 -7
- coverage/execfile.py +18 -13
- coverage/files.py +23 -18
- coverage/html.py +134 -88
- coverage/htmlfiles/style.css +42 -2
- coverage/htmlfiles/style.scss +65 -1
- coverage/inorout.py +61 -44
- coverage/jsonreport.py +17 -8
- coverage/lcovreport.py +16 -20
- coverage/misc.py +50 -46
- coverage/multiproc.py +12 -7
- coverage/numbits.py +3 -4
- coverage/parser.py +193 -269
- coverage/patch.py +166 -0
- coverage/phystokens.py +24 -25
- coverage/plugin.py +13 -13
- coverage/plugin_support.py +36 -35
- coverage/python.py +9 -13
- coverage/pytracer.py +40 -33
- coverage/regions.py +2 -1
- coverage/report.py +59 -43
- coverage/report_core.py +6 -9
- coverage/results.py +118 -66
- coverage/sqldata.py +260 -210
- coverage/sqlitedb.py +33 -25
- coverage/sysmon.py +195 -157
- coverage/templite.py +6 -6
- coverage/tomlconfig.py +12 -12
- coverage/tracer.cp311-win_amd64.pyd +0 -0
- coverage/tracer.pyi +2 -0
- coverage/types.py +25 -22
- coverage/version.py +3 -18
- coverage/xmlreport.py +16 -13
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/METADATA +40 -18
- coverage-7.11.1.dist-info/RECORD +59 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/WHEEL +1 -1
- coverage-7.6.7.dist-info/RECORD +0 -58
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/entry_points.txt +0 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info/licenses}/LICENSE.txt +0 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/top_level.txt +0 -0
coverage/parser.py
CHANGED
@@ -6,18 +6,16 @@
 from __future__ import annotations
 
 import ast
-import functools
 import collections
+import functools
 import os
 import re
-import sys
 import token
 import tokenize
-
 from collections.abc import Iterable, Sequence
 from dataclasses import dataclass
 from types import CodeType
-from typing import
+from typing import Callable, Optional, Protocol, cast
 
 from coverage import env
 from coverage.bytecode import code_objects
@@ -37,6 +35,7 @@ class PythonParser:
     involved.
 
     """
+
     def __init__(
         self,
         text: str | None = None,
@@ -55,6 +54,7 @@ class PythonParser:
            self.text: str = text
         else:
             from coverage.python import get_python_source
+
             try:
                 self.text = get_python_source(self.filename)
             except OSError as err:
@@ -92,7 +92,7 @@ class PythonParser:
 
         # A dict mapping line numbers to lexical statement starts for
         # multi-line statements.
-        self._multiline: dict[TLineNo, TLineNo] = {}
+        self.multiline_map: dict[TLineNo, TLineNo] = {}
 
         # Lazily-created arc data, and missing arc descriptions.
         self._all_arcs: set[TArc] | None = None
@@ -113,9 +113,11 @@ class PythonParser:
         last_start_line = 0
         for match in re.finditer(regex, self.text, flags=re.MULTILINE):
             start, end = match.span()
-            start_line = last_start_line + self.text.count(
-            end_line = last_start_line + self.text.count(
-            matches.update(
+            start_line = last_start_line + self.text.count("\n", last_start, start)
+            end_line = last_start_line + self.text.count("\n", last_start, end)
+            matches.update(
+                self.multiline_map.get(i, i) for i in range(start_line + 1, end_line + 2)
+            )
             last_start = start
             last_start_line = start_line
         return matches
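Note: a minimal sketch (mine, not part of the diff) of the incremental line-counting idea in this hunk. str.count("\n", lo, hi) counts newlines in a slice, so each match's line number can be derived from the previous match's without rescanning the text from the start:

    import re

    text = 'a = 1\nb = 2  # pragma: no cover\nc = 3\n'
    last_start = 0
    last_start_line = 0
    for match in re.finditer(r"#\s*pragma: no cover", text):
        start, end = match.span()
        # Newlines between the previous match and this one.
        start_line = last_start_line + text.count("\n", last_start, start)
        print(start_line + 1)  # -> 2: the match sits on line 2 (1-based)
        last_start = start
        last_start_line = start_line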
@@ -147,20 +149,23 @@ class PythonParser:
         assert self.text is not None
         tokgen = generate_tokens(self.text)
         for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
-            if self.show_tokens:
-                print("%10s %5s %-20r %r" % (
-                    tokenize.tok_name.get(toktype, toktype),
-                    nice_pair((slineno, elineno)), ttext, ltext,
-                ))
+            if self.show_tokens:  # pragma: debugging
+                print(
+                    "%10s %5s %-20r %r"
+                    % (
+                        tokenize.tok_name.get(toktype, toktype),
+                        nice_pair((slineno, elineno)),
+                        ttext,
+                        ltext,
+                    )
+                )
             if toktype == token.INDENT:
                 indent += 1
             elif toktype == token.DEDENT:
                 indent -= 1
             elif toktype == token.OP:
                 if ttext == ":" and nesting == 0:
-                    should_exclude = (
-                        self.excluded.intersection(range(first_line, elineno + 1))
-                    )
+                    should_exclude = self.excluded.intersection(range(first_line, elineno + 1))
                     if not excluding and should_exclude:
                         # Start excluding a suite.  We trigger off of the colon
                         # token so that the #pragma comment will be recognized on
@@ -177,8 +182,8 @@ class PythonParser:
                     # We're at the end of a line, and we've ended on a
                     # different line than the first line of the statement,
                     # so record a multi-line range.
-                    for l in range(first_line, elineno+1):
-                        self._multiline[l] = first_line
+                    for l in range(first_line, elineno + 1):
+                        self.multiline_map[l] = first_line
                     first_line = 0
 
             if ttext.strip() and toktype != tokenize.COMMENT:
@@ -198,12 +203,6 @@ class PythonParser:
         byte_parser = ByteParser(self.text, filename=self.filename)
         self.raw_statements.update(byte_parser._find_statements())
 
-        # The first line of modules can lie and say 1 always, even if the first
-        # line of code is later.  If so, map 1 to the actual first line of the
-        # module.
-        if env.PYBEHAVIOR.module_firstline_1 and self._multiline:
-            self._multiline[1] = min(self.raw_statements)
-
         self.excluded = self.first_lines(self.excluded)
 
         # AST lets us find classes, docstrings, and decorator-affected
@@ -233,9 +232,9 @@ class PythonParser:
     def first_line(self, lineno: TLineNo) -> TLineNo:
         """Return the first line number of the statement including `lineno`."""
         if lineno < 0:
-            lineno = -self._multiline.get(-lineno, -lineno)
+            lineno = -self.multiline_map.get(-lineno, -lineno)
         else:
-            lineno = self._multiline.get(lineno, lineno)
+            lineno = self.multiline_map.get(lineno, lineno)
         return lineno
 
     def first_lines(self, linenos: Iterable[TLineNo]) -> set[TLineNo]:
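Note: a small sketch (mine, not from the diff) of what the renamed multiline_map holds and how first_line() uses it. All lines of one multi-line statement map to the statement's first line; negative line numbers (used for synthetic exit arcs) keep their sign:

    multiline_map = {2: 1, 3: 1}  # one statement spanning lines 1-3

    def first_line(lineno):
        if lineno < 0:
            return -multiline_map.get(-lineno, -lineno)
        return multiline_map.get(lineno, lineno)

    assert first_line(3) == 1    # collapsed onto the statement start
    assert first_line(5) == 5    # not part of any multi-line statement
    assert first_line(-3) == -1  # sign preserved through the mapping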
@@ -267,12 +266,12 @@ class PythonParser:
             self._raw_parse()
         except (tokenize.TokenError, IndentationError, SyntaxError) as err:
             if hasattr(err, "lineno"):
-                lineno = err.lineno
+                lineno = err.lineno  # IndentationError
             else:
-                lineno = err.args[1][0]
+                lineno = err.args[1][0]  # TokenError
             raise NotPython(
-                f"Couldn't parse '{self.filename}' as Python source: "
-                f"{err.args[0]!r} at line {lineno}",
+                f"Couldn't parse '{self.filename}' as Python source: "
+                + f"{err.args[0]!r} at line {lineno}",
             ) from err
 
         ignore = self.excluded | self.raw_docstrings
@@ -298,12 +297,15 @@ class PythonParser:
 
         """
         assert self._ast_root is not None
-        aaa = AstArcAnalyzer(self.filename, self._ast_root, self.raw_statements, self._multiline)
+        aaa = AstArcAnalyzer(self.filename, self._ast_root, self.raw_statements, self.multiline_map)
         aaa.analyze()
+        arcs = aaa.arcs
         self._with_jump_fixers = aaa.with_jump_fixers()
+        if self._with_jump_fixers:
+            arcs = self.fix_with_jumps(arcs)
 
         self._all_arcs = set()
-        for l1, l2 in aaa.arcs:
+        for l1, l2 in arcs:
             fl1 = self.first_line(l1)
             fl2 = self.first_line(l2)
             if fl1 != fl2:
@@ -312,20 +314,41 @@ class PythonParser:
         self._missing_arc_fragments = aaa.missing_arc_fragments
 
     def fix_with_jumps(self, arcs: Iterable[TArc]) -> set[TArc]:
-        """Adjust arcs to fix jumps leaving `with` statements."""
+        """Adjust arcs to fix jumps leaving `with` statements.
+
+        Consider this code:
+
+            with open("/tmp/test", "w") as f1:
+                a = 2
+                b = 3
+            print(4)
+
+        In 3.10+, we get traces for lines 1, 2, 3, 1, 4.  But we want to present
+        it to the user as if it had been 1, 2, 3, 4.  The arc 3->1 should be
+        replaced with 3->4, and 1->4 should be removed.
+
+        For this code, the fixers dict is {(3, 1): ((1, 4), (3, 4))}.  The key
+        is the actual measured arc from the end of the with block back to the
+        start of the with-statement.  The values are start_next (the with
+        statement to the next statement after the with), and end_next (the end
+        of the with-statement to the next statement after the with).
+
+        With nested with-statements, we have to trace through a few levels to
+        correct a longer chain of arcs.
+
+        """
         to_remove = set()
         to_add = set()
         for arc in arcs:
             if arc in self._with_jump_fixers:
-
+                end0 = arc[0]
                 to_remove.add(arc)
-                start_next,
+                start_next, end_next = self._with_jump_fixers[arc]
                 while start_next in self._with_jump_fixers:
                     to_remove.add(start_next)
-                    start_next,
-                    to_remove.add(
-                    to_add.add((
-                to_remove.add(arc)
+                    start_next, end_next = self._with_jump_fixers[start_next]
+                to_remove.add(end_next)
+                to_add.add((end0, end_next[1]))
                 to_remove.add(start_next)
         arcs = (set(arcs) | to_add) - to_remove
         return arcs
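Note: a minimal sketch (mine, not from the diff) applying the fixers dict from the new docstring to its 4-line example. The measured trace 1, 2, 3, 1, 4 yields the arcs below; the fixer drops the measured (3, 1) and the synthetic (1, 4), and adds (3, 4). The nested-with chain walking is omitted here:

    fixers = {(3, 1): ((1, 4), (3, 4))}
    arcs = {(1, 2), (2, 3), (3, 1), (1, 4)}

    fixed = set(arcs)
    for arc in arcs:
        if arc in fixers:
            start_next, end_next = fixers[arc]
            fixed -= {arc, start_next}  # remove (3, 1) and (1, 4)
            fixed.add(end_next)         # add (3, 4)

    assert fixed == {(1, 2), (2, 3), (3, 4)}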
@@ -413,39 +436,26 @@ class ByteParser:
 
         The iteration includes `self` as its first value.
 
+        We skip code objects named `__annotate__` since they are deferred
+        annotations that usually are never run.  If there are errors in the
+        annotations, they will be caught by type checkers or other tools that
+        use annotations.
+
         """
-        return (ByteParser(self.text, code=c) for c in code_objects(self.code))
+        return (
+            ByteParser(self.text, code=c)
+            for c in code_objects(self.code)
+            if c.co_name != "__annotate__"
+        )
 
     def _line_numbers(self) -> Iterable[TLineNo]:
         """Yield the line numbers possible in this code object.
 
-        Uses
-        line numbers.  Produces a sequence: l0, l1, ...
+        Uses co_lines() to produce a sequence: l0, l1, ...
         """
-
-
-
-                if line:
-                    yield line
-        else:
-            # Adapted from dis.py in the standard library.
-            byte_increments = self.code.co_lnotab[0::2]
-            line_increments = self.code.co_lnotab[1::2]
-
-            last_line_num = None
-            line_num = self.code.co_firstlineno
-            byte_num = 0
-            for byte_incr, line_incr in zip(byte_increments, line_increments):
-                if byte_incr:
-                    if line_num != last_line_num:
-                        yield line_num
-                        last_line_num = line_num
-                    byte_num += byte_incr
-                if line_incr >= 0x80:
-                    line_incr -= 0x100
-                line_num += line_incr
-            if line_num != last_line_num:
-                yield line_num
+        for _, _, line in self.code.co_lines():
+            if line:
+                yield line
 
     def _find_statements(self) -> Iterable[TLineNo]:
         """Find the statements in `self.code`.
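Note: a short sketch (mine) of the co_lines() API (PEP 626, Python 3.10+) that the rewritten _line_numbers() relies on, replacing the old co_lnotab decoding. Each entry is (bytecode_start, bytecode_end, line), and line can be None or 0 for synthetic instructions, hence the truthiness filter:

    code = compile("a = 1\n\nb = 2\n", "<demo>", "exec")
    print(sorted({line for _, _, line in code.co_lines() if line}))
    # -> [1, 3]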
@@ -463,6 +473,7 @@ class ByteParser:
 # AST analysis
 #
 
+
 @dataclass(frozen=True, order=True)
 class ArcStart:
     """The information needed to start an arc.
@@ -493,12 +504,14 @@ class ArcStart:
     "line 1 didn't jump to line 2 because the condition on line 1 was never true."
 
     """
+
     lineno: TLineNo
     cause: str = ""
 
 
 class TAddArcFn(Protocol):
     """The type for AstArcAnalyzer.add_arc()."""
+
     def __call__(
         self,
         start: TLineNo,
@@ -521,6 +534,7 @@ class TAddArcFn(Protocol):
 
 TArcFragments = dict[TArc, list[tuple[Optional[str], Optional[str]]]]
 
+
 class Block:
     """
     Blocks need to handle various exiting statements in their own ways.
@@ -530,6 +544,7 @@ class Block:
     exits are handled, or False if the search should continue up the block
     stack.
     """
+
     # pylint: disable=unused-argument
     def process_break_exits(self, exits: set[ArcStart], add_arc: TAddArcFn) -> bool:
         """Process break exits."""
@@ -550,6 +565,7 @@ class Block:
 
 class LoopBlock(Block):
     """A block on the block stack representing a `for` or `while` loop."""
+
     def __init__(self, start: TLineNo) -> None:
         # The line number where the loop starts.
         self.start = start
@@ -568,6 +584,7 @@ class LoopBlock(Block):
 
 class FunctionBlock(Block):
     """A block on the block stack representing a function definition."""
+
     def __init__(self, start: TLineNo, name: str) -> None:
         # The line number where the function starts.
         self.start = start
@@ -577,7 +594,9 @@ class FunctionBlock(Block):
     def process_raise_exits(self, exits: set[ArcStart], add_arc: TAddArcFn) -> bool:
         for xit in exits:
             add_arc(
-                xit.lineno, -self.start, xit.cause,
+                xit.lineno,
+                -self.start,
+                xit.cause,
                 f"except from function {self.name!r}",
             )
         return True
@@ -585,7 +604,9 @@ class FunctionBlock(Block):
     def process_return_exits(self, exits: set[ArcStart], add_arc: TAddArcFn) -> bool:
         for xit in exits:
             add_arc(
-                xit.lineno, -self.start, xit.cause,
+                xit.lineno,
+                -self.start,
+                xit.cause,
                 f"return from function {self.name!r}",
            )
         return True
@@ -593,6 +614,7 @@ class FunctionBlock(Block):
 
 class TryBlock(Block):
     """A block on the block stack representing a `try` block."""
+
     def __init__(self, handler_start: TLineNo | None, final_start: TLineNo | None) -> None:
         # The line number of the first "except" handler, if any.
         self.handler_start = handler_start
@@ -606,18 +628,33 @@ class TryBlock(Block):
         return True
 
 
-class NodeList(ast.AST):
-    """A synthetic fictitious node, containing a sequence of nodes.
+# TODO: Shouldn't the cause messages join with "and" instead of "or"?
 
-    This is used when collapsing optimized if-statements, to represent the
-    unconditional execution of one of the clauses.
 
-    """
-    def __init__(self, body: Sequence[ast.AST]) -> None:
-        self.body = body
-        self.lineno = body[0].lineno  # type: ignore[attr-defined]
+def is_constant_test_expr(node: ast.AST) -> tuple[bool, bool]:
+    """Is this a compile-time constant test expression?
 
-
+    We don't try to mimic all of CPython's optimizations.  We just have to
+    handle the kinds of constant expressions people might actually use.
+
+    """
+    match node:
+        case ast.Constant():
+            return True, bool(node.value)
+        case ast.Name():
+            if node.id in ["True", "False", "None", "__debug__"]:
+                return True, eval(node.id)  # pylint: disable=eval-used
+        case ast.UnaryOp():
+            if isinstance(node.op, ast.Not):
+                is_constant, val = is_constant_test_expr(node.operand)
+                return is_constant, not val
+        case ast.BoolOp():
+            rets = [is_constant_test_expr(v) for v in node.values]
+            is_constant = all(is_const for is_const, _ in rets)
+            if is_constant:
+                op = any if isinstance(node.op, ast.Or) else all
+                return True, op(v for _, v in rets)
+    return False, False
 
 
 class AstArcAnalyzer:
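Note: a sketch (mine) of what the new is_constant_test_expr() returns for a few test expressions, assuming coverage 7.11.1 is installed so the function can be imported from coverage.parser. The first element says whether the expression is a compile-time constant, the second gives its truth value:

    import ast
    from coverage.parser import is_constant_test_expr

    for src in ["True", "not __debug__", "0 or ''", "x or True"]:
        node = ast.parse(src, mode="eval").body
        print(f"{src!r} -> {is_constant_test_expr(node)}")
    # 'True'          -> (True, True)
    # 'not __debug__' -> (True, False)  under a normal (non -O) interpreter
    # "0 or ''"       -> (True, False)  any() over two false constants
    # 'x or True'     -> (False, False) because `x` is not a constant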
@@ -650,7 +687,6 @@ class AstArcAnalyzer:
     ) -> None:
         self.filename = filename
         self.root_node = root_node
-        # TODO: I think this is happening in too many places.
         self.statements = {multiline.get(l, l) for l in statements}
         self.multiline = multiline
 
@@ -658,7 +694,7 @@ class AstArcAnalyzer:
         # $set_env.py: COVERAGE_AST_DUMP - Dump the AST nodes when parsing code.
        dump_ast = bool(int(os.getenv("COVERAGE_AST_DUMP", "0")))
 
-        if dump_ast:
+        if dump_ast:  # pragma: debugging
            # Dump the AST so that failing tests have helpful output.
             print(f"Statements: {self.statements}")
             print(f"Multiline map: {self.multiline}")
@@ -684,22 +720,19 @@ class AstArcAnalyzer:
         """Examine the AST tree from `self.root_node` to determine possible arcs."""
         for node in ast.walk(self.root_node):
             node_name = node.__class__.__name__
-            code_object_handler = getattr(self, "_code_object__" + node_name, None)
+            code_object_handler = getattr(self, f"_code_object__{node_name}", None)
             if code_object_handler is not None:
                 code_object_handler(node)
 
     def with_jump_fixers(self) -> dict[TArc, tuple[TArc, TArc]]:
         """Get a dict with data for fixing jumps out of with statements.
 
-        Returns a dict. The keys are arcs leaving a with
+        Returns a dict.  The keys are arcs leaving a with-statement by jumping
         back to its start.  The values are pairs: first, the arc from the start
         to the next statement, then the arc that exits the with without going
         to the start.
 
         """
-        if not env.PYBEHAVIOR.exit_through_with:
-            return {}
-
         fixers = {}
         with_nexts = {
             arc
@@ -712,9 +745,9 @@ class AstArcAnalyzer:
                 continue
             assert len(nexts) == 1, f"Expected one arc, got {nexts} with {start = }"
             nxt = nexts.pop()
-
-            for
-            fixers[(
+            ends = {arc[0] for arc in self.with_exits if arc[1] == start}
+            for end in ends:
+                fixers[(end, start)] = ((start, nxt), (end, nxt))
         return fixers
 
     # Code object dispatchers: _code_object__*
@@ -755,7 +788,7 @@ class AstArcAnalyzer:
         action_msg: str | None = None,
     ) -> None:
         """Add an arc, including message fragments to use if it is missing."""
-        if self.debug:
+        if self.debug:  # pragma: debugging
             print(f"Adding possible arc: ({start}, {end}): {missing_cause_msg!r}, {action_msg!r}")
             print(short_stack(), end="\n\n")
         self.arcs.add((start, end))
@@ -778,12 +811,13 @@ class AstArcAnalyzer:
         node_name = node.__class__.__name__
         handler = cast(
             Optional[Callable[[ast.AST], TLineNo]],
-            getattr(self, "_line__" + node_name, None),
+            getattr(self, f"_line__{node_name}", None),
         )
         if handler is not None:
-            return handler(node)
+            line = handler(node)
         else:
-            return node.lineno  # type: ignore[attr-defined]
+            line = node.lineno  # type: ignore[attr-defined]
+        return self.multiline.get(line, line)
 
     # First lines: _line__*
     #
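Note: the getattr lookups above implement dispatch by AST node class name. A self-contained sketch (hypothetical Dispatcher class, mine) of the same convention used by the _line__*, _handle__*, and _code_object__* methods:

    import ast

    class Dispatcher:
        def _line__Module(self, node):
            return 1  # modules always start at line 1

        def line_for_node(self, node):
            # Look up a handler named after the node's class, else default.
            handler = getattr(self, f"_line__{node.__class__.__name__}", None)
            if handler is not None:
                return handler(node)
            return node.lineno

    d = Dispatcher()
    tree = ast.parse("x = 1")
    print(d.line_for_node(tree))          # Module handler -> 1
    print(d.line_for_node(tree.body[0]))  # no _line__Assign -> node.lineno == 1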
@@ -823,19 +857,22 @@ class AstArcAnalyzer:
         else:
             return node.lineno
 
-    def _line__Module(self, node: ast.Module) -> TLineNo:
-        if env.PYBEHAVIOR.module_firstline_1:
-            return 1
-        elif node.body:
-            return self.line_for_node(node.body[0])
-        else:
-            # Empty modules have no line number, they always start at 1.
-            return 1
+    def _line__Module(self, node: ast.Module) -> TLineNo:  # pylint: disable=unused-argument
+        return 1
 
     # The node types that just flow to the next node with no complications.
     OK_TO_DEFAULT = {
-        "AnnAssign",
-        "
+        "AnnAssign",
+        "Assign",
+        "Assert",
+        "AugAssign",
+        "Delete",
+        "Expr",
+        "Global",
+        "Import",
+        "ImportFrom",
+        "Nonlocal",
+        "Pass",
     }
 
     def node_exits(self, node: ast.AST) -> set[ArcStart]:
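Note: the names in OK_TO_DEFAULT are AST node class names for simple statements whose only exit is falling through to the next statement. A quick sketch (mine) of how source maps to those names:

    import ast

    mod = ast.parse("x: int = 1\ndel x\npass\n")
    print([node.__class__.__name__ for node in mod.body])
    # -> ['AnnAssign', 'Delete', 'Pass']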
@@ -858,7 +895,7 @@ class AstArcAnalyzer:
         node_name = node.__class__.__name__
         handler = cast(
             Optional[Callable[[ast.AST], set[ArcStart]]],
-            getattr(self, "_handle__" + node_name, None),
+            getattr(self, f"_handle__{node_name}", None),
         )
         if handler is not None:
             arc_starts = handler(node)
@@ -867,7 +904,7 @@ class AstArcAnalyzer:
         # statement), or it's something we overlooked.
         if env.TESTING:
             if node_name not in self.OK_TO_DEFAULT:
-                raise RuntimeError(f"*** Unhandled: {node}")
+                raise RuntimeError(f"*** Unhandled: {node}")  # pragma: only failure
 
         # Default for simple statements: one exit from this node.
         arc_starts = {ArcStart(self.line_for_node(node))}
@@ -905,111 +942,13 @@ class AstArcAnalyzer:
         # the next node.
         for body_node in body:
             lineno = self.line_for_node(body_node)
-            first_line = self.multiline.get(lineno, lineno)
-            if first_line not in self.statements:
-                maybe_body_node = self.find_non_missing_node(body_node)
-                if maybe_body_node is None:
-                    continue
-                body_node = maybe_body_node
-                lineno = self.line_for_node(body_node)
+            if lineno not in self.statements:
+                continue
             for prev_start in prev_starts:
                 self.add_arc(prev_start.lineno, lineno, prev_start.cause)
             prev_starts = self.node_exits(body_node)
         return prev_starts
 
-    def find_non_missing_node(self, node: ast.AST) -> ast.AST | None:
-        """Search `node` looking for a child that has not been optimized away.
-
-        This might return the node you started with, or it will work recursively
-        to find a child node in self.statements.
-
-        Returns a node, or None if none of the node remains.
-
-        """
-        # This repeats work just done in process_body, but this duplication
-        # means we can avoid a function call in the 99.9999% case of not
-        # optimizing away statements.
-        lineno = self.line_for_node(node)
-        first_line = self.multiline.get(lineno, lineno)
-        if first_line in self.statements:
-            return node
-
-        missing_fn = cast(
-            Optional[Callable[[ast.AST], Optional[ast.AST]]],
-            getattr(self, "_missing__" + node.__class__.__name__, None),
-        )
-        if missing_fn is not None:
-            ret_node = missing_fn(node)
-        else:
-            ret_node = None
-        return ret_node
-
-    # Missing nodes: _missing__*
-    #
-    # Entire statements can be optimized away by Python.  They will appear in
-    # the AST, but not the bytecode.  These functions are called (by
-    # find_non_missing_node) to find a node to use instead of the missing
-    # node.  They can return None if the node should truly be gone.
-
-    def _missing__If(self, node: ast.If) -> ast.AST | None:
-        # If the if-node is missing, then one of its children might still be
-        # here, but not both.  So return the first of the two that isn't missing.
-        # Use a NodeList to hold the clauses as a single node.
-        non_missing = self.find_non_missing_node(NodeList(node.body))
-        if non_missing:
-            return non_missing
-        if node.orelse:
-            return self.find_non_missing_node(NodeList(node.orelse))
-        return None
-
-    def _missing__NodeList(self, node: NodeList) -> ast.AST | None:
-        # A NodeList might be a mixture of missing and present nodes.  Find the
-        # ones that are present.
-        non_missing_children = []
-        for child in node.body:
-            maybe_child = self.find_non_missing_node(child)
-            if maybe_child is not None:
-                non_missing_children.append(maybe_child)
-
-        # Return the simplest representation of the present children.
-        if not non_missing_children:
-            return None
-        if len(non_missing_children) == 1:
-            return non_missing_children[0]
-        return NodeList(non_missing_children)
-
-    def _missing__While(self, node: ast.While) -> ast.AST | None:
-        body_nodes = self.find_non_missing_node(NodeList(node.body))
-        if not body_nodes:
-            return None
-        # Make a synthetic While-true node.
-        new_while = ast.While()  # type: ignore[call-arg]
-        new_while.lineno = body_nodes.lineno  # type: ignore[attr-defined]
-        new_while.test = ast.Name()  # type: ignore[call-arg]
-        new_while.test.lineno = body_nodes.lineno  # type: ignore[attr-defined]
-        new_while.test.id = "True"
-        assert hasattr(body_nodes, "body")
-        new_while.body = body_nodes.body
-        new_while.orelse = []
-        return new_while
-
-    def is_constant_expr(self, node: ast.AST) -> str | None:
-        """Is this a compile-time constant?"""
-        node_name = node.__class__.__name__
-        if node_name in ["Constant", "NameConstant", "Num"]:
-            return "Num"
-        elif isinstance(node, ast.Name):
-            if node.id in ["True", "False", "None", "__debug__"]:
-                return "Name"
-        return None
-
-    # In the fullness of time, these might be good tests to write:
-    #   while EXPR:
-    #   while False:
-    #   listcomps hidden deep in other expressions
-    #   listcomps hidden in lists: x = [[i for i in range(10)]]
-    #   nested function definitions
-
     # Exit processing: process_*_exits
     #
     # These functions process the four kinds of jump exits: break, continue,
@@ -1020,13 +959,13 @@ class AstArcAnalyzer:
 
     def process_break_exits(self, exits: set[ArcStart]) -> None:
         """Add arcs due to jumps from `exits` being breaks."""
-        for block in self.nearest_blocks():
+        for block in self.nearest_blocks():  # pragma: always breaks
            if block.process_break_exits(exits, self.add_arc):
                 break
 
     def process_continue_exits(self, exits: set[ArcStart]) -> None:
         """Add arcs due to jumps from `exits` being continues."""
-        for block in self.nearest_blocks():
+        for block in self.nearest_blocks():  # pragma: always breaks
            if block.process_continue_exits(exits, self.add_arc):
                 break
@@ -1038,7 +977,7 @@ class AstArcAnalyzer:
 
     def process_return_exits(self, exits: set[ArcStart]) -> None:
         """Add arcs due to jumps from `exits` being returns."""
-        for block in self.nearest_blocks():
+        for block in self.nearest_blocks():  # pragma: always breaks
             if block.process_return_exits(exits, self.add_arc):
                 break
@@ -1068,8 +1007,8 @@ class AstArcAnalyzer:
         last = None
         for dec_node in decs:
             dec_start = self.line_for_node(dec_node)
-            if last is not None and dec_start != last:
-                self.add_arc(last, dec_start)
+            if last is not None and dec_start != last:
+                self.add_arc(last, dec_start)
             last = dec_start
         assert last is not None
         self.add_arc(last, main_line)
@@ -1079,8 +1018,6 @@ class AstArcAnalyzer:
         # not what we'd think of as the first line in the statement, so map
         # it to the first one.
         assert node.body, f"Oops: {node.body = } in {self.filename}@{node.lineno}"
-        body_start = self.line_for_node(node.body[0])
-        body_start = self.multiline.get(body_start, body_start)
         # The body is handled in collect_arcs.
         assert last is not None
         return {ArcStart(last)}
@@ -1120,48 +1057,44 @@ class AstArcAnalyzer:
 
     def _handle__If(self, node: ast.If) -> set[ArcStart]:
         start = self.line_for_node(node.test)
-        from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
-        exits = self.process_body(node.body, from_start=from_start)
-        from_start = ArcStart(start, cause="the condition on line {lineno} was always true")
-        exits |= self.process_body(node.orelse, from_start=from_start)
+        constant_test, val = is_constant_test_expr(node.test)
+        exits = set()
+        if not constant_test or val:
+            from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
+            exits |= self.process_body(node.body, from_start=from_start)
+        if not constant_test or not val:
+            from_start = ArcStart(start, cause="the condition on line {lineno} was always true")
+            exits |= self.process_body(node.orelse, from_start=from_start)
         return exits
 
-
-    def _handle__Match(self, node: ast.Match) -> set[ArcStart]:
-        start = self.line_for_node(node)
-        last_start = start
-        exits = set()
-        for case in node.cases:
-            case_start = self.line_for_node(case.pattern)
-            self.add_arc(last_start, case_start, "the pattern on line {lineno} always matched")
-            from_start = ArcStart(
-                case_start,
-                cause="the pattern on line {lineno} never matched",
-            )
-            exits |= self.process_body(case.body, from_start=from_start)
-            last_start = case_start
-
-        # case is now the last case, check for wildcard match.
-        pattern = case.pattern  # pylint: disable=undefined-loop-variable
-        while isinstance(pattern, ast.MatchOr):
-            pattern = pattern.patterns[-1]
-        while isinstance(pattern, ast.MatchAs) and pattern.pattern is not None:
-            pattern = pattern.pattern
-        had_wildcard = (
-            isinstance(pattern, ast.MatchAs)
-            and pattern.pattern is None
-            and case.guard is None  # pylint: disable=undefined-loop-variable
-        )
+    def _handle__Match(self, node: ast.Match) -> set[ArcStart]:
+        start = self.line_for_node(node)
+        last_start = start
+        exits = set()
+        for case in node.cases:
+            case_start = self.line_for_node(case.pattern)
+            self.add_arc(last_start, case_start, "the pattern on line {lineno} always matched")
+            from_start = ArcStart(
+                case_start,
+                cause="the pattern on line {lineno} never matched",
+            )
+            exits |= self.process_body(case.body, from_start=from_start)
+            last_start = case_start
+
+        # case is now the last case, check for wildcard match.
+        pattern = case.pattern  # pylint: disable=undefined-loop-variable
+        while isinstance(pattern, ast.MatchOr):
+            pattern = pattern.patterns[-1]
+        while isinstance(pattern, ast.MatchAs) and pattern.pattern is not None:
+            pattern = pattern.pattern
+        had_wildcard = (
+            isinstance(pattern, ast.MatchAs) and pattern.pattern is None and case.guard is None  # pylint: disable=undefined-loop-variable
+        )
 
-        if not had_wildcard:
-            exits.add(
-                ArcStart(case_start, cause="the pattern on line {lineno} always matched"),
-            )
-        return exits
-
-    def _handle__NodeList(self, node: NodeList) -> set[ArcStart]:
-        start = self.line_for_node(node)
-        exits = self.process_body(node.body, from_start=ArcStart(start))
+        if not had_wildcard:
+            exits.add(
+                ArcStart(case_start, cause="the pattern on line {lineno} always matched"),
+            )
         return exits
 
     def _handle__Raise(self, node: ast.Raise) -> set[ArcStart]:
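Note: a sketch (mine, not from the diff) of the wildcard check that _handle__Match performs after the loop: unwrap MatchOr/MatchAs layers on the last case's pattern and see whether a bare capture (`case _:`) with no guard remains:

    import ast

    src = (
        "match command:\n"
        "    case 'go':\n"
        "        pass\n"
        "    case _:\n"
        "        pass\n"
    )
    case = ast.parse(src).body[0].cases[-1]
    pattern = case.pattern
    while isinstance(pattern, ast.MatchOr):
        pattern = pattern.patterns[-1]
    while isinstance(pattern, ast.MatchAs) and pattern.pattern is not None:
        pattern = pattern.pattern
    print(isinstance(pattern, ast.MatchAs) and pattern.pattern is None and case.guard is None)
    # -> True: a wildcard exists, so no "never matched" exit is added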
@@ -1235,14 +1168,7 @@ class AstArcAnalyzer:
 
     def _handle__While(self, node: ast.While) -> set[ArcStart]:
         start = to_top = self.line_for_node(node.test)
-        constant_test = self.is_constant_expr(node.test)
-        top_is_body0 = False
-        if constant_test:
-            top_is_body0 = True
-        if env.PYBEHAVIOR.keep_constant_test:
-            top_is_body0 = False
-        if top_is_body0:
-            to_top = self.line_for_node(node.body[0])
+        constant_test, _ = is_constant_test_expr(node.test)
         self.block_stack.append(LoopBlock(start=to_top))
         from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
         exits = self.process_body(node.body, from_start=from_start)
@@ -1267,22 +1193,20 @@ class AstArcAnalyzer:
             starts = [self.line_for_node(item.context_expr) for item in node.items]
         else:
             starts = [self.line_for_node(node)]
-        if env.PYBEHAVIOR.exit_through_with:
-            for start in starts:
-                self.current_with_starts.add(start)
-                self.all_with_starts.add(start)
+        for start in starts:
+            self.current_with_starts.add(start)
+            self.all_with_starts.add(start)
 
         exits = self.process_body(node.body, from_start=ArcStart(starts[-1]))
 
-        if env.PYBEHAVIOR.exit_through_with:
-            start = starts[-1]
-            self.current_with_starts.remove(start)
-            with_exit = {ArcStart(start)}
-            if exits:
-                for xit in exits:
-                    self.add_arc(xit.lineno, start)
-                    self.with_exits.add((xit.lineno, start))
-            exits = with_exit
+        start = starts[-1]
+        self.current_with_starts.remove(start)
+        with_exit = {ArcStart(start)}
+        if exits:
+            for xit in exits:
+                self.add_arc(xit.lineno, start)
+                self.with_exits.add((xit.lineno, start))
+        exits = with_exit
 
         return exits