coverage 7.6.7-cp311-cp311-win_amd64.whl → 7.11.1-cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- coverage/__init__.py +2 -0
- coverage/__main__.py +2 -0
- coverage/annotate.py +1 -2
- coverage/bytecode.py +177 -3
- coverage/cmdline.py +329 -154
- coverage/collector.py +31 -42
- coverage/config.py +166 -62
- coverage/context.py +4 -5
- coverage/control.py +164 -85
- coverage/core.py +70 -33
- coverage/data.py +3 -4
- coverage/debug.py +112 -56
- coverage/disposition.py +1 -0
- coverage/env.py +65 -55
- coverage/exceptions.py +35 -7
- coverage/execfile.py +18 -13
- coverage/files.py +23 -18
- coverage/html.py +134 -88
- coverage/htmlfiles/style.css +42 -2
- coverage/htmlfiles/style.scss +65 -1
- coverage/inorout.py +61 -44
- coverage/jsonreport.py +17 -8
- coverage/lcovreport.py +16 -20
- coverage/misc.py +50 -46
- coverage/multiproc.py +12 -7
- coverage/numbits.py +3 -4
- coverage/parser.py +193 -269
- coverage/patch.py +166 -0
- coverage/phystokens.py +24 -25
- coverage/plugin.py +13 -13
- coverage/plugin_support.py +36 -35
- coverage/python.py +9 -13
- coverage/pytracer.py +40 -33
- coverage/regions.py +2 -1
- coverage/report.py +59 -43
- coverage/report_core.py +6 -9
- coverage/results.py +118 -66
- coverage/sqldata.py +260 -210
- coverage/sqlitedb.py +33 -25
- coverage/sysmon.py +195 -157
- coverage/templite.py +6 -6
- coverage/tomlconfig.py +12 -12
- coverage/tracer.cp311-win_amd64.pyd +0 -0
- coverage/tracer.pyi +2 -0
- coverage/types.py +25 -22
- coverage/version.py +3 -18
- coverage/xmlreport.py +16 -13
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/METADATA +40 -18
- coverage-7.11.1.dist-info/RECORD +59 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/WHEEL +1 -1
- coverage-7.6.7.dist-info/RECORD +0 -58
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/entry_points.txt +0 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info/licenses}/LICENSE.txt +0 -0
- {coverage-7.6.7.dist-info → coverage-7.11.1.dist-info}/top_level.txt +0 -0
coverage/sqldata.py
CHANGED
@@ -18,14 +18,12 @@ import string
 import sys
 import textwrap
 import threading
+import uuid
 import zlib
-
-from typing import (
-    cast, Any, Callable,
-)
 from collections.abc import Collection, Mapping, Sequence
+from typing import Any, Callable, cast
 
-from coverage.debug import NoDebugging, auto_repr
+from coverage.debug import NoDebugging, auto_repr, file_summary
 from coverage.exceptions import CoverageException, DataError
 from coverage.misc import file_be_gone, isolate_module
 from coverage.numbits import numbits_to_nums, numbits_union, nums_to_numbits
@@ -111,19 +109,37 @@ CREATE TABLE tracer (
 );
 """
 
+
 def _locked(method: AnyCallable) -> AnyCallable:
     """A decorator for methods that should hold self._lock."""
+
     @functools.wraps(method)
     def _wrapped(self: CoverageData, *args: Any, **kwargs: Any) -> Any:
         if self._debug.should("lock"):
             self._debug.write(f"Locking {self._lock!r} for {method.__name__}")
         with self._lock:
             if self._debug.should("lock"):
-                self._debug.write(f"Locked
+                self._debug.write(f"Locked {self._lock!r} for {method.__name__}")
             return method(self, *args, **kwargs)
+
     return _wrapped
 
 
+class NumbitsUnionAgg:
+    """SQLite aggregate function for computing union of numbits."""
+
+    def __init__(self) -> None:
+        self.result = b""
+
+    def step(self, value: bytes) -> None:
+        """Process one value in the aggregation."""
+        self.result = numbits_union(self.result, value)
+
+    def finalize(self) -> bytes:
+        """Return the final aggregated result."""
+        return self.result
+
+
 class CoverageData:
     """Manages collected coverage data, including file storage.
 
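Note: NumbitsUnionAgg follows the step()/finalize() protocol that Python's sqlite3 module expects from aggregate classes; later in this diff it is registered with create_aggregate() so SQL can union numbits blobs directly. A minimal self-contained sketch of that protocol (the toy ByteOrAgg class and table are illustrative, not from the package):

import sqlite3

class ByteOrAgg:
    """Toy aggregate: bytewise OR of blobs, the same shape as NumbitsUnionAgg."""
    def __init__(self) -> None:
        self.result = b""

    def step(self, value: bytes) -> None:
        # Called once per row: OR the overlapping bytes, keep the longer tail.
        short, long_ = sorted([self.result, value], key=len)
        self.result = bytes(a | b for a, b in zip(short, long_)) + long_[len(short):]

    def finalize(self) -> bytes:
        # Called once at the end: the return value becomes the SQL result.
        return self.result

con = sqlite3.connect(":memory:")
con.create_aggregate("byte_or", 1, ByteOrAgg)
con.execute("CREATE TABLE t (b BLOB)")
con.executemany("INSERT INTO t VALUES (?)", [(b"\x01",), (b"\x06",)])
print(con.execute("SELECT byte_or(b) FROM t").fetchone()[0])  # b'\x07'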
@@ -258,31 +274,41 @@ class CoverageData:
 
     __repr__ = auto_repr
 
+    def _debug_dataio(self, msg: str, filename: str) -> None:
+        """A helper for debug messages which are all similar."""
+        if self._debug.should("dataio"):
+            self._debug.write(f"{msg} {filename!r} ({file_summary(filename)})")
+
     def _choose_filename(self) -> None:
         """Set self._filename based on inited attributes."""
         if self._no_disk:
-            self._filename = ":memory:"
+            self._filename = f"file:coverage-{uuid.uuid4()}?mode=memory&cache=shared"
         else:
             self._filename = self._basename
             suffix = filename_suffix(self._suffix)
             if suffix:
-                self._filename += "." + suffix
+                self._filename += f".{suffix}"
 
     def _reset(self) -> None:
         """Reset our attributes."""
         if not self._no_disk:
-            for db in self._dbs.values():
-                db.close()
-            self._dbs = {}
+            self.close()
         self._file_map = {}
         self._have_used = False
         self._current_context_id = None
 
+    def close(self, force: bool = False) -> None:
+        """Really close all the database objects."""
+        if self._debug.should("dataio"):
+            self._debug.write(f"Closing dbs, force={force}: {self._dbs}")
+        for db in self._dbs.values():
+            db.close(force=force)
+        self._dbs = {}
+
     def _open_db(self) -> None:
         """Open an existing db file, and read its metadata."""
-        if self._debug.should("dataio"):
-            self._debug.write(f"Opening data file {self._filename!r}")
-        self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug)
+        self._debug_dataio("Opening data file", self._filename)
+        self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug, self._no_disk)
         self._read_db()
 
     def _read_db(self) -> None:
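Note: the _choose_filename change swaps the plain ":memory:" string for SQLite's URI filename syntax. A named in-memory database opened with cache=shared is visible to several connections in the same process, which ":memory:" (a private database per connection) is not, and the uuid keeps separate CoverageData objects apart. A small sketch using only the standard sqlite3 module:

import sqlite3, uuid

uri = f"file:coverage-{uuid.uuid4()}?mode=memory&cache=shared"
a = sqlite3.connect(uri, uri=True)
b = sqlite3.connect(uri, uri=True)  # second connection, same in-memory database
a.execute("CREATE TABLE t (x)")
a.execute("INSERT INTO t VALUES (1)")
a.commit()
print(b.execute("SELECT x FROM t").fetchone())  # (1,)

Such a database lives only while at least one connection to it stays open, which is presumably why _reset() now delegates to an explicit close().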
@@ -297,7 +323,8 @@ class CoverageData:
             else:
                 raise DataError(
                     "Data file {!r} doesn't seem to be a coverage data file: {}".format(
-                        self._filename, exc,
+                        self._filename,
+                        exc,
                     ),
                 ) from exc
         else:
@@ -305,7 +332,9 @@ class CoverageData:
             if schema_version != SCHEMA_VERSION:
                 raise DataError(
                     "Couldn't use data file {!r}: wrong schema: {} instead of {}".format(
-                        self._filename, schema_version, SCHEMA_VERSION,
+                        self._filename,
+                        schema_version,
+                        SCHEMA_VERSION,
                     ),
                 )
 
@@ -320,10 +349,9 @@ class CoverageData:
 
     def _init_db(self, db: SqliteDb) -> None:
         """Write the initial contents of the database."""
-        if self._debug.should("dataio"):
-            self._debug.write(f"Initing data file {self._filename!r}")
+        self._debug_dataio("Initing data file", self._filename)
         db.executescript(SCHEMA)
-        db.execute_void("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
+        db.execute_void("INSERT INTO coverage_schema (version) VALUES (?)", (SCHEMA_VERSION,))
 
         # When writing metadata, avoid information that will needlessly change
         # the hash of the data file, unless we're debugging processes.
@@ -331,11 +359,13 @@ class CoverageData:
             ("version", __version__),
         ]
         if self._debug.should("process"):
-            meta_data.extend([
-                ("sys_argv", str(getattr(sys, "argv", None))),
-                ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
-            ])
-        db.executemany_void("insert or ignore into meta (key, value) values (?, ?)", meta_data)
+            meta_data.extend(
+                [
+                    ("sys_argv", str(getattr(sys, "argv", None))),
+                    ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
+                ]
+            )
+        db.executemany_void("INSERT OR IGNORE INTO meta (key, value) VALUES (?, ?)", meta_data)
 
     def _connect(self) -> SqliteDb:
         """Get the SqliteDb object to use."""
@@ -344,11 +374,11 @@ class CoverageData:
         return self._dbs[threading.get_ident()]
 
     def __bool__(self) -> bool:
-        if (threading.get_ident() not in self._dbs and not os.path.exists(self._filename)):
+        if threading.get_ident() not in self._dbs and not os.path.exists(self._filename):
            return False
         try:
             with self._connect() as con:
-                with con.execute("select * from file limit 1") as cur:
+                with con.execute("SELECT * FROM file LIMIT 1") as cur:
                     return bool(list(cur))
         except CoverageException:
             return False
@@ -370,8 +400,7 @@ class CoverageData:
         .. versionadded:: 5.0
 
         """
-        if self._debug.should("dataio"):
-            self._debug.write(f"Dumping data from data file {self._filename!r}")
+        self._debug_dataio("Dumping data from data file", self._filename)
         with self._connect() as con:
             script = con.dump()
             return b"z" + zlib.compress(script.encode("utf-8"))
@@ -391,14 +420,13 @@ class CoverageData:
         .. versionadded:: 5.0
 
         """
-        if self._debug.should("dataio"):
-            self._debug.write(f"Loading data into data file {self._filename!r}")
+        self._debug_dataio("Loading data into data file", self._filename)
         if data[:1] != b"z":
             raise DataError(
                 f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)",
             )
         script = zlib.decompress(data[1:]).decode("utf-8")
-        self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
+        self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug, self._no_disk)
         with db:
             db.executescript(script)
         self._read_db()
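Note: the dumps()/loads() wire format is unchanged here: a literal b"z" prefix followed by the zlib-compressed SQL dump of the database; only the debug logging and the extra SqliteDb argument differ. Round-tripping through this documented public API (the data file path is illustrative):

from coverage import CoverageData

data = CoverageData(".coverage")   # illustrative data file path
data.read()
blob = data.dumps()                # b"z" + zlib-compressed SQL script
assert blob[:1] == b"z"

clone = CoverageData(no_disk=True)
clone.loads(blob)                  # rebuilds the same measurement data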
@@ -414,7 +442,7 @@ class CoverageData:
         if add:
             with self._connect() as con:
                 self._file_map[filename] = con.execute_for_rowid(
-                    "insert or replace into file (path) values (?)",
+                    "INSERT OR REPLACE INTO file (path) VALUES (?)",
                     (filename,),
                 )
         return self._file_map.get(filename)
@@ -424,7 +452,7 @@ class CoverageData:
         assert context is not None
         self._start_using()
         with self._connect() as con:
-            row = con.execute_one("select id from context where context = ?", (context,))
+            row = con.execute_one("SELECT id FROM context WHERE context = ?", (context,))
             if row is not None:
                 return cast(int, row[0])
             else:
@@ -454,7 +482,7 @@ class CoverageData:
         else:
             with self._connect() as con:
                 self._current_context_id = con.execute_for_rowid(
-                    "insert into context (context) values (?)",
+                    "INSERT INTO context (context) VALUES (?)",
                     (context,),
                 )
 
@@ -484,9 +512,13 @@ class CoverageData:
 
         """
         if self._debug.should("dataop"):
-            self._debug.write("Adding lines: %d files, %d lines total" % (
-                len(line_data), sum(len(lines) for lines in line_data.values())
-            ))
+            self._debug.write(
+                "Adding lines: %d files, %d lines total"
+                % (
+                    len(line_data),
+                    sum(len(lines) for lines in line_data.values()),
+                )
+            )
         if self._debug.should("dataop2"):
             for filename, linenos in sorted(line_data.items()):
                 self._debug.write(f"  {filename}: {linenos}")
@@ -499,15 +531,17 @@ class CoverageData:
             for filename, linenos in line_data.items():
                 line_bits = nums_to_numbits(linenos)
                 file_id = self._file_id(filename, add=True)
-                query = "select numbits from line_bits where file_id = ? and context_id = ?"
+                query = "SELECT numbits FROM line_bits WHERE file_id = ? AND context_id = ?"
                 with con.execute(query, (file_id, self._current_context_id)) as cur:
                     existing = list(cur)
                 if existing:
                     line_bits = numbits_union(line_bits, existing[0][0])
 
                 con.execute_void(
-                    "insert or replace into line_bits " +
-                    "(file_id, context_id, numbits) values (?, ?, ?)",
+                    """
+                    INSERT OR REPLACE INTO line_bits
+                        (file_id, context_id, numbits) VALUES (?, ?, ?)
+                    """,
                     (file_id, self._current_context_id, line_bits),
                 )
 
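Note: add_lines() still merges per (file, context) with a read-modify-write: fetch the stored numbits, union in the new lines, then INSERT OR REPLACE the result. A numbits is a byte string used as a bitmap (bit n set means line n was executed), so the union is a bytewise OR. The public helpers imported at the top of this file show the semantics:

from coverage.numbits import nums_to_numbits, numbits_to_nums, numbits_union

old = nums_to_numbits([1, 2, 3])
new = nums_to_numbits([3, 4, 10])
print(numbits_to_nums(numbits_union(old, new)))  # [1, 2, 3, 4, 10]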
@@ -522,9 +556,13 @@ class CoverageData:
 
         """
         if self._debug.should("dataop"):
-            self._debug.write("Adding arcs: %d files, %d arcs total" % (
-                len(arc_data), sum(len(arcs) for arcs in arc_data.values())
-            ))
+            self._debug.write(
+                "Adding arcs: %d files, %d arcs total"
+                % (
+                    len(arc_data),
+                    sum(len(arcs) for arcs in arc_data.values()),
+                )
+            )
         if self._debug.should("dataop2"):
             for filename, arcs in sorted(arc_data.items()):
                 self._debug.write(f"  {filename}: {arcs}")
@@ -540,8 +578,10 @@ class CoverageData:
                 file_id = self._file_id(filename, add=True)
                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
                 con.executemany_void(
-                    "insert or ignore into arc " +
-                    "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
+                    """
+                    INSERT OR IGNORE INTO arc
+                        (file_id, context_id, fromno, tono) VALUES (?, ?, ?, ?)
+                    """,
                     data,
                 )
 
@@ -562,7 +602,7 @@ class CoverageData:
             self._has_arcs = arcs
         with self._connect() as con:
             con.execute_void(
-                "insert or ignore into meta (key, value) values (?, ?)",
+                "INSERT OR IGNORE INTO meta (key, value) VALUES (?, ?)",
                 ("has_arcs", str(int(arcs))),
             )
 
@@ -574,7 +614,7 @@ class CoverageData:
 
         """
         if self._debug.should("dataop"):
-            self._debug.write("Adding file tracers: %d files" % (len(file_tracers),))
+            self._debug.write(f"Adding file tracers: {len(file_tracers)} files")
         if not file_tracers:
             return
         self._start_using()
@@ -586,12 +626,14 @@ class CoverageData:
                 if existing_plugin != plugin_name:
                     raise DataError(
                         "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
-                            filename, existing_plugin, plugin_name,
+                            filename,
+                            existing_plugin,
+                            plugin_name,
                         ),
                     )
             elif plugin_name:
                 con.execute_void(
-                    "insert into tracer (file_id, tracer) values (?, ?)",
+                    "INSERT INTO TRACER (file_id, tracer) VALUES (?, ?)",
                     (file_id, plugin_name),
                 )
 
@@ -612,7 +654,7 @@ class CoverageData:
         if self._debug.should("dataop"):
             self._debug.write(f"Touching {filenames!r}")
         self._start_using()
-        with self._connect():
+        with self._connect():  # Use this to get one transaction.
             if not self._has_arcs and not self._has_lines:
                 raise DataError("Can't touch files in an empty CoverageData")
 
@@ -632,11 +674,10 @@ class CoverageData:
             self._debug.write(f"Purging data for {filenames!r}")
         self._start_using()
         with self._connect() as con:
-
             if self._has_lines:
-                sql = "delete from line_bits where file_id=?"
+                sql = "DELETE FROM line_bits WHERE file_id=?"
             elif self._has_arcs:
-                sql = "delete from arc where file_id=?"
+                sql = "DELETE FROM arc WHERE file_id=?"
             else:
                 raise DataError("Can't purge files in an empty CoverageData")
 
@@ -659,158 +700,169 @@ class CoverageData:
 
         """
        if self._debug.should("dataop"):
-            self._debug.write("Updating with data from {!r}".format(
-                getattr(other_data, "_filename", "???"),
-            ))
+            self._debug.write(
+                "Updating with data from {!r}".format(
+                    getattr(other_data, "_filename", "???"),
+                )
+            )
         if self._has_lines and other_data._has_arcs:
-            raise DataError("Can't combine branch coverage data with statement data")
+            raise DataError(
+                "Can't combine branch coverage data with statement data", slug="cant-combine"
+            )
         if self._has_arcs and other_data._has_lines:
-            raise DataError("Can't combine statement coverage data with branch data")
+            raise DataError(
+                "Can't combine statement coverage data with branch data", slug="cant-combine"
+            )
 
         map_path = map_path or (lambda p: p)
 
         # Force the database we're writing to to exist before we start nesting contexts.
         self._start_using()
-
-        # Collector for all arcs, lines and tracers
         other_data.read()
-        with other_data._connect() as con:
-            # Get files data.
-            with con.execute("select path from file") as cur:
-                files = {path: map_path(path) for (path,) in cur}
-
-            # Get contexts data.
-            with con.execute("select context from context") as cur:
-                contexts = [context for (context,) in cur]
-
-            # Get arc data.
-            with con.execute(
-                "select file.path, context.context, arc.fromno, arc.tono " +
-                "from arc " +
-                "inner join file on file.id = arc.file_id " +
-                "inner join context on context.id = arc.context_id",
-            ) as cur:
-                arcs = [
-                    (files[path], context, fromno, tono)
-                    for (path, context, fromno, tono) in cur
-                ]
 
-            # Get line data.
-            with con.execute(
-                "select file.path, context.context, line_bits.numbits " +
-                "from line_bits " +
-                "inner join file on file.id = line_bits.file_id " +
-                "inner join context on context.id = line_bits.context_id",
-            ) as cur:
-                lines: dict[tuple[str, str], bytes] = {}
-                for path, context, numbits in cur:
-                    key = (files[path], context)
-                    if key in lines:
-                        numbits = numbits_union(lines[key], numbits)
-                    lines[key] = numbits
-
-            # Get tracer data.
-            with con.execute(
-                "select file.path, tracer " +
-                "from tracer " +
-                "inner join file on file.id = tracer.file_id",
-            ) as cur:
-                tracers = {files[path]: tracer for (path, tracer) in cur}
+        # Ensure other_data has a properly initialized database
+        with other_data._connect():
+            pass
 
         with self._connect() as con:
             assert con.con is not None
             con.con.isolation_level = "IMMEDIATE"
 
-            # Get all tracers in the DB. Files not in the tracers are assumed
-            # to have an empty string tracer. Since Sqlite does not support
-            # full outer joins, we have to make two queries to fill the
-            # dictionary.
-            with con.execute("select path from file") as cur:
-                this_tracers = {path: "" for path, in cur}
-            with con.execute(
-                "select file.path, tracer from tracer " +
-                "inner join file on file.id = tracer.file_id",
-            ) as cur:
-                this_tracers.update({
-                    map_path(path): tracer
-                    for path, tracer in cur
-                })
-
-            # Create all file and context rows in the DB.
-            con.executemany_void(
-                "insert or ignore into file (path) values (?)",
-                ((file,) for file in files.values()),
-            )
-            with con.execute("select id, path from file") as cur:
-                file_ids = {path: id for id, path in cur}
-            self._file_map.update(file_ids)
-            con.executemany_void(
-                "insert or ignore into context (context) values (?)",
-                ((context,) for context in contexts),
+            # Register functions for SQLite
+            con.con.create_function("numbits_union", 2, numbits_union)
+            con.con.create_function("map_path", 1, map_path)
+            con.con.create_aggregate(
+                "numbits_union_agg",
+                1,
+                NumbitsUnionAgg,  # type: ignore[arg-type]
             )
-            with con.execute("select id, context from context") as cur:
-                context_ids = {context: id for id, context in cur}
-
-            # Prepare tracers and fail, if a conflict is found.
-            # tracer_paths is used to ensure consistency over the tracer data
-            # and tracer_map tracks the tracers to be inserted.
-            tracer_map = {}
-            for path in files.values():
-                this_tracer = this_tracers.get(path)
-                other_tracer = tracers.get(path, "")
-                # If there is no tracer, there is always the None tracer.
-                if this_tracer is not None and this_tracer != other_tracer:
+
+            # Attach the other database
+            con.execute_void("ATTACH DATABASE ? AS other_db", (other_data.data_filename(),))
+
+            # Create temporary table with mapped file paths to avoid repeated map_path() calls
+            con.execute_void("""
+                CREATE TEMP TABLE other_file_mapped AS
+                SELECT
+                    other_file.id as other_file_id,
+                    map_path(other_file.path) as mapped_path
+                FROM other_db.file AS other_file
+            """)
+
+            # Check for tracer conflicts before proceeding
+            with con.execute("""
+                SELECT other_file_mapped.mapped_path,
+                       COALESCE(main.tracer.tracer, ''),
+                       COALESCE(other_db.tracer.tracer, '')
+                FROM main.file
+                LEFT JOIN main.tracer ON main.file.id = main.tracer.file_id
+                INNER JOIN other_file_mapped ON main.file.path = other_file_mapped.mapped_path
+                LEFT JOIN other_db.tracer ON other_file_mapped.other_file_id = other_db.tracer.file_id
+                WHERE COALESCE(main.tracer.tracer, '') != COALESCE(other_db.tracer.tracer, '')
+            """) as cur:
+                conflicts = list(cur)
+                if conflicts:
+                    path, this_tracer, other_tracer = conflicts[0]
                     raise DataError(
                         "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
-                            path, this_tracer, other_tracer,
+                            path,
+                            this_tracer,
+                            other_tracer,
                         ),
                     )
-                tracer_map[path] = other_tracer
 
-            # Prepare arc and line rows to be inserted by converting the file
-            # and context strings with integer ids. Then use the efficient
-            # `executemany()` to insert all rows at once.
-
-            if arcs:
+            # Insert missing files from other_db (with map_path applied)
+            con.execute_void("""
+                INSERT OR IGNORE INTO main.file (path)
+                SELECT DISTINCT mapped_path FROM other_file_mapped
+            """)
+
+            # Insert missing contexts from other_db
+            con.execute_void("""
+                INSERT OR IGNORE INTO main.context (context)
+                SELECT context FROM other_db.context
+            """)
+
+            # Update file_map with any new files
+            with con.execute("SELECT id, path FROM file") as cur:
+                self._file_map.update({path: id for id, path in cur})
+
+            with con.execute("""
+                SELECT
+                    EXISTS(SELECT 1 FROM other_db.arc),
+                    EXISTS(SELECT 1 FROM other_db.line_bits)
+            """) as cur:
+                has_arcs, has_lines = cur.fetchone()
+
+            # Handle arcs if present in other_db
+            if has_arcs:
                 self._choose_lines_or_arcs(arcs=True)
 
-                arc_rows = (
-                    (file_ids[file], context_ids[context], fromno, tono)
-                    for file, context, fromno, tono in arcs
-                )
-
-                # Write the combined data.
-                con.executemany_void(
-                    "insert or ignore into arc " +
-                    "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
-                    arc_rows,
-                )
-
-            if lines:
+                # Create context mapping table for faster lookups
+                con.execute_void("""
+                    CREATE TEMP TABLE context_mapping AS
+                    SELECT
+                        other_context.id as other_id,
+                        main_context.id as main_id
+                    FROM other_db.context AS other_context
+                    INNER JOIN main.context AS main_context ON other_context.context = main_context.context
+                """)
+
+                con.execute_void("""
+                    INSERT OR IGNORE INTO main.arc (file_id, context_id, fromno, tono)
+                    SELECT
+                        main_file.id,
+                        context_mapping.main_id,
+                        other_arc.fromno,
+                        other_arc.tono
+                    FROM other_db.arc AS other_arc
+                    INNER JOIN other_file_mapped ON other_arc.file_id = other_file_mapped.other_file_id
+                    INNER JOIN context_mapping ON other_arc.context_id = context_mapping.other_id
+                    INNER JOIN main.file AS main_file ON other_file_mapped.mapped_path = main_file.path
+                """)
+
+            # Handle line_bits if present in other_db
+            if has_lines:
                 self._choose_lines_or_arcs(lines=True)
 
-                for (file, context), numbits in lines.items():
-                    with con.execute(
-                        "select numbits from line_bits where file_id = ? and context_id = ?",
-                        (file_ids[file], context_ids[context]),
-                    ) as cur:
-                        existing = list(cur)
-                    if existing:
-                        lines[(file, context)] = numbits_union(numbits, existing[0][0])
-
-                con.executemany_void(
-                    "insert or replace into line_bits " +
-                    "(file_id, context_id, numbits) values (?, ?, ?)",
-                    [
-                        (file_ids[file], context_ids[context], numbits)
-                        for (file, context), numbits in lines.items()
-                    ],
-                )
-
-            con.executemany_void(
-                "insert or ignore into tracer (file_id, tracer) values (?, ?)",
-                ((file_ids[filename], tracer) for filename, tracer in tracer_map.items()),
-            )
+                # Handle line_bits by aggregating other_db data by mapped target,
+                # then inserting/updating
+                con.execute_void("""
+                    INSERT OR REPLACE INTO main.line_bits (file_id, context_id, numbits)
+                    SELECT
+                        main_file.id,
+                        main_context.id,
+                        numbits_union(
+                            COALESCE((
+                                SELECT numbits FROM main.line_bits
+                                WHERE file_id = main_file.id AND context_id = main_context.id
+                            ), X''),
+                            aggregated.combined_numbits
+                        )
+                    FROM (
+                        SELECT
+                            other_file_mapped.mapped_path,
+                            other_context.context,
+                            numbits_union_agg(other_line_bits.numbits) as combined_numbits
+                        FROM other_db.line_bits AS other_line_bits
+                        INNER JOIN other_file_mapped ON other_line_bits.file_id = other_file_mapped.other_file_id
+                        INNER JOIN other_db.context AS other_context ON other_line_bits.context_id = other_context.id
+                        GROUP BY other_file_mapped.mapped_path, other_context.context
+                    ) AS aggregated
+                    INNER JOIN main.file AS main_file ON aggregated.mapped_path = main_file.path
+                    INNER JOIN main.context AS main_context ON aggregated.context = main_context.context
+                """)
+
+            # Insert tracers from other_db (avoiding conflicts we already checked)
+            con.execute_void("""
+                INSERT OR IGNORE INTO main.tracer (file_id, tracer)
+                SELECT
+                    main_file.id,
+                    other_tracer.tracer
+                FROM other_db.tracer AS other_tracer
+                INNER JOIN other_file_mapped ON other_tracer.file_id = other_file_mapped.other_file_id
+                INNER JOIN main.file AS main_file ON other_file_mapped.mapped_path = main_file.path
+            """)
 
         if not self._no_disk:
             # Update all internal cache data.
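Note: this hunk is the heart of the release for this file. update() no longer pulls the other database into Python dicts and re-inserts row by row; it ATTACHes the other data file and merges with set-based SQL, exposing map_path() and numbits_union() to SQLite as user functions. A reduced, runnable sketch of the pattern (the toy one-table schema and file names are illustrative, far simpler than coverage's real schema):

import sqlite3

main = sqlite3.connect(":memory:")
main.execute("CREATE TABLE file (path TEXT UNIQUE)")
main.execute("INSERT INTO file VALUES ('a.py')")

other_path = "other.db"  # illustrative on-disk database to attach
other = sqlite3.connect(other_path)
other.execute("CREATE TABLE IF NOT EXISTS file (path TEXT UNIQUE)")
other.execute("INSERT OR IGNORE INTO file VALUES ('b.py')")
other.commit()
other.close()

main.create_function("map_path", 1, lambda p: p)  # identity mapping here
main.execute("ATTACH DATABASE ? AS other_db", (other_path,))
# One set-based INSERT...SELECT instead of a Python loop per row;
# the UNIQUE constraint makes INSERT OR IGNORE deduplicate.
main.execute("""
    INSERT OR IGNORE INTO main.file (path)
    SELECT DISTINCT map_path(path) FROM other_db.file
""")
print(main.execute("SELECT path FROM file ORDER BY path").fetchall())
# [('a.py',), ('b.py',)]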
@@ -827,16 +879,14 @@ class CoverageData:
         self._reset()
         if self._no_disk:
             return
-        if self._debug.should("dataio"):
-            self._debug.write(f"Erasing data file {self._filename!r}")
+        self._debug_dataio("Erasing data file", self._filename)
         file_be_gone(self._filename)
         if parallel:
             data_dir, local = os.path.split(self._filename)
             local_abs_path = os.path.join(os.path.abspath(data_dir), local)
             pattern = glob.escape(local_abs_path) + ".*"
             for filename in glob.glob(pattern):
-                if self._debug.should("dataio"):
-                    self._debug.write(f"Erasing parallel data file {filename!r}")
+                self._debug_dataio("Erasing parallel data file", filename)
                 file_be_gone(filename)
 
     def read(self) -> None:
@@ -847,7 +897,7 @@ class CoverageData:
 
     def write(self) -> None:
         """Ensure the data is written to the data file."""
-        pass
+        self._debug_dataio("Writing (no-op) data file", self._filename)
 
     def _start_using(self) -> None:
         """Call this before using the database at all."""
@@ -881,7 +931,7 @@ class CoverageData:
         """
         self._start_using()
         with self._connect() as con:
-            with con.execute("select distinct(context) from context") as cur:
+            with con.execute("SELECT DISTINCT(context) FROM context") as cur:
                 contexts = {row[0] for row in cur}
         return contexts
 
@@ -898,10 +948,10 @@ class CoverageData:
             file_id = self._file_id(filename)
             if file_id is None:
                 return None
-            row = con.execute_one("select tracer from tracer where file_id = ?", (file_id,))
+            row = con.execute_one("SELECT tracer FROM tracer WHERE file_id = ?", (file_id,))
             if row is not None:
                 return row[0] or ""
-            return ""
+            return ""  # File was measured, but no tracer associated.
 
     def set_query_context(self, context: str) -> None:
         """Set a context for subsequent querying.
@@ -916,7 +966,7 @@ class CoverageData:
         """
         self._start_using()
         with self._connect() as con:
-            with con.execute("select id from context where context = ?", (context,)) as cur:
+            with con.execute("SELECT id FROM context WHERE context = ?", (context,)) as cur:
                 self._query_context_ids = [row[0] for row in cur.fetchall()]
 
     def set_query_contexts(self, contexts: Sequence[str] | None) -> None:
@@ -934,8 +984,8 @@ class CoverageData:
         self._start_using()
         if contexts:
             with self._connect() as con:
-                context_clause = " or ".join(["context regexp ?"] * len(contexts))
-                with con.execute("select id from context where " + context_clause, contexts) as cur:
+                context_clause = " or ".join(["context REGEXP ?"] * len(contexts))
+                with con.execute("SELECT id FROM context WHERE " + context_clause, contexts) as cur:
                     self._query_context_ids = [row[0] for row in cur.fetchall()]
         else:
             self._query_context_ids = None
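Note: the `context REGEXP ?` clause only works because SQLite's REGEXP operator is sugar for a user function — `X REGEXP Y` calls regexp(Y, X), and stock SQLite ships no such function. The package's SqliteDb wrapper is expected to register one; the standard-library mechanism looks like this (the _regexp helper here is illustrative, not the package's code):

import re
import sqlite3

def _regexp(pattern: str, value: str) -> bool:
    # SQLite evaluates `value REGEXP pattern` by calling this function.
    return re.search(pattern, value) is not None

con = sqlite3.connect(":memory:")
con.create_function("regexp", 2, _regexp)
con.execute("CREATE TABLE context (context TEXT)")
con.executemany("INSERT INTO context VALUES (?)", [("test_one",), ("prod",)])
print(con.execute("SELECT context FROM context WHERE context REGEXP ?", ("^test",)).fetchall())
# [('test_one',)]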
@@ -962,11 +1012,11 @@ class CoverageData:
             if file_id is None:
                 return None
             else:
-                query = "select numbits from line_bits where file_id = ?"
+                query = "SELECT numbits FROM line_bits WHERE file_id = ?"
                 data = [file_id]
                 if self._query_context_ids is not None:
                     ids_array = ", ".join("?" * len(self._query_context_ids))
-                    query += " and context_id in (" + ids_array + ")"
+                    query += " AND context_id IN (" + ids_array + ")"
                     data += self._query_context_ids
                 with con.execute(query, data) as cur:
                     bitmaps = list(cur)
@@ -998,11 +1048,11 @@ class CoverageData:
             if file_id is None:
                 return None
             else:
-                query = "select distinct fromno, tono from arc where file_id = ?"
+                query = "SELECT DISTINCT fromno, tono FROM arc WHERE file_id = ?"
                 data = [file_id]
                 if self._query_context_ids is not None:
                     ids_array = ", ".join("?" * len(self._query_context_ids))
-                    query += " and context_id in (" + ids_array + ")"
+                    query += " AND context_id IN (" + ids_array + ")"
                     data += self._query_context_ids
                 with con.execute(query, data) as cur:
                     return list(cur)
@@ -1024,15 +1074,15 @@ class CoverageData:
 
         lineno_contexts_map = collections.defaultdict(set)
         if self.has_arcs():
-            query = (
-                "select arc.fromno, arc.tono, context.context " +
-                "from arc, context " +
-                "where arc.file_id = ? and arc.context_id = context.id"
-            )
+            query = """
+                SELECT arc.fromno, arc.tono, context.context
+                FROM arc, context
+                WHERE arc.file_id = ? AND arc.context_id = context.id
+                """
             data = [file_id]
             if self._query_context_ids is not None:
                 ids_array = ", ".join("?" * len(self._query_context_ids))
-                query += " and arc.context_id in (" + ids_array + ")"
+                query += " AND arc.context_id IN (" + ids_array + ")"
                 data += self._query_context_ids
             with con.execute(query, data) as cur:
                 for fromno, tono, context in cur:
@@ -1041,15 +1091,15 @@ class CoverageData:
                     if tono > 0:
                         lineno_contexts_map[tono].add(context)
         else:
-            query = (
-                "select l.numbits, c.context from line_bits l, context c " +
-                "where l.context_id = c.id " +
-                "and file_id = ?"
-            )
+            query = """
+                SELECT l.numbits, c.context FROM line_bits l, context c
+                WHERE l.context_id = c.id
+                AND file_id = ?
+                """
             data = [file_id]
             if self._query_context_ids is not None:
                 ids_array = ", ".join("?" * len(self._query_context_ids))
-                query += " and l.context_id in (" + ids_array + ")"
+                query += " AND l.context_id IN (" + ids_array + ")"
                 data += self._query_context_ids
             with con.execute(query, data) as cur:
                 for numbits, context in cur:
@@ -1066,9 +1116,9 @@ class CoverageData:
 
     """
    with SqliteDb(":memory:", debug=NoDebugging()) as db:
-        with db.execute("pragma temp_store") as cur:
+        with db.execute("PRAGMA temp_store") as cur:
             temp_store = [row[0] for row in cur]
-        with db.execute("pragma compile_options") as cur:
+        with db.execute("PRAGMA compile_options") as cur:
             copts = [row[0] for row in cur]
         copts = textwrap.wrap(", ".join(copts), width=75)
 
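Note: the debug-info helper's queries were uppercased along with the rest of the file's SQL. PRAGMA statements such as these return ordinary result rows through the standard sqlite3 module:

import sqlite3

with sqlite3.connect(":memory:") as con:
    print(con.execute("PRAGMA temp_store").fetchall())       # e.g. [(0,)]
    print(con.execute("PRAGMA compile_options").fetchall())  # one build flag per row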