coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. a1_coverage.pth +1 -0
  2. coverage/__init__.py +38 -0
  3. coverage/__main__.py +12 -0
  4. coverage/annotate.py +113 -0
  5. coverage/bytecode.py +197 -0
  6. coverage/cmdline.py +1220 -0
  7. coverage/collector.py +487 -0
  8. coverage/config.py +732 -0
  9. coverage/context.py +74 -0
  10. coverage/control.py +1514 -0
  11. coverage/core.py +139 -0
  12. coverage/data.py +251 -0
  13. coverage/debug.py +669 -0
  14. coverage/disposition.py +59 -0
  15. coverage/env.py +135 -0
  16. coverage/exceptions.py +85 -0
  17. coverage/execfile.py +329 -0
  18. coverage/files.py +553 -0
  19. coverage/html.py +860 -0
  20. coverage/htmlfiles/coverage_html.js +735 -0
  21. coverage/htmlfiles/favicon_32.png +0 -0
  22. coverage/htmlfiles/index.html +199 -0
  23. coverage/htmlfiles/keybd_closed.png +0 -0
  24. coverage/htmlfiles/pyfile.html +149 -0
  25. coverage/htmlfiles/style.css +389 -0
  26. coverage/htmlfiles/style.scss +844 -0
  27. coverage/inorout.py +590 -0
  28. coverage/jsonreport.py +200 -0
  29. coverage/lcovreport.py +218 -0
  30. coverage/misc.py +381 -0
  31. coverage/multiproc.py +120 -0
  32. coverage/numbits.py +146 -0
  33. coverage/parser.py +1215 -0
  34. coverage/patch.py +118 -0
  35. coverage/phystokens.py +197 -0
  36. coverage/plugin.py +617 -0
  37. coverage/plugin_support.py +299 -0
  38. coverage/pth_file.py +16 -0
  39. coverage/py.typed +1 -0
  40. coverage/python.py +272 -0
  41. coverage/pytracer.py +370 -0
  42. coverage/regions.py +127 -0
  43. coverage/report.py +298 -0
  44. coverage/report_core.py +117 -0
  45. coverage/results.py +502 -0
  46. coverage/sqldata.py +1212 -0
  47. coverage/sqlitedb.py +226 -0
  48. coverage/sysmon.py +509 -0
  49. coverage/templite.py +319 -0
  50. coverage/tomlconfig.py +212 -0
  51. coverage/tracer.cpython-314t-aarch64-linux-musl.so +0 -0
  52. coverage/tracer.pyi +43 -0
  53. coverage/types.py +214 -0
  54. coverage/version.py +35 -0
  55. coverage/xmlreport.py +263 -0
  56. coverage-7.13.1.dist-info/METADATA +200 -0
  57. coverage-7.13.1.dist-info/RECORD +61 -0
  58. coverage-7.13.1.dist-info/WHEEL +5 -0
  59. coverage-7.13.1.dist-info/entry_points.txt +4 -0
  60. coverage-7.13.1.dist-info/licenses/LICENSE.txt +177 -0
  61. coverage-7.13.1.dist-info/top_level.txt +1 -0
coverage/sqldata.py ADDED
@@ -0,0 +1,1212 @@
+ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ # For details: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt
+
+ """SQLite coverage data."""
+
+ from __future__ import annotations
+
+ import base64
+ import collections
+ import datetime
+ import functools
+ import glob
+ import itertools
+ import os
+ import random
+ import re
+ import socket
+ import sqlite3
+ import string
+ import sys
+ import textwrap
+ import threading
+ import uuid
+ import zlib
+ from collections.abc import Callable, Collection, Mapping, Sequence
+ from typing import Any, cast
+
+ from coverage.debug import NoDebugging, auto_repr, file_summary
+ from coverage.exceptions import CoverageException, DataError
+ from coverage.misc import Hasher, file_be_gone, isolate_module
+ from coverage.numbits import numbits_to_nums, numbits_union, nums_to_numbits
+ from coverage.sqlitedb import SqliteDb
+ from coverage.types import AnyCallable, FilePath, TArc, TDebugCtl, TLineNo, TWarnFn
+ from coverage.version import __version__
+
+ os = isolate_module(os)
+
+ # If you change the schema: increment the SCHEMA_VERSION and update the
+ # docs in docs/dbschema.rst by running "make cogdoc".
+
+ SCHEMA_VERSION = 7
+
+ # Schema versions:
+ # 1: Released in 5.0a2
+ # 2: Added contexts in 5.0a3.
+ # 3: Replaced line table with line_map table.
+ # 4: Changed line_map.bitmap to line_map.numbits.
+ # 5: Added foreign key declarations.
+ # 6: Key-value in meta.
+ # 7: line_map -> line_bits
+
+ SCHEMA = """\
+ CREATE TABLE coverage_schema (
+     -- One row, to record the version of the schema in this db.
+     version integer
+ );
+
+ CREATE TABLE meta (
+     -- Key-value pairs, to record metadata about the data
+     key text,
+     value text,
+     unique (key)
+     -- Possible keys:
+     --  'has_arcs' boolean -- Is this data recording branches?
+     --  'sys_argv' text -- The coverage command line that recorded the data.
+     --  'version' text -- The version of coverage.py that made the file.
+     --  'when' text -- Datetime when the file was created.
+     --  'hash' text -- Hash of the data.
+ );
+
+ CREATE TABLE file (
+     -- A row per file measured.
+     id integer primary key,
+     path text,
+     unique (path)
+ );
+
+ CREATE TABLE context (
+     -- A row per context measured.
+     id integer primary key,
+     context text,
+     unique (context)
+ );
+
+ CREATE TABLE line_bits (
+     -- If recording lines, a row per context per file executed.
+     -- All of the line numbers for that file/context are in one numbits.
+     file_id integer, -- foreign key to `file`.
+     context_id integer, -- foreign key to `context`.
+     numbits blob, -- see the numbits functions in coverage.numbits
+     foreign key (file_id) references file (id),
+     foreign key (context_id) references context (id),
+     unique (file_id, context_id)
+ );
+
+ CREATE TABLE arc (
+     -- If recording branches, a row per context per from/to line transition executed.
+     file_id integer, -- foreign key to `file`.
+     context_id integer, -- foreign key to `context`.
+     fromno integer, -- line number jumped from.
+     tono integer, -- line number jumped to.
+     foreign key (file_id) references file (id),
+     foreign key (context_id) references context (id),
+     unique (file_id, context_id, fromno, tono)
+ );
+
+ CREATE TABLE tracer (
+     -- A row per file indicating the tracer used for that file.
+     file_id integer primary key,
+     tracer text,
+     foreign key (file_id) references file (id)
+ );
+ """
+
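A minimal sketch (not part of the wheel) of inspecting this schema directly with the stdlib, assuming a hypothetical ".coverage" file in the current directory; the CoverageData API below is the supported interface, and the schema is subject to change:

    import sqlite3

    con = sqlite3.connect(".coverage")
    # coverage_schema has one row: the schema version documented above.
    (version,) = con.execute("SELECT version FROM coverage_schema").fetchone()
    # meta holds key-value metadata such as "version" and "has_arcs".
    meta = dict(con.execute("SELECT key, value FROM meta"))
    print(version, meta.get("version"), meta.get("has_arcs"))
    con.close()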
+
+ def _locked(method: AnyCallable) -> AnyCallable:
+     """A decorator for methods that should hold self._lock."""
+
+     @functools.wraps(method)
+     def _wrapped(self: CoverageData, *args: Any, **kwargs: Any) -> Any:
+         if self._debug.should("lock"):
+             self._debug.write(f"Locking {self._lock!r} for {method.__name__}")
+         with self._lock:
+             if self._debug.should("lock"):
+                 self._debug.write(f"Locked {self._lock!r} for {method.__name__}")
+             return method(self, *args, **kwargs)
+
+     return _wrapped
+
+
+ class NumbitsUnionAgg:
+     """SQLite aggregate function for computing union of numbits."""
+
+     def __init__(self) -> None:
+         self.result = b""
+
+     def step(self, value: bytes) -> None:
+         """Process one value in the aggregation."""
+         self.result = numbits_union(self.result, value)
+
+     def finalize(self) -> bytes:
+         """Return the final aggregated result."""
+         return self.result
+
+
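This aggregate class is registered with SQLite in `update()` below via `create_aggregate`; a hedged sketch (assuming an in-memory database and the `coverage.numbits` helpers imported above) of what that registration enables:

    import sqlite3

    from coverage.numbits import nums_to_numbits, numbits_to_nums
    from coverage.sqldata import NumbitsUnionAgg

    con = sqlite3.connect(":memory:")
    con.create_aggregate("numbits_union_agg", 1, NumbitsUnionAgg)
    con.execute("CREATE TABLE t (nb blob)")
    rows = [(nums_to_numbits([1, 2]),), (nums_to_numbits([2, 5]),)]
    con.executemany("INSERT INTO t VALUES (?)", rows)
    # The aggregate unions the numbits blobs row by row.
    (union,) = con.execute("SELECT numbits_union_agg(nb) FROM t").fetchone()
    print(numbits_to_nums(union))  # [1, 2, 5]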
+ class CoverageData:
+     """Manages collected coverage data, including file storage.
+
+     This class is the public supported API to the data that coverage.py
+     collects during program execution. It includes information about what code
+     was executed. It does not include information from the analysis phase, to
+     determine what lines could have been executed, or what lines were not
+     executed.
+
+     .. note::
+
+         The data file is currently a SQLite database file, with a
+         :ref:`documented schema <dbschema>`. The schema is subject to change
+         though, so be careful about querying it directly. Use this API if you
+         can to isolate yourself from changes.
+
+     There are a number of kinds of data that can be collected:
+
+     * **lines**: the line numbers of source lines that were executed.
+       These are always available.
+
+     * **arcs**: pairs of source and destination line numbers for transitions
+       between source lines. These are only available if branch coverage was
+       used.
+
+     * **file tracer names**: the module names of the file tracer plugins that
+       handled each file in the data.
+
+     Lines, arcs, and file tracer names are stored for each source file. File
+     names in this API are case-sensitive, even on platforms with
+     case-insensitive file systems.
+
+     A data file either stores lines, or arcs, but not both.
+
+     A data file is associated with the data when the :class:`CoverageData`
+     is created, using the parameters `basename`, `suffix`, and `no_disk`. The
+     base name can be queried with :meth:`base_filename`, and the actual file
+     name being used is available from :meth:`data_filename`.
+
+     To read an existing coverage.py data file, use :meth:`read`. You can then
+     access the line, arc, or file tracer data with :meth:`lines`, :meth:`arcs`,
+     or :meth:`file_tracer`.
+
+     The :meth:`has_arcs` method indicates whether arc data is available. You
+     can get a set of the files in the data with :meth:`measured_files`. As
+     with most Python containers, you can determine if there is any data at all
+     by using this object as a boolean value.
+
+     The contexts for each line in a file can be read with
+     :meth:`contexts_by_lineno`.
+
+     To limit querying to certain contexts, use :meth:`set_query_context` or
+     :meth:`set_query_contexts`. These will narrow the focus of subsequent
+     :meth:`lines`, :meth:`arcs`, and :meth:`contexts_by_lineno` calls. The set
+     of all measured context names can be retrieved with
+     :meth:`measured_contexts`.
+
+     Most data files will be created by coverage.py itself, but you can use
+     methods here to create data files if you like. The :meth:`add_lines`,
+     :meth:`add_arcs`, and :meth:`add_file_tracers` methods add data, in ways
+     that are convenient for coverage.py.
+
+     To record data for contexts, use :meth:`set_context` to set a context to
+     be used for subsequent :meth:`add_lines` and :meth:`add_arcs` calls.
+
+     To add a source file without any measured data, use :meth:`touch_file`,
+     or :meth:`touch_files` for a list of such files.
+
+     Write the data to its file with :meth:`write`.
+
+     You can clear the data in memory with :meth:`erase`. Data for specific
+     files can be removed from the database with :meth:`purge_files`.
+
+     Two data collections can be combined by using :meth:`update` on one
+     :class:`CoverageData`, passing it the other.
+
+     Data in a :class:`CoverageData` can be serialized and deserialized with
+     :meth:`dumps` and :meth:`loads`.
+
+     The methods used during the coverage.py collection phase
+     (:meth:`add_lines`, :meth:`add_arcs`, :meth:`set_context`, and
+     :meth:`add_file_tracers`) are thread-safe. Other methods may not be.
+
+     """
+
+     def __init__(
+         self,
+         basename: FilePath | None = None,
+         suffix: str | bool | None = None,
+         no_disk: bool = False,
+         warn: TWarnFn | None = None,
+         debug: TDebugCtl | None = None,
+     ) -> None:
+         """Create a :class:`CoverageData` object to hold coverage-measured data.
+
+         Arguments:
+             basename (str): the base name of the data file, defaulting to
+                 ".coverage". This can be a path to a file in another directory.
+             suffix (str or bool): has the same meaning as the `data_suffix`
+                 argument to :class:`coverage.Coverage`.
+             no_disk (bool): if True, keep all data in memory, and don't
+                 write any disk file.
+             warn: a warning callback function, accepting a warning message
+                 argument.
+             debug: a `DebugControl` object (optional)
+
+         """
+         self._no_disk = no_disk
+         self._basename = os.path.abspath(basename or ".coverage")
+         self._suffix = suffix
+         self._our_suffix = suffix is True
+         self._warn = warn
+         self._debug = debug or NoDebugging()
+
+         self._choose_filename()
+         # Maps filenames to row ids.
+         self._file_map: dict[str, int] = {}
+         # Maps thread ids to SqliteDb objects.
+         self._dbs: dict[int, SqliteDb] = {}
+         self._pid = os.getpid()
+         # Synchronize the operations used during collection.
+         self._lock = threading.RLock()
+
+         self._wrote_hash = False
+         self._hasher = Hasher()
+
+         # Are we in sync with the data file?
+         self._have_used = False
+
+         self._has_lines = False
+         self._has_arcs = False
+
+         self._current_context: str | None = None
+         self._current_context_id: int | None = None
+         self._query_context_ids: list[int] | None = None
+
+     __repr__ = auto_repr
+
+     def _debug_dataio(self, msg: str, filename: str) -> None:
+         """A helper for debug messages which are all similar."""
+         if self._debug.should("dataio"):
+             self._debug.write(f"{msg} {filename!r} ({file_summary(filename)})")
+
+     def _choose_filename(self) -> None:
+         """Set self._filename based on inited attributes."""
+         if self._no_disk:
+             self._filename = f"file:coverage-{uuid.uuid4()}?mode=memory&cache=shared"
+         else:
+             self._filename = self._basename
+             suffix = filename_suffix(self._suffix)
+             if suffix:
+                 self._filename += f".{suffix}"
+
+     def _reset(self) -> None:
+         """Reset our attributes."""
+         if not self._no_disk:
+             self.close()
+         self._file_map = {}
+         self._have_used = False
+         self._current_context_id = None
+
+     def close(self, force: bool = False) -> None:
+         """Really close all the database objects."""
+         if self._debug.should("dataio"):
+             self._debug.write(f"Closing dbs, force={force}: {self._dbs}")
+         for db in self._dbs.values():
+             db.close(force=force)
+         self._dbs = {}
+
+     def _open_db(self) -> None:
+         """Open an existing db file, and read its metadata."""
+         self._debug_dataio("Opening data file", self._filename)
+         self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug, self._no_disk)
+         self._read_db()
+
+     def _read_db(self) -> None:
+         """Read the metadata from a database so that we are ready to use it."""
+         with self._dbs[threading.get_ident()] as db:
+             try:
+                 row = db.execute_one("select version from coverage_schema")
+                 assert row is not None
+             except Exception as exc:
+                 if "no such table: coverage_schema" in str(exc):
+                     self._init_db(db)
+                 else:
+                     raise DataError(
+                         "Data file {!r} doesn't seem to be a coverage data file: {}".format(
+                             self._filename,
+                             exc,
+                         ),
+                     ) from exc
+             else:
+                 schema_version = row[0]
+                 if schema_version != SCHEMA_VERSION:
+                     raise DataError(
+                         "Couldn't use data file {!r}: wrong schema: {} instead of {}".format(
+                             self._filename,
+                             schema_version,
+                             SCHEMA_VERSION,
+                         ),
+                     )
+
+             row = db.execute_one("select value from meta where key = 'has_arcs'")
+             if row is not None:
+                 self._has_arcs = bool(int(row[0]))
+                 self._has_lines = not self._has_arcs
+
+             with db.execute("select id, path from file") as cur:
+                 for file_id, path in cur:
+                     self._file_map[path] = file_id
+
+     def _init_db(self, db: SqliteDb) -> None:
+         """Write the initial contents of the database."""
+         self._debug_dataio("Initing data file", self._filename)
+         db.executescript(SCHEMA)
+         db.execute_void("INSERT INTO coverage_schema (version) VALUES (?)", (SCHEMA_VERSION,))
+
+         # When writing metadata, avoid information that will needlessly change
+         # the hash of the data file, unless we're debugging processes.
+         # If we control the suffix, then the hash is in the file name, and we
+         # can write any metadata without affecting the hash determination
+         # later.
+         meta_data = [
+             ("version", __version__),
+         ]
+         if self._our_suffix or self._debug.should("process"):
+             meta_data.extend(
+                 [
+                     ("sys_argv", str(getattr(sys, "argv", None))),
+                     ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
+                 ]
+             )
+         db.executemany_void("INSERT OR IGNORE INTO meta (key, value) VALUES (?, ?)", meta_data)
+
+     def _connect(self) -> SqliteDb:
+         """Get the SqliteDb object to use."""
+         if threading.get_ident() not in self._dbs:
+             self._open_db()
+         return self._dbs[threading.get_ident()]
+
+     def __bool__(self) -> bool:
+         if threading.get_ident() not in self._dbs and not os.path.exists(self._filename):
+             return False
+         try:
+             with self._connect() as con:
+                 with con.execute("SELECT * FROM file LIMIT 1") as cur:
+                     return bool(list(cur))
+         except CoverageException:
+             return False
+
+     def dumps(self) -> bytes:
+         """Serialize the current data to a byte string.
+
+         The format of the serialized data is not documented. It is only
+         suitable for use with :meth:`loads` in the same version of
+         coverage.py.
+
+         Note that this serialization is not what gets stored in coverage data
+         files. This method is meant to produce bytes that can be transmitted
+         elsewhere and then deserialized with :meth:`loads`.
+
+         Returns:
+             A byte string of serialized data.
+
+         .. versionadded:: 5.0
+
+         """
+         self._debug_dataio("Dumping data from data file", self._filename)
+         with self._connect() as con:
+             script = con.dump()
+             return b"z" + zlib.compress(script.encode("utf-8"))
+
+     def loads(self, data: bytes) -> None:
+         """Deserialize data from :meth:`dumps`.
+
+         Use with a newly-created empty :class:`CoverageData` object. It's
+         undefined what happens if the object already has data in it.
+
+         Note that this is not for reading data from a coverage data file. It
+         is only for use on data you produced with :meth:`dumps`.
+
+         Arguments:
+             data: A byte string of serialized data produced by :meth:`dumps`.
+
+         .. versionadded:: 5.0
+
+         """
+         self._debug_dataio("Loading data into data file", self._filename)
+         if data[:1] != b"z":
+             raise DataError(
+                 f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)",
+             )
+         script = zlib.decompress(data[1:]).decode("utf-8")
+         self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug, self._no_disk)
+         with db:
+             db.executescript(script)
+         self._read_db()
+         self._have_used = True
+
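A hedged round-trip sketch of `dumps` and `loads`, following the docstrings above (the module name and line numbers are hypothetical; `no_disk=True` keeps everything in memory):

    from coverage.sqldata import CoverageData

    src = CoverageData(no_disk=True)
    src.add_lines({"mod.py": {1, 2, 5}})  # hypothetical file and line numbers
    blob = src.dumps()

    dst = CoverageData(no_disk=True)      # must be newly created and empty
    dst.loads(blob)
    print(sorted(dst.lines("mod.py") or []))  # [1, 2, 5]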
+     def _file_id(self, filename: str, add: bool = False) -> int | None:
+         """Get the file id for `filename`.
+
+         If filename is not in the database yet, add it if `add` is True.
+         If `add` is not True, return None.
+         """
+         if filename not in self._file_map:
+             if add:
+                 with self._connect() as con:
+                     self._file_map[filename] = con.execute_for_rowid(
+                         "INSERT OR REPLACE INTO file (path) VALUES (?)",
+                         (filename,),
+                     )
+         return self._file_map.get(filename)
+
+     def _context_id(self, context: str) -> int | None:
+         """Get the id for a context."""
+         assert context is not None
+         self._start_using()
+         with self._connect() as con:
+             row = con.execute_one("SELECT id FROM context WHERE context = ?", (context,))
+             if row is not None:
+                 return cast(int, row[0])
+             else:
+                 return None
+
+     @_locked
+     def set_context(self, context: str | None) -> None:
+         """Set the current context for future :meth:`add_lines` etc.
+
+         `context` is a str, the name of the context to use for the next data
+         additions. The context persists until the next :meth:`set_context`.
+
+         .. versionadded:: 5.0
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(f"Setting coverage context: {context!r}")
+         self._current_context = context
+         self._current_context_id = None
+         self._hasher.update(context)
+
+     def _set_context_id(self) -> None:
+         """Use the _current_context to set _current_context_id."""
+         context = self._current_context or ""
+         context_id = self._context_id(context)
+         if context_id is not None:
+             self._current_context_id = context_id
+         else:
+             with self._connect() as con:
+                 self._current_context_id = con.execute_for_rowid(
+                     "INSERT INTO context (context) VALUES (?)",
+                     (context,),
+                 )
+
+     def base_filename(self) -> str:
+         """The base filename for storing data.
+
+         .. versionadded:: 5.0
+
+         """
+         return self._basename
+
+     def data_filename(self) -> str:
+         """Where is the data stored?
+
+         .. versionadded:: 5.0
+
+         """
+         return self._filename
+
+     @_locked
+     def add_lines(self, line_data: Mapping[str, Collection[TLineNo]]) -> None:
+         """Add measured line data.
+
+         `line_data` is a dictionary mapping file names to iterables of ints::
+
+             { filename: { line1, line2, ... }, ...}
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(
+                 "Adding lines: %d files, %d lines total"
+                 % (
+                     len(line_data),
+                     sum(len(lines) for lines in line_data.values()),
+                 )
+             )
+         if self._debug.should("dataop2"):
+             for filename, linenos in sorted(line_data.items()):
+                 self._debug.write(f"  {filename}: {linenos}")
+         self._start_using()
+         self._choose_lines_or_arcs(lines=True)
+         if not line_data:
+             return
+         with self._connect() as con:
+             self._set_context_id()
+             for filename, linenos in line_data.items():
+                 self._hasher.update(filename)
+                 line_bits = nums_to_numbits(linenos)
+                 self._hasher.update(line_bits)
+                 file_id = self._file_id(filename, add=True)
+                 query = "SELECT numbits FROM line_bits WHERE file_id = ? AND context_id = ?"
+                 with con.execute(query, (file_id, self._current_context_id)) as cur:
+                     existing = list(cur)
+                 if existing:
+                     line_bits = numbits_union(line_bits, existing[0][0])
+
+                 con.execute_void(
+                     """
+                     INSERT OR REPLACE INTO line_bits
+                         (file_id, context_id, numbits) VALUES (?, ?, ?)
+                     """,
+                     (file_id, self._current_context_id, line_bits),
+                 )
+
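A short sketch of recording lines under a named context, per `set_context` and the dictionary shape documented above (the file name and context name are hypothetical):

    from coverage.sqldata import CoverageData

    data = CoverageData(no_disk=True)
    data.set_context("test_one")           # applies to subsequent add_lines()
    data.add_lines({"mod.py": {1, 2, 3}})  # {filename: iterable of line numbers}
    data.set_context(None)                 # back to the default "" context
    data.add_lines({"mod.py": {9}})
    print(data.measured_contexts())        # {"", "test_one"} (set order varies)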
+     @_locked
+     def add_arcs(self, arc_data: Mapping[str, Collection[TArc]]) -> None:
+         """Add measured arc data.
+
+         `arc_data` is a dictionary mapping file names to iterables of pairs of
+         ints::
+
+             { filename: { (l1,l2), (l1,l2), ... }, ...}
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(
+                 "Adding arcs: %d files, %d arcs total"
+                 % (
+                     len(arc_data),
+                     sum(len(arcs) for arcs in arc_data.values()),
+                 )
+             )
+         if self._debug.should("dataop2"):
+             for filename, arcs in sorted(arc_data.items()):
+                 self._debug.write(f"  {filename}: {arcs}")
+         self._start_using()
+         self._choose_lines_or_arcs(arcs=True)
+         if not arc_data:
+             return
+         with self._connect() as con:
+             self._set_context_id()
+             for filename, arcs in arc_data.items():
+                 self._hasher.update(filename)
+                 self._hasher.update(arcs)
+                 if not arcs:
+                     continue
+                 file_id = self._file_id(filename, add=True)
+                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
+                 con.executemany_void(
+                     """
+                     INSERT OR IGNORE INTO arc
+                         (file_id, context_id, fromno, tono) VALUES (?, ?, ?, ?)
+                     """,
+                     data,
+                 )
+
+     def _choose_lines_or_arcs(self, lines: bool = False, arcs: bool = False) -> None:
+         """Force the data file to choose between lines and arcs."""
+         assert lines or arcs
+         assert not (lines and arcs)
+         if lines and self._has_arcs:
+             if self._debug.should("dataop"):
+                 self._debug.write("Error: Can't add line measurements to existing branch data")
+             raise DataError("Can't add line measurements to existing branch data")
+         if arcs and self._has_lines:
+             if self._debug.should("dataop"):
+                 self._debug.write("Error: Can't add branch measurements to existing line data")
+             raise DataError("Can't add branch measurements to existing line data")
+         if not self._has_arcs and not self._has_lines:
+             self._has_lines = lines
+             self._has_arcs = arcs
+             with self._connect() as con:
+                 con.execute_void(
+                     "INSERT OR IGNORE INTO meta (key, value) VALUES (?, ?)",
+                     ("has_arcs", str(int(arcs))),
+                 )
+
+     @_locked
+     def add_file_tracers(self, file_tracers: Mapping[str, str]) -> None:
+         """Add per-file plugin information.
+
+         `file_tracers` is { filename: plugin_name, ... }
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(f"Adding file tracers: {len(file_tracers)} files")
+         if not file_tracers:
+             return
+         self._start_using()
+         with self._connect() as con:
+             for filename, plugin_name in file_tracers.items():
+                 self._hasher.update(filename)
+                 self._hasher.update(plugin_name)
+                 file_id = self._file_id(filename, add=True)
+                 existing_plugin = self.file_tracer(filename)
+                 if existing_plugin:
+                     if existing_plugin != plugin_name:
+                         raise DataError(
+                             "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
+                                 filename,
+                                 existing_plugin,
+                                 plugin_name,
+                             ),
+                         )
+                 elif plugin_name:
+                     con.execute_void(
+                         "INSERT INTO tracer (file_id, tracer) VALUES (?, ?)",
+                         (file_id, plugin_name),
+                     )
+
+     def touch_file(self, filename: str, plugin_name: str = "") -> None:
+         """Ensure that `filename` appears in the data, empty if needed.
+
+         `plugin_name` is the name of the plugin responsible for this file.
+         It is used to associate the right filereporter, etc.
+         """
+         self.touch_files([filename], plugin_name)
+
+     def touch_files(self, filenames: Collection[str], plugin_name: str | None = None) -> None:
+         """Ensure that `filenames` appear in the data, empty if needed.
+
+         `plugin_name` is the name of the plugin responsible for these files.
+         It is used to associate the right filereporter, etc.
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(f"Touching {filenames!r}")
+         self._start_using()
+         with self._connect():  # Use this to get one transaction.
+             if not self._has_arcs and not self._has_lines:
+                 raise DataError("Can't touch files in an empty CoverageData")
+
+             for filename in filenames:
+                 self._file_id(filename, add=True)
+                 if plugin_name:
+                     # Set the tracer for this file
+                     self.add_file_tracers({filename: plugin_name})
+
+     def purge_files(self, filenames: Collection[str]) -> None:
+         """Purge any existing coverage data for the given `filenames`.
+
+         .. versionadded:: 7.2
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(f"Purging data for {filenames!r}")
+         self._start_using()
+         with self._connect() as con:
+             if self._has_lines:
+                 sql = "DELETE FROM line_bits WHERE file_id=?"
+             elif self._has_arcs:
+                 sql = "DELETE FROM arc WHERE file_id=?"
+             else:
+                 raise DataError("Can't purge files in an empty CoverageData")
+
+             for filename in filenames:
+                 file_id = self._file_id(filename, add=False)
+                 if file_id is None:
+                     continue
+                 con.execute_void(sql, (file_id,))
+
+     def update(
+         self,
+         other_data: CoverageData,
+         map_path: Callable[[str], str] | None = None,
+     ) -> None:
+         """Update this data with data from another :class:`CoverageData`.
+
+         If `map_path` is provided, it's a function that re-maps paths to match
+         the local machine's. Note: `map_path` is None only when called
+         directly from the test suite.
+
+         """
+         if self._debug.should("dataop"):
+             self._debug.write(
+                 "Updating with data from {!r}".format(
+                     getattr(other_data, "_filename", "???"),
+                 )
+             )
+         if self._has_lines and other_data._has_arcs:
+             raise DataError(
+                 "Can't combine branch coverage data with statement data", slug="cant-combine"
+             )
+         if self._has_arcs and other_data._has_lines:
+             raise DataError(
+                 "Can't combine statement coverage data with branch data", slug="cant-combine"
+             )
+
+         map_path = map_path or (lambda p: p)
+
+         # Force the database we're writing to to exist before we start nesting contexts.
+         self._start_using()
+         other_data.read()
+
+         # Ensure other_data has a properly initialized database
+         with other_data._connect():
+             pass
+
+         with self._connect() as con:
+             assert con.con is not None
+             con.con.isolation_level = "IMMEDIATE"
+
+             # Register functions for SQLite
+             con.con.create_function("numbits_union", 2, numbits_union)
+             con.con.create_function("map_path", 1, map_path)
+             con.con.create_aggregate(
+                 "numbits_union_agg",
+                 1,
+                 NumbitsUnionAgg,  # type: ignore[arg-type]
+             )
+
+             # Attach the other database
+             con.execute_void("ATTACH DATABASE ? AS other_db", (other_data.data_filename(),))
+
+             # Create temporary table with mapped file paths to avoid repeated map_path() calls
+             con.execute_void("""
+                 CREATE TEMP TABLE other_file_mapped AS
+                 SELECT
+                     other_file.id as other_file_id,
+                     map_path(other_file.path) as mapped_path
+                 FROM other_db.file AS other_file
+             """)
+
+             # Check for tracer conflicts before proceeding
+             with con.execute("""
+                 SELECT other_file_mapped.mapped_path,
+                     COALESCE(main.tracer.tracer, ''),
+                     COALESCE(other_db.tracer.tracer, '')
+                 FROM main.file
+                 LEFT JOIN main.tracer ON main.file.id = main.tracer.file_id
+                 INNER JOIN other_file_mapped ON main.file.path = other_file_mapped.mapped_path
+                 LEFT JOIN other_db.tracer ON other_file_mapped.other_file_id = other_db.tracer.file_id
+                 WHERE COALESCE(main.tracer.tracer, '') != COALESCE(other_db.tracer.tracer, '')
+             """) as cur:
+                 conflicts = list(cur)
+             if conflicts:
+                 path, this_tracer, other_tracer = conflicts[0]
+                 raise DataError(
+                     "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
+                         path,
+                         this_tracer,
+                         other_tracer,
+                     ),
+                 )
+
+             # Insert missing files from other_db (with map_path applied)
+             con.execute_void("""
+                 INSERT OR IGNORE INTO main.file (path)
+                 SELECT DISTINCT mapped_path FROM other_file_mapped
+             """)
+
+             # Insert missing contexts from other_db
+             con.execute_void("""
+                 INSERT OR IGNORE INTO main.context (context)
+                 SELECT context FROM other_db.context
+             """)
+
+             # Update file_map with any new files
+             with con.execute("SELECT id, path FROM file") as cur:
+                 self._file_map.update({path: id for id, path in cur})
+
+             with con.execute("""
+                 SELECT
+                     EXISTS(SELECT 1 FROM other_db.arc),
+                     EXISTS(SELECT 1 FROM other_db.line_bits)
+             """) as cur:
+                 has_arcs, has_lines = cur.fetchone()
+
+             # Handle arcs if present in other_db
+             if has_arcs:
+                 self._choose_lines_or_arcs(arcs=True)
+
+                 # Create context mapping table for faster lookups
+                 con.execute_void("""
+                     CREATE TEMP TABLE context_mapping AS
+                     SELECT
+                         other_context.id as other_id,
+                         main_context.id as main_id
+                     FROM other_db.context AS other_context
+                     INNER JOIN main.context AS main_context ON other_context.context = main_context.context
+                 """)
+
+                 con.execute_void("""
+                     INSERT OR IGNORE INTO main.arc (file_id, context_id, fromno, tono)
+                     SELECT
+                         main_file.id,
+                         context_mapping.main_id,
+                         other_arc.fromno,
+                         other_arc.tono
+                     FROM other_db.arc AS other_arc
+                     INNER JOIN other_file_mapped ON other_arc.file_id = other_file_mapped.other_file_id
+                     INNER JOIN context_mapping ON other_arc.context_id = context_mapping.other_id
+                     INNER JOIN main.file AS main_file ON other_file_mapped.mapped_path = main_file.path
+                 """)
+
+             # Handle line_bits if present in other_db
+             if has_lines:
+                 self._choose_lines_or_arcs(lines=True)
+
+                 # Handle line_bits by aggregating other_db data by mapped target,
+                 # then inserting/updating
+                 con.execute_void("""
+                     INSERT OR REPLACE INTO main.line_bits (file_id, context_id, numbits)
+                     SELECT
+                         main_file.id,
+                         main_context.id,
+                         numbits_union(
+                             COALESCE((
+                                 SELECT numbits FROM main.line_bits
+                                 WHERE file_id = main_file.id AND context_id = main_context.id
+                             ), X''),
+                             aggregated.combined_numbits
+                         )
+                     FROM (
+                         SELECT
+                             other_file_mapped.mapped_path,
+                             other_context.context,
+                             numbits_union_agg(other_line_bits.numbits) as combined_numbits
+                         FROM other_db.line_bits AS other_line_bits
+                         INNER JOIN other_file_mapped ON other_line_bits.file_id = other_file_mapped.other_file_id
+                         INNER JOIN other_db.context AS other_context ON other_line_bits.context_id = other_context.id
+                         GROUP BY other_file_mapped.mapped_path, other_context.context
+                     ) AS aggregated
+                     INNER JOIN main.file AS main_file ON aggregated.mapped_path = main_file.path
+                     INNER JOIN main.context AS main_context ON aggregated.context = main_context.context
+                 """)
+
+             # Insert tracers from other_db (avoiding conflicts we already checked)
+             con.execute_void("""
+                 INSERT OR IGNORE INTO main.tracer (file_id, tracer)
+                 SELECT
+                     main_file.id,
+                     other_tracer.tracer
+                 FROM other_db.tracer AS other_tracer
+                 INNER JOIN other_file_mapped ON other_tracer.file_id = other_file_mapped.other_file_id
+                 INNER JOIN main.file AS main_file ON other_file_mapped.mapped_path = main_file.path
+             """)
+
+         if not self._no_disk:
+             # Update all internal cache data.
+             self._reset()
+             self.read()
+
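A hedged sketch of combining a parallel data file into a base file with `update`, using a `map_path` that rewrites a hypothetical CI checkout prefix to a local one (both file names are illustrative):

    from coverage.sqldata import CoverageData

    combined = CoverageData(".coverage")
    other = CoverageData(".coverage.ci-host.pid1234.Xabc123x")
    # update() reads the other file itself, re-mapping paths as it merges.
    combined.update(other, map_path=lambda p: p.replace("/ci/build/", "/home/me/src/"))
    combined.write()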
+     def erase(self, parallel: bool = False) -> None:
+         """Erase the data in this object.
+
+         If `parallel` is true, then also deletes data files created from the
+         basename by parallel-mode.
+
+         """
+         self._reset()
+         if self._no_disk:
+             return
+         self._debug_dataio("Erasing data file", self._filename)
+         file_be_gone(self._filename)
+         if parallel:
+             data_dir, local = os.path.split(self._filename)
+             local_abs_path = os.path.join(os.path.abspath(data_dir), local)
+             pattern = glob.escape(local_abs_path) + ".*"
+             for filename in glob.glob(pattern):
+                 self._debug_dataio("Erasing parallel data file", filename)
+                 file_be_gone(filename)
+
+     def read(self) -> None:
+         """Start using an existing data file."""
+         if os.path.exists(self._filename):
+             with self._connect():
+                 self._have_used = True
+
+     def write(self) -> None:
+         """Ensure the data is written to the data file."""
+         if self._our_suffix and not self._wrote_hash:
+             self._debug_dataio("Finishing data file", self._filename)
+             with self._connect() as con:
+                 con.execute_void(
+                     "INSERT OR IGNORE INTO meta (key, value) VALUES ('hash', ?)",
+                     (self._hasher.hexdigest(),),
+                 )
+             self.close()
+             data_hash = base64.b64encode(self._hasher.digest(), altchars=b"01").decode()[:NHASH]
+             current_filename = self._filename
+             self._filename += f".H{data_hash}h"
+             self._debug_dataio("Renaming data file to", self._filename)
+             os.rename(current_filename, self._filename)
+             self._wrote_hash = True
+         else:
+             self._debug_dataio("Writing (no-op) data file", self._filename)
+
+     def _start_using(self) -> None:
+         """Call this before using the database at all."""
+         if self._pid != os.getpid():
+             # Looks like we forked! Have to start a new data file.
+             self._reset()
+             self._choose_filename()
+             self._pid = os.getpid()
+         if not self._have_used:
+             self.erase()
+         self._have_used = True
+
+     def has_arcs(self) -> bool:
+         """Does the database have arcs (True) or lines (False)?"""
+         return bool(self._has_arcs)
+
+     def measured_files(self) -> set[str]:
+         """A set of all files that have been measured.
+
+         Note that a file may be mentioned as measured even though no lines or
+         arcs for that file are present in the data.
+
+         """
+         return set(self._file_map)
+
+     def measured_contexts(self) -> set[str]:
+         """A set of all contexts that have been measured.
+
+         .. versionadded:: 5.0
+
+         """
+         self._start_using()
+         with self._connect() as con:
+             with con.execute("SELECT DISTINCT(context) FROM context") as cur:
+                 contexts = {row[0] for row in cur}
+         return contexts
+
+     def file_tracer(self, filename: str) -> str | None:
+         """Get the plugin name of the file tracer for a file.
+
+         Returns the name of the plugin that handles this file. If the file was
+         measured, but didn't use a plugin, then "" is returned. If the file
+         was not measured, then None is returned.
+
+         """
+         self._start_using()
+         with self._connect() as con:
+             file_id = self._file_id(filename)
+             if file_id is None:
+                 return None
+             row = con.execute_one("SELECT tracer FROM tracer WHERE file_id = ?", (file_id,))
+             if row is not None:
+                 return row[0] or ""
+             return ""  # File was measured, but no tracer associated.
+
+     def set_query_context(self, context: str) -> None:
+         """Set a context for subsequent querying.
+
+         The next :meth:`lines`, :meth:`arcs`, or :meth:`contexts_by_lineno`
+         calls will be limited to only one context. `context` is a string which
+         must match a context exactly. If it does not, no exception is raised,
+         but queries will return no data.
+
+         .. versionadded:: 5.0
+
+         """
+         self._start_using()
+         with self._connect() as con:
+             with con.execute("SELECT id FROM context WHERE context = ?", (context,)) as cur:
+                 self._query_context_ids = [row[0] for row in cur.fetchall()]
+
+     def set_query_contexts(self, contexts: Sequence[str] | None) -> None:
+         """Set a number of contexts for subsequent querying.
+
+         The next :meth:`lines`, :meth:`arcs`, or :meth:`contexts_by_lineno`
+         calls will be limited to the specified contexts. `contexts` is a list
+         of Python regular expressions. Contexts will be matched using
+         :func:`re.search <python:re.search>`. Data will be included in query
+         results if they are part of any of the contexts matched.
+
+         .. versionadded:: 5.0
+
+         """
+         self._start_using()
+         if contexts:
+             with self._connect() as con:
+                 context_clause = " or ".join(["context REGEXP ?"] * len(contexts))
+                 with con.execute("SELECT id FROM context WHERE " + context_clause, contexts) as cur:
+                     self._query_context_ids = [row[0] for row in cur.fetchall()]
+         else:
+             self._query_context_ids = None
+
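A brief sketch of narrowing queries by context, per the regex semantics above (the context pattern and file name are hypothetical):

    from coverage.sqldata import CoverageData

    data = CoverageData()
    data.read()
    data.set_query_contexts([r"test_login.*"])  # regexes, matched with re.search
    print(data.lines("mod.py"))                 # only lines hit in matching contexts
    data.set_query_contexts(None)               # clear the filter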
+     def lines(self, filename: str) -> list[TLineNo] | None:
+         """Get the list of lines executed for a source file.
+
+         If the file was not measured, returns None. A file might be measured,
+         and have no lines executed, in which case an empty list is returned.
+
+         If the file was executed, returns a list of integers, the line numbers
+         executed in the file. The list is in no particular order.
+
+         """
+         self._start_using()
+         if self.has_arcs():
+             arcs = self.arcs(filename)
+             if arcs is not None:
+                 all_lines = itertools.chain.from_iterable(arcs)
+                 return list({l for l in all_lines if l > 0})
+
+         with self._connect() as con:
+             file_id = self._file_id(filename)
+             if file_id is None:
+                 return None
+             else:
+                 query = "SELECT numbits FROM line_bits WHERE file_id = ?"
+                 data = [file_id]
+                 if self._query_context_ids is not None:
+                     ids_array = ", ".join("?" * len(self._query_context_ids))
+                     query += " AND context_id IN (" + ids_array + ")"
+                     data += self._query_context_ids
+                 with con.execute(query, data) as cur:
+                     bitmaps = list(cur)
+                 nums = set()
+                 for row in bitmaps:
+                     nums.update(numbits_to_nums(row[0]))
+                 return list(nums)
+
+     def arcs(self, filename: str) -> list[TArc] | None:
+         """Get the list of arcs executed for a file.
+
+         If the file was not measured, returns None. A file might be measured,
+         and have no arcs executed, in which case an empty list is returned.
+
+         If the file was executed, returns a list of 2-tuples of integers. Each
+         pair is a starting line number and an ending line number for a
+         transition from one line to another. The list is in no particular
+         order.
+
+         Negative numbers have special meaning. If the starting line number is
+         -N, it represents an entry to the code object that starts at line N.
+         If the ending line number is -N, it's an exit from the code object that
+         starts at line N.
+
+         """
+         self._start_using()
+         with self._connect() as con:
+             file_id = self._file_id(filename)
+             if file_id is None:
+                 return None
+             else:
+                 query = "SELECT DISTINCT fromno, tono FROM arc WHERE file_id = ?"
+                 data = [file_id]
+                 if self._query_context_ids is not None:
+                     ids_array = ", ".join("?" * len(self._query_context_ids))
+                     query += " AND context_id IN (" + ids_array + ")"
+                     data += self._query_context_ids
+                 with con.execute(query, data) as cur:
+                     return list(cur)
+
+     def contexts_by_lineno(self, filename: str) -> dict[TLineNo, list[str]]:
+         """Get the contexts for each line in a file.
+
+         Returns:
+             A dict mapping line numbers to a list of context names.
+
+         .. versionadded:: 5.0
+
+         """
+         self._start_using()
+         with self._connect() as con:
+             file_id = self._file_id(filename)
+             if file_id is None:
+                 return {}
+
+             lineno_contexts_map = collections.defaultdict(set)
+             if self.has_arcs():
+                 query = """
+                     SELECT arc.fromno, arc.tono, context.context
+                     FROM arc, context
+                     WHERE arc.file_id = ? AND arc.context_id = context.id
+                     """
+                 data = [file_id]
+                 if self._query_context_ids is not None:
+                     ids_array = ", ".join("?" * len(self._query_context_ids))
+                     query += " AND arc.context_id IN (" + ids_array + ")"
+                     data += self._query_context_ids
+                 with con.execute(query, data) as cur:
+                     for fromno, tono, context in cur:
+                         if fromno > 0:
+                             lineno_contexts_map[fromno].add(context)
+                         if tono > 0:
+                             lineno_contexts_map[tono].add(context)
+             else:
+                 query = """
+                     SELECT l.numbits, c.context FROM line_bits l, context c
+                     WHERE l.context_id = c.id
+                     AND file_id = ?
+                     """
+                 data = [file_id]
+                 if self._query_context_ids is not None:
+                     ids_array = ", ".join("?" * len(self._query_context_ids))
+                     query += " AND l.context_id IN (" + ids_array + ")"
+                     data += self._query_context_ids
+                 with con.execute(query, data) as cur:
+                     for numbits, context in cur:
+                         for lineno in numbits_to_nums(numbits):
+                             lineno_contexts_map[lineno].add(context)
+
+         return {lineno: list(contexts) for lineno, contexts in lineno_contexts_map.items()}
+
+     @classmethod
+     def sys_info(cls) -> list[tuple[str, Any]]:
+         """Our information for `Coverage.sys_info`.
+
+         Returns a list of (key, value) pairs.
+
+         """
+         with SqliteDb(":memory:", debug=NoDebugging()) as db:
+             with db.execute("PRAGMA temp_store") as cur:
+                 temp_store = [row[0] for row in cur]
+             with db.execute("PRAGMA compile_options") as cur:
+                 copts = [row[0] for row in cur]
+             copts = textwrap.wrap(", ".join(copts), width=75)
+
+         return [
+             ("sqlite3_sqlite_version", sqlite3.sqlite_version),
+             ("sqlite3_temp_store", temp_store),
+             ("sqlite3_compile_options", copts),
+         ]
+
+
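Tying the query methods together, a hedged end-to-end sketch of reading an existing data file (using the default ".coverage" path; the output shape depends on whether branch data was recorded):

    from coverage.sqldata import CoverageData

    data = CoverageData()  # defaults to ".coverage"
    data.read()
    if data:               # as a boolean: is there any measured data at all?
        for fname in sorted(data.measured_files()):
            if data.has_arcs():
                print(fname, data.arcs(fname))
            else:
                print(fname, data.lines(fname))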
+ ASCII = string.ascii_letters + string.digits
+ NRAND = 6
+ NHASH = 10
+
+
+ def filename_suffix(suffix: str | bool | None) -> str | None:
+     """Compute a filename suffix for a data file.
+
+     If `suffix` is a string or None, simply return it. If `suffix` is True,
+     then build a suffix incorporating the hostname, process id, and a random
+     number.
+
+     Returns a string or None.
+
+     """
+     if suffix is True:
+         # If data_suffix was a simple true value, then make a suffix with
+         # plenty of distinguishing information. We do this here in
+         # `save()` at the last minute so that the pid will be correct even
+         # if the process forks.
+         die = random.Random(os.urandom(8))
+         rolls = "".join(die.choice(ASCII) for _ in range(NRAND))
+         host = socket.gethostname().replace(".", "_")
+         suffix = f"{host}.pid{os.getpid()}.X{rolls}x"
+     elif suffix is False:
+         suffix = None
+     return suffix
+
+
+ # A regex to match parallel file name suffixes, with named groups.
+ # We combine this with other regexes, so be careful with flags.
+ SUFFIX_PATTERN = rf"""(?x:  # re.VERBOSE, but only for part of the pattern
+     \.(?P<host>[^.]+)               # .hostname
+     \.pid(?P<pid>\d+)               # .pid1234
+     \.X(?P<random>\w{{{NRAND}}})x   # .Xabc123x
+     (\.H(?P<hash>\w{{{NHASH}}}h))?  # .Habcdef1234h (optional)
+     )"""
+
+
+ def filename_match(filename: str) -> re.Match[str] | None:
+     """Return a match object to pick apart the filename."""
+     return re.search(f"{SUFFIX_PATTERN}$", filename)
+
+
+ def good_filename_match(filename: str) -> re.Match[str]:
+     """Match the filename where we know it will match."""
+     m = filename_match(filename)
+     assert m is not None
+     return m
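A closing sketch of how these helpers round-trip a parallel-mode file name (host, pid, and random characters will differ on each run):

    from coverage.sqldata import filename_match, filename_suffix

    suffix = filename_suffix(True)  # e.g. "myhost.pid1234.Xab12cdx"
    m = filename_match(f".coverage.{suffix}")
    assert m is not None
    print(m.group("host"), m.group("pid"), m.group("random"))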