gha-utils 4.22.0__py3-none-any.whl → 4.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


gha_utils/__init__.py CHANGED
@@ -17,4 +17,4 @@
 
 from __future__ import annotations
 
-__version__ = "4.22.0"
+__version__ = "4.23.0"
gha_utils/cli.py CHANGED
@@ -23,7 +23,6 @@ import sys
 from collections import Counter
 from datetime import datetime
 from pathlib import Path
-from typing import IO
 
 from boltons.iterutils import unique
 from click_extra import (
@@ -47,6 +46,10 @@ from .mailmap import Mailmap
 from .metadata import NUITKA_BUILD_TARGETS, Dialects, Metadata
 from .test_plan import DEFAULT_TEST_PLAN, SkippedTest, parse_test_plan
 
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import IO
+
 
 def is_stdout(filepath: Path) -> bool:
     """Check if a file path is set to stdout.
@@ -303,11 +306,11 @@ def mailmap_sync(ctx, source, create_if_missing, destination_mailmap):
 
 @gha_utils.command(short_help="Run a test plan from a file against a binary")
 @option(
+    "--command",
     "--binary",
-    type=file_path(exists=True, executable=True, resolve_path=True),
     required=True,
-    metavar="FILE_PATH",
-    help="Path to the binary file to test.",
+    metavar="COMMAND",
+    help="Path to the binary file to test, or a command line to be executed.",
 )
 @option(
     "-F",
@@ -375,7 +378,7 @@ def mailmap_sync(ctx, source, create_if_missing, destination_mailmap):
     help="Print per-manager package statistics.",
 )
 def test_plan(
-    binary: Path,
+    command: str,
     plan_file: tuple[Path, ...] | None,
     plan_envvar: tuple[str, ...] | None,
     select_test: tuple[int, ...] | None,
@@ -422,7 +425,9 @@ def test_plan(
         try:
             logging.debug(f"Test case parameters: {test_case}")
             test_case.run_cli_test(
-                binary, additional_skip_platforms=skip_platform, default_timeout=timeout
+                command,
+                additional_skip_platforms=skip_platform,
+                default_timeout=timeout,
             )
         except SkippedTest as ex:
             counter["skipped"] += 1
@@ -430,8 +435,8 @@
         except Exception as ex:
             counter["failed"] += 1
             logging.error(f"Test {test_name} failed: {ex}")
-            if show_trace_on_error:
-                echo(test_case.execution_trace or "No execution trace available.")
+            if show_trace_on_error and test_case.execution_trace:
+                echo(test_case.execution_trace)
             if exit_on_error:
                 logging.debug("Don't continue testing, a failed test was found.")
                 sys.exit(1)
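
Note on the renamed flag: gha-utils declares it with click-extra's option() decorator, but the aliasing behaviour sketched below is plain Click. When an option is given several long declarations, they all map to a single parameter (here named command), so the callback argument becomes command while --binary keeps working as a backward-compatible alias. A minimal sketch with stock Click; the echo body is illustrative, not the real gha-utils command:

import click


@click.command()
@click.option(
    "--command",
    "--binary",  # Kept as an alias so existing invocations keep working.
    required=True,
    metavar="COMMAND",
    help="Path to the binary file to test, or a command line to be executed.",
)
def test_plan(command: str) -> None:
    # Both --command and --binary feed this single `command` parameter.
    click.echo(f"Would run: {command}")


if __name__ == "__main__":
    test_plan()  # e.g. `python demo.py --binary "mdedup --version"`
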
gha_utils/matrix.py CHANGED
@@ -19,11 +19,15 @@ from __future__ import annotations
 import itertools
 import json
 import logging
-from typing import Iterable, Iterator
 
 from boltons.dictutils import FrozenDict
 from boltons.iterutils import unique
 
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator
+
+
 RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
 
 
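
Note: cli.py, matrix.py, metadata.py and test_plan.py all switch from runtime typing imports to a module-level TYPE_CHECKING = False guard. The intent is the same as typing.TYPE_CHECKING, without importing typing (or a typing-only dependency) when the module actually runs; static type checkers still analyse the guarded block. Whether a given checker honours a locally defined constant rather than the one imported from typing is checker-specific, so treat this standalone sketch as illustrative only:

from __future__ import annotations  # Annotations stay strings at runtime.

TYPE_CHECKING = False
if TYPE_CHECKING:
    # Only read by static analysers; never executed, so no import cost.
    from collections.abc import Iterable


def total(numbers: Iterable[int]) -> int:
    # The annotation is not evaluated at runtime thanks to the __future__
    # import, so the deferred import above is sufficient.
    return sum(numbers)


print(total([1, 2, 3]))  # -> 6
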
gha_utils/metadata.py CHANGED
@@ -286,12 +286,12 @@ from operator import itemgetter
 from pathlib import Path
 from random import randint
 from re import escape
-from typing import Any, Final, cast
 
 from bumpversion.config import get_configuration  # type: ignore[import-untyped]
 from bumpversion.config.files import find_config_file  # type: ignore[import-untyped]
 from bumpversion.show import resolve_name  # type: ignore[import-untyped]
 from extra_platforms import is_github_ci
+from gitdb.exc import BadName  # type: ignore[import-untyped]
 from packaging.specifiers import SpecifierSet
 from packaging.version import Version
 from py_walk import get_parser_from_file
@@ -311,6 +311,11 @@ from wcmatch.glob import (
 
 from .matrix import Matrix
 
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Any, Final
+
+
 SHORT_SHA_LENGTH = 7
 """Default SHA length hard-coded to ``7``.
 
@@ -560,6 +565,59 @@ class Metadata:
         logging.debug(f"Number of stashes in repository: {count}")
         return count
 
+    def git_deepen(
+        self, commit_hash: str, max_attempts: int = 10, deepen_increment: int = 50
+    ) -> bool:
+        """Deepen a shallow clone until the provided ``commit_hash`` is found.
+
+        Progressively fetches more commits from the current repository until the
+        specified commit is found or max attempts is reached.
+
+        Returns ``True`` if the commit was found, ``False`` otherwise.
+        """
+        for attempt in range(max_attempts):
+            try:
+                _ = self.git.get_commit(commit_hash)
+                if attempt > 0:
+                    logging.info(
+                        f"Found commit {commit_hash} after {attempt} deepen "
+                        "operation(s)."
+                    )
+                return True
+            except (ValueError, BadName) as ex:
+                logging.debug(f"Commit {commit_hash} not found: {ex}")
+
+            current_depth = self.git.total_commits()
+
+            if attempt == max_attempts - 1:
+                # We've exhausted all attempts.
+                logging.error(
+                    f"Cannot find commit {commit_hash} in repository after "
+                    f"{max_attempts} deepen attempts. "
+                    f"Final depth is {current_depth} commits."
+                )
+                return False
+
+            logging.info(
+                f"Commit {commit_hash} not found at depth {current_depth}."
+            )
+            logging.info(
+                f"Deepening by {deepen_increment} commits (attempt "
+                f"{attempt + 1}/{max_attempts})..."
+            )
+
+            try:
+                self.git.repo.git.fetch(f"--deepen={deepen_increment}")
+                new_depth = self.git.total_commits()
+                logging.debug(
+                    f"Repository deepened successfully. New depth: {new_depth}"
+                )
+            except Exception as ex:
+                logging.error(f"Failed to deepen repository: {ex}")
+                return False
+
+        return False
+
     def commit_matrix(self, commits: Iterable[Commit] | None) -> Matrix | None:
         """Pre-compute a matrix of commits.
 
@@ -720,7 +778,10 @@ class Metadata:
     def event_sender_type(self) -> str | None:
         """Returns the type of the user that triggered the workflow run."""
         sender_type = self.github_context.get("event", {}).get("sender", {}).get("type")
-        return cast(str | None, sender_type)
+        if not sender_type:
+            return None
+        assert isinstance(sender_type, str)
+        return sender_type
 
     @cached_property
     def is_bot(self) -> bool:
@@ -778,7 +839,12 @@ class Metadata:
        ):
            base_ref = os.environ["GITHUB_BASE_REF"]
            assert base_ref
-            start = f"origin/{base_ref}"
+            assert (
+                self.github_context["event"]["pull_request"]["base"]["ref"] == base_ref
+            )
+            full_base_ref = f"origin/{base_ref}"
+            base_ref_sha = self.github_context["event"]["pull_request"]["base"]["sha"]
+            start = base_ref_sha
            # We need to checkout the HEAD commit instead of the artificial merge
            # commit introduced by the pull request.
            end = self.github_context["event"]["pull_request"]["head"]["sha"]
@@ -814,14 +880,8 @@ class Metadata:
        for commit_id in (start, end):
            if not commit_id:
                continue
-            try:
-                _ = self.git.get_commit(commit_id)
-            except ValueError:
-                logging.error(
-                    f"Cannot find commit {commit_id} in repository. "
-                    "Repository was probably not checked out with enough depth. "
-                    f"Current depth is {self.git.total_commits()}. "
-                )
+
+            if not self.git_deepen(commit_id):
                logging.warning(
                    "Skipping metadata extraction of the range of new commits."
                )
@@ -1144,6 +1204,7 @@ class Metadata:
     - ``--target-version py311``
     - ``--target-version py312``
     - ``--target-version py313``
+    - ``--target-version py314``
 
     As mentioned in Black usage, you should `include all Python versions that you
     want your code to run under
@@ -1573,7 +1634,7 @@ class Metadata:
            else:
                raise NotImplementedError(f"GitHub formatting for: {value!r}")
 
-        return cast(str, value)
+        return str(value)
 
    def dump(self, dialect: Dialects = Dialects.github) -> str:
        """Returns all metadata in the specified format.
gha_utils/test_plan.py CHANGED
@@ -17,13 +17,15 @@
 from __future__ import annotations
 
 import logging
+import os
 import re
 import shlex
 import sys
+from collections.abc import Sequence
 from dataclasses import asdict, dataclass, field
 from pathlib import Path
+from shutil import which
 from subprocess import TimeoutExpired, run
-from typing import Generator, Sequence
 
 import yaml
 from boltons.iterutils import flatten
@@ -33,7 +35,13 @@ from click_extra.testing import (
     regex_fullmatch_line_by_line,
     render_cli_run,
 )
-from extra_platforms import Group, _TNestedReferences, current_os
+from extra_platforms import Group, current_os
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from collections.abc import Generator
+
+    from extra_platforms._types import _TNestedReferences
 
 
 class SkippedTest(Exception):
@@ -42,6 +50,20 @@ class SkippedTest(Exception):
     pass
 
 
+def _split_args(cli: str) -> list[str]:
+    """Split a string or sequence of strings into a tuple of arguments.
+
+    .. todo::
+        Evaluate better Windows CLI parsing with:
+        `w32lex <https://github.com/maxpat78/w32lex>`_.
+    """
+    if sys.platform == "win32":
+        return cli.split()
+    # For Unix platforms, we have the dedicated shlex module.
+    else:
+        return shlex.split(cli)
+
+
 @dataclass(order=True)
 class CLITestCase:
     cli_parameters: tuple[str, ...] | str = field(default_factory=tuple)
@@ -106,13 +128,7 @@ class CLITestCase:
            # CLI parameters provided as a long string needs to be split so
            # that each argument is a separate item in the final tuple.
            if field_id == "cli_parameters":
-                # XXX Maybe we should rely on a library to parse them:
-                # https://github.com/maxpat78/w32lex
-                if sys.platform == "win32":
-                    field_data = field_data.split()
-                # For Unix platforms, we have the dedicated shlex module.
-                else:
-                    field_data = shlex.split(field_data)
+                field_data = _split_args(field_data)
            else:
                field_data = (field_data,)
 
@@ -151,16 +167,22 @@
 
    def run_cli_test(
        self,
-        binary: str | Path,
+        command: Path | str,
        additional_skip_platforms: _TNestedReferences | None,
        default_timeout: float | None,
    ):
        """Run a CLI command and check its output against the test case.
 
-        ..todo::
+        The provided ``command`` can be either:
+
+        - a path to a binary or script to execute;
+        - a command name to be searched in the ``PATH``,
+        - a command line with arguments to be parsed and executed by the shell.
+
+        .. todo::
            Add support for environment variables.
 
-        ..todo::
+        .. todo::
            Add support for proper mixed <stdout>/<stderr> stream as a single,
            intertwined output.
        """
@@ -177,7 +199,28 @@
        logging.info(f"Set default test case timeout to {default_timeout} seconds")
        self.timeout = default_timeout
 
-        clean_args = args_cleanup(binary, self.cli_parameters)
+        # Separate the command into binary file path and arguments.
+        args = []
+        if isinstance(command, str):
+            args = _split_args(command)
+            command = args[0]
+            args = args[1:]
+            # Ensure the command to execute is in PATH.
+            if not which(command):
+                raise FileNotFoundError(f"Command not found in PATH: {command!r}")
+            # Resolve the command to an absolute path.
+            command = which(command)  # type: ignore[assignment]
+            assert command is not None
+
+        # Check the binary exists and is executable.
+        binary = Path(command).resolve()
+        assert binary.exists()
+        assert binary.is_file()
+        assert os.access(binary, os.X_OK)
+
+        clean_args = args_cleanup(binary, args, self.cli_parameters)
+        logging.info(f"Run CLI command: {' '.join(clean_args)}")
+
        try:
            result = run(
                clean_args,
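
Note: run_cli_test() now accepts a bare command name or a full command line instead of a pre-validated binary path, and the PATH lookup that used to live in the --binary option's file_path type moved into the method itself. The resolution logic can be reproduced standalone with the standard library; the helper name below is a stand-in for illustration, not part of gha-utils:

import os
import shlex
import sys
from pathlib import Path
from shutil import which


def resolve_command(command: str) -> tuple[Path, list[str]]:
    """Split a command line and resolve its program to an absolute executable."""
    # shlex implements POSIX quoting rules only, hence the naive whitespace
    # split on Windows (the same trade-off _split_args makes above).
    parts = command.split() if sys.platform == "win32" else shlex.split(command)
    program, args = parts[0], parts[1:]
    found = which(program)
    if not found:
        raise FileNotFoundError(f"Command not found in PATH: {program!r}")
    binary = Path(found).resolve()
    assert binary.is_file() and os.access(binary, os.X_OK)
    return binary, args


# Example: resolve_command("git --version") -> (Path(".../git"), ["--version"])
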
gha_utils-4.23.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gha-utils
-Version: 4.22.0
+Version: 4.23.0
 Summary: ⚙️ CLI helpers for GitHub Actions + reuseable workflows
 Author-email: Kevin Deldycke <kevin@deldycke.com>
 Project-URL: Homepage, https://github.com/kdeldycke/workflows
@@ -48,18 +48,18 @@ Classifier: Topic :: Utilities
 Classifier: Typing :: Typed
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
-Requires-Dist: boltons>=24.0.0
+Requires-Dist: boltons>=25.0.0
 Requires-Dist: bump-my-version<1.1.1,>=0.32.2
-Requires-Dist: click-extra~=6.0.2
-Requires-Dist: extra-platforms~=3.2.0
-Requires-Dist: packaging~=25.0
-Requires-Dist: py-walk~=0.3.3
-Requires-Dist: PyDriller~=2.6
-Requires-Dist: pyproject-metadata~=0.9.0
-Requires-Dist: pyyaml~=6.0.0
-Requires-Dist: wcmatch>=8.5
+Requires-Dist: click-extra>=6.0.3
+Requires-Dist: extra-platforms>=4.0.0
+Requires-Dist: packaging>=25.0
+Requires-Dist: py-walk>=0.3.3
+Requires-Dist: PyDriller>=2.6
+Requires-Dist: pyproject-metadata>=0.9.0
+Requires-Dist: pyyaml>=6.0.3
+Requires-Dist: wcmatch>=10.0
 Provides-Extra: test
-Requires-Dist: coverage[toml]~=7.10.0; extra == "test"
+Requires-Dist: coverage[toml]~=7.11.0; extra == "test"
 Requires-Dist: pytest~=8.4.0; extra == "test"
 Requires-Dist: pytest-cases~=3.9.1; extra == "test"
 Requires-Dist: pytest-cov~=7.0.0; extra == "test"
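
Note: the runtime requirements move from compatible-release pins (~=) to plain lower bounds (>=), which lets future major versions of the dependencies install without waiting for a new gha-utils release. The practical difference is easy to check with packaging, which is already a dependency:

from packaging.specifiers import SpecifierSet

# Old style: compatible release, the next major version is rejected.
assert not SpecifierSet("~=3.2.0").contains("4.0.0")  # e.g. extra-platforms 4.x
# New style: lower bound only, future majors are accepted.
assert SpecifierSet(">=4.0.0").contains("5.0.0")
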
@@ -0,0 +1,14 @@
+gha_utils/__init__.py,sha256=2-HGMEacBozPv5vkkzNBw1QXVNpQS6cGdU_jNj8-AcU,866
+gha_utils/__main__.py,sha256=Dck9BjpLXmIRS83k0mghAMcYVYiMiFLltQdfRuMSP_Q,1703
+gha_utils/changelog.py,sha256=JR7iQrWjLoIOpVNe6iXQSyEii82_hM_zrYpR7QO_Uxo,5777
+gha_utils/cli.py,sha256=3mSO9qoVOkKGZARH6CSjTsyWpSodPd64uDZHHazzd_E,15289
+gha_utils/mailmap.py,sha256=oQt3m0hj-mwg7WxsuJQXWeQTFjlkqTgRNjYsUv7dlYQ,7013
+gha_utils/matrix.py,sha256=K0J-OHHEXibBTRmuCmyvscSjRGPPsGRuwp9rKnlj8KI,12486
+gha_utils/metadata.py,sha256=pUuaJPJrKgjTjbh2v2VwDGf8mkmmE9mfsCXUaWaMOBg,61750
+gha_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gha_utils/test_plan.py,sha256=-Uwe1Y5xCNHuXERzGsZV3aaZvbvITNR1nxVFbIB3c1s,14573
+gha_utils-4.23.0.dist-info/METADATA,sha256=QaW2CqLZ-eHrSg5ralL0eOI_jBejUrrFCsE0NmypzGY,21421
+gha_utils-4.23.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+gha_utils-4.23.0.dist-info/entry_points.txt,sha256=8bJOwQYf9ZqsLhBR6gUCzvwLNI9f8tiiBrJ3AR0EK4o,54
+gha_utils-4.23.0.dist-info/top_level.txt,sha256=C94Blb61YkkyPBwCdM3J_JPDjWH0lnKa5nGZeZ5M6yE,10
+gha_utils-4.23.0.dist-info/RECORD,,
@@ -1,14 +0,0 @@
-gha_utils/__init__.py,sha256=gNHVP8cQ0XbGUYqL65Qc0zhxC3_Wstfs0IOorioqYeQ,866
-gha_utils/__main__.py,sha256=Dck9BjpLXmIRS83k0mghAMcYVYiMiFLltQdfRuMSP_Q,1703
-gha_utils/changelog.py,sha256=JR7iQrWjLoIOpVNe6iXQSyEii82_hM_zrYpR7QO_Uxo,5777
-gha_utils/cli.py,sha256=hwvjKEUvctmtJL2aliV0dytF67uHjGVuUKv17j_lMlg,15235
-gha_utils/mailmap.py,sha256=oQt3m0hj-mwg7WxsuJQXWeQTFjlkqTgRNjYsUv7dlYQ,7013
-gha_utils/matrix.py,sha256=eBAU3bKrCif7FQ74EWhK_AwDcNUkGp8Om1NtlFdYJpI,12431
-gha_utils/metadata.py,sha256=NHyBZ5rAI1DNvpfNC5PMlagTYL-rCSz5KUghldGCkMA,59491
-gha_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gha_utils/test_plan.py,sha256=NDmh1uuKPyqswIRykKpE7sZ1W0hcVBlYSwsQ3F21HUQ,13336
-gha_utils-4.22.0.dist-info/METADATA,sha256=tNtxQmSSLnFz-0fkHzy30zKe2fDQ_ylpE4_5UkdRocY,21420
-gha_utils-4.22.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-gha_utils-4.22.0.dist-info/entry_points.txt,sha256=8bJOwQYf9ZqsLhBR6gUCzvwLNI9f8tiiBrJ3AR0EK4o,54
-gha_utils-4.22.0.dist-info/top_level.txt,sha256=C94Blb61YkkyPBwCdM3J_JPDjWH0lnKa5nGZeZ5M6yE,10
-gha_utils-4.22.0.dist-info/RECORD,,