gha-utils 4.17.9__tar.gz → 4.18.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of gha-utils has been flagged as potentially problematic.

Files changed (24)
  1. {gha_utils-4.17.9 → gha_utils-4.18.1}/PKG-INFO +3 -2
  2. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/__init__.py +1 -1
  3. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/cli.py +2 -2
  4. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/matrix.py +33 -25
  5. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/metadata.py +139 -33
  6. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/PKG-INFO +3 -2
  7. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/requires.txt +2 -1
  8. {gha_utils-4.17.9 → gha_utils-4.18.1}/pyproject.toml +5 -4
  9. {gha_utils-4.17.9 → gha_utils-4.18.1}/tests/test_matrix.py +12 -11
  10. gha_utils-4.18.1/tests/test_metadata.py +333 -0
  11. gha_utils-4.17.9/tests/test_metadata.py +0 -214
  12. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/__main__.py +0 -0
  13. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/changelog.py +0 -0
  14. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/mailmap.py +0 -0
  15. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/py.typed +0 -0
  16. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils/test_plan.py +0 -0
  17. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/SOURCES.txt +0 -0
  18. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/dependency_links.txt +0 -0
  19. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/entry_points.txt +0 -0
  20. {gha_utils-4.17.9 → gha_utils-4.18.1}/gha_utils.egg-info/top_level.txt +0 -0
  21. {gha_utils-4.17.9 → gha_utils-4.18.1}/readme.md +0 -0
  22. {gha_utils-4.17.9 → gha_utils-4.18.1}/setup.cfg +0 -0
  23. {gha_utils-4.17.9 → gha_utils-4.18.1}/tests/test_changelog.py +0 -0
  24. {gha_utils-4.17.9 → gha_utils-4.18.1}/tests/test_mailmap.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: gha-utils
- Version: 4.17.9
+ Version: 4.18.1
  Summary: ⚙️ CLI helpers for GitHub Actions + reuseable workflows
  Author-email: Kevin Deldycke <kevin@deldycke.com>
  Project-URL: Homepage, https://github.com/kdeldycke/workflows
@@ -48,13 +48,14 @@ Requires-Dist: boltons>=24.0.0
  Requires-Dist: bump-my-version<1.1.1,>=0.32.2
  Requires-Dist: click-extra~=5.0.2
  Requires-Dist: extra-platforms~=3.2.0
+ Requires-Dist: gitignore-parser~=0.1.12
  Requires-Dist: packaging~=25.0
  Requires-Dist: PyDriller~=2.6
  Requires-Dist: pyproject-metadata~=0.9.0
  Requires-Dist: pyyaml~=6.0.0
  Requires-Dist: wcmatch>=8.5
  Provides-Extra: test
- Requires-Dist: coverage[toml]~=7.9.1; extra == "test"
+ Requires-Dist: coverage[toml]~=7.10.0; extra == "test"
  Requires-Dist: pytest~=8.4.0; extra == "test"
  Requires-Dist: pytest-cases~=3.9.1; extra == "test"
  Requires-Dist: pytest-cov~=6.2.1; extra == "test"
@@ -17,4 +17,4 @@

  from __future__ import annotations

- __version__ = "4.17.9"
+ __version__ = "4.18.1"
@@ -116,8 +116,8 @@ def gha_utils():
  )
  @option(
  "--format",
- type=Choice(tuple(item.value for item in Dialects), case_sensitive=False),
- default="github",
+ type=Choice(Dialects, case_sensitive=False), # type: ignore[arg-type]
+ default=Dialects.github,
  help="Rendering format of the metadata.",
  )
  @option(
@@ -27,7 +27,7 @@ from boltons.iterutils import unique
  RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]


- class Matrix(FrozenDict):
+ class Matrix:
  """A matrix as defined by GitHub's actions workflows.

  See GitHub official documentation on `how-to implement variations of jobs in a
@@ -47,35 +47,43 @@ class Matrix(FrozenDict):
  matrix.
  """

- # Tuples are used to keep track of the insertion order and force immutability.
- include: tuple[dict[str, str], ...] = tuple()
- exclude: tuple[dict[str, str], ...] = tuple()
+ def __init__(self, *args, **kwargs):
+ self.variations: dict[str, tuple[str, ...]] = {}
+
+ # Tuples are used to keep track of the insertion order and force immutability.
+ self.include: tuple[dict[str, str], ...] = tuple()
+ self.exclude: tuple[dict[str, str], ...] = tuple()
+
+ self._job_counter = None

  def matrix(
  self, ignore_includes: bool = False, ignore_excludes: bool = False
- ) -> dict[str, str]:
+ ) -> FrozenDict[str, str]:
  """Returns a copy of the matrix.

  The special ``include`` and ``excludes`` directives will be added by default.
  You can selectively ignore them by passing the corresponding boolean parameters.
  """
- dict_copy = dict(self)
+ dict_copy = self.variations.copy()
  if not ignore_includes and self.include:
- dict_copy["include"] = self.include
+ dict_copy["include"] = self.include # type: ignore[assignment]
  if not ignore_excludes and self.exclude:
- dict_copy["exclude"] = self.exclude
- return dict_copy
+ dict_copy["exclude"] = self.exclude # type: ignore[assignment]
+ return FrozenDict(dict_copy)

  def __repr__(self) -> str:
- return (
- f"<{self.__class__.__name__}: {super(FrozenDict, self).__repr__()}; "
- f"include={self.include}; exclude={self.exclude}>"
- )
+ return f"<{self.__class__.__name__}: {self.matrix()}>"

  def __str__(self) -> str:
  """Render matrix as a JSON string."""
  return json.dumps(self.matrix())

+ def __getitem__(self, key: str) -> tuple[str, ...]:
+ """Returns the values of a variation by its ID."""
+ if key in self.variations:
+ return self.variations[key]
+ raise KeyError(f"Variation {key} not found in matrix")
+
  @staticmethod
  def _check_ids(*var_ids: str) -> None:
  for var_id in var_ids:
@@ -89,8 +97,8 @@ class Matrix(FrozenDict):
  if any(type(v) is not str for v in values):
  raise ValueError(f"Only strings are accepted in {values}")
  # Extend variation with values, and deduplicate them along the way.
- var_values = list(self.get(variation_id, [])) + list(values)
- super(FrozenDict, self).__setitem__(variation_id, tuple(unique(var_values)))
+ var_values = list(self.variations.get(variation_id, [])) + list(values)
+ self.variations[variation_id] = tuple(unique(var_values))

  def _add_and_dedup_dicts(
  self, *new_dicts: dict[str, str]
@@ -123,9 +131,9 @@ class Matrix(FrozenDict):
  passing the corresponding ``with_matrix``, ``with_includes`` and
  ``with_excludes`` boolean filter parameters.
  """
- variations = {}
+ all_variations = {}
  if with_matrix:
- variations = {k: list(v) for k, v in self.items()}
+ all_variations = {k: list(v) for k, v in self.variations.items()}

  for expand, directives in (
  (with_includes, self.include),
@@ -134,9 +142,9 @@ class Matrix(FrozenDict):
  if expand:
  for value in directives:
  for k, v in value.items():
- variations.setdefault(k, []).append(v)
+ all_variations.setdefault(k, []).append(v)

- return {k: tuple(unique(v)) for k, v in variations.items()}
+ return {k: tuple(unique(v)) for k, v in all_variations.items()}

  def product(
  self, with_includes: bool = False, with_excludes: bool = False
@@ -148,17 +156,17 @@ class Matrix(FrozenDict):

  Respects the order of variations and their values.
  """
- variations = self.all_variations(
+ all_variations = self.all_variations(
  with_includes=with_includes, with_excludes=with_excludes
  )
- if not variations:
+ if not all_variations:
  return
  yield from map(
  dict,
  itertools.product(
  *(
  tuple((variant_id, v) for v in variations)
- for variant_id, variations in variations.items()
+ for variant_id, variations in all_variations.items()
  )
  ),
  )
@@ -187,11 +195,11 @@ class Matrix(FrozenDict):
  self.all_variations(
  with_matrix=False, with_includes=True, with_excludes=True
  )
- ).difference(self)
+ ).difference(self.variations)
  if unreferenced_keys:
  raise ValueError(
  f"Matrix exclude keys {list(unreferenced_keys)} does not match any "
- f"{list(self)} key within the matrix"
+ f"{self.variations.keys()} key within the matrix"
  )

  # Reset the number of combinations.
@@ -202,7 +210,7 @@ class Matrix(FrozenDict):

  # The matrix is empty, none of the include directive will match, so condider all
  # directives as un-applicable.
- if not self:
+ if not self.variations:
  leftover_includes = list(self.include)

  # Search for include directives that matches the original matrix variations
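
For context, the matrix.py changes above drop the FrozenDict base class and store variations on a dedicated attribute. A minimal sketch of the resulting API, based only on the calls exercised by tests/test_matrix.py in this release:

```python
from gha_utils.matrix import Matrix

matrix = Matrix()
matrix.add_variation("os", ["ubuntu-24.04", "macos-15"])
matrix.add_variation("os", ["ubuntu-24.04", "windows-2025"])  # Duplicates are dropped.

# Variations now live on the .variations attribute instead of the mapping itself.
assert matrix.variations == {"os": ("ubuntu-24.04", "macos-15", "windows-2025")}
assert matrix["os"] == ("ubuntu-24.04", "macos-15", "windows-2025")

# matrix() returns an immutable FrozenDict snapshot; str() renders it as JSON.
print(matrix.matrix())
print(str(matrix))
```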
@@ -156,6 +156,7 @@ import json
  import logging
  import os
  import re
+ import sys
  import tomllib
  from collections.abc import Iterable
  from enum import StrEnum
@@ -164,8 +165,9 @@ from operator import itemgetter
  from pathlib import Path
  from random import randint
  from re import escape
- from typing import Any, Final, Iterator, cast
+ from typing import Any, Final, cast

+ import gitignore_parser
  from bumpversion.config import get_configuration # type: ignore[import-untyped]
  from bumpversion.config.files import find_config_file # type: ignore[import-untyped]
  from bumpversion.show import resolve_name # type: ignore[import-untyped]
@@ -196,6 +198,7 @@ SHORT_SHA_LENGTH = 7
  depends on the size of the repository.
  """

+ GITIGNORE_PATH = Path(".gitignore")

  NUITKA_BUILD_TARGETS = {
  "linux-arm64": {
@@ -315,7 +318,7 @@ WorkflowEvent = StrEnum(
  """


- Dialects = StrEnum("Dialects", ("github", "plain"))
+ Dialects = StrEnum("Dialects", ("github", "json"))
  """Dialects in which metadata can be formatted to."""


@@ -347,6 +350,75 @@ MYPY_VERSION_MIN: Final = (3, 8)
  """


+ # XXX Patch gitignore-parser to support Windows paths. Refs:
+ # https://github.com/mherrmann/gitignore_parser/issues/60
+ # https://github.com/mherrmann/gitignore_parser/pull/61
+ # XXX In the future, replace this with wcmatch once it supports gitignore files:
+ # https://github.com/facelessuser/wcmatch/issues/226
+
+
+ _OriginalIgnoreRule = gitignore_parser.IgnoreRule
+
+
+ class PatchedIgnoreRule(_OriginalIgnoreRule): # type: ignore[misc,valid-type]
+ """Patch version of ``IgnoreRule`` to support Windows paths.
+
+ Taken from: https://github.com/mherrmann/gitignore_parser/pull/61/files
+ """
+
+ @staticmethod
+ def _count_trailing_symbol(symbol: str, text: str) -> int:
+ """Count the number of trailing characters in a string."""
+ count = 0
+ for char in reversed(str(text)):
+ if char == symbol:
+ count += 1
+ else:
+ break
+ return count
+
+ def match(self, abs_path: str | Path) -> bool:
+ matched = False
+ if self.base_path:
+ rel_path = (
+ gitignore_parser._normalize_path(abs_path)
+ .relative_to(self.base_path)
+ .as_posix()
+ )
+ else:
+ rel_path = gitignore_parser._normalize_path(abs_path).as_posix()
+ # Path() strips the trailing following symbols on windows, so we need to
+ # preserve it: ' ', '.'
+ if sys.platform.startswith("win"):
+ rel_path += " " * self._count_trailing_symbol(" ", abs_path)
+ rel_path += "." * self._count_trailing_symbol(".", abs_path)
+ # Path() strips the trailing slash, so we need to preserve it
+ # in case of directory-only negation
+ if self.negation and type(abs_path) is str and abs_path[-1] == "/":
+ rel_path += "/"
+ if rel_path.startswith("./"):
+ rel_path = rel_path[2:]
+ if re.search(self.regex, rel_path):
+ matched = True
+ return matched
+
+
+ gitignore_parser.IgnoreRule = PatchedIgnoreRule
+
+
+ class JSONMetadata(json.JSONEncoder):
+ """Custom JSON encoder for metadata serialization."""
+
+ def default(self, o: Any) -> Any:
+ if isinstance(o, Matrix):
+ return o.matrix()
+
+ if isinstance(o, Path):
+ return str(o)
+
+ return super().default(o)
+
+
  class Metadata:
  """Metadata class."""

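
For context, the new JSONMetadata encoder above is what lets the json dialect (used further down in dump()) serialize Matrix and Path values. A self-contained sketch of the same json.JSONEncoder.default() hook, using a Path-only stand-in rather than the package's actual classes:

```python
import json
from pathlib import Path


class DemoEncoder(json.JSONEncoder):
    """default() is only called for objects json cannot serialize natively."""

    def default(self, o):
        if isinstance(o, Path):
            return str(o)  # Paths are rendered as plain strings.
        return super().default(o)  # Anything else still raises TypeError.


# The real encoder additionally converts Matrix instances via their .matrix() snapshot.
print(json.dumps({"readme": Path("readme.md")}, cls=DemoEncoder, indent=2))
```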
@@ -577,11 +649,15 @@ class Metadata:
  events.

  .. seealso::
-
  - https://stackoverflow.com/a/67204539
  - https://stackoverflow.com/a/62953566
  - https://stackoverflow.com/a/61861763

+ .. seealso::
+ Pull request events on GitHub are a bit complex, see: `The Many SHAs of a
+ GitHub Pull Request
+ <https://www.kenmuse.com/blog/the-many-shas-of-a-github-pull-request/>`_.
+
  .. todo::
  Refactor so we can get rid of ``self.github_context``. Maybe there's enough
  metadata lying around in the environment variables that we can inspect the
@@ -642,11 +718,7 @@ class Metadata:
  @cached_property
  def new_commits_hash(self) -> tuple[str, ...] | None:
  """List all hashes of new commits."""
- return (
- cast(tuple[str, ...], self.new_commits_matrix["commit"])
- if self.new_commits_matrix
- else None
- )
+ return self.new_commits_matrix["commit"] if self.new_commits_matrix else None

  @cached_property
  def release_commits(self) -> tuple[Commit, ...] | None:
@@ -680,13 +752,16 @@ class Metadata:
  def release_commits_hash(self) -> tuple[str, ...] | None:
  """List all hashes of release commits."""
  return (
- cast(tuple[str, ...], self.release_commits_matrix["commit"])
+ self.release_commits_matrix["commit"]
  if self.release_commits_matrix
  else None
  )

- @staticmethod
- def glob_files(*patterns: str) -> Iterator[Path]:
+ @cached_property
+ def gitignore_exists(self) -> bool:
+ return GITIGNORE_PATH.is_file()
+
+ def glob_files(self, *patterns: str) -> list[Path]:
  """Return all file path matching the ``patterns``.

  Patterns are glob patterns supporting ``**`` for recursive search, and ``!``
@@ -695,46 +770,77 @@ class Metadata:
  All directories are traversed, whether they are hidden (i.e. starting with a
  dot ``.``) or not, including symlinks.

- Returns both hidden and non-hidden files, but no directories.
+ Skips:
+
+ - files which does not exists
+ - directories
+ - broken symlinks
+ - files matching patterns specified by ``.gitignore`` file
+
+ Returns both hidden and non-hidden files.

  All files are normalized to their absolute path, so that duplicates produced by
  symlinks are ignored.

- Files that doesn't exist and broken symlinks are skipped.
+ File path are returned as relative to the current working directory if
+ possible, or as absolute path otherwise.
+
+ The resulting list of file paths is sorted.
  """
+ current_dir = Path.cwd()
  seen = set()
+
+ # If the .gitignore file exists, we parse it to filter out ignored files.
+ gitignore = None
+ if self.gitignore_exists:
+ logging.debug(f"Load {GITIGNORE_PATH} to filter out ignored files.")
+ gitignore = gitignore_parser.parse_gitignore(GITIGNORE_PATH)
+
  for file_path in iglob(
  patterns,
  flags=NODIR | GLOBSTAR | DOTGLOB | GLOBTILDE | BRACE | FOLLOW | NEGATE,
  ):
  # Normalize the path to avoid duplicates.
  try:
- normalized_path = Path(file_path).resolve(strict=True)
- # Skip files that do not exist or broken symlinks.
+ absolute_path = Path(file_path).resolve(strict=True)
+ # Skip files that do not exists and broken symlinks.
  except OSError:
- logging.warning(
- f"Skipping non-existing file / broken symlink: {file_path}"
- )
+ logging.warning(f"Skip non-existing file / broken symlink: {file_path}")
  continue
+
+ # Simplify the path by trying to make it relative to the current location.
+ normalized_path = absolute_path
+ try:
+ normalized_path = absolute_path.relative_to(current_dir)
+ except ValueError:
+ # If the file is not relative to the current directory, keep its
+ # absolute path.
+ logging.debug(
+ f"{absolute_path} is not relative to {current_dir}. "
+ "Keeping the path absolute."
+ )
+
  if normalized_path in seen:
- logging.debug(f"Skipping duplicate file: {normalized_path}")
+ logging.debug(f"Skip duplicate file: {normalized_path}")
  continue
- seen.add(normalized_path)
- yield normalized_path

- @cached_property
- def gitignore_exists(self) -> bool:
- return Path(".gitignore").is_file()
+ # Skip files that are ignored by .gitignore.
+ if gitignore and gitignore(file_path):
+ logging.debug(f"Skip file matching {GITIGNORE_PATH}: {file_path}")
+ continue
+
+ seen.add(normalized_path)
+ return sorted(seen)

  @cached_property
- def python_files(self) -> Iterator[Path]:
+ def python_files(self) -> list[Path]:
  """Returns a list of python files."""
- yield from self.glob_files("**/*.py", "!.venv/**")
+ return self.glob_files("**/*.py", "!.venv/**")

  @cached_property
- def doc_files(self) -> Iterator[Path]:
+ def doc_files(self) -> list[Path]:
  """Returns a list of doc files."""
- yield from self.glob_files("**/*.{md,markdown,rst,tex}", "!.venv/**")
+ return self.glob_files("**/*.{md,markdown,rst,tex}", "!.venv/**")

  @property
  def is_python_project(self):
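
For context, the rewritten glob_files() above chains wcmatch globbing with the (patched) gitignore-parser matcher. A rough standalone sketch of the same filtering flow, assuming a .gitignore sits in the current working directory as the helper expects:

```python
from pathlib import Path

import gitignore_parser
from wcmatch.glob import (
    BRACE, DOTGLOB, FOLLOW, GLOBSTAR, GLOBTILDE, NEGATE, NODIR, iglob,
)

# parse_gitignore() returns a callable: matcher(path) is True if the path is ignored.
matcher = gitignore_parser.parse_gitignore(Path(".gitignore"))

files = set()
for file_path in iglob(
    ("**/*.py", "!.venv/**"),  # "!" negates a pattern, as in python_files above.
    flags=NODIR | GLOBSTAR | DOTGLOB | GLOBTILDE | BRACE | FOLLOW | NEGATE,
):
    if matcher(file_path):
        continue  # Drop anything excluded by the .gitignore rules.
    files.add(Path(file_path))

print(sorted(files))
```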
@@ -844,7 +950,7 @@ class Metadata:
  return None

  @cached_property
- def blacken_docs_params(self) -> tuple[str, ...] | None:
+ def blacken_docs_params(self) -> str | None:
  """Generates ``blacken-docs`` parameters.

  `Blacken-docs reuses Black's --target-version pyXY parameters
@@ -867,7 +973,7 @@ class Metadata:
  <https://github.com/psf/black/issues/751#issuecomment-473066811>`_.
  """
  if self.py_target_versions:
- return tuple(
+ return " ".join(
  f"--target-version py{version.major}{version.minor}"
  for version in self.py_target_versions
  )
@@ -1185,7 +1291,7 @@ class Metadata:
  for variations in matrix.solve():
  # We will re-attach back this binary name to the with an include directive,
  # so we need a copy the main variants it corresponds to.
- bin_name_include = {k: variations[k] for k in matrix}
+ bin_name_include = {k: variations[k] for k in matrix.variations}
  bin_name_include["bin_name"] = (
  "{cli_id}-{target}-{short_sha}.{extension}"
  ).format(**variations)
@@ -1316,8 +1422,8 @@ class Metadata:
  delimiter = f"ghadelimiter_{randint(10**8, (10**9) - 1)}"
  content += f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
  else:
- assert dialect == Dialects.plain
- content = repr(metadata)
+ assert dialect == Dialects.json
+ content = json.dumps(metadata, cls=JSONMetadata, indent=2)

  logging.debug(f"Formatted metadata:\n{content}")

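
For the github dialect above, every multi-line value is wrapped in the heredoc-style delimiter syntax expected by $GITHUB_OUTPUT, with a random ghadelimiter_… token guarding against collisions with the value itself. A sketch of the emitted block, using a hypothetical key, value and delimiter for illustration:

```python
# Mirrors the f-string in the hunk above; the key, value and delimiter are made up.
env_name = "release_notes"
env_value = "### Changes\n\n> [!IMPORTANT]\n> This version is not released yet..."
delimiter = "ghadelimiter_123456789"  # randint(10**8, (10**9) - 1) in the real code.

content = f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
print(content, end="")
# release_notes<<ghadelimiter_123456789
# ### Changes
# ...
# ghadelimiter_123456789
```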
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: gha-utils
- Version: 4.17.9
+ Version: 4.18.1
  Summary: ⚙️ CLI helpers for GitHub Actions + reuseable workflows
  Author-email: Kevin Deldycke <kevin@deldycke.com>
  Project-URL: Homepage, https://github.com/kdeldycke/workflows
@@ -48,13 +48,14 @@ Requires-Dist: boltons>=24.0.0
  Requires-Dist: bump-my-version<1.1.1,>=0.32.2
  Requires-Dist: click-extra~=5.0.2
  Requires-Dist: extra-platforms~=3.2.0
+ Requires-Dist: gitignore-parser~=0.1.12
  Requires-Dist: packaging~=25.0
  Requires-Dist: PyDriller~=2.6
  Requires-Dist: pyproject-metadata~=0.9.0
  Requires-Dist: pyyaml~=6.0.0
  Requires-Dist: wcmatch>=8.5
  Provides-Extra: test
- Requires-Dist: coverage[toml]~=7.9.1; extra == "test"
+ Requires-Dist: coverage[toml]~=7.10.0; extra == "test"
  Requires-Dist: pytest~=8.4.0; extra == "test"
  Requires-Dist: pytest-cases~=3.9.1; extra == "test"
  Requires-Dist: pytest-cov~=6.2.1; extra == "test"
@@ -2,6 +2,7 @@ boltons>=24.0.0
  bump-my-version<1.1.1,>=0.32.2
  click-extra~=5.0.2
  extra-platforms~=3.2.0
+ gitignore-parser~=0.1.12
  packaging~=25.0
  PyDriller~=2.6
  pyproject-metadata~=0.9.0
@@ -9,7 +10,7 @@ pyyaml~=6.0.0
  wcmatch>=8.5

  [test]
- coverage[toml]~=7.9.1
+ coverage[toml]~=7.10.0
  pytest~=8.4.0
  pytest-cases~=3.9.1
  pytest-cov~=6.2.1
@@ -1,7 +1,7 @@
  [project]
  # Docs: https://packaging.python.org/en/latest/guides/writing-pyproject-toml/
  name = "gha-utils"
- version = "4.17.9"
+ version = "4.18.1"
  # Python versions and their status: https://devguide.python.org/versions/
  requires-python = ">= 3.11"
  description = "⚙️ CLI helpers for GitHub Actions + reuseable workflows"
@@ -76,6 +76,7 @@ dependencies = [
  "bump-my-version >= 0.32.2, < 1.1.1",
  "click-extra ~= 5.0.2",
  "extra-platforms ~= 3.2.0",
+ "gitignore-parser ~= 0.1.12",
  "packaging ~= 25.0",
  "PyDriller ~= 2.6",
  "pyproject-metadata ~= 0.9.0",
@@ -86,7 +87,7 @@ dependencies = [

  [project.optional-dependencies]
  test = [
- "coverage [toml] ~= 7.9.1",
+ "coverage [toml] ~= 7.10.0",
  "pytest ~= 8.4.0",
  # More pytest plugins at: https://docs.pytest.org/en/latest/reference/plugin_list.html
  "pytest-cases ~= 3.9.1",
@@ -119,7 +120,7 @@ pretty = true

  [[tool.mypy.overrides]]
  ignore_missing_imports = true
- module = ["boltons.*"]
+ module = ["boltons.*", "gitignore_parser.*"]

  [tool.pytest.ini_options]
  # https://docs.pytest.org/en/latest/customize.html#pyproject-toml
@@ -137,7 +138,7 @@ addopts = [
  xfail_strict = true

  [tool.bumpversion]
- current_version = "4.17.9"
+ current_version = "4.18.1"
  allow_dirty = true
  ignore_missing_files = true

@@ -26,7 +26,11 @@ from gha_utils.matrix import Matrix
  def test_matrix():
  matrix = Matrix()

- assert matrix == dict()
+ assert isinstance(matrix, Matrix)
+ assert not isinstance(matrix, dict)
+
+ assert hasattr(matrix, "variations")
+ assert isinstance(matrix.variations, dict)

  assert hasattr(matrix, "include")
  assert hasattr(matrix, "exclude")
@@ -34,23 +38,20 @@ def test_matrix():
  assert matrix.exclude == tuple()

  matrix.add_variation("foo", ["a", "b", "c"])
- assert matrix == {"foo": ("a", "b", "c")}
+ assert matrix.variations == {"foo": ("a", "b", "c")}
  assert not matrix.include
  assert not matrix.exclude

  # Natural deduplication.
  matrix.add_variation("foo", ["a", "a", "d"])
- assert matrix == {"foo": ("a", "b", "c", "d")}
+ assert matrix.variations == {"foo": ("a", "b", "c", "d")}
  assert not matrix.include
  assert not matrix.exclude

  assert matrix.matrix() == {"foo": ("a", "b", "c", "d")}

  assert str(matrix) == '{"foo": ["a", "b", "c", "d"]}'
- assert (
- repr(matrix)
- == "<Matrix: {'foo': ('a', 'b', 'c', 'd')}; include=(); exclude=()>"
- )
+ assert repr(matrix) == "<Matrix: FrozenDict({'foo': ('a', 'b', 'c', 'd')})>"

  with pytest.raises(ValueError):
  matrix.add_variation("variation_1", None)
@@ -101,8 +102,8 @@ def test_includes():
  '"include": [{"foo": "a", "bar": "1"}, {"foo": "b", "bar": "2"}]}'
  )
  assert (
- repr(matrix) == "<Matrix: {'foo': ('a', 'b', 'c')}; "
- "include=({'foo': 'a', 'bar': '1'}, {'foo': 'b', 'bar': '2'}); exclude=()>"
+ repr(matrix) == "<Matrix: FrozenDict({'foo': ('a', 'b', 'c'), "
+ "'include': ({'foo': 'a', 'bar': '1'}, {'foo': 'b', 'bar': '2'})})>"
  )

  # Multiple insertions.
@@ -157,8 +158,8 @@ def test_excludes():
  '"exclude": [{"foo": "a", "bar": "1"}, {"foo": "b", "bar": "2"}]}'
  )
  assert (
- repr(matrix) == "<Matrix: {'foo': ('a', 'b', 'c')}; "
- "include=(); exclude=({'foo': 'a', 'bar': '1'}, {'foo': 'b', 'bar': '2'})>"
+ repr(matrix) == "<Matrix: FrozenDict({'foo': ('a', 'b', 'c'), "
+ "'exclude': ({'foo': 'a', 'bar': '1'}, {'foo': 'b', 'bar': '2'})})>"
  )

  # Multiple insertions.
@@ -0,0 +1,333 @@
+ # Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
+ #
+ # This program is Free Software; you can redistribute it and/or
+ # modify it under the terms of the GNU General Public License
+ # as published by the Free Software Foundation; either version 2
+ # of the License, or (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program; if not, write to the Free Software
+ # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+ from __future__ import annotations
+
+ import json
+ import re
+ from string import ascii_lowercase, digits
+ from typing import Any
+
+ import pytest
+ from extra_platforms import ALL_IDS, is_windows
+
+ from gha_utils.metadata import NUITKA_BUILD_TARGETS, Dialects, Metadata
+
+
+ @pytest.mark.parametrize("target_id, target_data", NUITKA_BUILD_TARGETS.items())
+ def test_nuitka_targets(target_id: str, target_data: dict[str, str]) -> None:
+ assert isinstance(target_id, str)
+ assert isinstance(target_data, dict)
+
+ assert set(target_data) == {
+ "os",
+ "platform_id",
+ "arch",
+ "extension",
+ }, f"Unexpected keys in target data for {target_id}"
+
+ assert isinstance(target_data["os"], str)
+ assert isinstance(target_data["platform_id"], str)
+ assert isinstance(target_data["arch"], str)
+ assert isinstance(target_data["extension"], str)
+
+ assert set(target_data["os"]).issubset(ascii_lowercase + digits + "-.")
+ assert target_data["platform_id"] in ALL_IDS
+ assert target_data["arch"] in {"arm64", "x64"}
+ assert set(target_data["extension"]).issubset(ascii_lowercase)
+
+ assert target_id == target_data["platform_id"] + "-" + target_data["arch"]
+ assert set(target_id).issubset(ascii_lowercase + digits + "-")
+
+
+ def regex(pattern: str) -> re.Pattern:
+ """Compile a regex pattern with DOTALL flag."""
+ return re.compile(pattern, re.DOTALL)
+
+
+ def iter_checks(metadata: Any, expected: Any, context: Any) -> None:
+ """Recursively iterate over expected content and check it matches in metadata."""
+
+ if isinstance(expected, re.Pattern):
+ assert isinstance(metadata, str)
+ assert re.fullmatch(expected, metadata) is not None, (
+ f"{metadata!r} does not match {expected.pattern!r} in {context!r}"
+ )
+
+ elif isinstance(expected, dict):
+ assert isinstance(metadata, dict)
+ assert set(metadata) == set(expected)
+ for key, value in expected.items():
+ iter_checks(metadata[key], value, metadata)
+
+ elif isinstance(expected, list):
+ assert isinstance(metadata, list)
+ assert len(metadata) == len(expected)
+ for item in expected:
+ iter_checks(metadata[expected.index(item)], item, metadata)
+
+ else:
+ assert metadata == expected, (
+ f"{metadata!r} does not match {expected!r} in {context!r}"
+ )
+ assert type(metadata) is type(expected)
+
+
+ expected = {
+ "new_commits": None,
+ "release_commits": None,
+ "gitignore_exists": True,
+ "python_files": [
+ "gha_utils\\__init__.py",
+ "gha_utils\\__main__.py",
+ "gha_utils\\changelog.py",
+ "gha_utils\\cli.py",
+ "gha_utils\\mailmap.py",
+ "gha_utils\\matrix.py",
+ "gha_utils\\metadata.py",
+ "gha_utils\\test_plan.py",
+ "tests\\__init__.py",
+ "tests\\test_changelog.py",
+ "tests\\test_mailmap.py",
+ "tests\\test_matrix.py",
+ "tests\\test_metadata.py",
+ ]
+ if is_windows()
+ else [
+ "gha_utils/__init__.py",
+ "gha_utils/__main__.py",
+ "gha_utils/changelog.py",
+ "gha_utils/cli.py",
+ "gha_utils/mailmap.py",
+ "gha_utils/matrix.py",
+ "gha_utils/metadata.py",
+ "gha_utils/test_plan.py",
+ "tests/__init__.py",
+ "tests/test_changelog.py",
+ "tests/test_mailmap.py",
+ "tests/test_matrix.py",
+ "tests/test_metadata.py",
+ ],
+ "doc_files": [
+ ".github\\code-of-conduct.md",
+ "changelog.md",
+ "readme.md",
+ ]
+ if is_windows()
+ else [
+ ".github/code-of-conduct.md",
+ "changelog.md",
+ "readme.md",
+ ],
+ "is_python_project": True,
+ "package_name": "gha-utils",
+ "blacken_docs_params": "--target-version py311 --target-version py312 --target-version py313",
+ "mypy_params": "--python-version 3.11",
+ "current_version": regex(r"[0-9\.]+"),
+ "released_version": None,
+ "is_sphinx": False,
+ "active_autodoc": False,
+ "release_notes": regex(
+ r"### Changes\n\n"
+ r"> \[\!IMPORTANT\]\n"
+ r"> This version is not released yet and is under active development\.\n\n"
+ r".+"
+ ),
+ "new_commits_matrix": None,
+ "release_commits_matrix": None,
+ "nuitka_matrix": {
+ "os": [
+ "ubuntu-24.04-arm",
+ "ubuntu-24.04",
+ "macos-15",
+ "macos-13",
+ "windows-11-arm",
+ "windows-2025",
+ ],
+ "entry_point": ["gha-utils"],
+ "commit": [regex(r"[a-z0-9]+")],
+ "include": [
+ {
+ "target": "linux-arm64",
+ "os": "ubuntu-24.04-arm",
+ "platform_id": "linux",
+ "arch": "arm64",
+ "extension": "bin",
+ },
+ {
+ "target": "linux-x64",
+ "os": "ubuntu-24.04",
+ "platform_id": "linux",
+ "arch": "x64",
+ "extension": "bin",
+ },
+ {
+ "target": "macos-arm64",
+ "os": "macos-15",
+ "platform_id": "macos",
+ "arch": "arm64",
+ "extension": "bin",
+ },
+ {
+ "target": "macos-x64",
+ "os": "macos-13",
+ "platform_id": "macos",
+ "arch": "x64",
+ "extension": "bin",
+ },
+ {
+ "target": "windows-arm64",
+ "os": "windows-11-arm",
+ "platform_id": "windows",
+ "arch": "arm64",
+ "extension": "exe",
+ },
+ {
+ "target": "windows-x64",
+ "os": "windows-2025",
+ "platform_id": "windows",
+ "arch": "x64",
+ "extension": "exe",
+ },
+ {
+ "entry_point": "gha-utils",
+ "cli_id": "gha-utils",
+ "module_id": "gha_utils.__main__",
+ "callable_id": "main",
+ "module_path": regex(r"gha_utils(/|\\)__main__\.py"),
+ },
+ {
+ "commit": regex(r"[a-z0-9]+"),
+ "short_sha": regex(r"[a-z0-9]+"),
+ "current_version": regex(r"[0-9\.]+"),
+ },
+ {
+ "os": "ubuntu-24.04-arm",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-linux-arm64-[a-z0-9]+\.bin"),
+ },
+ {
+ "os": "ubuntu-24.04",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-linux-x64-[a-z0-9]+\.bin"),
+ },
+ {
+ "os": "macos-15",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-macos-arm64-[a-z0-9]+\.bin"),
+ },
+ {
+ "os": "macos-13",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-macos-x64-[a-z0-9]+\.bin"),
+ },
+ {
+ "os": "windows-11-arm",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-windows-arm64-[a-z0-9]+\.exe"),
+ },
+ {
+ "os": "windows-2025",
+ "entry_point": "gha-utils",
+ "commit": regex(r"[a-z0-9]+"),
+ "bin_name": regex(r"gha-utils-windows-x64-[a-z0-9]+\.exe"),
+ },
+ {"state": "stable"},
+ ],
+ },
+ }
+
+
+ def test_metadata_json_format():
+ metadata = Metadata().dump(Dialects.json)
+ assert isinstance(metadata, str)
+
+ iter_checks(json.loads(metadata), expected, metadata)
+
+
+ def test_metadata_github_format():
+ raw_metadata = Metadata().dump()
+ assert isinstance(raw_metadata, str)
+
+ # Prepare metadata for checks
+ metadata = {}
+ # Accumulation states.
+ acc_key = None
+ acc_delimiter = None
+ acc_lines = []
+ for line in raw_metadata.splitlines():
+ # We are at the end of the accumulation for a key.
+ if line == acc_delimiter:
+ assert acc_delimiter
+ assert acc_key
+ assert acc_lines
+ metadata[acc_key] = "\n".join(acc_lines)
+ # Reset accumulation states.
+ acc_key = None
+ acc_delimiter = None
+ acc_lines = []
+ continue
+
+ # We are accumulating lines for a key.
+ if acc_key:
+ acc_lines.append(line)
+ continue
+
+ # We should not have any accumulation state at this point.
+ assert acc_key is None
+ assert acc_delimiter is None
+ assert acc_lines == []
+
+ # We are starting a new accumulation for a key.
+ if "<<" in line:
+ # Check the delimiter syntax.
+ assert line.count("<<") == 1
+ acc_key, acc_delimiter = line.split("<<", 1)
+ assert re.fullmatch(r"ghadelimiter_[0-9]+", acc_delimiter)
+ continue
+
+ # We are at a simple key-value pair.
+ if "=" in line:
+ key, value = line.split("=", 1)
+ # Convert dict-like JSON string into Python dict.
+ if value.startswith("{"):
+ value = json.loads(value)
+ metadata[key] = value
+ continue
+
+ raise ValueError(
+ f"Unexpected line format in metadata: {line!r}. "
+ "Expecting a key-value pair or a delimited block."
+ )
+
+ # Adapt expected values to match GitHub Actions format.
+ github_format_expected = {}
+ for key, value in expected.items():
+ new_value = value
+ if value is None:
+ new_value = ""
+ elif isinstance(value, bool):
+ new_value = str(value).lower()
+ elif isinstance(value, list):
+ new_value = " ".join(f'"{i}"' for i in value)
+ github_format_expected[key] = new_value
+
+ iter_checks(metadata, github_format_expected, raw_metadata)
@@ -1,214 +0,0 @@
- # Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
- #
- # This program is Free Software; you can redistribute it and/or
- # modify it under the terms of the GNU General Public License
- # as published by the Free Software Foundation; either version 2
- # of the License, or (at your option) any later version.
- #
- # This program is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- # GNU General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with this program; if not, write to the Free Software
- # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
- from __future__ import annotations
-
- import re
- from string import ascii_lowercase, digits
-
- from extra_platforms import ALL_IDS
-
- from gha_utils.metadata import NUITKA_BUILD_TARGETS, Dialects, Metadata
-
-
- def test_nuitka_targets():
- for target_id, target_data in NUITKA_BUILD_TARGETS.items():
- assert isinstance(target_id, str)
- assert isinstance(target_data, dict)
-
- assert set(target_data) == {
- "os",
- "platform_id",
- "arch",
- "extension",
- }, f"Unexpected keys in target data for {target_id}"
-
- assert isinstance(target_data["os"], str)
- assert isinstance(target_data["platform_id"], str)
- assert isinstance(target_data["arch"], str)
- assert isinstance(target_data["extension"], str)
-
- assert set(target_data["os"]).issubset(ascii_lowercase + digits + "-.")
- assert target_data["platform_id"] in ALL_IDS
- assert target_data["arch"] in {"arm64", "x64"}
- assert set(target_data["extension"]).issubset(ascii_lowercase)
-
- assert target_id == target_data["platform_id"] + "-" + target_data["arch"]
- assert set(target_id).issubset(ascii_lowercase + digits + "-")
-
-
- def test_metadata_github_format():
- metadata = Metadata()
-
- assert re.fullmatch(
- (
- r"new_commits=\n"
- r"release_commits=\n"
- r"gitignore_exists=true\n"
- r"python_files=[\S ]*\n"
- r"doc_files=[\S ]*\n"
- r"is_python_project=true\n"
- r"package_name=gha-utils\n"
- r"blacken_docs_params=--target-version py311 "
- r"--target-version py312 --target-version py313\n"
- r"mypy_params=--python-version 3\.11\n"
- r"current_version=[0-9\.]+\n"
- r"released_version=\n"
- r"is_sphinx=false\n"
- r"active_autodoc=false\n"
- r"release_notes<<ghadelimiter_[0-9]+\n"
- r"### Changes\n\n"
- r"> \[\!IMPORTANT\]\n"
- r"> This version is not released yet and is under active development.\n\n"
- r".+\n"
- r"ghadelimiter_[0-9]+\n"
- r"new_commits_matrix=\n"
- r"release_commits_matrix=\n"
- #
- r"nuitka_matrix=\{"
- #
- r'"os": \["ubuntu-24\.04-arm", "ubuntu-24\.04", '
- r'"macos-15", "macos-13", "windows-11-arm", "windows-2025"\], '
- #
- r'"entry_point": \["gha-utils"\], '
- #
- r'"commit": \["[a-z0-9]+"\], '
- #
- r'"include": \['
- #
- r'\{"target": "linux-arm64", "os": "ubuntu-24\.04-arm", '
- r'"platform_id": "linux", "arch": "arm64", "extension": "bin"\}, '
- r'\{"target": "linux-x64", "os": "ubuntu-24\.04", '
- r'"platform_id": "linux", "arch": "x64", "extension": "bin"\}, '
- r'\{"target": "macos-arm64", "os": "macos-15", '
- r'"platform_id": "macos", "arch": "arm64", "extension": "bin"\}, '
- r'\{"target": "macos-x64", "os": "macos-13", '
- r'"platform_id": "macos", "arch": "x64", '
- r'"extension": "bin"\}, '
- r'\{"target": "windows-arm64", "os": "windows-11-arm", '
- r'"platform_id": "windows", "arch": "arm64", "extension": "exe"\}, '
- r'\{"target": "windows-x64", "os": "windows-2025", '
- r'"platform_id": "windows", "arch": "x64", "extension": "exe"\}, '
- #
- r'\{"entry_point": "gha-utils", '
- r'"cli_id": "gha-utils", "module_id": "gha_utils\.__main__", '
- r'"callable_id": "main", '
- r'"module_path": "gha_utils(/|\\\\)__main__\.py"\}, '
- #
- r'\{"commit": "[a-z0-9]+", "short_sha": "[a-z0-9]+", '
- r'"current_version": "[0-9\.]+"\}, '
- #
- r'\{"os": "ubuntu-24\.04-arm", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-linux-arm64-[a-z0-9]+\.bin"\}, '
- r'\{"os": "ubuntu-24\.04", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-linux-x64-[a-z0-9]+\.bin"\}, '
- r'\{"os": "macos-15", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-macos-arm64-[a-z0-9]+\.bin"\}, '
- r'\{"os": "macos-13", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-macos-x64-[a-z0-9]+\.bin"\}, '
- r'\{"os": "windows-11-arm", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-windows-arm64-[a-z0-9]+\.exe"\}, '
- r'\{"os": "windows-2025", "entry_point": "gha-utils", '
- r'"commit": "[a-z0-9]+", '
- r'"bin_name": "gha-utils-windows-x64-[a-z0-9]+\.exe"\}, '
- r'\{"state": "stable"\}\]\}\n'
- ),
- metadata.dump(Dialects.github),
- re.DOTALL,
- )
-
-
- def test_metadata_plain_format():
- metadata = Metadata()
-
- assert re.fullmatch(
- (
- r"\{"
- r"'new_commits': None, "
- r"'release_commits': None, "
- r"'gitignore_exists': True, "
- r"'python_files': <generator object Metadata\.python_files at \S+>, "
- r"'doc_files': <generator object Metadata\.doc_files at \S+>, "
- r"'is_python_project': True, "
- r"'package_name': 'gha-utils', "
- r"'blacken_docs_params': \("
- r"'--target-version py311', "
- r"'--target-version py312', "
- r"'--target-version py313'\), "
- r"'mypy_params': '--python-version 3\.11', "
- r"'current_version': '[0-9\.]+', "
- r"'released_version': None, "
- r"'is_sphinx': False, "
- r"'active_autodoc': False, "
- r"'release_notes': '### Changes\\n\\n"
- r"> \[\!IMPORTANT\]\\n"
- r"> This version is not released yet and is under active development.\\n\\n"
- r".+', "
- r"'new_commits_matrix': None, "
- r"'release_commits_matrix': None, "
- r"'nuitka_matrix': <Matrix: \{"
- r"'os': \('ubuntu-24\.04-arm', 'ubuntu-24\.04', "
- r"'macos-15', 'macos-13', 'windows-11-arm', 'windows-2025'\), "
- r"'entry_point': \('gha-utils',\), "
- r"'commit': \('[a-z0-9]+',\)\}; "
- #
- r"include=\(\{'target': 'linux-arm64', 'os': 'ubuntu-24\.04-arm', "
- r"'platform_id': 'linux', 'arch': 'arm64', 'extension': 'bin'\}, "
- r"\{'target': 'linux-x64', 'os': 'ubuntu-24\.04', 'platform_id': 'linux', "
- r"'arch': 'x64', 'extension': 'bin'\}, \{'target': 'macos-arm64', 'os': 'macos-15', "
- r"'platform_id': 'macos', 'arch': 'arm64', 'extension': 'bin'\}, "
- r"\{'target': 'macos-x64', 'os': 'macos-13', 'platform_id': 'macos', 'arch': 'x64', "
- r"'extension': 'bin'\}, \{'target': 'windows-arm64', 'os': 'windows-11-arm', 'platform_id': "
- r"'windows', 'arch': 'arm64', 'extension': 'exe'\}, "
- r"\{'target': 'windows-x64', 'os': 'windows-2025', 'platform_id': "
- r"'windows', 'arch': 'x64', 'extension': 'exe'\}, "
- #
- r"\{'entry_point': 'gha-utils', 'cli_id': 'gha-utils', "
- r"'module_id': 'gha_utils\.__main__', 'callable_id': 'main', "
- r"'module_path': 'gha_utils(/|\\\\)__main__\.py'\}, "
- #
- r"\{'commit': '[a-z0-9]+', 'short_sha': '[a-z0-9]+', "
- r"'current_version': '[0-9\.]+'\}, "
- #
- r"\{'os': 'ubuntu-24\.04-arm', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', "
- r"'bin_name': 'gha-utils-linux-arm64-[a-z0-9]+\.bin'\}, "
- r"\{'os': 'ubuntu-24\.04', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', "
- r"'bin_name': 'gha-utils-linux-x64-[a-z0-9]+\.bin'\}, "
- r"\{'os': 'macos-15', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', "
- r"'bin_name': 'gha-utils-macos-arm64-[a-z0-9]+\.bin'\}, "
- r"\{'os': 'macos-13', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', 'bin_name': "
- r"'gha-utils-macos-x64-[a-z0-9]+\.bin'\}, "
- r"\{'os': 'windows-11-arm', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', "
- r"'bin_name': 'gha-utils-windows-arm64-[a-z0-9]+\.exe'\}, "
- r"\{'os': 'windows-2025', 'entry_point': 'gha-utils', "
- r"'commit': '[a-z0-9]+', "
- r"'bin_name': 'gha-utils-windows-x64-[a-z0-9]+\.exe'\}, "
- r"\{'state': 'stable'\}\); "
- r"exclude=\(\)>\}"
- ),
- metadata.dump(Dialects.plain),
- re.DOTALL,
- )