gha-utils 4.13.3-py3-none-any.whl → 4.14.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gha_utils/__init__.py +1 -1
- gha_utils/cli.py +39 -2
- gha_utils/mailmap.py +1 -1
- gha_utils/matrix.py +279 -0
- gha_utils/metadata.py +47 -119
- gha_utils/test_plan.py +226 -0
- {gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/METADATA +5 -2
- gha_utils-4.14.0.dist-info/RECORD +14 -0
- gha_utils-4.13.3.dist-info/RECORD +0 -12
- {gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/WHEEL +0 -0
- {gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/entry_points.txt +0 -0
- {gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/top_level.txt +0 -0
gha_utils/__init__.py
CHANGED
gha_utils/cli.py
CHANGED
@@ -23,22 +23,24 @@ from datetime import datetime
 from pathlib import Path
 from typing import IO
 
+import click
 from click_extra import (
     Choice,
     Context,
+    IntRange,
     argument,
     echo,
     extra_group,
     file_path,
     option,
     pass_context,
-    path,
 )
 
 from . import __version__
 from .changelog import Changelog
 from .mailmap import Mailmap
 from .metadata import Dialects, Metadata
+from .test_plan import DEFAULT_TEST_PLAN, parse_test_plan
 
 
 def is_stdout(filepath: Path) -> bool:
@@ -167,7 +169,7 @@ def metadata(ctx, format, overwrite, output_path):
 @gha_utils.command(short_help="Maintain a Markdown-formatted changelog")
 @option(
     "--source",
-    type=path(exists=True, readable=True, resolve_path=True),
+    type=file_path(exists=True, readable=True, resolve_path=True),
     default="changelog.md",
     help="Changelog source file in Markdown format.",
 )
@@ -268,3 +270,38 @@ def mailmap_sync(ctx, source, create_if_missing, destination_mailmap):
         ctx.exit()
 
     echo(generate_header(ctx) + new_content, file=prep_path(destination_mailmap))
+
+
+@gha_utils.command(short_help="Run a test plan from a file against a binary")
+@option(
+    "--binary",
+    # XXX Wait for https://github.com/janluke/cloup/issues/185 to use the
+    # `file_path` type.
+    type=click.Path(exists=True, executable=True, resolve_path=True),
+    required=True,
+    help="Path to the binary to test.",
+)
+@option(
+    "--plan",
+    type=file_path(exists=True, readable=True, resolve_path=True),
+    help="Test plan in YAML.",
+)
+@option(
+    "-t",
+    "--timeout",
+    type=IntRange(min=0),
+    default=60,
+    help="Set maximum duration in seconds for each CLI call.",
+)
+def test_plan(binary, plan, timeout):
+    # Load test plan from workflow input, or use a default one.
+    if plan:
+        logging.debug(f"Read test plan from {plan}")
+        test_plan = parse_test_plan(plan)
+    else:
+        logging.warning(f"No test plan provided. Default to: {DEFAULT_TEST_PLAN}")
+        test_plan = DEFAULT_TEST_PLAN
+
+    for index, test_case in enumerate(test_plan):
+        logging.info(f"Run test #{index}")
+        test_case.check_cli_test(binary, timeout=timeout)
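The new `test-plan` subcommand above wires the `--binary`, `--plan` and `--timeout` options to the helpers introduced in `gha_utils/test_plan.py` further down this diff. As a rough sketch — the `./my-cli` binary and `test-plan.yaml` file names here are illustrative, not part of the package — the command's body boils down to:

```python
from pathlib import Path

from gha_utils.test_plan import DEFAULT_TEST_PLAN, parse_test_plan

# Fall back on the built-in default plan when no YAML file is provided.
plan = Path("test-plan.yaml")
test_plan = parse_test_plan(plan) if plan.exists() else DEFAULT_TEST_PLAN

for test_case in test_plan:
    # Runs the binary with the case's parameters and asserts its expectations.
    test_case.check_cli_test("./my-cli", timeout=60)
```

With click's default underscore-to-dash command naming, this should be reachable as `gha-utils test-plan --binary ./my-cli --plan test-plan.yaml`.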
gha_utils/mailmap.py
CHANGED
@@ -22,7 +22,7 @@ from dataclasses import dataclass, field
 from functools import cached_property
 from subprocess import run
 
-from boltons.iterutils import unique
+from boltons.iterutils import unique
 
 
 @dataclass(order=True, frozen=True)
gha_utils/matrix.py
ADDED
@@ -0,0 +1,279 @@
+# Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
+#
+# This program is Free Software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from __future__ import annotations
+
+import itertools
+import json
+import logging
+from typing import Iterable, Iterator
+
+from boltons.dictutils import FrozenDict
+from boltons.iterutils import unique
+
+RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
+
+
+class Matrix(FrozenDict):
+    """A matrix as defined by GitHub's actions workflows.
+
+    See GitHub official documentation on `how-to implement variations of jobs in a
+    workflow
+    <https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/running-variations-of-jobs-in-a-workflow>`_.
+
+    This ``Matrix`` behaves like a ``dict`` and works everywhere a ``dict`` would, only
+    it is immutable, being based on :class:`FrozenDict`. If you want to populate the
+    matrix you have to use the following methods:
+
+    - :meth:`add_variation`
+    - :meth:`add_includes`
+    - :meth:`add_excludes`
+
+    The implementation respects the order in which items were inserted. This provides a
+    natural and visual sorting that should ease the inspection and debugging of large
+    matrices.
+    """
+
+    # Tuples are used to keep track of the insertion order and force immutability.
+    include: tuple[dict[str, str], ...] = tuple()
+    exclude: tuple[dict[str, str], ...] = tuple()
+
+    def matrix(
+        self, ignore_includes: bool = False, ignore_excludes: bool = False
+    ) -> dict[str, str]:
+        """Returns a copy of the matrix.
+
+        The special ``include`` and ``exclude`` directives will be added by default.
+        You can selectively ignore them by passing the corresponding boolean parameters.
+        """
+        dict_copy = dict(self)
+        if not ignore_includes and self.include:
+            dict_copy["include"] = self.include
+        if not ignore_excludes and self.exclude:
+            dict_copy["exclude"] = self.exclude
+        return dict_copy
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}: {super(FrozenDict, self).__repr__()}; "
+            f"include={self.include}; exclude={self.exclude}>"
+        )
+
+    def __str__(self) -> str:
+        """Render matrix as a JSON string."""
+        return json.dumps(self.matrix())
+
+    @staticmethod
+    def _check_ids(*var_ids: str) -> None:
+        for var_id in var_ids:
+            if var_id in RESERVED_MATRIX_KEYWORDS:
+                raise ValueError(f"{var_id} cannot be used as a variation ID")
+
+    def add_variation(self, variation_id: str, values: Iterable[str]) -> None:
+        self._check_ids(variation_id)
+        if not values:
+            raise ValueError(f"No variation values provided: {values}")
+        if any(type(v) is not str for v in values):
+            raise ValueError(f"Only strings are accepted in {values}")
+        # Extend variation with values, and deduplicate them along the way.
+        var_values = list(self.get(variation_id, [])) + list(values)
+        super(FrozenDict, self).__setitem__(variation_id, tuple(unique(var_values)))
+
+    def _add_and_dedup_dicts(
+        self, *new_dicts: dict[str, str]
+    ) -> tuple[dict[str, str], ...]:
+        self._check_ids(*(k for d in new_dicts for k in d))
+        return tuple(
+            dict(items) for items in unique((tuple(d.items()) for d in new_dicts))
+        )
+
+    def add_includes(self, *new_includes: dict[str, str]) -> None:
+        """Add one or more ``include`` special directives to the matrix."""
+        self.include = self._add_and_dedup_dicts(*self.include, *new_includes)
+
+    def add_excludes(self, *new_excludes: dict[str, str]) -> None:
+        """Add one or more ``exclude`` special directives to the matrix."""
+        self.exclude = self._add_and_dedup_dicts(*self.exclude, *new_excludes)
+
+    def all_variations(
+        self,
+        with_matrix: bool = True,
+        with_includes: bool = False,
+        with_excludes: bool = False,
+    ) -> dict[str, tuple[str, ...]]:
+        """Collect all variations encountered in the matrix.
+
+        Extra variations mentioned in the special ``include`` and ``exclude``
+        directives will be ignored by default.
+
+        You can selectively expand or restrict the resulting inventory of variations by
+        passing the corresponding ``with_matrix``, ``with_includes`` and
+        ``with_excludes`` boolean filter parameters.
+        """
+        variations = {}
+        if with_matrix:
+            variations = {k: list(v) for k, v in self.items()}
+
+        for expand, directives in (
+            (with_includes, self.include),
+            (with_excludes, self.exclude),
+        ):
+            if expand:
+                for value in directives:
+                    for k, v in value.items():
+                        variations.setdefault(k, []).append(v)
+
+        return {k: tuple(unique(v)) for k, v in variations.items()}
+
+    def product(
+        self, with_includes: bool = False, with_excludes: bool = False
+    ) -> Iterator[dict[str, str]]:
+        """Only returns the combinations of the base matrix by default.
+
+        You can optionally add any variation referenced in the ``include`` and
+        ``exclude`` special directives.
+
+        Respects the order of variations and their values.
+        """
+        variations = self.all_variations(
+            with_includes=with_includes, with_excludes=with_excludes
+        )
+        if not variations:
+            return
+        yield from map(
+            dict,
+            itertools.product(
+                *(
+                    tuple((variant_id, v) for v in variations)
+                    for variant_id, variations in variations.items()
+                )
+            ),
+        )
+
+    def _count_job(self) -> None:
+        self._job_counter += 1
+        if self._job_counter > 256:
+            logging.critical("GitHub job matrix limit of 256 jobs reached")
+
+    def solve(self, strict: bool = False) -> Iterator[dict[str, str]]:
+        """Returns all combinations and applies ``include`` and ``exclude`` constraints.
+
+        .. caution::
+            As per GitHub specifications, all ``include`` combinations are processed
+            after ``exclude``. This allows you to use ``include`` to add back
+            combinations that were previously excluded.
+        """
+        # GitHub jobs fail with the following message if the exclude directive is
+        # referencing keys that are not present in the original base matrix:
+        #   Invalid workflow file: .github/workflows/tests.yaml#L48
+        #   The workflow is not valid.
+        #   .github/workflows/tests.yaml (Line: 48, Col: 13): Matrix exclude key 'state'
+        #   does not match any key within the matrix
+        if strict:
+            unreferenced_keys = set(
+                self.all_variations(
+                    with_matrix=False, with_includes=True, with_excludes=True
+                )
+            ).difference(self)
+            if unreferenced_keys:
+                raise ValueError(
+                    f"Matrix exclude keys {list(unreferenced_keys)} does not match any "
+                    f"{list(self)} key within the matrix"
+                )
+
+        # Reset the number of combinations.
+        self._job_counter = 0
+
+        applicable_includes = []
+        leftover_includes: list[dict[str, str]] = []
+
+        # The matrix is empty: none of the include directives will match, so consider
+        # all directives as non-applicable.
+        if not self:
+            leftover_includes = list(self.include)
+
+        # Search for include directives that match the original matrix variations
+        # without overwriting their values. Keep the leftovers on the side.
+        else:
+            original_variations = self.all_variations()
+            for include in self.include:
+                # Keys shared between the include directive and the original matrix.
+                keys_overlap = set(include).intersection(original_variations)
+                # Collect include directives applicable to the original matrix.
+                if (
+                    # If all overlapping keys in the directive exactly match any value
+                    # of the original matrix, then we are certain the directive can be
+                    # applied without overwriting the original variations.
+                    all(include[k] in original_variations[k] for k in keys_overlap)
+                    # Same if no keys are shared, in which case these extra variations
+                    # will be added to all original ones.
+                    or not keys_overlap
+                ):
+                    applicable_includes.append(include)
+                # Other directives are considered non-applicable and will be returned
+                # as-is at the end of the process.
+                else:
+                    leftover_includes.append(include)
+
+        # Iterate through all the variations of the original matrix, and act on the
+        # matching exclude and include directives.
+        for base_variations in self.product():
+            # Skip the variation if it is fully matching at least one exclude directive.
+            exclusion_candidate = False
+            if any(
+                all(
+                    exclude[k] == base_variations[k]
+                    for k in set(exclude).intersection(base_variations)
+                )
+                for exclude in self.exclude
+            ):
+                exclusion_candidate = True
+
+            # Expand and/or extend the original variation set with applicable include
+            # directives.
+            updated_variations = base_variations.copy()
+            for include in applicable_includes:
+                # Check if the include directive is completely disjoint from the
+                # variations of the original matrix. If that's the case, then we are
+                # supposed to augment the current variation with this include, as it has
+                # already been identified as applicable. But only do that if the updated
+                # variation has not already been updated with a previously evaluated,
+                # more targeted include directive.
+                if set(include).isdisjoint(base_variations):
+                    if set(include).isdisjoint(updated_variations):
+                        updated_variations.update(include)
+                    continue
+
+                # Expand the base variation set with the fully matching include
+                # directive.
+                if all(
+                    include[k] == base_variations[k]
+                    for k in set(include).intersection(base_variations)
+                ):
+                    # Re-instate the variation set as a valid candidate since we found
+                    # an include directive that is explicitly referring to it,
+                    # resurrecting it from the dead.
+                    exclusion_candidate = False
+                    updated_variations.update(include)
+
+            if not exclusion_candidate:
+                self._count_job()
+                yield updated_variations
+
+        # Return as-is all the includes that were not applied to the original matrix.
+        for variation in leftover_includes:
+            self._count_job()
+            yield variation
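To make the ``include``/``exclude`` resolution implemented by ``solve()`` concrete, here is a minimal sketch of the new class in action (the variation names and values are illustrative):

```python
from gha_utils.matrix import Matrix

matrix = Matrix()
matrix.add_variation("os", ["ubuntu-24.04", "macos-15"])
matrix.add_variation("python", ["3.11", "3.12"])
# Drop one combination from the 2x2 product...
matrix.add_excludes({"os": "macos-15", "python": "3.11"})
# ...and decorate another combination with an extra key.
matrix.add_includes({"os": "ubuntu-24.04", "python": "3.12", "state": "stable"})

for job in matrix.solve():
    print(job)
# {'os': 'ubuntu-24.04', 'python': '3.11'}
# {'os': 'ubuntu-24.04', 'python': '3.12', 'state': 'stable'}
# {'os': 'macos-15', 'python': '3.12'}
```

The excluded combination is dropped, while the fully-matching include only augments the variation set it explicitly targets.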
gha_utils/metadata.py
CHANGED
@@ -28,7 +28,6 @@ doc_files="changelog.md" "readme.md" "docs/license.md"
 is_python_project=true
 package_name=click-extra
 blacken_docs_params=--target-version py37 --target-version py38
-ruff_py_version=py37
 mypy_params=--python-version 3.7
 current_version=2.0.1
 released_version=2.0.0
@@ -151,7 +150,6 @@ import re
 import sys
 from collections.abc import Iterable
 from functools import cached_property
-from itertools import product
 from pathlib import Path
 from random import randint
 from re import escape
@@ -183,6 +181,8 @@ from wcmatch.glob import (
     iglob,
 )
 
+from .matrix import Matrix
+
 SHORT_SHA_LENGTH = 7
 """Default SHA length hard-coded to ``7``.
 
@@ -192,8 +192,6 @@ SHORT_SHA_LENGTH = 7
 depends on the size of the repository.
 """
 
-RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
-
 
 WorkflowEvent = StrEnum(
     "WorkflowEvent",
@@ -275,14 +273,6 @@ MYPY_VERSION_MIN: Final = (3, 8)
 """
 
 
-class Matrix(dict):
-    """A matrix to used in a GitHub workflow."""
-
-    def __str__(self) -> str:
-        """Render matrix as a JSON string."""
-        return json.dumps(self)
-
-
 class Metadata:
     """Metadata class."""
 
@@ -437,22 +427,24 @@ class Metadata:
                 f"No need to look into the commit history: repository is already checked out at {current_commit}"
             )
 
-        sha_list = []
-        include_list = []
+        matrix = Matrix()
         for commit in commits:
             if past_commit_lookup:
                 logging.debug(f"Checkout to commit {commit.hash}")
                 git.checkout(commit.hash)
 
+            commit_metadata = {
+                "commit": commit.hash,
+                "short_sha": commit.hash[:SHORT_SHA_LENGTH],
+            }
+
             logging.debug(f"Extract project version at commit {commit.hash}")
             current_version = Metadata.get_current_version()
+            if current_version:
+                commit_metadata["current_version"] = current_version
 
-            sha_list.append(commit.hash)
-            include_list.append({
-                "commit": commit.hash,
-                "short_sha": commit.hash[:SHORT_SHA_LENGTH],
-                "current_version": current_version,
-            })
+            matrix.add_variation("commit", [commit.hash])
+            matrix.add_includes(commit_metadata)
 
         # Restore the repository to its initial state.
         if past_commit_lookup:
@@ -462,10 +454,7 @@ class Metadata:
             logging.debug("Unstash local changes that were previously saved.")
             git.repo.git.stash("pop")
 
-        return Matrix({
-            "commit": sha_list,
-            "include": include_list,
-        })
+        return matrix
 
     @cached_property
     def event_type(self) -> WorkflowEvent | None:  # type: ignore[valid-type]
@@ -779,23 +768,6 @@ class Metadata:
             )
         return None
 
-    @cached_property
-    def ruff_py_version(self) -> str | None:
-        """Returns the oldest Python version targeted.
-
-        .. caution::
-
-            Unlike ``blacken-docs``, `ruff doesn't support multiple
-            --target-version values
-            <https://github.com/astral-sh/ruff/issues/2857#issuecomment-1428100515>`_,
-            and `only supports the minimum Python version
-            <https://github.com/astral-sh/ruff/issues/2519>`_.
-        """
-        if self.py_target_versions:
-            version = self.py_target_versions[0]
-            return f"py{version.major}{version.minor}"
-        return None
-
     @cached_property
     def mypy_params(self) -> str | None:
         """Generates `mypy` parameters.
@@ -1035,48 +1007,40 @@ class Metadata:
         if not self.script_entries:
             return None
 
-
-
-        #
-        #
-
-
-
-
-
-
-        # https://github.com/actions/runner-images/issues/10820
-            "os": [
+        matrix = Matrix()
+
+        # Run the compilation only on the latest supported version of each OS. The
+        # exception is macOS, as macos-15 is arm64-only and macos-13 is x64-only, so we
+        # need both to target the two architectures.
+        # XXX arm64 Windows is planned for the future:
+        # https://github.com/actions/runner-images/issues/10820
+        matrix.add_variation(
+            "os",
+            (
                 "ubuntu-24.04",  # x64
                 "ubuntu-24.04-arm",  # arm64
                 "macos-15",  # arm64
                 "macos-13",  # x64
                 "windows-2022",  # x64
-
-
-            "include": [],
-        }
+            ),
+        )
 
         # Augment each entry point with some metadata.
-        extra_entry_point_params = []
         for cli_id, module_id, callable_id in self.script_entries:
             # CLI ID is supposed to be unique, we'll use that as a key.
-            matrix
+            matrix.add_variation("entry_point", [cli_id])
             # Derive CLI module path from its ID.
             # XXX We consider here the module is directly callable, because Nuitka
             # doesn't seem to support the entry-point notation.
             module_path = Path(f"{module_id.replace('.', '/')}.py")
             assert module_path.exists()
-
-
-
-
-
-
-
-            },
-        )
-        matrix["include"].extend(extra_entry_point_params)
+            matrix.add_includes({
+                "entry_point": cli_id,
+                "cli_id": cli_id,
+                "module_id": module_id,
+                "callable_id": callable_id,
+                "module_path": str(module_path),
+            })
 
         # We'd like to run a build for each new commit bundled in the action trigger.
         # If no new commits are detected, it's because we are not in a GitHub workflow
@@ -1088,15 +1052,15 @@ class Metadata:
         )
         assert build_commit_matrix
         # Extend the matrix with a new dimension: a list of commits.
-        matrix
-        matrix
+        matrix.add_variation("commit", build_commit_matrix["commit"])
+        matrix.add_includes(*build_commit_matrix.include)
 
         # Add platform-specific variables.
         # Arch values are inspired from those specified for self-hosted runners:
        # https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners#architectures
         # Arch is not a matrix variant because support is not widely distributed
         # between different OS.
-
+        matrix.add_includes(
             {
                 "os": "ubuntu-24.04",
                 "platform_id": "linux",
@@ -1127,54 +1091,19 @@ class Metadata:
                 "arch": "x64",
                 "extension": "exe",
             },
-        ]
-        matrix["include"].extend(extra_os_params)
-
-        # Check no extra parameter in reserved directive do not override themselves.
-        all_extra_keys = set().union(
-            *(
-                extras.keys()
-                for reserved_key in RESERVED_MATRIX_KEYWORDS
-                if reserved_key in matrix
-                for extras in matrix[reserved_key]
-            ),
-        )
-        assert all_extra_keys.isdisjoint(RESERVED_MATRIX_KEYWORDS)
-
-        # Produce all variations encoded by the matrix, by skipping the special
-        # directives.
-        all_variations = tuple(
-            tuple((variant_id, value) for value in variant_values)
-            for variant_id, variant_values in matrix.items()
-            if variant_id not in RESERVED_MATRIX_KEYWORDS
         )
 
-        #
-        #
-
-
-
-        #
-
-
-        for extra_params in matrix["include"]:
-            # Check if the variant match the extra parameters.
-            dimensions_to_match = set(variant_dict).intersection(extra_params)
-            d0 = {key: variant_dict[key] for key in dimensions_to_match}
-            d1 = {key: extra_params[key] for key in dimensions_to_match}
-            # Extra parameters are matching the current variant, merge their values.
-            if d0 == d1:
-                full_variant.update(extra_params)
-
-            # Add to the 'include' directive a new extra parameter that match the
-            # current variant.
-            extra_name_param = variant_dict.copy()
-            # Generate for Nuitka the binary file name to be used that is unique to
-            # this variant.
-            extra_name_param["bin_name"] = (
+        # Augment each variation set of the matrix with the binary name to be produced
+        # by Nuitka. Iterate over all matrix variation sets so we have all the metadata
+        # necessary to generate a unique name specific to these variations.
+        for variations in matrix.solve():
+            # We will re-attach this binary name via an include directive, so we need a
+            # copy of the main variations it corresponds to.
+            bin_name_include = {k: variations[k] for k in matrix}
+            bin_name_include["bin_name"] = (
                 "{cli_id}-{platform_id}-{arch}-build-{short_sha}.{extension}"
-            ).format(**
-            matrix
+            ).format(**variations)
+            matrix.add_includes(bin_name_include)
 
         return Matrix(matrix)
 
@@ -1268,7 +1197,6 @@ class Metadata:
             "is_python_project": self.is_python_project,
             "package_name": self.package_name,
             "blacken_docs_params": self.blacken_docs_params,
-            "ruff_py_version": self.ruff_py_version,
             "mypy_params": self.mypy_params,
             "current_version": self.current_version,
             "released_version": self.released_version,
@@ -1296,7 +1224,7 @@ class Metadata:
                 delimiter = f"ghadelimiter_{randint(10**8, (10**9) - 1)}"
                 content += f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
             else:
-                assert dialect == Dialects.
+                assert dialect == Dialects.plain
                 content = repr(metadata)
 
         logging.debug(f"Formatted metadata:\n{content}")
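Functionally, the refactor above replaces the hand-rolled ``dict`` bookkeeping (``include_list``, ``extra_os_params``, ...) with the ``Matrix`` API from ``gha_utils/matrix.py``, while the value rendered into the workflow output stays a JSON object in the shape GitHub's ``fromJSON()`` expects for ``strategy.matrix``. A small sketch of that serialization, with illustrative values:

```python
from gha_utils.matrix import Matrix

matrix = Matrix()
matrix.add_variation("os", ["ubuntu-24.04", "macos-15"])
matrix.add_includes({"os": "ubuntu-24.04", "platform_id": "linux"})

# Matrix.__str__() renders json.dumps(self.matrix()): the base variations
# plus the "include"/"exclude" special directives, when present.
print(matrix)
# {"os": ["ubuntu-24.04", "macos-15"], "include": [{"os": "ubuntu-24.04", "platform_id": "linux"}]}
```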
gha_utils/test_plan.py
ADDED
@@ -0,0 +1,226 @@
+# Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
+#
+# This program is Free Software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from __future__ import annotations
+
+import re
+from dataclasses import asdict, dataclass, field
+from pathlib import Path
+from subprocess import run
+from typing import Generator, Sequence
+
+import yaml
+from boltons.iterutils import flatten
+from boltons.strutils import strip_ansi
+from click_extra.testing import args_cleanup, print_cli_run
+
+
+@dataclass(order=True)
+class TestCase:
+    cli_parameters: tuple[str, ...] | str = field(default_factory=tuple)
+    """Parameters, arguments and options to pass to the CLI."""
+
+    exit_code: int | str | None = None
+    strip_ansi: bool = False
+    output_contains: tuple[str, ...] | str = field(default_factory=tuple)
+    stdout_contains: tuple[str, ...] | str = field(default_factory=tuple)
+    stderr_contains: tuple[str, ...] | str = field(default_factory=tuple)
+    output_regex_matches: tuple[str, ...] | str = field(default_factory=tuple)
+    stdout_regex_matches: tuple[str, ...] | str = field(default_factory=tuple)
+    stderr_regex_matches: tuple[str, ...] | str = field(default_factory=tuple)
+    output_regex_fullmatch: str | None = None
+    stdout_regex_fullmatch: str | None = None
+    stderr_regex_fullmatch: str | None = None
+
+    def __post_init__(self) -> None:
+        """Normalize all fields."""
+        for field_id, field_data in asdict(self).items():
+            # Validate and normalize the exit code.
+            if field_id == "exit_code":
+                if isinstance(field_data, str):
+                    field_data = int(field_data)
+                elif field_data is not None and not isinstance(field_data, int):
+                    raise ValueError(f"exit_code is not an integer: {field_data}")
+
+            elif field_id == "strip_ansi":
+                if not isinstance(field_data, bool):
+                    raise ValueError(f"strip_ansi is not a boolean: {field_data}")
+
+            # Validate and normalize regex fullmatch fields.
+            elif field_id.endswith("_fullmatch"):
+                if field_data:
+                    if not isinstance(field_data, str):
+                        raise ValueError(f"{field_id} is not a string: {field_data}")
+                # Normalize empty strings to None.
+                else:
+                    field_data = None
+
+            # Validate and normalize tuples of strings.
+            else:
+                # Wrap a single string into a tuple.
+                if isinstance(field_data, str):
+                    field_data = (field_data,)
+                if not isinstance(field_data, Sequence):
+                    raise ValueError(
+                        f"{field_id} is not a tuple or a list: {field_data}"
+                    )
+                if not all(isinstance(i, str) for i in field_data):
+                    raise ValueError(
+                        f"{field_id} contains non-string elements: {field_data}"
+                    )
+                # Ignore blank values.
+                field_data = tuple(i.strip() for i in field_data if i.strip())
+
+            # Validate regexes.
+            if field_data and "_regex_" in field_id:
+                for regex in flatten((field_data,)):
+                    try:
+                        re.compile(regex)
+                    except re.error as ex:
+                        raise ValueError(
+                            f"Invalid regex in {field_id}: {regex}"
+                        ) from ex
+
+            setattr(self, field_id, field_data)
+
+    def check_cli_test(self, binary: str | Path, timeout: int | None = None):
+        """Run a CLI command and check its output against the test case.
+
+        .. todo::
+            Add support for environment variables.
+
+        .. todo::
+            Add support for ANSI code stripping.
+
+        .. todo::
+            Add support for proper mixed stdout/stderr stream as a single,
+            intertwined output.
+        """
+        clean_args = args_cleanup(binary, self.cli_parameters)
+        result = run(
+            clean_args,
+            capture_output=True,
+            timeout=timeout,
+            # XXX Do not force encoding to let CLIs figure out by themselves the
+            # contextual encoding to use. This avoids UnicodeDecodeError on output in
+            # Windows' console, which still defaults to legacy encodings (e.g. cp1252,
+            # cp932, etc.):
+            #
+            # Traceback (most recent call last):
+            # File "…\__main__.py", line 49, in <module>
+            # File "…\__main__.py", line 45, in main
+            # File "…\click\core.py", line 1157, in __call__
+            # File "…\click_extra\commands.py", line 347, in main
+            # File "…\click\core.py", line 1078, in main
+            # File "…\click_extra\commands.py", line 377, in invoke
+            # File "…\click\core.py", line 1688, in invoke
+            # File "…\click_extra\commands.py", line 377, in invoke
+            # File "…\click\core.py", line 1434, in invoke
+            # File "…\click\core.py", line 783, in invoke
+            # File "…\cloup\_context.py", line 47, in new_func
+            # File "…\mpm\cli.py", line 570, in managers
+            # File "…\mpm\output.py", line 187, in print_table
+            # File "…\click_extra\tabulate.py", line 97, in render_csv
+            # File "encodings\cp1252.py", line 19, in encode
+            # UnicodeEncodeError: 'charmap' codec can't encode character
+            # '\u2713' in position 128: character maps to <undefined>
+            #
+            # encoding="utf-8",
+            text=True,
+        )
+        print_cli_run(clean_args, result)
+
+        for field_id, field_data in asdict(self).items():
+            if field_id == "cli_parameters" or (not field_data and field_data != 0):
+                continue
+
+            if field_id == "exit_code":
+                if result.returncode != field_data:
+                    raise AssertionError(
+                        f"CLI exited with code {result.returncode}, "
+                        f"expected {field_data}"
+                    )
+
+            output = ""
+            name = ""
+            if field_id.startswith("output_"):
+                raise NotImplementedError("Output mixing <stdout>/<stderr>")
+                # output = result.output
+                # name = "output"
+            elif field_id.startswith("stdout_"):
+                output = result.stdout
+                name = "<stdout>"
+            elif field_id.startswith("stderr_"):
+                output = result.stderr
+                name = "<stderr>"
+
+            if self.strip_ansi:
+                output = strip_ansi(output)
+
+            if field_id.endswith("_contains"):
+                for sub_string in field_data:
+                    if sub_string not in output:
+                        raise AssertionError(
+                            f"CLI's {name} does not contain {sub_string!r}"
+                        )
+
+            elif field_id.endswith("_regex_matches"):
+                for regex in field_data:
+                    if not re.search(regex, output):
+                        raise AssertionError(
+                            f"CLI's {name} does not match regex {regex!r}"
+                        )
+
+            elif field_id.endswith("_regex_fullmatch"):
+                regex = field_data
+                if not re.fullmatch(regex, output):
+                    raise AssertionError(
+                        f"CLI's {name} does not fully match regex {regex!r}"
+                    )
+
+
+DEFAULT_TEST_PLAN = (
+    # Output the version of the CLI.
+    TestCase(cli_parameters="--version"),
+    # Test combination of version and verbosity.
+    TestCase(cli_parameters=("--verbosity", "DEBUG", "--version")),
+    # Test help output.
+    TestCase(cli_parameters="--help"),
+)
+
+
+def parse_test_plan(plan_path: Path) -> Generator[TestCase, None, None]:
+    plan = yaml.full_load(plan_path.read_text(encoding="UTF-8"))
+
+    # Validate the test plan structure.
+    if not plan:
+        raise ValueError(f"Empty test plan file {plan_path}")
+    if not isinstance(plan, list):
+        raise ValueError(f"Test plan is not a list: {plan}")
+
+    directives = frozenset(TestCase.__dataclass_fields__.keys())
+
+    for index, test_case in enumerate(plan):
+        # Validate the test case structure.
+        if not isinstance(test_case, dict):
+            raise ValueError(f"Test case #{index + 1} is not a dict: {test_case}")
+        if not directives.issuperset(test_case):
+            raise ValueError(
+                f"Test case #{index + 1} contains invalid directives: "
+                f"{set(test_case) - directives}"
+            )
+
+        yield TestCase(**test_case)
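The plan consumed by `parse_test_plan()` is a YAML list of mappings whose keys are the `TestCase` field names above. A minimal sketch (the file name, binary path and expectations are illustrative):

```python
from pathlib import Path

from gha_utils.test_plan import parse_test_plan

# A single test case: call the CLI with --version and check its output.
plan_path = Path("test-plan.yaml")
plan_path.write_text(
    "- cli_parameters: --version\n"
    "  exit_code: 0\n"
    "  stdout_regex_matches: '\\d+\\.\\d+\\.\\d+'\n",
    encoding="UTF-8",
)

for test_case in parse_test_plan(plan_path):
    # __post_init__ normalizes the single string to ("--version",) and
    # validates the regex before the binary is ever invoked.
    test_case.check_cli_test("./my-cli", timeout=60)
```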
{gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gha-utils
-Version: 4.13.3
+Version: 4.14.0
 Summary: ⚙️ CLI helpers for GitHub Actions + reuseable workflows
 Author-email: Kevin Deldycke <kevin@deldycke.com>
 Project-URL: Homepage, https://github.com/kdeldycke/workflows
@@ -48,10 +48,11 @@ Description-Content-Type: text/markdown
 Requires-Dist: backports.strenum~=1.3.1; python_version < "3.11"
 Requires-Dist: boltons>=24.0.0
 Requires-Dist: bump-my-version>=0.21.0
-Requires-Dist: click-extra~=4.
+Requires-Dist: click-extra~=4.14.1
 Requires-Dist: packaging~=24.1
 Requires-Dist: PyDriller~=2.6
 Requires-Dist: pyproject-metadata~=0.9.0
+Requires-Dist: pyyaml~=6.0.0
 Requires-Dist: tomli~=2.0.1; python_version < "3.11"
 Requires-Dist: wcmatch>=8.5
 Provides-Extra: test
@@ -61,6 +62,8 @@ Requires-Dist: pytest-cases~=3.8.3; extra == "test"
 Requires-Dist: pytest-cov~=6.0.0; extra == "test"
 Requires-Dist: pytest-github-actions-annotate-failures~=0.3.0; extra == "test"
 Requires-Dist: pytest-randomly~=3.16.0; extra == "test"
+Provides-Extra: typing
+Requires-Dist: types-PyYAML~=6.0.12.9; extra == "typing"
 
 # `gha-utils` CLI + reusable workflows
 
gha_utils-4.14.0.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+gha_utils/__init__.py,sha256=XhzcFkFs63M5SvQuWYvvNyqSQ8Z5nylmJq0ga2xTJGo,866
+gha_utils/__main__.py,sha256=Dck9BjpLXmIRS83k0mghAMcYVYiMiFLltQdfRuMSP_Q,1703
+gha_utils/changelog.py,sha256=oahY88A9FRV14f1JSFKIiYrN_TS7Jo3QlljXqJbeuaE,5892
+gha_utils/cli.py,sha256=J6cqO-LlVXmLq0Z5Mmpv34ySbvVzVPqU1-c7iHqqITA,10348
+gha_utils/mailmap.py,sha256=naUqJYJnE3fLTjju1nd6WMm7ODiSaI2SHuJxRtmaFWs,6269
+gha_utils/matrix.py,sha256=_afJD0K-xZLNxwykVnUhD0Gj9cdO0Z43g3VHa-q_tkI,11941
+gha_utils/metadata.py,sha256=YbWPxNwWxcTMj67q6I4adFXgLF11YBv6urAzNopWYHE,48657
+gha_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gha_utils/test_plan.py,sha256=6Anw8Aa7rlIdJH4XdfZddWjh_Q2VK7Ehq4UJ0cHAt2c,9467
+gha_utils-4.14.0.dist-info/METADATA,sha256=aLM8hgjGOfr2tLuparflwcPmYp21mKW8bKSp3BNMMc8,20288
+gha_utils-4.14.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+gha_utils-4.14.0.dist-info/entry_points.txt,sha256=8bJOwQYf9ZqsLhBR6gUCzvwLNI9f8tiiBrJ3AR0EK4o,54
+gha_utils-4.14.0.dist-info/top_level.txt,sha256=C94Blb61YkkyPBwCdM3J_JPDjWH0lnKa5nGZeZ5M6yE,10
+gha_utils-4.14.0.dist-info/RECORD,,
gha_utils-4.13.3.dist-info/RECORD
DELETED
@@ -1,12 +0,0 @@
-gha_utils/__init__.py,sha256=G5wydTPH8gma2O_DIBo9f3tS3KUnJNeWKrLBxfSqltw,866
-gha_utils/__main__.py,sha256=Dck9BjpLXmIRS83k0mghAMcYVYiMiFLltQdfRuMSP_Q,1703
-gha_utils/changelog.py,sha256=oahY88A9FRV14f1JSFKIiYrN_TS7Jo3QlljXqJbeuaE,5892
-gha_utils/cli.py,sha256=1sgNwDQS9vL5eTUjFXWQVYxtV6LTLRyo4kMnl4Joqg4,9175
-gha_utils/mailmap.py,sha256=snSQBn1BDZ21783l4yCkQc3RLIxh5X6QCunFRkDj-24,6301
-gha_utils/metadata.py,sha256=jREHEbkwDm4OU4Pw_Q0KMRMbOy93_8Gjnccq3DWHq3s,51540
-gha_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gha_utils-4.13.3.dist-info/METADATA,sha256=A75KxK1e2Y-5S2vJGtJXWFrAwOSbeEaNGDBRtVi22QY,20179
-gha_utils-4.13.3.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-gha_utils-4.13.3.dist-info/entry_points.txt,sha256=8bJOwQYf9ZqsLhBR6gUCzvwLNI9f8tiiBrJ3AR0EK4o,54
-gha_utils-4.13.3.dist-info/top_level.txt,sha256=C94Blb61YkkyPBwCdM3J_JPDjWH0lnKa5nGZeZ5M6yE,10
-gha_utils-4.13.3.dist-info/RECORD,,
{gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/WHEEL
File without changes
{gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/entry_points.txt
File without changes
{gha_utils-4.13.3.dist-info → gha_utils-4.14.0.dist-info}/top_level.txt
File without changes