gha-utils 4.24.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gha_utils/__init__.py +20 -0
- gha_utils/__main__.py +49 -0
- gha_utils/changelog.py +146 -0
- gha_utils/cli.py +452 -0
- gha_utils/mailmap.py +184 -0
- gha_utils/matrix.py +291 -0
- gha_utils/metadata.py +1693 -0
- gha_utils/py.typed +0 -0
- gha_utils/test_plan.py +352 -0
- gha_utils-4.24.0.dist-info/METADATA +375 -0
- gha_utils-4.24.0.dist-info/RECORD +14 -0
- gha_utils-4.24.0.dist-info/WHEEL +5 -0
- gha_utils-4.24.0.dist-info/entry_points.txt +2 -0
- gha_utils-4.24.0.dist-info/top_level.txt +1 -0
gha_utils/mailmap.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
# Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
|
|
2
|
+
#
|
|
3
|
+
# This program is Free Software; you can redistribute it and/or
|
|
4
|
+
# modify it under the terms of the GNU General Public License
|
|
5
|
+
# as published by the Free Software Foundation; either version 2
|
|
6
|
+
# of the License, or (at your option) any later version.
|
|
7
|
+
#
|
|
8
|
+
# This program is distributed in the hope that it will be useful,
|
|
9
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
10
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
11
|
+
# GNU General Public License for more details.
|
|
12
|
+
#
|
|
13
|
+
# You should have received a copy of the GNU General Public License
|
|
14
|
+
# along with this program; if not, write to the Free Software
|
|
15
|
+
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
|
16
|
+
|
|
17
|
+
from __future__ import annotations
|
|
18
|
+
|
|
19
|
+
import logging
|
|
20
|
+
import sys
|
|
21
|
+
from dataclasses import dataclass, field
|
|
22
|
+
from functools import cached_property
|
|
23
|
+
from subprocess import run
|
|
24
|
+
|
|
25
|
+
from boltons.iterutils import unique
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass(order=True)
class Record:
    """A single identity-mapping entry of a ``.mailmap`` file."""

    # Keep the mapping as the first field so the dataclass-generated ordering
    # sorts records naturally, regardless of the pre_comment content.
    canonical: str = ""
    aliases: set[str] = field(default_factory=set)
    pre_comment: str = ""

    def __post_init__(self) -> None:
        """Normalize whitespace-only pre-comments (even multi-line ones) to an empty string."""
        if not self.pre_comment.strip():
            self.pre_comment = ""

    def __str__(self) -> str:
        """Render the record: pre-comments first, then the identity mapping.

        Aliases are sorted without case-sensitivity after the canonical
        identity, which always keeps the first position.
        """
        chunks: list[str] = []
        if self.pre_comment:
            chunks.append(self.pre_comment)
        if self.canonical:
            ordered_aliases = sorted(self.aliases, key=str.casefold)
            chunks.append(" ".join([self.canonical, *ordered_aliases]))
        return "\n".join(chunks)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class Mailmap:
    """Helpers to manipulate ``.mailmap`` files.

    ``.mailmap`` `file format is documented on Git website
    <https://git-scm.com/docs/gitmailmap>`_.
    """

    # Parsed mapping entries, kept in the order they were parsed or added.
    records: list[Record]

    def __init__(self) -> None:
        """Initialize the mailmap with an empty list of records."""
        self.records = []

    @staticmethod
    def split_identities(mapping: str) -> tuple[str, set[str]]:
        """Split a mapping of identities and normalize them.

        Returns the first identity as the canonical one, and the remaining
        deduplicated identities as a set of aliases.
        """
        identities = []
        for identity in map(str.strip, mapping.split(">")):
            # Skip blank strings produced by uneven spaces.
            if not identity:
                continue
            # NOTE(review): asserts are stripped under ``python -O``; an
            # explicit raise would be more robust, but callers may rely on
            # AssertionError, so the exception type is kept as-is.
            assert identity.count("<") == 1, f"Unexpected email format in {identity!r}"
            name, email = identity.split("<", maxsplit=1)
            identities.append(f"{name.strip()} <{email}>")

        assert len(identities), f"No identities found in {mapping!r}"

        # Deduplicate identities while preserving their insertion order.
        # dict.fromkeys is the stdlib equivalent of boltons' unique().
        identities = list(dict.fromkeys(identities))
        return identities[0], set(identities[1:])

    def parse(self, content: str) -> None:
        """Parse mailmap content and add it to the current list of records.

        Each non-empty, non-comment line is considered a mapping entry.

        The preceding lines of a mapping entry are kept attached to it as pre-comments,
        so the layout will be preserved on rendering, during which records are sorted.
        """
        logging.debug(f"Parsing:\n{content}")
        pre_lines = []
        for line in map(str.strip, content.splitlines()):
            # Comment lines are added as-is.
            if line.startswith("#"):
                pre_lines.append(line)
            # Blank lines are added as-is.
            elif not line:
                pre_lines.append(line)
            # Mapping entry, which marks the end of a block, so add it to the
            # list of mailmap records with its accumulated pre-comments.
            else:
                canonical, aliases = self.split_identities(line)
                record = Record(
                    pre_comment="\n".join(pre_lines),
                    canonical=canonical,
                    aliases=aliases,
                )
                logging.debug(record)
                pre_lines = []
                self.records.append(record)

    def find(self, identity: str) -> bool:
        """Returns ``True`` if the provided identity matched any record."""
        identity_token = identity.lower()
        for record in self.records:
            # Identity matching is case insensitive:
            # https://git-scm.com/docs/gitmailmap#_syntax
            if identity_token in map(str.lower, (record.canonical, *record.aliases)):
                return True
        return False

    @cached_property
    def git_contributors(self) -> set[str]:
        """Returns the set of all contributors found in the Git commit history.

        No normalization happens: all variations of authors and committers strings
        attached to all commits are considered.

        Exits the process with Git's stderr message if the ``git log`` call
        fails (e.g. not run from within a Git repository).

        For format output syntax, see:
        https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emaNem
        """
        contributors = set()

        git_cli = ("git", "log", "--pretty=format:%aN <%aE>%n%cN <%cE>")
        logging.debug(f"Run: {' '.join(git_cli)}")
        process = run(git_cli, capture_output=True, encoding="UTF-8")

        # Parse git CLI output.
        if process.returncode:
            sys.exit(process.stderr)
        for line in map(str.strip, process.stdout.splitlines()):
            if line:
                contributors.add(line)

        logging.debug(
            "Authors and committers found in Git history:\n"
            + "\n".join(sorted(contributors, key=str.casefold))
        )
        return contributors

    def update_from_git(self) -> None:
        """Add to internal records all missing contributors found in commit history.

        This method will refrain from adding contributors already registered as aliases.
        """
        for contributor in self.git_contributors:
            if not self.find(contributor):
                record = Record(canonical=contributor)
                logging.info(f"Add new identity {record}")
                self.records.append(record)
            else:
                logging.debug(f"Ignore existing identity {contributor}")

    def render(self) -> str:
        """Render internal records in Mailmap format.

        Records are sorted without case-sensitivity on their canonical identity.
        """
        # Nothing to render: guard against the (previously unguarded) index
        # access below, which raised IndexError on an empty record list.
        if not self.records:
            return ""

        # Extract the pre-comment from the first record, if any, so we can keep it
        # attached to the top of the file.
        top_comment = self.records[0].pre_comment
        if top_comment:
            top_comment += "\n"
        # Reset the pre-comment of the first record, so it doesn't get duplicated
        # in the output.
        self.records[0].pre_comment = ""

        return top_comment + "\n".join(
            map(str, sorted(self.records, key=lambda r: r.canonical.casefold()))
        )
|
gha_utils/matrix.py
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
# Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
|
|
2
|
+
#
|
|
3
|
+
# This program is Free Software; you can redistribute it and/or
|
|
4
|
+
# modify it under the terms of the GNU General Public License
|
|
5
|
+
# as published by the Free Software Foundation; either version 2
|
|
6
|
+
# of the License, or (at your option) any later version.
|
|
7
|
+
#
|
|
8
|
+
# This program is distributed in the hope that it will be useful,
|
|
9
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
10
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
11
|
+
# GNU General Public License for more details.
|
|
12
|
+
#
|
|
13
|
+
# You should have received a copy of the GNU General Public License
|
|
14
|
+
# along with this program; if not, write to the Free Software
|
|
15
|
+
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
|
16
|
+
|
|
17
|
+
from __future__ import annotations
|
|
18
|
+
|
|
19
|
+
import itertools
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
|
|
23
|
+
from boltons.dictutils import FrozenDict
|
|
24
|
+
from boltons.iterutils import unique
|
|
25
|
+
|
|
26
|
+
# Lightweight stand-in for ``typing.TYPE_CHECKING``: always false at runtime,
# so the import below is only followed by static type checkers — presumably to
# skip the runtime import cost; annotations stay lazy thanks to the
# ``from __future__ import annotations`` at the top of this module.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from collections.abc import Iterable, Iterator


# Keys carrying special meaning in a GitHub Actions job matrix, and which
# therefore cannot be used as variation IDs (enforced by Matrix._check_ids).
RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class Matrix:
    """A matrix as defined by GitHub's actions workflows.

    See GitHub official documentation on `how-to implement variations of jobs in a
    workflow
    <https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/running-variations-of-jobs-in-a-workflow>`_.

    This Matrix behave like a ``dict`` and works everywhere a ``dict`` would. Only that
    it is immutable and based on :class:`FrozenDict`. If you want to populate the matrix
    you have to use the following methods:

    - :meth:`add_variation`
    - :meth:`add_includes`
    - :meth:`add_excludes`

    The implementation respects the order in which items were inserted. This provides a
    natural and visual sorting that should ease the inspection and debugging of large
    matrix.
    """

    def __init__(self, *args, **kwargs) -> None:
        # NOTE(review): *args/**kwargs are accepted but ignored here — looks
        # like a dict-like constructor signature kept for compatibility;
        # confirm against callers before removing.
        self.variations: dict[str, tuple[str, ...]] = {}

        # Tuples are used to keep track of the insertion order and force immutability.
        self.include: tuple[dict[str, str], ...] = tuple()
        self.exclude: tuple[dict[str, str], ...] = tuple()

        # Number of jobs produced by the last solve() run.
        self._job_counter: int = 0

    def matrix(
        self, ignore_includes: bool = False, ignore_excludes: bool = False
    ) -> FrozenDict[str, tuple[str, ...] | tuple[dict[str, str], ...]]:
        """Returns a copy of the matrix.

        The special ``include`` and ``exclude`` directives will be added by default.
        You can selectively ignore them by passing the corresponding boolean parameters.
        """
        dict_copy = self.variations.copy()
        if not ignore_includes and self.include:
            dict_copy["include"] = self.include  # type: ignore[assignment]
        if not ignore_excludes and self.exclude:
            dict_copy["exclude"] = self.exclude  # type: ignore[assignment]
        return FrozenDict(dict_copy)

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.matrix()}>"

    def __str__(self) -> str:
        """Render matrix as a JSON string."""
        return json.dumps(self.matrix())

    def __getitem__(self, key: str) -> tuple[str, ...]:
        """Returns the values of a variation by its ID."""
        if key in self.variations:
            return self.variations[key]
        raise KeyError(f"Variation {key} not found in matrix")

    @staticmethod
    def _check_ids(*var_ids: str) -> None:
        """Raise ``ValueError`` on any ID colliding with a reserved keyword."""
        for var_id in var_ids:
            if var_id in RESERVED_MATRIX_KEYWORDS:
                raise ValueError(f"{var_id} cannot be used as a variation ID")

    def add_variation(self, variation_id: str, values: Iterable[str]) -> None:
        """Add values to a variation, creating the variation if necessary.

        Values are deduplicated while preserving their insertion order.

        Raises ``ValueError`` if the ID is reserved, no values are provided, or
        any value is not a string.
        """
        self._check_ids(variation_id)
        # Materialize the iterable first: a one-shot generator would otherwise
        # be consumed by the any() validation below and silently contribute no
        # values to the variation.
        values = list(values)
        if not values:
            raise ValueError(f"No variation values provided: {values}")
        if any(type(v) is not str for v in values):
            raise ValueError(f"Only strings are accepted in {values}")
        # Extend variation with values, and deduplicate them along the way.
        var_values = list(self.variations.get(variation_id, [])) + list(values)
        self.variations[variation_id] = tuple(unique(var_values))

    def _add_and_dedup_dicts(
        self, *new_dicts: dict[str, str]
    ) -> tuple[dict[str, str], ...]:
        """Deduplicate directive dicts, preserving order, after checking IDs."""
        self._check_ids(*(k for d in new_dicts for k in d))
        return tuple(
            dict(items) for items in unique((tuple(d.items()) for d in new_dicts))
        )

    def add_includes(self, *new_includes: dict[str, str]) -> None:
        """Add one or more ``include`` special directives to the matrix."""
        self.include = self._add_and_dedup_dicts(*self.include, *new_includes)

    def add_excludes(self, *new_excludes: dict[str, str]) -> None:
        """Add one or more ``exclude`` special directives to the matrix."""
        self.exclude = self._add_and_dedup_dicts(*self.exclude, *new_excludes)

    def all_variations(
        self,
        with_matrix: bool = True,
        with_includes: bool = False,
        with_excludes: bool = False,
    ) -> dict[str, tuple[str, ...]]:
        """Collect all variations encountered in the matrix.

        Extra variations mentioned in the special ``include`` and ``exclude``
        directives will be ignored by default.

        You can selectively expand or restrict the resulting inventory of variations by
        passing the corresponding ``with_matrix``, ``with_includes`` and
        ``with_excludes`` boolean filter parameters.
        """
        all_variations = {}
        if with_matrix:
            all_variations = {k: list(v) for k, v in self.variations.items()}

        # Fold the values of the selected directives into the inventory.
        for expand, directives in (
            (with_includes, self.include),
            (with_excludes, self.exclude),
        ):
            if expand:
                for value in directives:
                    for k, v in value.items():
                        all_variations.setdefault(k, []).append(v)

        return {k: tuple(unique(v)) for k, v in all_variations.items()}

    def product(
        self, with_includes: bool = False, with_excludes: bool = False
    ) -> Iterator[dict[str, str]]:
        """Only returns the combinations of the base matrix by default.

        You can optionally add any variation referenced in the ``include`` and
        ``exclude`` special directives.

        Respects the order of variations and their values.
        """
        all_variations = self.all_variations(
            with_includes=with_includes, with_excludes=with_excludes
        )
        if not all_variations:
            return
        # Cartesian product of (key, value) pairs, then rebuilt as dicts so
        # each combination keeps the variation insertion order.
        yield from map(
            dict,
            itertools.product(
                *(
                    tuple((variant_id, v) for v in variations)
                    for variant_id, variations in all_variations.items()
                )
            ),
        )

    def _count_job(self) -> None:
        """Track produced job count and log when GitHub's hard limit is passed."""
        self._job_counter += 1
        if self._job_counter > 256:
            logging.critical("GitHub job matrix limit of 256 jobs reached")

    def solve(self, strict: bool = False) -> Iterator[dict[str, str]]:
        """Returns all combinations and apply ``include`` and ``exclude`` constraints.

        .. caution::
            As per GitHub specifications, all ``include`` combinations are processed
            after ``exclude``. This allows you to use ``include`` to add back
            combinations that were previously excluded.
        """
        # GitHub jobs fails with the following message if the exclude directive is
        # referencing keys that are not present in the original base matrix:
        #   Invalid workflow file: .github/workflows/tests.yaml#L48
        #   The workflow is not valid.
        #   .github/workflows/tests.yaml (Line: 48, Col: 13): Matrix exclude key 'state'
        #   does not match any key within the matrix
        if strict:
            unreferenced_keys = set(
                self.all_variations(
                    with_matrix=False, with_includes=True, with_excludes=True
                )
            ).difference(self.variations)
            if unreferenced_keys:
                raise ValueError(
                    f"Matrix exclude keys {list(unreferenced_keys)} does not match any "
                    f"{self.variations.keys()} key within the matrix"
                )

        # Reset the number of combinations.
        self._job_counter = 0

        applicable_includes = []
        leftover_includes: list[dict[str, str]] = []

        # The matrix is empty, none of the include directive will match, so consider
        # all directives as un-applicable.
        if not self.variations:
            leftover_includes = list(self.include)

        # Search for include directives that matches the original matrix variations
        # without overwriting their values. Keep the left overs on the side.
        else:
            original_variations = self.all_variations()
            for include in self.include:
                # Keys shared between the include directive and the original matrix.
                keys_overlap = set(include).intersection(original_variations)
                # Collect include directives applicable to the original matrix.
                if (
                    # If all overlapping keys in the directive exactly match any value
                    # of the original matrix, then we are certain the directive can be
                    # applied without overwriting the original variations.
                    all(include[k] in original_variations[k] for k in keys_overlap)
                    # Same if no keys are shared, in which case these extra variations
                    # will be added to all original ones.
                    or not keys_overlap
                ):
                    applicable_includes.append(include)
                # Other directives are considered non-applicable and will be returned
                # as-is at the end of the process.
                else:
                    leftover_includes.append(include)

        # Iterates through all the variations of the original matrix, and act on the
        # matching exclude and include directives.
        for base_variations in self.product():
            # Skip the variation if it is fully matching at least one exclude directive.
            exclusion_candidate = False
            if any(
                all(
                    exclude[k] == base_variations[k]
                    for k in set(exclude).intersection(base_variations)
                )
                for exclude in self.exclude
            ):
                exclusion_candidate = True

            # Expand and/or extend the original variation set with applicable include
            # directives.
            updated_variations = base_variations.copy()
            for include in applicable_includes:
                # Check if the include directive is completely disjoint to the
                # variations of the original matrix. If that's the case, then we are
                # supposed to augment the current variation with this include, as it
                # has already been identified as applicable. But only do that if the
                # updated variation has not been already updated with a previously
                # evaluated, more targeted include directive.
                if set(include).isdisjoint(base_variations):
                    if set(include).isdisjoint(updated_variations):
                        updated_variations.update(include)
                    continue

                # Expand the base variation set with the fully matching include
                # directive.
                if all(
                    include[k] == base_variations[k]
                    for k in set(include).intersection(base_variations)
                ):
                    # Re-instate the variation set as a valid candidate since we found
                    # an include directive that is explicitly referring to it,
                    # resurrecting it from the dead.
                    exclusion_candidate = False
                    updated_variations.update(include)

            if not exclusion_candidate:
                self._count_job()
                yield updated_variations

        # Return as-is all the includes that were not applied to the original matrix.
        for variation in leftover_includes:
            self._count_job()
            yield variation
|