gha-utils 4.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gha-utils might be problematic. Click here for more details.

gha_utils/metadata.py ADDED
@@ -0,0 +1,1032 @@
1
+ # Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
2
+ #
3
+ # This program is Free Software; you can redistribute it and/or
4
+ # modify it under the terms of the GNU General Public License
5
+ # as published by the Free Software Foundation; either version 2
6
+ # of the License, or (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with this program; if not, write to the Free Software
15
+ # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
16
+
17
+ """Extract metadata from repository and Python projects to be used by GitHub workflows.
18
+
19
+ The following variables are `printed to the environment file
20
+ <https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files>`_:
21
+
22
+ ```text
23
+ new_commits=346ce664f055fbd042a25ee0b7e96702e95 6f27db47612aaee06fdf08744b09a9f5f6c2
24
+ release_commits=6f27db47612aaee06fdf08744b09a9f5f6c2
25
+ python_files=".github/update_mailmap.py" ".github/metadata.py" "setup.py"
26
+ doc_files="changelog.md" "readme.md" "docs/license.md"
27
+ is_python_project=true
28
+ uv_requirement_params=--requirement pyproject.toml
29
+ package_name=click-extra
30
+ blacken_docs_params=--target-version py37 --target-version py38
31
+ ruff_py_version=py37
32
+ mypy_params=--python-version 3.7
33
+ current_version=2.0.1
34
+ released_version=2.0.0
35
+ is_sphinx=true
36
+ active_autodoc=true
37
+ release_notes=[🐍 Available on PyPi](https://pypi.org/project/click-extra/2.21.3).
38
+ new_commits_matrix={'commit': ['346ce664f055fbd042a25ee0b7e96702e95',
39
+ '6f27db47612aaee06fdf08744b09a9f5f6c2'],
40
+ 'include': [{'commit': '346ce664f055fbd042a25ee0b7e96702e95',
41
+ 'short_sha': '346ce66',
42
+ 'current_version': '2.0.1'},
43
+ {'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
44
+ 'short_sha': '6f27db4',
45
+ 'current_version': '2.0.0'}]}
46
+ release_commits_matrix={'commit': ['6f27db47612aaee06fdf08744b09a9f5f6c2'],
47
+ 'include': [{'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
48
+ 'short_sha': '6f27db4',
49
+ 'current_version': '2.0.0'}]}
50
+ nuitka_matrix={'entry_point': ['mpm'],
51
+ 'os': ['ubuntu-22.04', 'macos-13', 'windows-2022'],
52
+ 'include': [{'entry_point': 'mpm',
53
+ 'cli_id': 'mpm',
54
+ 'module_id': 'meta_package_manager.__main__',
55
+ 'callable_id': 'main',
56
+ 'module_path': 'meta_package_manager/__main__.py'},
57
+ {'os': 'ubuntu-22.04',
58
+ 'platform_id': 'linux',
59
+ 'extension': 'bin',
60
+ 'extra_python_params': ''},
61
+ {'os': 'macos-13',
62
+ 'platform_id': 'macos',
63
+ 'extension': 'bin',
64
+ 'extra_python_params': ''},
65
+ {'os': 'windows-2022',
66
+ 'platform_id': 'windows',
67
+ 'extension': 'exe',
68
+ 'extra_python_params': '-X utf8'},
69
+ {'entry_point': 'mpm',
70
+ 'os': 'ubuntu-22.04',
71
+ 'arch': 'x64',
72
+ 'bin_name': 'mpm-linux-x64-build-6f27db4.bin'},
73
+ {'entry_point': 'mpm',
74
+ 'os': 'macos-14',
75
+ 'arch': 'arm64',
76
+ 'bin_name': 'mpm-macos-arm64-build-6f27db4.bin'},
77
+ {'entry_point': 'mpm',
78
+ 'os': 'macos-13',
79
+ 'arch': 'x64',
80
+ 'bin_name': 'mpm-macos-x64-build-6f27db4.bin'},
81
+ {'entry_point': 'mpm',
82
+ 'os': 'windows-2022',
83
+ 'arch': 'x64',
84
+ 'bin_name': 'mpm-windows-x64-build-6f27db4.exe'}]}
85
+ ```
86
+
87
+ .. warning::
88
+
89
+ The ``new_commits_matrix``, ``release_commits_matrix`` and ``nuitka_matrix``
90
+ variables in the block above are pretty-printed for readability. They are not
91
+ actually formatted this way in the environment file, but inlined.
92
+ """
93
+
94
+ from __future__ import annotations
95
+
96
+ import ast
97
+ import json
98
+ import logging
99
+ import os
100
+ import re
101
+ import sys
102
+ from collections.abc import Iterable
103
+ from functools import cached_property
104
+ from itertools import product
105
+ from pathlib import Path
106
+ from random import randint
107
+ from re import escape
108
+ from typing import Any, Iterator, cast
109
+
110
+ if sys.version_info >= (3, 11):
111
+ import tomllib
112
+ else:
113
+ import tomli as tomllib # type: ignore[import-not-found]
114
+ from enum import Enum
115
+
116
+ from black.mode import TargetVersion
117
+ from bumpversion.config import get_configuration # type: ignore[import-untyped]
118
+ from bumpversion.config.files import find_config_file # type: ignore[import-untyped]
119
+ from bumpversion.show import resolve_name # type: ignore[import-untyped]
120
+ from mypy.defaults import PYTHON3_VERSION_MIN
121
+ from packaging.version import Version
122
+ from pydriller import Commit, Git, Repository # type: ignore[import]
123
+ from pyproject_metadata import ConfigurationError, StandardMetadata
124
+ from wcmatch.glob import (
125
+ BRACE,
126
+ DOTGLOB,
127
+ FOLLOW,
128
+ GLOBSTAR,
129
+ GLOBTILDE,
130
+ NEGATE,
131
+ NODIR,
132
+ iglob,
133
+ )
134
+
135
SHORT_SHA_LENGTH = 7
"""Default SHA length hard-coded to ``7``.

.. caution::

    The `default is subject to change <https://stackoverflow.com/a/21015031>`_ and
    depends on the size of the repository.
"""

# GitHub workflow matrix directives that carry special semantics and therefore
# cannot be used as regular variant dimensions.
RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
145
+
146
+
147
class Dialects(Enum):
    """Output dialects supported by the metadata formatter."""

    GITHUB = "github"
    PLAIN = "plain"
152
+
153
+
154
class Matrix(dict):
    """A mapping describing a GitHub workflow matrix.

    Behaves exactly like a ``dict``, but serializes itself to JSON when cast to
    a string, which is the form expected by workflow files.
    """

    def __str__(self) -> str:
        """Return the matrix encoded as a JSON string."""
        return json.dumps(self)
160
+
161
+
162
class Metadata:
    """Metadata class."""

    def __init__(self) -> None:
        """Initialize internal variables."""
        # None indicates the is_python_project variable has not been evaluated yet.
        self._is_python_project: bool | None = None

    # Canonical locations of project configuration files, relative to the
    # current working directory (the repository root in CI).
    pyproject_path = Path() / "pyproject.toml"
    sphinx_conf_path = Path() / "docs" / "conf.py"
172
+
173
+ @cached_property
174
+ def in_ci_env(self) -> bool:
175
+ """Returns ``True`` if the code is executed in a GitHub Actions runner.
176
+
177
+ Other CI are available at:
178
+ https://github.com/cucumber/ci-environment/blob/main/python/src/ci_environment/CiEnvironments.json
179
+ """
180
+ return bool("GITHUB_RUN_ID" in os.environ)
181
+
182
+ @cached_property
183
+ def github_context(self) -> dict[str, Any]:
184
+ """Load GitHub context from the environment.
185
+
186
+ Expect ``GITHUB_CONTEXT`` to be set as part of the environment. I.e., adds the
187
+ following as part of your job step calling this script:
188
+
189
+ .. code-block:: yaml
190
+
191
+ - name: Project metadata
192
+ id: project-metadata
193
+ env:
194
+ GITHUB_CONTEXT: ${{ toJSON(github) }}
195
+ run: >
196
+ python -c "$(curl -fsSL
197
+ https://raw.githubusercontent.com/kdeldycke/workflows/main/.github/metadata.py)"
198
+ """
199
+ if "GITHUB_CONTEXT" not in os.environ:
200
+ if self.in_ci_env:
201
+ message = (
202
+ "Missing GitHub context in environment. "
203
+ "Did you forget to set GITHUB_CONTEXT?"
204
+ )
205
+ logging.warning(message)
206
+ return {}
207
+ context = json.loads(os.environ["GITHUB_CONTEXT"])
208
+ logging.debug("--- GitHub context ---")
209
+ logging.debug(json.dumps(context, indent=4))
210
+ return context
211
+
212
    def commit_matrix(self, commits: Iterable[Commit] | None) -> Matrix | None:
        """Pre-compute a matrix of commits.

        .. danger::
            This method temporarily modify the state of the repository to compute
            version metadata from the past.

            To prevent any loss of uncommitted data, it stashes and unstash the
            local changes between checkouts.

        The list of commits is augmented with long and short SHA values, as well as
        current version. Most recent commit is first, oldest is last.

        Returns a ready-to-use matrix structure:

        .. code-block:: python
            {
                "commit": [
                    "346ce664f055fbd042a25ee0b7e96702e95",
                    "6f27db47612aaee06fdf08744b09a9f5f6c2",
                ],
                "include": [
                    {
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "short_sha": "346ce66",
                        "current_version": "2.0.1",
                    },
                    {
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "short_sha": "6f27db4",
                        "current_version": "2.0.0",
                    },
                ],
            }
        """
        if not commits:
            return None

        # Save the initial commit reference and SHA of the repository. The reference is
        # either the canonical active branch name (i.e. ``main``), or the commit SHA if
        # the current HEAD commit is detached from a branch.
        git = Git(".")
        init_sha = git.repo.head.commit.hexsha
        if git.repo.head.is_detached:
            init_ref = init_sha
        else:
            init_ref = git.repo.active_branch.name

        sha_list = []
        include_list = []
        for commit in commits:
            sha = commit.hash

            # Checkout the target commit so we can read the version associated with it,
            # but stash local changes first. Do not perform the stash/checkout dance if
            # the repository is already at the target commit.
            need_checkout = bool(git.repo.head.commit.hexsha != sha)
            # Refuse to touch the working tree outside of CI: a failed stash/pop
            # on a developer machine could lose uncommitted work.
            if need_checkout and not self.in_ci_env:
                raise RuntimeError(
                    "Local repository manipulations only allowed in CI environment"
                )
            if need_checkout:
                git.repo.git.stash()
                git.checkout(sha)
            # Read the version as recorded by bump-my-version at that commit.
            current_version = Metadata.get_current_version()
            if need_checkout:
                git.repo.git.stash("pop")

            sha_list.append(sha)
            include_list.append({
                "commit": sha,
                "short_sha": sha[:SHORT_SHA_LENGTH],
                "current_version": current_version,
            })

        # Restore the repository to its initial commit if its not in the initial state.
        if git.repo.head.commit.hexsha != init_sha:
            git.checkout(init_ref)

        return Matrix({
            "commit": sha_list,
            "include": include_list,
        })
295
+
296
+ @cached_property
297
+ def commit_range(self) -> tuple[str, str] | None:
298
+ """Range of commits bundled within the triggering event.
299
+
300
+ A workflow run is triggered by a singular event, which might encapsulate one or
301
+ more commits. This means the workflow will only run once on the last commit,
302
+ even if multiple new commits where pushed.
303
+
304
+ This is annoying when we want to keep a carefully constructed commit history,
305
+ and want to run the workflow on each commit. The typical example is a pull
306
+ request that is merged upstream but we'd like to produce artifacts (builds,
307
+ packages, etc.) for each individual commit.
308
+
309
+ The default ``GITHUB_SHA`` environment variable is useless as it only points to
310
+ the last commit. We need to inspect the commit history to find all new one. New
311
+ commits needs to be fetched differently in ``push`` and ``pull_requests``
312
+ events.
313
+
314
+ .. seealso::
315
+
316
+ - https://stackoverflow.com/a/67204539
317
+ - https://stackoverflow.com/a/62953566
318
+ - https://stackoverflow.com/a/61861763
319
+ """
320
+ if not self.github_context:
321
+ return None
322
+ # Pull request event.
323
+ if self.github_context["base_ref"]:
324
+ start = f"origin/{self.github_context['base_ref']}"
325
+ # We need to checkout the HEAD commit instead of the artificial merge
326
+ # commit introduced by the pull request.
327
+ end = self.github_context["event"]["pull_request"]["head"]["sha"]
328
+ # Push event.
329
+ else:
330
+ start = self.github_context["event"]["before"]
331
+ end = self.github_context["sha"]
332
+ logging.debug("--- Commit range ---")
333
+ logging.debug(f"Range start: {start}")
334
+ logging.debug(f"Range end: {end}")
335
+ return start, end
336
+
337
+ @cached_property
338
+ def current_commit(self) -> Commit | None:
339
+ """Returns the current ``Commit`` object."""
340
+ return next(Repository(".", single="HEAD").traverse_commits())
341
+
342
+ @cached_property
343
+ def current_commit_matrix(self) -> Matrix | None:
344
+ """Pre-computed matrix with long and short SHA values of the current commit."""
345
+ return self.commit_matrix((self.current_commit,))
346
+
347
+ @cached_property
348
+ def new_commits(self) -> tuple[Commit, ...] | None:
349
+ """Returns list of ``Commit`` objects bundled within the triggering event."""
350
+ if not self.commit_range:
351
+ return None
352
+ start, end = self.commit_range
353
+ # Remove the last commit, as the commit range is inclusive.
354
+ return tuple(
355
+ Repository(
356
+ ".",
357
+ from_commit=start,
358
+ to_commit=end,
359
+ order="reverse",
360
+ ).traverse_commits(),
361
+ )[:-1]
362
+
363
    @cached_property
    def new_commits_matrix(self) -> Matrix | None:
        """Pre-computed matrix with long and short SHA values of new commits.

        Returns ``None`` when no new commits were detected in the triggering event.
        """
        return self.commit_matrix(self.new_commits)
367
+
368
+ @cached_property
369
+ def new_commits_hash(self) -> tuple[str, ...] | None:
370
+ """List all hashes of new commits."""
371
+ return (
372
+ cast(tuple[str, ...], self.new_commits_matrix["commit"])
373
+ if self.new_commits_matrix
374
+ else None
375
+ )
376
+
377
+ @cached_property
378
+ def release_commits(self) -> tuple[Commit, ...] | None:
379
+ """Returns list of ``Commit`` objects to be tagged within the triggering event.
380
+
381
+ We cannot identify a release commit based the presence of a ``vX.Y.Z`` tag
382
+ alone. That's because it is not present in the ``prepare-release`` pull request
383
+ produced by the ``changelog.yaml`` workflow. The tag is produced later on by
384
+ the ``release.yaml`` workflow, when the pull request is merged to ``main``.
385
+
386
+ Our best second option is to identify a release based on the full commit
387
+ message, based on the template used in the ``changelog.yaml`` workflow.
388
+ """
389
+ if not self.new_commits:
390
+ return None
391
+ return tuple(
392
+ commit
393
+ for commit in self.new_commits
394
+ if re.fullmatch(
395
+ r"^\[changelog\] Release v[0-9]+\.[0-9]+\.[0-9]+$",
396
+ commit.msg,
397
+ )
398
+ )
399
+
400
    @cached_property
    def release_commits_matrix(self) -> Matrix | None:
        """Pre-computed matrix with long and short SHA values of release commits.

        Returns ``None`` when no release commit was detected.
        """
        return self.commit_matrix(self.release_commits)
404
+
405
+ @cached_property
406
+ def release_commits_hash(self) -> tuple[str, ...] | None:
407
+ """List all hashes of release commits."""
408
+ return (
409
+ cast(tuple[str, ...], self.release_commits_matrix["commit"])
410
+ if self.release_commits_matrix
411
+ else None
412
+ )
413
+
414
+ @staticmethod
415
+ def glob_files(*patterns: str) -> Iterator[str]:
416
+ """Glob files in patterns, while optionally ignoring some."""
417
+ yield from iglob(
418
+ patterns,
419
+ flags=NODIR | GLOBSTAR | DOTGLOB | GLOBTILDE | BRACE | FOLLOW | NEGATE,
420
+ )
421
+
422
+ @cached_property
423
+ def python_files(self) -> Iterator[str]:
424
+ """Returns a list of python files."""
425
+ yield from self.glob_files("**/*.py", "!.venv/**")
426
+
427
+ @cached_property
428
+ def requirement_files(self) -> Iterator[str]:
429
+ """Returns a list of requirement files supported by uv."""
430
+ yield from self.glob_files(
431
+ "**/pyproject.toml", "*requirements.txt", "requirements/*.txt"
432
+ )
433
+
434
+ @cached_property
435
+ def doc_files(self) -> Iterator[str]:
436
+ """Returns a list of doc files."""
437
+ yield from self.glob_files("**/*.{md,markdown,rst,tex}", "!.venv/**")
438
+
439
+ @cached_property
440
+ def uv_requirement_params(self) -> Iterator[str]:
441
+ return (f"--requirement {req}" for req in self.requirement_files)
442
+
443
+ @property
444
+ def is_python_project(self):
445
+ """Returns ``true`` if repository is a Python project.
446
+
447
+ Presence of a ``pyproject.toml`` file is not enough, as 3rd party tools can use
448
+ that file to store their own configuration.
449
+ """
450
+ return self._is_python_project
451
+
452
+ @is_python_project.getter
453
+ def is_python_project(self):
454
+ """Try to read and validate the ``pyproject.toml`` file on access to the
455
+ ``is_python_project`` property.
456
+ """
457
+ if self._is_python_project is None:
458
+ self.pyproject
459
+ return self._is_python_project
460
+
461
+ @cached_property
462
+ def pyproject(self) -> StandardMetadata | None:
463
+ """Returns metadata stored in the ``pyproject.toml`` file.
464
+
465
+ Also sets the internal ``_is_python_project`` value to ``True`` if the
466
+ ``pyproject.toml`` exists and respects the standards. ``False`` otherwise.
467
+ """
468
+ if self.pyproject_path.exists() and self.pyproject_path.is_file():
469
+ toml = tomllib.loads(self.pyproject_path.read_text())
470
+ try:
471
+ metadata = StandardMetadata.from_pyproject(toml)
472
+ self._is_python_project = True
473
+ return metadata
474
+ except ConfigurationError:
475
+ pass
476
+
477
+ self._is_python_project = False
478
+ return None
479
+
480
+ @cached_property
481
+ def package_name(self) -> str | None:
482
+ """Returns package name as published on PyPi."""
483
+ if self.pyproject and self.pyproject.canonical_name:
484
+ return self.pyproject.canonical_name
485
+ return None
486
+
487
+ @cached_property
488
+ def script_entries(self) -> list[tuple[str, str, str]]:
489
+ """Returns a list of tuples containing the script name, its module and callable.
490
+
491
+ Results are derived from the script entries of ``pyproject.toml``. So that:
492
+
493
+ .. code-block:: toml
494
+ [project.scripts]
495
+ mdedup = "mail_deduplicate.cli:mdedup"
496
+ mpm = "meta_package_manager.__main__:main"
497
+
498
+ Will yields the following list:
499
+
500
+ .. code-block:: python
501
+ (
502
+ ("mdedup", "mail_deduplicate.cli", "mdedup"),
503
+ ("mpm", "meta_package_manager.__main__", "main"),
504
+ ...,
505
+ )
506
+ """
507
+ entries = []
508
+ if self.pyproject:
509
+ for cli_id, script in self.pyproject.scripts.items():
510
+ module_id, callable_id = script.split(":")
511
+ entries.append((cli_id, module_id, callable_id))
512
+ # Double check we do not have duplicate entries.
513
+ all_cli_ids = [cli_id for cli_id, _, _ in entries]
514
+ assert len(set(all_cli_ids)) == len(all_cli_ids)
515
+ return entries
516
+
517
+ @cached_property
518
+ def py_target_versions(self) -> tuple[Version, ...] | None:
519
+ """Generates the list of Python target versions.
520
+
521
+ This is based on Black's support matrix.
522
+ """
523
+ if self.pyproject and self.pyproject.requires_python:
524
+ minor_range = sorted(v.value for v in TargetVersion)
525
+ black_range = (Version(f"3.{minor}") for minor in minor_range)
526
+ return tuple(
527
+ version
528
+ for version in black_range
529
+ if self.pyproject.requires_python.contains(version)
530
+ )
531
+ return None
532
+
533
+ @cached_property
534
+ def blacken_docs_params(self) -> tuple[str, ...] | None:
535
+ """Generates ``blacken-docs`` parameters.
536
+
537
+ `Blacken-docs reuses Black's --target-version pyXY parameters
538
+ <https://github.com/adamchainz/blacken-docs/blob/cd4e60f/src/blacken_docs/__init__.py#L263-L271>`_,
539
+ and needs to be fed with a subset of these:
540
+ - ``--target-version py33``
541
+ - ``--target-version py34``
542
+ - ``--target-version py35``
543
+ - ``--target-version py36``
544
+ - ``--target-version py37``
545
+ - ``--target-version py38``
546
+ - ``--target-version py39``
547
+ - ``--target-version py310``
548
+ - ``--target-version py311``
549
+ - ``--target-version py312``
550
+
551
+ As mentioned in Black usage, you should `include all Python versions that you
552
+ want your code to run under
553
+ <https://github.com/psf/black/issues/751#issuecomment-473066811>`_.
554
+ """
555
+ if self.py_target_versions:
556
+ return tuple(
557
+ f"--target-version py{version.major}{version.minor}"
558
+ for version in self.py_target_versions
559
+ )
560
+ return None
561
+
562
+ @cached_property
563
+ def ruff_py_version(self) -> str | None:
564
+ """Returns the oldest Python version targeted.
565
+
566
+ .. caution::
567
+
568
+ Unlike ``blacken-docs``, `ruff doesn't support multiple
569
+ --target-version values
570
+ <https://github.com/astral-sh/ruff/issues/2857#issuecomment-1428100515>`_,
571
+ and `only supports the minimum Python version
572
+ <https://github.com/astral-sh/ruff/issues/2519>`_.
573
+ """
574
+ if self.py_target_versions:
575
+ version = self.py_target_versions[0]
576
+ return f"py{version.major}{version.minor}"
577
+ return None
578
+
579
+ @cached_property
580
+ def mypy_params(self) -> str | None:
581
+ """Generates `mypy` parameters.
582
+
583
+ Mypy needs to be fed with this parameter: ``--python-version x.y``.
584
+ """
585
+ if self.py_target_versions:
586
+ # Compare to Mypy's lowest supported version of Python dialect.
587
+ major, minor = max(
588
+ PYTHON3_VERSION_MIN,
589
+ min((v.major, v.minor) for v in self.py_target_versions),
590
+ )
591
+ return f"--python-version {major}.{minor}"
592
+ return None
593
+
594
+ @staticmethod
595
+ def get_current_version() -> str | None:
596
+ """Returns the current version as managed by bump-my-version.
597
+
598
+ Same as calling the CLI:
599
+
600
+ .. code-block:: shell-session
601
+ $ bump-my-version show current_version
602
+ """
603
+ conf_file = find_config_file()
604
+ if not conf_file:
605
+ return None
606
+ config = get_configuration(conf_file)
607
+ config_dict = config.model_dump()
608
+ return str(resolve_name(config_dict, "current_version"))
609
+
610
+ @cached_property
611
+ def current_version(self) -> str | None:
612
+ """Returns the current version.
613
+
614
+ I.e. the version of the most recent commit.
615
+ """
616
+ version = None
617
+ if self.new_commits_matrix:
618
+ details = self.new_commits_matrix.get("include")
619
+ if details:
620
+ version = details[0].get("current_version") # type: ignore[union-attr]
621
+ return version
622
+
623
+ @cached_property
624
+ def released_version(self) -> str | None:
625
+ """Returns the version of the release commit."""
626
+ version = None
627
+ if self.release_commits_matrix:
628
+ details = self.release_commits_matrix.get("include")
629
+ if details:
630
+ # This script is only designed for at most 1 release in the list of new
631
+ # commits.
632
+ assert len(details) == 1
633
+ version = details[0].get("current_version") # type: ignore[union-attr]
634
+ return version
635
+
636
+ @cached_property
637
+ def is_sphinx(self) -> bool:
638
+ """Returns true if the Sphinx config file is present."""
639
+ # The Sphinx config file is present, that's enough for us.
640
+ return self.sphinx_conf_path.exists() and self.sphinx_conf_path.is_file()
641
+
642
+ @cached_property
643
+ def active_autodoc(self) -> bool:
644
+ """Returns true if there are active Sphinx extensions."""
645
+ if self.is_sphinx:
646
+ # Look for list of active Sphinx extensions.
647
+ for node in ast.parse(self.sphinx_conf_path.read_bytes()).body:
648
+ if isinstance(node, ast.Assign) and isinstance(
649
+ node.value,
650
+ ast.List | ast.Tuple, # type: ignore[operator]
651
+ ):
652
+ extension_found = "extensions" in (
653
+ t.id # type: ignore[attr-defined]
654
+ for t in node.targets
655
+ )
656
+ if extension_found:
657
+ elements = (
658
+ e.value
659
+ for e in node.value.elts
660
+ if isinstance(e, ast.Constant)
661
+ )
662
+ if "sphinx.ext.autodoc" in elements:
663
+ return True
664
+ return False
665
+
666
    @cached_property
    def nuitka_matrix(self) -> Matrix | None:
        """Pre-compute a matrix for Nuitka compilation workflows.

        Combine the variations of:
        - all new commits
        - all entry points
        - for the 3 main OSes
        - for a set of architectures

        Returns a ready-to-use matrix structure, where each variation is augmented with
        specific extra parameters by the way of matching parameters in the `include`
        directive:

        .. code-block:: python
            {
                "entry_point": ["mpm"],
                "commit": ["346ce664f055fbd042a25ee0b7e96702e95", ...],
                "os": ["ubuntu-22.04", "macos-13", "windows-2022"],
                "arch": ["x64"],
                "include": [
                    # Per-entry-point metadata.
                    {
                        "entry_point": "mpm",
                        "cli_id": "mpm",
                        "module_id": "meta_package_manager.__main__",
                        "callable_id": "main",
                        "module_path": "meta_package_manager/__main__.py",
                    },
                    # Per-commit metadata.
                    {
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "short_sha": "346ce66",
                    },
                    # Per-OS metadata.
                    {
                        "os": "ubuntu-22.04",
                        "platform_id": "linux",
                        "extension": "bin",
                        "extra_python_params": "",
                    },
                    # ... and one pre-computed binary name per full variant:
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "ubuntu-22.04",
                        "arch": "x64",
                        "bin_name": "mpm-linux-x64-build-346ce66.bin",
                    },
                    # ...
                ],
            }

        Returns ``None`` if the project has no CLI entry points to compile.
        """
        # Only produce a matrix if the project is providing CLI entry points.
        if not self.script_entries:
            return None

        # In the future, we might support and bridge that matrix with the full CPython
        # platform support list. See target triples at:
        # https://peps.python.org/pep-0011/
        # https://snarky.ca/webassembly-and-its-platform-targets/
        matrix: dict[str, list[Any]] = {
            "entry_point": [],
            # Run the compilation only the latest supported version of each OS.
            # The exception is macOS, as macos-14 is arm64 and macos-13 is x64, so we
            # need both to target the two architectures.
            "os": [
                "ubuntu-22.04",
                "macos-14",
                "macos-13",
                "windows-2022",
            ],
            # Extra parameters.
            "include": [],
        }

        # Augment each entry point with some metadata.
        extra_entry_point_params = []
        for cli_id, module_id, callable_id in self.script_entries:
            # CLI ID is supposed to be unique, we'll use that as a key.
            matrix["entry_point"].append(cli_id)
            # Derive CLI module path from its ID.
            # XXX We consider here the module is directly callable, because Nuitka
            # doesn't seems to support the entry-point notation.
            module_path = Path(f"{module_id.replace('.', '/')}.py")
            assert module_path.exists()
            extra_entry_point_params.append(
                {
                    "entry_point": cli_id,
                    "cli_id": cli_id,
                    "module_id": module_id,
                    "callable_id": callable_id,
                    "module_path": str(module_path),
                },
            )
        matrix["include"].extend(extra_entry_point_params)

        # We'd like to run a build for each new commit bundled in the action trigger.
        # If no new commits are detected, it's because we are not in a GitHub workflow
        # event, so we'll fallback to the current commit and only build for it.
        build_commit_matrix = (
            self.new_commits_matrix
            if self.new_commits_matrix
            else self.current_commit_matrix
        )
        assert build_commit_matrix
        # Extend the matrix with a new dimension: a list of commits.
        matrix["commit"] = build_commit_matrix["commit"]
        matrix["include"].extend(build_commit_matrix["include"])

        # Add platform-specific variables.
        # Arch values are inspired from those specified for self-hosted runners:
        # https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners#architectures
        # Arch is not a matrix variant because support is not widely distributed
        # between different OS.
        extra_os_params = [
            {
                "os": "ubuntu-22.04",
                "platform_id": "linux",
                "arch": "x64",
                "extension": "bin",
                "extra_python_params": "",
            },
            {
                "os": "macos-14",
                "platform_id": "macos",
                "arch": "arm64",
                "extension": "bin",
                "extra_python_params": "",
            },
            {
                "os": "macos-13",
                "platform_id": "macos",
                "arch": "x64",
                "extension": "bin",
                "extra_python_params": "",
            },
            {
                "os": "windows-2022",
                "platform_id": "windows",
                "arch": "x64",
                "extension": "exe",
                # XXX "-X utf8" parameter is a workaround for Windows runners
                # redirecting the output of commands to files. See:
                # https://github.com/databrickslabs/dbx/issues/455#issuecomment-1312770919
                # https://github.com/pallets/click/issues/2121#issuecomment-1312773882
                # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15
                # https://github.com/Nuitka/Nuitka/blob/ca1ec9e/nuitka/utils/ReExecute.py#L73-L74
                "extra_python_params": "-X utf8",
            },
        ]
        matrix["include"].extend(extra_os_params)

        # Check that the extra parameters in reserved directives do not collide
        # with the reserved directive names themselves.
        all_extra_keys = set().union(
            *(
                extras.keys()
                for reserved_key in RESERVED_MATRIX_KEYWORDS
                if reserved_key in matrix
                for extras in matrix[reserved_key]
            ),
        )
        assert all_extra_keys.isdisjoint(RESERVED_MATRIX_KEYWORDS)

        # Produce all variations encoded by the matrix, by skipping the special
        # directives.
        all_variations = tuple(
            tuple((variant_id, value) for value in variant_values)
            for variant_id, variant_values in matrix.items()
            if variant_id not in RESERVED_MATRIX_KEYWORDS
        )

        # Emulate collection and aggregation of the 'include' directive to all
        # variations produced by the matrix.
        for variant in product(*all_variations):
            variant_dict = dict(variant)

            # Check each extra parameters from the 'include' directive and accumulate
            # the matching ones to the variant.
            full_variant = variant_dict.copy()
            for extra_params in matrix["include"]:
                # Check if the variant match the extra parameters.
                dimensions_to_match = set(variant_dict).intersection(extra_params)
                d0 = {key: variant_dict[key] for key in dimensions_to_match}
                d1 = {key: extra_params[key] for key in dimensions_to_match}
                # Extra parameters are matching the current variant, merge their values.
                if d0 == d1:
                    full_variant.update(extra_params)

            # Add to the 'include' directive a new extra parameter that match the
            # current variant.
            extra_name_param = variant_dict.copy()
            # Generate for Nuitka the binary file name to be used that is unique to
            # this variant.
            extra_name_param["bin_name"] = (
                "{cli_id}-{platform_id}-{arch}-build-{short_sha}.{extension}"
            ).format(**full_variant)
            matrix["include"].append(extra_name_param)

        return Matrix(matrix)
915
+
916
+ @cached_property
917
+ def release_notes(self) -> str | None:
918
+ """Generate notes to be attached to the GitHub release."""
919
+ # Produce the release notes of the release version or the current one.
920
+ version = self.released_version
921
+ if not version:
922
+ version = self.current_version
923
+ if not version:
924
+ return None
925
+
926
+ # Extract the changelog entry corresponding to the release version, and located
927
+ # between the first two `##` second-level markdown titles.
928
+ changes = ""
929
+ match = re.search(
930
+ rf"^##(?P<title>.+{escape(version)} .+?)\n(?P<changes>.*?)\n##",
931
+ Path("./changelog.md").read_text(encoding="utf-8"),
932
+ flags=re.MULTILINE | re.DOTALL,
933
+ )
934
+ if match:
935
+ changes = match.groupdict().get("changes", "").strip()
936
+ # Add a title.
937
+ if changes:
938
+ changes = "### Changes\n\n" + changes
939
+
940
+ # Generate a link to the version of the package published on PyPi.
941
+ pypi_link = ""
942
+ if self.package_name:
943
+ pypi_link = f"[🐍 Available on PyPi](https://pypi.org/project/{
944
+ self.package_name
945
+ }/{version})."
946
+
947
+ # Assemble the release notes.
948
+ return f"{changes}\n\n{pypi_link}".strip()
949
+
950
@staticmethod
def format_github_value(value: Any) -> str:
    """Transform Python value to GitHub-friendly, JSON-like, console string.

    Renders:
    - `str` as-is
    - `None` into empty string
    - `bool` into lower-cased string
    - `Matrix` into JSON string
    - `Iterable` of strings into a serialized space-separated string
    - `Iterable` of `Path` into a serialized string whose items are space-separated
      and double-quoted
    """
    # Structured metadata to be rendered as JSON.
    if isinstance(value, Matrix):
        return str(value)

    # Strings pass through untouched.
    if isinstance(value, str):
        return value

    # Null is rendered as an empty string.
    if value is None:
        return ""

    # Booleans are lower-cased, as GitHub expects.
    if isinstance(value, bool):
        return str(value).lower()

    # Dicts have no defined serialization yet.
    if isinstance(value, dict):
        raise NotImplementedError

    # Serialize iterables item by item, wrapping Path items with double-quotes.
    if isinstance(value, Iterable):
        rendered_items = (
            f'"{item}"' if isinstance(item, Path) else str(item) for item in value
        )
        return " ".join(rendered_items)

    # Anything else falls through unchanged.
    return cast(str, value)
984
+
985
def dump(self, dialect: Dialects = Dialects.GITHUB) -> str:
    """Returns all metadata in the specified format.

    Defaults to GitHub dialect.
    """
    metadata: dict[str, Any] = {
        "new_commits": self.new_commits_hash,
        "release_commits": self.release_commits_hash,
        "python_files": self.python_files,
        "doc_files": self.doc_files,
        "is_python_project": self.is_python_project,
        "uv_requirement_params": self.uv_requirement_params,
        "package_name": self.package_name,
        "blacken_docs_params": self.blacken_docs_params,
        "ruff_py_version": self.ruff_py_version,
        "mypy_params": self.mypy_params,
        "current_version": self.current_version,
        "released_version": self.released_version,
        "is_sphinx": self.is_sphinx,
        "active_autodoc": self.active_autodoc,
        "release_notes": self.release_notes,
        "new_commits_matrix": self.new_commits_matrix,
        "release_commits_matrix": self.release_commits_matrix,
        "nuitka_matrix": self.nuitka_matrix,
    }

    logging.debug(f"Raw metadata: {metadata!r}")
    logging.debug(f"Format metadata into {dialect} dialect.")

    if dialect == Dialects.GITHUB:
        # Render each metadata entry as a GitHub environment file line.
        rendered_lines = []
        for env_name, value in metadata.items():
            env_value = self.format_github_value(value)

            if len(env_value.splitlines()) > 1:
                # Use a random unique delimiter to encode multiline value:
                delimiter = f"ghadelimiter_{randint(10**8, (10**9) - 1)}"
                rendered_lines.append(
                    f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
                )
            else:
                rendered_lines.append(f"{env_name}={env_value}\n")
        content = "".join(rendered_lines)
    else:
        assert dialect == Dialects.PLAIN
        content = repr(metadata)

    logging.debug(f"Formatted metadata: {content}")

    return content