gha-utils 4.24.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gha_utils/metadata.py ADDED
@@ -0,0 +1,1693 @@
1
+ # Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
2
+ #
3
+ # This program is Free Software; you can redistribute it and/or
4
+ # modify it under the terms of the GNU General Public License
5
+ # as published by the Free Software Foundation; either version 2
6
+ # of the License, or (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with this program; if not, write to the Free Software
15
+ # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
16
+
17
+ """Extract metadata from repository and Python projects to be used by GitHub workflows.
18
+
19
+ The following variables are `printed to the environment file
20
+ <https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files>`_:
21
+
22
+ ```text
23
+ is_bot=false
24
+ new_commits=346ce664f055fbd042a25ee0b7e96702e95 6f27db47612aaee06fdf08744b09a9f5f6c2
25
+ release_commits=6f27db47612aaee06fdf08744b09a9f5f6c2
26
+ mailmap_exists=true
27
+ gitignore_exists=true
28
+ python_files=".github/update_mailmap.py" ".github/metadata.py" "setup.py"
29
+ json_files=
30
+ yaml_files="config.yaml" ".github/workflows/lint.yaml" ".github/workflows/test.yaml"
31
+ workflow_files=".github/workflows/lint.yaml" ".github/workflows/test.yaml"
32
+ doc_files="changelog.md" "readme.md" "docs/license.md"
33
+ markdown_files="changelog.md" "readme.md" "docs/license.md"
34
+ image_files=
35
+ zsh_files=
36
+ is_python_project=true
37
+ package_name=click-extra
38
+ blacken_docs_params=--target-version py37 --target-version py38
39
+ mypy_params=--python-version 3.7
40
+ current_version=2.0.1
41
+ released_version=2.0.0
42
+ is_sphinx=true
43
+ active_autodoc=true
44
+ release_notes=[🐍 Available on PyPi](https://pypi.org/project/click-extra/2.0.0).
45
+ new_commits_matrix={
46
+ "commit": [
47
+ "346ce664f055fbd042a25ee0b7e96702e95",
48
+ "6f27db47612aaee06fdf08744b09a9f5f6c2"
49
+ ],
50
+ "include": [
51
+ {
52
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
53
+ "short_sha": "346ce66",
54
+ "current_version": "2.0.1"
55
+ },
56
+ {
57
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
58
+ "short_sha": "6f27db4",
59
+ "current_version": "2.0.0"
60
+ }
61
+ ]
62
+ }
63
+ release_commits_matrix={
64
+ "commit": ["6f27db47612aaee06fdf08744b09a9f5f6c2"],
65
+ "include": [
66
+ {
67
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
68
+ "short_sha": "6f27db4",
69
+ "current_version": "2.0.0"
70
+ }
71
+ ]
72
+ }
73
+ build_targets=[
74
+ {
75
+ "target": "linux-arm64",
76
+ "os": "ubuntu-24.04-arm",
77
+ "platform_id": "linux",
78
+ "arch": "arm64",
79
+ "extension": "bin"
80
+ },
81
+ {
82
+ "target": "linux-x64",
83
+ "os": "ubuntu-slim",
84
+ "platform_id": "linux",
85
+ "arch": "x64",
86
+ "extension": "bin"
87
+ },
88
+ {
89
+ "target": "macos-arm64",
90
+ "os": "macos-26",
91
+ "platform_id": "macos",
92
+ "arch": "arm64",
93
+ "extension": "bin"
94
+ },
95
+ {
96
+ "target": "macos-x64",
97
+ "os": "macos-15-intel",
98
+ "platform_id": "macos",
99
+ "arch": "x64",
100
+ "extension": "bin"
101
+ },
102
+ {
103
+ "target": "windows-arm64",
104
+ "os": "windows-11-arm",
105
+ "platform_id": "windows",
106
+ "arch": "arm64",
107
+ "extension": "exe"
108
+ },
109
+ {
110
+ "target": "windows-x64",
111
+ "os": "windows-2025",
112
+ "platform_id": "windows",
113
+ "arch": "x64",
114
+ "extension": "exe"
115
+ }
116
+ ]
117
+ nuitka_matrix={
118
+ "os": [
119
+ "ubuntu-24.04-arm",
120
+ "ubuntu-slim",
121
+ "macos-26",
122
+ "macos-15-intel",
123
+ "windows-11-arm",
124
+ "windows-2025"
125
+ ],
126
+ "entry_point": ["mpm"],
127
+ "commit": [
128
+ "346ce664f055fbd042a25ee0b7e96702e95",
129
+ "6f27db47612aaee06fdf08744b09a9f5f6c2"
130
+ ],
131
+ "include": [
132
+ {
133
+ "target": "linux-arm64",
134
+ "os": "ubuntu-24.04-arm",
135
+ "platform_id": "linux",
136
+ "arch": "arm64",
137
+ "extension": "bin"
138
+ },
139
+ {
140
+ "target": "linux-x64",
141
+ "os": "ubuntu-slim",
142
+ "platform_id": "linux",
143
+ "arch": "x64",
144
+ "extension": "bin"
145
+ },
146
+ {
147
+ "target": "macos-arm64",
148
+ "os": "macos-26",
149
+ "platform_id": "macos",
150
+ "arch": "arm64",
151
+ "extension": "bin"
152
+ },
153
+ {
154
+ "target": "macos-x64",
155
+ "os": "macos-15-intel",
156
+ "platform_id": "macos",
157
+ "arch": "x64",
158
+ "extension": "bin"
159
+ },
160
+ {
161
+ "target": "windows-arm64",
162
+ "os": "windows-11-arm",
163
+ "platform_id": "windows",
164
+ "arch": "arm64",
165
+ "extension": "exe"
166
+ },
167
+ {
168
+ "target": "windows-x64",
169
+ "os": "windows-2025",
170
+ "platform_id": "windows",
171
+ "arch": "x64",
172
+ "extension": "exe"
173
+ },
174
+ {
175
+ "entry_point": "mpm",
176
+ "cli_id": "mpm",
177
+ "module_id": "meta_package_manager.__main__",
178
+ "callable_id": "main",
179
+ "module_path": "meta_package_manager/__main__.py"
180
+ },
181
+ {
182
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
183
+ "short_sha": "346ce66",
184
+ "current_version": "2.0.0"
185
+ },
186
+ {
187
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
188
+ "short_sha": "6f27db4",
189
+ "current_version": "1.9.1"
190
+ },
191
+ {
192
+ "os": "ubuntu-24.04-arm",
193
+ "entry_point": "mpm",
194
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
195
+ "bin_name": "mpm-linux-arm64-346ce66.bin"
196
+ },
197
+ {
198
+ "os": "ubuntu-24.04-arm",
199
+ "entry_point": "mpm",
200
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
201
+ "bin_name": "mpm-linux-arm64-6f27db4.bin"
202
+ },
203
+ {
204
+ "os": "ubuntu-slim",
205
+ "entry_point": "mpm",
206
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
207
+ "bin_name": "mpm-linux-x64-346ce66.bin"
208
+ },
209
+ {
210
+ "os": "ubuntu-slim",
211
+ "entry_point": "mpm",
212
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
213
+ "bin_name": "mpm-linux-x64-6f27db4.bin"
214
+ },
215
+ {
216
+ "os": "macos-26",
217
+ "entry_point": "mpm",
218
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
219
+ "bin_name": "mpm-macos-arm64-346ce66.bin"
220
+ },
221
+ {
222
+ "os": "macos-26",
223
+ "entry_point": "mpm",
224
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
225
+ "bin_name": "mpm-macos-arm64-6f27db4.bin"
226
+ },
227
+ {
228
+ "os": "macos-15-intel",
229
+ "entry_point": "mpm",
230
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
231
+ "bin_name": "mpm-macos-x64-346ce66.bin"
232
+ },
233
+ {
234
+ "os": "macos-15-intel",
235
+ "entry_point": "mpm",
236
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
237
+ "bin_name": "mpm-macos-x64-6f27db4.bin"
238
+ },
239
+ {
240
+ "os": "windows-11-arm",
241
+ "entry_point": "mpm",
242
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
243
+ "bin_name": "mpm-windows-arm64-346ce66.bin"
244
+ },
245
+ {
246
+ "os": "windows-11-arm",
247
+ "entry_point": "mpm",
248
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
249
+ "bin_name": "mpm-windows-arm64-6f27db4.bin"
250
+ },
251
+ {
252
+ "os": "windows-2025",
253
+ "entry_point": "mpm",
254
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
255
+ "bin_name": "mpm-windows-x64-346ce66.exe"
256
+ },
257
+ {
258
+ "os": "windows-2025",
259
+ "entry_point": "mpm",
260
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
261
+ "bin_name": "mpm-windows-x64-6f27db4.exe"
262
+ },
263
+ {"state": "stable"}
264
+ ]
265
+ }
266
+ ```
267
+
268
+ .. warning::
269
+ Fields with serialized lists and dictionaries, like ``new_commits_matrix``,
270
+ ``build_targets`` or ``nuitka_matrix``, are pretty-printed in the example above for
271
+ readability. They are inlined in the actual output and not formatted this way.
272
+ """
273
+
274
+ from __future__ import annotations
275
+
276
+ import ast
277
+ import json
278
+ import logging
279
+ import os
280
+ import re
281
+ import tomllib
282
+ from collections.abc import Iterable
283
+ from enum import StrEnum
284
+ from functools import cached_property
285
+ from operator import itemgetter
286
+ from pathlib import Path
287
+ from random import randint
288
+ from re import escape
289
+
290
+ from bumpversion.config import get_configuration # type: ignore[import-untyped]
291
+ from bumpversion.config.files import find_config_file # type: ignore[import-untyped]
292
+ from bumpversion.show import resolve_name # type: ignore[import-untyped]
293
+ from extra_platforms import is_github_ci
294
+ from gitdb.exc import BadName # type: ignore[import-untyped]
295
+ from packaging.specifiers import SpecifierSet
296
+ from packaging.version import Version
297
+ from py_walk import get_parser_from_file
298
+ from py_walk.models import Parser
299
+ from pydriller import Commit, Git, Repository # type: ignore[import-untyped]
300
+ from pyproject_metadata import ConfigurationError, StandardMetadata
301
+ from wcmatch.glob import (
302
+ BRACE,
303
+ DOTGLOB,
304
+ FOLLOW,
305
+ GLOBSTAR,
306
+ GLOBTILDE,
307
+ NEGATE,
308
+ NODIR,
309
+ iglob,
310
+ )
311
+
312
+ from .matrix import Matrix
313
+
314
+ TYPE_CHECKING = False
315
+ if TYPE_CHECKING:
316
+ from typing import Any, Final
317
+
318
+
319
+ SHORT_SHA_LENGTH = 7
320
+ """Default SHA length hard-coded to ``7``.
321
+
322
+ .. caution::
323
+
324
+ The `default is subject to change <https://stackoverflow.com/a/21015031>`_ and
325
+ depends on the size of the repository.
326
+ """
327
+
328
+ MAILMAP_PATH = Path(".mailmap")
329
+
330
+ GITIGNORE_PATH = Path(".gitignore")
331
+
332
+ NUITKA_BUILD_TARGETS = {
333
+ "linux-arm64": {
334
+ "os": "ubuntu-24.04-arm",
335
+ "platform_id": "linux",
336
+ "arch": "arm64",
337
+ "extension": "bin",
338
+ },
339
+ "linux-x64": {
340
+ "os": "ubuntu-slim",
341
+ "platform_id": "linux",
342
+ "arch": "x64",
343
+ "extension": "bin",
344
+ },
345
+ "macos-arm64": {
346
+ "os": "macos-26",
347
+ "platform_id": "macos",
348
+ "arch": "arm64",
349
+ "extension": "bin",
350
+ },
351
+ "macos-x64": {
352
+ "os": "macos-15-intel",
353
+ "platform_id": "macos",
354
+ "arch": "x64",
355
+ "extension": "bin",
356
+ },
357
+ "windows-arm64": {
358
+ "os": "windows-11-arm",
359
+ "platform_id": "windows",
360
+ "arch": "arm64",
361
+ "extension": "exe",
362
+ },
363
+ "windows-x64": {
364
+ "os": "windows-2025",
365
+ "platform_id": "windows",
366
+ "arch": "x64",
367
+ "extension": "exe",
368
+ },
369
+ }
370
+ """List of GitHub-hosted runners used for Nuitka builds.
371
+
372
+ The keys of the dictionary are target names, used as short identifiers for
373
+ user-friendliness. As such, they are used to name the compiled binaries.
374
+
375
+ Values are dictionaries with the following keys:
376
+
377
+ - ``os``: Operating system name, as used in `GitHub-hosted runners
378
+ <https://docs.github.com/en/actions/writing-workflows/choosing-where-your-workflow-runs/choosing-the-runner-for-a-job#standard-github-hosted-runners-for-public-repositories>`_.
379
+
380
+ .. hint::
381
+ We choose to run the compilation only on the latest supported version of each
382
+ OS, for each architecture. Note that macOS and Windows do not have the latest
383
+ version available for each architecture.
384
+
385
+ - ``platform_id``: Platform identifier, as defined by `Extra Platform
386
+ <https://github.com/kdeldycke/extra-platforms>`_.
387
+
388
+ - ``arch``: Architecture identifier.
389
+
390
+ .. note::
391
+ Architecture IDs are `inspired by those specified for self-hosted runners
392
+ <https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/supported-architectures-and-operating-systems-for-self-hosted-runners#supported-processor-architectures>`_
393
+
394
+ .. note::
395
+ Maybe we should just adopt `target triple
396
+ <https://mcyoung.xyz/2025/04/14/target-triples/>`_.
397
+
398
+ - ``extension``: File extension of the compiled binary.
399
+ """
400
+
401
+
402
+ FLAT_BUILD_TARGETS = [
403
+ {"target": target_id} | target_data
404
+ for target_id, target_data in NUITKA_BUILD_TARGETS.items()
405
+ ]
406
+ """List of build targets in a flat format, suitable for matrix inclusion."""
407
+
408
+
409
+ WorkflowEvent = StrEnum(
410
+ "WorkflowEvent",
411
+ (
412
+ "branch_protection_rule",
413
+ "check_run",
414
+ "check_suite",
415
+ "create",
416
+ "delete",
417
+ "deployment",
418
+ "deployment_status",
419
+ "discussion",
420
+ "discussion_comment",
421
+ "fork",
422
+ "gollum",
423
+ "issue_comment",
424
+ "issues",
425
+ "label",
426
+ "merge_group",
427
+ "milestone",
428
+ "page_build",
429
+ "project",
430
+ "project_card",
431
+ "project_column",
432
+ "public",
433
+ "pull_request",
434
+ "pull_request_comment",
435
+ "pull_request_review",
436
+ "pull_request_review_comment",
437
+ "pull_request_target",
438
+ "push",
439
+ "registry_package",
440
+ "release",
441
+ "repository_dispatch",
442
+ "schedule",
443
+ "status",
444
+ "watch",
445
+ "workflow_call",
446
+ "workflow_dispatch",
447
+ "workflow_run",
448
+ ),
449
+ )
450
+ """Workflow events that cause a workflow to run.
451
+
452
+ `List of events
453
+ <https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows>`_.
454
+ """
455
+
456
+
457
+ Dialect = StrEnum("Dialect", ("github", "json"))
458
+ """Dialect in which metadata can be formatted to."""
459
+
460
+
461
+ class TargetVersion(StrEnum):
462
+ """List of Python 3 minor versions supported by Black.
463
+
464
+ `Mirrors official implementation from black.mode.TargetVersion
465
+ <https://github.com/psf/black/blob/main/src/black/mode.py>`_.
466
+ """
467
+
468
+ PY33 = "3.3"
469
+ PY34 = "3.4"
470
+ PY35 = "3.5"
471
+ PY36 = "3.6"
472
+ PY37 = "3.7"
473
+ PY38 = "3.8"
474
+ PY39 = "3.9"
475
+ PY310 = "3.10"
476
+ PY311 = "3.11"
477
+ PY312 = "3.12"
478
+ PY313 = "3.13"
479
+ PY314 = "3.14"
480
+
481
+
482
+ MYPY_VERSION_MIN: Final = (3, 8)
483
+ """Earliest version supported by Mypy's ``--python-version 3.x`` parameter.
484
+
485
+ `Sourced from Mypy original implementation
486
+ <https://github.com/python/mypy/blob/master/mypy/defaults.py>`_.
487
+ """
488
+
489
+
490
+ # Silence overly verbose debug messages from py-walk logger.
491
+ logging.getLogger("py_walk").setLevel(logging.WARNING)
492
+
493
+
494
+ class JSONMetadata(json.JSONEncoder):
495
+ """Custom JSON encoder for metadata serialization."""
496
+
497
+ def default(self, o: Any) -> Any:
498
+ if isinstance(o, Matrix):
499
+ return o.matrix()
500
+
501
+ if isinstance(o, Path):
502
+ return str(o)
503
+
504
+ return super().default(o)
505
+
506
+
507
+ class Metadata:
508
+ """Metadata class."""
509
+
510
+ def __init__(self, unstable_targets: Iterable[str] | None = None) -> None:
511
+ """Initialize internal variables."""
512
+ self.unstable_targets = set()
513
+ if unstable_targets:
514
+ self.unstable_targets = set(unstable_targets)
515
+ assert self.unstable_targets.issubset(NUITKA_BUILD_TARGETS)
516
+
517
+ pyproject_path = Path() / "pyproject.toml"
518
+ sphinx_conf_path = Path() / "docs" / "conf.py"
519
+
520
+ @cached_property
521
+ def github_context(self) -> dict[str, Any]:
522
+ """Load GitHub context from the environment.
523
+
524
+ Expects ``GITHUB_CONTEXT`` to be set in the environment, i.e. add the
525
+ following to the job step calling this script:
526
+
527
+ .. code-block:: yaml
528
+
529
+ - name: Project metadata
530
+ id: project-metadata
531
+ env:
532
+ GITHUB_CONTEXT: ${{ toJSON(github) }}
533
+ run: |
534
+ gha-utils --verbosity DEBUG metadata --overwrite "$GITHUB_OUTPUT"
535
+
536
+ .. todo::
537
+ Try to remove reliance on GitHub context entirely so we can eliminate the
538
+ JSON/env hack above.
539
+ """
540
+ if "GITHUB_CONTEXT" not in os.environ:
541
+ if is_github_ci():
542
+ message = (
543
+ "Missing GitHub context in environment. "
544
+ "Did you forget to set GITHUB_CONTEXT?"
545
+ )
546
+ logging.warning(message)
547
+ return {}
548
+ context = json.loads(os.environ["GITHUB_CONTEXT"])
549
+ logging.debug("--- GitHub context ---")
550
+ logging.debug(json.dumps(context, indent=4))
551
+ return context # type:ignore[no-any-return]
552
+
553
+ @cached_property
554
+ def git(self) -> Git:
555
+ """Return a PyDriller Git object."""
556
+ return Git(".")
557
+
558
+ def git_stash_count(self) -> int:
559
+ """Returns the number of stashes."""
560
+ count = int(
561
+ self.git.repo.git.rev_list(
562
+ "--walk-reflogs", "--ignore-missing", "--count", "refs/stash"
563
+ )
564
+ )
565
+ logging.debug(f"Number of stashes in repository: {count}")
566
+ return count
567
+
568
+ def git_deepen(
569
+ self, commit_hash: str, max_attempts: int = 10, deepen_increment: int = 50
570
+ ) -> bool:
571
+ """Deepen a shallow clone until the provided ``commit_hash`` is found.
572
+
573
+ Progressively fetches more commits from the current repository until the
574
+ specified commit is found or max attempts is reached.
575
+
576
+ Returns ``True`` if the commit was found, ``False`` otherwise.
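+
+ The loop is roughly equivalent to this manual sketch (assuming a shallow
+ clone and a single remote; ``$COMMIT_HASH`` is a placeholder):
+
+ .. code-block:: shell-session
+
+ $ until git cat-file -e "$COMMIT_HASH^{commit}" 2>/dev/null; do
+ >     git fetch --deepen=50
+ > done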
577
+ """
578
+ for attempt in range(max_attempts):
579
+ try:
580
+ _ = self.git.get_commit(commit_hash)
581
+ if attempt > 0:
582
+ logging.info(
583
+ f"Found commit {commit_hash} after {attempt} deepen "
584
+ "operation(s)."
585
+ )
586
+ return True
587
+ except (ValueError, BadName) as ex:
588
+ logging.debug(f"Commit {commit_hash} not found: {ex}")
589
+
590
+ current_depth = self.git.total_commits()
591
+
592
+ if attempt == max_attempts - 1:
593
+ # We've exhausted all attempts
594
+ logging.error(
595
+ f"Cannot find commit {commit_hash} in repository after "
596
+ f"{max_attempts} deepen attempts. "
597
+ f"Final depth is {current_depth} commits."
598
+ )
599
+ return False
600
+
601
+ logging.info(
602
+ f"Commit {commit_hash} not found at depth {current_depth}."
603
+ )
604
+ logging.info(
605
+ f"Deepening by {deepen_increment} commits (attempt "
606
+ f"{attempt + 1}/{max_attempts})..."
607
+ )
608
+
609
+ try:
610
+ self.git.repo.git.fetch(f"--deepen={deepen_increment}")
611
+ new_depth = self.git.total_commits()
612
+ logging.debug(
613
+ f"Repository deepened successfully. New depth: {new_depth}"
614
+ )
615
+ except Exception as ex:
616
+ logging.error(f"Failed to deepen repository: {ex}")
617
+ return False
618
+
619
+ return False
620
+
621
+ def commit_matrix(self, commits: Iterable[Commit] | None) -> Matrix | None:
622
+ """Pre-compute a matrix of commits.
623
+
624
+ .. danger::
625
+ This method temporarily modify the state of the repository to compute
626
+ version metadata from the past.
627
+
628
+ To prevent any loss of uncommitted data, it stashes and unstash the
629
+ local changes between checkouts.
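+
+ In essence, the sequence of Git operations performed is (sketch only):
+
+ .. code-block:: shell-session
+
+ $ git stash
+ $ git checkout <commit>  # Repeated for each commit in the list.
+ $ git checkout <initial-ref>
+ $ git stash pop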
630
+
631
+ The list of commits is augmented with long and short SHA values, as well as
632
+ the current version. The most recent commit is first, the oldest last.
633
+
634
+ Returns a ready-to-use matrix structure:
635
+
636
+ .. code-block:: python
637
+ {
638
+ "commit": [
639
+ "346ce664f055fbd042a25ee0b7e96702e95",
640
+ "6f27db47612aaee06fdf08744b09a9f5f6c2",
641
+ ],
642
+ "include": [
643
+ {
644
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
645
+ "short_sha": "346ce66",
646
+ "current_version": "2.0.1",
647
+ },
648
+ {
649
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
650
+ "short_sha": "6f27db4",
651
+ "current_version": "2.0.0",
652
+ },
653
+ ],
654
+ }
655
+ """
656
+ if not commits:
657
+ return None
658
+
659
+ current_commit = self.git.repo.head.commit.hexsha
660
+
661
+ # Check if we need to get back in time in the Git log and browse past commits.
662
+ if len(commits) == 1: # type: ignore[arg-type]
663
+ # Is the current commit the one we're looking for?
664
+ past_commit_lookup = bool(
665
+ current_commit != commits[0].hash # type: ignore[index]
666
+ )
667
+ # If we have multiple commits then yes, we need to look for past commits.
668
+ else:
669
+ past_commit_lookup = True
670
+
671
+ # We need to go back in time, but first save the current state of the
672
+ # repository.
673
+ if past_commit_lookup:
674
+ logging.debug(
675
+ "We need to look into the commit history. Inspect the initial state "
676
+ "of the repository."
677
+ )
678
+
679
+ if not is_github_ci():
680
+ raise RuntimeError(
681
+ "Local repository manipulations only allowed in CI environment"
682
+ )
683
+
684
+ # Save the initial commit reference and SHA of the repository. The
685
+ # reference is either the canonical active branch name (e.g. ``main``), or
686
+ # the commit SHA if the current HEAD commit is detached from a branch.
687
+ if self.git.repo.head.is_detached:
688
+ init_ref = current_commit
689
+ else:
690
+ init_ref = self.git.repo.active_branch.name
691
+ logging.debug(f"Initial commit reference: {init_ref}")
692
+
693
+ # Try to stash local changes and check if we'll need to unstash them later.
694
+ counter_before = self.git_stash_count()
695
+ logging.debug("Try to stash local changes before our series of checkouts.")
696
+ self.git.repo.git.stash()
697
+ counter_after = self.git_stash_count()
698
+ logging.debug(
699
+ "Stash counter changes after 'git stash' command: "
700
+ f"{counter_before} -> {counter_after}"
701
+ )
702
+ assert counter_after >= counter_before
703
+ need_unstash = bool(counter_after > counter_before)
704
+ logging.debug(f"Need to unstash after checkouts: {need_unstash}")
705
+
706
+ else:
707
+ init_ref = None
708
+ need_unstash = False
709
+ logging.debug(
710
+ "No need to look into the commit history: repository is already "
711
+ f"checked out at {current_commit}"
712
+ )
713
+
714
+ matrix = Matrix()
715
+ for commit in commits:
716
+ if past_commit_lookup:
717
+ logging.debug(f"Checkout to commit {commit.hash}")
718
+ self.git.checkout(commit.hash)
719
+
720
+ commit_metadata = {
721
+ "commit": commit.hash,
722
+ "short_sha": commit.hash[:SHORT_SHA_LENGTH],
723
+ }
724
+
725
+ logging.debug(f"Extract project version at commit {commit.hash}")
726
+ current_version = Metadata.get_current_version()
727
+ if current_version:
728
+ commit_metadata["current_version"] = current_version
729
+
730
+ matrix.add_variation("commit", [commit.hash])
731
+ matrix.add_includes(commit_metadata)
732
+
733
+ # Restore the repository to its initial state.
734
+ if past_commit_lookup:
735
+ logging.debug(f"Restore repository to {init_ref}.")
736
+ self.git.checkout(init_ref)
737
+ if need_unstash:
738
+ logging.debug("Unstash local changes that were previously saved.")
739
+ self.git.repo.git.stash("pop")
740
+
741
+ return matrix
742
+
743
+ @cached_property
744
+ def event_type(self) -> WorkflowEvent | None:
745
+ """Returns the type of event that triggered the workflow run.
746
+
747
+ .. caution::
748
+ This property is based on a crude heuristic, as it only looks at the value
749
+ of the ``GITHUB_BASE_REF`` environment variable. Which is `only set when
750
+ the event that triggers a workflow run is either pull_request or
751
+ pull_request_target
752
+ <https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables>`_.
753
+
754
+ .. todo::
755
+ Add detection of all workflow trigger events.
756
+ """
757
+ if not is_github_ci():
758
+ logging.warning(
759
+ "Cannot guess event type because we're not in a CI environment."
760
+ )
761
+ return None
762
+ if "GITHUB_BASE_REF" not in os.environ:
763
+ logging.warning(
764
+ "Cannot guess event type because no GITHUB_BASE_REF env var found."
765
+ )
766
+ return None
767
+
768
+ if bool(os.environ.get("GITHUB_BASE_REF")):
769
+ return WorkflowEvent.pull_request
770
+ return WorkflowEvent.push
771
+
772
+ @cached_property
773
+ def event_actor(self) -> str | None:
774
+ """Returns the GitHub login of the user that triggered the workflow run."""
775
+ return self.github_context.get("actor")
776
+
777
+ @cached_property
778
+ def event_sender_type(self) -> str | None:
779
+ """Returns the type of the user that triggered the workflow run."""
780
+ sender_type = self.github_context.get("event", {}).get("sender", {}).get("type")
781
+ if not sender_type:
782
+ return None
783
+ assert isinstance(sender_type, str)
784
+ return sender_type
785
+
786
+ @cached_property
787
+ def is_bot(self) -> bool:
788
+ """Returns ``True`` if the workflow was triggered by a bot or automated process.
789
+
790
+ This is useful to only run some jobs on human-triggered events. Or skip jobs
791
+ triggered by bots to avoid infinite loops.
792
+ """
793
+ if self.event_sender_type == "Bot" or self.event_actor in (
794
+ "dependabot[bot]",
795
+ "dependabot-preview[bot]",
796
+ ):
797
+ return True
798
+ return False
799
+
800
+ @cached_property
801
+ def commit_range(self) -> tuple[str | None, str] | None:
802
+ """Range of commits bundled within the triggering event.
803
+
804
+ A workflow run is triggered by a singular event, which might encapsulate one or
805
+ more commits. This means the workflow will only run once on the last commit,
806
+ even if multiple new commits were pushed.
807
+
808
+ This is annoying when we want to keep a carefully constructed commit history,
809
+ and want to run the workflow on each commit. The typical example is a pull
810
+ request that is merged upstream but we'd like to produce artifacts (builds,
811
+ packages, etc.) for each individual commit.
812
+
813
+ The default ``GITHUB_SHA`` environment variable is not enough as it only points
814
+ to the last commit. We need to inspect the commit history to find all new ones.
815
+ New commits needs to be fetched differently in ``push`` and ``pull_requests``
816
+ events.
817
+
818
+ .. seealso::
819
+ - https://stackoverflow.com/a/67204539
820
+ - https://stackoverflow.com/a/62953566
821
+ - https://stackoverflow.com/a/61861763
822
+
823
+ .. seealso::
824
+ Pull request events on GitHub are a bit complex, see: `The Many SHAs of a
825
+ GitHub Pull Request
826
+ <https://www.kenmuse.com/blog/the-many-shas-of-a-github-pull-request/>`_.
827
+
828
+ .. todo::
829
+ Refactor so we can get rid of ``self.github_context``. Maybe there's enough
830
+ metadata lying around in the environment variables that we can inspect the
831
+ git history and find the commit range.
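+
+ In short, the resolution sketched below is applied (``event.*`` paths refer
+ to the GitHub context):
+
+ .. code-block:: text
+
+ pull_request / pull_request_target: event.pull_request.base.sha -> event.pull_request.head.sha
+ push: event.before -> $GITHUB_SHA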
832
+ """
833
+ if not self.github_context or not self.event_type:
834
+ return None
835
+ # Pull request event.
836
+ if self.event_type in (
837
+ WorkflowEvent.pull_request,
838
+ WorkflowEvent.pull_request_target,
839
+ ):
840
+ base_ref = os.environ["GITHUB_BASE_REF"]
841
+ assert base_ref
842
+ assert (
843
+ self.github_context["event"]["pull_request"]["base"]["ref"] == base_ref
844
+ )
845
+ base_ref_sha = self.github_context["event"]["pull_request"]["base"]["sha"]
846
+ start = base_ref_sha
847
+ # We need to checkout the HEAD commit instead of the artificial merge
848
+ # commit introduced by the pull request.
849
+ end = self.github_context["event"]["pull_request"]["head"]["sha"]
850
+ # Push event.
851
+ else:
852
+ start = self.github_context["event"].get("before")
853
+ end = os.environ["GITHUB_SHA"]
854
+ assert end
855
+ logging.debug(f"Commit range: {start} -> {end}")
856
+ return start, end
857
+
858
+ @cached_property
859
+ def current_commit(self) -> Commit | None:
860
+ """Returns the current ``Commit`` object."""
861
+ return next(Repository(".", single="HEAD").traverse_commits())
862
+
863
+ @cached_property
864
+ def current_commit_matrix(self) -> Matrix | None:
865
+ """Pre-computed matrix with long and short SHA values of the current commit."""
866
+ return self.commit_matrix((self.current_commit,))
867
+
868
+ @cached_property
869
+ def new_commits(self) -> tuple[Commit, ...] | None:
870
+ """Returns list of ``Commit`` objects bundled within the triggering event."""
871
+ if not self.commit_range:
872
+ return None
873
+ start, end = self.commit_range
874
+
875
+ # Sanity check: make sure the start commit exists in the repository.
876
+ # XXX Even if we skip the start commit later on (because the range is
877
+ # inclusive), we still need to make sure it exists: PyDriller still needs to
878
+ # find it to be able to traverse the commit history.
879
+ for commit_id in (start, end):
880
+ if not commit_id:
881
+ continue
882
+
883
+ if not self.git_deepen(commit_id):
884
+ logging.warning(
885
+ "Skipping metadata extraction of the range of new commits."
886
+ )
887
+ return None
888
+
889
+ if not start:
890
+ logging.warning("No start commit found. Only one commit in range.")
891
+ assert end
892
+ return (self.git.get_commit(end),)
893
+
894
+ commit_list = []
895
+ for index, commit in enumerate(
896
+ Repository(".", from_commit=start, to_commit=end).traverse_commits()
897
+ ):
898
+ # Skip the first commit because the commit range is inclusive.
899
+ if index == 0:
900
+ continue
901
+ commit_list.append(commit)
902
+ return tuple(commit_list)
903
+
904
+ @cached_property
905
+ def new_commits_matrix(self) -> Matrix | None:
906
+ """Pre-computed matrix with long and short SHA values of new commits."""
907
+ return self.commit_matrix(self.new_commits)
908
+
909
+ @cached_property
910
+ def new_commits_hash(self) -> tuple[str, ...] | None:
911
+ """List all hashes of new commits."""
912
+ return self.new_commits_matrix["commit"] if self.new_commits_matrix else None
913
+
914
+ @cached_property
915
+ def release_commits(self) -> tuple[Commit, ...] | None:
916
+ """Returns list of ``Commit`` objects to be tagged within the triggering event.
917
+
918
+ We cannot identify a release commit based on the presence of a ``vX.Y.Z`` tag
919
+ alone. That's because it is not present in the ``prepare-release`` pull request
920
+ produced by the ``changelog.yaml`` workflow. The tag is produced later on by
921
+ the ``release.yaml`` workflow, when the pull request is merged to ``main``.
922
+
923
+ Our second-best option is to identify a release from the full commit
924
+ message, matching the message template used in the ``changelog.yaml`` workflow.
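+
+ A matching release commit message looks like this (version illustrative):
+
+ .. code-block:: text
+
+ [changelog] Release v2.21.3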
925
+ """
926
+ if not self.new_commits:
927
+ return None
928
+ return tuple(
929
+ commit
930
+ for commit in self.new_commits
931
+ if re.fullmatch(
932
+ r"^\[changelog\] Release v[0-9]+\.[0-9]+\.[0-9]+$",
933
+ commit.msg,
934
+ )
935
+ )
936
+
937
+ @cached_property
938
+ def release_commits_matrix(self) -> Matrix | None:
939
+ """Pre-computed matrix with long and short SHA values of release commits."""
940
+ return self.commit_matrix(self.release_commits)
941
+
942
+ @cached_property
943
+ def release_commits_hash(self) -> tuple[str, ...] | None:
944
+ """List all hashes of release commits."""
945
+ return (
946
+ self.release_commits_matrix["commit"]
947
+ if self.release_commits_matrix
948
+ else None
949
+ )
950
+
951
+ @cached_property
952
+ def mailmap_exists(self) -> bool:
953
+ return MAILMAP_PATH.is_file()
954
+
955
+ @cached_property
956
+ def gitignore_exists(self) -> bool:
957
+ return GITIGNORE_PATH.is_file()
958
+
959
+ @cached_property
960
+ def gitignore_parser(self) -> Parser | None:
961
+ """Returns a parser for the ``.gitignore`` file, if it exists."""
962
+ if self.gitignore_exists:
963
+ logging.debug(f"Parse {GITIGNORE_PATH}")
964
+ return get_parser_from_file(GITIGNORE_PATH)
965
+ return None
966
+
967
+ def gitignore_match(self, file_path: Path | str) -> bool:
968
+ if self.gitignore_parser and self.gitignore_parser.match(file_path):
969
+ return True
970
+ return False
971
+
972
+ def glob_files(self, *patterns: str) -> list[Path]:
973
+ """Return all file path matching the ``patterns``.
974
+
975
+ Patterns are glob patterns supporting ``**`` for recursive search, and ``!``
976
+ for negation.
977
+
978
+ All directories are traversed, whether they are hidden (i.e. starting with a
979
+ dot ``.``) or not, including symlinks.
980
+
981
+ Skips:
982
+
983
+ - files which do not exist
984
+ - directories
985
+ - broken symlinks
986
+ - files matching patterns specified by ``.gitignore`` file
987
+
988
+ Returns both hidden and non-hidden files.
989
+
990
+ All files are normalized to their absolute path, so that duplicates produced by
991
+ symlinks are ignored.
992
+
993
+ File paths are returned relative to the current working directory when
994
+ possible, or as absolute paths otherwise.
995
+
996
+ The resulting list of file paths is sorted.
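+
+ A typical call mixing recursion, brace expansion and negation (patterns and
+ the ``metadata`` instance are illustrative):
+
+ .. code-block:: python
+
+ metadata.glob_files("**/*.{yaml,yml}", "!**/.venv/**")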
997
+ """
998
+ current_dir = Path.cwd()
999
+ seen = set()
1000
+
1001
+ for file_path in iglob(
1002
+ patterns,
1003
+ flags=NODIR | GLOBSTAR | DOTGLOB | GLOBTILDE | BRACE | FOLLOW | NEGATE,
1004
+ ):
1005
+ # Normalize the path to avoid duplicates.
1006
+ try:
1007
+ absolute_path = Path(file_path).resolve(strict=True)
1008
+ # Skip files that do not exist and broken symlinks.
1009
+ except OSError:
1010
+ logging.warning(f"Skip non-existing file / broken symlink: {file_path}")
1011
+ continue
1012
+
1013
+ # Simplify the path by trying to make it relative to the current location.
1014
+ normalized_path = absolute_path
1015
+ try:
1016
+ normalized_path = absolute_path.relative_to(current_dir)
1017
+ except ValueError:
1018
+ # If the file is not relative to the current directory, keep its
1019
+ # absolute path.
1020
+ logging.debug(
1021
+ f"{absolute_path} is not relative to {current_dir}. "
1022
+ "Keeping the path absolute."
1023
+ )
1024
+
1025
+ if normalized_path in seen:
1026
+ logging.debug(f"Skip duplicate file: {normalized_path}")
1027
+ continue
1028
+
1029
+ # Skip files that are ignored by .gitignore.
1030
+ if self.gitignore_match(file_path):
1031
+ logging.debug(f"Skip file matching {GITIGNORE_PATH}: {file_path}")
1032
+ continue
1033
+
1034
+ seen.add(normalized_path)
1035
+ return sorted(seen)
1036
+
1037
+ @cached_property
1038
+ def python_files(self) -> list[Path]:
1039
+ """Returns a list of python files."""
1040
+ return self.glob_files("**/*.{py,pyi,pyw,pyx,ipynb}")
1041
+
1042
+ @cached_property
1043
+ def json_files(self) -> list[Path]:
1044
+ """Returns a list of JSON files."""
1045
+ return self.glob_files(
1046
+ "**/*.{json,jsonc,json5}",
1047
+ "**/.code-workspace",
1048
+ "!**/package-lock.json",
1049
+ )
1050
+
1051
+ @cached_property
1052
+ def yaml_files(self) -> list[Path]:
1053
+ """Returns a list of YAML files."""
1054
+ return self.glob_files("**/*.{yaml,yml}")
1055
+
1056
+ @cached_property
1057
+ def workflow_files(self) -> list[Path]:
1058
+ """Returns a list of GitHub workflow files."""
1059
+ return self.glob_files(".github/workflows/**/*.{yaml,yml}")
1060
+
1061
+ @cached_property
1062
+ def doc_files(self) -> list[Path]:
1063
+ """Returns a list of doc files."""
1064
+ return self.glob_files(
1065
+ "**/*.{markdown,mdown,mkdn,mdwn,mkd,md,mdtxt,mdtext,rst,tex}"
1066
+ )
1067
+
1068
+ @cached_property
1069
+ def markdown_files(self) -> list[Path]:
1070
+ """Returns a list of Markdown files."""
1071
+ return self.glob_files("**/*.{markdown,mdown,mkdn,mdwn,mkd,md,mdtxt,mdtext}")
1072
+
1073
+ @cached_property
1074
+ def image_files(self) -> list[Path]:
1075
+ """Returns a list of image files.
1076
+
1077
+ Inspired by the list of image extensions supported by calibre's image-actions:
1078
+ https://github.com/calibreapp/image-actions/blob/f325757/src/constants.ts#L32
1079
+ """
1080
+ return self.glob_files("**/*.{jpeg,jpg,png,webp,avif}")
1081
+
1082
+ @cached_property
1083
+ def zsh_files(self) -> list[Path]:
1084
+ """Returns a list of Zsh files."""
1085
+ return self.glob_files("**/*.{sh,zsh}", "**/.{zshrc,zprofile,zshenv,zlogin}")
1086
+
1087
+ @cached_property
1088
+ def is_python_project(self):
1089
+ """Returns ``True`` if repository is a Python project.
1090
+
1091
+ Presence of a ``pyproject.toml`` file that respects the standards is enough
1092
+ to consider the project as a Python one.
1093
+ """
1094
+ return self.pyproject is not None
1095
+
1096
+ @cached_property
1097
+ def pyproject(self) -> StandardMetadata | None:
1098
+ """Returns metadata stored in the ``pyproject.toml`` file.
1099
+
1100
+ Returns ``None`` if the ``pyproject.toml`` does not exist or does not respect
1101
+ the PEP standards.
1102
+
1103
+ .. warning::
1104
+ Some third-party apps have their configuration saved into
1105
+ ``pyproject.toml`` file, but that does not mean the project is a Python
1106
+ one. For that, the ``pyproject.toml`` needs to respect the PEPs.
1107
+ """
1108
+ if self.pyproject_path.exists() and self.pyproject_path.is_file():
1109
+ toml = tomllib.loads(self.pyproject_path.read_text(encoding="UTF-8"))
1110
+ try:
1111
+ metadata = StandardMetadata.from_pyproject(toml)
1112
+ return metadata
1113
+ except ConfigurationError:
1114
+ pass
1115
+
1116
+ return None
1117
+
1118
+ @cached_property
1119
+ def package_name(self) -> str | None:
1120
+ """Returns package name as published on PyPi."""
1121
+ if self.pyproject and self.pyproject.canonical_name:
1122
+ return self.pyproject.canonical_name
1123
+ return None
1124
+
1125
+ @cached_property
1126
+ def script_entries(self) -> list[tuple[str, str, str]]:
1127
+ """Returns a list of tuples containing the script name, its module and
1128
+ callable.
1129
+
1130
+ Results are derived from the script entries of ``pyproject.toml``. So that:
1131
+
1132
+ .. code-block:: toml
1133
+ [project.scripts]
1134
+ mdedup = "mail_deduplicate.cli:mdedup"
1135
+ mpm = "meta_package_manager.__main__:main"
1136
+
1137
+ Will yield the following list:
1138
+
1139
+ .. code-block:: python
1140
+ (
1141
+ ("mdedup", "mail_deduplicate.cli", "mdedup"),
1142
+ ("mpm", "meta_package_manager.__main__", "main"),
1143
+ ...,
1144
+ )
1145
+ """
1146
+ entries = []
1147
+ if self.pyproject:
1148
+ for cli_id, script in self.pyproject.scripts.items():
1149
+ module_id, callable_id = script.split(":")
1150
+ entries.append((cli_id, module_id, callable_id))
1151
+ # Double check we do not have duplicate entries.
1152
+ all_cli_ids = [cli_id for cli_id, _, _ in entries]
1153
+ assert len(set(all_cli_ids)) == len(all_cli_ids)
1154
+ return entries
1155
+
1156
+ @cached_property
1157
+ def py_target_versions(self) -> tuple[Version, ...] | None:
1158
+ """Generates the list of Python target versions.
1159
+
1160
+ Only takes ``major.minor`` variations into account. Smaller version dimensions
1161
+ are ignored, so a package depending on ``3.8.6`` will keep ``3.8`` as a Python
1162
+ target.
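+
+ For example (illustrative values), a ``requires-python = ">=3.8.6"`` marker
+ is relaxed to ``>=3.8``, which yields:
+
+ .. code-block:: python
+
+ >>> from packaging.specifiers import SpecifierSet
+ >>> relaxed = SpecifierSet(">=3.8")
+ >>> [str(v) for v in TargetVersion if relaxed.contains(v)]
+ ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14']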
1163
+ """
1164
+ if self.pyproject and self.pyproject.requires_python:
1165
+ # Dumb down specifiers' lower bounds to their major.minor version.
1166
+ spec_list = []
1167
+ for spec in self.pyproject.requires_python:
1168
+ if spec.operator in (">=", ">"):
1169
+ release = Version(spec.version).release
1170
+ new_spec = f"{spec.operator}{release[0]}.{release[1]}"
1171
+ else:
1172
+ new_spec = str(spec)
1173
+ spec_list.append(new_spec)
1174
+ relaxed_specs = SpecifierSet(",".join(spec_list))
1175
+ logging.debug(
1176
+ "Relax Python requirements from "
1177
+ f"{self.pyproject.requires_python} to {relaxed_specs}."
1178
+ )
1179
+
1180
+ # Iterate through Python version support.
1181
+ return tuple(
1182
+ Version(target)
1183
+ for target in tuple(TargetVersion)
1184
+ if relaxed_specs.contains(target)
1185
+ )
1186
+ return None
1187
+
1188
+ @cached_property
1189
+ def blacken_docs_params(self) -> str | None:
1190
+ """Generates ``blacken-docs`` parameters.
1191
+
1192
+ `Blacken-docs reuses Black's --target-version pyXY parameters
1193
+ <https://github.com/adamchainz/blacken-docs/blob/cd4e60f/src/blacken_docs/__init__.py#L263-L271>`_,
1194
+ and needs to be fed with a subset of these:
1195
+ - ``--target-version py33``
1196
+ - ``--target-version py34``
1197
+ - ``--target-version py35``
1198
+ - ``--target-version py36``
1199
+ - ``--target-version py37``
1200
+ - ``--target-version py38``
1201
+ - ``--target-version py39``
1202
+ - ``--target-version py310``
1203
+ - ``--target-version py311``
1204
+ - ``--target-version py312``
1205
+ - ``--target-version py313``
1206
+ - ``--target-version py314``
1207
+
1208
+ As mentioned in Black usage, you should `include all Python versions that you
1209
+ want your code to run under
1210
+ <https://github.com/psf/black/issues/751#issuecomment-473066811>`_.
1211
+ """
1212
+ if self.py_target_versions:
1213
+ return " ".join(
1214
+ f"--target-version py{version.major}{version.minor}"
1215
+ for version in self.py_target_versions
1216
+ )
1217
+ return None
1218
+
1219
+ @cached_property
1220
+ def mypy_params(self) -> str | None:
1221
+ """Generates `mypy` parameters.
1222
+
1223
+ Mypy needs to be fed with this parameter: ``--python-version 3.x``.
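+
+ The lowest Python target is clamped to ``MYPY_VERSION_MIN``. For example
+ (illustrative targets), ``3.7`` and ``3.10`` produce ``--python-version 3.8``:
+
+ .. code-block:: python
+
+ >>> max(MYPY_VERSION_MIN, min([(3, 7), (3, 10)]))
+ (3, 8)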
1224
+ """
1225
+ if self.py_target_versions:
1226
+ # Compare to Mypy's lowest supported version of Python dialect.
1227
+ major, minor = max(
1228
+ MYPY_VERSION_MIN,
1229
+ min((v.major, v.minor) for v in self.py_target_versions),
1230
+ )
1231
+ return f"--python-version {major}.{minor}"
1232
+ return None
1233
+
1234
+ @staticmethod
1235
+ def get_current_version() -> str | None:
1236
+ """Returns the current version as managed by bump-my-version.
1237
+
1238
+ Same as calling the CLI:
1239
+
1240
+ .. code-block:: shell-session
1241
+ $ bump-my-version show current_version
1242
+ """
1243
+ conf_file = find_config_file()
1244
+ if not conf_file:
1245
+ return None
1246
+ config = get_configuration(conf_file)
1247
+ config_dict = config.model_dump()
1248
+ return str(resolve_name(config_dict, "current_version"))
1249
+
1250
+ @cached_property
1251
+ def current_version(self) -> str | None:
1252
+ """Returns the current version.
1253
+
1254
+ Current version is fetched from the ``bump-my-version`` configuration file.
1255
+
1256
+ During a release we get two commits bundled into a single event. The first one
1257
+ is the release commit itself freezing the version to the release number. The
1258
+ second one is the commit that bumps the version to the next one. In this
1259
+ situation, the current version returned is the one from the most recent commit.
1260
+ """
1261
+ version = None
1262
+ if self.new_commits_matrix:
1263
+ details = self.new_commits_matrix.include
1264
+ if details:
1265
+ version = details[0].get("current_version")
1266
+ else:
1267
+ version = self.get_current_version()
1268
+ return version
1269
+
1270
+ @cached_property
1271
+ def released_version(self) -> str | None:
1272
+ """Returns the version of the release commit."""
1273
+ version = None
1274
+ if self.release_commits_matrix:
1275
+ details = self.release_commits_matrix.include
1276
+ if details:
1277
+ # This script is only designed for at most 1 release in the list of new
1278
+ # commits.
1279
+ assert len(details) == 1
1280
+ version = details[0].get("current_version")
1281
+ return version
1282
+
1283
+ @cached_property
1284
+ def is_sphinx(self) -> bool:
1285
+ """Returns ``True`` if the Sphinx config file is present."""
1286
+ # The Sphinx config file is present, that's enough for us.
1287
+ return self.sphinx_conf_path.exists() and self.sphinx_conf_path.is_file()
1288
+
1289
+ @cached_property
1290
+ def active_autodoc(self) -> bool:
1291
+ """Returns ``True`` if there are active Sphinx extensions."""
1292
+ if self.is_sphinx:
1293
+ # Look for list of active Sphinx extensions.
1294
+ for node in ast.parse(self.sphinx_conf_path.read_bytes()).body:
1295
+ if isinstance(node, ast.Assign) and isinstance(
1296
+ node.value, ast.List | ast.Tuple
1297
+ ):
1298
+ extension_found = "extensions" in (
1299
+ t.id # type: ignore[attr-defined]
1300
+ for t in node.targets
1301
+ )
1302
+ if extension_found:
1303
+ elements = (
1304
+ e.value
1305
+ for e in node.value.elts
1306
+ if isinstance(e, ast.Constant)
1307
+ )
1308
+ if "sphinx.ext.autodoc" in elements:
1309
+ return True
1310
+ return False
1311
+
1312
+ @cached_property
1313
+ def nuitka_matrix(self) -> Matrix | None:
1314
+ """Pre-compute a matrix for Nuitka compilation workflows.
1315
+
1316
+ Combine the variations of:
1317
+ - all new commits
1318
+ - all entry points
1319
+ - for the 3 main OSes
1320
+ - for a set of architectures
1321
+
1322
+ Returns a ready-to-use matrix structure, where each variation is augmented with
1323
+ specific extra parameters by the way of matching parameters in the `include`
1324
+ directive.
1325
+
1326
+ .. code-block:: python
1327
+ {
1328
+ "os": [
1329
+ "ubuntu-24.04-arm",
1330
+ "ubuntu-slim",
1331
+ "macos-26",
1332
+ "macos-15-intel",
1333
+ "windows-11-arm",
1334
+ "windows-2025",
1335
+ ],
1336
+ "entry_point": [
1337
+ "mpm",
1338
+ ],
1339
+ "commit": [
1340
+ "346ce664f055fbd042a25ee0b7e96702e95",
1341
+ "6f27db47612aaee06fdf08744b09a9f5f6c2",
1342
+ ],
1343
+ "include": [
1344
+ {
1345
+ "target": "linux-arm64",
1346
+ "os": "ubuntu-24.04-arm",
1347
+ "platform_id": "linux",
1348
+ "arch": "arm64",
1349
+ "extension": "bin",
1350
+ },
1351
+ {
1352
+ "target": "linux-x64",
1353
+ "os": "ubuntu-slim",
1354
+ "platform_id": "linux",
1355
+ "arch": "x64",
1356
+ "extension": "bin",
1357
+ },
1358
+ {
1359
+ "target": "macos-arm64",
1360
+ "os": "macos-26",
1361
+ "platform_id": "macos",
1362
+ "arch": "arm64",
1363
+ "extension": "bin",
1364
+ },
1365
+ {
1366
+ "target": "macos-x64",
1367
+ "os": "macos-15-intel",
1368
+ "platform_id": "macos",
1369
+ "arch": "x64",
1370
+ "extension": "bin",
1371
+ },
1372
+ {
1373
+ "target": "windows-arm64",
1374
+ "os": "windows-11-arm",
1375
+ "platform_id": "windows",
1376
+ "arch": "arm64",
1377
+ "extension": "exe",
1378
+ },
1379
+ {
1380
+ "target": "windows-x64",
1381
+ "os": "windows-2025",
1382
+ "platform_id": "windows",
1383
+ "arch": "x64",
1384
+ "extension": "exe",
1385
+ },
1386
+ {
1387
+ "entry_point": "mpm",
1388
+ "cli_id": "mpm",
1389
+ "module_id": "meta_package_manager.__main__",
1390
+ "callable_id": "main",
1391
+ "module_path": "meta_package_manager/__main__.py",
1392
+ },
1393
+ {
1394
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1395
+ "short_sha": "346ce66",
1396
+ "current_version": "2.0.0",
1397
+ },
1398
+ {
1399
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1400
+ "short_sha": "6f27db4",
1401
+ "current_version": "1.9.1",
1402
+ },
1403
+ {
1404
+ "os": "ubuntu-24.04-arm",
1405
+ "entry_point": "mpm",
1406
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1407
+ "bin_name": "mpm-linux-arm64-346ce66.bin",
1408
+ },
1409
+ {
1410
+ "os": "ubuntu-24.04-arm",
1411
+ "entry_point": "mpm",
1412
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1413
+ "bin_name": "mpm-linux-arm64-6f27db4.bin",
1414
+ },
1415
+ {
1416
+ "os": "ubuntu-slim",
1417
+ "entry_point": "mpm",
1418
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1419
+ "bin_name": "mpm-linux-x64-346ce66.bin",
1420
+ },
1421
+ {
1422
+ "os": "ubuntu-slim",
1423
+ "entry_point": "mpm",
1424
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1425
+ "bin_name": "mpm-linux-x64-6f27db4.bin",
1426
+ },
1427
+ {
1428
+ "os": "macos-26",
1429
+ "entry_point": "mpm",
1430
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1431
+ "bin_name": "mpm-macos-arm64-346ce66.bin",
1432
+ },
1433
+ {
1434
+ "os": "macos-26",
1435
+ "entry_point": "mpm",
1436
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1437
+ "bin_name": "mpm-macos-arm64-6f27db4.bin",
1438
+ },
1439
+ {
1440
+ "os": "macos-15-intel",
1441
+ "entry_point": "mpm",
1442
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1443
+ "bin_name": "mpm-macos-x64-346ce66.bin",
1444
+ },
1445
+ {
1446
+ "os": "macos-15-intel",
1447
+ "entry_point": "mpm",
1448
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1449
+ "bin_name": "mpm-macos-x64-6f27db4.bin",
1450
+ },
1451
+ {
1452
+ "os": "windows-11-arm",
1453
+ "entry_point": "mpm",
1454
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1455
+ "bin_name": "mpm-windows-arm64-346ce66.bin",
1456
+ },
1457
+ {
1458
+ "os": "windows-11-arm",
1459
+ "entry_point": "mpm",
1460
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1461
+ "bin_name": "mpm-windows-arm64-6f27db4.bin",
1462
+ },
1463
+ {
1464
+ "os": "windows-2025",
1465
+ "entry_point": "mpm",
1466
+ "commit": "346ce664f055fbd042a25ee0b7e96702e95",
1467
+ "bin_name": "mpm-windows-x64-346ce66.exe",
1468
+ },
1469
+ {
1470
+ "os": "windows-2025",
1471
+ "entry_point": "mpm",
1472
+ "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
1473
+ "bin_name": "mpm-windows-x64-6f27db4.exe",
1474
+ },
1475
+ {
1476
+ "state": "stable",
1477
+ },
1478
+ ],
1479
+ }
1480
+ """
1481
+ # Only produce a matrix if the project is providing CLI entry points.
1482
+ if not self.script_entries:
1483
+ return None
1484
+
1485
+ matrix = Matrix()
1486
+
1487
+ # Register all runners on which we want to run Nuitka builds.
1488
+ matrix.add_variation(
1489
+ "os", tuple(map(itemgetter("os"), NUITKA_BUILD_TARGETS.values()))
1490
+ )
1491
+ # Augment each "os" entry with platform-specific data.
1492
+ for target_data in FLAT_BUILD_TARGETS:
1493
+ matrix.add_includes(target_data)
1494
+
1495
+ # Augment each entry point with some metadata.
1496
+ for cli_id, module_id, callable_id in self.script_entries:
1497
+ # CLI ID is supposed to be unique, we'll use that as a key.
1498
+ matrix.add_variation("entry_point", [cli_id])
1499
+ # Derive CLI module path from its ID.
1500
+ # XXX We consider here the module is directly callable, because Nuitka
1501
+ # doesn't seems to support the entry-point notation.
1502
+ module_path = Path(f"{module_id.replace('.', '/')}.py")
1503
+ assert module_path.exists()
1504
+ matrix.add_includes({
1505
+ "entry_point": cli_id,
1506
+ "cli_id": cli_id,
1507
+ "module_id": module_id,
1508
+ "callable_id": callable_id,
1509
+ "module_path": str(module_path),
1510
+ })
1511
+
1512
+ # We'd like to run a build for each new commit bundled in the action trigger.
1513
+ # If no new commits are detected, it's because we are not in a GitHub workflow
1514
+ # event, so we'll fallback to the current commit and only build for it.
1515
+ build_commit_matrix = (
1516
+ self.new_commits_matrix
1517
+ if self.new_commits_matrix
1518
+ else self.current_commit_matrix
1519
+ )
1520
+ assert build_commit_matrix
1521
+ # Extend the matrix with a new dimension: a list of commits.
1522
+ matrix.add_variation("commit", build_commit_matrix["commit"])
1523
+ matrix.add_includes(*build_commit_matrix.include)
1524
+
1525
+ # Augment each variation set of the matrix with the binary name to be
1526
+ # produced by Nuitka. Iterate over all matrix variation sets so we have all
1527
+ # metadata necessary to generate a unique name specific to these variations.
1528
+ for variations in matrix.solve():
1529
+ # We will re-attach this binary name to the matrix with an include directive,
1530
+ # so we need a copy of the main variations it corresponds to.
1531
+ bin_name_include = {k: variations[k] for k in matrix.variations}
1532
+ bin_name_include["bin_name"] = (
1533
+ "{cli_id}-{target}-{short_sha}.{extension}"
1534
+ ).format(**variations)
1535
+ matrix.add_includes(bin_name_include)
1536
+
1537
+ # All jobs are stable by default, unless marked otherwise by specific
1538
+ # configuration.
1539
+ matrix.add_includes({"state": "stable"})
1540
+ for unstable_target in self.unstable_targets:
1541
+ matrix.add_includes({
1542
+ "state": "unstable",
1543
+ "os": NUITKA_BUILD_TARGETS[unstable_target]["os"],
1544
+ })
1545
+
1546
+ return matrix
1547
+
1548
+ @cached_property
1549
+ def release_notes(self) -> str | None:
1550
+ """Generate notes to be attached to the GitHub release."""
1551
+ # Produce the release notes of the release version or the current one.
1552
+ version = self.released_version
1553
+ if not version:
1554
+ version = self.current_version
1555
+ if not version:
1556
+ return None
1557
+
1558
+ # Extract the changelog entry corresponding to the release version, located
1559
+ # between the first two `##` second-level markdown titles.
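+ # E.g. for version 2.0.0 it captures the block between the "## 2.0.0 ..."
+ # title and the next "##" title of changelog.md (title format illustrative).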
1560
+ changes = ""
1561
+ match = re.search(
1562
+ rf"^##(?P<title>.+{escape(version)} .+?)\n(?P<changes>.*?)\n##",
1563
+ Path("./changelog.md").read_text(encoding="UTF-8"),
1564
+ flags=re.MULTILINE | re.DOTALL,
1565
+ )
1566
+ if match:
1567
+ changes = match.groupdict().get("changes", "").strip()
1568
+ # Add a title.
1569
+ if changes:
1570
+ changes = "### Changes\n\n" + changes
1571
+
1572
+ # Generate a link to the version of the package published on PyPi.
1573
+ pypi_link = ""
1574
+ if self.package_name:
1575
+ pypi_link = (
1576
+ "[🐍 Available on PyPi](https://pypi.org/project/"
1577
+ + self.package_name
1578
+ + "/"
1579
+ + version
1580
+ + ")."
1581
+ )
1582
+
1583
+ # Assemble the release notes.
1584
+ return f"{changes}\n\n{pypi_link}".strip()
1585
+
1586
+ @staticmethod
1587
+ def format_github_value(value: Any) -> str:
1588
+ """Transform Python value to GitHub-friendly, JSON-like, console string.
1589
+
1590
+ Renders:
1591
+ - `str` as-is
1592
+ - `None` into empty string
1593
+ - `bool` into lower-cased string
1594
+ - `Matrix` into JSON string
1595
+ - `Iterable` of mixed strings and `Path` into a serialized space-separated
1596
+ string, where `Path` items are double-quoted
1597
+ - other `Iterable` into a JSON string
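+
+ A doctest-style sketch (values chosen for illustration):
+
+ .. code-block:: python
+
+ >>> Metadata.format_github_value(True)
+ 'true'
+ >>> Metadata.format_github_value([Path("a b.py"), "README.md"])
+ '"a b.py" README.md'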
1598
+ """
1599
+ # Structured metadata to be rendered as JSON.
1600
+ if isinstance(value, Matrix):
1601
+ return str(value)
1602
+
1603
+ # Convert non-strings.
1604
+ if not isinstance(value, str):
1605
+ if value is None:
1606
+ value = ""
1607
+
1608
+ elif isinstance(value, bool):
1609
+ value = str(value).lower()
1610
+
1611
+ elif isinstance(value, dict):
1612
+ raise NotImplementedError
1613
+
1614
+ elif isinstance(value, Iterable):
1615
+ # Cast all items to strings, wrapping Path items with double-quotes.
1616
+ if all(isinstance(i, (str, Path)) for i in value):
1617
+ items = (
1618
+ (f'"{i}"' if isinstance(i, Path) else str(i)) for i in value
1619
+ )
1620
+ value = " ".join(items)
1621
+ # XXX We only support iterables of dict[str, str] for now.
1622
+ else:
1623
+ assert all(
1624
+ isinstance(i, dict)
1625
+ and all(
1626
+ isinstance(k, str) and isinstance(v, str)
1627
+ for k, v in i.items()
1628
+ )
1629
+ for i in value
1630
+ ), f"Unsupported iterable value: {value!r}"
1631
+ value = json.dumps(value)
1632
+
1633
+ else:
1634
+ raise NotImplementedError(f"GitHub formatting for: {value!r}")
1635
+
1636
+ return str(value)
1637
+
1638
+ def dump(self, dialect: Dialect = Dialect.github) -> str:
1639
+ """Returns all metadata in the specified format.
1640
+
1641
+ Defaults to GitHub dialect.
1642
+ """
1643
+ metadata: dict[str, Any] = {
1644
+ "is_bot": self.is_bot,
1645
+ "new_commits": self.new_commits_hash,
1646
+ "release_commits": self.release_commits_hash,
1647
+ "mailmap_exists": self.mailmap_exists,
1648
+ "gitignore_exists": self.gitignore_exists,
1649
+ "python_files": self.python_files,
1650
+ "json_files": self.json_files,
1651
+ "yaml_files": self.yaml_files,
1652
+ "workflow_files": self.workflow_files,
1653
+ "doc_files": self.doc_files,
1654
+ "markdown_files": self.markdown_files,
1655
+ "image_files": self.image_files,
1656
+ "zsh_files": self.zsh_files,
1657
+ "is_python_project": self.is_python_project,
1658
+ "package_name": self.package_name,
1659
+ "blacken_docs_params": self.blacken_docs_params,
1660
+ "mypy_params": self.mypy_params,
1661
+ "current_version": self.current_version,
1662
+ "released_version": self.released_version,
1663
+ "is_sphinx": self.is_sphinx,
1664
+ "active_autodoc": self.active_autodoc,
1665
+ "release_notes": self.release_notes,
1666
+ "new_commits_matrix": self.new_commits_matrix,
1667
+ "release_commits_matrix": self.release_commits_matrix,
1668
+ "build_targets": FLAT_BUILD_TARGETS,
1669
+ "nuitka_matrix": self.nuitka_matrix,
1670
+ }
1671
+
1672
+ logging.debug(f"Raw metadata: {metadata!r}")
1673
+ logging.debug(f"Format metadata into {dialect} format.")
1674
+
1675
+ content = ""
1676
+ if dialect == Dialect.github:
1677
+ for env_name, value in metadata.items():
1678
+ env_value = self.format_github_value(value)
1679
+
1680
+ is_multiline = bool(len(env_value.splitlines()) > 1)
1681
+ if not is_multiline:
1682
+ content += f"{env_name}={env_value}\n"
1683
+ else:
1684
+ # Use a random unique delimiter to encode multiline value:
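+ # This emits GitHub's heredoc-like multiline syntax, e.g. (delimiter random):
+ #   nuitka_matrix<<GHA_DELIMITER_123456789
+ #   {"os": ["ubuntu-slim", ...], ...}
+ #   GHA_DELIMITER_123456789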
1685
+ delimiter = f"GHA_DELIMITER_{randint(10**8, (10**9) - 1)}"
1686
+ content += f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
1687
+ else:
1688
+ assert dialect == Dialect.json
1689
+ content = json.dumps(metadata, cls=JSONMetadata, indent=2)
1690
+
1691
+ logging.debug(f"Formatted metadata:\n{content}")
1692
+
1693
+ return content