gha-utils 4.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gha-utils might be problematic. Click here for more details.

gha_utils/metadata.py ADDED
@@ -0,0 +1,1304 @@
1
+ # Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
2
+ #
3
+ # This program is Free Software; you can redistribute it and/or
4
+ # modify it under the terms of the GNU General Public License
5
+ # as published by the Free Software Foundation; either version 2
6
+ # of the License, or (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with this program; if not, write to the Free Software
15
+ # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
16
+
17
+ """Extract metadata from repository and Python projects to be used by GitHub workflows.
18
+
19
+ The following variables are `printed to the environment file
20
+ <https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files>`_:
21
+
22
+ ```text
23
+ new_commits=346ce664f055fbd042a25ee0b7e96702e95 6f27db47612aaee06fdf08744b09a9f5f6c2
24
+ release_commits=6f27db47612aaee06fdf08744b09a9f5f6c2
25
+ gitignore_exists=true
26
+ python_files=".github/update_mailmap.py" ".github/metadata.py" "setup.py"
27
+ doc_files="changelog.md" "readme.md" "docs/license.md"
28
+ is_python_project=true
29
+ package_name=click-extra
30
+ blacken_docs_params=--target-version py37 --target-version py38
31
+ ruff_py_version=py37
32
+ mypy_params=--python-version 3.7
33
+ current_version=2.0.1
34
+ released_version=2.0.0
35
+ is_sphinx=true
36
+ active_autodoc=true
37
+ release_notes=[🐍 Available on PyPi](https://pypi.org/project/click-extra/2.21.3).
38
+ new_commits_matrix={'commit': ['346ce664f055fbd042a25ee0b7e96702e95',
39
+ '6f27db47612aaee06fdf08744b09a9f5f6c2'],
40
+ 'include': [{'commit': '346ce664f055fbd042a25ee0b7e96702e95',
41
+ 'short_sha': '346ce66',
42
+ 'current_version': '2.0.1'},
43
+ {'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
44
+ 'short_sha': '6f27db4',
45
+ 'current_version': '2.0.0'}]}
46
+ release_commits_matrix={'commit': ['6f27db47612aaee06fdf08744b09a9f5f6c2'],
47
+ 'include': [{'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
48
+ 'short_sha': '6f27db4',
49
+ 'current_version': '2.0.0'}]}
50
+ nuitka_matrix={'entry_point': ['mpm'],
51
+ 'commit': ['346ce664f055fbd042a25ee0b7e96702e95',
52
+ '6f27db47612aaee06fdf08744b09a9f5f6c2'],
53
+ 'os': ['ubuntu-24.04', 'ubuntu-24.04-arm', 'macos-15', 'macos-13', 'windows-2022'],
54
+ 'include': [{'entry_point': 'mpm',
55
+ 'cli_id': 'mpm',
56
+ 'module_id': 'meta_package_manager.__main__',
57
+ 'callable_id': 'main',
58
+ 'module_path': 'meta_package_manager/__main__.py'},
59
+ {'commit': '346ce664f055fbd042a25ee0b7e96702e95',
60
+ 'short_sha': '346ce66',
61
+ 'current_version': '2.0.0'},
62
+ {'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
63
+ 'short_sha': '6f27db4',
64
+ 'current_version': '1.9.1'},
65
+ {'os': 'ubuntu-24.04',
66
+ 'platform_id': 'linux',
67
+ 'arch': 'x64',
68
+ 'extension': 'bin'},
69
+ {'os': 'ubuntu-24.04-arm',
70
+ 'platform_id': 'linux',
71
+ 'arch': 'arm64',
72
+ 'extension': 'bin'},
73
+ {'os': 'macos-15',
74
+ 'platform_id': 'macos',
75
+ 'arch': 'arm64',
76
+ 'extension': 'bin'},
77
+ {'os': 'macos-13',
78
+ 'platform_id': 'macos',
79
+ 'arch': 'x64',
80
+ 'extension': 'bin'},
81
+ {'os': 'windows-2022',
82
+ 'platform_id': 'windows',
83
+ 'arch': 'x64',
84
+ 'extension': 'exe'},
85
+ {'entry_point': 'mpm',
86
+ 'commit': '346ce664f055fbd042a25ee0b7e96702e95',
87
+ 'os': 'ubuntu-24.04',
88
+ 'arch': 'x64',
89
+ 'bin_name': 'mpm-linux-x64-build-346ce66.bin'},
90
+ {'entry_point': 'mpm',
91
+ 'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
92
+ 'os': 'ubuntu-24.04',
93
+ 'arch': 'x64',
94
+ 'bin_name': 'mpm-linux-x64-build-6f27db4.bin'},
95
+ {'entry_point': 'mpm',
96
+ 'commit': '346ce664f055fbd042a25ee0b7e96702e95',
97
+ 'os': 'ubuntu-24.04-arm',
98
+ 'arch': 'arm64',
99
+ 'bin_name': 'mpm-linux-arm64-build-346ce66.bin'},
100
+ {'entry_point': 'mpm',
101
+ 'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
102
+ 'os': 'ubuntu-24.04-arm',
103
+ 'arch': 'arm64',
104
+ 'bin_name': 'mpm-linux-arm64-build-6f27db4.bin'},
105
+ {'entry_point': 'mpm',
106
+ 'commit': '346ce664f055fbd042a25ee0b7e96702e95',
107
+ 'os': 'macos-15',
108
+ 'arch': 'arm64',
109
+ 'bin_name': 'mpm-macos-arm64-build-346ce66.bin'},
110
+ {'entry_point': 'mpm',
111
+ 'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
112
+ 'os': 'macos-15',
113
+ 'arch': 'arm64',
114
+ 'bin_name': 'mpm-macos-arm64-build-6f27db4.bin'},
115
+ {'entry_point': 'mpm',
116
+ 'commit': '346ce664f055fbd042a25ee0b7e96702e95',
117
+ 'os': 'macos-13',
118
+ 'arch': 'x64',
119
+ 'bin_name': 'mpm-macos-x64-build-346ce66.bin'},
120
+ {'entry_point': 'mpm',
121
+ 'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
122
+ 'os': 'macos-13',
123
+ 'arch': 'x64',
124
+ 'bin_name': 'mpm-macos-x64-build-6f27db4.bin'},
125
+ {'entry_point': 'mpm',
126
+ 'commit': '346ce664f055fbd042a25ee0b7e96702e95',
127
+ 'os': 'windows-2022',
128
+ 'arch': 'x64',
129
+ 'bin_name': 'mpm-windows-x64-build-346ce66.exe'},
130
+ {'entry_point': 'mpm',
131
+ 'commit': '6f27db47612aaee06fdf08744b09a9f5f6c2',
132
+ 'os': 'windows-2022',
133
+ 'arch': 'x64',
134
+ 'bin_name': 'mpm-windows-x64-build-6f27db4.exe'}]}
135
+ ```
136
+
137
+ .. warning::
138
+
139
+ The ``new_commits_matrix``, ``release_commits_matrix`` and ``nuitka_matrix``
140
+ variables in the block above are pretty-printed for readability. They are not
141
+ actually formatted this way in the environment file, but inlined.
142
+ """
143
+
144
+ from __future__ import annotations
145
+
146
+ import ast
147
+ import json
148
+ import logging
149
+ import os
150
+ import re
151
+ import sys
152
+ from collections.abc import Iterable
153
+ from functools import cached_property
154
+ from itertools import product
155
+ from pathlib import Path
156
+ from random import randint
157
+ from re import escape
158
+ from typing import Any, Final, Iterator, cast
159
+
160
+ if sys.version_info >= (3, 11):
161
+ from enum import StrEnum
162
+
163
+ import tomllib
164
+ else:
165
+ import tomli as tomllib # type: ignore[import-not-found]
166
+ from backports.strenum import StrEnum # type: ignore[import-not-found]
167
+
168
+ from bumpversion.config import get_configuration # type: ignore[import-untyped]
169
+ from bumpversion.config.files import find_config_file # type: ignore[import-untyped]
170
+ from bumpversion.show import resolve_name # type: ignore[import-untyped]
171
+ from packaging.specifiers import SpecifierSet
172
+ from packaging.version import Version
173
+ from pydriller import Commit, Git, Repository # type: ignore[import-untyped]
174
+ from pyproject_metadata import ConfigurationError, StandardMetadata
175
+ from wcmatch.glob import (
176
+ BRACE,
177
+ DOTGLOB,
178
+ FOLLOW,
179
+ GLOBSTAR,
180
+ GLOBTILDE,
181
+ NEGATE,
182
+ NODIR,
183
+ iglob,
184
+ )
185
+
186
SHORT_SHA_LENGTH: Final = 7
"""Default SHA length hard-coded to ``7``.

.. caution::

    The `default is subject to change <https://stackoverflow.com/a/21015031>`_ and
    depends on the size of the repository.
"""

RESERVED_MATRIX_KEYWORDS: Final = ["include", "exclude"]
"""Matrix keys that have a special meaning in GitHub workflows.

These cannot be used as regular variation dimensions when building a matrix.
"""
196
+
197
+
198
# Built with the functional StrEnum API: member values equal their (already
# lowercase) names, matching GitHub's event identifiers verbatim.
WorkflowEvent = StrEnum(
    "WorkflowEvent",
    (
        "branch_protection_rule",
        "check_run",
        "check_suite",
        "create",
        "delete",
        "deployment",
        "deployment_status",
        "discussion",
        "discussion_comment",
        "fork",
        "gollum",
        "issue_comment",
        "issues",
        "label",
        "merge_group",
        "milestone",
        "page_build",
        "project",
        "project_card",
        "project_column",
        "public",
        "pull_request",
        "pull_request_comment",
        "pull_request_review",
        "pull_request_review_comment",
        "pull_request_target",
        "push",
        "registry_package",
        "release",
        "repository_dispatch",
        "schedule",
        "status",
        "watch",
        "workflow_call",
        "workflow_dispatch",
        "workflow_run",
    ),
)
"""Workflow events that cause a workflow to run.

`List of events
<https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows>`_.
"""
244
+
245
+
246
# NOTE(review): the rendering logic consuming these dialects lives elsewhere in
# the module (not visible in this chunk).
Dialects = StrEnum("Dialects", ("github", "plain"))
"""Dialects in which metadata can be formatted to."""
248
+
249
+
250
class TargetVersion(StrEnum):
    """List of Python 3 minor versions supported by Black.

    `Mirrors official implementation from black.mode.TargetVersion
    <https://github.com/psf/black/blob/main/src/black/mode.py>`_.

    Member values are ``major.minor`` strings so they can be fed directly to
    ``packaging.version.Version`` and matched against specifier sets (see
    ``Metadata.py_target_versions``).
    """

    PY33 = "3.3"
    PY34 = "3.4"
    PY35 = "3.5"
    PY36 = "3.6"
    PY37 = "3.7"
    PY38 = "3.8"
    PY39 = "3.9"
    PY310 = "3.10"
    PY311 = "3.11"
    PY312 = "3.12"
    PY313 = "3.13"
268
+
269
+
270
# (major, minor) tuple so it can be compared directly against version tuples.
MYPY_VERSION_MIN: Final[tuple[int, int]] = (3, 8)
"""Earliest version supported by Mypy's ``--python-version 3.x`` parameter.

`Sourced from Mypy original implementation
<https://github.com/python/mypy/blob/master/mypy/defaults.py>`_.
"""
276
+
277
+
278
class Matrix(dict):
    """A matrix to be used in a GitHub workflow.

    A plain ``dict`` whose string form is its JSON serialization, ready to be
    inlined into a workflow environment file.
    """

    def __str__(self) -> str:
        """Render the matrix as its JSON representation."""
        serialized = json.dumps(self)
        return serialized
284
+
285
+
286
+ class Metadata:
287
+ """Metadata class."""
288
+
289
    def __init__(self) -> None:
        """Initialize internal variables."""
        # None indicates the is_python_project variable has not been evaluated yet.
        self._is_python_project: bool | None = None

    # Candidate configuration files, resolved relative to the current working
    # directory (expected to be the repository root when run from a workflow).
    pyproject_path: Path = Path() / "pyproject.toml"
    sphinx_conf_path: Path = Path() / "docs" / "conf.py"
296
+
297
+ @cached_property
298
+ def in_ci_env(self) -> bool:
299
+ """Returns ``True`` if the code is executed in a GitHub Actions runner.
300
+
301
+ Other CI are available at:
302
+ https://github.com/cucumber/ci-environment/blob/main/python/src/ci_environment/CiEnvironments.json
303
+ """
304
+ return bool("GITHUB_RUN_ID" in os.environ)
305
+
306
+ @cached_property
307
+ def github_context(self) -> dict[str, Any]:
308
+ """Load GitHub context from the environment.
309
+
310
+ Expect ``GITHUB_CONTEXT`` to be set as part of the environment. I.e., adds the
311
+ following as part of your job step calling this script:
312
+
313
+ .. code-block:: yaml
314
+
315
+ - name: Project metadata
316
+ id: project-metadata
317
+ env:
318
+ GITHUB_CONTEXT: ${{ toJSON(github) }}
319
+ run: |
320
+ gha-utils --verbosity DEBUG metadata --overwrite "$GITHUB_OUTPUT"
321
+
322
+ .. todo::
323
+ Try to remove reliance on GitHub context entirely so we can eliminate the
324
+ JSON/env hack above.
325
+ """
326
+ if "GITHUB_CONTEXT" not in os.environ:
327
+ if self.in_ci_env:
328
+ message = (
329
+ "Missing GitHub context in environment. "
330
+ "Did you forget to set GITHUB_CONTEXT?"
331
+ )
332
+ logging.warning(message)
333
+ return {}
334
+ context = json.loads(os.environ["GITHUB_CONTEXT"])
335
+ logging.debug("--- GitHub context ---")
336
+ logging.debug(json.dumps(context, indent=4))
337
+ return context # type:ignore[no-any-return]
338
+
339
+ def git_stash_count(self, git_repo: Git) -> int:
340
+ """Returns the number of stashes."""
341
+ count = int(
342
+ git_repo.repo.git.rev_list(
343
+ "--walk-reflogs", "--ignore-missing", "--count", "refs/stash"
344
+ )
345
+ )
346
+ logging.debug(f"Number of stashes in repository: {count}")
347
+ return count
348
+
349
    def commit_matrix(self, commits: Iterable[Commit] | None) -> Matrix | None:
        """Pre-compute a matrix of commits.

        .. danger::
            This method temporarily modify the state of the repository to compute
            version metadata from the past.

            To prevent any loss of uncommitted data, it stashes and unstash the
            local changes between checkouts.

        The list of commits is augmented with long and short SHA values, as well as
        current version. Most recent commit is first, oldest is last.

        Returns ``None`` for an empty or missing commit list.

        Returns a ready-to-use matrix structure:

        .. code-block:: python
            {
                "commit": [
                    "346ce664f055fbd042a25ee0b7e96702e95",
                    "6f27db47612aaee06fdf08744b09a9f5f6c2",
                ],
                "include": [
                    {
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "short_sha": "346ce66",
                        "current_version": "2.0.1",
                    },
                    {
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "short_sha": "6f27db4",
                        "current_version": "2.0.0",
                    },
                ],
            }
        """
        if not commits:
            return None

        git = Git(".")
        current_commit = git.repo.head.commit.hexsha

        # Check if we need to get back in time in the Git log and browse past commits.
        if len(commits) == 1:  # type: ignore[arg-type]
            # Is the current commit the one we're looking for?
            past_commit_lookup = bool(
                current_commit != commits[0].hash  # type: ignore[index]
            )
        # If we have multiple commits then yes, we need to look for past commits.
        else:
            past_commit_lookup = True

        # We need to go back in time, but first save the current state of the
        # repository.
        if past_commit_lookup:
            logging.debug(
                "We need to look into the commit history. Inspect the initial state of the repository."
            )

            # Refuse to mutate a developer's local checkout: the stash/checkout
            # dance below is only safe on a disposable CI workspace.
            if not self.in_ci_env:
                raise RuntimeError(
                    "Local repository manipulations only allowed in CI environment"
                )

            # Save the initial commit reference and SHA of the repository. The reference is
            # either the canonical active branch name (i.e. ``main``), or the commit SHA if
            # the current HEAD commit is detached from a branch.
            if git.repo.head.is_detached:
                init_ref = current_commit
            else:
                init_ref = git.repo.active_branch.name
            logging.debug(f"Initial commit reference: {init_ref}")

            # Try to stash local changes and check if we'll need to unstash them later.
            # ``git stash`` is a no-op on a clean tree, so the only reliable signal
            # that something was stashed is a change in the stash counter.
            counter_before = self.git_stash_count(git)
            logging.debug("Try to stash local changes before our series of checkouts.")
            git.repo.git.stash()
            counter_after = self.git_stash_count(git)
            logging.debug(
                f"Stash counter changes after 'git stash' command: {counter_before} -> {counter_after}"
            )
            assert counter_after >= counter_before
            need_unstash = bool(counter_after > counter_before)
            logging.debug(f"Need to unstash after checkouts: {need_unstash}")

        else:
            init_ref = None
            need_unstash = False
            logging.debug(
                f"No need to look into the commit history: repository is already checked out at {current_commit}"
            )

        sha_list = []
        include_list = []
        for commit in commits:
            if past_commit_lookup:
                logging.debug(f"Checkout to commit {commit.hash}")
                git.checkout(commit.hash)

            # Version is read from the checked-out working tree, hence the
            # checkout above.
            logging.debug(f"Extract project version at commit {commit.hash}")
            current_version = Metadata.get_current_version()

            sha_list.append(commit.hash)
            include_list.append({
                "commit": commit.hash,
                "short_sha": commit.hash[:SHORT_SHA_LENGTH],
                "current_version": current_version,
            })

        # Restore the repository to its initial state.
        if past_commit_lookup:
            logging.debug(f"Restore repository to {init_ref}.")
            git.checkout(init_ref)
            if need_unstash:
                logging.debug("Unstash local changes that were previously saved.")
                git.repo.git.stash("pop")

        return Matrix({
            "commit": sha_list,
            "include": include_list,
        })
469
+
470
+ @cached_property
471
+ def event_type(self) -> WorkflowEvent | None: # type: ignore[valid-type]
472
+ """Returns the type of event that triggered the workflow run.
473
+
474
+ .. caution::
475
+ This property is based on a crude heuristics as it only looks at the value
476
+ of the ``GITHUB_BASE_REF`` environment variable. Which is `only set when
477
+ the event that triggers a workflow run is either pull_request or pull_request_target
478
+ <https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables>`_.
479
+
480
+ .. todo::
481
+ Add detection of all workflow trigger events.
482
+ """
483
+ if not self.in_ci_env:
484
+ logging.warning(
485
+ "Cannot guess event type because we're not in a CI environment."
486
+ )
487
+ return None
488
+ if "GITHUB_BASE_REF" not in os.environ:
489
+ logging.warning(
490
+ "Cannot guess event type because no GITHUB_BASE_REF env var found."
491
+ )
492
+ return None
493
+
494
+ if bool(os.environ.get("GITHUB_BASE_REF")):
495
+ return WorkflowEvent.pull_request # type: ignore[no-any-return]
496
+ return WorkflowEvent.push # type: ignore[no-any-return]
497
+
498
    @cached_property
    def commit_range(self) -> tuple[str, str] | None:
        """Range of commits bundled within the triggering event.

        Returns a ``(start, end)`` pair of git references, or ``None`` when no
        GitHub context or event type is available.

        A workflow run is triggered by a singular event, which might encapsulate one or
        more commits. This means the workflow will only run once on the last commit,
        even if multiple new commits where pushed.

        This is annoying when we want to keep a carefully constructed commit history,
        and want to run the workflow on each commit. The typical example is a pull
        request that is merged upstream but we'd like to produce artifacts (builds,
        packages, etc.) for each individual commit.

        The default ``GITHUB_SHA`` environment variable is not enough as it only points to
        the last commit. We need to inspect the commit history to find all new ones. New
        commits needs to be fetched differently in ``push`` and ``pull_requests``
        events.

        .. seealso::

            - https://stackoverflow.com/a/67204539
            - https://stackoverflow.com/a/62953566
            - https://stackoverflow.com/a/61861763

        .. todo::
            Refactor so we can get rid of ``self.github_context``. Maybe there's enough metadata lying around in
            the environment variables that we can inspect the git history and find the commit range.
        """
        if not self.github_context or not self.event_type:
            return None
        # Pull request event.
        if self.event_type in (  # type: ignore[unreachable]
            WorkflowEvent.pull_request,
            WorkflowEvent.pull_request_target,
        ):
            # GITHUB_BASE_REF is guaranteed non-empty for pull request events
            # (that's how event_type classified the event).
            base_ref = os.environ["GITHUB_BASE_REF"]
            assert base_ref
            start = f"origin/{base_ref}"
            # We need to checkout the HEAD commit instead of the artificial merge
            # commit introduced by the pull request.
            end = self.github_context["event"]["pull_request"]["head"]["sha"]
        # Push event.
        else:
            start = self.github_context["event"]["before"]
            end = os.environ["GITHUB_SHA"]
            assert end
        logging.debug(f"Commit range: {start} -> {end}")
        return start, end
546
+
547
+ @cached_property
548
+ def current_commit(self) -> Commit | None:
549
+ """Returns the current ``Commit`` object."""
550
+ return next(Repository(".", single="HEAD").traverse_commits())
551
+
552
    @cached_property
    def current_commit_matrix(self) -> Matrix | None:
        """Pre-computed matrix with long and short SHA values of the current commit.

        Wraps the single ``HEAD`` commit into the same matrix structure used for
        multi-commit events (see ``commit_matrix``).
        """
        return self.commit_matrix((self.current_commit,))
556
+
557
+ @cached_property
558
+ def new_commits(self) -> tuple[Commit, ...] | None:
559
+ """Returns list of ``Commit`` objects bundled within the triggering event."""
560
+ if not self.commit_range:
561
+ return None
562
+ start, end = self.commit_range
563
+ # Remove the last commit, as the commit range is inclusive.
564
+ return tuple(
565
+ Repository(
566
+ ".",
567
+ from_commit=start,
568
+ to_commit=end,
569
+ order="reverse",
570
+ ).traverse_commits(),
571
+ )[:-1]
572
+
573
    @cached_property
    def new_commits_matrix(self) -> Matrix | None:
        """Pre-computed matrix with long and short SHA values of new commits.

        ``None`` when the triggering event's commit range cannot be determined.
        """
        return self.commit_matrix(self.new_commits)
577
+
578
+ @cached_property
579
+ def new_commits_hash(self) -> tuple[str, ...] | None:
580
+ """List all hashes of new commits."""
581
+ return (
582
+ cast(tuple[str, ...], self.new_commits_matrix["commit"])
583
+ if self.new_commits_matrix
584
+ else None
585
+ )
586
+
587
+ @cached_property
588
+ def release_commits(self) -> tuple[Commit, ...] | None:
589
+ """Returns list of ``Commit`` objects to be tagged within the triggering event.
590
+
591
+ We cannot identify a release commit based the presence of a ``vX.Y.Z`` tag
592
+ alone. That's because it is not present in the ``prepare-release`` pull request
593
+ produced by the ``changelog.yaml`` workflow. The tag is produced later on by
594
+ the ``release.yaml`` workflow, when the pull request is merged to ``main``.
595
+
596
+ Our best second option is to identify a release based on the full commit
597
+ message, based on the template used in the ``changelog.yaml`` workflow.
598
+ """
599
+ if not self.new_commits:
600
+ return None
601
+ return tuple(
602
+ commit
603
+ for commit in self.new_commits
604
+ if re.fullmatch(
605
+ r"^\[changelog\] Release v[0-9]+\.[0-9]+\.[0-9]+$",
606
+ commit.msg,
607
+ )
608
+ )
609
+
610
    @cached_property
    def release_commits_matrix(self) -> Matrix | None:
        """Pre-computed matrix with long and short SHA values of release commits.

        ``None`` when no release commit is found among the new commits.
        """
        return self.commit_matrix(self.release_commits)
614
+
615
+ @cached_property
616
+ def release_commits_hash(self) -> tuple[str, ...] | None:
617
+ """List all hashes of release commits."""
618
+ return (
619
+ cast(tuple[str, ...], self.release_commits_matrix["commit"])
620
+ if self.release_commits_matrix
621
+ else None
622
+ )
623
+
624
    @staticmethod
    def glob_files(*patterns: str) -> Iterator[str]:
        """Glob files in patterns, while optionally ignoring some.

        Patterns use ``wcmatch`` syntax: ``**`` globstar, brace expansion, ``~``
        expansion and ``!``-prefixed exclusion patterns (``NEGATE``). ``NODIR``
        restricts matches to files, and symlinks are followed (``FOLLOW``).
        """
        yield from iglob(
            patterns,
            flags=NODIR | GLOBSTAR | DOTGLOB | GLOBTILDE | BRACE | FOLLOW | NEGATE,
        )
631
+
632
+ @cached_property
633
+ def gitignore_exists(self) -> bool:
634
+ return Path(".gitignore").is_file()
635
+
636
    @cached_property
    def python_files(self) -> Iterator[str]:
        """Returns a list of python files.

        Any ``*.py`` file in the tree, excluding the ``.venv`` folder.
        """
        yield from self.glob_files("**/*.py", "!.venv/**")
640
+
641
    @cached_property
    def doc_files(self) -> Iterator[str]:
        """Returns a list of doc files.

        Markdown, reStructuredText and TeX files in the tree, excluding the
        ``.venv`` folder.
        """
        yield from self.glob_files("**/*.{md,markdown,rst,tex}", "!.venv/**")
645
+
646
+ @property
647
+ def is_python_project(self):
648
+ """Returns ``True`` if repository is a Python project.
649
+
650
+ Presence of a ``pyproject.toml`` file is not enough, as 3rd party tools can use
651
+ that file to store their own configuration.
652
+ """
653
+ return self._is_python_project
654
+
655
+ @is_python_project.getter
656
+ def is_python_project(self):
657
+ """Try to read and validate the ``pyproject.toml`` file on access to the
658
+ ``is_python_project`` property.
659
+ """
660
+ if self._is_python_project is None:
661
+ self.pyproject
662
+ return self._is_python_project
663
+
664
    @cached_property
    def pyproject(self) -> StandardMetadata | None:
        """Returns metadata stored in the ``pyproject.toml`` file.

        Returns ``None`` when the file is missing or does not follow the
        standard ``[project]`` metadata layout.

        Also sets the internal ``_is_python_project`` value to ``True`` if the
        ``pyproject.toml`` exists and respects the standards. ``False`` otherwise.
        """
        if self.pyproject_path.exists() and self.pyproject_path.is_file():
            toml = tomllib.loads(self.pyproject_path.read_text(encoding="UTF-8"))
            try:
                metadata = StandardMetadata.from_pyproject(toml)
                self._is_python_project = True
                return metadata
            except ConfigurationError:
                # A pyproject.toml used only by 3rd party tools (no standard
                # [project] table): not considered a Python project.
                pass

        self._is_python_project = False
        return None
682
+
683
+ @cached_property
684
+ def package_name(self) -> str | None:
685
+ """Returns package name as published on PyPi."""
686
+ if self.pyproject and self.pyproject.canonical_name:
687
+ return self.pyproject.canonical_name
688
+ return None
689
+
690
+ @cached_property
691
+ def script_entries(self) -> list[tuple[str, str, str]]:
692
+ """Returns a list of tuples containing the script name, its module and callable.
693
+
694
+ Results are derived from the script entries of ``pyproject.toml``. So that:
695
+
696
+ .. code-block:: toml
697
+ [project.scripts]
698
+ mdedup = "mail_deduplicate.cli:mdedup"
699
+ mpm = "meta_package_manager.__main__:main"
700
+
701
+ Will yields the following list:
702
+
703
+ .. code-block:: python
704
+ (
705
+ ("mdedup", "mail_deduplicate.cli", "mdedup"),
706
+ ("mpm", "meta_package_manager.__main__", "main"),
707
+ ...,
708
+ )
709
+ """
710
+ entries = []
711
+ if self.pyproject:
712
+ for cli_id, script in self.pyproject.scripts.items():
713
+ module_id, callable_id = script.split(":")
714
+ entries.append((cli_id, module_id, callable_id))
715
+ # Double check we do not have duplicate entries.
716
+ all_cli_ids = [cli_id for cli_id, _, _ in entries]
717
+ assert len(set(all_cli_ids)) == len(all_cli_ids)
718
+ return entries
719
+
720
    @cached_property
    def py_target_versions(self) -> tuple[Version, ...] | None:
        """Generates the list of Python target versions.

        Only takes ``major.minor`` variations into account. Smaller version dimensions
        are ignored, so a package depending on ``3.8.6`` will keep ``3.8`` as a Python
        target.

        Returns ``None`` when no ``requires-python`` metadata is available.
        """
        if self.pyproject and self.pyproject.requires_python:
            # Dumb down specifiers' lower bounds to their major.minor version.
            spec_list = []
            for spec in self.pyproject.requires_python:
                # Only lower-bound operators are relaxed; upper bounds and exact
                # pins are kept verbatim.
                if spec.operator in (">=", ">"):
                    release = Version(spec.version).release
                    new_spec = f"{spec.operator}{release[0]}.{release[1]}"
                else:
                    new_spec = str(spec)
                spec_list.append(new_spec)
            relaxed_specs = SpecifierSet(",".join(spec_list))
            logging.debug(
                "Relax Python requirements from "
                f"{self.pyproject.requires_python} to {relaxed_specs}."
            )

            # Iterate through Python version support.
            return tuple(
                Version(target)
                for target in tuple(TargetVersion)
                if relaxed_specs.contains(target)
            )
        return None
751
+
752
+ @cached_property
753
+ def blacken_docs_params(self) -> tuple[str, ...] | None:
754
+ """Generates ``blacken-docs`` parameters.
755
+
756
+ `Blacken-docs reuses Black's --target-version pyXY parameters
757
+ <https://github.com/adamchainz/blacken-docs/blob/cd4e60f/src/blacken_docs/__init__.py#L263-L271>`_,
758
+ and needs to be fed with a subset of these:
759
+ - ``--target-version py33``
760
+ - ``--target-version py34``
761
+ - ``--target-version py35``
762
+ - ``--target-version py36``
763
+ - ``--target-version py37``
764
+ - ``--target-version py38``
765
+ - ``--target-version py39``
766
+ - ``--target-version py310``
767
+ - ``--target-version py311``
768
+ - ``--target-version py312``
769
+ - ``--target-version py313``
770
+
771
+ As mentioned in Black usage, you should `include all Python versions that you
772
+ want your code to run under
773
+ <https://github.com/psf/black/issues/751#issuecomment-473066811>`_.
774
+ """
775
+ if self.py_target_versions:
776
+ return tuple(
777
+ f"--target-version py{version.major}{version.minor}"
778
+ for version in self.py_target_versions
779
+ )
780
+ return None
781
+
782
+ @cached_property
783
+ def ruff_py_version(self) -> str | None:
784
+ """Returns the oldest Python version targeted.
785
+
786
+ .. caution::
787
+
788
+ Unlike ``blacken-docs``, `ruff doesn't support multiple
789
+ --target-version values
790
+ <https://github.com/astral-sh/ruff/issues/2857#issuecomment-1428100515>`_,
791
+ and `only supports the minimum Python version
792
+ <https://github.com/astral-sh/ruff/issues/2519>`_.
793
+ """
794
+ if self.py_target_versions:
795
+ version = self.py_target_versions[0]
796
+ return f"py{version.major}{version.minor}"
797
+ return None
798
+
799
+ @cached_property
800
+ def mypy_params(self) -> str | None:
801
+ """Generates `mypy` parameters.
802
+
803
+ Mypy needs to be fed with this parameter: ``--python-version 3.x``.
804
+ """
805
+ if self.py_target_versions:
806
+ # Compare to Mypy's lowest supported version of Python dialect.
807
+ major, minor = max(
808
+ MYPY_VERSION_MIN,
809
+ min((v.major, v.minor) for v in self.py_target_versions),
810
+ )
811
+ return f"--python-version {major}.{minor}"
812
+ return None
813
+
814
    @staticmethod
    def get_current_version() -> str | None:
        """Returns the current version as managed by bump-my-version.

        Returns ``None`` when no bump-my-version configuration file is found.

        Same as calling the CLI:

        .. code-block:: shell-session
            $ bump-my-version show current_version
        """
        conf_file = find_config_file()
        if not conf_file:
            return None
        config = get_configuration(conf_file)
        # Resolve the dotted "current_version" name against the dumped config.
        config_dict = config.model_dump()
        return str(resolve_name(config_dict, "current_version"))
829
+
830
+ @cached_property
831
+ def current_version(self) -> str | None:
832
+ """Returns the current version.
833
+
834
+ I.e. the version of the most recent commit.
835
+ """
836
+ version = None
837
+ if self.new_commits_matrix:
838
+ details = self.new_commits_matrix.get("include")
839
+ if details:
840
+ version = details[0].get("current_version")
841
+ return version
842
+
843
+ @cached_property
844
+ def released_version(self) -> str | None:
845
+ """Returns the version of the release commit."""
846
+ version = None
847
+ if self.release_commits_matrix:
848
+ details = self.release_commits_matrix.get("include")
849
+ if details:
850
+ # This script is only designed for at most 1 release in the list of new
851
+ # commits.
852
+ assert len(details) == 1
853
+ version = details[0].get("current_version")
854
+ return version
855
+
856
+ @cached_property
857
+ def is_sphinx(self) -> bool:
858
+ """Returns ``True`` if the Sphinx config file is present."""
859
+ # The Sphinx config file is present, that's enough for us.
860
+ return self.sphinx_conf_path.exists() and self.sphinx_conf_path.is_file()
861
+
862
+ @cached_property
863
+ def active_autodoc(self) -> bool:
864
+ """Returns ``True`` if there are active Sphinx extensions."""
865
+ if self.is_sphinx:
866
+ # Look for list of active Sphinx extensions.
867
+ for node in ast.parse(self.sphinx_conf_path.read_bytes()).body:
868
+ if isinstance(node, ast.Assign) and isinstance(
869
+ node.value, ast.List | ast.Tuple
870
+ ):
871
+ extension_found = "extensions" in (
872
+ t.id # type: ignore[attr-defined]
873
+ for t in node.targets
874
+ )
875
+ if extension_found:
876
+ elements = (
877
+ e.value
878
+ for e in node.value.elts
879
+ if isinstance(e, ast.Constant)
880
+ )
881
+ if "sphinx.ext.autodoc" in elements:
882
+ return True
883
+ return False
884
+
885
    @cached_property
    def nuitka_matrix(self) -> Matrix | None:
        """Pre-compute a matrix for Nuitka compilation workflows.

        Combine the variations of:
        - all new commits
        - all entry points
        - for the 3 main OSes
        - for a set of architectures

        Returns a ready-to-use matrix structure, where each variation is augmented with
        specific extra parameters by the way of matching parameters in the `include`
        directive.

        .. code-block:: python
            {
                "entry_point": ["mpm"],
                "commit": [
                    "346ce664f055fbd042a25ee0b7e96702e95",
                    "6f27db47612aaee06fdf08744b09a9f5f6c2",
                ],
                "os": [
                    "ubuntu-24.04",
                    "ubuntu-24.04-arm",
                    "macos-15",
                    "macos-13",
                    "windows-2022",
                ],
                "include": [
                    {
                        "entry_point": "mpm",
                        "cli_id": "mpm",
                        "module_id": "meta_package_manager.__main__",
                        "callable_id": "main",
                        "module_path": "meta_package_manager/__main__.py",
                    },
                    {
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "short_sha": "346ce66",
                        "current_version": "2.0.0",
                    },
                    {
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "short_sha": "6f27db4",
                        "current_version": "1.9.1",
                    },
                    {
                        "os": "ubuntu-24.04",
                        "platform_id": "linux",
                        "arch": "x64",
                        "extension": "bin",
                    },
                    {
                        "os": "ubuntu-24.04-arm",
                        "platform_id": "linux",
                        "arch": "arm64",
                        "extension": "bin",
                    },
                    {
                        "os": "macos-15",
                        "platform_id": "macos",
                        "arch": "arm64",
                        "extension": "bin",
                    },
                    {
                        "os": "macos-13",
                        "platform_id": "macos",
                        "arch": "x64",
                        "extension": "bin",
                    },
                    {
                        "os": "windows-2022",
                        "platform_id": "windows",
                        "arch": "x64",
                        "extension": "exe",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "ubuntu-24.04",
                        "arch": "x64",
                        "bin_name": "mpm-linux-x64-build-346ce66.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "os": "ubuntu-24.04",
                        "arch": "x64",
                        "bin_name": "mpm-linux-x64-build-6f27db4.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "ubuntu-24.04-arm",
                        "arch": "arm64",
                        "bin_name": "mpm-linux-arm64-build-346ce66.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "os": "ubuntu-24.04-arm",
                        "arch": "arm64",
                        "bin_name": "mpm-linux-arm64-build-6f27db4.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "macos-15",
                        "arch": "arm64",
                        "bin_name": "mpm-macos-arm64-build-346ce66.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "os": "macos-15",
                        "arch": "arm64",
                        "bin_name": "mpm-macos-arm64-build-6f27db4.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "macos-13",
                        "arch": "x64",
                        "bin_name": "mpm-macos-x64-build-346ce66.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "os": "macos-13",
                        "arch": "x64",
                        "bin_name": "mpm-macos-x64-build-6f27db4.bin",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "346ce664f055fbd042a25ee0b7e96702e95",
                        "os": "windows-2022",
                        "arch": "x64",
                        "bin_name": "mpm-windows-x64-build-346ce66.exe",
                    },
                    {
                        "entry_point": "mpm",
                        "commit": "6f27db47612aaee06fdf08744b09a9f5f6c2",
                        "os": "windows-2022",
                        "arch": "x64",
                        "bin_name": "mpm-windows-x64-build-6f27db4.exe",
                    },
                ],
            }
        """
        # Only produce a matrix if the project is providing CLI entry points.
        if not self.script_entries:
            return None

        # In the future, we might support and bridge that matrix with the full CPython
        # platform support list. See target triples at:
        # https://peps.python.org/pep-0011/
        # https://snarky.ca/webassembly-and-its-platform-targets/
        matrix: dict[str, list[Any]] = {
            "entry_point": [],
            # Run the compilation only on the latest supported version of each OS.
            # The exception is macOS, as macos-15 is arm64 and macos-13 is x64, so we
            # need both to target the two architectures.
            # XXX Windows arm64 support is being considered:
            # https://github.com/actions/runner-images/issues/10820
            "os": [
                "ubuntu-24.04",  # x64
                "ubuntu-24.04-arm",  # arm64
                "macos-15",  # arm64
                "macos-13",  # x64
                "windows-2022",  # x64
            ],
            # Extra parameters.
            "include": [],
        }

        # Augment each entry point with some metadata.
        extra_entry_point_params = []
        for cli_id, module_id, callable_id in self.script_entries:
            # CLI ID is supposed to be unique, we'll use that as a key.
            matrix["entry_point"].append(cli_id)
            # Derive CLI module path from its ID.
            # XXX We consider here the module is directly callable, because Nuitka
            # doesn't seem to support the entry-point notation.
            module_path = Path(f"{module_id.replace('.', '/')}.py")
            assert module_path.exists()
            extra_entry_point_params.append(
                {
                    "entry_point": cli_id,
                    "cli_id": cli_id,
                    "module_id": module_id,
                    "callable_id": callable_id,
                    "module_path": str(module_path),
                },
            )
        matrix["include"].extend(extra_entry_point_params)

        # We'd like to run a build for each new commit bundled in the action trigger.
        # If no new commits are detected, it's because we are not in a GitHub workflow
        # event, so we'll fallback to the current commit and only build for it.
        build_commit_matrix = (
            self.new_commits_matrix
            if self.new_commits_matrix
            else self.current_commit_matrix
        )
        assert build_commit_matrix
        # Extend the matrix with a new dimension: a list of commits.
        matrix["commit"] = build_commit_matrix["commit"]
        matrix["include"].extend(build_commit_matrix["include"])

        # Add platform-specific variables.
        # Arch values are inspired from those specified for self-hosted runners:
        # https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners#architectures
        # Arch is not a matrix variant because support is not widely distributed
        # between different OS.
        extra_os_params = [
            {
                "os": "ubuntu-24.04",
                "platform_id": "linux",
                "arch": "x64",
                "extension": "bin",
            },
            {
                "os": "ubuntu-24.04-arm",
                "platform_id": "linux",
                "arch": "arm64",
                "extension": "bin",
            },
            {
                "os": "macos-15",
                "platform_id": "macos",
                "arch": "arm64",
                "extension": "bin",
            },
            {
                "os": "macos-13",
                "platform_id": "macos",
                "arch": "x64",
                "extension": "bin",
            },
            {
                "os": "windows-2022",
                "platform_id": "windows",
                "arch": "x64",
                "extension": "exe",
            },
        ]
        matrix["include"].extend(extra_os_params)

        # Check that the extra parameters attached to reserved directives do not
        # override the reserved keywords themselves.
        all_extra_keys = set().union(
            *(
                extras.keys()
                for reserved_key in RESERVED_MATRIX_KEYWORDS
                if reserved_key in matrix
                for extras in matrix[reserved_key]
            ),
        )
        assert all_extra_keys.isdisjoint(RESERVED_MATRIX_KEYWORDS)

        # Produce all variations encoded by the matrix, by skipping the special
        # directives.
        all_variations = tuple(
            tuple((variant_id, value) for value in variant_values)
            for variant_id, variant_values in matrix.items()
            if variant_id not in RESERVED_MATRIX_KEYWORDS
        )

        # Emulate collection and aggregation of the 'include' directive to all
        # variations produced by the matrix.
        for variant in product(*all_variations):
            variant_dict = dict(variant)

            # Check each extra parameter from the 'include' directive and accumulate
            # the matching ones to the variant.
            full_variant = variant_dict.copy()
            for extra_params in matrix["include"]:
                # Check if the variant matches the extra parameters: compare only the
                # dimensions shared between the variant and the extra parameters.
                dimensions_to_match = set(variant_dict).intersection(extra_params)
                d0 = {key: variant_dict[key] for key in dimensions_to_match}
                d1 = {key: extra_params[key] for key in dimensions_to_match}
                # Extra parameters are matching the current variant, merge their values.
                if d0 == d1:
                    full_variant.update(extra_params)

            # Add to the 'include' directive a new extra parameter that matches the
            # current variant.
            extra_name_param = variant_dict.copy()
            # Generate for Nuitka the binary file name to be used that is unique to
            # this variant.
            extra_name_param["bin_name"] = (
                "{cli_id}-{platform_id}-{arch}-build-{short_sha}.{extension}"
            ).format(**full_variant)
            matrix["include"].append(extra_name_param)

        return Matrix(matrix)
1180
+
1181
+ @cached_property
1182
+ def release_notes(self) -> str | None:
1183
+ """Generate notes to be attached to the GitHub release."""
1184
+ # Produce the release notes of the release version or the current one.
1185
+ version = self.released_version
1186
+ if not version:
1187
+ version = self.current_version
1188
+ if not version:
1189
+ return None
1190
+
1191
+ # Extract the changelog entry corresponding to the release version, and located
1192
+ # between the first two `##` second-level markdown titles.
1193
+ changes = ""
1194
+ match = re.search(
1195
+ rf"^##(?P<title>.+{escape(version)} .+?)\n(?P<changes>.*?)\n##",
1196
+ Path("./changelog.md").read_text(encoding="UTF-8"),
1197
+ flags=re.MULTILINE | re.DOTALL,
1198
+ )
1199
+ if match:
1200
+ changes = match.groupdict().get("changes", "").strip()
1201
+ # Add a title.
1202
+ if changes:
1203
+ changes = "### Changes\n\n" + changes
1204
+
1205
+ # Generate a link to the version of the package published on PyPi.
1206
+ pypi_link = ""
1207
+ if self.package_name:
1208
+ pypi_link = (
1209
+ "[🐍 Available on PyPi](https://pypi.org/project/"
1210
+ + self.package_name
1211
+ + "/"
1212
+ + version
1213
+ + ")."
1214
+ )
1215
+
1216
+ # Assemble the release notes.
1217
+ return f"{changes}\n\n{pypi_link}".strip()
1218
+
1219
+ @staticmethod
1220
+ def format_github_value(value: Any) -> str:
1221
+ """Transform Python value to GitHub-friendly, JSON-like, console string.
1222
+
1223
+ Renders:
1224
+ - `str` as-is
1225
+ - `None` into empty string
1226
+ - `bool` into lower-cased string
1227
+ - `Matrix` into JSON string
1228
+ - `Iterable` of strings into a serialized space-separated string
1229
+ - `Iterable` of `Path` into a serialized string whose items are space-separated
1230
+ and double-quoted
1231
+ """
1232
+ # Structured metadata to be rendered as JSON.
1233
+ if isinstance(value, Matrix):
1234
+ return str(value)
1235
+
1236
+ # Convert non-strings.
1237
+ if not isinstance(value, str):
1238
+ if value is None:
1239
+ value = ""
1240
+
1241
+ elif isinstance(value, bool):
1242
+ value = str(value).lower()
1243
+
1244
+ elif isinstance(value, dict):
1245
+ raise NotImplementedError
1246
+
1247
+ elif isinstance(value, Iterable):
1248
+ # Cast all items to string, wrapping Path items with double-quotes.
1249
+ items = ((f'"{i}"' if isinstance(i, Path) else str(i)) for i in value)
1250
+ value = " ".join(items)
1251
+
1252
+ return cast(str, value)
1253
+
1254
+ def dump(
1255
+ self,
1256
+ dialect: Dialects = Dialects.github, # type: ignore[valid-type]
1257
+ ) -> str:
1258
+ """Returns all metadata in the specified format.
1259
+
1260
+ Defaults to GitHub dialect.
1261
+ """
1262
+ metadata: dict[str, Any] = {
1263
+ "new_commits": self.new_commits_hash,
1264
+ "release_commits": self.release_commits_hash,
1265
+ "gitignore_exists": self.gitignore_exists,
1266
+ "python_files": self.python_files,
1267
+ "doc_files": self.doc_files,
1268
+ "is_python_project": self.is_python_project,
1269
+ "package_name": self.package_name,
1270
+ "blacken_docs_params": self.blacken_docs_params,
1271
+ "ruff_py_version": self.ruff_py_version,
1272
+ "mypy_params": self.mypy_params,
1273
+ "current_version": self.current_version,
1274
+ "released_version": self.released_version,
1275
+ "is_sphinx": self.is_sphinx,
1276
+ "active_autodoc": self.active_autodoc,
1277
+ "release_notes": self.release_notes,
1278
+ "new_commits_matrix": self.new_commits_matrix,
1279
+ "release_commits_matrix": self.release_commits_matrix,
1280
+ "nuitka_matrix": self.nuitka_matrix,
1281
+ }
1282
+
1283
+ logging.debug(f"Raw metadata: {metadata!r}")
1284
+ logging.debug(f"Format metadata into {dialect} format.")
1285
+
1286
+ content = ""
1287
+ if dialect == Dialects.github:
1288
+ for env_name, value in metadata.items():
1289
+ env_value = self.format_github_value(value)
1290
+
1291
+ is_multiline = bool(len(env_value.splitlines()) > 1)
1292
+ if not is_multiline:
1293
+ content += f"{env_name}={env_value}\n"
1294
+ else:
1295
+ # Use a random unique delimiter to encode multiline value:
1296
+ delimiter = f"ghadelimiter_{randint(10**8, (10**9) - 1)}"
1297
+ content += f"{env_name}<<{delimiter}\n{env_value}\n{delimiter}\n"
1298
+ else:
1299
+ assert dialect == Dialects.PLAIN
1300
+ content = repr(metadata)
1301
+
1302
+ logging.debug(f"Formatted metadata:\n{content}")
1303
+
1304
+ return content