gha-utils 4.0.1-py3-none-any.whl → 4.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

gha_utils/__init__.py CHANGED
@@ -17,4 +17,4 @@
 
 from __future__ import annotations
 
-__version__ = "4.0.1"
+__version__ = "4.1.1"
gha_utils/changelog.py CHANGED
@@ -38,9 +38,11 @@ class Changelog:
         self.content = initial_changelog
         logging.debug(f"Initial content set to:\n{self.content}")
 
-    def update(self) -> str:
+    def update(self) -> str | None:
         r"""Adds a new empty entry at the top of the changelog.
 
+        Returns ``None`` if initial changelog content has already been updated.
+
         This is designed to be used just after a new release has been tagged. And before a
         post-release version increment is applied with a call to:
 
@@ -145,7 +147,9 @@ class Changelog:
 
         logging.info("New generated section:\n" + indent(new_entry, " " * 2))
 
-        assert new_entry not in history
+        # No need to update.
+        if new_entry in history:
+            return None
 
         # Recompose full changelog with new top entry.
         return f"{changelog_header}{new_entry}{history}"
gha_utils/cli.py CHANGED
@@ -25,6 +25,7 @@ from pathlib import Path
 
 from click_extra import (
     Choice,
+    Context,
     argument,
     extra_group,
     file_path,
@@ -55,6 +56,15 @@ def file_writer(filepath):
     writer.close()
 
 
+def get_header(ctx: Context):
+    """Generates metadata to be leaved as comments to the top of a file generated by this CLI."""
+    return (
+        f"# Generated by {ctx.command_path} v{__version__}"
+        " - https://github.com/kdeldycke/workflows\n"
+        f"# Timestamp: {datetime.now().isoformat()}.\n\n"
+    )
+
+
 @extra_group
 def gha_utils():
     pass
@@ -114,20 +124,16 @@ def metadata(ctx, format, overwrite, output_path):
     env_file = os.getenv("GITHUB_OUTPUT")
     if env_file and Path(env_file) != output_path:
         logging.warning(
-            "Output path is not the same as $GITHUB_OUTPUT environment variable, which is generally what we're looking to do in GitHub CI runners for other jobs to consume the produced metadata."
+            "Output path is not the same as $GITHUB_OUTPUT environment variable,"
+            " which is generally what we're looking to do in GitHub CI runners for"
+            " other jobs to consume the produced metadata."
         )
 
     dialect = Dialects(format)
     content = metadata.dump(dialect=dialect)
 
     with file_writer(output_path) as f:
-        f.write(
-            # Leave some metadata as comment.
-            f"# Generated by {ctx.command_path} v{__version__}"
-            " - https://github.com/kdeldycke/workflows.\n"
-            f"# Timestamp: {datetime.now().isoformat()}.\n"
-            f"{content}"
-        )
+        f.write(content)
 
 
 @gha_utils.command(short_help="Maintain a Markdown-formatted changelog")
@@ -147,10 +153,13 @@ def changelog(ctx, source, changelog_path):
     initial_content = None
     if source:
        logging.info(f"Read initial changelog from {source}")
-        initial_content = source.read_text()
+        initial_content = source.read_text(encoding="utf-8")
 
     changelog = Changelog(initial_content)
     content = changelog.update()
+    if not content:
+        logging.warning("Changelog already up to date. Do nothing.")
+        ctx.exit()
 
     if is_stdout(changelog_path):
         logging.info(f"Print updated results to {sys.stdout.name}")
@@ -189,7 +198,7 @@ def mailmap(ctx, source, updated_mailmap):
     initial_content = None
     if source:
         logging.info(f"Read initial mapping from {source}")
-        initial_content = source.read_text()
+        initial_content = source.read_text(encoding="utf-8")
 
     mailmap = Mailmap(initial_content)
     content = mailmap.updated_map()
@@ -200,10 +209,4 @@ def mailmap(ctx, source, updated_mailmap):
         logging.info(f"Save updated results to {updated_mailmap}")
 
     with file_writer(updated_mailmap) as f:
-        f.write(
-            # Leave some metadata as comment.
-            f"# Generated by {ctx.command_path} v{__version__}"
-            " - https://github.com/kdeldycke/workflows.\n"
-            f"# Timestamp: {datetime.now().isoformat()}.\n\n"
-            f"{content}"
-        )
+        f.write(f"{get_header(ctx)}{content}")
gha_utils/mailmap.py CHANGED
@@ -68,7 +68,7 @@ class Mailmap:
 
         logging.debug(
             "Authors and committers found in Git history:\n"
-            f"{'\n'.join(sorted(contributors, key=str.casefold))}"
+            + "\n".join(sorted(contributors, key=str.casefold))
         )
         return contributors
 
@@ -94,6 +94,7 @@ class Mailmap:
 
         # Render content in .mailmap format.
         return (
-            f"{'\n'.join(header_comments)}\n\n"
-            f"{'\n'.join(sorted(mappings, key=str.casefold))}\n"
+            "\n".join(header_comments)
+            + "\n\n"
+            + "\n".join(sorted(mappings, key=str.casefold))
         )
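
Both rewrites in this file swap f-strings whose replacement fields contained `\n` for plain `str.join()` concatenation. The likely reason (my reading, not stated in the release notes) is that before Python 3.12 and PEP 701 a backslash is not allowed inside an f-string replacement field, so the old form only parses on 3.12+. A quick illustration with made-up contributor names:

    names = ["Alice <a@example.com>", "Bob <b@example.com>"]

    # f"{'\n'.join(names)}" is a SyntaxError on Python 3.11 and older,
    # while explicit concatenation parses everywhere.
    print(
        "Authors and committers found in Git history:\n"
        + "\n".join(sorted(names, key=str.casefold))
    )
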
gha_utils/metadata.py CHANGED
@@ -105,19 +105,20 @@ from itertools import product
 from pathlib import Path
 from random import randint
 from re import escape
-from typing import Any, Iterator, cast
+from typing import Any, Final, Iterator, cast
 
 if sys.version_info >= (3, 11):
+    from enum import StrEnum
+
     import tomllib
 else:
     import tomli as tomllib  # type: ignore[import-not-found]
-    from enum import Enum
+    from backports.strenum import StrEnum  # type: ignore[import-not-found]
 
-from black.mode import TargetVersion
 from bumpversion.config import get_configuration  # type: ignore[import-untyped]
 from bumpversion.config.files import find_config_file  # type: ignore[import-untyped]
 from bumpversion.show import resolve_name  # type: ignore[import-untyped]
-from mypy.defaults import PYTHON3_VERSION_MIN
+from packaging.specifiers import SpecifierSet
 from packaging.version import Version
 from pydriller import Commit, Git, Repository  # type: ignore[import]
 from pyproject_metadata import ConfigurationError, StandardMetadata
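
The import block above keeps the existing `tomllib`/`tomli` version gate and adds `StrEnum` with a matching fallback. A tiny end-to-end sketch of the TOML gate, using an inline document rather than a real `pyproject.toml`:

    import sys

    if sys.version_info >= (3, 11):
        import tomllib
    else:
        import tomli as tomllib  # assumes the tomli backport is installed

    doc = tomllib.loads('[project]\nname = "demo"\nrequires-python = ">=3.8.6"\n')
    print(doc["project"]["requires-python"])  # >=3.8.6
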
@@ -144,11 +145,84 @@ SHORT_SHA_LENGTH = 7
 RESERVED_MATRIX_KEYWORDS = ["include", "exclude"]
 
 
-class Dialects(Enum):
-    """Dialects in which metadata can be formatted to."""
+WorkflowEvent = StrEnum(
+    "WorkflowEvent",
+    (
+        "branch_protection_rule",
+        "check_run",
+        "check_suite",
+        "create",
+        "delete",
+        "deployment",
+        "deployment_status",
+        "discussion",
+        "discussion_comment",
+        "fork",
+        "gollum",
+        "issue_comment",
+        "issues",
+        "label",
+        "merge_group",
+        "milestone",
+        "page_build",
+        "project",
+        "project_card",
+        "project_column",
+        "public",
+        "pull_request",
+        "pull_request_comment",
+        "pull_request_review",
+        "pull_request_review_comment",
+        "pull_request_target",
+        "push",
+        "registry_package",
+        "release",
+        "repository_dispatch",
+        "schedule",
+        "status",
+        "watch",
+        "workflow_call",
+        "workflow_dispatch",
+        "workflow_run",
+    ),
+)
+"""Workflow events that cause a workflow to run.
+
+`List of events
+<https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows>`_.
+"""
+
+
+Dialects = StrEnum("Dialects", ("github", "plain"))
+"""Dialects in which metadata can be formatted to."""
+
 
-    GITHUB = "github"
-    PLAIN = "plain"
+class TargetVersion(StrEnum):
+    """List of Python 3 minor versions supported by Black.
+
+    `Mirrors official implementation from black.mode.TargetVersion
+    <https://github.com/psf/black/blob/main/src/black/mode.py>`_.
+    """
+
+    PY33 = "3.3"
+    PY34 = "3.4"
+    PY35 = "3.5"
+    PY36 = "3.6"
+    PY37 = "3.7"
+    PY38 = "3.8"
+    PY39 = "3.9"
+    PY310 = "3.10"
+    PY311 = "3.11"
+    PY312 = "3.12"
+    PY313 = "3.13"
+
+
+MYPY_VERSION_MIN: Final = (3, 8)
+"""Earliest version supported by Mypy's ``--python-version 3.x`` parameter.
+
+`Sourced from Mypy original implementation
+<https://github.com/python/mypy/blob/master/mypy/defaults.py>`_.
+"""
 
 
 class Matrix(dict):
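
A note on the move from `Enum` to `StrEnum`: with the functional API and no explicit values, `StrEnum` derives each member's value from its lowercased member name, and members behave as real strings. That is what lets the CLI rebuild `Dialects(format)` from a raw option value and what makes comparisons like `dialect == Dialects.github` work later in `dump()`. A small sketch of that behaviour, reusing the same fallback import as the diff (assumes `backports.strenum` is installed on Python < 3.11):

    import sys

    if sys.version_info >= (3, 11):
        from enum import StrEnum
    else:  # Same fallback as the diff.
        from backports.strenum import StrEnum

    Dialects = StrEnum("Dialects", ("github", "plain"))

    assert Dialects("github") is Dialects.github  # round-trips from a raw CLI value
    assert Dialects.github == "github"            # members compare equal to plain strings
    assert f"{Dialects.github}" == "github"       # and format as their bare value
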
@@ -192,9 +266,11 @@ class Metadata:
               id: project-metadata
               env:
                 GITHUB_CONTEXT: ${{ toJSON(github) }}
-              run: >
-                python -c "$(curl -fsSL
-                https://raw.githubusercontent.com/kdeldycke/workflows/main/.github/metadata.py)"
+              run: |
+                uv run gha-utils --verbosity DEBUG metadata --overwrite "$GITHUB_OUTPUT"
+
+        .. todo::
+            Try to remove reliance on GitHub context entirely so we can eliminate the JSON/env hack above.
         """
         if "GITHUB_CONTEXT" not in os.environ:
             if self.in_ci_env:
@@ -209,6 +285,16 @@ class Metadata:
         logging.debug(json.dumps(context, indent=4))
         return context
 
+    def git_stash_count(self, git_repo: Git) -> int:
+        """Returns the number of stashes."""
+        count = int(
+            git_repo.repo.git.rev_list(
+                "--walk-reflogs", "--ignore-missing", "--count", "refs/stash"
+            )
+        )
+        logging.debug(f"Number of stashes in repository: {count}")
+        return count
+
     def commit_matrix(self, commits: Iterable[Commit] | None) -> Matrix | None:
         """Pre-compute a matrix of commits.
 
@@ -247,52 +333,117 @@ class Metadata:
         if not commits:
             return None
 
-        # Save the initial commit reference and SHA of the repository. The reference is
-        # either the canonical active branch name (i.e. ``main``), or the commit SHA if
-        # the current HEAD commit is detached from a branch.
         git = Git(".")
-        init_sha = git.repo.head.commit.hexsha
-        if git.repo.head.is_detached:
-            init_ref = init_sha
+        current_commit = git.repo.head.commit.hexsha
+
+        # Check if we need to get back in time in the Git log and browse past commits.
+        if len(commits) == 1:  # type: ignore[arg-type]
+            # Is the current commit the one we're looking for?
+            past_commit_lookup = bool(
+                current_commit != commits[0].hash  # type: ignore[index]
+            )
+        # If we have multiple commits then yes, we need to look for past commits.
         else:
-            init_ref = git.repo.active_branch.name
+            past_commit_lookup = True
 
-        sha_list = []
-        include_list = []
-        for commit in commits:
-            sha = commit.hash
+        # We need to go back in time, but first save the current state of the
+        # repository.
+        if past_commit_lookup:
+            logging.debug(
+                "We need to look into the commit history. Inspect the initial state of the repository."
+            )
 
-            # Checkout the target commit so we can read the version associated with it,
-            # but stash local changes first. Do not perform the stash/checkout dance if
-            # the repository is already at the target commit.
-            need_checkout = bool(git.repo.head.commit.hexsha != sha)
-            if need_checkout and not self.in_ci_env:
+            if not self.in_ci_env:
                 raise RuntimeError(
                     "Local repository manipulations only allowed in CI environment"
                 )
-            if need_checkout:
-                git.repo.git.stash()
-                git.checkout(sha)
+
+            # Save the initial commit reference and SHA of the repository. The reference is
+            # either the canonical active branch name (i.e. ``main``), or the commit SHA if
+            # the current HEAD commit is detached from a branch.
+            if git.repo.head.is_detached:
+                init_ref = current_commit
+            else:
+                init_ref = git.repo.active_branch.name
+            logging.debug(f"Initial commit reference: {init_ref}")
+
+            # Try to stash local changes and check if we'll need to unstash them later.
+            counter_before = self.git_stash_count(git)
+            logging.debug("Try to stash local changes before our series of checkouts.")
+            git.repo.git.stash()
+            counter_after = self.git_stash_count(git)
+            logging.debug(
+                f"Stash counter changes after 'git stash' command: {counter_before} -> {counter_after}"
+            )
+            assert counter_after >= counter_before
+            need_unstash = bool(counter_after > counter_before)
+            logging.debug(f"Need to unstash after checkouts: {need_unstash}")
+
+        else:
+            init_ref = None
+            need_unstash = False
+            logging.debug(
+                f"No need to look into the commit history: repository is already checked out at {current_commit}"
+            )
+
+        sha_list = []
+        include_list = []
+        for commit in commits:
+            if past_commit_lookup:
+                logging.debug(f"Checkout to commit {commit.hash}")
+                git.checkout(commit.hash)
+
+            logging.debug(f"Extract project version at commit {commit.hash}")
             current_version = Metadata.get_current_version()
-            if need_checkout:
-                git.repo.git.stash("pop")
 
-            sha_list.append(sha)
+            sha_list.append(commit.hash)
             include_list.append({
-                "commit": sha,
-                "short_sha": sha[:SHORT_SHA_LENGTH],
+                "commit": commit.hash,
+                "short_sha": commit.hash[:SHORT_SHA_LENGTH],
                 "current_version": current_version,
             })
 
-        # Restore the repository to its initial commit if its not in the initial state.
-        if git.repo.head.commit.hexsha != init_sha:
+        # Restore the repository to its initial state.
+        if past_commit_lookup:
+            logging.debug(f"Restore repository to {init_ref}.")
             git.checkout(init_ref)
+            if need_unstash:
+                logging.debug("Unstash local changes that were previously saved.")
+                git.repo.git.stash("pop")
 
         return Matrix({
             "commit": sha_list,
             "include": include_list,
         })
 
+    @cached_property
+    def event_type(self) -> WorkflowEvent | None:  # type: ignore[valid-type]
+        """Returns the type of event that triggered the workflow run.
+
+        .. caution::
+            This property is based on a crude heuristics as it only looks at the value
+            of the ``GITHUB_BASE_REF`` environment variable. Which is `only set when
+            the event that triggers a workflow run is either pull_request or pull_request_target
+            <https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables>`_.
+
+        .. todo::
+            Add detection of all workflow trigger events.
+        """
+        if not self.in_ci_env:
+            logging.warning(
+                "Cannot guess event type because we're not in a CI environment."
+            )
+            return None
+        if "GITHUB_BASE_REF" not in os.environ:
+            logging.warning(
+                "Cannot guess event type because no GITHUB_BASE_REF env var found."
+            )
+            return None
+
+        if bool(os.environ.get("GITHUB_BASE_REF")):
+            return WorkflowEvent.pull_request
+        return WorkflowEvent.push
+
     @cached_property
     def commit_range(self) -> tuple[str, str] | None:
         """Range of commits bundled within the triggering event.
@@ -306,8 +457,8 @@ class Metadata:
         request that is merged upstream but we'd like to produce artifacts (builds,
         packages, etc.) for each individual commit.
 
-        The default ``GITHUB_SHA`` environment variable is useless as it only points to
-        the last commit. We need to inspect the commit history to find all new one. New
+        The default ``GITHUB_SHA`` environment variable is not enough as it only points to
+        the last commit. We need to inspect the commit history to find all new ones. New
         commits needs to be fetched differently in ``push`` and ``pull_requests``
         events.
 
@@ -316,22 +467,30 @@ class Metadata:
        - https://stackoverflow.com/a/67204539
        - https://stackoverflow.com/a/62953566
        - https://stackoverflow.com/a/61861763
+
+        .. todo::
+            Refactor so we can get rid of ``self.github_context``. Maybe there's enough metadata lying around in
+            the environment variables that we can inspect the git history and find the commit range.
         """
-        if not self.github_context:
+        if not self.github_context or not self.event_type:
             return None
         # Pull request event.
-        if self.github_context["base_ref"]:
-            start = f"origin/{self.github_context['base_ref']}"
+        if self.event_type in (
+            WorkflowEvent.pull_request,
+            WorkflowEvent.pull_request_target,
+        ):
+            base_ref = os.environ["GITHUB_BASE_REF"]
+            assert base_ref
+            start = f"origin/{base_ref}"
             # We need to checkout the HEAD commit instead of the artificial merge
             # commit introduced by the pull request.
             end = self.github_context["event"]["pull_request"]["head"]["sha"]
         # Push event.
         else:
             start = self.github_context["event"]["before"]
-            end = self.github_context["sha"]
-        logging.debug("--- Commit range ---")
-        logging.debug(f"Range start: {start}")
-        logging.debug(f"Range end: {end}")
+            end = os.environ["GITHUB_SHA"]
+            assert end
+        logging.debug(f"Commit range: {start} -> {end}")
         return start, end
 
     @cached_property
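
`commit_range` now keys off the new `event_type` heuristic, which only inspects `GITHUB_BASE_REF` (GitHub sets that variable, non-empty, solely for `pull_request` and `pull_request_target` runs). A standalone restatement of the heuristic for poking at it outside a workflow run (the function name is mine, and the `in_ci_env` guard is left out):

    from __future__ import annotations

    import os


    def guess_event_type() -> str | None:
        """Crude event detection mirroring Metadata.event_type (CI check omitted)."""
        if "GITHUB_BASE_REF" not in os.environ:
            return None  # Variable not exposed: probably not a GitHub runner.
        # GITHUB_BASE_REF is only non-empty for pull_request / pull_request_target runs.
        return "pull_request" if os.environ["GITHUB_BASE_REF"] else "push"


    os.environ["GITHUB_BASE_REF"] = "main"  # simulate a pull request run
    print(guess_event_type())  # pull_request
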
@@ -466,7 +625,7 @@ class Metadata:
         ``pyproject.toml`` exists and respects the standards. ``False`` otherwise.
         """
         if self.pyproject_path.exists() and self.pyproject_path.is_file():
-            toml = tomllib.loads(self.pyproject_path.read_text())
+            toml = tomllib.loads(self.pyproject_path.read_text(encoding="utf-8"))
             try:
                 metadata = StandardMetadata.from_pyproject(toml)
                 self._is_python_project = True
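
The `encoding="utf-8"` additions in this release (here and in `cli.py`) pin down what used to depend on the runner's locale: `Path.read_text()` without an argument falls back to the platform's preferred encoding, often cp1252 on Windows. A trivial sketch of the now-explicit call:

    from pathlib import Path

    path = Path("pyproject.toml")  # illustrative target
    if path.exists():
        # Explicit encoding: read_text() would otherwise use the locale's
        # preferred encoding, which varies across CI runners.
        print(len(path.read_text(encoding="utf-8")))
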
@@ -518,15 +677,31 @@ class Metadata:
     def py_target_versions(self) -> tuple[Version, ...] | None:
         """Generates the list of Python target versions.
 
-        This is based on Black's support matrix.
+        Only takes ``major.minor`` variations into account. Smaller version dimensions
+        are ignored, so a package depending on ``3.8.6`` will keep ``3.8`` as a Python
+        target.
         """
         if self.pyproject and self.pyproject.requires_python:
-            minor_range = sorted(v.value for v in TargetVersion)
-            black_range = (Version(f"3.{minor}") for minor in minor_range)
+            # Dumb down specifiers' lower bounds to their major.minor version.
+            spec_list = []
+            for spec in self.pyproject.requires_python:
+                if spec.operator in (">=", ">"):
+                    major, minor, _ = Version(spec.version).release
+                    new_spec = f"{spec.operator}{major}.{minor}"
+                else:
+                    new_spec = str(spec)
+                spec_list.append(new_spec)
+            relaxed_specs = SpecifierSet(",".join(spec_list))
+            logging.debug(
+                "Relax Python requirements from "
+                f"{self.pyproject.requires_python} to {relaxed_specs}."
+            )
+
+            # Iterate through Python version support.
             return tuple(
-                version
-                for version in black_range
-                if self.pyproject.requires_python.contains(version)
+                Version(target)
+                for target in tuple(TargetVersion)
+                if relaxed_specs.contains(target)
             )
         return None
 
@@ -580,12 +755,12 @@ class Metadata:
     def mypy_params(self) -> str | None:
         """Generates `mypy` parameters.
 
-        Mypy needs to be fed with this parameter: ``--python-version x.y``.
+        Mypy needs to be fed with this parameter: ``--python-version 3.x``.
         """
         if self.py_target_versions:
             # Compare to Mypy's lowest supported version of Python dialect.
             major, minor = max(
-                PYTHON3_VERSION_MIN,
+                MYPY_VERSION_MIN,
                 min((v.major, v.minor) for v in self.py_target_versions),
             )
             return f"--python-version {major}.{minor}"
@@ -940,9 +1115,13 @@ class Metadata:
         # Generate a link to the version of the package published on PyPi.
         pypi_link = ""
         if self.package_name:
-            pypi_link = f"[🐍 Available on PyPi](https://pypi.org/project/{
-                self.package_name
-            }/{version})."
+            pypi_link = (
+                "[🐍 Available on PyPi](https://pypi.org/project/"
+                + self.package_name
+                + "/"
+                + version
+                + ")."
+            )
 
         # Assemble the release notes.
         return f"{changes}\n\n{pypi_link}".strip()
@@ -982,7 +1161,10 @@ class Metadata:
 
         return cast(str, value)
 
-    def dump(self, dialect: Dialects = Dialects.GITHUB) -> str:
+    def dump(
+        self,
+        dialect: Dialects = Dialects.github,  # type: ignore[valid-type]
+    ) -> str:
         """Returns all metadata in the specified format.
 
         Defaults to GitHub dialect.
@@ -1009,10 +1191,10 @@
         }
 
         logging.debug(f"Raw metadata: {metadata!r}")
-        logging.debug(f"Format metadata into {dialect} dialect.")
+        logging.debug(f"Format metadata into {dialect} format.")
 
         content = ""
-        if dialect == Dialects.GITHUB:
+        if dialect == Dialects.github:
             for env_name, value in metadata.items():
                 env_value = self.format_github_value(value)
 
@@ -1027,6 +1209,6 @@ class Metadata:
             assert dialect == Dialects.PLAIN
             content = repr(metadata)
 
-        logging.debug(f"Formatted metadata: {content}")
+        logging.debug(f"Formatted metadata:\n{content}")
 
         return content