arthexis 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of arthexis might be problematic; see the package's advisory page on the registry for more details.

core/tests.py CHANGED
@@ -15,7 +15,7 @@ from unittest.mock import patch
15
15
  from pathlib import Path
16
16
  import subprocess
17
17
  from glob import glob
18
- from datetime import timedelta
18
+ from datetime import datetime, timedelta, timezone as datetime_timezone
19
19
  import tempfile
20
20
  from urllib.parse import quote
21
21
 
@@ -831,15 +831,21 @@ class ReleaseProcessTests(TestCase):
831
831
  )
832
832
  version_path.write_text(original, encoding="utf-8")
833
833
 
834
+ @mock.patch("core.views.timezone.now")
834
835
  @mock.patch("core.views.PackageRelease.dump_fixture")
835
836
  @mock.patch("core.views.release_utils.publish")
836
- def test_publish_sets_pypi_url(self, publish, dump_fixture):
837
+ def test_publish_sets_pypi_url(self, publish, dump_fixture, now):
838
+ now.return_value = datetime(2025, 3, 4, 5, 6, tzinfo=datetime_timezone.utc)
837
839
  _step_publish(self.release, {}, Path("rel.log"))
838
840
  self.release.refresh_from_db()
839
841
  self.assertEqual(
840
842
  self.release.pypi_url,
841
843
  f"https://pypi.org/project/{self.package.name}/{self.release.version}/",
842
844
  )
845
+ self.assertEqual(
846
+ self.release.release_on,
847
+ datetime(2025, 3, 4, 5, 6, tzinfo=datetime_timezone.utc),
848
+ )
843
849
  dump_fixture.assert_called_once()
844
850
 
845
851
  @mock.patch("core.views.PackageRelease.dump_fixture")
@@ -849,8 +855,33 @@ class ReleaseProcessTests(TestCase):
849
855
  _step_publish(self.release, {}, Path("rel.log"))
850
856
  self.release.refresh_from_db()
851
857
  self.assertEqual(self.release.pypi_url, "")
858
+ self.assertIsNone(self.release.release_on)
852
859
  dump_fixture.assert_not_called()
853
860
 
861
+ def test_new_todo_does_not_reset_pending_flow(self):
862
+ user = User.objects.create_superuser("admin", "admin@example.com", "pw")
863
+ url = reverse("release-progress", args=[self.release.pk, "publish"])
864
+ Todo.objects.create(request="Initial checklist item")
865
+ steps = [("Confirm release TODO completion", core_views._step_check_todos)]
866
+ with mock.patch("core.views.PUBLISH_STEPS", steps):
867
+ self.client.force_login(user)
868
+ response = self.client.get(url)
869
+ self.assertTrue(response.context["has_pending_todos"])
870
+ self.client.get(f"{url}?ack_todos=1")
871
+ self.client.get(f"{url}?start=1")
872
+ self.client.get(f"{url}?step=0")
873
+ Todo.objects.create(request="Follow-up checklist item")
874
+ response = self.client.get(url)
875
+ self.assertEqual(
876
+ Todo.objects.filter(is_deleted=False, done_on__isnull=True).count(),
877
+ 1,
878
+ )
879
+ self.assertIsNone(response.context["todos"])
880
+ self.assertFalse(response.context["has_pending_todos"])
881
+ session = self.client.session
882
+ ctx = session.get(f"release_publish_{self.release.pk}")
883
+ self.assertTrue(ctx.get("todos_ack"))
884
+
854
885
  def test_release_progress_uses_lockfile(self):
855
886
  run = []
856
887
 
@@ -1044,6 +1075,8 @@ class PackageReleaseAdminActionTests(TestCase):
1044
1075
  self.admin = PackageReleaseAdmin(PackageRelease, self.site)
1045
1076
  self.admin.message_user = lambda *args, **kwargs: None
1046
1077
  self.package = Package.objects.create(name="pkg")
1078
+ self.package.is_active = True
1079
+ self.package.save(update_fields=["is_active"])
1047
1080
  self.release = PackageRelease.objects.create(
1048
1081
  package=self.package,
1049
1082
  version="1.0.0",
@@ -1072,11 +1105,64 @@ class PackageReleaseAdminActionTests(TestCase):
1072
1105
  def test_refresh_from_pypi_creates_releases(self, mock_get, dump):
1073
1106
  mock_get.return_value.raise_for_status.return_value = None
1074
1107
  mock_get.return_value.json.return_value = {
1075
- "releases": {"1.0.0": [], "1.1.0": []}
1108
+ "releases": {
1109
+ "1.0.0": [
1110
+ {"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
1111
+ ],
1112
+ "1.1.0": [
1113
+ {"upload_time_iso_8601": "2024-02-02T15:45:00.000000Z"}
1114
+ ],
1115
+ }
1076
1116
  }
1077
1117
  self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
1078
1118
  new_release = PackageRelease.objects.get(version="1.1.0")
1079
1119
  self.assertEqual(new_release.revision, "")
1120
+ self.assertEqual(
1121
+ new_release.release_on,
1122
+ datetime(2024, 2, 2, 15, 45, tzinfo=datetime_timezone.utc),
1123
+ )
1124
+ dump.assert_called_once()
1125
+
1126
+ @mock.patch("core.admin.PackageRelease.dump_fixture")
1127
+ @mock.patch("core.admin.requests.get")
1128
+ def test_refresh_from_pypi_updates_release_date(self, mock_get, dump):
1129
+ self.release.release_on = None
1130
+ self.release.save(update_fields=["release_on"])
1131
+ mock_get.return_value.raise_for_status.return_value = None
1132
+ mock_get.return_value.json.return_value = {
1133
+ "releases": {
1134
+ "1.0.0": [
1135
+ {"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
1136
+ ]
1137
+ }
1138
+ }
1139
+ self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
1140
+ self.release.refresh_from_db()
1141
+ self.assertEqual(
1142
+ self.release.release_on,
1143
+ datetime(2024, 1, 1, 12, 30, tzinfo=datetime_timezone.utc),
1144
+ )
1145
+ dump.assert_called_once()
1146
+
1147
+ @mock.patch("core.admin.PackageRelease.dump_fixture")
1148
+ @mock.patch("core.admin.requests.get")
1149
+ def test_refresh_from_pypi_restores_deleted_release(self, mock_get, dump):
1150
+ self.release.is_deleted = True
1151
+ self.release.save(update_fields=["is_deleted"])
1152
+ mock_get.return_value.raise_for_status.return_value = None
1153
+ mock_get.return_value.json.return_value = {
1154
+ "releases": {
1155
+ "1.0.0": [
1156
+ {"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
1157
+ ]
1158
+ }
1159
+ }
1160
+
1161
+ self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
1162
+
1163
+ self.assertTrue(
1164
+ PackageRelease.objects.filter(version="1.0.0").exists()
1165
+ )
1080
1166
  dump.assert_called_once()
1081
1167
 
1082
1168
 
@@ -1300,6 +1386,22 @@ class TodoFocusViewTests(TestCase):
1300
1386
  change_url = reverse("admin:core_todo_change", args=[todo.pk])
1301
1387
  self.assertContains(resp, f'src="{change_url}"')
1302
1388
 
1389
+ def test_focus_view_avoids_recursive_focus_url(self):
1390
+ todo = Todo.objects.create(request="Task")
1391
+ focus_url = reverse("todo-focus", args=[todo.pk])
1392
+ Todo.objects.filter(pk=todo.pk).update(url=focus_url)
1393
+ resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
1394
+ change_url = reverse("admin:core_todo_change", args=[todo.pk])
1395
+ self.assertContains(resp, f'src="{change_url}"')
1396
+
1397
+ def test_focus_view_avoids_recursive_focus_absolute_url(self):
1398
+ todo = Todo.objects.create(request="Task")
1399
+ focus_url = reverse("todo-focus", args=[todo.pk])
1400
+ Todo.objects.filter(pk=todo.pk).update(url=f"http://testserver{focus_url}")
1401
+ resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
1402
+ change_url = reverse("admin:core_todo_change", args=[todo.pk])
1403
+ self.assertContains(resp, f'src="{change_url}"')
1404
+
1303
1405
  def test_focus_view_redirects_if_todo_completed(self):
1304
1406
  todo = Todo.objects.create(request="Task")
1305
1407
  todo.done_on = timezone.now()
core/user_data.py CHANGED
@@ -238,6 +238,36 @@ def _mark_fixture_user_data(path: Path) -> None:
238
238
  model.all_objects.filter(pk=pk).update(is_user_data=True)
239
239
 
240
240
 
241
+ def _fixture_targets_installed_apps(data) -> bool:
242
+ """Return ``True`` when *data* only targets installed apps and models."""
243
+
244
+ if not isinstance(data, list):
245
+ return True
246
+
247
+ labels = {
248
+ obj.get("model")
249
+ for obj in data
250
+ if isinstance(obj, dict) and obj.get("model")
251
+ }
252
+
253
+ for label in labels:
254
+ if not isinstance(label, str):
255
+ continue
256
+ if "." not in label:
257
+ continue
258
+ app_label, model_name = label.split(".", 1)
259
+ if not app_label or not model_name:
260
+ continue
261
+ if not apps.is_installed(app_label):
262
+ return False
263
+ try:
264
+ apps.get_model(label)
265
+ except LookupError:
266
+ return False
267
+
268
+ return True
269
+
270
+
241
271
  def _load_fixture(path: Path, *, mark_user_data: bool = True) -> bool:
242
272
  """Load a fixture from *path* and optionally flag loaded entities."""
243
273
 
@@ -261,9 +291,12 @@ def _load_fixture(path: Path, *, mark_user_data: bool = True) -> bool:
261
291
  except Exception:
262
292
  data = None
263
293
  else:
264
- if isinstance(data, list) and not data:
265
- path.unlink(missing_ok=True)
266
- return False
294
+ if isinstance(data, list):
295
+ if not data:
296
+ path.unlink(missing_ok=True)
297
+ return False
298
+ if not _fixture_targets_installed_apps(data):
299
+ return False
267
300
 
268
301
  try:
269
302
  call_command("loaddata", str(path), ignorenonexistent=True)
@@ -484,11 +517,23 @@ def patch_admin_user_datum() -> None:
484
517
  admin.site._user_datum_patched = True
485
518
 
486
519
 
487
- def _seed_data_view(request):
488
- sections = []
520
+ def _iter_entity_admin_models():
521
+ """Yield registered :class:`Entity` admin models without proxy duplicates."""
522
+
523
+ seen: set[type] = set()
489
524
  for model, model_admin in admin.site._registry.items():
490
525
  if not issubclass(model, Entity):
491
526
  continue
527
+ concrete_model = model._meta.concrete_model
528
+ if concrete_model in seen:
529
+ continue
530
+ seen.add(concrete_model)
531
+ yield model, model_admin
532
+
533
+
534
+ def _seed_data_view(request):
535
+ sections = []
536
+ for model, model_admin in _iter_entity_admin_models():
492
537
  objs = model.objects.filter(is_seed_data=True)
493
538
  if not objs.exists():
494
539
  continue
@@ -508,9 +553,7 @@ def _seed_data_view(request):
508
553
 
509
554
  def _user_data_view(request):
510
555
  sections = []
511
- for model, model_admin in admin.site._registry.items():
512
- if not issubclass(model, Entity):
513
- continue
556
+ for model, model_admin in _iter_entity_admin_models():
514
557
  objs = model.objects.filter(is_user_data=True)
515
558
  if not objs.exists():
516
559
  continue
core/views.py CHANGED
@@ -11,6 +11,7 @@ from django.contrib.sites.models import Site
11
11
  from django.http import Http404, JsonResponse
12
12
  from django.shortcuts import get_object_or_404, redirect, render, resolve_url
13
13
  from django.utils import timezone
14
+ from django.utils.text import slugify
14
15
  from django.utils.translation import gettext as _
15
16
  from django.urls import NoReverseMatch, reverse
16
17
  from django.views.decorators.csrf import csrf_exempt
@@ -18,6 +19,7 @@ from django.views.decorators.http import require_GET, require_POST
18
19
  from django.utils.http import url_has_allowed_host_and_scheme
19
20
  from pathlib import Path
20
21
  from urllib.parse import urlsplit, urlunsplit
22
+ import errno
21
23
  import subprocess
22
24
 
23
25
  from utils import revision
@@ -81,6 +83,126 @@ def _clean_repo() -> None:
81
83
  subprocess.run(["git", "clean", "-fd"], check=False)
82
84
 
83
85
 
86
+ def _format_path(path: Path) -> str:
87
+ try:
88
+ return str(path.resolve().relative_to(Path.cwd()))
89
+ except ValueError:
90
+ return str(path)
91
+
92
+
93
+ def _next_patch_version(version: str) -> str:
94
+ from packaging.version import InvalidVersion, Version
95
+
96
+ try:
97
+ parsed = Version(version)
98
+ except InvalidVersion:
99
+ parts = version.split(".")
100
+ for index in range(len(parts) - 1, -1, -1):
101
+ segment = parts[index]
102
+ if segment.isdigit():
103
+ parts[index] = str(int(segment) + 1)
104
+ return ".".join(parts)
105
+ return version
106
+ return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
107
+
108
+
109
+ def _write_todo_fixture(todo: Todo) -> Path:
110
+ safe_request = todo.request.replace(".", " ")
111
+ slug = slugify(safe_request).replace("-", "_")
112
+ if not slug:
113
+ slug = "todo"
114
+ path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
115
+ path.parent.mkdir(parents=True, exist_ok=True)
116
+ data = [
117
+ {
118
+ "model": "core.todo",
119
+ "fields": {
120
+ "request": todo.request,
121
+ "url": todo.url,
122
+ "request_details": todo.request_details,
123
+ },
124
+ }
125
+ ]
126
+ path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
127
+ return path
128
+
129
+
130
+ def _should_use_python_changelog(exc: OSError) -> bool:
131
+ winerror = getattr(exc, "winerror", None)
132
+ if winerror in {193}:
133
+ return True
134
+ return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
135
+
136
+
137
+ def _generate_changelog_with_python(log_path: Path) -> None:
138
+ _append_log(log_path, "Falling back to Python changelog generator")
139
+ describe = subprocess.run(
140
+ ["git", "describe", "--tags", "--abbrev=0"],
141
+ capture_output=True,
142
+ text=True,
143
+ check=False,
144
+ )
145
+ start_tag = describe.stdout.strip() if describe.returncode == 0 else ""
146
+ range_spec = f"{start_tag}..HEAD" if start_tag else "HEAD"
147
+ log_proc = subprocess.run(
148
+ ["git", "log", range_spec, "--no-merges", "--pretty=format:- %h %s"],
149
+ capture_output=True,
150
+ text=True,
151
+ check=True,
152
+ )
153
+ entries = [line for line in log_proc.stdout.splitlines() if line]
154
+ changelog_path = Path("CHANGELOG.rst")
155
+ previous_lines: list[str] = []
156
+ if changelog_path.exists():
157
+ previous_lines = changelog_path.read_text(encoding="utf-8").splitlines()
158
+ if len(previous_lines) > 6:
159
+ previous_lines = previous_lines[6:]
160
+ else:
161
+ previous_lines = []
162
+ lines = [
163
+ "Changelog",
164
+ "=========",
165
+ "",
166
+ "Unreleased",
167
+ "----------",
168
+ "",
169
+ ]
170
+ if entries:
171
+ lines.extend(entries)
172
+ if previous_lines:
173
+ lines.append("")
174
+ lines.extend(previous_lines)
175
+ content = "\n".join(lines)
176
+ if not content.endswith("\n"):
177
+ content += "\n"
178
+ changelog_path.write_text(content, encoding="utf-8")
179
+ _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
180
+
181
+
182
+ def _ensure_release_todo(release) -> tuple[Todo, Path]:
183
+ target_version = _next_patch_version(release.version)
184
+ request = f"Create release {release.package.name} {target_version}"
185
+ try:
186
+ url = reverse("admin:core_packagerelease_changelist")
187
+ except NoReverseMatch:
188
+ url = ""
189
+ todo, _ = Todo.all_objects.update_or_create(
190
+ request__iexact=request,
191
+ defaults={
192
+ "request": request,
193
+ "url": url,
194
+ "request_details": "",
195
+ "is_seed_data": True,
196
+ "is_deleted": False,
197
+ "is_user_data": False,
198
+ "done_on": None,
199
+ "on_done_condition": "",
200
+ },
201
+ )
202
+ fixture_path = _write_todo_fixture(todo)
203
+ return todo, fixture_path
204
+
205
+
84
206
  def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
85
207
  """Ensure ``release`` matches the repository revision and version.
86
208
 
@@ -125,7 +247,22 @@ def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
125
247
  release.save(update_fields=list(updated_fields))
126
248
  PackageRelease.dump_fixture()
127
249
 
128
- return bool(updated_fields), previous_version
250
+ package_updated = False
251
+ if release.package_id and not release.package.is_active:
252
+ release.package.is_active = True
253
+ release.package.save(update_fields=["is_active"])
254
+ package_updated = True
255
+
256
+ version_updated = False
257
+ if release.version:
258
+ current = ""
259
+ if version_path.exists():
260
+ current = version_path.read_text(encoding="utf-8").strip()
261
+ if current != release.version:
262
+ version_path.write_text(f"{release.version}\n", encoding="utf-8")
263
+ version_updated = True
264
+
265
+ return bool(updated_fields or version_updated or package_updated), previous_version
129
266
 
130
267
 
131
268
  def _changelog_notes(version: str) -> str:
@@ -215,7 +352,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
215
352
  check=False,
216
353
  )
217
354
  ctx.pop("todos", None)
218
- ctx.pop("todos_ack", None)
355
+ ctx["todos_ack"] = True
219
356
 
220
357
 
221
358
  def _step_check_version(release, ctx, log_path: Path) -> None:
@@ -266,6 +403,7 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
266
403
  )
267
404
  subprocess.run(["git", "add", *fixture_files], check=True)
268
405
  subprocess.run(["git", "commit", "-m", "chore: update fixtures"], check=True)
406
+ _append_log(log_path, "Fixture changes committed")
269
407
 
270
408
  version_path = Path("VERSION")
271
409
  if version_path.exists():
@@ -286,25 +424,59 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
286
424
  if "already on PyPI" in str(exc):
287
425
  raise
288
426
  _append_log(log_path, f"PyPI check failed: {exc}")
427
+ else:
428
+ _append_log(
429
+ log_path,
430
+ f"Version {release.version} not published on PyPI",
431
+ )
289
432
  else:
290
433
  _append_log(log_path, "Network unavailable, skipping PyPI check")
291
434
 
292
435
 
293
436
  def _step_handle_migrations(release, ctx, log_path: Path) -> None:
294
437
  _append_log(log_path, "Freeze, squash and approve migrations")
438
+ _append_log(log_path, "Migration review acknowledged (manual step)")
295
439
 
296
440
 
297
441
  def _step_changelog_docs(release, ctx, log_path: Path) -> None:
298
442
  _append_log(log_path, "Compose CHANGELOG and documentation")
443
+ _append_log(log_path, "CHANGELOG and documentation review recorded")
299
444
 
300
445
 
301
446
  def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
302
447
  _append_log(log_path, "Execute pre-release actions")
448
+ try:
449
+ subprocess.run(["scripts/generate-changelog.sh"], check=True)
450
+ except OSError as exc:
451
+ if _should_use_python_changelog(exc):
452
+ _append_log(
453
+ log_path,
454
+ f"scripts/generate-changelog.sh failed: {exc}",
455
+ )
456
+ _generate_changelog_with_python(log_path)
457
+ else: # pragma: no cover - unexpected OSError
458
+ raise
459
+ else:
460
+ _append_log(
461
+ log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
462
+ )
463
+ subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
464
+ _append_log(log_path, "Staged CHANGELOG.rst for commit")
303
465
  version_path = Path("VERSION")
304
466
  version_path.write_text(f"{release.version}\n", encoding="utf-8")
467
+ _append_log(log_path, f"Updated VERSION file to {release.version}")
305
468
  subprocess.run(["git", "add", "VERSION"], check=True)
469
+ _append_log(log_path, "Staged VERSION for commit")
306
470
  diff = subprocess.run(
307
- ["git", "diff", "--cached", "--quiet", "--", "VERSION"],
471
+ [
472
+ "git",
473
+ "diff",
474
+ "--cached",
475
+ "--quiet",
476
+ "--",
477
+ "CHANGELOG.rst",
478
+ "VERSION",
479
+ ],
308
480
  check=False,
309
481
  )
310
482
  if diff.returncode != 0:
@@ -312,13 +484,40 @@ def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
312
484
  ["git", "commit", "-m", f"pre-release commit {release.version}"],
313
485
  check=True,
314
486
  )
487
+ _append_log(log_path, f"Committed VERSION update for {release.version}")
315
488
  else:
316
- _append_log(log_path, "No changes detected for VERSION; skipping commit")
489
+ _append_log(
490
+ log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
491
+ )
492
+ subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
493
+ _append_log(log_path, "Unstaged CHANGELOG.rst")
317
494
  subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
495
+ _append_log(log_path, "Unstaged VERSION file")
496
+ todo, fixture_path = _ensure_release_todo(release)
497
+ fixture_display = _format_path(fixture_path)
498
+ _append_log(log_path, f"Added TODO: {todo.request}")
499
+ _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
500
+ subprocess.run(["git", "add", str(fixture_path)], check=True)
501
+ _append_log(log_path, f"Staged TODO fixture {fixture_display}")
502
+ fixture_diff = subprocess.run(
503
+ ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
504
+ check=False,
505
+ )
506
+ if fixture_diff.returncode != 0:
507
+ commit_message = f"chore: add release TODO for {release.package.name}"
508
+ subprocess.run(["git", "commit", "-m", commit_message], check=True)
509
+ _append_log(log_path, f"Committed TODO fixture {fixture_display}")
510
+ else:
511
+ _append_log(
512
+ log_path,
513
+ f"No changes detected for TODO fixture {fixture_display}; skipping commit",
514
+ )
515
+ _append_log(log_path, "Pre-release actions complete")
318
516
 
319
517
 
320
518
  def _step_run_tests(release, ctx, log_path: Path) -> None:
321
519
  _append_log(log_path, "Complete test suite with --all flag")
520
+ _append_log(log_path, "Test suite completion acknowledged")
322
521
 
323
522
 
324
523
  def _step_promote_build(release, ctx, log_path: Path) -> None:
@@ -328,15 +527,22 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
328
527
  try:
329
528
  try:
330
529
  subprocess.run(["git", "fetch", "origin", "main"], check=True)
530
+ _append_log(log_path, "Fetched latest changes from origin/main")
331
531
  subprocess.run(["git", "rebase", "origin/main"], check=True)
532
+ _append_log(log_path, "Rebased current branch onto origin/main")
332
533
  except subprocess.CalledProcessError as exc:
333
534
  subprocess.run(["git", "rebase", "--abort"], check=False)
535
+ _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")
334
536
  raise Exception("Rebase onto main failed") from exc
335
537
  release_utils.promote(
336
538
  package=release.to_package(),
337
539
  version=release.version,
338
540
  creds=release.to_credentials(),
339
541
  )
542
+ _append_log(
543
+ log_path,
544
+ f"Generated release artifacts for v{release.version}",
545
+ )
340
546
  from glob import glob
341
547
 
342
548
  paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
@@ -347,6 +553,7 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
347
553
  )
348
554
  if diff.stdout.strip():
349
555
  subprocess.run(["git", "add", *paths], check=True)
556
+ _append_log(log_path, "Staged release metadata updates")
350
557
  subprocess.run(
351
558
  [
352
559
  "git",
@@ -356,8 +563,14 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
356
563
  ],
357
564
  check=True,
358
565
  )
566
+ _append_log(
567
+ log_path,
568
+ f"Committed release metadata for v{release.version}",
569
+ )
359
570
  subprocess.run(["git", "push"], check=True)
571
+ _append_log(log_path, "Pushed release changes to origin")
360
572
  PackageRelease.dump_fixture()
573
+ _append_log(log_path, "Updated release fixtures")
361
574
  except Exception:
362
575
  _clean_repo()
363
576
  raise
@@ -415,8 +628,10 @@ def _step_publish(release, ctx, log_path: Path) -> None:
415
628
  release.pypi_url = (
416
629
  f"https://pypi.org/project/{release.package.name}/{release.version}/"
417
630
  )
418
- release.save(update_fields=["pypi_url"])
631
+ release.release_on = timezone.now()
632
+ release.save(update_fields=["pypi_url", "release_on"])
419
633
  PackageRelease.dump_fixture()
634
+ _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
420
635
  _append_log(log_path, "Upload complete")
421
636
 
422
637
 
@@ -644,6 +859,8 @@ def release_progress(request, pk: int, action: str):
644
859
  f"{release.package.name}-{previous_version}*.log"
645
860
  ):
646
861
  log_file.unlink()
862
+ if not release.is_current:
863
+ raise Http404("Release is not current")
647
864
 
648
865
  if request.GET.get("restart"):
649
866
  count = 0
@@ -656,7 +873,8 @@ def release_progress(request, pk: int, action: str):
656
873
  restart_path.write_text(str(count + 1), encoding="utf-8")
657
874
  _clean_repo()
658
875
  release.pypi_url = ""
659
- release.save(update_fields=["pypi_url"])
876
+ release.release_on = None
877
+ release.save(update_fields=["pypi_url", "release_on"])
660
878
  request.session.pop(session_key, None)
661
879
  if lock_path.exists():
662
880
  lock_path.unlink()
@@ -954,9 +1172,25 @@ def _todo_iframe_url(request, todo: Todo) -> str:
954
1172
  if not raw_url:
955
1173
  return fallback
956
1174
 
1175
+ focus_path = reverse("todo-focus", args=[todo.pk])
1176
+ focus_norm = focus_path.strip("/").lower()
1177
+
1178
+ def _is_focus_target(target: str) -> bool:
1179
+ if not target:
1180
+ return False
1181
+ parsed_target = urlsplit(target)
1182
+ path = parsed_target.path
1183
+ if not path and not parsed_target.scheme and not parsed_target.netloc:
1184
+ path = target.split("?", 1)[0].split("#", 1)[0]
1185
+ normalized = path.strip("/").lower()
1186
+ return normalized == focus_norm if normalized else False
1187
+
1188
+ if _is_focus_target(raw_url):
1189
+ return fallback
1190
+
957
1191
  parsed = urlsplit(raw_url)
958
1192
  if not parsed.scheme and not parsed.netloc:
959
- return raw_url
1193
+ return fallback if _is_focus_target(parsed.path) else raw_url
960
1194
 
961
1195
  if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
962
1196
  return fallback
@@ -996,7 +1230,10 @@ def _todo_iframe_url(request, todo: Todo) -> str:
996
1230
  path = parsed.path or "/"
997
1231
  if not path.startswith("/"):
998
1232
  path = f"/{path}"
999
- return urlunsplit(("", "", path, parsed.query, parsed.fragment)) or fallback
1233
+ relative_url = urlunsplit(("", "", path, parsed.query, parsed.fragment))
1234
+ if _is_focus_target(relative_url):
1235
+ return fallback
1236
+ return relative_url or fallback
1000
1237
 
1001
1238
  return fallback
1002
1239