arthexis 0.1.13__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis might be problematic. Click here for more details.

Files changed (107)
  1. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/METADATA +222 -221
  2. arthexis-0.1.14.dist-info/RECORD +109 -0
  3. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +43 -43
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -32
  9. config/context_processors.py +67 -69
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +25 -25
  14. config/offline.py +49 -49
  15. config/settings.py +691 -682
  16. config/settings_helpers.py +109 -109
  17. config/urls.py +171 -166
  18. config/wsgi.py +17 -17
  19. core/admin.py +3771 -2809
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +151 -151
  22. core/apps.py +356 -272
  23. core/auto_upgrade.py +57 -57
  24. core/backends.py +265 -236
  25. core/changelog.py +342 -0
  26. core/entity.py +133 -133
  27. core/environment.py +61 -61
  28. core/fields.py +168 -168
  29. core/form_fields.py +75 -75
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +178 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +100 -100
  36. core/mailer.py +85 -85
  37. core/middleware.py +91 -91
  38. core/models.py +3609 -2795
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +108 -108
  42. core/release.py +721 -368
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -149
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +315 -315
  47. core/system.py +752 -493
  48. core/tasks.py +408 -394
  49. core/temp_passwords.py +181 -181
  50. core/test_system_info.py +186 -139
  51. core/tests.py +2095 -1521
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +641 -633
  55. core/views.py +2175 -1417
  56. core/widgets.py +213 -94
  57. core/workgroup_urls.py +17 -17
  58. core/workgroup_views.py +94 -94
  59. nodes/admin.py +1720 -1161
  60. nodes/apps.py +87 -85
  61. nodes/backends.py +160 -160
  62. nodes/dns.py +203 -203
  63. nodes/feature_checks.py +133 -133
  64. nodes/lcd.py +165 -165
  65. nodes/models.py +1737 -1597
  66. nodes/reports.py +411 -411
  67. nodes/rfid_sync.py +195 -0
  68. nodes/signals.py +18 -0
  69. nodes/tasks.py +46 -46
  70. nodes/tests.py +3810 -3116
  71. nodes/urls.py +15 -14
  72. nodes/utils.py +121 -105
  73. nodes/views.py +683 -619
  74. ocpp/admin.py +948 -948
  75. ocpp/apps.py +25 -25
  76. ocpp/consumers.py +1565 -1459
  77. ocpp/evcs.py +844 -844
  78. ocpp/evcs_discovery.py +158 -158
  79. ocpp/models.py +917 -917
  80. ocpp/reference_utils.py +42 -42
  81. ocpp/routing.py +11 -11
  82. ocpp/simulator.py +745 -745
  83. ocpp/status_display.py +26 -26
  84. ocpp/store.py +601 -541
  85. ocpp/tasks.py +31 -31
  86. ocpp/test_export_import.py +130 -130
  87. ocpp/test_rfid.py +913 -702
  88. ocpp/tests.py +4445 -4094
  89. ocpp/transactions_io.py +189 -189
  90. ocpp/urls.py +50 -50
  91. ocpp/views.py +1479 -1251
  92. pages/admin.py +708 -539
  93. pages/apps.py +10 -10
  94. pages/checks.py +40 -40
  95. pages/context_processors.py +127 -119
  96. pages/defaults.py +13 -13
  97. pages/forms.py +198 -198
  98. pages/middleware.py +205 -153
  99. pages/models.py +607 -426
  100. pages/tests.py +2612 -2200
  101. pages/urls.py +25 -25
  102. pages/utils.py +12 -12
  103. pages/views.py +1165 -1128
  104. arthexis-0.1.13.dist-info/RECORD +0 -105
  105. nodes/actions.py +0 -70
  106. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/WHEEL +0 -0
  107. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/top_level.txt +0 -0
core/views.py CHANGED
@@ -1,1417 +1,2175 @@
1
- import json
2
- import logging
3
- import shutil
4
- from datetime import timedelta
5
-
6
- import requests
7
- from django.conf import settings
8
- from django.contrib.admin.views.decorators import staff_member_required
9
- from django.contrib.auth import authenticate, login
10
- from django.contrib import messages
11
- from django.contrib.sites.models import Site
12
- from django.http import Http404, JsonResponse, HttpResponse
13
- from django.shortcuts import get_object_or_404, redirect, render, resolve_url
14
- from django.utils import timezone
15
- from django.utils.text import slugify
16
- from django.utils.translation import gettext as _
17
- from django.urls import NoReverseMatch, reverse
18
- from django.views.decorators.csrf import csrf_exempt
19
- from django.views.decorators.http import require_GET, require_POST
20
- from django.utils.http import url_has_allowed_host_and_scheme
21
- from pathlib import Path
22
- from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
23
- import errno
24
- import subprocess
25
-
26
- from django.template.loader import get_template
27
- from django.test import signals
28
-
29
- from utils import revision
30
- from utils.api import api_login_required
31
-
32
- logger = logging.getLogger(__name__)
33
-
34
- from .models import Product, EnergyAccount, PackageRelease, Todo
35
- from .models import RFID
36
-
37
-
38
@staff_member_required
def odoo_products(request):
    """Return available products from the user's Odoo instance.

    Responds with a JSON array of ``{"id", "name"}`` items, raises 404 when
    the user has no verified Odoo profile, and returns 502 when the remote
    call fails.
    """

    # Only users with a verified Odoo profile may query products.
    profile = getattr(request.user, "odoo_profile", None)
    if not profile or not profile.is_verified:
        raise Http404
    try:
        # NOTE(review): presumably an Odoo RPC `search_read` on
        # product.product — confirm `profile.execute` semantics.
        products = profile.execute(
            "product.product",
            "search_read",
            [[]],
            {"fields": ["name"], "limit": 50},
        )
    except Exception:
        logger.exception(
            "Failed to fetch Odoo products via API for user %s (profile_id=%s, host=%s, database=%s)",
            getattr(request.user, "pk", None),
            getattr(profile, "pk", None),
            getattr(profile, "host", None),
            getattr(profile, "database", None),
        )
        # Upstream failure is surfaced as a bad gateway.
        return JsonResponse({"detail": "Unable to fetch products"}, status=502)
    items = [{"id": p.get("id"), "name": p.get("name", "")} for p in products]
    return JsonResponse(items, safe=False)
63
-
64
-
65
@require_GET
def version_info(request):
    """Return the running application version and Git revision.

    The version is read from the ``VERSION`` file at the project root
    (empty string when the file is missing); the revision comes from
    ``utils.revision.get_revision()``.
    """

    version = ""
    version_path = Path(settings.BASE_DIR) / "VERSION"
    if version_path.exists():
        version = version_path.read_text(encoding="utf-8").strip()
    return JsonResponse(
        {
            "version": version,
            "revision": revision.get_revision(),
        }
    )
79
-
80
-
81
- from . import release as release_utils
82
-
83
-
84
- TODO_FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"
85
-
86
-
87
- def _append_log(path: Path, message: str) -> None:
88
- path.parent.mkdir(parents=True, exist_ok=True)
89
- with path.open("a", encoding="utf-8") as fh:
90
- fh.write(message + "\n")
91
-
92
-
93
- def _release_log_name(package_name: str, version: str) -> str:
94
- return f"pr.{package_name}.v{version}.log"
95
-
96
-
97
def _clean_repo() -> None:
    """Return the git repository to a clean state (best effort).

    Failures are ignored (``check=False``) so cleanup never masks the
    original error that triggered it.
    """
    for git_args in (["reset", "--hard"], ["clean", "-fd"]):
        subprocess.run(["git", *git_args], check=False)
101
-
102
-
103
- def _format_path(path: Path) -> str:
104
- try:
105
- return str(path.resolve().relative_to(Path.cwd()))
106
- except ValueError:
107
- return str(path)
108
-
109
-
110
def _next_patch_version(version: str) -> str:
    """Return *version* with its patch (micro) component bumped by one.

    Non-PEP 440 strings fall back to bumping the right-most numeric
    dotted segment; strings with no numeric segment are returned as-is.
    """
    from packaging.version import InvalidVersion, Version

    try:
        parsed = Version(version)
    except InvalidVersion:
        segments = version.split(".")
        # Scan right-to-left for the first purely numeric segment.
        for position in reversed(range(len(segments))):
            if segments[position].isdigit():
                segments[position] = str(int(segments[position]) + 1)
                return ".".join(segments)
        return version
    return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
124
-
125
-
126
def _write_todo_fixture(todo: Todo) -> Path:
    """Serialize *todo* to a Django fixture file and return its path.

    The filename is derived from the TODO's request text; the fixture is
    written under ``TODO_FIXTURE_DIR`` as ``todos__<slug>.json``.
    """
    # Dots would be dropped by slugify; turn them into word breaks first.
    safe_request = todo.request.replace(".", " ")
    slug = slugify(safe_request).replace("-", "_")
    if not slug:
        slug = "todo"
    path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
    path.parent.mkdir(parents=True, exist_ok=True)
    # Minimal loaddata-compatible payload for a single core.todo object.
    data = [
        {
            "model": "core.todo",
            "fields": {
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            },
        }
    ]
    path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
    return path
145
-
146
-
147
- def _should_use_python_changelog(exc: OSError) -> bool:
148
- winerror = getattr(exc, "winerror", None)
149
- if winerror in {193}:
150
- return True
151
- return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
152
-
153
-
154
def _generate_changelog_with_python(log_path: Path) -> None:
    """Regenerate CHANGELOG.rst from git history without the shell script.

    Used when ``scripts/generate-changelog.sh`` cannot run.  Collects
    commits since the latest tag (or all of HEAD when no tag exists) and
    prepends them under an "Unreleased" heading, keeping the previous
    changelog body below.
    """
    _append_log(log_path, "Falling back to Python changelog generator")
    # Latest tag marks the start of the "unreleased" range.
    describe = subprocess.run(
        ["git", "describe", "--tags", "--abbrev=0"],
        capture_output=True,
        text=True,
        check=False,
    )
    start_tag = describe.stdout.strip() if describe.returncode == 0 else ""
    range_spec = f"{start_tag}..HEAD" if start_tag else "HEAD"
    log_proc = subprocess.run(
        ["git", "log", range_spec, "--no-merges", "--pretty=format:- %h %s"],
        capture_output=True,
        text=True,
        check=True,
    )
    entries = [line for line in log_proc.stdout.splitlines() if line]
    changelog_path = Path("CHANGELOG.rst")
    previous_lines: list[str] = []
    if changelog_path.exists():
        previous_lines = changelog_path.read_text(encoding="utf-8").splitlines()
        # Drop the first six lines: the standing header block rewritten
        # below ("Changelog" title + "Unreleased" section scaffolding).
        if len(previous_lines) > 6:
            previous_lines = previous_lines[6:]
        else:
            previous_lines = []
    lines = [
        "Changelog",
        "=========",
        "",
        "Unreleased",
        "----------",
        "",
    ]
    if entries:
        lines.extend(entries)
    if previous_lines:
        lines.append("")
        lines.extend(previous_lines)
    content = "\n".join(lines)
    if not content.endswith("\n"):
        content += "\n"
    changelog_path.write_text(content, encoding="utf-8")
    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
197
-
198
-
199
def _ensure_release_todo(release) -> tuple[Todo, Path]:
    """Create or refresh the "create next release" TODO and its fixture.

    Builds a TODO asking for the next patch release of ``release``'s
    package, upserts it (case-insensitive on the request text, reviving
    soft-deleted rows), writes its fixture file and returns both.
    """
    target_version = _next_patch_version(release.version)
    request = f"Create release {release.package.name} {target_version}"
    try:
        url = reverse("admin:core_packagerelease_changelist")
    except NoReverseMatch:
        # Admin URLs may be unavailable (e.g. admin not installed).
        url = ""
    # NOTE: unpack into `_created`, not `_`, to avoid shadowing the
    # module-level gettext alias `_`.
    todo, _created = Todo.all_objects.update_or_create(
        request__iexact=request,
        defaults={
            "request": request,
            "url": url,
            "request_details": "",
            "is_seed_data": True,
            "is_deleted": False,
            "is_user_data": False,
            "done_on": None,
            "on_done_condition": "",
        },
    )
    fixture_path = _write_todo_fixture(todo)
    return todo, fixture_path
221
-
222
-
223
def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
    """Ensure ``release`` matches the repository revision and version.

    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
    ``True`` when any field changed and ``previous_version`` is the version
    before synchronization.
    """

    from packaging.version import InvalidVersion, Version

    previous_version = release.version
    updated_fields: set[str] = set()

    # Parse the on-disk VERSION file, if present and PEP 440-valid.
    repo_version: Version | None = None
    version_path = Path("VERSION")
    if version_path.exists():
        try:
            repo_version = Version(version_path.read_text(encoding="utf-8").strip())
        except InvalidVersion:
            repo_version = None

    try:
        release_version = Version(release.version)
    except InvalidVersion:
        release_version = None

    # The release must be at least one patch ahead of the repo version.
    if repo_version is not None:
        bumped_repo_version = Version(
            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
        )
        if release_version is None or release_version < bumped_repo_version:
            release.version = str(bumped_repo_version)
            release_version = bumped_repo_version
            updated_fields.add("version")

    # Track the current git revision on the release record.
    current_revision = revision.get_revision()
    if current_revision and current_revision != release.revision:
        release.revision = current_revision
        updated_fields.add("revision")

    if updated_fields:
        release.save(update_fields=list(updated_fields))
        PackageRelease.dump_fixture()

    # Re-activate the package when a release is being synchronized.
    package_updated = False
    if release.package_id and not release.package.is_active:
        release.package.is_active = True
        release.package.save(update_fields=["is_active"])
        package_updated = True

    # Write the (possibly bumped) version back to the VERSION file.
    version_updated = False
    if release.version:
        current = ""
        if version_path.exists():
            current = version_path.read_text(encoding="utf-8").strip()
        if current != release.version:
            version_path.write_text(f"{release.version}\n", encoding="utf-8")
            version_updated = True

    return bool(updated_fields or version_updated or package_updated), previous_version
283
-
284
-
285
- def _changelog_notes(version: str) -> str:
286
- path = Path("CHANGELOG.rst")
287
- if not path.exists():
288
- return ""
289
- lines = path.read_text(encoding="utf-8").splitlines()
290
- prefix = f"{version} "
291
- for i, line in enumerate(lines):
292
- if line.startswith(prefix):
293
- j = i + 2
294
- items = []
295
- while j < len(lines) and lines[j].startswith("- "):
296
- items.append(lines[j])
297
- j += 1
298
- return "\n".join(items)
299
- return ""
300
-
301
-
302
class PendingTodos(Exception):
    """Raised when TODO items require acknowledgment before proceeding."""


class ApprovalRequired(Exception):
    """Raised when release manager approval is required before continuing."""
308
-
309
-
310
def _format_condition_failure(todo: Todo, result) -> str:
    """Return a localized error message for a failed TODO condition.

    ``result`` carries ``resolved`` (the evaluated condition text) and
    ``error`` (an error message, if any); the most specific combination
    available is used as the detail.
    """

    if result.error and result.resolved:
        detail = _("%(condition)s (error: %(error)s)") % {
            "condition": result.resolved,
            "error": result.error,
        }
    elif result.error:
        detail = _("Error: %(error)s") % {"error": result.error}
    elif result.resolved:
        detail = result.resolved
    else:
        # No error and nothing resolved: the condition simply came out falsy.
        detail = _("Condition evaluated to False")
    return _("Condition failed for %(todo)s: %(detail)s") % {
        "todo": todo.request,
        "detail": detail,
    }
328
-
329
-
330
def _get_return_url(request) -> str:
    """Return a safe URL to redirect back to after completing a TODO.

    Checks ``next`` from the query string, then POST data, then the
    Referer header; falls back to the admin index when none is safe.
    """

    candidates = [request.GET.get("next"), request.POST.get("next")]
    referer = request.META.get("HTTP_REFERER")
    if referer:
        candidates.append(referer)

    for candidate in candidates:
        if not candidate:
            continue
        # Open-redirect guard: only same-host, scheme-compatible targets.
        if url_has_allowed_host_and_scheme(
            candidate,
            allowed_hosts={request.get_host()},
            require_https=request.is_secure(),
        ):
            return candidate
    return resolve_url("admin:index")
348
-
349
-
350
def _step_check_todos(release, ctx, log_path: Path) -> None:
    """Release step: require acknowledgment of pending TODOs, then purge them.

    Raises ``PendingTodos`` (listing them in ``ctx["todos"]``) until the
    operator sets ``ctx["todos_ack"]``; afterwards deletes all TODOs and
    commits the removal of their fixture files.
    """
    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    if pending_qs.exists():
        ctx["todos"] = list(
            pending_qs.values("id", "request", "url", "request_details")
        )
        if not ctx.get("todos_ack"):
            raise PendingTodos()
    # Acknowledged: remove every remaining TODO record.
    todos = list(Todo.objects.filter(is_deleted=False))
    for todo in todos:
        todo.delete()
    # Delete the on-disk TODO fixtures and commit the removal (best effort).
    removed = []
    for path in TODO_FIXTURE_DIR.glob("todos__*.json"):
        removed.append(str(path))
        path.unlink()
    if removed:
        subprocess.run(["git", "add", *removed], check=False)
        subprocess.run(
            ["git", "commit", "-m", "chore: remove TODO fixtures"],
            check=False,
        )
    ctx.pop("todos", None)
    ctx["todos_ack"] = True
373
-
374
-
375
def _step_check_version(release, ctx, log_path: Path) -> None:
    """Release step: verify the working tree and version availability.

    A dirty tree is tolerated only when every changed file is a fixture
    JSON (those are auto-committed); then the VERSION file is checked for
    monotonicity and PyPI is queried to ensure the version is unused.
    """
    from . import release as release_utils
    from packaging.version import InvalidVersion, Version

    if not release_utils._git_clean():
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True,
            text=True,
        )
        # Porcelain lines are "XY <path>": strip the 3-char status prefix.
        files = [line[3:] for line in proc.stdout.splitlines()]
        fixture_files = [
            f
            for f in files
            if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
        ]
        # Any non-fixture modification aborts the release.
        if not files or len(fixture_files) != len(files):
            raise Exception("Git repository is not clean")

        # Summarize each fixture (object count + model names) for the UI.
        summary = []
        for f in fixture_files:
            path = Path(f)
            try:
                data = json.loads(path.read_text(encoding="utf-8"))
            except Exception:
                count = 0
                models: list[str] = []
            else:
                if isinstance(data, list):
                    count = len(data)
                    models = sorted(
                        {obj.get("model", "") for obj in data if isinstance(obj, dict)}
                    )
                elif isinstance(data, dict):
                    count = 1
                    models = [data.get("model", "")]
                else:  # pragma: no cover - unexpected structure
                    count = 0
                    models = []
            summary.append({"path": f, "count": count, "models": models})

        ctx["fixtures"] = summary
        _append_log(
            log_path,
            "Committing fixture changes: " + ", ".join(fixture_files),
        )
        subprocess.run(["git", "add", *fixture_files], check=True)
        subprocess.run(["git", "commit", "-m", "chore: update fixtures"], check=True)
        _append_log(log_path, "Fixture changes committed")

    # The release version must never go backwards relative to VERSION.
    version_path = Path("VERSION")
    if version_path.exists():
        current = version_path.read_text(encoding="utf-8").strip()
        if current and Version(release.version) < Version(current):
            raise Exception(
                f"Version {release.version} is older than existing {current}"
            )

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(f"https://pypi.org/pypi/{release.package.name}/json")
            if resp.ok:
                data = resp.json()
                releases = data.get("releases", {})
                try:
                    target_version = Version(release.version)
                except InvalidVersion:
                    target_version = None

                for candidate, files in releases.items():
                    # Compare literally first, then as parsed versions so
                    # PEP 440-equivalent spellings still match.
                    same_version = candidate == release.version
                    if target_version is not None and not same_version:
                        try:
                            same_version = Version(candidate) == target_version
                        except InvalidVersion:
                            same_version = False
                    if not same_version:
                        continue

                    # A version counts as taken only when it still has
                    # non-yanked files available for download.
                    has_available_files = any(
                        isinstance(file_data, dict)
                        and not file_data.get("yanked", False)
                        for file_data in files or []
                    )
                    if has_available_files:
                        raise Exception(
                            f"Version {release.version} already on PyPI"
                        )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
        else:
            _append_log(
                log_path,
                f"Version {release.version} not published on PyPI",
            )
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")
476
-
477
-
478
def _step_handle_migrations(release, ctx, log_path: Path) -> None:
    """Release step: record the manual migration review.

    The freeze/squash/approve work happens outside this process; this
    step only logs that the operator acknowledged it.
    """
    _append_log(log_path, "Freeze, squash and approve migrations")
    _append_log(log_path, "Migration review acknowledged (manual step)")
481
-
482
-
483
def _step_changelog_docs(release, ctx, log_path: Path) -> None:
    """Release step: record the manual CHANGELOG/documentation review."""
    _append_log(log_path, "Compose CHANGELOG and documentation")
    _append_log(log_path, "CHANGELOG and documentation review recorded")
486
-
487
-
488
def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
    """Release step: regenerate the changelog, bump VERSION, seed a TODO.

    Runs the changelog shell script (falling back to the Python generator
    when it cannot execute), writes the release version to VERSION,
    commits both when changed, and records a "create next release" TODO
    with its fixture.
    """
    _append_log(log_path, "Execute pre-release actions")
    try:
        subprocess.run(["scripts/generate-changelog.sh"], check=True)
    except OSError as exc:
        # Script not executable here (e.g. Windows): use the Python
        # fallback for the recognized launch failures only.
        if _should_use_python_changelog(exc):
            _append_log(
                log_path,
                f"scripts/generate-changelog.sh failed: {exc}",
            )
            _generate_changelog_with_python(log_path)
        else:  # pragma: no cover - unexpected OSError
            raise
    else:
        _append_log(
            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
        )
    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
    _append_log(log_path, "Staged CHANGELOG.rst for commit")
    version_path = Path("VERSION")
    version_path.write_text(f"{release.version}\n", encoding="utf-8")
    _append_log(log_path, f"Updated VERSION file to {release.version}")
    subprocess.run(["git", "add", "VERSION"], check=True)
    _append_log(log_path, "Staged VERSION for commit")
    # `git diff --cached --quiet` exits non-zero when staged changes exist.
    diff = subprocess.run(
        [
            "git",
            "diff",
            "--cached",
            "--quiet",
            "--",
            "CHANGELOG.rst",
            "VERSION",
        ],
        check=False,
    )
    if diff.returncode != 0:
        subprocess.run(
            ["git", "commit", "-m", f"pre-release commit {release.version}"],
            check=True,
        )
        _append_log(log_path, f"Committed VERSION update for {release.version}")
    else:
        _append_log(
            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
        )
        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
        _append_log(log_path, "Unstaged CHANGELOG.rst")
        subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
        _append_log(log_path, "Unstaged VERSION file")
    # Seed a TODO reminding the team to create the next release.
    todo, fixture_path = _ensure_release_todo(release)
    fixture_display = _format_path(fixture_path)
    _append_log(log_path, f"Added TODO: {todo.request}")
    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
    subprocess.run(["git", "add", str(fixture_path)], check=True)
    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
    fixture_diff = subprocess.run(
        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
        check=False,
    )
    if fixture_diff.returncode != 0:
        commit_message = f"chore: add release TODO for {release.package.name}"
        subprocess.run(["git", "commit", "-m", commit_message], check=True)
        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
    else:
        _append_log(
            log_path,
            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
        )
    _append_log(log_path, "Pre-release actions complete")
558
-
559
-
560
def _step_run_tests(release, ctx, log_path: Path) -> None:
    """Release step: record that the full test suite was run manually."""
    _append_log(log_path, "Complete test suite with --all flag")
    _append_log(log_path, "Test suite completion acknowledged")
563
-
564
-
565
def _step_promote_build(release, ctx, log_path: Path) -> None:
    """Release step: rebase onto main, build artifacts, push metadata.

    Any failure triggers ``_clean_repo()`` and re-raises.  On success the
    build log is renamed to the canonical per-version name and stored in
    ``ctx["log"]``.
    """
    from . import release as release_utils

    _append_log(log_path, "Generating build files")
    try:
        try:
            subprocess.run(["git", "fetch", "origin", "main"], check=True)
            _append_log(log_path, "Fetched latest changes from origin/main")
            subprocess.run(["git", "rebase", "origin/main"], check=True)
            _append_log(log_path, "Rebased current branch onto origin/main")
        except subprocess.CalledProcessError as exc:
            # Leave the tree usable before surfacing the failure.
            subprocess.run(["git", "rebase", "--abort"], check=False)
            _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")
            raise Exception("Rebase onto main failed") from exc
        release_utils.promote(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(),
        )
        _append_log(
            log_path,
            f"Generated release artifacts for v{release.version}",
        )
        from glob import glob

        # Commit and push VERSION plus the release fixtures when changed.
        paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
        diff = subprocess.run(
            ["git", "status", "--porcelain", *paths],
            capture_output=True,
            text=True,
        )
        if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
                    "commit",
                    "-m",
                    f"chore: update release metadata for v{release.version}",
                ],
                check=True,
            )
            _append_log(
                log_path,
                f"Committed release metadata for v{release.version}",
            )
            subprocess.run(["git", "push"], check=True)
            _append_log(log_path, "Pushed release changes to origin")
        PackageRelease.dump_fixture()
        _append_log(log_path, "Updated release fixtures")
    except Exception:
        _clean_repo()
        raise
    # Rename the working log to the canonical per-release filename.
    target_name = _release_log_name(release.package.name, release.version)
    new_log = log_path.with_name(target_name)
    if log_path != new_log:
        if new_log.exists():
            new_log.unlink()
        log_path.rename(new_log)
    else:
        new_log = log_path
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")
629
-
630
-
631
def _step_release_manager_approval(release, ctx, log_path: Path) -> None:
    """Release step: block until the release manager approves.

    Raises ``ApprovalRequired`` while waiting (or while publishing
    credentials are missing); returns on approval; raises
    ``RuntimeError`` on rejection.  State is tracked in ``ctx``.
    """
    if release.to_credentials() is None:
        # No publishing credentials: any pending decision is void.
        ctx.pop("release_approval", None)
        if not ctx.get("approval_credentials_missing"):
            _append_log(log_path, "Release manager publishing credentials missing")
            ctx["approval_credentials_missing"] = True
        ctx["awaiting_approval"] = True
        raise ApprovalRequired()

    missing_before = ctx.pop("approval_credentials_missing", None)
    if missing_before:
        ctx.pop("awaiting_approval", None)
    decision = ctx.get("release_approval")
    if decision == "approved":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager approved release")
        return
    if decision == "rejected":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager rejected release")
        raise RuntimeError(
            _("Release manager rejected the release. Restart required."),
        )
    # No decision yet: log the wait only on the first pass.
    if not ctx.get("awaiting_approval"):
        ctx["awaiting_approval"] = True
        _append_log(log_path, "Awaiting release manager approval")
    else:
        ctx["awaiting_approval"] = True
    raise ApprovalRequired()
664
-
665
-
666
def _step_publish(release, ctx, log_path: Path) -> None:
    """Release step: upload the built distribution to PyPI.

    On success records the PyPI project URL and the release timestamp on
    the model and refreshes the release fixtures.
    """
    from . import release as release_utils

    _append_log(log_path, "Uploading distribution")
    release_utils.publish(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
    )
    release.pypi_url = (
        f"https://pypi.org/project/{release.package.name}/{release.version}/"
    )
    release.release_on = timezone.now()
    release.save(update_fields=["pypi_url", "release_on"])
    PackageRelease.dump_fixture()
    _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
    _append_log(log_path, "Upload complete")
683
-
684
-
685
# Step label reused elsewhere to identify the migration-review step.
FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"


# Ordered (label, callable) pipeline executed when publishing a release.
PUBLISH_STEPS = [
    ("Check version number availability", _step_check_version),
    ("Confirm release TODO completion", _step_check_todos),
    (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
    ("Compose CHANGELOG and documentation", _step_changelog_docs),
    ("Execute pre-release actions", _step_pre_release_actions),
    ("Build release artifacts", _step_promote_build),
    ("Complete test suite with --all flag", _step_run_tests),
    ("Get Release Manager Approval", _step_release_manager_approval),
    ("Upload final build to PyPI", _step_publish),
]
699
-
700
-
701
@csrf_exempt
def rfid_login(request):
    """Authenticate a user using an RFID.

    Accepts a JSON body (or form data) containing ``rfid``, authenticates
    via an RFID-aware backend, starts a session and returns the user's id
    and username.
    """

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        # Fall back to regular form-encoded POST data.
        data = request.POST

    rfid = data.get("rfid")
    if not rfid:
        return JsonResponse({"detail": "rfid required"}, status=400)

    user = authenticate(request, rfid=rfid)
    if user is None:
        return JsonResponse({"detail": "invalid RFID"}, status=401)

    login(request, user)
    return JsonResponse({"id": user.id, "username": user.username})
723
-
724
-
725
@api_login_required
def product_list(request):
    """Return a JSON list of products with their renewal periods."""

    products = list(
        Product.objects.values("id", "name", "description", "renewal_period")
    )
    return JsonResponse({"products": products})
733
-
734
-
735
@csrf_exempt
@api_login_required
def add_live_subscription(request):
    """Create a live subscription for an energy account from POSTed JSON.

    Expects ``account_id`` and ``product_id``; sets the account's live
    subscription product, start date and next renewal date, then returns
    the account id.
    """

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        # Fall back to form-encoded POST data.
        data = request.POST

    account_id = data.get("account_id")
    product_id = data.get("product_id")

    if not account_id or not product_id:
        return JsonResponse(
            {"detail": "account_id and product_id required"}, status=400
        )

    try:
        product = Product.objects.get(id=product_id)
    except Product.DoesNotExist:
        return JsonResponse({"detail": "invalid product"}, status=404)

    try:
        account = EnergyAccount.objects.get(id=account_id)
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    # Next renewal = today + the product's renewal period (in days).
    start_date = timezone.now().date()
    account.live_subscription_product = product
    account.live_subscription_start_date = start_date
    account.live_subscription_next_renewal = start_date + timedelta(
        days=product.renewal_period
    )
    account.save()

    return JsonResponse({"id": account.id})
775
-
776
-
777
@api_login_required
def live_subscription_list(request):
    """Return live subscriptions for the given account_id.

    The response list holds at most one entry (accounts carry a single
    live subscription product); ``next_renewal`` is derived from the
    start date when not stored explicitly.
    """

    account_id = request.GET.get("account_id")
    if not account_id:
        return JsonResponse({"detail": "account_id required"}, status=400)

    try:
        account = EnergyAccount.objects.select_related("live_subscription_product").get(
            id=account_id
        )
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    subs = []
    product = account.live_subscription_product
    if product:
        next_renewal = account.live_subscription_next_renewal
        # Derive the renewal date when only the start date is recorded.
        if not next_renewal and account.live_subscription_start_date:
            next_renewal = account.live_subscription_start_date + timedelta(
                days=product.renewal_period
            )

        subs.append(
            {
                "id": account.id,
                "product__name": product.name,
                "next_renewal": next_renewal,
            }
        )

    return JsonResponse({"live_subscriptions": subs})
810
-
811
-
812
- @csrf_exempt
813
- @api_login_required
814
- def rfid_batch(request):
815
- """Export or import RFID tags in batch."""
816
-
817
- if request.method == "GET":
818
- color = request.GET.get("color", RFID.BLACK).upper()
819
- released = request.GET.get("released")
820
- if released is not None:
821
- released = released.lower()
822
- qs = RFID.objects.all()
823
- if color != "ALL":
824
- qs = qs.filter(color=color)
825
- if released in ("true", "false"):
826
- qs = qs.filter(released=(released == "true"))
827
- tags = [
828
- {
829
- "rfid": t.rfid,
830
- "custom_label": t.custom_label,
831
- "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
832
- "allowed": t.allowed,
833
- "color": t.color,
834
- "released": t.released,
835
- }
836
- for t in qs.order_by("rfid")
837
- ]
838
- return JsonResponse({"rfids": tags})
839
-
840
- if request.method == "POST":
841
- try:
842
- data = json.loads(request.body.decode())
843
- except json.JSONDecodeError:
844
- return JsonResponse({"detail": "invalid JSON"}, status=400)
845
-
846
- tags = data.get("rfids") if isinstance(data, dict) else data
847
- if not isinstance(tags, list):
848
- return JsonResponse({"detail": "rfids list required"}, status=400)
849
-
850
- count = 0
851
- for row in tags:
852
- rfid = (row.get("rfid") or "").strip()
853
- if not rfid:
854
- continue
855
- allowed = row.get("allowed", True)
856
- energy_accounts = row.get("energy_accounts") or []
857
- color = (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
858
- released = row.get("released", False)
859
- if isinstance(released, str):
860
- released = released.lower() == "true"
861
- custom_label = (row.get("custom_label") or "").strip()
862
-
863
- tag, _ = RFID.objects.update_or_create(
864
- rfid=rfid.upper(),
865
- defaults={
866
- "allowed": allowed,
867
- "color": color,
868
- "released": released,
869
- "custom_label": custom_label,
870
- },
871
- )
872
- if energy_accounts:
873
- tag.energy_accounts.set(
874
- EnergyAccount.objects.filter(id__in=energy_accounts)
875
- )
876
- else:
877
- tag.energy_accounts.clear()
878
- count += 1
879
-
880
- return JsonResponse({"imported": count})
881
-
882
- return JsonResponse({"detail": "GET or POST required"}, status=400)
883
-
884
-
885
- @staff_member_required
886
- def release_progress(request, pk: int, action: str):
887
- release = get_object_or_404(PackageRelease, pk=pk)
888
- if action != "publish":
889
- raise Http404("Unknown action")
890
- session_key = f"release_publish_{pk}"
891
- lock_path = Path("locks") / f"release_publish_{pk}.json"
892
- restart_path = Path("locks") / f"release_publish_{pk}.restarts"
893
-
894
- if not release.is_current:
895
- if release.is_published:
896
- raise Http404("Release is not current")
897
- updated, previous_version = _sync_release_with_revision(release)
898
- if updated:
899
- request.session.pop(session_key, None)
900
- if lock_path.exists():
901
- lock_path.unlink()
902
- if restart_path.exists():
903
- restart_path.unlink()
904
- log_dir = Path("logs")
905
- pattern = f"pr.{release.package.name}.v{previous_version}*.log"
906
- for log_file in log_dir.glob(pattern):
907
- log_file.unlink()
908
- if not release.is_current:
909
- raise Http404("Release is not current")
910
-
911
- if request.GET.get("restart"):
912
- count = 0
913
- if restart_path.exists():
914
- try:
915
- count = int(restart_path.read_text(encoding="utf-8"))
916
- except Exception:
917
- count = 0
918
- restart_path.parent.mkdir(parents=True, exist_ok=True)
919
- restart_path.write_text(str(count + 1), encoding="utf-8")
920
- _clean_repo()
921
- release.pypi_url = ""
922
- release.release_on = None
923
- release.save(update_fields=["pypi_url", "release_on"])
924
- request.session.pop(session_key, None)
925
- if lock_path.exists():
926
- lock_path.unlink()
927
- log_dir = Path("logs")
928
- pattern = f"pr.{release.package.name}.v{release.version}*.log"
929
- for f in log_dir.glob(pattern):
930
- f.unlink()
931
- return redirect(request.path)
932
- ctx = request.session.get(session_key)
933
- if ctx is None and lock_path.exists():
934
- try:
935
- ctx = json.loads(lock_path.read_text(encoding="utf-8"))
936
- except Exception:
937
- ctx = {"step": 0}
938
- if ctx is None:
939
- ctx = {"step": 0}
940
- if restart_path.exists():
941
- restart_path.unlink()
942
-
943
- manager = release.release_manager or release.package.release_manager
944
- credentials_ready = bool(release.to_credentials())
945
- if credentials_ready and ctx.get("approval_credentials_missing"):
946
- ctx.pop("approval_credentials_missing", None)
947
-
948
- ack_todos_requested = bool(request.GET.get("ack_todos"))
949
-
950
- if request.GET.get("start"):
951
- ctx["started"] = True
952
- ctx["paused"] = False
953
- if (
954
- ctx.get("awaiting_approval")
955
- and not ctx.get("approval_credentials_missing")
956
- and credentials_ready
957
- ):
958
- if request.GET.get("approve"):
959
- ctx["release_approval"] = "approved"
960
- if request.GET.get("reject"):
961
- ctx["release_approval"] = "rejected"
962
- if request.GET.get("pause") and ctx.get("started"):
963
- ctx["paused"] = True
964
- restart_count = 0
965
- if restart_path.exists():
966
- try:
967
- restart_count = int(restart_path.read_text(encoding="utf-8"))
968
- except Exception:
969
- restart_count = 0
970
- step_count = ctx.get("step", 0)
971
- step_param = request.GET.get("step")
972
-
973
- pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
974
- pending_items = list(pending_qs)
975
- if ack_todos_requested:
976
- if pending_items:
977
- failures = []
978
- for todo in pending_items:
979
- result = todo.check_on_done_condition()
980
- if not result.passed:
981
- failures.append((todo, result))
982
- if failures:
983
- ctx.pop("todos_ack", None)
984
- for todo, result in failures:
985
- messages.error(request, _format_condition_failure(todo, result))
986
- else:
987
- ctx["todos_ack"] = True
988
- else:
989
- ctx["todos_ack"] = True
990
-
991
- if pending_items and not ctx.get("todos_ack"):
992
- ctx["todos"] = [
993
- {
994
- "id": todo.pk,
995
- "request": todo.request,
996
- "url": todo.url,
997
- "request_details": todo.request_details,
998
- }
999
- for todo in pending_items
1000
- ]
1001
- else:
1002
- ctx.pop("todos", None)
1003
-
1004
- log_name = _release_log_name(release.package.name, release.version)
1005
- if ctx.get("log") != log_name:
1006
- ctx = {
1007
- "step": 0,
1008
- "log": log_name,
1009
- "started": ctx.get("started", False),
1010
- }
1011
- step_count = 0
1012
- log_path = Path("logs") / log_name
1013
- ctx.setdefault("log", log_name)
1014
- ctx.setdefault("paused", False)
1015
-
1016
- if (
1017
- ctx.get("started")
1018
- and step_count == 0
1019
- and (step_param is None or step_param == "0")
1020
- ):
1021
- if log_path.exists():
1022
- log_path.unlink()
1023
-
1024
- steps = PUBLISH_STEPS
1025
- fixtures_step_index = next(
1026
- (
1027
- index
1028
- for index, (name, _) in enumerate(steps)
1029
- if name == FIXTURE_REVIEW_STEP_NAME
1030
- ),
1031
- None,
1032
- )
1033
- error = ctx.get("error")
1034
-
1035
- if (
1036
- ctx.get("started")
1037
- and not ctx.get("paused")
1038
- and step_param is not None
1039
- and not error
1040
- and step_count < len(steps)
1041
- ):
1042
- to_run = int(step_param)
1043
- if to_run == step_count:
1044
- name, func = steps[to_run]
1045
- try:
1046
- func(release, ctx, log_path)
1047
- except PendingTodos:
1048
- pass
1049
- except ApprovalRequired:
1050
- pass
1051
- except Exception as exc: # pragma: no cover - best effort logging
1052
- _append_log(log_path, f"{name} failed: {exc}")
1053
- ctx["error"] = str(exc)
1054
- request.session[session_key] = ctx
1055
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1056
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1057
- else:
1058
- step_count += 1
1059
- ctx["step"] = step_count
1060
- request.session[session_key] = ctx
1061
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1062
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1063
-
1064
- done = step_count >= len(steps) and not ctx.get("error")
1065
-
1066
- show_log = ctx.get("started") or step_count > 0 or done or ctx.get("error")
1067
- if show_log and log_path.exists():
1068
- log_content = log_path.read_text(encoding="utf-8")
1069
- else:
1070
- log_content = ""
1071
- next_step = (
1072
- step_count
1073
- if ctx.get("started")
1074
- and not ctx.get("paused")
1075
- and not done
1076
- and not ctx.get("error")
1077
- else None
1078
- )
1079
- has_pending_todos = bool(ctx.get("todos") and not ctx.get("todos_ack"))
1080
- if has_pending_todos:
1081
- next_step = None
1082
- awaiting_approval = bool(ctx.get("awaiting_approval"))
1083
- approval_credentials_missing = bool(ctx.get("approval_credentials_missing"))
1084
- if awaiting_approval:
1085
- next_step = None
1086
- if approval_credentials_missing:
1087
- next_step = None
1088
- paused = ctx.get("paused", False)
1089
-
1090
- step_names = [s[0] for s in steps]
1091
- approval_credentials_ready = credentials_ready
1092
- credentials_blocking = approval_credentials_missing or (
1093
- awaiting_approval and not approval_credentials_ready
1094
- )
1095
- step_states = []
1096
- for index, name in enumerate(step_names):
1097
- if index < step_count:
1098
- status = "complete"
1099
- icon = "✅"
1100
- label = _("Completed")
1101
- elif error and index == step_count:
1102
- status = "error"
1103
- icon = "❌"
1104
- label = _("Failed")
1105
- elif paused and ctx.get("started") and index == step_count and not done:
1106
- status = "paused"
1107
- icon = "⏸️"
1108
- label = _("Paused")
1109
- elif (
1110
- has_pending_todos
1111
- and ctx.get("started")
1112
- and index == step_count
1113
- and not done
1114
- ):
1115
- status = "blocked"
1116
- icon = "📝"
1117
- label = _("Awaiting checklist")
1118
- elif (
1119
- credentials_blocking
1120
- and ctx.get("started")
1121
- and index == step_count
1122
- and not done
1123
- ):
1124
- status = "missing-credentials"
1125
- icon = "🔐"
1126
- label = _("Credentials required")
1127
- elif (
1128
- awaiting_approval
1129
- and approval_credentials_ready
1130
- and ctx.get("started")
1131
- and index == step_count
1132
- and not done
1133
- ):
1134
- status = "awaiting-approval"
1135
- icon = "🤝"
1136
- label = _("Awaiting approval")
1137
- elif ctx.get("started") and index == step_count and not done:
1138
- status = "active"
1139
- icon = "⏳"
1140
- label = _("In progress")
1141
- else:
1142
- status = "pending"
1143
- icon = "⬜"
1144
- label = _("Pending")
1145
- step_states.append(
1146
- {
1147
- "index": index + 1,
1148
- "name": name,
1149
- "status": status,
1150
- "icon": icon,
1151
- "label": label,
1152
- }
1153
- )
1154
-
1155
- is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
1156
- can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
1157
- release_manager_owner = manager.owner_display() if manager else ""
1158
- try:
1159
- current_user_admin_url = reverse(
1160
- "admin:teams_user_change", args=[request.user.pk]
1161
- )
1162
- except NoReverseMatch:
1163
- current_user_admin_url = reverse(
1164
- "admin:core_user_change", args=[request.user.pk]
1165
- )
1166
-
1167
- fixtures_summary = ctx.get("fixtures")
1168
- if (
1169
- fixtures_summary
1170
- and fixtures_step_index is not None
1171
- and step_count > fixtures_step_index
1172
- ):
1173
- fixtures_summary = None
1174
-
1175
- context = {
1176
- "release": release,
1177
- "action": "publish",
1178
- "steps": step_names,
1179
- "current_step": step_count,
1180
- "next_step": next_step,
1181
- "done": done,
1182
- "error": ctx.get("error"),
1183
- "log_content": log_content,
1184
- "log_path": str(log_path),
1185
- "cert_log": ctx.get("cert_log"),
1186
- "fixtures": fixtures_summary,
1187
- "todos": ctx.get("todos"),
1188
- "restart_count": restart_count,
1189
- "started": ctx.get("started", False),
1190
- "paused": paused,
1191
- "show_log": show_log,
1192
- "step_states": step_states,
1193
- "has_pending_todos": has_pending_todos,
1194
- "awaiting_approval": awaiting_approval,
1195
- "approval_credentials_missing": approval_credentials_missing,
1196
- "approval_credentials_ready": approval_credentials_ready,
1197
- "release_manager_owner": release_manager_owner,
1198
- "has_release_manager": bool(manager),
1199
- "current_user_admin_url": current_user_admin_url,
1200
- "is_running": is_running,
1201
- "can_resume": can_resume,
1202
- }
1203
- request.session[session_key] = ctx
1204
- if done or ctx.get("error"):
1205
- if lock_path.exists():
1206
- lock_path.unlink()
1207
- else:
1208
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1209
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1210
- template = get_template("core/release_progress.html")
1211
- content = template.render(context, request)
1212
- signals.template_rendered.send(
1213
- sender=template.__class__,
1214
- template=template,
1215
- context=context,
1216
- using=getattr(getattr(template, "engine", None), "name", None),
1217
- )
1218
- response = HttpResponse(content)
1219
- response.context = context
1220
- response.templates = [template]
1221
- return response
1222
-
1223
-
1224
- def _dedupe_preserve_order(values):
1225
- seen = set()
1226
- result = []
1227
- for value in values:
1228
- if value in seen:
1229
- continue
1230
- seen.add(value)
1231
- result.append(value)
1232
- return result
1233
-
1234
-
1235
- def _parse_todo_auth_directives(query: str):
1236
- directives = {
1237
- "require_logout": False,
1238
- "users": [],
1239
- "permissions": [],
1240
- "notes": [],
1241
- }
1242
- if not query:
1243
- return "", directives
1244
-
1245
- remaining = []
1246
- for key, value in parse_qsl(query, keep_blank_values=True):
1247
- if key != "_todo_auth":
1248
- remaining.append((key, value))
1249
- continue
1250
- token = (value or "").strip()
1251
- if not token:
1252
- continue
1253
- kind, _, payload = token.partition(":")
1254
- kind = kind.strip().lower()
1255
- payload = payload.strip()
1256
- if kind in {"logout", "anonymous", "anon"}:
1257
- directives["require_logout"] = True
1258
- elif kind in {"user", "username"} and payload:
1259
- directives["users"].append(payload)
1260
- elif kind in {"perm", "permission"} and payload:
1261
- directives["permissions"].append(payload)
1262
- else:
1263
- directives["notes"].append(token)
1264
-
1265
- sanitized_query = urlencode(remaining, doseq=True)
1266
- return sanitized_query, directives
1267
-
1268
-
1269
- def _todo_iframe_url(request, todo: Todo):
1270
- """Return a safe iframe URL and auth context for ``todo``."""
1271
-
1272
- fallback = reverse("admin:core_todo_change", args=[todo.pk])
1273
- raw_url = (todo.url or "").strip()
1274
-
1275
- auth_context = {
1276
- "require_logout": False,
1277
- "users": [],
1278
- "permissions": [],
1279
- "notes": [],
1280
- }
1281
-
1282
- def _final_context(target_url: str):
1283
- return {
1284
- "target_url": target_url or fallback,
1285
- "require_logout": auth_context["require_logout"],
1286
- "users": _dedupe_preserve_order(auth_context["users"]),
1287
- "permissions": _dedupe_preserve_order(auth_context["permissions"]),
1288
- "notes": _dedupe_preserve_order(auth_context["notes"]),
1289
- "has_requirements": bool(
1290
- auth_context["require_logout"]
1291
- or auth_context["users"]
1292
- or auth_context["permissions"]
1293
- or auth_context["notes"]
1294
- ),
1295
- }
1296
-
1297
- if not raw_url:
1298
- return fallback, _final_context(fallback)
1299
-
1300
- focus_path = reverse("todo-focus", args=[todo.pk])
1301
- focus_norm = focus_path.strip("/").lower()
1302
-
1303
- def _is_focus_target(target: str) -> bool:
1304
- if not target:
1305
- return False
1306
- parsed_target = urlsplit(target)
1307
- path = parsed_target.path
1308
- if not path and not parsed_target.scheme and not parsed_target.netloc:
1309
- path = target.split("?", 1)[0].split("#", 1)[0]
1310
- normalized = path.strip("/").lower()
1311
- return normalized == focus_norm if normalized else False
1312
-
1313
- if _is_focus_target(raw_url):
1314
- return fallback, _final_context(fallback)
1315
-
1316
- parsed = urlsplit(raw_url)
1317
-
1318
- def _merge_directives(parsed_result):
1319
- sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
1320
- if directives["require_logout"]:
1321
- auth_context["require_logout"] = True
1322
- auth_context["users"].extend(directives["users"])
1323
- auth_context["permissions"].extend(directives["permissions"])
1324
- auth_context["notes"].extend(directives["notes"])
1325
- return parsed_result._replace(query=sanitized_query)
1326
-
1327
- if not parsed.scheme and not parsed.netloc:
1328
- sanitized = _merge_directives(parsed)
1329
- path = sanitized.path or "/"
1330
- if not path.startswith("/"):
1331
- path = f"/{path}"
1332
- relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
1333
- if _is_focus_target(relative_url):
1334
- return fallback, _final_context(fallback)
1335
- return relative_url or fallback, _final_context(relative_url)
1336
-
1337
- if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
1338
- return fallback, _final_context(fallback)
1339
-
1340
- request_host = request.get_host().strip().lower()
1341
- host_without_port = request_host.split(":", 1)[0]
1342
- allowed_hosts = {
1343
- request_host,
1344
- host_without_port,
1345
- "localhost",
1346
- "127.0.0.1",
1347
- "0.0.0.0",
1348
- "::1",
1349
- }
1350
-
1351
- site_domain = ""
1352
- try:
1353
- site_domain = Site.objects.get_current().domain.strip().lower()
1354
- except Site.DoesNotExist:
1355
- site_domain = ""
1356
- if site_domain:
1357
- allowed_hosts.add(site_domain)
1358
- allowed_hosts.add(site_domain.split(":", 1)[0])
1359
-
1360
- for host in getattr(settings, "ALLOWED_HOSTS", []):
1361
- if not isinstance(host, str):
1362
- continue
1363
- normalized = host.strip().lower()
1364
- if not normalized or normalized.startswith("*"):
1365
- continue
1366
- allowed_hosts.add(normalized)
1367
- allowed_hosts.add(normalized.split(":", 1)[0])
1368
-
1369
- hostname = (parsed.hostname or "").strip().lower()
1370
- netloc = parsed.netloc.strip().lower()
1371
- if hostname in allowed_hosts or netloc in allowed_hosts:
1372
- sanitized = _merge_directives(parsed)
1373
- path = sanitized.path or "/"
1374
- if not path.startswith("/"):
1375
- path = f"/{path}"
1376
- relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
1377
- if _is_focus_target(relative_url):
1378
- return fallback, _final_context(fallback)
1379
- return relative_url or fallback, _final_context(relative_url)
1380
-
1381
- return fallback, _final_context(fallback)
1382
-
1383
-
1384
- @staff_member_required
1385
- def todo_focus(request, pk: int):
1386
- todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
1387
- if todo.done_on:
1388
- return redirect(_get_return_url(request))
1389
-
1390
- iframe_url, focus_auth = _todo_iframe_url(request, todo)
1391
- focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
1392
- context = {
1393
- "todo": todo,
1394
- "iframe_url": iframe_url,
1395
- "focus_target_url": focus_target_url,
1396
- "focus_auth": focus_auth,
1397
- "next_url": _get_return_url(request),
1398
- "done_url": reverse("todo-done", args=[todo.pk]),
1399
- }
1400
- return render(request, "core/todo_focus.html", context)
1401
-
1402
-
1403
- @staff_member_required
1404
- @require_POST
1405
- def todo_done(request, pk: int):
1406
- redirect_to = _get_return_url(request)
1407
- try:
1408
- todo = Todo.objects.get(pk=pk, is_deleted=False, done_on__isnull=True)
1409
- except Todo.DoesNotExist:
1410
- return redirect(redirect_to)
1411
- result = todo.check_on_done_condition()
1412
- if not result.passed:
1413
- messages.error(request, _format_condition_failure(todo, result))
1414
- return redirect(redirect_to)
1415
- todo.done_on = timezone.now()
1416
- todo.save(update_fields=["done_on"])
1417
- return redirect(redirect_to)
1
+ import json
2
+ import logging
3
+ import os
4
+ import shutil
5
+ import uuid
6
+ from datetime import datetime, timedelta, timezone as datetime_timezone
7
+
8
+ import requests
9
+ from django.conf import settings
10
+ from django.contrib.admin.sites import site as admin_site
11
+ from django.contrib.admin.views.decorators import staff_member_required
12
+ from django.contrib.auth import authenticate, login
13
+ from django.contrib import messages
14
+ from django.contrib.sites.models import Site
15
+ from django.http import Http404, JsonResponse, HttpResponse
16
+ from django.shortcuts import get_object_or_404, redirect, render, resolve_url
17
+ from django.template.response import TemplateResponse
18
+ from django.utils import timezone
19
+ from django.utils.text import slugify
20
+ from django.utils.translation import gettext as _
21
+ from django.urls import NoReverseMatch, reverse
22
+ from django.views.decorators.csrf import csrf_exempt
23
+ from django.views.decorators.http import require_GET, require_POST
24
+ from django.utils.http import url_has_allowed_host_and_scheme
25
+ from pathlib import Path
26
+ from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
27
+ import errno
28
+ import subprocess
29
+ from typing import Optional, Sequence
30
+
31
+ from django.template.loader import get_template
32
+ from django.test import signals
33
+
34
+ from utils import revision
35
+ from utils.api import api_login_required
36
+
37
+ logger = logging.getLogger(__name__)
38
+
39
+ PYPI_REQUEST_TIMEOUT = 10
40
+
41
+ from . import changelog as changelog_utils
42
+ from .models import OdooProfile, Product, EnergyAccount, PackageRelease, Todo
43
+ from .models import RFID
44
+
45
+
46
+ @staff_member_required
47
+ def odoo_products(request):
48
+ """Return available products from the user's Odoo instance."""
49
+
50
+ profile = getattr(request.user, "odoo_profile", None)
51
+ if not profile or not profile.is_verified:
52
+ raise Http404
53
+ try:
54
+ products = profile.execute(
55
+ "product.product",
56
+ "search_read",
57
+ [[]],
58
+ fields=["name"],
59
+ limit=50,
60
+ )
61
+ except Exception:
62
+ logger.exception(
63
+ "Failed to fetch Odoo products via API for user %s (profile_id=%s, host=%s, database=%s)",
64
+ getattr(request.user, "pk", None),
65
+ getattr(profile, "pk", None),
66
+ getattr(profile, "host", None),
67
+ getattr(profile, "database", None),
68
+ )
69
+ return JsonResponse({"detail": "Unable to fetch products"}, status=502)
70
+ items = [{"id": p.get("id"), "name": p.get("name", "")} for p in products]
71
+ return JsonResponse(items, safe=False)
72
+
73
+
74
+ @staff_member_required
75
+ def odoo_quote_report(request):
76
+ """Display a consolidated quote report from the user's Odoo instance."""
77
+
78
+ profile = getattr(request.user, "odoo_profile", None)
79
+ context = {
80
+ "title": _("Quote Report"),
81
+ "profile": profile,
82
+ "error": None,
83
+ "template_stats": [],
84
+ "quotes": [],
85
+ "recent_products": [],
86
+ "installed_modules": [],
87
+ "profile_url": "",
88
+ }
89
+
90
+ profile_admin = admin_site._registry.get(OdooProfile)
91
+ if profile_admin is not None:
92
+ try:
93
+ context["profile_url"] = profile_admin.get_my_profile_url(request)
94
+ except Exception: # pragma: no cover - defensive fallback
95
+ context["profile_url"] = ""
96
+
97
+ if not profile or not profile.is_verified:
98
+ context["error"] = _(
99
+ "Configure and verify your Odoo employee credentials before generating the report."
100
+ )
101
+ return TemplateResponse(
102
+ request, "admin/core/odoo_quote_report.html", context
103
+ )
104
+
105
+ def _parse_datetime(value):
106
+ if not value:
107
+ return None
108
+ if isinstance(value, datetime):
109
+ dt = value
110
+ else:
111
+ text = str(value)
112
+ try:
113
+ dt = datetime.fromisoformat(text)
114
+ except ValueError:
115
+ text_iso = text.replace(" ", "T")
116
+ try:
117
+ dt = datetime.fromisoformat(text_iso)
118
+ except ValueError:
119
+ for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
120
+ try:
121
+ dt = datetime.strptime(text, fmt)
122
+ break
123
+ except ValueError:
124
+ continue
125
+ else:
126
+ return None
127
+ if timezone.is_naive(dt):
128
+ tzinfo = getattr(timezone, "utc", datetime_timezone.utc)
129
+ dt = timezone.make_aware(dt, tzinfo)
130
+ return dt
131
+
132
+ try:
133
+ templates = profile.execute(
134
+ "sale.order.template",
135
+ "search_read",
136
+ [[]],
137
+ fields=["name"],
138
+ order="name asc",
139
+ )
140
+ template_usage = profile.execute(
141
+ "sale.order",
142
+ "read_group",
143
+ [[("sale_order_template_id", "!=", False)]],
144
+ ["sale_order_template_id"],
145
+ lazy=False,
146
+ )
147
+
148
+ usage_map = {}
149
+ for entry in template_usage:
150
+ template_info = entry.get("sale_order_template_id")
151
+ if not template_info:
152
+ continue
153
+ template_id = template_info[0]
154
+ usage_map[template_id] = entry.get(
155
+ "sale_order_template_id_count", 0
156
+ )
157
+
158
+ context["template_stats"] = [
159
+ {
160
+ "id": template.get("id"),
161
+ "name": template.get("name", ""),
162
+ "quote_count": usage_map.get(template.get("id"), 0),
163
+ }
164
+ for template in templates
165
+ ]
166
+
167
+ ninety_days_ago = timezone.now() - timedelta(days=90)
168
+ quotes = profile.execute(
169
+ "sale.order",
170
+ "search_read",
171
+ [
172
+ [
173
+ ("create_date", ">=", ninety_days_ago.strftime("%Y-%m-%d %H:%M:%S")),
174
+ ("state", "!=", "cancel"),
175
+ ("quote_sent", "=", False),
176
+ ]
177
+ ],
178
+ fields=[
179
+ "name",
180
+ "amount_total",
181
+ "partner_id",
182
+ "activity_type_id",
183
+ "activity_summary",
184
+ "tag_ids",
185
+ "create_date",
186
+ "currency_id",
187
+ ],
188
+ order="create_date desc",
189
+ )
190
+
191
+ tag_ids = set()
192
+ currency_ids = set()
193
+ for quote in quotes:
194
+ tag_ids.update(quote.get("tag_ids") or [])
195
+ currency_info = quote.get("currency_id")
196
+ if (
197
+ isinstance(currency_info, (list, tuple))
198
+ and len(currency_info) >= 1
199
+ and currency_info[0]
200
+ ):
201
+ currency_ids.add(currency_info[0])
202
+
203
+ tag_map: dict[int, str] = {}
204
+ if tag_ids:
205
+ tag_records = profile.execute(
206
+ "sale.order.tag",
207
+ "read",
208
+ list(tag_ids),
209
+ fields=["name"],
210
+ )
211
+ for tag in tag_records:
212
+ tag_id = tag.get("id")
213
+ if tag_id is not None:
214
+ tag_map[tag_id] = tag.get("name", "")
215
+
216
+ currency_map: dict[int, dict[str, str]] = {}
217
+ if currency_ids:
218
+ currency_records = profile.execute(
219
+ "res.currency",
220
+ "read",
221
+ list(currency_ids),
222
+ fields=["name", "symbol"],
223
+ )
224
+ for currency in currency_records:
225
+ currency_id = currency.get("id")
226
+ if currency_id is not None:
227
+ currency_map[currency_id] = {
228
+ "name": currency.get("name", ""),
229
+ "symbol": currency.get("symbol", ""),
230
+ }
231
+
232
+ prepared_quotes = []
233
+ for quote in quotes:
234
+ partner = quote.get("partner_id")
235
+ customer = ""
236
+ if isinstance(partner, (list, tuple)) and len(partner) >= 2:
237
+ customer = partner[1]
238
+
239
+ activity_type = quote.get("activity_type_id")
240
+ activity_name = ""
241
+ if isinstance(activity_type, (list, tuple)) and len(activity_type) >= 2:
242
+ activity_name = activity_type[1]
243
+
244
+ activity_summary = quote.get("activity_summary") or ""
245
+ activity_value = activity_summary or activity_name
246
+
247
+ quote_tags = [
248
+ tag_map.get(tag_id, str(tag_id))
249
+ for tag_id in quote.get("tag_ids") or []
250
+ ]
251
+
252
+ currency_info = quote.get("currency_id")
253
+ currency_label = ""
254
+ if isinstance(currency_info, (list, tuple)) and currency_info:
255
+ currency_id = currency_info[0]
256
+ currency_details = currency_map.get(currency_id, {})
257
+ currency_label = (
258
+ currency_details.get("symbol")
259
+ or currency_details.get("name")
260
+ or (currency_info[1] if len(currency_info) >= 2 else "")
261
+ )
262
+
263
+ amount_total = quote.get("amount_total") or 0
264
+ if currency_label:
265
+ total_display = f"{currency_label}{amount_total:,.2f}"
266
+ else:
267
+ total_display = f"{amount_total:,.2f}"
268
+
269
+ prepared_quotes.append(
270
+ {
271
+ "name": quote.get("name", ""),
272
+ "customer": customer,
273
+ "activity": activity_value,
274
+ "tags": quote_tags,
275
+ "create_date": _parse_datetime(quote.get("create_date")),
276
+ "total": amount_total,
277
+ "total_display": total_display,
278
+ }
279
+ )
280
+
281
+ context["quotes"] = prepared_quotes
282
+
283
+ products = profile.execute(
284
+ "product.product",
285
+ "search_read",
286
+ [[]],
287
+ fields=["name", "default_code", "write_date", "create_date"],
288
+ limit=10,
289
+ order="write_date desc, create_date desc",
290
+ )
291
+ context["recent_products"] = [
292
+ {
293
+ "name": product.get("name", ""),
294
+ "default_code": product.get("default_code", ""),
295
+ "create_date": _parse_datetime(product.get("create_date")),
296
+ "write_date": _parse_datetime(product.get("write_date")),
297
+ }
298
+ for product in products
299
+ ]
300
+
301
+ modules = profile.execute(
302
+ "ir.module.module",
303
+ "search_read",
304
+ [[("state", "=", "installed")]],
305
+ fields=["name", "shortdesc", "latest_version", "author"],
306
+ order="name asc",
307
+ )
308
+ context["installed_modules"] = [
309
+ {
310
+ "name": module.get("name", ""),
311
+ "shortdesc": module.get("shortdesc", ""),
312
+ "latest_version": module.get("latest_version", ""),
313
+ "author": module.get("author", ""),
314
+ }
315
+ for module in modules
316
+ ]
317
+
318
+ except Exception:
319
+ logger.exception(
320
+ "Failed to build Odoo quote report for user %s (profile_id=%s)",
321
+ getattr(request.user, "pk", None),
322
+ getattr(profile, "pk", None),
323
+ )
324
+ context["error"] = _("Unable to generate the quote report from Odoo.")
325
+ return TemplateResponse(
326
+ request,
327
+ "admin/core/odoo_quote_report.html",
328
+ context,
329
+ status=502,
330
+ )
331
+
332
+ return TemplateResponse(request, "admin/core/odoo_quote_report.html", context)
333
+
334
+
335
+ @require_GET
336
+ def version_info(request):
337
+ """Return the running application version and Git revision."""
338
+
339
+ version = ""
340
+ version_path = Path(settings.BASE_DIR) / "VERSION"
341
+ if version_path.exists():
342
+ version = version_path.read_text(encoding="utf-8").strip()
343
+ return JsonResponse(
344
+ {
345
+ "version": version,
346
+ "revision": revision.get_revision(),
347
+ }
348
+ )
349
+
350
+
351
+ from . import release as release_utils
352
+ from .log_paths import select_log_dir
353
+
354
+
355
+ TODO_FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"
356
+
357
+
358
+ DIRTY_COMMIT_DEFAULT_MESSAGE = "chore: commit pending changes"
359
+
360
+
361
+ DIRTY_STATUS_LABELS = {
362
+ "A": _("Added"),
363
+ "C": _("Copied"),
364
+ "D": _("Deleted"),
365
+ "M": _("Modified"),
366
+ "R": _("Renamed"),
367
+ "U": _("Updated"),
368
+ "??": _("Untracked"),
369
+ }
370
+
371
+
372
+ def _append_log(path: Path, message: str) -> None:
373
+ path.parent.mkdir(parents=True, exist_ok=True)
374
+ with path.open("a", encoding="utf-8") as fh:
375
+ fh.write(message + "\n")
376
+
377
+
378
+ def _release_log_name(package_name: str, version: str) -> str:
379
+ return f"pr.{package_name}.v{version}.log"
380
+
381
+
382
+ def _ensure_log_directory(path: Path) -> tuple[bool, OSError | None]:
383
+ """Return whether ``path`` is writable along with the triggering error."""
384
+
385
+ try:
386
+ path.mkdir(parents=True, exist_ok=True)
387
+ except OSError as exc:
388
+ return False, exc
389
+
390
+ probe = path / f".permcheck_{uuid.uuid4().hex}"
391
+ try:
392
+ with probe.open("w", encoding="utf-8") as fh:
393
+ fh.write("")
394
+ except OSError as exc:
395
+ return False, exc
396
+ else:
397
+ try:
398
+ probe.unlink()
399
+ except OSError:
400
+ pass
401
+ return True, None
402
+
403
+
404
def _resolve_release_log_dir(preferred: Path) -> tuple[Path, str | None]:
    """Return a writable log directory for the release publish flow.

    Tries ``preferred`` first; on failure falls back to the directory chosen
    by :func:`select_log_dir`, keeping ``settings.LOG_DIR`` and the
    ``ARTHEXIS_LOG_DIR`` environment variable in sync.  Returns the chosen
    directory plus an optional warning message; raises the underlying
    ``OSError``/``PermissionError`` when no writable directory exists.
    """

    writable, error = _ensure_log_directory(preferred)
    if writable:
        return preferred, None

    logger.warning(
        "Release log directory %s is not writable: %s", preferred, error
    )

    # Drop any stale env override so select_log_dir() picks a fresh location,
    # then re-export the variable if the fallback differs from the override.
    env_override = os.environ.pop("ARTHEXIS_LOG_DIR", None)
    fallback = select_log_dir(Path(settings.BASE_DIR))
    if env_override and Path(env_override) != fallback:
        os.environ["ARTHEXIS_LOG_DIR"] = str(fallback)

    if fallback == preferred:
        # No alternative available; surface the original failure.
        if error:
            raise error
        raise PermissionError(f"Release log directory {preferred} is not writable")

    fallback_writable, fallback_error = _ensure_log_directory(fallback)
    if not fallback_writable:
        raise fallback_error or PermissionError(
            f"Release log directory {fallback} is not writable"
        )

    # Keep Django settings pointed at the directory actually in use.
    settings.LOG_DIR = fallback
    warning = (
        f"Release log directory {preferred} is not writable; using {fallback}"
    )
    logger.warning(warning)
    return fallback, warning
437
+
438
+
439
def _sync_with_origin_main(log_path: Path) -> None:
    """Ensure the current branch is rebased onto ``origin/main``.

    Skips silently when no ``origin`` remote exists.  On rebase failure the
    rebase is aborted, recovery instructions are appended to *log_path*, and
    a generic ``Exception`` is raised (chained to the git error).
    """

    if not _has_remote("origin"):
        _append_log(log_path, "No git remote configured; skipping sync with origin/main")
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        subprocess.run(["git", "rebase", "origin/main"], check=True)
        _append_log(log_path, "Rebased current branch onto origin/main")
    except subprocess.CalledProcessError as exc:
        # Leave the workspace usable before reporting the failure.
        subprocess.run(["git", "rebase", "--abort"], check=False)
        _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")

        stdout = (exc.stdout or "").strip()
        stderr = (exc.stderr or "").strip()
        if stdout:
            _append_log(log_path, "git output:\n" + stdout)
        if stderr:
            _append_log(log_path, "git errors:\n" + stderr)

        branch = _current_branch() or "(detached HEAD)"
        instructions = [
            "Manual intervention required to finish syncing with origin/main.",
            "Ensure you are on the branch you intend to publish (normally `main`; currently "
            f"{branch}).",
            "Then run these commands from the repository root:",
            "    git fetch origin main",
            "    git rebase origin/main",
            "Resolve any conflicts (use `git status` to review files) and continue the rebase.",
        ]

        if branch != "main" and branch != "(detached HEAD)":
            instructions.append(
                "If this branch should mirror main, push the rebased changes with "
                f"`git push origin {branch}:main`."
            )
        else:
            instructions.append("Push the rebased branch with `git push origin main`.")

        instructions.append(
            "If push authentication fails, verify your git remote permissions and SSH keys "
            "for origin/main before retrying the publish flow."
        )
        _append_log(log_path, "\n".join(instructions))

        raise Exception("Rebase onto main failed") from exc
488
+
489
+
490
def _clean_repo() -> None:
    """Discard uncommitted changes and untracked files (best effort)."""

    for args in (["git", "reset", "--hard"], ["git", "clean", "-fd"]):
        subprocess.run(args, check=False)
494
+
495
+
496
+ def _format_path(path: Path) -> str:
497
+ try:
498
+ return str(path.resolve().relative_to(Path.cwd()))
499
+ except ValueError:
500
+ return str(path)
501
+
502
+
503
+ def _git_stdout(args: Sequence[str]) -> str:
504
+ proc = subprocess.run(args, check=True, capture_output=True, text=True)
505
+ return (proc.stdout or "").strip()
506
+
507
+
508
def _has_remote(remote: str) -> bool:
    """Return True when ``git remote`` lists *remote* for the CWD repo."""

    listing = subprocess.run(
        ["git", "remote"],
        check=True,
        capture_output=True,
        text=True,
    )
    configured = [entry.strip() for entry in listing.stdout.splitlines() if entry.strip()]
    return remote in configured
517
+
518
+
519
def _current_branch() -> str | None:
    """Return the checked-out branch name, or None on a detached HEAD."""

    name = _git_stdout(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    return None if name == "HEAD" else name
524
+
525
+
526
def _has_upstream(branch: str) -> bool:
    """Return True when *branch* has an upstream tracking ref configured."""

    probe = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", f"{branch}@{{upstream}}"],
        capture_output=True,
        text=True,
        check=False,
    )
    # git exits non-zero when no upstream is configured.
    return probe.returncode == 0
534
+
535
+
536
def _collect_dirty_files() -> list[dict[str, str]]:
    """Parse ``git status --porcelain`` into path/status dictionaries."""

    status = subprocess.run(
        ["git", "status", "--porcelain"],
        capture_output=True,
        text=True,
        check=True,
    )
    entries: list[dict[str, str]] = []
    for raw in status.stdout.splitlines():
        if not raw.strip():
            continue
        # Columns 0-1 carry the two-character status; column 3 onward the path.
        code = raw[:2]
        normalized = code.strip() or code
        entries.append(
            {
                "path": raw[3:],
                "status": normalized,
                "status_label": DIRTY_STATUS_LABELS.get(normalized, normalized),
            }
        )
    return entries
558
+
559
+
560
def _format_subprocess_error(exc: subprocess.CalledProcessError) -> str:
    """Pick the most informative text available from a failed subprocess."""

    message = exc.stderr or exc.stdout or str(exc)
    message = message.strip()
    return message if message else str(exc)
562
+
563
+
564
def _git_authentication_missing(exc: subprocess.CalledProcessError) -> bool:
    """Heuristically detect git failures caused by missing credentials."""

    output = (exc.stderr or exc.stdout or "").strip().lower()
    if not output:
        return False
    markers = (
        "could not read username",
        "authentication failed",
        "fatal: authentication failed",
        "terminal prompts disabled",
    )
    return any(marker in output for marker in markers)
575
+
576
+
577
def _ensure_origin_main_unchanged(log_path: Path) -> None:
    """Verify that ``origin/main`` has not advanced during the release.

    Fetches ``origin/main`` and requires it to equal the merge-base with
    ``HEAD``; raises otherwise so the flow restarts from a fresh sync.
    Skips when no remote is configured.
    """

    if not _has_remote("origin"):
        _append_log(
            log_path, "No git remote configured; skipping origin/main verification"
        )
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        origin_main = _git_stdout(["git", "rev-parse", "origin/main"])
        merge_base = _git_stdout(["git", "merge-base", "HEAD", "origin/main"])
    except subprocess.CalledProcessError as exc:
        details = (getattr(exc, "stderr", "") or getattr(exc, "stdout", "") or str(exc)).strip()
        if details:
            _append_log(log_path, f"Failed to verify origin/main status: {details}")
        else:  # pragma: no cover - defensive fallback
            _append_log(log_path, "Failed to verify origin/main status")
        raise Exception("Unable to verify origin/main status") from exc

    # If origin/main is not the merge-base, it gained commits HEAD lacks.
    if origin_main != merge_base:
        _append_log(log_path, "origin/main advanced during release; restart required")
        raise Exception("origin/main changed during release; restart required")

    _append_log(log_path, "origin/main unchanged since last sync")
604
+
605
+
606
+ def _next_patch_version(version: str) -> str:
607
+ from packaging.version import InvalidVersion, Version
608
+
609
+ try:
610
+ parsed = Version(version)
611
+ except InvalidVersion:
612
+ parts = version.split(".")
613
+ for index in range(len(parts) - 1, -1, -1):
614
+ segment = parts[index]
615
+ if segment.isdigit():
616
+ parts[index] = str(int(segment) + 1)
617
+ return ".".join(parts)
618
+ return version
619
+ return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
620
+
621
+
622
def _write_todo_fixture(todo: Todo) -> Path:
    """Serialize *todo* to a JSON fixture under ``TODO_FIXTURE_DIR``.

    The filename is built from a slug of the request text (dots are replaced
    with spaces first so they do not leak into the slug); ``todo`` is used
    when the slug comes out empty.  Returns the written fixture path.
    """

    safe_request = todo.request.replace(".", " ")
    slug = slugify(safe_request).replace("-", "_")
    if not slug:
        slug = "todo"
    path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
    path.parent.mkdir(parents=True, exist_ok=True)
    # Django-fixture payload: a single core.todo object.
    data = [
        {
            "model": "core.todo",
            "fields": {
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            },
        }
    ]
    path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
    return path
641
+
642
+
643
+ def _should_use_python_changelog(exc: OSError) -> bool:
644
+ winerror = getattr(exc, "winerror", None)
645
+ if winerror in {193}:
646
+ return True
647
+ return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
648
+
649
+
650
def _generate_changelog_with_python(log_path: Path) -> None:
    """Rebuild CHANGELOG.rst via the in-process generator.

    Used when ``scripts/generate-changelog.sh`` cannot run (missing, not
    executable, or on Windows — see _should_use_python_changelog).
    """

    _append_log(log_path, "Falling back to Python changelog generator")
    changelog_path = Path("CHANGELOG.rst")
    range_spec = changelog_utils.determine_range_spec()
    previous = changelog_path.read_text(encoding="utf-8") if changelog_path.exists() else None
    sections = changelog_utils.collect_sections(range_spec=range_spec, previous_text=previous)
    content = changelog_utils.render_changelog(sections)
    # Keep the file newline-terminated for clean git diffs.
    if not content.endswith("\n"):
        content += "\n"
    changelog_path.write_text(content, encoding="utf-8")
    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
661
+
662
+
663
def _ensure_release_todo(
    release, *, previous_version: str | None = None
) -> tuple[Todo, Path]:
    """Create or refresh the "Create release …" TODO and its fixture.

    The targeted version is normally the next patch after ``release.version``;
    when ``previous_version`` bumped straight into ``release.version`` the
    TODO targets ``release.version`` itself.  Returns the Todo and the path
    of the JSON fixture written for it.
    """

    previous_version = (previous_version or "").strip()
    target_version = _next_patch_version(release.version)
    if previous_version:
        incremented_previous = _next_patch_version(previous_version)
        if incremented_previous == release.version:
            target_version = release.version
    request = f"Create release {release.package.name} {target_version}"
    try:
        url = reverse("admin:core_packagerelease_changelist")
    except NoReverseMatch:
        url = ""
    # Case-insensitive lookup so an existing TODO is revived, not duplicated.
    todo, _ = Todo.all_objects.update_or_create(
        request__iexact=request,
        defaults={
            "request": request,
            "url": url,
            "request_details": "",
            "is_seed_data": True,
            "is_deleted": False,
            "is_user_data": False,
            "done_on": None,
            "on_done_condition": "",
        },
    )
    fixture_path = _write_todo_fixture(todo)
    return todo, fixture_path
692
+
693
+
694
def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
    """Ensure ``release`` matches the repository revision and version.

    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
    ``True`` when any field changed and ``previous_version`` is the version
    before synchronization.
    """

    from packaging.version import InvalidVersion, Version

    previous_version = release.version
    updated_fields: set[str] = set()

    # Parse the repo's VERSION file, tolerating a missing/malformed file.
    repo_version: Version | None = None
    version_path = Path("VERSION")
    if version_path.exists():
        try:
            repo_version = Version(version_path.read_text(encoding="utf-8").strip())
        except InvalidVersion:
            repo_version = None

    try:
        release_version = Version(release.version)
    except InvalidVersion:
        release_version = None

    # The release must be at least one patch ahead of the repo version.
    if repo_version is not None:
        bumped_repo_version = Version(
            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
        )
        if release_version is None or release_version < bumped_repo_version:
            release.version = str(bumped_repo_version)
            release_version = bumped_repo_version
            updated_fields.add("version")

    current_revision = revision.get_revision()
    if current_revision and current_revision != release.revision:
        release.revision = current_revision
        updated_fields.add("revision")

    if updated_fields:
        release.save(update_fields=list(updated_fields))
        PackageRelease.dump_fixture()

    # Re-activate the owning package if it was disabled.
    package_updated = False
    if release.package_id and not release.package.is_active:
        release.package.is_active = True
        release.package.save(update_fields=["is_active"])
        package_updated = True

    # Mirror the release version back into the VERSION file when they differ.
    version_updated = False
    if release.version:
        current = ""
        if version_path.exists():
            current = version_path.read_text(encoding="utf-8").strip()
        if current != release.version:
            version_path.write_text(f"{release.version}\n", encoding="utf-8")
            version_updated = True

    return bool(updated_fields or version_updated or package_updated), previous_version
754
+
755
+
756
def _changelog_notes(version: str) -> str:
    """Extract the CHANGELOG.rst section for *version*, stripped.

    Returns an empty string when the changelog file does not exist.
    """

    changelog = Path("CHANGELOG.rst")
    if not changelog.exists():
        return ""
    text = changelog.read_text(encoding="utf-8")
    return changelog_utils.extract_release_notes(text, version).strip()
764
+
765
+
766
class PendingTodos(Exception):
    """Signals that open TODO items must be acknowledged before proceeding."""


class ApprovalRequired(Exception):
    """Signals that the release manager must approve before continuing."""


class DirtyRepository(Exception):
    """Signals that the Git workspace holds uncommitted changes."""
776
+
777
+
778
def _format_condition_failure(todo: Todo, result) -> str:
    """Return a localized error message for a failed TODO condition.

    ``result`` carries ``resolved`` (the evaluated condition text) and
    ``error`` (an optional failure description); the message combines
    whichever pieces are present.
    """

    if result.error and result.resolved:
        detail = _("%(condition)s (error: %(error)s)") % {
            "condition": result.resolved,
            "error": result.error,
        }
    elif result.error:
        detail = _("Error: %(error)s") % {"error": result.error}
    elif result.resolved:
        detail = result.resolved
    else:
        # Neither detail available: the condition simply evaluated falsy.
        detail = _("Condition evaluated to False")
    return _("Condition failed for %(todo)s: %(detail)s") % {
        "todo": todo.request,
        "detail": detail,
    }
796
+
797
+
798
def _get_return_url(request) -> str:
    """Return a safe URL to redirect back to after completing a TODO.

    Considers ``next`` (query string, then POST body) and the Referer
    header, in that order, accepting only same-host candidates; falls back
    to the admin index.
    """

    candidates = [request.GET.get("next"), request.POST.get("next")]
    referer = request.META.get("HTTP_REFERER")
    if referer:
        candidates.append(referer)

    for candidate in candidates:
        if not candidate:
            continue
        # Reject open-redirects to foreign hosts and scheme downgrades.
        if url_has_allowed_host_and_scheme(
            candidate,
            allowed_hosts={request.get_host()},
            require_https=request.is_secure(),
        ):
            return candidate
    return resolve_url("admin:index")
816
+
817
+
818
def _step_check_todos(release, ctx, log_path: Path) -> None:
    """Publish step: require open TODOs to be acknowledged, then clear them.

    Raises :class:`PendingTodos` (pausing the flow) until ``ctx["todos_ack"]``
    is set; afterwards deletes all open TODOs, removes their fixture files
    and commits the removal (best effort).
    """

    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    if pending_qs.exists():
        ctx["todos"] = list(
            pending_qs.values("id", "request", "url", "request_details")
        )
        if not ctx.get("todos_ack"):
            raise PendingTodos()
    todos = list(Todo.objects.filter(is_deleted=False))
    for todo in todos:
        todo.delete()
    removed = []
    for path in TODO_FIXTURE_DIR.glob("todos__*.json"):
        removed.append(str(path))
        path.unlink()
    if removed:
        # check=False: a failed commit must not block the release flow.
        subprocess.run(["git", "add", *removed], check=False)
        subprocess.run(
            ["git", "commit", "-m", "chore: remove TODO fixtures"],
            check=False,
        )
    ctx.pop("todos", None)
    ctx["todos_ack"] = True
841
+
842
+
843
def _step_check_version(release, ctx, log_path: Path) -> None:
    """Publish step: sync with origin/main and validate the target version.

    Auto-commits fixture-only dirt, raises :class:`DirtyRepository` for any
    other uncommitted changes, rejects versions older than the VERSION file,
    and (when the network is up) rejects versions already on PyPI.
    """

    from . import release as release_utils
    from packaging.version import InvalidVersion, Version

    # Sync errors are deferred: a fixture auto-commit below may clear the
    # dirty state that made the first sync attempt fail.
    sync_error: Optional[Exception] = None
    retry_sync = False
    try:
        _sync_with_origin_main(log_path)
    except Exception as exc:
        sync_error = exc

    if not release_utils._git_clean():
        dirty_entries = _collect_dirty_files()
        files = [entry["path"] for entry in dirty_entries]
        fixture_files = [
            f
            for f in files
            if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
        ]
        if files and len(fixture_files) == len(files):
            # Every dirty file is a JSON fixture: summarize and auto-commit.
            summary = []
            for f in fixture_files:
                path = Path(f)
                try:
                    data = json.loads(path.read_text(encoding="utf-8"))
                except Exception:
                    count = 0
                    models: list[str] = []
                else:
                    if isinstance(data, list):
                        count = len(data)
                        models = sorted(
                            {
                                obj.get("model", "")
                                for obj in data
                                if isinstance(obj, dict)
                            }
                        )
                    elif isinstance(data, dict):
                        count = 1
                        models = [data.get("model", "")]
                    else:  # pragma: no cover - unexpected structure
                        count = 0
                        models = []
                summary.append({"path": f, "count": count, "models": models})

            ctx["fixtures"] = summary
            _append_log(
                log_path,
                "Committing fixture changes: " + ", ".join(fixture_files),
            )
            subprocess.run(["git", "add", *fixture_files], check=True)
            subprocess.run(
                ["git", "commit", "-m", "chore: update fixtures"], check=True
            )
            _append_log(log_path, "Fixture changes committed")
            ctx.pop("dirty_files", None)
            ctx.pop("dirty_commit_error", None)
            retry_sync = True
        else:
            # Mixed/non-fixture dirt: surface it and pause the flow.
            ctx["dirty_files"] = dirty_entries
            ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)
            ctx.pop("fixtures", None)
            ctx.pop("dirty_commit_error", None)
            if dirty_entries:
                details = ", ".join(entry["path"] for entry in dirty_entries)
            else:
                details = ""
            message = "Git repository has uncommitted changes"
            if details:
                message += f": {details}"
            # Only log when the set of dirty files actually changed.
            if ctx.get("dirty_log_message") != message:
                _append_log(log_path, message)
                ctx["dirty_log_message"] = message
            raise DirtyRepository()
    else:
        ctx.pop("dirty_files", None)
        ctx.pop("dirty_commit_error", None)
        ctx.pop("dirty_log_message", None)

    if retry_sync and sync_error is not None:
        try:
            _sync_with_origin_main(log_path)
        except Exception as exc:
            sync_error = exc
        else:
            sync_error = None

    if sync_error is not None:
        raise sync_error

    version_path = Path("VERSION")
    if version_path.exists():
        current = version_path.read_text(encoding="utf-8").strip()
        if current and Version(release.version) < Version(current):
            raise Exception(
                f"Version {release.version} is older than existing {current}"
            )

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(
                f"https://pypi.org/pypi/{release.package.name}/json",
                timeout=PYPI_REQUEST_TIMEOUT,
            )
            if resp.ok:
                data = resp.json()
                releases = data.get("releases", {})
                try:
                    target_version = Version(release.version)
                except InvalidVersion:
                    target_version = None

                # NOTE: the loop variable `files` shadows the earlier dirty
                # `files` list; harmless here since that list is done with.
                for candidate, files in releases.items():
                    same_version = candidate == release.version
                    if target_version is not None and not same_version:
                        try:
                            same_version = Version(candidate) == target_version
                        except InvalidVersion:
                            same_version = False
                    if not same_version:
                        continue

                    # A release counts as published only with non-yanked files.
                    has_available_files = any(
                        isinstance(file_data, dict)
                        and not file_data.get("yanked", False)
                        for file_data in files or []
                    )
                    if has_available_files:
                        raise Exception(
                            f"Version {release.version} already on PyPI"
                        )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
        else:
            _append_log(
                log_path,
                f"Version {release.version} not published on PyPI",
            )
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")
988
+
989
+
990
def _step_handle_migrations(release, ctx, log_path: Path) -> None:
    """Publish step: record the manual migration review checkpoint."""

    for note in (
        "Freeze, squash and approve migrations",
        "Migration review acknowledged (manual step)",
    ):
        _append_log(log_path, note)
993
+
994
+
995
def _step_changelog_docs(release, ctx, log_path: Path) -> None:
    """Publish step: record the manual CHANGELOG/documentation review."""

    for note in (
        "Compose CHANGELOG and documentation",
        "CHANGELOG and documentation review recorded",
    ):
        _append_log(log_path, note)
998
+
999
+
1000
def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
    """Publish step: regenerate the changelog, bump VERSION and seed a TODO.

    Skipped entirely on dry runs.  Commits VERSION/CHANGELOG/fixture changes
    when anything actually changed, otherwise unstages them again; finally
    creates (and commits) the "Create release …" TODO fixture.
    """

    _append_log(log_path, "Execute pre-release actions")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping pre-release actions")
        return
    _sync_with_origin_main(log_path)
    try:
        subprocess.run(["scripts/generate-changelog.sh"], check=True)
    except OSError as exc:
        # Shell script unusable (missing/not executable/Windows): fall back
        # to the pure-Python generator when the error class allows it.
        if _should_use_python_changelog(exc):
            _append_log(
                log_path,
                f"scripts/generate-changelog.sh failed: {exc}",
            )
            _generate_changelog_with_python(log_path)
        else:  # pragma: no cover - unexpected OSError
            raise
    else:
        _append_log(
            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
        )
    notes = _changelog_notes(release.version)
    staged_release_fixtures: list[Path] = []
    if notes != release.changelog:
        release.changelog = notes
        release.save(update_fields=["changelog"])
        PackageRelease.dump_fixture()
        _append_log(log_path, f"Recorded changelog notes for v{release.version}")
        release_fixture_paths = sorted(
            Path("core/fixtures").glob("releases__*.json")
        )
        if release_fixture_paths:
            subprocess.run(
                ["git", "add", *[str(path) for path in release_fixture_paths]],
                check=True,
            )
            staged_release_fixtures = release_fixture_paths
            formatted = ", ".join(_format_path(path) for path in release_fixture_paths)
            _append_log(
                log_path,
                "Staged release fixtures " + formatted,
            )
    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
    _append_log(log_path, "Staged CHANGELOG.rst for commit")
    version_path = Path("VERSION")
    previous_version_text = ""
    if version_path.exists():
        previous_version_text = version_path.read_text(encoding="utf-8").strip()
    # Prefer the version captured before the origin/main sync, if recorded.
    repo_version_before_sync = getattr(
        release, "_repo_version_before_sync", previous_version_text
    )
    version_path.write_text(f"{release.version}\n", encoding="utf-8")
    _append_log(log_path, f"Updated VERSION file to {release.version}")
    subprocess.run(["git", "add", "VERSION"], check=True)
    _append_log(log_path, "Staged VERSION for commit")
    # Non-zero exit means the index differs from HEAD, i.e. there is
    # something staged worth committing.
    diff = subprocess.run(["git", "diff", "--cached", "--quiet"], check=False)
    if diff.returncode != 0:
        subprocess.run(
            ["git", "commit", "-m", f"pre-release commit {release.version}"],
            check=True,
        )
        _append_log(log_path, f"Committed VERSION update for {release.version}")
    else:
        _append_log(
            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
        )
        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
        _append_log(log_path, "Unstaged CHANGELOG.rst")
        subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
        _append_log(log_path, "Unstaged VERSION file")
        for path in staged_release_fixtures:
            subprocess.run(["git", "reset", "HEAD", str(path)], check=False)
            _append_log(log_path, f"Unstaged release fixture {_format_path(path)}")
    todo, fixture_path = _ensure_release_todo(
        release, previous_version=repo_version_before_sync
    )
    fixture_display = _format_path(fixture_path)
    _append_log(log_path, f"Added TODO: {todo.request}")
    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
    subprocess.run(["git", "add", str(fixture_path)], check=True)
    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
    fixture_diff = subprocess.run(
        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
        check=False,
    )
    if fixture_diff.returncode != 0:
        commit_message = f"chore: add release TODO for {release.package.name}"
        subprocess.run(["git", "commit", "-m", commit_message], check=True)
        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
    else:
        _append_log(
            log_path,
            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
        )
    _append_log(log_path, "Pre-release actions complete")
1095
+
1096
+
1097
def _step_run_tests(release, ctx, log_path: Path) -> None:
    """Publish step: record that the full test suite run was acknowledged."""

    for note in (
        "Complete test suite with --all flag",
        "Test suite completion acknowledged",
    ):
        _append_log(log_path, note)
1100
+
1101
+
1102
def _step_promote_build(release, ctx, log_path: Path) -> None:
    """Publish step: build release artifacts and push metadata to origin.

    Skipped on dry runs.  On any failure the workspace is hard-reset via
    :func:`_clean_repo` before re-raising.  Finishes by renaming the log
    file to the canonical ``pr.<package>.v<version>.log`` name.
    """

    from . import release as release_utils

    _append_log(log_path, "Generating build files")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping build promotion")
        return
    try:
        # Abort early if origin/main moved since the last sync.
        _ensure_origin_main_unchanged(log_path)
        release_utils.promote(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(),
        )
        _append_log(
            log_path,
            f"Generated release artifacts for v{release.version}",
        )
        from glob import glob

        paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
        diff = subprocess.run(
            ["git", "status", "--porcelain", *paths],
            capture_output=True,
            text=True,
        )
        if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
                    "commit",
                    "-m",
                    f"chore: update release metadata for v{release.version}",
                ],
                check=True,
            )
            _append_log(
                log_path,
                f"Committed release metadata for v{release.version}",
            )
        if _has_remote("origin"):
            try:
                branch = _current_branch()
                if branch is None:
                    # Detached HEAD: push the current commit explicitly.
                    push_cmd = ["git", "push", "origin", "HEAD"]
                elif _has_upstream(branch):
                    push_cmd = ["git", "push"]
                else:
                    push_cmd = ["git", "push", "--set-upstream", "origin", branch]
                subprocess.run(push_cmd, check=True, capture_output=True, text=True)
            except subprocess.CalledProcessError as exc:
                details = _format_subprocess_error(exc)
                if _git_authentication_missing(exc):
                    # Missing credentials are tolerated: log and continue.
                    _append_log(
                        log_path,
                        "Authentication is required to push release changes to origin; skipping push",
                    )
                    if details:
                        _append_log(log_path, details)
                else:
                    _append_log(
                        log_path, f"Failed to push release changes to origin: {details}"
                    )
                    raise Exception("Failed to push release changes") from exc
            else:
                _append_log(log_path, "Pushed release changes to origin")
        else:
            _append_log(
                log_path,
                "No git remote configured; skipping push of release changes",
            )
        PackageRelease.dump_fixture()
        _append_log(log_path, "Updated release fixtures")
    except Exception:
        _clean_repo()
        raise
    # Rename the in-progress log to its final, version-specific name.
    target_name = _release_log_name(release.package.name, release.version)
    new_log = log_path.with_name(target_name)
    if log_path != new_log:
        if new_log.exists():
            new_log.unlink()
        log_path.rename(new_log)
    else:
        new_log = log_path
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")
1190
+
1191
+
1192
def _step_release_manager_approval(release, ctx, log_path: Path) -> None:
    """Publish step: gate the flow on an explicit manager decision.

    Raises :class:`ApprovalRequired` while waiting (or while publishing
    credentials are missing); returns on approval; raises ``RuntimeError``
    on rejection so the whole flow restarts.
    """

    # Without publish credentials an approval would be meaningless; keep
    # the step parked until credentials appear.
    if release.to_credentials() is None:
        ctx.pop("release_approval", None)
        if not ctx.get("approval_credentials_missing"):
            _append_log(log_path, "Release manager publishing credentials missing")
            ctx["approval_credentials_missing"] = True
        ctx["awaiting_approval"] = True
        raise ApprovalRequired()

    missing_before = ctx.pop("approval_credentials_missing", None)
    if missing_before:
        ctx.pop("awaiting_approval", None)
    decision = ctx.get("release_approval")
    if decision == "approved":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager approved release")
        return
    if decision == "rejected":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager rejected release")
        raise RuntimeError(
            _("Release manager rejected the release. Restart required."),
        )
    # No decision yet: log the wait only on the first pass.
    if not ctx.get("awaiting_approval"):
        ctx["awaiting_approval"] = True
        _append_log(log_path, "Awaiting release manager approval")
    else:
        ctx["awaiting_approval"] = True
    raise ApprovalRequired()
1225
+
1226
+
1227
def _step_publish(release, ctx, log_path: Path) -> None:
    """Publish step: upload the built distribution and record its URLs.

    Dry runs upload to Test PyPI (building artifacts first if ``dist/`` is
    missing, then restoring VERSION/pyproject.toml) and skip metadata
    updates.  Real runs publish to all configured targets, then store the
    PyPI URL, optional GitHub URL and release timestamp on the model.
    """

    from . import release as release_utils

    if ctx.get("dry_run"):
        test_repository_url = os.environ.get(
            "PYPI_TEST_REPOSITORY_URL", "https://test.pypi.org/legacy/"
        )
        # Prefer the release's own credentials; fall back to the
        # PYPI_TEST_* environment, else upload anonymously (None).
        test_creds = release.to_credentials()
        if not (test_creds and test_creds.has_auth()):
            test_creds = release_utils.Credentials(
                token=os.environ.get("PYPI_TEST_API_TOKEN"),
                username=os.environ.get("PYPI_TEST_USERNAME"),
                password=os.environ.get("PYPI_TEST_PASSWORD"),
            )
            if not test_creds.has_auth():
                test_creds = None
        target = release_utils.RepositoryTarget(
            name="Test PyPI",
            repository_url=(test_repository_url or None),
            credentials=test_creds,
            verify_availability=False,
        )
        label = target.repository_url or target.name
        dist_path = Path("dist")
        if not dist_path.exists():
            _append_log(log_path, "Dry run: building distribution artifacts")
            package = release.to_package()
            version_path = (
                Path(package.version_path)
                if package.version_path
                else Path("VERSION")
            )
            # Snapshot files the build mutates so they can be restored below.
            original_version = (
                version_path.read_text(encoding="utf-8")
                if version_path.exists()
                else None
            )
            pyproject_path = Path("pyproject.toml")
            original_pyproject = (
                pyproject_path.read_text(encoding="utf-8")
                if pyproject_path.exists()
                else None
            )
            try:
                release_utils.build(
                    package=package,
                    version=release.version,
                    creds=release.to_credentials(),
                    dist=True,
                    tests=False,
                    twine=False,
                    git=False,
                    tag=False,
                    stash=True,
                )
            except release_utils.ReleaseError as exc:
                _append_log(
                    log_path,
                    f"Dry run: failed to prepare distribution artifacts ({exc})",
                )
                raise
            finally:
                # Restore (or remove) VERSION and pyproject.toml regardless
                # of whether the build succeeded.
                if original_version is None:
                    if version_path.exists():
                        version_path.unlink()
                else:
                    version_path.write_text(original_version, encoding="utf-8")
                if original_pyproject is None:
                    if pyproject_path.exists():
                        pyproject_path.unlink()
                else:
                    pyproject_path.write_text(original_pyproject, encoding="utf-8")
        _append_log(log_path, f"Dry run: uploading distribution to {label}")
        release_utils.publish(
            package=release.to_package(),
            version=release.version,
            creds=target.credentials or release.to_credentials(),
            repositories=[target],
        )
        _append_log(log_path, "Dry run: skipped release metadata updates")
        return

    targets = release.build_publish_targets()
    repo_labels = []
    for target in targets:
        label = target.name
        if target.repository_url:
            label = f"{label} ({target.repository_url})"
        repo_labels.append(label)
    if repo_labels:
        # NOTE(review): with exactly one target the label is dropped from the
        # log line — possibly intended to read "Uploading distribution to
        # <label>"; confirm before changing.
        _append_log(
            log_path,
            "Uploading distribution" if len(repo_labels) == 1 else "Uploading distribution to: " + ", ".join(repo_labels),
        )
    else:
        _append_log(log_path, "Uploading distribution")
    release_utils.publish(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
        repositories=targets,
    )
    release.pypi_url = (
        f"https://pypi.org/project/{release.package.name}/{release.version}/"
    )
    # Secondary targets pointing at github.com imply a GitHub package page.
    github_url = ""
    for target in targets[1:]:
        if target.repository_url and "github.com" in target.repository_url:
            github_url = release.github_package_url() or ""
            break
    if github_url:
        release.github_url = github_url
    else:
        release.github_url = ""
    release.release_on = timezone.now()
    release.save(update_fields=["pypi_url", "github_url", "release_on"])
    PackageRelease.dump_fixture()
    _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
    if release.github_url:
        _append_log(log_path, f"Recorded GitHub URL: {release.github_url}")
    _append_log(log_path, "Upload complete")
1348
+
1349
+
1350
+ FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"
1351
+
1352
+
1353
+ PUBLISH_STEPS = [
1354
+ ("Check version number availability", _step_check_version),
1355
+ ("Confirm release TODO completion", _step_check_todos),
1356
+ (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
1357
+ ("Compose CHANGELOG and documentation", _step_changelog_docs),
1358
+ ("Execute pre-release actions", _step_pre_release_actions),
1359
+ ("Build release artifacts", _step_promote_build),
1360
+ ("Complete test suite with --all flag", _step_run_tests),
1361
+ ("Get Release Manager Approval", _step_release_manager_approval),
1362
+ ("Upload final build to PyPI", _step_publish),
1363
+ ]
1364
+
1365
+
1366
+ @csrf_exempt
1367
+ def rfid_login(request):
1368
+ """Authenticate a user using an RFID."""
1369
+
1370
+ if request.method != "POST":
1371
+ return JsonResponse({"detail": "POST required"}, status=400)
1372
+
1373
+ try:
1374
+ data = json.loads(request.body.decode())
1375
+ except json.JSONDecodeError:
1376
+ data = request.POST
1377
+
1378
+ rfid = data.get("rfid")
1379
+ if not rfid:
1380
+ return JsonResponse({"detail": "rfid required"}, status=400)
1381
+
1382
+ user = authenticate(request, rfid=rfid)
1383
+ if user is None:
1384
+ return JsonResponse({"detail": "invalid RFID"}, status=401)
1385
+
1386
+ login(request, user)
1387
+ return JsonResponse({"id": user.id, "username": user.username})
1388
+
1389
+
1390
+ @api_login_required
1391
+ def product_list(request):
1392
+ """Return a JSON list of products."""
1393
+
1394
+ products = list(
1395
+ Product.objects.values("id", "name", "description", "renewal_period")
1396
+ )
1397
+ return JsonResponse({"products": products})
1398
+
1399
+
1400
+ @csrf_exempt
1401
+ @api_login_required
1402
+ def add_live_subscription(request):
1403
+ """Create a live subscription for an energy account from POSTed JSON."""
1404
+
1405
+ if request.method != "POST":
1406
+ return JsonResponse({"detail": "POST required"}, status=400)
1407
+
1408
+ try:
1409
+ data = json.loads(request.body.decode())
1410
+ except json.JSONDecodeError:
1411
+ data = request.POST
1412
+
1413
+ account_id = data.get("account_id")
1414
+ product_id = data.get("product_id")
1415
+
1416
+ if not account_id or not product_id:
1417
+ return JsonResponse(
1418
+ {"detail": "account_id and product_id required"}, status=400
1419
+ )
1420
+
1421
+ try:
1422
+ product = Product.objects.get(id=product_id)
1423
+ except Product.DoesNotExist:
1424
+ return JsonResponse({"detail": "invalid product"}, status=404)
1425
+
1426
+ try:
1427
+ account = EnergyAccount.objects.get(id=account_id)
1428
+ except EnergyAccount.DoesNotExist:
1429
+ return JsonResponse({"detail": "invalid account"}, status=404)
1430
+
1431
+ start_date = timezone.now().date()
1432
+ account.live_subscription_product = product
1433
+ account.live_subscription_start_date = start_date
1434
+ account.live_subscription_next_renewal = start_date + timedelta(
1435
+ days=product.renewal_period
1436
+ )
1437
+ account.save()
1438
+
1439
+ return JsonResponse({"id": account.id})
1440
+
1441
+
1442
+ @api_login_required
1443
+ def live_subscription_list(request):
1444
+ """Return live subscriptions for the given account_id."""
1445
+
1446
+ account_id = request.GET.get("account_id")
1447
+ if not account_id:
1448
+ return JsonResponse({"detail": "account_id required"}, status=400)
1449
+
1450
+ try:
1451
+ account = EnergyAccount.objects.select_related("live_subscription_product").get(
1452
+ id=account_id
1453
+ )
1454
+ except EnergyAccount.DoesNotExist:
1455
+ return JsonResponse({"detail": "invalid account"}, status=404)
1456
+
1457
+ subs = []
1458
+ product = account.live_subscription_product
1459
+ if product:
1460
+ next_renewal = account.live_subscription_next_renewal
1461
+ if not next_renewal and account.live_subscription_start_date:
1462
+ next_renewal = account.live_subscription_start_date + timedelta(
1463
+ days=product.renewal_period
1464
+ )
1465
+
1466
+ subs.append(
1467
+ {
1468
+ "id": account.id,
1469
+ "product__name": product.name,
1470
+ "next_renewal": next_renewal,
1471
+ }
1472
+ )
1473
+
1474
+ return JsonResponse({"live_subscriptions": subs})
1475
+
1476
+
1477
+ @csrf_exempt
1478
+ @api_login_required
1479
+ def rfid_batch(request):
1480
+ """Export or import RFID tags in batch."""
1481
+
1482
+ if request.method == "GET":
1483
+ color = request.GET.get("color", RFID.BLACK).upper()
1484
+ released = request.GET.get("released")
1485
+ if released is not None:
1486
+ released = released.lower()
1487
+ qs = RFID.objects.all()
1488
+ if color != "ALL":
1489
+ qs = qs.filter(color=color)
1490
+ if released in ("true", "false"):
1491
+ qs = qs.filter(released=(released == "true"))
1492
+ tags = [
1493
+ {
1494
+ "rfid": t.rfid,
1495
+ "custom_label": t.custom_label,
1496
+ "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
1497
+ "external_command": t.external_command,
1498
+ "allowed": t.allowed,
1499
+ "color": t.color,
1500
+ "released": t.released,
1501
+ }
1502
+ for t in qs.order_by("rfid")
1503
+ ]
1504
+ return JsonResponse({"rfids": tags})
1505
+
1506
+ if request.method == "POST":
1507
+ try:
1508
+ data = json.loads(request.body.decode())
1509
+ except json.JSONDecodeError:
1510
+ return JsonResponse({"detail": "invalid JSON"}, status=400)
1511
+
1512
+ tags = data.get("rfids") if isinstance(data, dict) else data
1513
+ if not isinstance(tags, list):
1514
+ return JsonResponse({"detail": "rfids list required"}, status=400)
1515
+
1516
+ count = 0
1517
+ for row in tags:
1518
+ rfid = (row.get("rfid") or "").strip()
1519
+ if not rfid:
1520
+ continue
1521
+ allowed = row.get("allowed", True)
1522
+ energy_accounts = row.get("energy_accounts") or []
1523
+ color = (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
1524
+ released = row.get("released", False)
1525
+ if isinstance(released, str):
1526
+ released = released.lower() == "true"
1527
+ custom_label = (row.get("custom_label") or "").strip()
1528
+ external_command = row.get("external_command")
1529
+ if not isinstance(external_command, str):
1530
+ external_command = ""
1531
+ else:
1532
+ external_command = external_command.strip()
1533
+
1534
+ tag, _ = RFID.objects.update_or_create(
1535
+ rfid=rfid.upper(),
1536
+ defaults={
1537
+ "allowed": allowed,
1538
+ "color": color,
1539
+ "released": released,
1540
+ "custom_label": custom_label,
1541
+ "external_command": external_command,
1542
+ },
1543
+ )
1544
+ if energy_accounts:
1545
+ tag.energy_accounts.set(
1546
+ EnergyAccount.objects.filter(id__in=energy_accounts)
1547
+ )
1548
+ else:
1549
+ tag.energy_accounts.clear()
1550
+ count += 1
1551
+
1552
+ return JsonResponse({"imported": count})
1553
+
1554
+ return JsonResponse({"detail": "GET or POST required"}, status=400)
1555
+
1556
+
1557
+ @staff_member_required
1558
+ def release_progress(request, pk: int, action: str):
1559
+ release = get_object_or_404(PackageRelease, pk=pk)
1560
+ if action != "publish":
1561
+ raise Http404("Unknown action")
1562
+ session_key = f"release_publish_{pk}"
1563
+ lock_path = Path("locks") / f"release_publish_{pk}.json"
1564
+ restart_path = Path("locks") / f"release_publish_{pk}.restarts"
1565
+ log_dir, log_dir_warning = _resolve_release_log_dir(Path(settings.LOG_DIR))
1566
+ log_dir_warning_message = log_dir_warning
1567
+
1568
+ version_path = Path("VERSION")
1569
+ repo_version_before_sync = ""
1570
+ if version_path.exists():
1571
+ repo_version_before_sync = version_path.read_text(encoding="utf-8").strip()
1572
+ setattr(release, "_repo_version_before_sync", repo_version_before_sync)
1573
+
1574
+ if not release.is_current:
1575
+ if release.is_published:
1576
+ raise Http404("Release is not current")
1577
+ updated, previous_version = _sync_release_with_revision(release)
1578
+ if updated:
1579
+ request.session.pop(session_key, None)
1580
+ if lock_path.exists():
1581
+ lock_path.unlink()
1582
+ if restart_path.exists():
1583
+ restart_path.unlink()
1584
+ pattern = f"pr.{release.package.name}.v{previous_version}*.log"
1585
+ for log_file in log_dir.glob(pattern):
1586
+ log_file.unlink()
1587
+ if not release.is_current:
1588
+ raise Http404("Release is not current")
1589
+
1590
+ if request.GET.get("restart"):
1591
+ count = 0
1592
+ if restart_path.exists():
1593
+ try:
1594
+ count = int(restart_path.read_text(encoding="utf-8"))
1595
+ except Exception:
1596
+ count = 0
1597
+ restart_path.parent.mkdir(parents=True, exist_ok=True)
1598
+ restart_path.write_text(str(count + 1), encoding="utf-8")
1599
+ _clean_repo()
1600
+ release.pypi_url = ""
1601
+ release.release_on = None
1602
+ release.save(update_fields=["pypi_url", "release_on"])
1603
+ request.session.pop(session_key, None)
1604
+ if lock_path.exists():
1605
+ lock_path.unlink()
1606
+ pattern = f"pr.{release.package.name}.v{release.version}*.log"
1607
+ for f in log_dir.glob(pattern):
1608
+ f.unlink()
1609
+ return redirect(request.path)
1610
+ ctx = request.session.get(session_key)
1611
+ if ctx is None and lock_path.exists():
1612
+ try:
1613
+ ctx = json.loads(lock_path.read_text(encoding="utf-8"))
1614
+ except Exception:
1615
+ ctx = {"step": 0}
1616
+ if ctx is None:
1617
+ ctx = {"step": 0}
1618
+ if restart_path.exists():
1619
+ restart_path.unlink()
1620
+ if log_dir_warning_message:
1621
+ ctx["log_dir_warning_message"] = log_dir_warning_message
1622
+ else:
1623
+ log_dir_warning_message = ctx.get("log_dir_warning_message")
1624
+
1625
+ steps = PUBLISH_STEPS
1626
+ total_steps = len(steps)
1627
+ step_count = ctx.get("step", 0)
1628
+ started_flag = bool(ctx.get("started"))
1629
+ paused_flag = bool(ctx.get("paused"))
1630
+ error_flag = bool(ctx.get("error"))
1631
+ done_flag = step_count >= total_steps and not error_flag
1632
+ start_enabled = (not started_flag or paused_flag) and not done_flag and not error_flag
1633
+
1634
+ ctx["dry_run"] = bool(ctx.get("dry_run"))
1635
+
1636
+ if request.GET.get("set_dry_run") is not None:
1637
+ if start_enabled:
1638
+ ctx["dry_run"] = bool(request.GET.get("dry_run"))
1639
+ request.session[session_key] = ctx
1640
+ return redirect(request.path)
1641
+
1642
+ manager = release.release_manager or release.package.release_manager
1643
+ credentials_ready = bool(release.to_credentials())
1644
+ if credentials_ready and ctx.get("approval_credentials_missing"):
1645
+ ctx.pop("approval_credentials_missing", None)
1646
+
1647
+ ack_todos_requested = bool(request.GET.get("ack_todos"))
1648
+
1649
+ if request.GET.get("start"):
1650
+ if start_enabled:
1651
+ ctx["dry_run"] = bool(request.GET.get("dry_run"))
1652
+ ctx["started"] = True
1653
+ ctx["paused"] = False
1654
+ if (
1655
+ ctx.get("awaiting_approval")
1656
+ and not ctx.get("approval_credentials_missing")
1657
+ and credentials_ready
1658
+ ):
1659
+ if request.GET.get("approve"):
1660
+ ctx["release_approval"] = "approved"
1661
+ if request.GET.get("reject"):
1662
+ ctx["release_approval"] = "rejected"
1663
+ if request.GET.get("pause") and ctx.get("started"):
1664
+ ctx["paused"] = True
1665
+ restart_count = 0
1666
+ if restart_path.exists():
1667
+ try:
1668
+ restart_count = int(restart_path.read_text(encoding="utf-8"))
1669
+ except Exception:
1670
+ restart_count = 0
1671
+ step_count = ctx.get("step", 0)
1672
+ step_param = request.GET.get("step")
1673
+
1674
+ pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
1675
+ pending_items = list(pending_qs)
1676
+ if ack_todos_requested:
1677
+ if pending_items:
1678
+ failures = []
1679
+ for todo in pending_items:
1680
+ result = todo.check_on_done_condition()
1681
+ if not result.passed:
1682
+ failures.append((todo, result))
1683
+ if failures:
1684
+ ctx.pop("todos_ack", None)
1685
+ for todo, result in failures:
1686
+ messages.error(request, _format_condition_failure(todo, result))
1687
+ else:
1688
+ ctx["todos_ack"] = True
1689
+ else:
1690
+ ctx["todos_ack"] = True
1691
+
1692
+ if pending_items and not ctx.get("todos_ack"):
1693
+ ctx["todos"] = [
1694
+ {
1695
+ "id": todo.pk,
1696
+ "request": todo.request,
1697
+ "url": todo.url,
1698
+ "request_details": todo.request_details,
1699
+ }
1700
+ for todo in pending_items
1701
+ ]
1702
+ else:
1703
+ ctx.pop("todos", None)
1704
+
1705
+ log_name = _release_log_name(release.package.name, release.version)
1706
+ if ctx.get("log") != log_name:
1707
+ ctx = {
1708
+ "step": 0,
1709
+ "log": log_name,
1710
+ "started": ctx.get("started", False),
1711
+ }
1712
+ step_count = 0
1713
+ log_path = log_dir / log_name
1714
+ ctx.setdefault("log", log_name)
1715
+ ctx.setdefault("paused", False)
1716
+ ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)
1717
+
1718
+ dirty_action = request.GET.get("dirty_action")
1719
+ if dirty_action and ctx.get("dirty_files"):
1720
+ if dirty_action == "discard":
1721
+ _clean_repo()
1722
+ remaining = _collect_dirty_files()
1723
+ if remaining:
1724
+ ctx["dirty_files"] = remaining
1725
+ ctx.pop("dirty_commit_error", None)
1726
+ else:
1727
+ ctx.pop("dirty_files", None)
1728
+ ctx.pop("dirty_commit_error", None)
1729
+ ctx.pop("dirty_log_message", None)
1730
+ _append_log(log_path, "Discarded local changes before publish")
1731
+ elif dirty_action == "commit":
1732
+ message = request.GET.get("dirty_message", "").strip()
1733
+ if not message:
1734
+ message = ctx.get("dirty_commit_message") or DIRTY_COMMIT_DEFAULT_MESSAGE
1735
+ ctx["dirty_commit_message"] = message
1736
+ try:
1737
+ subprocess.run(["git", "add", "--all"], check=True)
1738
+ subprocess.run(["git", "commit", "-m", message], check=True)
1739
+ except subprocess.CalledProcessError as exc:
1740
+ ctx["dirty_commit_error"] = _format_subprocess_error(exc)
1741
+ else:
1742
+ ctx.pop("dirty_commit_error", None)
1743
+ remaining = _collect_dirty_files()
1744
+ if remaining:
1745
+ ctx["dirty_files"] = remaining
1746
+ else:
1747
+ ctx.pop("dirty_files", None)
1748
+ ctx.pop("dirty_log_message", None)
1749
+ _append_log(
1750
+ log_path,
1751
+ _("Committed pending changes: %(message)s")
1752
+ % {"message": message},
1753
+ )
1754
+
1755
+ if (
1756
+ ctx.get("started")
1757
+ and step_count == 0
1758
+ and (step_param is None or step_param == "0")
1759
+ ):
1760
+ if log_path.exists():
1761
+ log_path.unlink()
1762
+ ctx.pop("log_dir_warning_logged", None)
1763
+
1764
+ if log_dir_warning_message and not ctx.get("log_dir_warning_logged"):
1765
+ _append_log(log_path, log_dir_warning_message)
1766
+ ctx["log_dir_warning_logged"] = True
1767
+
1768
+ fixtures_step_index = next(
1769
+ (
1770
+ index
1771
+ for index, (name, _) in enumerate(steps)
1772
+ if name == FIXTURE_REVIEW_STEP_NAME
1773
+ ),
1774
+ None,
1775
+ )
1776
+ error = ctx.get("error")
1777
+
1778
+ if (
1779
+ ctx.get("started")
1780
+ and not ctx.get("paused")
1781
+ and step_param is not None
1782
+ and not error
1783
+ and step_count < len(steps)
1784
+ ):
1785
+ to_run = int(step_param)
1786
+ if to_run == step_count:
1787
+ name, func = steps[to_run]
1788
+ try:
1789
+ func(release, ctx, log_path)
1790
+ except PendingTodos:
1791
+ pass
1792
+ except ApprovalRequired:
1793
+ pass
1794
+ except DirtyRepository:
1795
+ pass
1796
+ except Exception as exc: # pragma: no cover - best effort logging
1797
+ _append_log(log_path, f"{name} failed: {exc}")
1798
+ ctx["error"] = str(exc)
1799
+ request.session[session_key] = ctx
1800
+ lock_path.parent.mkdir(parents=True, exist_ok=True)
1801
+ lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1802
+ else:
1803
+ step_count += 1
1804
+ ctx["step"] = step_count
1805
+ request.session[session_key] = ctx
1806
+ lock_path.parent.mkdir(parents=True, exist_ok=True)
1807
+ lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1808
+
1809
+ done = step_count >= len(steps) and not ctx.get("error")
1810
+
1811
+ show_log = ctx.get("started") or step_count > 0 or done or ctx.get("error")
1812
+ if show_log and log_path.exists():
1813
+ log_content = log_path.read_text(encoding="utf-8")
1814
+ else:
1815
+ log_content = ""
1816
+ next_step = (
1817
+ step_count
1818
+ if ctx.get("started")
1819
+ and not ctx.get("paused")
1820
+ and not done
1821
+ and not ctx.get("error")
1822
+ else None
1823
+ )
1824
+ has_pending_todos = bool(ctx.get("todos") and not ctx.get("todos_ack"))
1825
+ if has_pending_todos:
1826
+ next_step = None
1827
+ dirty_files = ctx.get("dirty_files")
1828
+ if dirty_files:
1829
+ next_step = None
1830
+ awaiting_approval = bool(ctx.get("awaiting_approval"))
1831
+ approval_credentials_missing = bool(ctx.get("approval_credentials_missing"))
1832
+ if awaiting_approval:
1833
+ next_step = None
1834
+ if approval_credentials_missing:
1835
+ next_step = None
1836
+ paused = ctx.get("paused", False)
1837
+
1838
+ step_names = [s[0] for s in steps]
1839
+ approval_credentials_ready = credentials_ready
1840
+ credentials_blocking = approval_credentials_missing or (
1841
+ awaiting_approval and not approval_credentials_ready
1842
+ )
1843
+ step_states = []
1844
+ for index, name in enumerate(step_names):
1845
+ if index < step_count:
1846
+ status = "complete"
1847
+ icon = "✅"
1848
+ label = _("Completed")
1849
+ elif error and index == step_count:
1850
+ status = "error"
1851
+ icon = "❌"
1852
+ label = _("Failed")
1853
+ elif paused and ctx.get("started") and index == step_count and not done:
1854
+ status = "paused"
1855
+ icon = "⏸️"
1856
+ label = _("Paused")
1857
+ elif (
1858
+ has_pending_todos
1859
+ and ctx.get("started")
1860
+ and index == step_count
1861
+ and not done
1862
+ ):
1863
+ status = "blocked"
1864
+ icon = "📝"
1865
+ label = _("Awaiting checklist")
1866
+ elif (
1867
+ credentials_blocking
1868
+ and ctx.get("started")
1869
+ and index == step_count
1870
+ and not done
1871
+ ):
1872
+ status = "missing-credentials"
1873
+ icon = "🔐"
1874
+ label = _("Credentials required")
1875
+ elif (
1876
+ awaiting_approval
1877
+ and approval_credentials_ready
1878
+ and ctx.get("started")
1879
+ and index == step_count
1880
+ and not done
1881
+ ):
1882
+ status = "awaiting-approval"
1883
+ icon = "🤝"
1884
+ label = _("Awaiting approval")
1885
+ elif ctx.get("started") and index == step_count and not done:
1886
+ status = "active"
1887
+ icon = "⏳"
1888
+ label = _("In progress")
1889
+ else:
1890
+ status = "pending"
1891
+ icon = "⬜"
1892
+ label = _("Pending")
1893
+ step_states.append(
1894
+ {
1895
+ "index": index + 1,
1896
+ "name": name,
1897
+ "status": status,
1898
+ "icon": icon,
1899
+ "label": label,
1900
+ }
1901
+ )
1902
+
1903
+ is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
1904
+ can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
1905
+ release_manager_owner = manager.owner_display() if manager else ""
1906
+ try:
1907
+ current_user_admin_url = reverse(
1908
+ "admin:teams_user_change", args=[request.user.pk]
1909
+ )
1910
+ except NoReverseMatch:
1911
+ current_user_admin_url = reverse(
1912
+ "admin:core_user_change", args=[request.user.pk]
1913
+ )
1914
+
1915
+ fixtures_summary = ctx.get("fixtures")
1916
+ if (
1917
+ fixtures_summary
1918
+ and fixtures_step_index is not None
1919
+ and step_count > fixtures_step_index
1920
+ ):
1921
+ fixtures_summary = None
1922
+
1923
+ todos_display = ctx.get("todos") if has_pending_todos else None
1924
+
1925
+ dry_run_active = bool(ctx.get("dry_run"))
1926
+ dry_run_toggle_enabled = not is_running and not done and not ctx.get("error")
1927
+
1928
+ context = {
1929
+ "release": release,
1930
+ "action": "publish",
1931
+ "steps": step_names,
1932
+ "current_step": step_count,
1933
+ "next_step": next_step,
1934
+ "done": done,
1935
+ "error": ctx.get("error"),
1936
+ "log_content": log_content,
1937
+ "log_path": str(log_path),
1938
+ "cert_log": ctx.get("cert_log"),
1939
+ "fixtures": fixtures_summary,
1940
+ "todos": todos_display,
1941
+ "dirty_files": dirty_files,
1942
+ "dirty_commit_message": ctx.get("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE),
1943
+ "dirty_commit_error": ctx.get("dirty_commit_error"),
1944
+ "restart_count": restart_count,
1945
+ "started": ctx.get("started", False),
1946
+ "paused": paused,
1947
+ "show_log": show_log,
1948
+ "step_states": step_states,
1949
+ "has_pending_todos": has_pending_todos,
1950
+ "awaiting_approval": awaiting_approval,
1951
+ "approval_credentials_missing": approval_credentials_missing,
1952
+ "approval_credentials_ready": approval_credentials_ready,
1953
+ "release_manager_owner": release_manager_owner,
1954
+ "has_release_manager": bool(manager),
1955
+ "current_user_admin_url": current_user_admin_url,
1956
+ "is_running": is_running,
1957
+ "can_resume": can_resume,
1958
+ "dry_run": dry_run_active,
1959
+ "dry_run_toggle_enabled": dry_run_toggle_enabled,
1960
+ }
1961
+ request.session[session_key] = ctx
1962
+ if done or ctx.get("error"):
1963
+ if lock_path.exists():
1964
+ lock_path.unlink()
1965
+ else:
1966
+ lock_path.parent.mkdir(parents=True, exist_ok=True)
1967
+ lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1968
+ template = get_template("core/release_progress.html")
1969
+ content = template.render(context, request)
1970
+ signals.template_rendered.send(
1971
+ sender=template.__class__,
1972
+ template=template,
1973
+ context=context,
1974
+ using=getattr(getattr(template, "engine", None), "name", None),
1975
+ )
1976
+ response = HttpResponse(content)
1977
+ response.context = context
1978
+ response.templates = [template]
1979
+ return response
1980
+
1981
+
1982
+ def _dedupe_preserve_order(values):
1983
+ seen = set()
1984
+ result = []
1985
+ for value in values:
1986
+ if value in seen:
1987
+ continue
1988
+ seen.add(value)
1989
+ result.append(value)
1990
+ return result
1991
+
1992
+
1993
+ def _parse_todo_auth_directives(query: str):
1994
+ directives = {
1995
+ "require_logout": False,
1996
+ "users": [],
1997
+ "permissions": [],
1998
+ "notes": [],
1999
+ }
2000
+ if not query:
2001
+ return "", directives
2002
+
2003
+ remaining = []
2004
+ for key, value in parse_qsl(query, keep_blank_values=True):
2005
+ if key != "_todo_auth":
2006
+ remaining.append((key, value))
2007
+ continue
2008
+ token = (value or "").strip()
2009
+ if not token:
2010
+ continue
2011
+ kind, _, payload = token.partition(":")
2012
+ kind = kind.strip().lower()
2013
+ payload = payload.strip()
2014
+ if kind in {"logout", "anonymous", "anon"}:
2015
+ directives["require_logout"] = True
2016
+ elif kind in {"user", "username"} and payload:
2017
+ directives["users"].append(payload)
2018
+ elif kind in {"perm", "permission"} and payload:
2019
+ directives["permissions"].append(payload)
2020
+ else:
2021
+ directives["notes"].append(token)
2022
+
2023
+ sanitized_query = urlencode(remaining, doseq=True)
2024
+ return sanitized_query, directives
2025
+
2026
+
2027
+ def _todo_iframe_url(request, todo: Todo):
2028
+ """Return a safe iframe URL and auth context for ``todo``."""
2029
+
2030
+ fallback = reverse("admin:core_todo_change", args=[todo.pk])
2031
+ raw_url = (todo.url or "").strip()
2032
+
2033
+ auth_context = {
2034
+ "require_logout": False,
2035
+ "users": [],
2036
+ "permissions": [],
2037
+ "notes": [],
2038
+ }
2039
+
2040
+ def _final_context(target_url: str):
2041
+ return {
2042
+ "target_url": target_url or fallback,
2043
+ "require_logout": auth_context["require_logout"],
2044
+ "users": _dedupe_preserve_order(auth_context["users"]),
2045
+ "permissions": _dedupe_preserve_order(auth_context["permissions"]),
2046
+ "notes": _dedupe_preserve_order(auth_context["notes"]),
2047
+ "has_requirements": bool(
2048
+ auth_context["require_logout"]
2049
+ or auth_context["users"]
2050
+ or auth_context["permissions"]
2051
+ or auth_context["notes"]
2052
+ ),
2053
+ }
2054
+
2055
+ if not raw_url:
2056
+ return fallback, _final_context(fallback)
2057
+
2058
+ focus_path = reverse("todo-focus", args=[todo.pk])
2059
+ focus_norm = focus_path.strip("/").lower()
2060
+
2061
+ def _is_focus_target(target: str) -> bool:
2062
+ if not target:
2063
+ return False
2064
+ parsed_target = urlsplit(target)
2065
+ path = parsed_target.path
2066
+ if not path and not parsed_target.scheme and not parsed_target.netloc:
2067
+ path = target.split("?", 1)[0].split("#", 1)[0]
2068
+ normalized = path.strip("/").lower()
2069
+ return normalized == focus_norm if normalized else False
2070
+
2071
+ if _is_focus_target(raw_url):
2072
+ return fallback, _final_context(fallback)
2073
+
2074
+ parsed = urlsplit(raw_url)
2075
+
2076
+ def _merge_directives(parsed_result):
2077
+ sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
2078
+ if directives["require_logout"]:
2079
+ auth_context["require_logout"] = True
2080
+ auth_context["users"].extend(directives["users"])
2081
+ auth_context["permissions"].extend(directives["permissions"])
2082
+ auth_context["notes"].extend(directives["notes"])
2083
+ return parsed_result._replace(query=sanitized_query)
2084
+
2085
+ if not parsed.scheme and not parsed.netloc:
2086
+ sanitized = _merge_directives(parsed)
2087
+ path = sanitized.path or "/"
2088
+ if not path.startswith("/"):
2089
+ path = f"/{path}"
2090
+ relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
2091
+ if _is_focus_target(relative_url):
2092
+ return fallback, _final_context(fallback)
2093
+ return relative_url or fallback, _final_context(relative_url)
2094
+
2095
+ if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
2096
+ return fallback, _final_context(fallback)
2097
+
2098
+ request_host = request.get_host().strip().lower()
2099
+ host_without_port = request_host.split(":", 1)[0]
2100
+ allowed_hosts = {
2101
+ request_host,
2102
+ host_without_port,
2103
+ "localhost",
2104
+ "127.0.0.1",
2105
+ "0.0.0.0",
2106
+ "::1",
2107
+ }
2108
+
2109
+ site_domain = ""
2110
+ try:
2111
+ site_domain = Site.objects.get_current().domain.strip().lower()
2112
+ except Site.DoesNotExist:
2113
+ site_domain = ""
2114
+ if site_domain:
2115
+ allowed_hosts.add(site_domain)
2116
+ allowed_hosts.add(site_domain.split(":", 1)[0])
2117
+
2118
+ for host in getattr(settings, "ALLOWED_HOSTS", []):
2119
+ if not isinstance(host, str):
2120
+ continue
2121
+ normalized = host.strip().lower()
2122
+ if not normalized or normalized.startswith("*"):
2123
+ continue
2124
+ allowed_hosts.add(normalized)
2125
+ allowed_hosts.add(normalized.split(":", 1)[0])
2126
+
2127
+ hostname = (parsed.hostname or "").strip().lower()
2128
+ netloc = parsed.netloc.strip().lower()
2129
+ if hostname in allowed_hosts or netloc in allowed_hosts:
2130
+ sanitized = _merge_directives(parsed)
2131
+ path = sanitized.path or "/"
2132
+ if not path.startswith("/"):
2133
+ path = f"/{path}"
2134
+ relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
2135
+ if _is_focus_target(relative_url):
2136
+ return fallback, _final_context(fallback)
2137
+ return relative_url or fallback, _final_context(relative_url)
2138
+
2139
+ return fallback, _final_context(fallback)
2140
+
2141
+
2142
+ @staff_member_required
2143
+ def todo_focus(request, pk: int):
2144
+ todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
2145
+ if todo.done_on:
2146
+ return redirect(_get_return_url(request))
2147
+
2148
+ iframe_url, focus_auth = _todo_iframe_url(request, todo)
2149
+ focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
2150
+ context = {
2151
+ "todo": todo,
2152
+ "iframe_url": iframe_url,
2153
+ "focus_target_url": focus_target_url,
2154
+ "focus_auth": focus_auth,
2155
+ "next_url": _get_return_url(request),
2156
+ "done_url": reverse("todo-done", args=[todo.pk]),
2157
+ }
2158
+ return render(request, "core/todo_focus.html", context)
2159
+
2160
+
2161
+ @staff_member_required
2162
+ @require_POST
2163
+ def todo_done(request, pk: int):
2164
+ redirect_to = _get_return_url(request)
2165
+ try:
2166
+ todo = Todo.objects.get(pk=pk, is_deleted=False, done_on__isnull=True)
2167
+ except Todo.DoesNotExist:
2168
+ return redirect(redirect_to)
2169
+ result = todo.check_on_done_condition()
2170
+ if not result.passed:
2171
+ messages.error(request, _format_condition_failure(todo, result))
2172
+ return redirect(redirect_to)
2173
+ todo.done_on = timezone.now()
2174
+ todo.save(update_fields=["done_on"])
2175
+ return redirect(redirect_to)