arthexis 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis might be problematic. Click here for more details.

Files changed (107) hide show
  1. {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/METADATA +222 -221
  2. arthexis-0.1.14.dist-info/RECORD +109 -0
  3. {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +43 -29
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -25
  9. config/context_processors.py +67 -69
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +25 -25
  14. config/offline.py +49 -49
  15. config/settings.py +691 -716
  16. config/settings_helpers.py +109 -0
  17. config/urls.py +171 -166
  18. config/wsgi.py +17 -17
  19. core/admin.py +3771 -2772
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +151 -151
  22. core/apps.py +356 -272
  23. core/auto_upgrade.py +57 -57
  24. core/backends.py +265 -236
  25. core/changelog.py +342 -0
  26. core/entity.py +133 -133
  27. core/environment.py +61 -61
  28. core/fields.py +168 -168
  29. core/form_fields.py +75 -0
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +178 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +100 -100
  36. core/mailer.py +85 -85
  37. core/middleware.py +91 -91
  38. core/models.py +3609 -2672
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +108 -108
  42. core/release.py +721 -350
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -149
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +315 -315
  47. core/system.py +752 -493
  48. core/tasks.py +408 -394
  49. core/temp_passwords.py +181 -181
  50. core/test_system_info.py +186 -139
  51. core/tests.py +2095 -1511
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +641 -633
  55. core/views.py +2175 -1382
  56. core/widgets.py +213 -51
  57. core/workgroup_urls.py +17 -17
  58. core/workgroup_views.py +94 -94
  59. nodes/admin.py +1720 -898
  60. nodes/apps.py +87 -70
  61. nodes/backends.py +160 -160
  62. nodes/dns.py +203 -203
  63. nodes/feature_checks.py +133 -133
  64. nodes/lcd.py +165 -165
  65. nodes/models.py +1737 -1416
  66. nodes/reports.py +411 -411
  67. nodes/rfid_sync.py +195 -0
  68. nodes/signals.py +18 -0
  69. nodes/tasks.py +46 -46
  70. nodes/tests.py +3810 -2497
  71. nodes/urls.py +15 -13
  72. nodes/utils.py +121 -105
  73. nodes/views.py +683 -451
  74. ocpp/admin.py +948 -804
  75. ocpp/apps.py +25 -25
  76. ocpp/consumers.py +1565 -1342
  77. ocpp/evcs.py +844 -931
  78. ocpp/evcs_discovery.py +158 -158
  79. ocpp/models.py +917 -915
  80. ocpp/reference_utils.py +42 -42
  81. ocpp/routing.py +11 -9
  82. ocpp/simulator.py +745 -724
  83. ocpp/status_display.py +26 -0
  84. ocpp/store.py +601 -541
  85. ocpp/tasks.py +31 -31
  86. ocpp/test_export_import.py +130 -130
  87. ocpp/test_rfid.py +913 -702
  88. ocpp/tests.py +4445 -3485
  89. ocpp/transactions_io.py +189 -179
  90. ocpp/urls.py +50 -50
  91. ocpp/views.py +1479 -1151
  92. pages/admin.py +708 -536
  93. pages/apps.py +10 -10
  94. pages/checks.py +40 -40
  95. pages/context_processors.py +127 -119
  96. pages/defaults.py +13 -13
  97. pages/forms.py +198 -169
  98. pages/middleware.py +205 -153
  99. pages/models.py +607 -426
  100. pages/tests.py +2612 -2083
  101. pages/urls.py +25 -25
  102. pages/utils.py +12 -12
  103. pages/views.py +1165 -1120
  104. arthexis-0.1.12.dist-info/RECORD +0 -102
  105. nodes/actions.py +0 -70
  106. {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/WHEEL +0 -0
  107. {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/top_level.txt +0 -0
core/views.py CHANGED
@@ -1,1382 +1,2175 @@
1
- import json
2
- import shutil
3
- from datetime import timedelta
4
-
5
- import requests
6
- from django.conf import settings
7
- from django.contrib.admin.views.decorators import staff_member_required
8
- from django.contrib.auth import authenticate, login
9
- from django.contrib import messages
10
- from django.contrib.sites.models import Site
11
- from django.http import Http404, JsonResponse
12
- from django.shortcuts import get_object_or_404, redirect, render, resolve_url
13
- from django.utils import timezone
14
- from django.utils.text import slugify
15
- from django.utils.translation import gettext as _
16
- from django.urls import NoReverseMatch, reverse
17
- from django.views.decorators.csrf import csrf_exempt
18
- from django.views.decorators.http import require_GET, require_POST
19
- from django.utils.http import url_has_allowed_host_and_scheme
20
- from pathlib import Path
21
- from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
22
- import errno
23
- import subprocess
24
-
25
- from utils import revision
26
- from utils.api import api_login_required
27
-
28
- from .models import Product, EnergyAccount, PackageRelease, Todo
29
- from .models import RFID
30
-
31
-
32
@staff_member_required
def odoo_products(request):
    """Return available products from the user's Odoo instance."""

    profile = getattr(request.user, "odoo_profile", None)
    if not profile or not profile.is_verified:
        raise Http404
    try:
        records = profile.execute(
            "product.product",
            "search_read",
            [[]],
            {"fields": ["name"], "limit": 50},
        )
    except Exception:
        # Any RPC failure is reported as a bad-gateway to the caller.
        return JsonResponse({"detail": "Unable to fetch products"}, status=502)
    payload = [
        {"id": record.get("id"), "name": record.get("name", "")}
        for record in records
    ]
    return JsonResponse(payload, safe=False)
50
-
51
-
52
@require_GET
def version_info(request):
    """Return the running application version and Git revision."""

    version_file = Path(settings.BASE_DIR) / "VERSION"
    version = (
        version_file.read_text(encoding="utf-8").strip()
        if version_file.exists()
        else ""
    )
    return JsonResponse(
        {"version": version, "revision": revision.get_revision()}
    )
66
-
67
-
68
- from . import release as release_utils
69
-
70
-
71
# Fixture directory next to this module; release TODO fixtures are written
# here as todos__<slug>.json files (see _write_todo_fixture).
TODO_FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"
72
-
73
-
74
- def _append_log(path: Path, message: str) -> None:
75
- path.parent.mkdir(parents=True, exist_ok=True)
76
- with path.open("a", encoding="utf-8") as fh:
77
- fh.write(message + "\n")
78
-
79
-
80
def _clean_repo() -> None:
    """Return the git repository to a clean state (best effort)."""
    # check=False: cleanup must never raise, even outside a repo.
    for command in (["git", "reset", "--hard"], ["git", "clean", "-fd"]):
        subprocess.run(command, check=False)
84
-
85
-
86
- def _format_path(path: Path) -> str:
87
- try:
88
- return str(path.resolve().relative_to(Path.cwd()))
89
- except ValueError:
90
- return str(path)
91
-
92
-
93
def _next_patch_version(version: str) -> str:
    """Return *version* with its patch (micro) component bumped.

    Non-PEP 440 strings fall back to incrementing the right-most numeric
    dotted segment; a string with no numeric segment is returned as-is.
    """
    from packaging.version import InvalidVersion, Version

    try:
        parsed = Version(version)
    except InvalidVersion:
        segments = version.split(".")
        for position in reversed(range(len(segments))):
            if segments[position].isdigit():
                segments[position] = str(int(segments[position]) + 1)
                return ".".join(segments)
        return version
    return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
107
-
108
-
109
def _write_todo_fixture(todo: Todo) -> Path:
    """Serialize *todo* into a JSON fixture file and return its path.

    Dots in the request are treated as word separators when building the
    slugged filename; an empty slug falls back to "todo".
    """
    slug = slugify(todo.request.replace(".", " ")).replace("-", "_") or "todo"
    fixture_path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
    fixture_path.parent.mkdir(parents=True, exist_ok=True)
    payload = [
        {
            "model": "core.todo",
            "fields": {
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            },
        }
    ]
    fixture_path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8")
    return fixture_path
128
-
129
-
130
- def _should_use_python_changelog(exc: OSError) -> bool:
131
- winerror = getattr(exc, "winerror", None)
132
- if winerror in {193}:
133
- return True
134
- return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
135
-
136
-
137
def _generate_changelog_with_python(log_path: Path) -> None:
    """Regenerate CHANGELOG.rst from git history without the shell script.

    Used when scripts/generate-changelog.sh cannot run (see
    _should_use_python_changelog). Lists commits since the latest tag under
    an "Unreleased" heading, keeping the previous file's body.
    """
    _append_log(log_path, "Falling back to Python changelog generator")
    # Latest tag marks the start of the unreleased range; no tag -> whole HEAD.
    describe = subprocess.run(
        ["git", "describe", "--tags", "--abbrev=0"],
        capture_output=True,
        text=True,
        check=False,
    )
    start_tag = describe.stdout.strip() if describe.returncode == 0 else ""
    range_spec = f"{start_tag}..HEAD" if start_tag else "HEAD"
    log_proc = subprocess.run(
        ["git", "log", range_spec, "--no-merges", "--pretty=format:- %h %s"],
        capture_output=True,
        text=True,
        check=True,
    )
    entries = [line for line in log_proc.stdout.splitlines() if line]
    changelog_path = Path("CHANGELOG.rst")
    previous_lines: list[str] = []
    if changelog_path.exists():
        previous_lines = changelog_path.read_text(encoding="utf-8").splitlines()
        # Drop the 6 header lines (title, underline, blank, "Unreleased",
        # underline, blank) that this function re-emits below.
        if len(previous_lines) > 6:
            previous_lines = previous_lines[6:]
        else:
            previous_lines = []
    lines = [
        "Changelog",
        "=========",
        "",
        "Unreleased",
        "----------",
        "",
    ]
    if entries:
        lines.extend(entries)
    if previous_lines:
        lines.append("")
        lines.extend(previous_lines)
    content = "\n".join(lines)
    if not content.endswith("\n"):
        content += "\n"
    changelog_path.write_text(content, encoding="utf-8")
    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
180
-
181
-
182
def _ensure_release_todo(release) -> tuple[Todo, Path]:
    """Create or refresh the follow-up TODO for the next patch release.

    Matches an existing TODO case-insensitively by its request text, resets
    its seed/done flags, writes its fixture file, and returns the
    ``(todo, fixture_path)`` pair so callers can stage the fixture.
    """
    target_version = _next_patch_version(release.version)
    request = f"Create release {release.package.name} {target_version}"
    try:
        url = reverse("admin:core_packagerelease_changelist")
    except NoReverseMatch:
        # Admin URLs may be unavailable (e.g. admin not installed).
        url = ""
    # Fix: bind the created flag to a named variable instead of "_", which
    # shadowed the module-level gettext alias inside this scope.
    todo, _created = Todo.all_objects.update_or_create(
        request__iexact=request,
        defaults={
            "request": request,
            "url": url,
            "request_details": "",
            "is_seed_data": True,
            "is_deleted": False,
            "is_user_data": False,
            "done_on": None,
            "on_done_condition": "",
        },
    )
    fixture_path = _write_todo_fixture(todo)
    return todo, fixture_path
204
-
205
-
206
def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
    """Ensure ``release`` matches the repository revision and version.

    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
    ``True`` when any field changed and ``previous_version`` is the version
    before synchronization.
    """

    from packaging.version import InvalidVersion, Version

    previous_version = release.version
    updated_fields: set[str] = set()

    # Parse the repo's VERSION file, if present and PEP 440 valid.
    repo_version: Version | None = None
    version_path = Path("VERSION")
    if version_path.exists():
        try:
            repo_version = Version(version_path.read_text(encoding="utf-8").strip())
        except InvalidVersion:
            repo_version = None

    try:
        release_version = Version(release.version)
    except InvalidVersion:
        release_version = None

    # The release must be at least one patch ahead of the repo's VERSION.
    if repo_version is not None:
        bumped_repo_version = Version(
            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
        )
        if release_version is None or release_version < bumped_repo_version:
            release.version = str(bumped_repo_version)
            release_version = bumped_repo_version
            updated_fields.add("version")

    # Track the current git revision on the release record.
    current_revision = revision.get_revision()
    if current_revision and current_revision != release.revision:
        release.revision = current_revision
        updated_fields.add("revision")

    if updated_fields:
        release.save(update_fields=list(updated_fields))
        PackageRelease.dump_fixture()

    # Reactivate the owning package if it was flagged inactive.
    package_updated = False
    if release.package_id and not release.package.is_active:
        release.package.is_active = True
        release.package.save(update_fields=["is_active"])
        package_updated = True

    # Write the (possibly bumped) version back to the VERSION file.
    version_updated = False
    if release.version:
        current = ""
        if version_path.exists():
            current = version_path.read_text(encoding="utf-8").strip()
        if current != release.version:
            version_path.write_text(f"{release.version}\n", encoding="utf-8")
            version_updated = True

    return bool(updated_fields or version_updated or package_updated), previous_version
266
-
267
-
268
- def _changelog_notes(version: str) -> str:
269
- path = Path("CHANGELOG.rst")
270
- if not path.exists():
271
- return ""
272
- lines = path.read_text(encoding="utf-8").splitlines()
273
- prefix = f"{version} "
274
- for i, line in enumerate(lines):
275
- if line.startswith(prefix):
276
- j = i + 2
277
- items = []
278
- while j < len(lines) and lines[j].startswith("- "):
279
- items.append(lines[j])
280
- j += 1
281
- return "\n".join(items)
282
- return ""
283
-
284
-
285
# Control-flow exception used by the publish steps to pause the pipeline.
class PendingTodos(Exception):
    """Raised when TODO items require acknowledgment before proceeding."""
287
-
288
-
289
# Control-flow exception used by the publish steps to pause the pipeline.
class ApprovalRequired(Exception):
    """Raised when release manager approval is required before continuing."""
291
-
292
-
293
def _format_condition_failure(todo: Todo, result) -> str:
    """Return a localized error message for a failed TODO condition."""

    error, resolved = result.error, result.resolved
    if error:
        if resolved:
            detail = _("%(condition)s (error: %(error)s)") % {
                "condition": resolved,
                "error": error,
            }
        else:
            detail = _("Error: %(error)s") % {"error": error}
    else:
        detail = resolved if resolved else _("Condition evaluated to False")
    return _("Condition failed for %(todo)s: %(detail)s") % {
        "todo": todo.request,
        "detail": detail,
    }
311
-
312
-
313
def _get_return_url(request) -> str:
    """Return a safe URL to redirect back to after completing a TODO.

    Checks ``next`` (GET then POST) and the referer, in that order, and
    falls back to the admin index when none passes the host/scheme check.
    """

    candidates = [
        request.GET.get("next"),
        request.POST.get("next"),
        request.META.get("HTTP_REFERER"),
    ]
    allowed_hosts = {request.get_host()}
    require_https = request.is_secure()
    for candidate in candidates:
        if candidate and url_has_allowed_host_and_scheme(
            candidate,
            allowed_hosts=allowed_hosts,
            require_https=require_https,
        ):
            return candidate
    return resolve_url("admin:index")
331
-
332
-
333
def _step_check_todos(release, ctx, log_path: Path) -> None:
    """Publish step: require acknowledgment of pending TODOs, then purge them.

    Raises PendingTodos until ctx["todos_ack"] is set; afterwards deletes
    all non-deleted TODOs, removes their fixture files, and commits the
    removal.
    """
    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    if pending_qs.exists():
        # Expose the pending items so the UI can list them for the operator.
        ctx["todos"] = list(
            pending_qs.values("id", "request", "url", "request_details")
        )
        if not ctx.get("todos_ack"):
            raise PendingTodos()
    todos = list(Todo.objects.filter(is_deleted=False))
    for todo in todos:
        todo.delete()
    removed = []
    for path in TODO_FIXTURE_DIR.glob("todos__*.json"):
        removed.append(str(path))
        path.unlink()
    if removed:
        # Best-effort commit; check=False keeps the step from failing here.
        subprocess.run(["git", "add", *removed], check=False)
        subprocess.run(
            ["git", "commit", "-m", "chore: remove TODO fixtures"],
            check=False,
        )
    ctx.pop("todos", None)
    ctx["todos_ack"] = True
356
-
357
-
358
def _step_check_version(release, ctx, log_path: Path) -> None:
    """Publish step: verify the repo is clean and the version is available.

    A dirty tree is tolerated only when every changed file is a JSON
    fixture, in which case the fixtures are summarized and committed.
    Then the release version is checked against the local VERSION file and
    (when the network is available) against PyPI.
    """
    from . import release as release_utils
    from packaging.version import InvalidVersion, Version

    if not release_utils._git_clean():
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True,
            text=True,
        )
        # Porcelain lines are "XY <path>"; strip the 3-char status prefix.
        files = [line[3:] for line in proc.stdout.splitlines()]
        fixture_files = [
            f
            for f in files
            if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
        ]
        # Only fixture-only dirt is acceptable; anything else aborts.
        if not files or len(fixture_files) != len(files):
            raise Exception("Git repository is not clean")

        # Summarize each fixture (object count + model names) for the UI.
        summary = []
        for f in fixture_files:
            path = Path(f)
            try:
                data = json.loads(path.read_text(encoding="utf-8"))
            except Exception:
                count = 0
                models: list[str] = []
            else:
                if isinstance(data, list):
                    count = len(data)
                    models = sorted(
                        {obj.get("model", "") for obj in data if isinstance(obj, dict)}
                    )
                elif isinstance(data, dict):
                    count = 1
                    models = [data.get("model", "")]
                else:  # pragma: no cover - unexpected structure
                    count = 0
                    models = []
            summary.append({"path": f, "count": count, "models": models})

        ctx["fixtures"] = summary
        _append_log(
            log_path,
            "Committing fixture changes: " + ", ".join(fixture_files),
        )
        subprocess.run(["git", "add", *fixture_files], check=True)
        subprocess.run(["git", "commit", "-m", "chore: update fixtures"], check=True)
        _append_log(log_path, "Fixture changes committed")

    # The release version must not be older than the recorded VERSION.
    version_path = Path("VERSION")
    if version_path.exists():
        current = version_path.read_text(encoding="utf-8").strip()
        if current and Version(release.version) < Version(current):
            raise Exception(
                f"Version {release.version} is older than existing {current}"
            )

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(f"https://pypi.org/pypi/{release.package.name}/json")
            if resp.ok:
                data = resp.json()
                releases = data.get("releases", {})
                try:
                    target_version = Version(release.version)
                except InvalidVersion:
                    target_version = None

                for candidate, files in releases.items():
                    # Compare both the raw string and the normalized form.
                    same_version = candidate == release.version
                    if target_version is not None and not same_version:
                        try:
                            same_version = Version(candidate) == target_version
                        except InvalidVersion:
                            same_version = False
                    if not same_version:
                        continue

                    # A fully yanked release does not block reuse.
                    has_available_files = any(
                        isinstance(file_data, dict)
                        and not file_data.get("yanked", False)
                        for file_data in files or []
                    )
                    if has_available_files:
                        raise Exception(
                            f"Version {release.version} already on PyPI"
                        )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
        else:
            _append_log(
                log_path,
                f"Version {release.version} not published on PyPI",
            )
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")
459
-
460
-
461
def _step_handle_migrations(release, ctx, log_path: Path) -> None:
    """Publish step: record the manual migration review checkpoint."""
    for note in (
        "Freeze, squash and approve migrations",
        "Migration review acknowledged (manual step)",
    ):
        _append_log(log_path, note)
464
-
465
-
466
def _step_changelog_docs(release, ctx, log_path: Path) -> None:
    """Publish step: record the CHANGELOG/documentation review checkpoint."""
    for note in (
        "Compose CHANGELOG and documentation",
        "CHANGELOG and documentation review recorded",
    ):
        _append_log(log_path, note)
469
-
470
-
471
def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
    """Publish step: regenerate the changelog, bump VERSION, add the TODO.

    Regenerates CHANGELOG.rst (shell script with a Python fallback), writes
    the release version to VERSION, commits both when they changed, and
    creates/commits the next-release TODO fixture.
    """
    _append_log(log_path, "Execute pre-release actions")
    try:
        subprocess.run(["scripts/generate-changelog.sh"], check=True)
    except OSError as exc:
        # Script not executable / not found (e.g. on Windows): use fallback.
        if _should_use_python_changelog(exc):
            _append_log(
                log_path,
                f"scripts/generate-changelog.sh failed: {exc}",
            )
            _generate_changelog_with_python(log_path)
        else:  # pragma: no cover - unexpected OSError
            raise
    else:
        _append_log(
            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
        )
    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
    _append_log(log_path, "Staged CHANGELOG.rst for commit")
    version_path = Path("VERSION")
    version_path.write_text(f"{release.version}\n", encoding="utf-8")
    _append_log(log_path, f"Updated VERSION file to {release.version}")
    subprocess.run(["git", "add", "VERSION"], check=True)
    _append_log(log_path, "Staged VERSION for commit")
    # Non-zero exit means the staged files actually differ from HEAD.
    diff = subprocess.run(
        [
            "git",
            "diff",
            "--cached",
            "--quiet",
            "--",
            "CHANGELOG.rst",
            "VERSION",
        ],
        check=False,
    )
    if diff.returncode != 0:
        subprocess.run(
            ["git", "commit", "-m", f"pre-release commit {release.version}"],
            check=True,
        )
        _append_log(log_path, f"Committed VERSION update for {release.version}")
    else:
        _append_log(
            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
        )
        # Unstage so the tree is left as it was found.
        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
        _append_log(log_path, "Unstaged CHANGELOG.rst")
        subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
        _append_log(log_path, "Unstaged VERSION file")
    todo, fixture_path = _ensure_release_todo(release)
    fixture_display = _format_path(fixture_path)
    _append_log(log_path, f"Added TODO: {todo.request}")
    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
    subprocess.run(["git", "add", str(fixture_path)], check=True)
    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
    fixture_diff = subprocess.run(
        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
        check=False,
    )
    if fixture_diff.returncode != 0:
        commit_message = f"chore: add release TODO for {release.package.name}"
        subprocess.run(["git", "commit", "-m", commit_message], check=True)
        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
    else:
        _append_log(
            log_path,
            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
        )
    _append_log(log_path, "Pre-release actions complete")
541
-
542
-
543
def _step_run_tests(release, ctx, log_path: Path) -> None:
    """Publish step: record that the full test suite run was acknowledged."""
    for note in (
        "Complete test suite with --all flag",
        "Test suite completion acknowledged",
    ):
        _append_log(log_path, note)
546
-
547
-
548
def _step_promote_build(release, ctx, log_path: Path) -> None:
    """Publish step: rebase onto origin/main, build artifacts, push metadata.

    On any failure the repository is reset to a clean state before the
    exception propagates. On success the step log is renamed to
    "<package>-<version>.log" and recorded in ctx["log"].
    """
    from . import release as release_utils

    _append_log(log_path, "Generating build files")
    try:
        try:
            subprocess.run(["git", "fetch", "origin", "main"], check=True)
            _append_log(log_path, "Fetched latest changes from origin/main")
            subprocess.run(["git", "rebase", "origin/main"], check=True)
            _append_log(log_path, "Rebased current branch onto origin/main")
        except subprocess.CalledProcessError as exc:
            # Abort the half-finished rebase before surfacing the failure.
            subprocess.run(["git", "rebase", "--abort"], check=False)
            _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")
            raise Exception("Rebase onto main failed") from exc
        release_utils.promote(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(),
        )
        _append_log(
            log_path,
            f"Generated release artifacts for v{release.version}",
        )
        from glob import glob

        # Commit and push VERSION plus any regenerated release fixtures.
        paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
        diff = subprocess.run(
            ["git", "status", "--porcelain", *paths],
            capture_output=True,
            text=True,
        )
        if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
                    "commit",
                    "-m",
                    f"chore: update release metadata for v{release.version}",
                ],
                check=True,
            )
            _append_log(
                log_path,
                f"Committed release metadata for v{release.version}",
            )
            subprocess.run(["git", "push"], check=True)
            _append_log(log_path, "Pushed release changes to origin")
        PackageRelease.dump_fixture()
        _append_log(log_path, "Updated release fixtures")
    except Exception:
        _clean_repo()
        raise
    # Rename the working log to its final per-release name.
    release_name = f"{release.package.name}-{release.version}"
    new_log = log_path.with_name(f"{release_name}.log")
    log_path.rename(new_log)
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")
607
-
608
-
609
def _step_release_manager_approval(release, ctx, log_path: Path) -> None:
    """Publish step: block until the release manager approves or rejects.

    Uses ctx flags as a small state machine: "release_approval" carries the
    decision, "awaiting_approval" marks the paused state, and
    "approval_credentials_missing" records that publishing credentials are
    absent. Raises ApprovalRequired to pause, RuntimeError on rejection.
    """
    if release.to_credentials() is None:
        # No credentials: discard any decision and pause until they exist.
        ctx.pop("release_approval", None)
        if not ctx.get("approval_credentials_missing"):
            _append_log(log_path, "Release manager publishing credentials missing")
            ctx["approval_credentials_missing"] = True
        ctx["awaiting_approval"] = True
        raise ApprovalRequired()

    missing_before = ctx.pop("approval_credentials_missing", None)
    if missing_before:
        # Credentials just appeared; restart the approval wait cleanly.
        ctx.pop("awaiting_approval", None)
    decision = ctx.get("release_approval")
    if decision == "approved":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager approved release")
        return
    if decision == "rejected":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager rejected release")
        raise RuntimeError(
            _("Release manager rejected the release. Restart required."),
        )
    # No decision yet: log only on the first pass, then keep waiting.
    if not ctx.get("awaiting_approval"):
        ctx["awaiting_approval"] = True
        _append_log(log_path, "Awaiting release manager approval")
    else:
        ctx["awaiting_approval"] = True
    raise ApprovalRequired()
642
-
643
-
644
def _step_publish(release, ctx, log_path: Path) -> None:
    """Publish step: upload the distribution and record the PyPI URL."""
    from . import release as release_utils

    _append_log(log_path, "Uploading distribution")
    release_utils.publish(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
    )
    pypi_url = f"https://pypi.org/project/{release.package.name}/{release.version}/"
    release.pypi_url = pypi_url
    release.release_on = timezone.now()
    release.save(update_fields=["pypi_url", "release_on"])
    PackageRelease.dump_fixture()
    _append_log(log_path, f"Recorded PyPI URL: {pypi_url}")
    _append_log(log_path, "Upload complete")
661
-
662
-
663
# Label of the manual migration-review step, exported as a constant so it
# can be referenced by name elsewhere.
FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"


# Ordered (label, handler) pairs driving the publish workflow. Each handler
# takes (release, ctx, log_path) and may raise PendingTodos or
# ApprovalRequired to pause the pipeline.
PUBLISH_STEPS = [
    ("Check version number availability", _step_check_version),
    ("Confirm release TODO completion", _step_check_todos),
    (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
    ("Compose CHANGELOG and documentation", _step_changelog_docs),
    ("Execute pre-release actions", _step_pre_release_actions),
    ("Build release artifacts", _step_promote_build),
    ("Complete test suite with --all flag", _step_run_tests),
    ("Get Release Manager Approval", _step_release_manager_approval),
    ("Upload final build to PyPI", _step_publish),
]
677
-
678
-
679
@csrf_exempt
def rfid_login(request):
    """Authenticate a user using an RFID."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    # Accept either a JSON body or regular form data.
    try:
        payload = json.loads(request.body.decode())
    except json.JSONDecodeError:
        payload = request.POST

    tag = payload.get("rfid")
    if not tag:
        return JsonResponse({"detail": "rfid required"}, status=400)

    user = authenticate(request, rfid=tag)
    if user is None:
        return JsonResponse({"detail": "invalid RFID"}, status=401)

    login(request, user)
    return JsonResponse({"id": user.id, "username": user.username})
701
-
702
-
703
@api_login_required
def product_list(request):
    """Return a JSON list of products."""

    fields = ("id", "name", "description", "renewal_period")
    return JsonResponse({"products": list(Product.objects.values(*fields))})
711
-
712
-
713
@csrf_exempt
@api_login_required
def add_live_subscription(request):
    """Create a live subscription for an energy account from POSTed JSON."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    # Accept either a JSON body or regular form data.
    try:
        payload = json.loads(request.body.decode())
    except json.JSONDecodeError:
        payload = request.POST

    account_id = payload.get("account_id")
    product_id = payload.get("product_id")
    if not account_id or not product_id:
        return JsonResponse(
            {"detail": "account_id and product_id required"}, status=400
        )

    try:
        product = Product.objects.get(id=product_id)
    except Product.DoesNotExist:
        return JsonResponse({"detail": "invalid product"}, status=404)

    try:
        account = EnergyAccount.objects.get(id=account_id)
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    today = timezone.now().date()
    account.live_subscription_product = product
    account.live_subscription_start_date = today
    account.live_subscription_next_renewal = today + timedelta(
        days=product.renewal_period
    )
    account.save()

    return JsonResponse({"id": account.id})
753
-
754
-
755
@api_login_required
def live_subscription_list(request):
    """Return live subscriptions for the given account_id."""

    account_id = request.GET.get("account_id")
    if not account_id:
        return JsonResponse({"detail": "account_id required"}, status=400)

    try:
        account = EnergyAccount.objects.select_related("live_subscription_product").get(
            id=account_id
        )
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    subscriptions = []
    product = account.live_subscription_product
    if product:
        renewal = account.live_subscription_next_renewal
        if not renewal and account.live_subscription_start_date:
            # Derive the renewal date from the start date when unset.
            renewal = account.live_subscription_start_date + timedelta(
                days=product.renewal_period
            )
        subscriptions.append(
            {
                "id": account.id,
                "product__name": product.name,
                "next_renewal": renewal,
            }
        )

    return JsonResponse({"live_subscriptions": subscriptions})
788
-
789
-
790
@csrf_exempt
@api_login_required
def rfid_batch(request):
    """Export or import RFID tags in batch.

    GET: filter by ``color`` ("ALL" disables the filter) and an optional
    ``released`` flag, returning the matching tags ordered by RFID.
    POST: accept a JSON body with an ``rfids`` list (or a bare list) and
    upsert each entry keyed by its upper-cased RFID value.
    """

    if request.method == "GET":
        color = request.GET.get("color", RFID.BLACK).upper()
        released = request.GET.get("released")
        if released is not None:
            released = released.lower()
        qs = RFID.objects.all()
        if color != "ALL":
            qs = qs.filter(color=color)
        if released in ("true", "false"):
            qs = qs.filter(released=(released == "true"))
        tags = [
            {
                "rfid": t.rfid,
                "custom_label": t.custom_label,
                "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
                "allowed": t.allowed,
                "color": t.color,
                "released": t.released,
            }
            for t in qs.order_by("rfid")
        ]
        return JsonResponse({"rfids": tags})

    if request.method == "POST":
        try:
            data = json.loads(request.body.decode())
        except json.JSONDecodeError:
            return JsonResponse({"detail": "invalid JSON"}, status=400)

        tags = data.get("rfids") if isinstance(data, dict) else data
        if not isinstance(tags, list):
            return JsonResponse({"detail": "rfids list required"}, status=400)

        count = 0
        for row in tags:
            rfid = (row.get("rfid") or "").strip()
            if not rfid:
                continue  # skip rows without an RFID value
            allowed = row.get("allowed", True)
            energy_accounts = row.get("energy_accounts") or []
            color = (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
            released = row.get("released", False)
            if isinstance(released, str):
                released = released.lower() == "true"
            custom_label = (row.get("custom_label") or "").strip()

            # Fix: bind the created flag to a named variable instead of "_",
            # which shadowed the module-level gettext alias in this scope.
            tag, _created = RFID.objects.update_or_create(
                rfid=rfid.upper(),
                defaults={
                    "allowed": allowed,
                    "color": color,
                    "released": released,
                    "custom_label": custom_label,
                },
            )
            if energy_accounts:
                tag.energy_accounts.set(
                    EnergyAccount.objects.filter(id__in=energy_accounts)
                )
            else:
                tag.energy_accounts.clear()
            count += 1

        return JsonResponse({"imported": count})

    return JsonResponse({"detail": "GET or POST required"}, status=400)
861
-
862
-
863
- @staff_member_required
864
- def release_progress(request, pk: int, action: str):
865
- release = get_object_or_404(PackageRelease, pk=pk)
866
- if action != "publish":
867
- raise Http404("Unknown action")
868
- session_key = f"release_publish_{pk}"
869
- lock_path = Path("locks") / f"release_publish_{pk}.json"
870
- restart_path = Path("locks") / f"release_publish_{pk}.restarts"
871
-
872
- if not release.is_current:
873
- if release.is_published:
874
- raise Http404("Release is not current")
875
- updated, previous_version = _sync_release_with_revision(release)
876
- if updated:
877
- request.session.pop(session_key, None)
878
- if lock_path.exists():
879
- lock_path.unlink()
880
- if restart_path.exists():
881
- restart_path.unlink()
882
- log_dir = Path("logs")
883
- for log_file in log_dir.glob(
884
- f"{release.package.name}-{previous_version}*.log"
885
- ):
886
- log_file.unlink()
887
- if not release.is_current:
888
- raise Http404("Release is not current")
889
-
890
- if request.GET.get("restart"):
891
- count = 0
892
- if restart_path.exists():
893
- try:
894
- count = int(restart_path.read_text(encoding="utf-8"))
895
- except Exception:
896
- count = 0
897
- restart_path.parent.mkdir(parents=True, exist_ok=True)
898
- restart_path.write_text(str(count + 1), encoding="utf-8")
899
- _clean_repo()
900
- release.pypi_url = ""
901
- release.release_on = None
902
- release.save(update_fields=["pypi_url", "release_on"])
903
- request.session.pop(session_key, None)
904
- if lock_path.exists():
905
- lock_path.unlink()
906
- log_dir = Path("logs")
907
- for f in log_dir.glob(f"{release.package.name}-{release.version}*.log"):
908
- f.unlink()
909
- return redirect(request.path)
910
- ctx = request.session.get(session_key)
911
- if ctx is None and lock_path.exists():
912
- try:
913
- ctx = json.loads(lock_path.read_text(encoding="utf-8"))
914
- except Exception:
915
- ctx = {"step": 0}
916
- if ctx is None:
917
- ctx = {"step": 0}
918
- if restart_path.exists():
919
- restart_path.unlink()
920
-
921
- manager = release.release_manager or release.package.release_manager
922
- credentials_ready = bool(release.to_credentials())
923
- if credentials_ready and ctx.get("approval_credentials_missing"):
924
- ctx.pop("approval_credentials_missing", None)
925
-
926
- ack_todos_requested = bool(request.GET.get("ack_todos"))
927
-
928
- if request.GET.get("start"):
929
- ctx["started"] = True
930
- ctx["paused"] = False
931
- if (
932
- ctx.get("awaiting_approval")
933
- and not ctx.get("approval_credentials_missing")
934
- and credentials_ready
935
- ):
936
- if request.GET.get("approve"):
937
- ctx["release_approval"] = "approved"
938
- if request.GET.get("reject"):
939
- ctx["release_approval"] = "rejected"
940
- if request.GET.get("pause") and ctx.get("started"):
941
- ctx["paused"] = True
942
- restart_count = 0
943
- if restart_path.exists():
944
- try:
945
- restart_count = int(restart_path.read_text(encoding="utf-8"))
946
- except Exception:
947
- restart_count = 0
948
- step_count = ctx.get("step", 0)
949
- step_param = request.GET.get("step")
950
-
951
- pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
952
- pending_items = list(pending_qs)
953
- if ack_todos_requested:
954
- if pending_items:
955
- failures = []
956
- for todo in pending_items:
957
- result = todo.check_on_done_condition()
958
- if not result.passed:
959
- failures.append((todo, result))
960
- if failures:
961
- ctx.pop("todos_ack", None)
962
- for todo, result in failures:
963
- messages.error(request, _format_condition_failure(todo, result))
964
- else:
965
- ctx["todos_ack"] = True
966
- else:
967
- ctx["todos_ack"] = True
968
-
969
- if pending_items and not ctx.get("todos_ack"):
970
- ctx["todos"] = [
971
- {
972
- "id": todo.pk,
973
- "request": todo.request,
974
- "url": todo.url,
975
- "request_details": todo.request_details,
976
- }
977
- for todo in pending_items
978
- ]
979
- else:
980
- ctx.pop("todos", None)
981
-
982
- identifier = f"{release.package.name}-{release.version}"
983
- log_name = f"{identifier}.log"
984
- if ctx.get("log") != log_name:
985
- ctx = {
986
- "step": 0,
987
- "log": log_name,
988
- "started": ctx.get("started", False),
989
- }
990
- step_count = 0
991
- log_path = Path("logs") / log_name
992
- ctx.setdefault("log", log_name)
993
- ctx.setdefault("paused", False)
994
-
995
- if (
996
- ctx.get("started")
997
- and step_count == 0
998
- and (step_param is None or step_param == "0")
999
- ):
1000
- if log_path.exists():
1001
- log_path.unlink()
1002
-
1003
- steps = PUBLISH_STEPS
1004
- fixtures_step_index = next(
1005
- (
1006
- index
1007
- for index, (name, _) in enumerate(steps)
1008
- if name == FIXTURE_REVIEW_STEP_NAME
1009
- ),
1010
- None,
1011
- )
1012
- error = ctx.get("error")
1013
-
1014
- if (
1015
- ctx.get("started")
1016
- and not ctx.get("paused")
1017
- and step_param is not None
1018
- and not error
1019
- and step_count < len(steps)
1020
- ):
1021
- to_run = int(step_param)
1022
- if to_run == step_count:
1023
- name, func = steps[to_run]
1024
- try:
1025
- func(release, ctx, log_path)
1026
- except PendingTodos:
1027
- pass
1028
- except ApprovalRequired:
1029
- pass
1030
- except Exception as exc: # pragma: no cover - best effort logging
1031
- _append_log(log_path, f"{name} failed: {exc}")
1032
- ctx["error"] = str(exc)
1033
- request.session[session_key] = ctx
1034
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1035
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1036
- else:
1037
- step_count += 1
1038
- ctx["step"] = step_count
1039
- request.session[session_key] = ctx
1040
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1041
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1042
-
1043
- done = step_count >= len(steps) and not ctx.get("error")
1044
-
1045
- show_log = ctx.get("started") or step_count > 0 or done or ctx.get("error")
1046
- if show_log and log_path.exists():
1047
- log_content = log_path.read_text(encoding="utf-8")
1048
- else:
1049
- log_content = ""
1050
- next_step = (
1051
- step_count
1052
- if ctx.get("started")
1053
- and not ctx.get("paused")
1054
- and not done
1055
- and not ctx.get("error")
1056
- else None
1057
- )
1058
- has_pending_todos = bool(ctx.get("todos") and not ctx.get("todos_ack"))
1059
- if has_pending_todos:
1060
- next_step = None
1061
- awaiting_approval = bool(ctx.get("awaiting_approval"))
1062
- approval_credentials_missing = bool(ctx.get("approval_credentials_missing"))
1063
- if awaiting_approval:
1064
- next_step = None
1065
- if approval_credentials_missing:
1066
- next_step = None
1067
- paused = ctx.get("paused", False)
1068
-
1069
- step_names = [s[0] for s in steps]
1070
- approval_credentials_ready = credentials_ready
1071
- credentials_blocking = approval_credentials_missing or (
1072
- awaiting_approval and not approval_credentials_ready
1073
- )
1074
- step_states = []
1075
- for index, name in enumerate(step_names):
1076
- if index < step_count:
1077
- status = "complete"
1078
- icon = "✅"
1079
- label = _("Completed")
1080
- elif error and index == step_count:
1081
- status = "error"
1082
- icon = ""
1083
- label = _("Failed")
1084
- elif paused and ctx.get("started") and index == step_count and not done:
1085
- status = "paused"
1086
- icon = "⏸️"
1087
- label = _("Paused")
1088
- elif (
1089
- has_pending_todos
1090
- and ctx.get("started")
1091
- and index == step_count
1092
- and not done
1093
- ):
1094
- status = "blocked"
1095
- icon = "📝"
1096
- label = _("Awaiting checklist")
1097
- elif (
1098
- credentials_blocking
1099
- and ctx.get("started")
1100
- and index == step_count
1101
- and not done
1102
- ):
1103
- status = "missing-credentials"
1104
- icon = "🔐"
1105
- label = _("Credentials required")
1106
- elif (
1107
- awaiting_approval
1108
- and approval_credentials_ready
1109
- and ctx.get("started")
1110
- and index == step_count
1111
- and not done
1112
- ):
1113
- status = "awaiting-approval"
1114
- icon = "🤝"
1115
- label = _("Awaiting approval")
1116
- elif ctx.get("started") and index == step_count and not done:
1117
- status = "active"
1118
- icon = "⏳"
1119
- label = _("In progress")
1120
- else:
1121
- status = "pending"
1122
- icon = ""
1123
- label = _("Pending")
1124
- step_states.append(
1125
- {
1126
- "index": index + 1,
1127
- "name": name,
1128
- "status": status,
1129
- "icon": icon,
1130
- "label": label,
1131
- }
1132
- )
1133
-
1134
- is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
1135
- can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
1136
- release_manager_owner = manager.owner_display() if manager else ""
1137
- try:
1138
- current_user_admin_url = reverse(
1139
- "admin:teams_user_change", args=[request.user.pk]
1140
- )
1141
- except NoReverseMatch:
1142
- current_user_admin_url = reverse(
1143
- "admin:core_user_change", args=[request.user.pk]
1144
- )
1145
-
1146
- fixtures_summary = ctx.get("fixtures")
1147
- if (
1148
- fixtures_summary
1149
- and fixtures_step_index is not None
1150
- and step_count > fixtures_step_index
1151
- ):
1152
- fixtures_summary = None
1153
-
1154
- context = {
1155
- "release": release,
1156
- "action": "publish",
1157
- "steps": step_names,
1158
- "current_step": step_count,
1159
- "next_step": next_step,
1160
- "done": done,
1161
- "error": ctx.get("error"),
1162
- "log_content": log_content,
1163
- "log_path": str(log_path),
1164
- "cert_log": ctx.get("cert_log"),
1165
- "fixtures": fixtures_summary,
1166
- "todos": ctx.get("todos"),
1167
- "restart_count": restart_count,
1168
- "started": ctx.get("started", False),
1169
- "paused": paused,
1170
- "show_log": show_log,
1171
- "step_states": step_states,
1172
- "has_pending_todos": has_pending_todos,
1173
- "awaiting_approval": awaiting_approval,
1174
- "approval_credentials_missing": approval_credentials_missing,
1175
- "approval_credentials_ready": approval_credentials_ready,
1176
- "release_manager_owner": release_manager_owner,
1177
- "has_release_manager": bool(manager),
1178
- "current_user_admin_url": current_user_admin_url,
1179
- "is_running": is_running,
1180
- "can_resume": can_resume,
1181
- }
1182
- request.session[session_key] = ctx
1183
- if done or ctx.get("error"):
1184
- if lock_path.exists():
1185
- lock_path.unlink()
1186
- else:
1187
- lock_path.parent.mkdir(parents=True, exist_ok=True)
1188
- lock_path.write_text(json.dumps(ctx), encoding="utf-8")
1189
- return render(request, "core/release_progress.html", context)
1190
-
1191
-
1192
- def _dedupe_preserve_order(values):
1193
- seen = set()
1194
- result = []
1195
- for value in values:
1196
- if value in seen:
1197
- continue
1198
- seen.add(value)
1199
- result.append(value)
1200
- return result
1201
-
1202
-
1203
- def _parse_todo_auth_directives(query: str):
1204
- directives = {
1205
- "require_logout": False,
1206
- "users": [],
1207
- "permissions": [],
1208
- "notes": [],
1209
- }
1210
- if not query:
1211
- return "", directives
1212
-
1213
- remaining = []
1214
- for key, value in parse_qsl(query, keep_blank_values=True):
1215
- if key != "_todo_auth":
1216
- remaining.append((key, value))
1217
- continue
1218
- token = (value or "").strip()
1219
- if not token:
1220
- continue
1221
- kind, _, payload = token.partition(":")
1222
- kind = kind.strip().lower()
1223
- payload = payload.strip()
1224
- if kind in {"logout", "anonymous", "anon"}:
1225
- directives["require_logout"] = True
1226
- elif kind in {"user", "username"} and payload:
1227
- directives["users"].append(payload)
1228
- elif kind in {"perm", "permission"} and payload:
1229
- directives["permissions"].append(payload)
1230
- else:
1231
- directives["notes"].append(token)
1232
-
1233
- sanitized_query = urlencode(remaining, doseq=True)
1234
- return sanitized_query, directives
1235
-
1236
-
1237
- def _todo_iframe_url(request, todo: Todo):
1238
- """Return a safe iframe URL and auth context for ``todo``."""
1239
-
1240
- fallback = reverse("admin:core_todo_change", args=[todo.pk])
1241
- raw_url = (todo.url or "").strip()
1242
-
1243
- auth_context = {
1244
- "require_logout": False,
1245
- "users": [],
1246
- "permissions": [],
1247
- "notes": [],
1248
- }
1249
-
1250
- def _final_context(target_url: str):
1251
- return {
1252
- "target_url": target_url or fallback,
1253
- "require_logout": auth_context["require_logout"],
1254
- "users": _dedupe_preserve_order(auth_context["users"]),
1255
- "permissions": _dedupe_preserve_order(auth_context["permissions"]),
1256
- "notes": _dedupe_preserve_order(auth_context["notes"]),
1257
- "has_requirements": bool(
1258
- auth_context["require_logout"]
1259
- or auth_context["users"]
1260
- or auth_context["permissions"]
1261
- or auth_context["notes"]
1262
- ),
1263
- }
1264
-
1265
- if not raw_url:
1266
- return fallback, _final_context(fallback)
1267
-
1268
- focus_path = reverse("todo-focus", args=[todo.pk])
1269
- focus_norm = focus_path.strip("/").lower()
1270
-
1271
- def _is_focus_target(target: str) -> bool:
1272
- if not target:
1273
- return False
1274
- parsed_target = urlsplit(target)
1275
- path = parsed_target.path
1276
- if not path and not parsed_target.scheme and not parsed_target.netloc:
1277
- path = target.split("?", 1)[0].split("#", 1)[0]
1278
- normalized = path.strip("/").lower()
1279
- return normalized == focus_norm if normalized else False
1280
-
1281
- if _is_focus_target(raw_url):
1282
- return fallback, _final_context(fallback)
1283
-
1284
- parsed = urlsplit(raw_url)
1285
-
1286
- def _merge_directives(parsed_result):
1287
- sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
1288
- if directives["require_logout"]:
1289
- auth_context["require_logout"] = True
1290
- auth_context["users"].extend(directives["users"])
1291
- auth_context["permissions"].extend(directives["permissions"])
1292
- auth_context["notes"].extend(directives["notes"])
1293
- return parsed_result._replace(query=sanitized_query)
1294
-
1295
- if not parsed.scheme and not parsed.netloc:
1296
- sanitized = _merge_directives(parsed)
1297
- path = sanitized.path or "/"
1298
- if not path.startswith("/"):
1299
- path = f"/{path}"
1300
- relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
1301
- if _is_focus_target(relative_url):
1302
- return fallback, _final_context(fallback)
1303
- return relative_url or fallback, _final_context(relative_url)
1304
-
1305
- if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
1306
- return fallback, _final_context(fallback)
1307
-
1308
- request_host = request.get_host().strip().lower()
1309
- host_without_port = request_host.split(":", 1)[0]
1310
- allowed_hosts = {
1311
- request_host,
1312
- host_without_port,
1313
- "localhost",
1314
- "127.0.0.1",
1315
- "0.0.0.0",
1316
- "::1",
1317
- }
1318
-
1319
- site_domain = ""
1320
- try:
1321
- site_domain = Site.objects.get_current().domain.strip().lower()
1322
- except Site.DoesNotExist:
1323
- site_domain = ""
1324
- if site_domain:
1325
- allowed_hosts.add(site_domain)
1326
- allowed_hosts.add(site_domain.split(":", 1)[0])
1327
-
1328
- for host in getattr(settings, "ALLOWED_HOSTS", []):
1329
- if not isinstance(host, str):
1330
- continue
1331
- normalized = host.strip().lower()
1332
- if not normalized or normalized.startswith("*"):
1333
- continue
1334
- allowed_hosts.add(normalized)
1335
- allowed_hosts.add(normalized.split(":", 1)[0])
1336
-
1337
- hostname = (parsed.hostname or "").strip().lower()
1338
- netloc = parsed.netloc.strip().lower()
1339
- if hostname in allowed_hosts or netloc in allowed_hosts:
1340
- sanitized = _merge_directives(parsed)
1341
- path = sanitized.path or "/"
1342
- if not path.startswith("/"):
1343
- path = f"/{path}"
1344
- relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
1345
- if _is_focus_target(relative_url):
1346
- return fallback, _final_context(fallback)
1347
- return relative_url or fallback, _final_context(relative_url)
1348
-
1349
- return fallback, _final_context(fallback)
1350
-
1351
-
1352
- @staff_member_required
1353
- def todo_focus(request, pk: int):
1354
- todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
1355
- if todo.done_on:
1356
- return redirect(_get_return_url(request))
1357
-
1358
- iframe_url, focus_auth = _todo_iframe_url(request, todo)
1359
- focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
1360
- context = {
1361
- "todo": todo,
1362
- "iframe_url": iframe_url,
1363
- "focus_target_url": focus_target_url,
1364
- "focus_auth": focus_auth,
1365
- "next_url": _get_return_url(request),
1366
- "done_url": reverse("todo-done", args=[todo.pk]),
1367
- }
1368
- return render(request, "core/todo_focus.html", context)
1369
-
1370
-
1371
- @staff_member_required
1372
- @require_POST
1373
- def todo_done(request, pk: int):
1374
- todo = get_object_or_404(Todo, pk=pk, is_deleted=False, done_on__isnull=True)
1375
- redirect_to = _get_return_url(request)
1376
- result = todo.check_on_done_condition()
1377
- if not result.passed:
1378
- messages.error(request, _format_condition_failure(todo, result))
1379
- return redirect(redirect_to)
1380
- todo.done_on = timezone.now()
1381
- todo.save(update_fields=["done_on"])
1382
- return redirect(redirect_to)
1
+ import json
2
+ import logging
3
+ import os
4
+ import shutil
5
+ import uuid
6
+ from datetime import datetime, timedelta, timezone as datetime_timezone
7
+
8
+ import requests
9
+ from django.conf import settings
10
+ from django.contrib.admin.sites import site as admin_site
11
+ from django.contrib.admin.views.decorators import staff_member_required
12
+ from django.contrib.auth import authenticate, login
13
+ from django.contrib import messages
14
+ from django.contrib.sites.models import Site
15
+ from django.http import Http404, JsonResponse, HttpResponse
16
+ from django.shortcuts import get_object_or_404, redirect, render, resolve_url
17
+ from django.template.response import TemplateResponse
18
+ from django.utils import timezone
19
+ from django.utils.text import slugify
20
+ from django.utils.translation import gettext as _
21
+ from django.urls import NoReverseMatch, reverse
22
+ from django.views.decorators.csrf import csrf_exempt
23
+ from django.views.decorators.http import require_GET, require_POST
24
+ from django.utils.http import url_has_allowed_host_and_scheme
25
+ from pathlib import Path
26
+ from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
27
+ import errno
28
+ import subprocess
29
+ from typing import Optional, Sequence
30
+
31
+ from django.template.loader import get_template
32
+ from django.test import signals
33
+
34
+ from utils import revision
35
+ from utils.api import api_login_required
36
+
37
+ logger = logging.getLogger(__name__)
38
+
39
+ PYPI_REQUEST_TIMEOUT = 10
40
+
41
+ from . import changelog as changelog_utils
42
+ from .models import OdooProfile, Product, EnergyAccount, PackageRelease, Todo
43
+ from .models import RFID
44
+
45
+
46
+ @staff_member_required
47
+ def odoo_products(request):
48
+ """Return available products from the user's Odoo instance."""
49
+
50
+ profile = getattr(request.user, "odoo_profile", None)
51
+ if not profile or not profile.is_verified:
52
+ raise Http404
53
+ try:
54
+ products = profile.execute(
55
+ "product.product",
56
+ "search_read",
57
+ [[]],
58
+ fields=["name"],
59
+ limit=50,
60
+ )
61
+ except Exception:
62
+ logger.exception(
63
+ "Failed to fetch Odoo products via API for user %s (profile_id=%s, host=%s, database=%s)",
64
+ getattr(request.user, "pk", None),
65
+ getattr(profile, "pk", None),
66
+ getattr(profile, "host", None),
67
+ getattr(profile, "database", None),
68
+ )
69
+ return JsonResponse({"detail": "Unable to fetch products"}, status=502)
70
+ items = [{"id": p.get("id"), "name": p.get("name", "")} for p in products]
71
+ return JsonResponse(items, safe=False)
72
+
73
+
74
+ @staff_member_required
75
+ def odoo_quote_report(request):
76
+ """Display a consolidated quote report from the user's Odoo instance."""
77
+
78
+ profile = getattr(request.user, "odoo_profile", None)
79
+ context = {
80
+ "title": _("Quote Report"),
81
+ "profile": profile,
82
+ "error": None,
83
+ "template_stats": [],
84
+ "quotes": [],
85
+ "recent_products": [],
86
+ "installed_modules": [],
87
+ "profile_url": "",
88
+ }
89
+
90
+ profile_admin = admin_site._registry.get(OdooProfile)
91
+ if profile_admin is not None:
92
+ try:
93
+ context["profile_url"] = profile_admin.get_my_profile_url(request)
94
+ except Exception: # pragma: no cover - defensive fallback
95
+ context["profile_url"] = ""
96
+
97
+ if not profile or not profile.is_verified:
98
+ context["error"] = _(
99
+ "Configure and verify your Odoo employee credentials before generating the report."
100
+ )
101
+ return TemplateResponse(
102
+ request, "admin/core/odoo_quote_report.html", context
103
+ )
104
+
105
+ def _parse_datetime(value):
106
+ if not value:
107
+ return None
108
+ if isinstance(value, datetime):
109
+ dt = value
110
+ else:
111
+ text = str(value)
112
+ try:
113
+ dt = datetime.fromisoformat(text)
114
+ except ValueError:
115
+ text_iso = text.replace(" ", "T")
116
+ try:
117
+ dt = datetime.fromisoformat(text_iso)
118
+ except ValueError:
119
+ for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
120
+ try:
121
+ dt = datetime.strptime(text, fmt)
122
+ break
123
+ except ValueError:
124
+ continue
125
+ else:
126
+ return None
127
+ if timezone.is_naive(dt):
128
+ tzinfo = getattr(timezone, "utc", datetime_timezone.utc)
129
+ dt = timezone.make_aware(dt, tzinfo)
130
+ return dt
131
+
132
+ try:
133
+ templates = profile.execute(
134
+ "sale.order.template",
135
+ "search_read",
136
+ [[]],
137
+ fields=["name"],
138
+ order="name asc",
139
+ )
140
+ template_usage = profile.execute(
141
+ "sale.order",
142
+ "read_group",
143
+ [[("sale_order_template_id", "!=", False)]],
144
+ ["sale_order_template_id"],
145
+ lazy=False,
146
+ )
147
+
148
+ usage_map = {}
149
+ for entry in template_usage:
150
+ template_info = entry.get("sale_order_template_id")
151
+ if not template_info:
152
+ continue
153
+ template_id = template_info[0]
154
+ usage_map[template_id] = entry.get(
155
+ "sale_order_template_id_count", 0
156
+ )
157
+
158
+ context["template_stats"] = [
159
+ {
160
+ "id": template.get("id"),
161
+ "name": template.get("name", ""),
162
+ "quote_count": usage_map.get(template.get("id"), 0),
163
+ }
164
+ for template in templates
165
+ ]
166
+
167
+ ninety_days_ago = timezone.now() - timedelta(days=90)
168
+ quotes = profile.execute(
169
+ "sale.order",
170
+ "search_read",
171
+ [
172
+ [
173
+ ("create_date", ">=", ninety_days_ago.strftime("%Y-%m-%d %H:%M:%S")),
174
+ ("state", "!=", "cancel"),
175
+ ("quote_sent", "=", False),
176
+ ]
177
+ ],
178
+ fields=[
179
+ "name",
180
+ "amount_total",
181
+ "partner_id",
182
+ "activity_type_id",
183
+ "activity_summary",
184
+ "tag_ids",
185
+ "create_date",
186
+ "currency_id",
187
+ ],
188
+ order="create_date desc",
189
+ )
190
+
191
+ tag_ids = set()
192
+ currency_ids = set()
193
+ for quote in quotes:
194
+ tag_ids.update(quote.get("tag_ids") or [])
195
+ currency_info = quote.get("currency_id")
196
+ if (
197
+ isinstance(currency_info, (list, tuple))
198
+ and len(currency_info) >= 1
199
+ and currency_info[0]
200
+ ):
201
+ currency_ids.add(currency_info[0])
202
+
203
+ tag_map: dict[int, str] = {}
204
+ if tag_ids:
205
+ tag_records = profile.execute(
206
+ "sale.order.tag",
207
+ "read",
208
+ list(tag_ids),
209
+ fields=["name"],
210
+ )
211
+ for tag in tag_records:
212
+ tag_id = tag.get("id")
213
+ if tag_id is not None:
214
+ tag_map[tag_id] = tag.get("name", "")
215
+
216
+ currency_map: dict[int, dict[str, str]] = {}
217
+ if currency_ids:
218
+ currency_records = profile.execute(
219
+ "res.currency",
220
+ "read",
221
+ list(currency_ids),
222
+ fields=["name", "symbol"],
223
+ )
224
+ for currency in currency_records:
225
+ currency_id = currency.get("id")
226
+ if currency_id is not None:
227
+ currency_map[currency_id] = {
228
+ "name": currency.get("name", ""),
229
+ "symbol": currency.get("symbol", ""),
230
+ }
231
+
232
+ prepared_quotes = []
233
+ for quote in quotes:
234
+ partner = quote.get("partner_id")
235
+ customer = ""
236
+ if isinstance(partner, (list, tuple)) and len(partner) >= 2:
237
+ customer = partner[1]
238
+
239
+ activity_type = quote.get("activity_type_id")
240
+ activity_name = ""
241
+ if isinstance(activity_type, (list, tuple)) and len(activity_type) >= 2:
242
+ activity_name = activity_type[1]
243
+
244
+ activity_summary = quote.get("activity_summary") or ""
245
+ activity_value = activity_summary or activity_name
246
+
247
+ quote_tags = [
248
+ tag_map.get(tag_id, str(tag_id))
249
+ for tag_id in quote.get("tag_ids") or []
250
+ ]
251
+
252
+ currency_info = quote.get("currency_id")
253
+ currency_label = ""
254
+ if isinstance(currency_info, (list, tuple)) and currency_info:
255
+ currency_id = currency_info[0]
256
+ currency_details = currency_map.get(currency_id, {})
257
+ currency_label = (
258
+ currency_details.get("symbol")
259
+ or currency_details.get("name")
260
+ or (currency_info[1] if len(currency_info) >= 2 else "")
261
+ )
262
+
263
+ amount_total = quote.get("amount_total") or 0
264
+ if currency_label:
265
+ total_display = f"{currency_label}{amount_total:,.2f}"
266
+ else:
267
+ total_display = f"{amount_total:,.2f}"
268
+
269
+ prepared_quotes.append(
270
+ {
271
+ "name": quote.get("name", ""),
272
+ "customer": customer,
273
+ "activity": activity_value,
274
+ "tags": quote_tags,
275
+ "create_date": _parse_datetime(quote.get("create_date")),
276
+ "total": amount_total,
277
+ "total_display": total_display,
278
+ }
279
+ )
280
+
281
+ context["quotes"] = prepared_quotes
282
+
283
+ products = profile.execute(
284
+ "product.product",
285
+ "search_read",
286
+ [[]],
287
+ fields=["name", "default_code", "write_date", "create_date"],
288
+ limit=10,
289
+ order="write_date desc, create_date desc",
290
+ )
291
+ context["recent_products"] = [
292
+ {
293
+ "name": product.get("name", ""),
294
+ "default_code": product.get("default_code", ""),
295
+ "create_date": _parse_datetime(product.get("create_date")),
296
+ "write_date": _parse_datetime(product.get("write_date")),
297
+ }
298
+ for product in products
299
+ ]
300
+
301
+ modules = profile.execute(
302
+ "ir.module.module",
303
+ "search_read",
304
+ [[("state", "=", "installed")]],
305
+ fields=["name", "shortdesc", "latest_version", "author"],
306
+ order="name asc",
307
+ )
308
+ context["installed_modules"] = [
309
+ {
310
+ "name": module.get("name", ""),
311
+ "shortdesc": module.get("shortdesc", ""),
312
+ "latest_version": module.get("latest_version", ""),
313
+ "author": module.get("author", ""),
314
+ }
315
+ for module in modules
316
+ ]
317
+
318
+ except Exception:
319
+ logger.exception(
320
+ "Failed to build Odoo quote report for user %s (profile_id=%s)",
321
+ getattr(request.user, "pk", None),
322
+ getattr(profile, "pk", None),
323
+ )
324
+ context["error"] = _("Unable to generate the quote report from Odoo.")
325
+ return TemplateResponse(
326
+ request,
327
+ "admin/core/odoo_quote_report.html",
328
+ context,
329
+ status=502,
330
+ )
331
+
332
+ return TemplateResponse(request, "admin/core/odoo_quote_report.html", context)
333
+
334
+
335
+ @require_GET
336
+ def version_info(request):
337
+ """Return the running application version and Git revision."""
338
+
339
+ version = ""
340
+ version_path = Path(settings.BASE_DIR) / "VERSION"
341
+ if version_path.exists():
342
+ version = version_path.read_text(encoding="utf-8").strip()
343
+ return JsonResponse(
344
+ {
345
+ "version": version,
346
+ "revision": revision.get_revision(),
347
+ }
348
+ )
349
+
350
+
351
+ from . import release as release_utils
352
+ from .log_paths import select_log_dir
353
+
354
+
355
+ TODO_FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"
356
+
357
+
358
+ DIRTY_COMMIT_DEFAULT_MESSAGE = "chore: commit pending changes"
359
+
360
+
361
+ DIRTY_STATUS_LABELS = {
362
+ "A": _("Added"),
363
+ "C": _("Copied"),
364
+ "D": _("Deleted"),
365
+ "M": _("Modified"),
366
+ "R": _("Renamed"),
367
+ "U": _("Updated"),
368
+ "??": _("Untracked"),
369
+ }
370
+
371
+
372
+ def _append_log(path: Path, message: str) -> None:
373
+ path.parent.mkdir(parents=True, exist_ok=True)
374
+ with path.open("a", encoding="utf-8") as fh:
375
+ fh.write(message + "\n")
376
+
377
+
378
+ def _release_log_name(package_name: str, version: str) -> str:
379
+ return f"pr.{package_name}.v{version}.log"
380
+
381
+
382
+ def _ensure_log_directory(path: Path) -> tuple[bool, OSError | None]:
383
+ """Return whether ``path`` is writable along with the triggering error."""
384
+
385
+ try:
386
+ path.mkdir(parents=True, exist_ok=True)
387
+ except OSError as exc:
388
+ return False, exc
389
+
390
+ probe = path / f".permcheck_{uuid.uuid4().hex}"
391
+ try:
392
+ with probe.open("w", encoding="utf-8") as fh:
393
+ fh.write("")
394
+ except OSError as exc:
395
+ return False, exc
396
+ else:
397
+ try:
398
+ probe.unlink()
399
+ except OSError:
400
+ pass
401
+ return True, None
402
+
403
+
404
+ def _resolve_release_log_dir(preferred: Path) -> tuple[Path, str | None]:
405
+ """Return a writable log directory for the release publish flow."""
406
+
407
+ writable, error = _ensure_log_directory(preferred)
408
+ if writable:
409
+ return preferred, None
410
+
411
+ logger.warning(
412
+ "Release log directory %s is not writable: %s", preferred, error
413
+ )
414
+
415
+ env_override = os.environ.pop("ARTHEXIS_LOG_DIR", None)
416
+ fallback = select_log_dir(Path(settings.BASE_DIR))
417
+ if env_override and Path(env_override) != fallback:
418
+ os.environ["ARTHEXIS_LOG_DIR"] = str(fallback)
419
+
420
+ if fallback == preferred:
421
+ if error:
422
+ raise error
423
+ raise PermissionError(f"Release log directory {preferred} is not writable")
424
+
425
+ fallback_writable, fallback_error = _ensure_log_directory(fallback)
426
+ if not fallback_writable:
427
+ raise fallback_error or PermissionError(
428
+ f"Release log directory {fallback} is not writable"
429
+ )
430
+
431
+ settings.LOG_DIR = fallback
432
+ warning = (
433
+ f"Release log directory {preferred} is not writable; using {fallback}"
434
+ )
435
+ logger.warning(warning)
436
+ return fallback, warning
437
+
438
+
439
def _sync_with_origin_main(log_path: Path) -> None:
    """Ensure the current branch is rebased onto ``origin/main``.

    Every git action is appended to ``log_path``.  When the rebase fails it
    is aborted, step-by-step manual recovery instructions are written to the
    log, and a generic ``Exception`` is raised for the publish workflow.
    """

    if not _has_remote("origin"):
        _append_log(log_path, "No git remote configured; skipping sync with origin/main")
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        subprocess.run(["git", "rebase", "origin/main"], check=True)
        _append_log(log_path, "Rebased current branch onto origin/main")
    except subprocess.CalledProcessError as exc:
        # Abort first so the worktree is left in a usable state.
        subprocess.run(["git", "rebase", "--abort"], check=False)
        _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")

        # NOTE: stdout/stderr are only populated when the failing command
        # was run with output capture; git output may therefore be empty.
        stdout = (exc.stdout or "").strip()
        stderr = (exc.stderr or "").strip()
        if stdout:
            _append_log(log_path, "git output:\n" + stdout)
        if stderr:
            _append_log(log_path, "git errors:\n" + stderr)

        branch = _current_branch() or "(detached HEAD)"
        instructions = [
            "Manual intervention required to finish syncing with origin/main.",
            "Ensure you are on the branch you intend to publish (normally `main`; currently "
            f"{branch}).",
            "Then run these commands from the repository root:",
            "    git fetch origin main",
            "    git rebase origin/main",
            "Resolve any conflicts (use `git status` to review files) and continue the rebase.",
        ]

        if branch != "main" and branch != "(detached HEAD)":
            instructions.append(
                "If this branch should mirror main, push the rebased changes with "
                f"`git push origin {branch}:main`."
            )
        else:
            instructions.append("Push the rebased branch with `git push origin main`.")

        instructions.append(
            "If push authentication fails, verify your git remote permissions and SSH keys "
            "for origin/main before retrying the publish flow."
        )
        _append_log(log_path, "\n".join(instructions))

        raise Exception("Rebase onto main failed") from exc
488
+
489
+
490
def _clean_repo() -> None:
    """Return the git repository to a clean state."""
    # Discard tracked modifications, then remove untracked files/dirs.
    # check=False: cleanup is best-effort during error recovery.
    for args in (["git", "reset", "--hard"], ["git", "clean", "-fd"]):
        subprocess.run(args, check=False)
494
+
495
+
496
+ def _format_path(path: Path) -> str:
497
+ try:
498
+ return str(path.resolve().relative_to(Path.cwd()))
499
+ except ValueError:
500
+ return str(path)
501
+
502
+
503
+ def _git_stdout(args: Sequence[str]) -> str:
504
+ proc = subprocess.run(args, check=True, capture_output=True, text=True)
505
+ return (proc.stdout or "").strip()
506
+
507
+
508
def _has_remote(remote: str) -> bool:
    """Return ``True`` when ``remote`` is configured for this repository."""
    listing = subprocess.run(
        ["git", "remote"],
        check=True,
        capture_output=True,
        text=True,
    )
    configured = {
        name.strip() for name in listing.stdout.splitlines() if name.strip()
    }
    return remote in configured
517
+
518
+
519
def _current_branch() -> str | None:
    """Return the checked-out branch name, or ``None`` for a detached HEAD."""
    name = _git_stdout(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    # git prints the literal string "HEAD" when no branch is checked out.
    return None if name == "HEAD" else name
524
+
525
+
526
def _has_upstream(branch: str) -> bool:
    """Return ``True`` when ``branch`` has an upstream tracking branch."""
    result = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", f"{branch}@{{upstream}}"],
        capture_output=True,
        text=True,
        check=False,
    )
    # A zero exit status means git resolved the upstream ref.
    return result.returncode == 0
534
+
535
+
536
def _collect_dirty_files() -> list[dict[str, str]]:
    """Describe uncommitted changes via ``git status --porcelain``."""
    status = subprocess.run(
        ["git", "status", "--porcelain"],
        capture_output=True,
        text=True,
        check=True,
    )
    entries: list[dict[str, str]] = []
    for raw in status.stdout.splitlines():
        if not raw.strip():
            continue
        # Porcelain format: two status columns, one space, then the path.
        code = raw[:2]
        short = code.strip() or code
        entries.append(
            {
                "path": raw[3:],
                "status": short,
                "status_label": DIRTY_STATUS_LABELS.get(short, short),
            }
        )
    return entries
558
+
559
+
560
def _format_subprocess_error(exc: subprocess.CalledProcessError) -> str:
    """Extract the most informative message from a failed subprocess.

    Prefers stderr, then stdout, then the exception text; falls back to
    the exception text again when the chosen stream is whitespace-only.
    """
    message = (exc.stderr or exc.stdout or str(exc)).strip()
    if not message:
        message = str(exc)
    return message
562
+
563
+
564
def _git_authentication_missing(exc: subprocess.CalledProcessError) -> bool:
    """Heuristically detect git failures caused by missing credentials."""
    output = (exc.stderr or exc.stdout or "").strip().lower()
    if not output:
        return False
    markers = (
        "could not read username",
        "authentication failed",
        "fatal: authentication failed",
        "terminal prompts disabled",
    )
    return any(marker in output for marker in markers)
575
+
576
+
577
def _ensure_origin_main_unchanged(log_path: Path) -> None:
    """Verify that ``origin/main`` has not advanced during the release.

    Fetches ``origin/main`` and compares its tip against the merge base
    with ``HEAD``: when they differ, someone pushed to main after our last
    sync and the release must be restarted.  All outcomes are appended to
    ``log_path``; failures raise a generic ``Exception``.
    """

    if not _has_remote("origin"):
        _append_log(
            log_path, "No git remote configured; skipping origin/main verification"
        )
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        origin_main = _git_stdout(["git", "rev-parse", "origin/main"])
        merge_base = _git_stdout(["git", "merge-base", "HEAD", "origin/main"])
    except subprocess.CalledProcessError as exc:
        # getattr: fetch was run without capture, so stderr/stdout may be absent.
        details = (getattr(exc, "stderr", "") or getattr(exc, "stdout", "") or str(exc)).strip()
        if details:
            _append_log(log_path, f"Failed to verify origin/main status: {details}")
        else:  # pragma: no cover - defensive fallback
            _append_log(log_path, "Failed to verify origin/main status")
        raise Exception("Unable to verify origin/main status") from exc

    # HEAD no longer contains origin/main's tip => main moved under us.
    if origin_main != merge_base:
        _append_log(log_path, "origin/main advanced during release; restart required")
        raise Exception("origin/main changed during release; restart required")

    _append_log(log_path, "origin/main unchanged since last sync")
604
+
605
+
606
def _next_patch_version(version: str) -> str:
    """Return ``version`` with its patch (micro) component incremented.

    Non-PEP-440 strings fall back to bumping the right-most purely numeric
    dot-separated segment; the input is returned unchanged when no numeric
    segment exists.
    """
    from packaging.version import InvalidVersion, Version

    try:
        parsed = Version(version)
    except InvalidVersion:
        segments = version.split(".")
        # Scan from the right for the first all-digit segment and bump it.
        for idx in reversed(range(len(segments))):
            if segments[idx].isdigit():
                segments[idx] = str(int(segments[idx]) + 1)
                return ".".join(segments)
        return version
    return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
620
+
621
+
622
def _write_todo_fixture(todo: Todo) -> Path:
    """Serialize ``todo`` to a JSON fixture file and return its path."""
    # Treat dots as word separators so they do not survive slugification.
    slug = slugify(todo.request.replace(".", " ")).replace("-", "_") or "todo"
    fixture_path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
    fixture_path.parent.mkdir(parents=True, exist_ok=True)
    payload = [
        {
            "model": "core.todo",
            "fields": {
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            },
        }
    ]
    fixture_path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8")
    return fixture_path
641
+
642
+
643
+ def _should_use_python_changelog(exc: OSError) -> bool:
644
+ winerror = getattr(exc, "winerror", None)
645
+ if winerror in {193}:
646
+ return True
647
+ return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
648
+
649
+
650
def _generate_changelog_with_python(log_path: Path) -> None:
    """Rebuild CHANGELOG.rst with the pure-Python changelog generator."""
    _append_log(log_path, "Falling back to Python changelog generator")
    changelog = Path("CHANGELOG.rst")
    existing = changelog.read_text(encoding="utf-8") if changelog.exists() else None
    spec = changelog_utils.determine_range_spec()
    sections = changelog_utils.collect_sections(
        range_spec=spec, previous_text=existing
    )
    rendered = changelog_utils.render_changelog(sections)
    # Guarantee a trailing newline at end of file.
    if not rendered.endswith("\n"):
        rendered += "\n"
    changelog.write_text(rendered, encoding="utf-8")
    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
661
+
662
+
663
def _ensure_release_todo(
    release, *, previous_version: str | None = None
) -> tuple[Todo, Path]:
    """Create or refresh the "Create release …" TODO for the next version.

    The target version is the patch bump of ``release.version`` unless the
    bump of ``previous_version`` already equals ``release.version`` (i.e.
    the release being published *is* the pending bump), in which case the
    TODO points at ``release.version`` itself.

    Returns the (possibly undeleted/reset) :class:`Todo` and the path of
    the JSON fixture written for it.
    """

    previous_version = (previous_version or "").strip()
    target_version = _next_patch_version(release.version)
    if previous_version:
        incremented_previous = _next_patch_version(previous_version)
        if incremented_previous == release.version:
            target_version = release.version
    request = f"Create release {release.package.name} {target_version}"
    try:
        url = reverse("admin:core_packagerelease_changelist")
    except NoReverseMatch:
        # Admin URLs may be unavailable (e.g. admin not installed).
        url = ""
    # NOTE: use a named throwaway instead of `_`, which would shadow the
    # module-level gettext alias inside this function.
    todo, _created = Todo.all_objects.update_or_create(
        request__iexact=request,
        defaults={
            "request": request,
            "url": url,
            "request_details": "",
            "is_seed_data": True,
            "is_deleted": False,
            "is_user_data": False,
            "done_on": None,
            "on_done_condition": "",
        },
    )
    fixture_path = _write_todo_fixture(todo)
    return todo, fixture_path
692
+
693
+
694
def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
    """Ensure ``release`` matches the repository revision and version.

    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
    ``True`` when any field changed and ``previous_version`` is the version
    before synchronization.
    """

    from packaging.version import InvalidVersion, Version

    previous_version = release.version
    updated_fields: set[str] = set()

    # Parse the repository's VERSION file, tolerating invalid contents.
    repo_version: Version | None = None
    version_path = Path("VERSION")
    if version_path.exists():
        try:
            repo_version = Version(version_path.read_text(encoding="utf-8").strip())
        except InvalidVersion:
            repo_version = None

    try:
        release_version = Version(release.version)
    except InvalidVersion:
        release_version = None

    # The release must be at least one patch ahead of the repo's VERSION;
    # bump it when it lags behind (or cannot be parsed).
    if repo_version is not None:
        bumped_repo_version = Version(
            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
        )
        if release_version is None or release_version < bumped_repo_version:
            release.version = str(bumped_repo_version)
            release_version = bumped_repo_version
            updated_fields.add("version")

    # Record the current VCS revision on the release when it changed.
    current_revision = revision.get_revision()
    if current_revision and current_revision != release.revision:
        release.revision = current_revision
        updated_fields.add("revision")

    if updated_fields:
        release.save(update_fields=list(updated_fields))
        PackageRelease.dump_fixture()

    # Reactivate the owning package if it was marked inactive.
    package_updated = False
    if release.package_id and not release.package.is_active:
        release.package.is_active = True
        release.package.save(update_fields=["is_active"])
        package_updated = True

    # Write the (possibly bumped) version back to the VERSION file.
    version_updated = False
    if release.version:
        current = ""
        if version_path.exists():
            current = version_path.read_text(encoding="utf-8").strip()
        if current != release.version:
            version_path.write_text(f"{release.version}\n", encoding="utf-8")
            version_updated = True

    return bool(updated_fields or version_updated or package_updated), previous_version
754
+
755
+
756
def _changelog_notes(version: str) -> str:
    """Return the CHANGELOG.rst notes recorded for ``version``, if any."""
    changelog = Path("CHANGELOG.rst")
    if not changelog.exists():
        return ""
    text = changelog.read_text(encoding="utf-8")
    return changelog_utils.extract_release_notes(text, version).strip()
764
+
765
+
766
class PendingTodos(Exception):
    """Raised when TODO items require acknowledgment before proceeding.

    Thrown by ``_step_check_todos`` when unfinished TODOs exist and the
    publish context has not set ``todos_ack``.
    """
768
+
769
+
770
class ApprovalRequired(Exception):
    """Raised when release manager approval is required before continuing.

    Thrown by ``_step_release_manager_approval`` while awaiting a decision
    or when publishing credentials are missing.
    """
772
+
773
+
774
class DirtyRepository(Exception):
    """Raised when the Git workspace has uncommitted changes.

    Thrown by ``_step_check_version`` when non-fixture files are dirty and
    cannot be auto-committed.
    """
776
+
777
+
778
def _format_condition_failure(todo: Todo, result) -> str:
    """Return a localized error message for a failed TODO condition."""

    error = result.error
    resolved = result.resolved
    if error and resolved:
        detail = _("%(condition)s (error: %(error)s)") % {
            "condition": resolved,
            "error": error,
        }
    elif error:
        detail = _("Error: %(error)s") % {"error": error}
    elif resolved:
        detail = resolved
    else:
        # The condition evaluated cleanly but was simply falsy.
        detail = _("Condition evaluated to False")
    return _("Condition failed for %(todo)s: %(detail)s") % {
        "todo": todo.request,
        "detail": detail,
    }
796
+
797
+
798
def _get_return_url(request) -> str:
    """Return a safe URL to redirect back to after completing a TODO.

    Prefers an explicit ``next`` parameter (query string, then form data),
    then the HTTP referer; falls back to the admin index.  Every candidate
    is validated against the request host before being trusted.
    """

    options = [request.GET.get("next"), request.POST.get("next")]
    referer = request.META.get("HTTP_REFERER")
    if referer:
        options.append(referer)

    allowed = {request.get_host()}
    secure = request.is_secure()
    for option in options:
        if option and url_has_allowed_host_and_scheme(
            option,
            allowed_hosts=allowed,
            require_https=secure,
        ):
            return option
    return resolve_url("admin:index")
816
+
817
+
818
def _step_check_todos(release, ctx, log_path: Path) -> None:
    """Publish step: block on pending TODOs, then clear TODOs and fixtures.

    Raises :class:`PendingTodos` (after exposing the pending items in
    ``ctx["todos"]``) until the operator sets ``ctx["todos_ack"]``.  Once
    acknowledged, deletes all non-deleted TODOs, removes their fixture
    files, and commits the removals.
    """
    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    if pending_qs.exists():
        ctx["todos"] = list(
            pending_qs.values("id", "request", "url", "request_details")
        )
        if not ctx.get("todos_ack"):
            raise PendingTodos()
    todos = list(Todo.objects.filter(is_deleted=False))
    # Delete one by one so per-instance delete() logic runs.
    for todo in todos:
        todo.delete()
    removed = []
    for path in TODO_FIXTURE_DIR.glob("todos__*.json"):
        removed.append(str(path))
        path.unlink()
    if removed:
        # check=False: a failed commit should not abort the release here.
        subprocess.run(["git", "add", *removed], check=False)
        subprocess.run(
            ["git", "commit", "-m", "chore: remove TODO fixtures"],
            check=False,
        )
    ctx.pop("todos", None)
    ctx["todos_ack"] = True
841
+
842
+
843
def _step_check_version(release, ctx, log_path: Path) -> None:
    """Publish step: sync with origin/main, require a clean tree, and
    verify the release version is new.

    Behavior:
    - Attempts a rebase onto origin/main (the error is deferred, not
      raised immediately, so dirty-tree handling can run first).
    - Auto-commits the tree when *only* fixture JSON files are dirty;
      otherwise records the dirty files in ``ctx`` and raises
      :class:`DirtyRepository`.
    - Rejects versions older than the repository VERSION file.
    - Best-effort PyPI check: raises only when the version already has
      non-yanked files on PyPI; network errors are merely logged.
    """
    from . import release as release_utils
    from packaging.version import InvalidVersion, Version

    sync_error: Optional[Exception] = None
    retry_sync = False
    try:
        _sync_with_origin_main(log_path)
    except Exception as exc:
        # Defer: committing fixture-only changes below may let a retry succeed.
        sync_error = exc

    if not release_utils._git_clean():
        dirty_entries = _collect_dirty_files()
        files = [entry["path"] for entry in dirty_entries]
        fixture_files = [
            f
            for f in files
            if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
        ]
        if files and len(fixture_files) == len(files):
            # Every dirty file is a fixture: summarize and auto-commit them.
            summary = []
            for f in fixture_files:
                path = Path(f)
                try:
                    data = json.loads(path.read_text(encoding="utf-8"))
                except Exception:
                    count = 0
                    models: list[str] = []
                else:
                    if isinstance(data, list):
                        count = len(data)
                        models = sorted(
                            {
                                obj.get("model", "")
                                for obj in data
                                if isinstance(obj, dict)
                            }
                        )
                    elif isinstance(data, dict):
                        count = 1
                        models = [data.get("model", "")]
                    else:  # pragma: no cover - unexpected structure
                        count = 0
                        models = []
                summary.append({"path": f, "count": count, "models": models})

            ctx["fixtures"] = summary
            _append_log(
                log_path,
                "Committing fixture changes: " + ", ".join(fixture_files),
            )
            subprocess.run(["git", "add", *fixture_files], check=True)
            subprocess.run(
                ["git", "commit", "-m", "chore: update fixtures"], check=True
            )
            _append_log(log_path, "Fixture changes committed")
            ctx.pop("dirty_files", None)
            ctx.pop("dirty_commit_error", None)
            retry_sync = True
        else:
            # Mixed/non-fixture changes require operator action.
            ctx["dirty_files"] = dirty_entries
            ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)
            ctx.pop("fixtures", None)
            ctx.pop("dirty_commit_error", None)
            if dirty_entries:
                details = ", ".join(entry["path"] for entry in dirty_entries)
            else:
                details = ""
            message = "Git repository has uncommitted changes"
            if details:
                message += f": {details}"
            # Avoid spamming the log when the step is retried unchanged.
            if ctx.get("dirty_log_message") != message:
                _append_log(log_path, message)
                ctx["dirty_log_message"] = message
            raise DirtyRepository()
    else:
        ctx.pop("dirty_files", None)
        ctx.pop("dirty_commit_error", None)
        ctx.pop("dirty_log_message", None)

    if retry_sync and sync_error is not None:
        try:
            _sync_with_origin_main(log_path)
        except Exception as exc:
            sync_error = exc
        else:
            sync_error = None

    if sync_error is not None:
        raise sync_error

    version_path = Path("VERSION")
    if version_path.exists():
        current = version_path.read_text(encoding="utf-8").strip()
        if current and Version(release.version) < Version(current):
            raise Exception(
                f"Version {release.version} is older than existing {current}"
            )

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(
                f"https://pypi.org/pypi/{release.package.name}/json",
                timeout=PYPI_REQUEST_TIMEOUT,
            )
            if resp.ok:
                data = resp.json()
                releases = data.get("releases", {})
                try:
                    target_version = Version(release.version)
                except InvalidVersion:
                    target_version = None

                # Compare both literally and as parsed versions so that
                # e.g. "1.0" matches "1.0.0" on PyPI.
                for candidate, files in releases.items():
                    same_version = candidate == release.version
                    if target_version is not None and not same_version:
                        try:
                            same_version = Version(candidate) == target_version
                        except InvalidVersion:
                            same_version = False
                    if not same_version:
                        continue

                    # Yanked-only releases do not block re-publishing.
                    has_available_files = any(
                        isinstance(file_data, dict)
                        and not file_data.get("yanked", False)
                        for file_data in files or []
                    )
                    if has_available_files:
                        raise Exception(
                            f"Version {release.version} already on PyPI"
                        )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
        else:
            _append_log(
                log_path,
                f"Version {release.version} not published on PyPI",
            )
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")
988
+
989
+
990
def _step_handle_migrations(release, ctx, log_path: Path) -> None:
    """Publish step: record the manual migration review in the log."""
    for line in (
        "Freeze, squash and approve migrations",
        "Migration review acknowledged (manual step)",
    ):
        _append_log(log_path, line)
993
+
994
+
995
def _step_changelog_docs(release, ctx, log_path: Path) -> None:
    """Publish step: record the manual CHANGELOG/docs review in the log."""
    for line in (
        "Compose CHANGELOG and documentation",
        "CHANGELOG and documentation review recorded",
    ):
        _append_log(log_path, line)
998
+
999
+
1000
def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
    """Publish step: regenerate the changelog, bump VERSION, commit, and
    stage the next-release TODO.

    Skipped entirely on dry runs.  The shell changelog generator is tried
    first; recoverable OSErrors fall back to the pure-Python generator.
    When nothing actually changed, the staged files are unstaged again so
    the tree stays clean.
    """
    _append_log(log_path, "Execute pre-release actions")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping pre-release actions")
        return
    _sync_with_origin_main(log_path)
    try:
        subprocess.run(["scripts/generate-changelog.sh"], check=True)
    except OSError as exc:
        if _should_use_python_changelog(exc):
            _append_log(
                log_path,
                f"scripts/generate-changelog.sh failed: {exc}",
            )
            _generate_changelog_with_python(log_path)
        else:  # pragma: no cover - unexpected OSError
            raise
    else:
        _append_log(
            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
        )
    notes = _changelog_notes(release.version)
    staged_release_fixtures: list[Path] = []
    if notes != release.changelog:
        # Persist the freshly extracted notes and stage the regenerated
        # release fixtures alongside them.
        release.changelog = notes
        release.save(update_fields=["changelog"])
        PackageRelease.dump_fixture()
        _append_log(log_path, f"Recorded changelog notes for v{release.version}")
        release_fixture_paths = sorted(
            Path("core/fixtures").glob("releases__*.json")
        )
        if release_fixture_paths:
            subprocess.run(
                ["git", "add", *[str(path) for path in release_fixture_paths]],
                check=True,
            )
            staged_release_fixtures = release_fixture_paths
            formatted = ", ".join(_format_path(path) for path in release_fixture_paths)
            _append_log(
                log_path,
                "Staged release fixtures " + formatted,
            )
    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
    _append_log(log_path, "Staged CHANGELOG.rst for commit")
    version_path = Path("VERSION")
    previous_version_text = ""
    if version_path.exists():
        previous_version_text = version_path.read_text(encoding="utf-8").strip()
    # Prefer the version captured before syncing, when a caller stashed it
    # on the release instance; otherwise use the file's current content.
    repo_version_before_sync = getattr(
        release, "_repo_version_before_sync", previous_version_text
    )
    version_path.write_text(f"{release.version}\n", encoding="utf-8")
    _append_log(log_path, f"Updated VERSION file to {release.version}")
    subprocess.run(["git", "add", "VERSION"], check=True)
    _append_log(log_path, "Staged VERSION for commit")
    # Non-zero exit from `git diff --cached --quiet` means something is staged.
    diff = subprocess.run(["git", "diff", "--cached", "--quiet"], check=False)
    if diff.returncode != 0:
        subprocess.run(
            ["git", "commit", "-m", f"pre-release commit {release.version}"],
            check=True,
        )
        _append_log(log_path, f"Committed VERSION update for {release.version}")
    else:
        _append_log(
            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
        )
        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
        _append_log(log_path, "Unstaged CHANGELOG.rst")
        subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
        _append_log(log_path, "Unstaged VERSION file")
        for path in staged_release_fixtures:
            subprocess.run(["git", "reset", "HEAD", str(path)], check=False)
            _append_log(log_path, f"Unstaged release fixture {_format_path(path)}")
    todo, fixture_path = _ensure_release_todo(
        release, previous_version=repo_version_before_sync
    )
    fixture_display = _format_path(fixture_path)
    _append_log(log_path, f"Added TODO: {todo.request}")
    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
    subprocess.run(["git", "add", str(fixture_path)], check=True)
    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
    fixture_diff = subprocess.run(
        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
        check=False,
    )
    if fixture_diff.returncode != 0:
        commit_message = f"chore: add release TODO for {release.package.name}"
        subprocess.run(["git", "commit", "-m", commit_message], check=True)
        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
    else:
        _append_log(
            log_path,
            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
        )
    _append_log(log_path, "Pre-release actions complete")
1095
+
1096
+
1097
def _step_run_tests(release, ctx, log_path: Path) -> None:
    """Publish step: record completion of the full test suite in the log."""
    for line in (
        "Complete test suite with --all flag",
        "Test suite completion acknowledged",
    ):
        _append_log(log_path, line)
1100
+
1101
+
1102
def _step_promote_build(release, ctx, log_path: Path) -> None:
    """Publish step: build release artifacts, commit metadata, and push.

    Verifies origin/main has not moved, promotes the build, commits any
    VERSION/fixture changes, and pushes them (tolerating missing git
    credentials).  Any failure triggers ``_clean_repo`` before re-raising.
    Finally renames the log file to the canonical release log name and
    stores it in ``ctx["log"]``.
    """
    from . import release as release_utils

    _append_log(log_path, "Generating build files")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping build promotion")
        return
    try:
        _ensure_origin_main_unchanged(log_path)
        release_utils.promote(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(),
        )
        _append_log(
            log_path,
            f"Generated release artifacts for v{release.version}",
        )
        from glob import glob

        paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
        diff = subprocess.run(
            ["git", "status", "--porcelain", *paths],
            capture_output=True,
            text=True,
        )
        if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
                    "commit",
                    "-m",
                    f"chore: update release metadata for v{release.version}",
                ],
                check=True,
            )
            _append_log(
                log_path,
                f"Committed release metadata for v{release.version}",
            )
            if _has_remote("origin"):
                try:
                    # Pick a push command that works for detached HEAD,
                    # tracked branches, and branches with no upstream.
                    branch = _current_branch()
                    if branch is None:
                        push_cmd = ["git", "push", "origin", "HEAD"]
                    elif _has_upstream(branch):
                        push_cmd = ["git", "push"]
                    else:
                        push_cmd = ["git", "push", "--set-upstream", "origin", branch]
                    subprocess.run(push_cmd, check=True, capture_output=True, text=True)
                except subprocess.CalledProcessError as exc:
                    details = _format_subprocess_error(exc)
                    if _git_authentication_missing(exc):
                        # Missing credentials are tolerated: the commit
                        # stays local and the push is skipped.
                        _append_log(
                            log_path,
                            "Authentication is required to push release changes to origin; skipping push",
                        )
                        if details:
                            _append_log(log_path, details)
                    else:
                        _append_log(
                            log_path, f"Failed to push release changes to origin: {details}"
                        )
                        raise Exception("Failed to push release changes") from exc
                else:
                    _append_log(log_path, "Pushed release changes to origin")
            else:
                _append_log(
                    log_path,
                    "No git remote configured; skipping push of release changes",
                )
        PackageRelease.dump_fixture()
        _append_log(log_path, "Updated release fixtures")
    except Exception:
        # Leave the repository clean before propagating the failure.
        _clean_repo()
        raise
    # Rename the in-progress log to the canonical release log name.
    target_name = _release_log_name(release.package.name, release.version)
    new_log = log_path.with_name(target_name)
    if log_path != new_log:
        if new_log.exists():
            new_log.unlink()
        log_path.rename(new_log)
    else:
        new_log = log_path
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")
1190
+
1191
+
1192
def _step_release_manager_approval(release, ctx, log_path: Path) -> None:
    """Publish step: gate on an explicit release-manager decision.

    Raises :class:`ApprovalRequired` while credentials are missing or no
    decision has been recorded in ``ctx["release_approval"]``.  "approved"
    lets the flow continue; "rejected" raises ``RuntimeError`` so the
    whole release must be restarted.
    """
    if release.to_credentials() is None:
        # Cannot publish without credentials: clear any stale decision and
        # keep waiting (log the problem only once).
        ctx.pop("release_approval", None)
        if not ctx.get("approval_credentials_missing"):
            _append_log(log_path, "Release manager publishing credentials missing")
            ctx["approval_credentials_missing"] = True
        ctx["awaiting_approval"] = True
        raise ApprovalRequired()

    missing_before = ctx.pop("approval_credentials_missing", None)
    if missing_before:
        ctx.pop("awaiting_approval", None)
    decision = ctx.get("release_approval")
    if decision == "approved":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager approved release")
        return
    if decision == "rejected":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager rejected release")
        raise RuntimeError(
            _("Release manager rejected the release. Restart required."),
        )
    # No decision yet: log the wait only on the first pass.
    if not ctx.get("awaiting_approval"):
        ctx["awaiting_approval"] = True
        _append_log(log_path, "Awaiting release manager approval")
    else:
        ctx["awaiting_approval"] = True
    raise ApprovalRequired()
1225
+
1226
+
1227
def _step_publish(release, ctx, log_path: Path) -> None:
    """Publish step: upload the distribution and record release URLs.

    Dry runs upload to Test PyPI (building artifacts first when ``dist/``
    is missing, and restoring VERSION/pyproject.toml afterwards) and skip
    all metadata updates.  Real runs publish to the release's configured
    targets, then persist pypi_url/github_url/release_on.
    """
    from . import release as release_utils

    if ctx.get("dry_run"):
        test_repository_url = os.environ.get(
            "PYPI_TEST_REPOSITORY_URL", "https://test.pypi.org/legacy/"
        )
        # Prefer the release's own credentials; fall back to PYPI_TEST_*
        # environment variables, and finally to anonymous (None).
        test_creds = release.to_credentials()
        if not (test_creds and test_creds.has_auth()):
            test_creds = release_utils.Credentials(
                token=os.environ.get("PYPI_TEST_API_TOKEN"),
                username=os.environ.get("PYPI_TEST_USERNAME"),
                password=os.environ.get("PYPI_TEST_PASSWORD"),
            )
            if not test_creds.has_auth():
                test_creds = None
        target = release_utils.RepositoryTarget(
            name="Test PyPI",
            repository_url=(test_repository_url or None),
            credentials=test_creds,
            verify_availability=False,
        )
        label = target.repository_url or target.name
        dist_path = Path("dist")
        if not dist_path.exists():
            _append_log(log_path, "Dry run: building distribution artifacts")
            package = release.to_package()
            version_path = (
                Path(package.version_path)
                if package.version_path
                else Path("VERSION")
            )
            # Snapshot files the build mutates so they can be restored.
            original_version = (
                version_path.read_text(encoding="utf-8")
                if version_path.exists()
                else None
            )
            pyproject_path = Path("pyproject.toml")
            original_pyproject = (
                pyproject_path.read_text(encoding="utf-8")
                if pyproject_path.exists()
                else None
            )
            try:
                release_utils.build(
                    package=package,
                    version=release.version,
                    creds=release.to_credentials(),
                    dist=True,
                    tests=False,
                    twine=False,
                    git=False,
                    tag=False,
                    stash=True,
                )
            except release_utils.ReleaseError as exc:
                _append_log(
                    log_path,
                    f"Dry run: failed to prepare distribution artifacts ({exc})",
                )
                raise
            finally:
                # Restore (or remove) the files the build touched, even on failure.
                if original_version is None:
                    if version_path.exists():
                        version_path.unlink()
                else:
                    version_path.write_text(original_version, encoding="utf-8")
                if original_pyproject is None:
                    if pyproject_path.exists():
                        pyproject_path.unlink()
                else:
                    pyproject_path.write_text(original_pyproject, encoding="utf-8")
        _append_log(log_path, f"Dry run: uploading distribution to {label}")
        release_utils.publish(
            package=release.to_package(),
            version=release.version,
            creds=target.credentials or release.to_credentials(),
            repositories=[target],
        )
        _append_log(log_path, "Dry run: skipped release metadata updates")
        return

    targets = release.build_publish_targets()
    repo_labels = []
    for target in targets:
        label = target.name
        if target.repository_url:
            label = f"{label} ({target.repository_url})"
        repo_labels.append(label)
    if repo_labels:
        # NOTE(review): with exactly one repository the label is dropped
        # from the log line — looks unintentional; confirm intended wording.
        _append_log(
            log_path,
            "Uploading distribution" if len(repo_labels) == 1 else "Uploading distribution to: " + ", ".join(repo_labels),
        )
    else:
        _append_log(log_path, "Uploading distribution")
    release_utils.publish(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
        repositories=targets,
    )
    release.pypi_url = (
        f"https://pypi.org/project/{release.package.name}/{release.version}/"
    )
    # Secondary targets hosted on GitHub yield a GitHub package URL.
    github_url = ""
    for target in targets[1:]:
        if target.repository_url and "github.com" in target.repository_url:
            github_url = release.github_package_url() or ""
            break
    if github_url:
        release.github_url = github_url
    else:
        release.github_url = ""
    release.release_on = timezone.now()
    release.save(update_fields=["pypi_url", "github_url", "release_on"])
    PackageRelease.dump_fixture()
    _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
    if release.github_url:
        _append_log(log_path, f"Recorded GitHub URL: {release.github_url}")
    _append_log(log_path, "Upload complete")
1348
+
1349
+
1350
# Step label shared so other code can locate the migration-review entry
# inside PUBLISH_STEPS by name.
FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"


# Ordered (label, handler) pairs executed sequentially by the release
# publish workflow; each handler is called as handler(release, ctx, log_path).
PUBLISH_STEPS = [
    ("Check version number availability", _step_check_version),
    ("Confirm release TODO completion", _step_check_todos),
    (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
    ("Compose CHANGELOG and documentation", _step_changelog_docs),
    ("Execute pre-release actions", _step_pre_release_actions),
    ("Build release artifacts", _step_promote_build),
    ("Complete test suite with --all flag", _step_run_tests),
    ("Get Release Manager Approval", _step_release_manager_approval),
    ("Upload final build to PyPI", _step_publish),
]
1364
+
1365
+
1366
@csrf_exempt
def rfid_login(request):
    """Log a user in from an RFID tag value posted as JSON or form data."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    # Prefer a JSON body; fall back to regular form data when parsing fails.
    try:
        payload = json.loads(request.body.decode())
    except json.JSONDecodeError:
        payload = request.POST

    tag_value = payload.get("rfid")
    if not tag_value:
        return JsonResponse({"detail": "rfid required"}, status=400)

    matched_user = authenticate(request, rfid=tag_value)
    if matched_user is None:
        return JsonResponse({"detail": "invalid RFID"}, status=401)

    login(request, matched_user)
    return JsonResponse({"id": matched_user.id, "username": matched_user.username})
1388
+
1389
+
1390
@api_login_required
def product_list(request):
    """Return a JSON list of products."""

    rows = Product.objects.values("id", "name", "description", "renewal_period")
    return JsonResponse({"products": list(rows)})
1398
+
1399
+
1400
@csrf_exempt
@api_login_required
def add_live_subscription(request):
    """Attach a live subscription product to an energy account (JSON POST)."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    # Accept either a JSON body or ordinary form data.
    try:
        payload = json.loads(request.body.decode())
    except json.JSONDecodeError:
        payload = request.POST

    account_pk = payload.get("account_id")
    product_pk = payload.get("product_id")
    if not account_pk or not product_pk:
        return JsonResponse(
            {"detail": "account_id and product_id required"}, status=400
        )

    # Product is validated first so its 404 takes precedence when both ids
    # are bad.
    try:
        product = Product.objects.get(id=product_pk)
    except Product.DoesNotExist:
        return JsonResponse({"detail": "invalid product"}, status=404)

    try:
        account = EnergyAccount.objects.get(id=account_pk)
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    # The subscription starts today; the first renewal is one full period out.
    today = timezone.now().date()
    account.live_subscription_product = product
    account.live_subscription_start_date = today
    account.live_subscription_next_renewal = today + timedelta(
        days=product.renewal_period
    )
    account.save()

    return JsonResponse({"id": account.id})
1440
+
1441
+
1442
@api_login_required
def live_subscription_list(request):
    """Return live subscriptions for the given account_id."""

    account_pk = request.GET.get("account_id")
    if not account_pk:
        return JsonResponse({"detail": "account_id required"}, status=400)

    try:
        account = EnergyAccount.objects.select_related(
            "live_subscription_product"
        ).get(id=account_pk)
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    subscriptions = []
    product = account.live_subscription_product
    if product:
        renewal = account.live_subscription_next_renewal
        # When no explicit renewal is stored, derive it from the start date
        # plus the product's renewal period.
        if not renewal and account.live_subscription_start_date:
            renewal = account.live_subscription_start_date + timedelta(
                days=product.renewal_period
            )
        subscriptions.append(
            {
                "id": account.id,
                "product__name": product.name,
                "next_renewal": renewal,
            }
        )

    return JsonResponse({"live_subscriptions": subscriptions})
1475
+
1476
+
1477
@csrf_exempt
@api_login_required
def rfid_batch(request):
    """Export or import RFID tags in batch."""

    if request.method == "GET":
        # Export: optional ?color= (or ALL) and ?released=true|false filters.
        color_filter = request.GET.get("color", RFID.BLACK).upper()
        released_param = request.GET.get("released")
        if released_param is not None:
            released_param = released_param.lower()

        queryset = RFID.objects.all()
        if color_filter != "ALL":
            queryset = queryset.filter(color=color_filter)
        if released_param in ("true", "false"):
            queryset = queryset.filter(released=(released_param == "true"))

        def _serialize(tag):
            # One exported row per tag, mirroring the import format below.
            return {
                "rfid": tag.rfid,
                "custom_label": tag.custom_label,
                "energy_accounts": list(
                    tag.energy_accounts.values_list("id", flat=True)
                ),
                "external_command": tag.external_command,
                "allowed": tag.allowed,
                "color": tag.color,
                "released": tag.released,
            }

        return JsonResponse(
            {"rfids": [_serialize(tag) for tag in queryset.order_by("rfid")]}
        )

    if request.method == "POST":
        # Import: upsert every row keyed on the (upper-cased) rfid value.
        try:
            payload = json.loads(request.body.decode())
        except json.JSONDecodeError:
            return JsonResponse({"detail": "invalid JSON"}, status=400)

        rows = payload.get("rfids") if isinstance(payload, dict) else payload
        if not isinstance(rows, list):
            return JsonResponse({"detail": "rfids list required"}, status=400)

        imported = 0
        for row in rows:
            tag_value = (row.get("rfid") or "").strip()
            if not tag_value:
                continue

            color_value = (
                (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
            )
            released_value = row.get("released", False)
            if isinstance(released_value, str):
                released_value = released_value.lower() == "true"
            command = row.get("external_command")
            command = command.strip() if isinstance(command, str) else ""
            account_ids = row.get("energy_accounts") or []

            tag, _ = RFID.objects.update_or_create(
                rfid=tag_value.upper(),
                defaults={
                    "allowed": row.get("allowed", True),
                    "color": color_value,
                    "released": released_value,
                    "custom_label": (row.get("custom_label") or "").strip(),
                    "external_command": command,
                },
            )
            # Replace (or clear) the tag's account links to match the row.
            if account_ids:
                tag.energy_accounts.set(
                    EnergyAccount.objects.filter(id__in=account_ids)
                )
            else:
                tag.energy_accounts.clear()
            imported += 1

        return JsonResponse({"imported": imported})

    return JsonResponse({"detail": "GET or POST required"}, status=400)
1555
+
1556
+
1557
@staff_member_required
def release_progress(request, pk: int, action: str):
    """Drive the step-by-step release publishing pipeline for one release.

    State is kept in the session (keyed per release) and mirrored to a JSON
    lock file under ``locks/`` so progress survives a lost session.  Each GET
    request runs at most one step (``?step=N``); other query parameters drive
    the state machine: start/pause/restart, dry-run toggle, approve/reject,
    TODO acknowledgement (``ack_todos``) and dirty-repository resolution
    (``dirty_action``).
    """
    release = get_object_or_404(PackageRelease, pk=pk)
    if action != "publish":
        raise Http404("Unknown action")
    # Per-release bookkeeping locations: session key, lock file and restart
    # counter file.
    session_key = f"release_publish_{pk}"
    lock_path = Path("locks") / f"release_publish_{pk}.json"
    restart_path = Path("locks") / f"release_publish_{pk}.restarts"
    log_dir, log_dir_warning = _resolve_release_log_dir(Path(settings.LOG_DIR))
    log_dir_warning_message = log_dir_warning

    # Snapshot the repository VERSION file so later steps can compare against
    # the pre-sync value (stashed on the instance, not persisted).
    version_path = Path("VERSION")
    repo_version_before_sync = ""
    if version_path.exists():
        repo_version_before_sync = version_path.read_text(encoding="utf-8").strip()
    setattr(release, "_repo_version_before_sync", repo_version_before_sync)

    if not release.is_current:
        if release.is_published:
            raise Http404("Release is not current")
        # Try to re-sync the release with the repo revision; on success wipe
        # all stale progress state (session, lock, restarts, old logs).
        updated, previous_version = _sync_release_with_revision(release)
        if updated:
            request.session.pop(session_key, None)
            if lock_path.exists():
                lock_path.unlink()
            if restart_path.exists():
                restart_path.unlink()
            pattern = f"pr.{release.package.name}.v{previous_version}*.log"
            for log_file in log_dir.glob(pattern):
                log_file.unlink()
        if not release.is_current:
            raise Http404("Release is not current")

    if request.GET.get("restart"):
        # Bump the restart counter, reset repo and release metadata, and
        # clear all progress state before redirecting back to a clean page.
        count = 0
        if restart_path.exists():
            try:
                count = int(restart_path.read_text(encoding="utf-8"))
            except Exception:
                count = 0
        restart_path.parent.mkdir(parents=True, exist_ok=True)
        restart_path.write_text(str(count + 1), encoding="utf-8")
        _clean_repo()
        release.pypi_url = ""
        release.release_on = None
        release.save(update_fields=["pypi_url", "release_on"])
        request.session.pop(session_key, None)
        if lock_path.exists():
            lock_path.unlink()
        pattern = f"pr.{release.package.name}.v{release.version}*.log"
        for f in log_dir.glob(pattern):
            f.unlink()
        return redirect(request.path)

    # Recover progress context: session first, then the lock file fallback.
    ctx = request.session.get(session_key)
    if ctx is None and lock_path.exists():
        try:
            ctx = json.loads(lock_path.read_text(encoding="utf-8"))
        except Exception:
            ctx = {"step": 0}
    if ctx is None:
        ctx = {"step": 0}
        if restart_path.exists():
            restart_path.unlink()
    # Persist a fresh log-dir warning into ctx, or recover a previously
    # stored one when this request produced none.
    if log_dir_warning_message:
        ctx["log_dir_warning_message"] = log_dir_warning_message
    else:
        log_dir_warning_message = ctx.get("log_dir_warning_message")

    steps = PUBLISH_STEPS
    total_steps = len(steps)
    step_count = ctx.get("step", 0)
    started_flag = bool(ctx.get("started"))
    paused_flag = bool(ctx.get("paused"))
    error_flag = bool(ctx.get("error"))
    done_flag = step_count >= total_steps and not error_flag
    # Start (or resume) is only offered while nothing is running and the
    # pipeline is neither finished nor failed.
    start_enabled = (not started_flag or paused_flag) and not done_flag and not error_flag

    ctx["dry_run"] = bool(ctx.get("dry_run"))

    if request.GET.get("set_dry_run") is not None:
        if start_enabled:
            ctx["dry_run"] = bool(request.GET.get("dry_run"))
        request.session[session_key] = ctx
        return redirect(request.path)

    manager = release.release_manager or release.package.release_manager
    credentials_ready = bool(release.to_credentials())
    if credentials_ready and ctx.get("approval_credentials_missing"):
        ctx.pop("approval_credentials_missing", None)

    ack_todos_requested = bool(request.GET.get("ack_todos"))

    if request.GET.get("start"):
        if start_enabled:
            ctx["dry_run"] = bool(request.GET.get("dry_run"))
            ctx["started"] = True
            ctx["paused"] = False
    # Approval decisions are only honored while the pipeline is waiting for
    # one and publishing credentials are available.
    if (
        ctx.get("awaiting_approval")
        and not ctx.get("approval_credentials_missing")
        and credentials_ready
    ):
        if request.GET.get("approve"):
            ctx["release_approval"] = "approved"
        if request.GET.get("reject"):
            ctx["release_approval"] = "rejected"
    if request.GET.get("pause") and ctx.get("started"):
        ctx["paused"] = True
    restart_count = 0
    if restart_path.exists():
        try:
            restart_count = int(restart_path.read_text(encoding="utf-8"))
        except Exception:
            restart_count = 0
    step_count = ctx.get("step", 0)
    step_param = request.GET.get("step")

    # Open TODOs block the pipeline until acknowledged; acknowledging runs
    # each TODO's on-done condition and reports failures via messages.
    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    pending_items = list(pending_qs)
    if ack_todos_requested:
        if pending_items:
            failures = []
            for todo in pending_items:
                result = todo.check_on_done_condition()
                if not result.passed:
                    failures.append((todo, result))
            if failures:
                ctx.pop("todos_ack", None)
                for todo, result in failures:
                    messages.error(request, _format_condition_failure(todo, result))
            else:
                ctx["todos_ack"] = True
        else:
            ctx["todos_ack"] = True

    if pending_items and not ctx.get("todos_ack"):
        ctx["todos"] = [
            {
                "id": todo.pk,
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            }
            for todo in pending_items
        ]
    else:
        ctx.pop("todos", None)

    # A log-name mismatch means the release version changed under us: reset
    # progress but keep the started flag.
    log_name = _release_log_name(release.package.name, release.version)
    if ctx.get("log") != log_name:
        ctx = {
            "step": 0,
            "log": log_name,
            "started": ctx.get("started", False),
        }
        step_count = 0
    log_path = log_dir / log_name
    ctx.setdefault("log", log_name)
    ctx.setdefault("paused", False)
    ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)

    # Dirty-repository resolution: either discard local changes or commit
    # them with a user-supplied (or default) message.
    dirty_action = request.GET.get("dirty_action")
    if dirty_action and ctx.get("dirty_files"):
        if dirty_action == "discard":
            _clean_repo()
            remaining = _collect_dirty_files()
            if remaining:
                ctx["dirty_files"] = remaining
                ctx.pop("dirty_commit_error", None)
            else:
                ctx.pop("dirty_files", None)
                ctx.pop("dirty_commit_error", None)
                ctx.pop("dirty_log_message", None)
                _append_log(log_path, "Discarded local changes before publish")
        elif dirty_action == "commit":
            message = request.GET.get("dirty_message", "").strip()
            if not message:
                message = ctx.get("dirty_commit_message") or DIRTY_COMMIT_DEFAULT_MESSAGE
            ctx["dirty_commit_message"] = message
            try:
                subprocess.run(["git", "add", "--all"], check=True)
                subprocess.run(["git", "commit", "-m", message], check=True)
            except subprocess.CalledProcessError as exc:
                ctx["dirty_commit_error"] = _format_subprocess_error(exc)
            else:
                ctx.pop("dirty_commit_error", None)
                remaining = _collect_dirty_files()
                if remaining:
                    ctx["dirty_files"] = remaining
                else:
                    ctx.pop("dirty_files", None)
                    ctx.pop("dirty_log_message", None)
                    _append_log(
                        log_path,
                        _("Committed pending changes: %(message)s")
                        % {"message": message},
                    )

    # A fresh start at step 0 removes any stale log so the run begins clean.
    if (
        ctx.get("started")
        and step_count == 0
        and (step_param is None or step_param == "0")
    ):
        if log_path.exists():
            log_path.unlink()
        ctx.pop("log_dir_warning_logged", None)

    if log_dir_warning_message and not ctx.get("log_dir_warning_logged"):
        _append_log(log_path, log_dir_warning_message)
        ctx["log_dir_warning_logged"] = True

    fixtures_step_index = next(
        (
            index
            for index, (name, _) in enumerate(steps)
            if name == FIXTURE_REVIEW_STEP_NAME
        ),
        None,
    )
    error = ctx.get("error")

    # Execute exactly the requested step, and only if it matches the stored
    # progress counter (guards against double-submits and stale tabs).
    if (
        ctx.get("started")
        and not ctx.get("paused")
        and step_param is not None
        and not error
        and step_count < len(steps)
    ):
        to_run = int(step_param)
        if to_run == step_count:
            name, func = steps[to_run]
            try:
                func(release, ctx, log_path)
            except PendingTodos:
                # Pipeline pauses; ctx flags were set by the step itself.
                pass
            except ApprovalRequired:
                pass
            except DirtyRepository:
                pass
            except Exception as exc:  # pragma: no cover - best effort logging
                _append_log(log_path, f"{name} failed: {exc}")
                ctx["error"] = str(exc)
                request.session[session_key] = ctx
                lock_path.parent.mkdir(parents=True, exist_ok=True)
                lock_path.write_text(json.dumps(ctx), encoding="utf-8")
            else:
                # Step succeeded: advance and persist to session + lock file.
                step_count += 1
                ctx["step"] = step_count
                request.session[session_key] = ctx
                lock_path.parent.mkdir(parents=True, exist_ok=True)
                lock_path.write_text(json.dumps(ctx), encoding="utf-8")

    done = step_count >= len(steps) and not ctx.get("error")

    show_log = ctx.get("started") or step_count > 0 or done or ctx.get("error")
    if show_log and log_path.exists():
        log_content = log_path.read_text(encoding="utf-8")
    else:
        log_content = ""
    # next_step is the step the page should auto-trigger; any blocking
    # condition below clears it.
    next_step = (
        step_count
        if ctx.get("started")
        and not ctx.get("paused")
        and not done
        and not ctx.get("error")
        else None
    )
    has_pending_todos = bool(ctx.get("todos") and not ctx.get("todos_ack"))
    if has_pending_todos:
        next_step = None
    dirty_files = ctx.get("dirty_files")
    if dirty_files:
        next_step = None
    awaiting_approval = bool(ctx.get("awaiting_approval"))
    approval_credentials_missing = bool(ctx.get("approval_credentials_missing"))
    if awaiting_approval:
        next_step = None
    if approval_credentials_missing:
        next_step = None
    paused = ctx.get("paused", False)

    step_names = [s[0] for s in steps]
    approval_credentials_ready = credentials_ready
    credentials_blocking = approval_credentials_missing or (
        awaiting_approval and not approval_credentials_ready
    )
    # Build a per-step display state (status/icon/label) for the template.
    step_states = []
    for index, name in enumerate(step_names):
        if index < step_count:
            status = "complete"
            icon = "✅"
            label = _("Completed")
        elif error and index == step_count:
            status = "error"
            icon = "❌"
            label = _("Failed")
        elif paused and ctx.get("started") and index == step_count and not done:
            status = "paused"
            icon = "⏸️"
            label = _("Paused")
        elif (
            has_pending_todos
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "blocked"
            icon = "📝"
            label = _("Awaiting checklist")
        elif (
            credentials_blocking
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "missing-credentials"
            icon = "🔐"
            label = _("Credentials required")
        elif (
            awaiting_approval
            and approval_credentials_ready
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "awaiting-approval"
            icon = "🤝"
            label = _("Awaiting approval")
        elif ctx.get("started") and index == step_count and not done:
            status = "active"
            icon = "⏳"
            label = _("In progress")
        else:
            status = "pending"
            icon = "⬜"
            label = _("Pending")
        step_states.append(
            {
                "index": index + 1,
                "name": name,
                "status": status,
                "icon": icon,
                "label": label,
            }
        )

    is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
    can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
    release_manager_owner = manager.owner_display() if manager else ""
    # Teams admin takes precedence when installed; fall back to core admin.
    try:
        current_user_admin_url = reverse(
            "admin:teams_user_change", args=[request.user.pk]
        )
    except NoReverseMatch:
        current_user_admin_url = reverse(
            "admin:core_user_change", args=[request.user.pk]
        )

    # Hide the fixtures summary once the fixture-review step is behind us.
    fixtures_summary = ctx.get("fixtures")
    if (
        fixtures_summary
        and fixtures_step_index is not None
        and step_count > fixtures_step_index
    ):
        fixtures_summary = None

    todos_display = ctx.get("todos") if has_pending_todos else None

    dry_run_active = bool(ctx.get("dry_run"))
    dry_run_toggle_enabled = not is_running and not done and not ctx.get("error")

    context = {
        "release": release,
        "action": "publish",
        "steps": step_names,
        "current_step": step_count,
        "next_step": next_step,
        "done": done,
        "error": ctx.get("error"),
        "log_content": log_content,
        "log_path": str(log_path),
        "cert_log": ctx.get("cert_log"),
        "fixtures": fixtures_summary,
        "todos": todos_display,
        "dirty_files": dirty_files,
        "dirty_commit_message": ctx.get("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE),
        "dirty_commit_error": ctx.get("dirty_commit_error"),
        "restart_count": restart_count,
        "started": ctx.get("started", False),
        "paused": paused,
        "show_log": show_log,
        "step_states": step_states,
        "has_pending_todos": has_pending_todos,
        "awaiting_approval": awaiting_approval,
        "approval_credentials_missing": approval_credentials_missing,
        "approval_credentials_ready": approval_credentials_ready,
        "release_manager_owner": release_manager_owner,
        "has_release_manager": bool(manager),
        "current_user_admin_url": current_user_admin_url,
        "is_running": is_running,
        "can_resume": can_resume,
        "dry_run": dry_run_active,
        "dry_run_toggle_enabled": dry_run_toggle_enabled,
    }
    request.session[session_key] = ctx
    # Terminal states drop the lock file; otherwise persist it for recovery.
    if done or ctx.get("error"):
        if lock_path.exists():
            lock_path.unlink()
    else:
        lock_path.parent.mkdir(parents=True, exist_ok=True)
        lock_path.write_text(json.dumps(ctx), encoding="utf-8")
    # Render manually and emit template_rendered so test clients see
    # response.context / response.templates as with the test renderer.
    template = get_template("core/release_progress.html")
    content = template.render(context, request)
    signals.template_rendered.send(
        sender=template.__class__,
        template=template,
        context=context,
        using=getattr(getattr(template, "engine", None), "name", None),
    )
    response = HttpResponse(content)
    response.context = context
    response.templates = [template]
    return response
1980
+
1981
+
1982
+ def _dedupe_preserve_order(values):
1983
+ seen = set()
1984
+ result = []
1985
+ for value in values:
1986
+ if value in seen:
1987
+ continue
1988
+ seen.add(value)
1989
+ result.append(value)
1990
+ return result
1991
+
1992
+
1993
+ def _parse_todo_auth_directives(query: str):
1994
+ directives = {
1995
+ "require_logout": False,
1996
+ "users": [],
1997
+ "permissions": [],
1998
+ "notes": [],
1999
+ }
2000
+ if not query:
2001
+ return "", directives
2002
+
2003
+ remaining = []
2004
+ for key, value in parse_qsl(query, keep_blank_values=True):
2005
+ if key != "_todo_auth":
2006
+ remaining.append((key, value))
2007
+ continue
2008
+ token = (value or "").strip()
2009
+ if not token:
2010
+ continue
2011
+ kind, _, payload = token.partition(":")
2012
+ kind = kind.strip().lower()
2013
+ payload = payload.strip()
2014
+ if kind in {"logout", "anonymous", "anon"}:
2015
+ directives["require_logout"] = True
2016
+ elif kind in {"user", "username"} and payload:
2017
+ directives["users"].append(payload)
2018
+ elif kind in {"perm", "permission"} and payload:
2019
+ directives["permissions"].append(payload)
2020
+ else:
2021
+ directives["notes"].append(token)
2022
+
2023
+ sanitized_query = urlencode(remaining, doseq=True)
2024
+ return sanitized_query, directives
2025
+
2026
+
2027
def _todo_iframe_url(request, todo: Todo):
    """Return a safe iframe URL and auth context for ``todo``.

    The TODO's stored URL is only embedded when it is relative or points at
    a host this deployment serves (request host, loopback names, the current
    Site domain, or a non-wildcard ALLOWED_HOSTS entry); anything else falls
    back to the TODO's admin change page.  ``_todo_auth`` query directives
    are stripped from the URL and folded into the returned auth context.
    """

    fallback = reverse("admin:core_todo_change", args=[todo.pk])
    raw_url = (todo.url or "").strip()

    # Accumulates directives merged out of the URL's query string.
    auth_context = {
        "require_logout": False,
        "users": [],
        "permissions": [],
        "notes": [],
    }

    def _final_context(target_url: str):
        # Deduplicated, template-ready snapshot of the auth requirements.
        return {
            "target_url": target_url or fallback,
            "require_logout": auth_context["require_logout"],
            "users": _dedupe_preserve_order(auth_context["users"]),
            "permissions": _dedupe_preserve_order(auth_context["permissions"]),
            "notes": _dedupe_preserve_order(auth_context["notes"]),
            "has_requirements": bool(
                auth_context["require_logout"]
                or auth_context["users"]
                or auth_context["permissions"]
                or auth_context["notes"]
            ),
        }

    if not raw_url:
        return fallback, _final_context(fallback)

    # Guard against the TODO pointing back at its own focus view, which
    # would nest this page inside its own iframe.
    focus_path = reverse("todo-focus", args=[todo.pk])
    focus_norm = focus_path.strip("/").lower()

    def _is_focus_target(target: str) -> bool:
        if not target:
            return False
        parsed_target = urlsplit(target)
        path = parsed_target.path
        if not path and not parsed_target.scheme and not parsed_target.netloc:
            # Bare "?query" / "#fragment" style value: strip those parts
            # manually to recover the path component.
            path = target.split("?", 1)[0].split("#", 1)[0]
        normalized = path.strip("/").lower()
        return normalized == focus_norm if normalized else False

    if _is_focus_target(raw_url):
        return fallback, _final_context(fallback)

    parsed = urlsplit(raw_url)

    def _merge_directives(parsed_result):
        # Pull _todo_auth directives out of the query and into auth_context,
        # returning the parsed URL with a sanitized query string.
        sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
        if directives["require_logout"]:
            auth_context["require_logout"] = True
        auth_context["users"].extend(directives["users"])
        auth_context["permissions"].extend(directives["permissions"])
        auth_context["notes"].extend(directives["notes"])
        return parsed_result._replace(query=sanitized_query)

    if not parsed.scheme and not parsed.netloc:
        # Relative URL: normalize to a leading-slash path and use as-is.
        sanitized = _merge_directives(parsed)
        path = sanitized.path or "/"
        if not path.startswith("/"):
            path = f"/{path}"
        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
        if _is_focus_target(relative_url):
            return fallback, _final_context(fallback)
        return relative_url or fallback, _final_context(relative_url)

    # Only plain web schemes may be embedded.
    if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
        return fallback, _final_context(fallback)

    # Build the set of hosts considered "ours": the request host (with and
    # without port) plus common loopback names.
    request_host = request.get_host().strip().lower()
    host_without_port = request_host.split(":", 1)[0]
    allowed_hosts = {
        request_host,
        host_without_port,
        "localhost",
        "127.0.0.1",
        "0.0.0.0",
        "::1",
    }

    site_domain = ""
    try:
        site_domain = Site.objects.get_current().domain.strip().lower()
    except Site.DoesNotExist:
        site_domain = ""
    if site_domain:
        allowed_hosts.add(site_domain)
        allowed_hosts.add(site_domain.split(":", 1)[0])

    for host in getattr(settings, "ALLOWED_HOSTS", []):
        if not isinstance(host, str):
            continue
        normalized = host.strip().lower()
        # Wildcard patterns are skipped: they cannot be matched literally.
        if not normalized or normalized.startswith("*"):
            continue
        allowed_hosts.add(normalized)
        allowed_hosts.add(normalized.split(":", 1)[0])

    hostname = (parsed.hostname or "").strip().lower()
    netloc = parsed.netloc.strip().lower()
    if hostname in allowed_hosts or netloc in allowed_hosts:
        # Same-host absolute URL: drop scheme/host and embed the relative
        # form so the iframe stays on this origin.
        sanitized = _merge_directives(parsed)
        path = sanitized.path or "/"
        if not path.startswith("/"):
            path = f"/{path}"
        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
        if _is_focus_target(relative_url):
            return fallback, _final_context(fallback)
        return relative_url or fallback, _final_context(relative_url)

    # Foreign host: never embed it.
    return fallback, _final_context(fallback)
2140
+
2141
+
2142
@staff_member_required
def todo_focus(request, pk: int):
    """Render the focus page for one open TODO with its target in an iframe."""

    todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
    if todo.done_on:
        # Already completed — nothing left to focus on.
        return redirect(_get_return_url(request))

    iframe_url, focus_auth = _todo_iframe_url(request, todo)
    if focus_auth:
        target = focus_auth.get("target_url", iframe_url)
    else:
        target = iframe_url
    return render(
        request,
        "core/todo_focus.html",
        {
            "todo": todo,
            "iframe_url": iframe_url,
            "focus_target_url": target,
            "focus_auth": focus_auth,
            "next_url": _get_return_url(request),
            "done_url": reverse("todo-done", args=[todo.pk]),
        },
    )
2159
+
2160
+
2161
@staff_member_required
@require_POST
def todo_done(request, pk: int):
    """Mark an open TODO as done, enforcing its completion condition."""

    back = _get_return_url(request)
    # pk is unique, so first() finds the single open, non-deleted match.
    todo = Todo.objects.filter(
        pk=pk, is_deleted=False, done_on__isnull=True
    ).first()
    if todo is None:
        return redirect(back)

    result = todo.check_on_done_condition()
    if not result.passed:
        messages.error(request, _format_condition_failure(todo, result))
        return redirect(back)

    todo.done_on = timezone.now()
    todo.save(update_fields=["done_on"])
    return redirect(back)