arthexis 0.1.16__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/METADATA +84 -35
- arthexis-0.1.26.dist-info/RECORD +111 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +15 -30
- config/urls.py +53 -1
- core/admin.py +540 -450
- core/apps.py +0 -6
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1566 -203
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +268 -2
- core/tasks.py +174 -48
- core/tests.py +314 -16
- core/user_data.py +42 -2
- core/views.py +278 -183
- nodes/admin.py +557 -65
- nodes/apps.py +11 -0
- nodes/models.py +658 -113
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +97 -2
- nodes/tests.py +1212 -116
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1239 -154
- ocpp/admin.py +979 -152
- ocpp/consumers.py +268 -28
- ocpp/models.py +488 -3
- ocpp/network.py +398 -0
- ocpp/store.py +6 -4
- ocpp/tasks.py +296 -2
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +121 -4
- ocpp/tests.py +950 -11
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +3 -3
- ocpp/views.py +596 -51
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +26 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +77 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +885 -109
- pages/urls.py +13 -2
- pages/utils.py +70 -0
- pages/views.py +558 -55
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/views.py
CHANGED
@@ -19,7 +19,6 @@ from django.shortcuts import get_object_or_404, redirect, render, resolve_url
 from django.template.response import TemplateResponse
 from django.utils import timezone
 from django.utils.html import strip_tags
-from django.utils.text import slugify
 from django.utils.translation import gettext as _
 from django.urls import NoReverseMatch, reverse
 from django.views.decorators.csrf import csrf_exempt
@@ -43,6 +42,7 @@ logger = logging.getLogger(__name__)
 PYPI_REQUEST_TIMEOUT = 10

 from . import changelog as changelog_utils
+from . import temp_passwords
 from .models import OdooProfile, Product, EnergyAccount, PackageRelease, Todo
 from .models import RFID

@@ -58,7 +58,6 @@ def odoo_products(request):
     products = profile.execute(
         "product.product",
         "search_read",
-        [[]],
         fields=["name"],
         limit=50,
     )
@@ -137,7 +136,6 @@ def odoo_quote_report(request):
     templates = profile.execute(
         "sale.order.template",
         "search_read",
-        [[]],
         fields=["name"],
         order="name asc",
     )
@@ -287,7 +285,6 @@ def odoo_quote_report(request):
     products = profile.execute(
         "product.product",
         "search_read",
-        [[]],
         fields=["name", "default_code", "write_date", "create_date"],
         limit=10,
         order="write_date desc, create_date desc",
@@ -336,6 +333,36 @@ def odoo_quote_report(request):
     return TemplateResponse(request, "admin/core/odoo_quote_report.html", context)


+@staff_member_required
+@require_GET
+def request_temp_password(request):
+    """Generate a temporary password for the authenticated staff member."""
+
+    user = request.user
+    username = user.get_username()
+    password = temp_passwords.generate_password()
+    entry = temp_passwords.store_temp_password(
+        username,
+        password,
+        allow_change=True,
+    )
+    context = {
+        **admin_site.each_context(request),
+        "title": _("Temporary password"),
+        "username": username,
+        "password": password,
+        "expires_at": timezone.localtime(entry.expires_at),
+        "allow_change": entry.allow_change,
+        "return_url": reverse("admin:password_change"),
+    }
+    return TemplateResponse(
+        request,
+        "admin/core/request_temp_password.html",
+        context,
+    )
+
+
+@staff_member_required
 @require_GET
 def version_info(request):
     """Return the running application version and Git revision."""
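The new request_temp_password view above calls into a core.temp_passwords helper that is not part of this file's diff. The following is a hypothetical sketch of the interface the view appears to rely on; only the call signatures and the expires_at / allow_change fields are taken from the diff, everything else (storage, lifetime, token format) is assumed:

    # Hypothetical sketch of core/temp_passwords.py (not included in this diff).
    # Only generate_password(), store_temp_password(), and the entry fields
    # expires_at / allow_change are implied by the view above; the rest is assumed.
    import secrets
    from dataclasses import dataclass
    from datetime import datetime, timedelta, timezone


    @dataclass
    class TempPasswordEntry:
        username: str
        password: str
        allow_change: bool
        expires_at: datetime


    def generate_password(length: int = 12) -> str:
        # Any URL-safe random token would satisfy the view; the length is an assumption.
        return secrets.token_urlsafe(length)


    def store_temp_password(username: str, password: str, *, allow_change: bool = False) -> TempPasswordEntry:
        # Assumed one-hour lifetime; the real module may hash and persist entries elsewhere.
        return TempPasswordEntry(
            username=username,
            password=password,
            allow_change=allow_change,
            expires_at=datetime.now(timezone.utc) + timedelta(hours=1),
        )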
@@ -418,8 +445,11 @@ def _resolve_release_log_dir(preferred: Path) -> tuple[Path, str | None]:

     env_override = os.environ.pop("ARTHEXIS_LOG_DIR", None)
     fallback = select_log_dir(Path(settings.BASE_DIR))
-    if env_override
-
+    if env_override is not None:
+        if Path(env_override) == fallback:
+            os.environ["ARTHEXIS_LOG_DIR"] = env_override
+        else:
+            os.environ["ARTHEXIS_LOG_DIR"] = str(fallback)

     if fallback == preferred:
         if error:
@@ -463,6 +493,16 @@ def _sync_with_origin_main(log_path: Path) -> None:
     if stderr:
         _append_log(log_path, "git errors:\n" + stderr)

+    status = subprocess.run(
+        ["git", "status"], capture_output=True, text=True, check=False
+    )
+    status_output = (status.stdout or "").strip()
+    status_errors = (status.stderr or "").strip()
+    if status_output:
+        _append_log(log_path, "git status:\n" + status_output)
+    if status_errors:
+        _append_log(log_path, "git status errors:\n" + status_errors)
+
     branch = _current_branch() or "(detached HEAD)"
     instructions = [
         "Manual intervention required to finish syncing with origin/main.",
@@ -578,6 +618,43 @@ def _git_authentication_missing(exc: subprocess.CalledProcessError) -> bool:
     return any(marker in message for marker in auth_markers)


+def _push_release_changes(log_path: Path) -> bool:
+    """Push release commits to ``origin`` and log the outcome."""
+
+    if not _has_remote("origin"):
+        _append_log(
+            log_path, "No git remote configured; skipping push of release changes"
+        )
+        return False
+
+    try:
+        branch = _current_branch()
+        if branch is None:
+            push_cmd = ["git", "push", "origin", "HEAD"]
+        elif _has_upstream(branch):
+            push_cmd = ["git", "push"]
+        else:
+            push_cmd = ["git", "push", "--set-upstream", "origin", branch]
+        subprocess.run(push_cmd, check=True, capture_output=True, text=True)
+    except subprocess.CalledProcessError as exc:
+        details = _format_subprocess_error(exc)
+        if _git_authentication_missing(exc):
+            _append_log(
+                log_path,
+                "Authentication is required to push release changes to origin; skipping push",
+            )
+            if details:
+                _append_log(log_path, details)
+            return False
+        _append_log(
+            log_path, f"Failed to push release changes to origin: {details}"
+        )
+        raise Exception("Failed to push release changes") from exc
+
+    _append_log(log_path, "Pushed release changes to origin")
+    return True
+
+
 def _ensure_origin_main_unchanged(log_path: Path) -> None:
     """Verify that ``origin/main`` has not advanced during the release."""

@@ -610,42 +687,20 @@ def _ensure_origin_main_unchanged(log_path: Path) -> None:
 def _next_patch_version(version: str) -> str:
     from packaging.version import InvalidVersion, Version

+    cleaned = version.rstrip("+")
     try:
-        parsed = Version(
+        parsed = Version(cleaned)
     except InvalidVersion:
-        parts =
+        parts = cleaned.split(".") if cleaned else []
         for index in range(len(parts) - 1, -1, -1):
             segment = parts[index]
             if segment.isdigit():
                 parts[index] = str(int(segment) + 1)
                 return ".".join(parts)
-        return version
+        return cleaned or version
     return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"


-def _write_todo_fixture(todo: Todo) -> Path:
-    safe_request = todo.request.replace(".", " ")
-    slug = slugify(safe_request).replace("-", "_")
-    if not slug:
-        slug = "todo"
-    path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
-    path.parent.mkdir(parents=True, exist_ok=True)
-    data = [
-        {
-            "model": "core.todo",
-            "fields": {
-                "request": todo.request,
-                "url": todo.url,
-                "request_details": todo.request_details,
-                "generated_for_version": todo.generated_for_version,
-                "generated_for_revision": todo.generated_for_revision,
-            },
-        }
-    ]
-    path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
-    return path
-
-
 def _should_use_python_changelog(exc: OSError) -> bool:
     winerror = getattr(exc, "winerror", None)
     if winerror in {193}:
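The reworked _next_patch_version above now strips a trailing "+" (used for local or dirty builds) before bumping. A minimal standalone sketch of the same logic, assuming the packaging library is installed; the example inputs are illustrative:

    # Mirrors the bump logic shown in the hunk above; example values are illustrative.
    from packaging.version import InvalidVersion, Version

    def next_patch_version(version: str) -> str:
        cleaned = version.rstrip("+")
        try:
            parsed = Version(cleaned)
        except InvalidVersion:
            # Non-PEP 440 fallback: bump the last numeric dotted segment.
            parts = cleaned.split(".") if cleaned else []
            for index in range(len(parts) - 1, -1, -1):
                if parts[index].isdigit():
                    parts[index] = str(int(parts[index]) + 1)
                    return ".".join(parts)
            return cleaned or version
        return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"

    print(next_patch_version("0.1.26"))   # 0.1.27
    print(next_patch_version("0.1.26+"))  # 0.1.27 -- the trailing "+" no longer breaks parsing
    print(next_patch_version("1.2.x"))    # 1.3.x  -- fallback bumps the last numeric segment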
@@ -656,8 +711,8 @@ def _should_use_python_changelog(exc: OSError) -> bool:
 def _generate_changelog_with_python(log_path: Path) -> None:
     _append_log(log_path, "Falling back to Python changelog generator")
     changelog_path = Path("CHANGELOG.rst")
-    range_spec = changelog_utils.determine_range_spec()
     previous = changelog_path.read_text(encoding="utf-8") if changelog_path.exists() else None
+    range_spec = changelog_utils.determine_range_spec(previous_text=previous)
     sections = changelog_utils.collect_sections(range_spec=range_spec, previous_text=previous)
     content = changelog_utils.render_changelog(sections)
     if not content.endswith("\n"):
@@ -666,44 +721,32 @@ def _generate_changelog_with_python(log_path: Path) -> None:
     _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")


-def
-
-) -> tuple[Todo, Path]:
-    previous_version = (previous_version or "").strip()
-    target_version = _next_patch_version(release.version)
-    if previous_version:
-        try:
-            from packaging.version import InvalidVersion, Version
+def _todo_blocks_publish(todo: Todo, release: PackageRelease) -> bool:
+    """Return ``True`` when ``todo`` should block the release workflow."""

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            "done_on": None,
-            "on_done_condition": "",
-        },
-    )
-    fixture_path = _write_todo_fixture(todo)
-    return todo, fixture_path
+    request = (todo.request or "").strip()
+    release_name = (release.package.name or "").strip()
+    if not request or not release_name:
+        return True
+
+    prefix = f"create release {release_name.lower()} "
+    if not request.lower().startswith(prefix):
+        return True
+
+    release_version = (release.version or "").strip()
+    generated_version = (todo.generated_for_version or "").strip()
+    if not release_version or release_version != generated_version:
+        return True
+
+    generated_revision = (todo.generated_for_revision or "").strip()
+    release_revision = (release.revision or "").strip()
+    if generated_revision and release_revision and generated_revision != release_revision:
+        return True
+
+    if not todo.is_seed_data:
+        return True
+
+    return False


 def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
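Alongside the removal of the old TODO-fixture helpers, the new _todo_blocks_publish predicate above decides which pending TODOs hold up the release_progress workflow: only the auto-generated "create release <package> <version>" seed item for the matching version (and, when both are set, the matching revision) is treated as non-blocking. An illustrative example with stand-in objects; the field names follow the diff above, while the values and the SimpleNamespace stand-ins are hypothetical, not the real Django models:

    # Illustrative stand-ins only; Todo and PackageRelease are Django models in core.models.
    from types import SimpleNamespace

    release = SimpleNamespace(
        package=SimpleNamespace(name="arthexis"),
        version="0.1.26",
        revision="",  # empty revision: the revision comparison above is skipped
    )

    seed_todo = SimpleNamespace(
        request="Create release arthexis 0.1.26",
        generated_for_version="0.1.26",
        generated_for_revision="",
        is_seed_data=True,
    )
    # Per the predicate above: the request matches the "create release arthexis " prefix,
    # the versions agree, and it is seed data, so it would NOT block publishing.

    manual_todo = SimpleNamespace(
        request="Fix OCPP reconnect handling",
        generated_for_version="",
        generated_for_revision="",
        is_seed_data=False,
    )
    # This one fails the prefix check, so it WOULD block until acknowledged.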
@@ -723,7 +766,9 @@ def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
     version_path = Path("VERSION")
     if version_path.exists():
         try:
-
+            raw_version = version_path.read_text(encoding="utf-8").strip()
+            cleaned_version = raw_version.rstrip("+") or "0.0.0"
+            repo_version = Version(cleaned_version)
         except InvalidVersion:
             repo_version = None

@@ -890,14 +935,25 @@ def _refresh_changelog_once(ctx, log_path: Path) -> None:
     ctx["changelog_refreshed"] = True


-def _step_check_todos(release, ctx, log_path: Path) -> None:
+def _step_check_todos(release, ctx, log_path: Path, *, user=None) -> None:
     _refresh_changelog_once(ctx, log_path)

     pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
     pending_values = list(
         pending_qs.values("id", "request", "url", "request_details")
     )
+    if not pending_values:
+        ctx["todos_ack"] = True
+
     if not ctx.get("todos_ack"):
+        if not ctx.get("todos_block_logged"):
+            _append_log(
+                log_path,
+                "Release checklist requires acknowledgment before continuing. "
+                "Review outstanding TODO items and confirm the checklist; "
+                "publishing will resume automatically afterward.",
+            )
+        ctx["todos_block_logged"] = True
         ctx["todos"] = pending_values
         ctx["todos_required"] = True
         raise PendingTodos()
@@ -919,7 +975,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
     ctx["todos_ack"] = True


-def _step_check_version(release, ctx, log_path: Path) -> None:
+def _step_check_version(release, ctx, log_path: Path, *, user=None) -> None:
     from . import release as release_utils
     from packaging.version import InvalidVersion, Version

@@ -1054,10 +1110,12 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
     version_path = Path("VERSION")
     if version_path.exists():
         current = version_path.read_text(encoding="utf-8").strip()
-        if current
-
-
-
+        if current:
+            current_clean = current.rstrip("+") or "0.0.0"
+            if Version(release.version) < Version(current_clean):
+                raise Exception(
+                    f"Version {release.version} is older than existing {current}"
+                )

     _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
     if release_utils.network_available():
@@ -1107,47 +1165,17 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
         _append_log(log_path, "Network unavailable, skipping PyPI check")


-def _step_handle_migrations(release, ctx, log_path: Path) -> None:
+def _step_handle_migrations(release, ctx, log_path: Path, *, user=None) -> None:
     _append_log(log_path, "Freeze, squash and approve migrations")
     _append_log(log_path, "Migration review acknowledged (manual step)")


-def _step_changelog_docs(release, ctx, log_path: Path) -> None:
+def _step_changelog_docs(release, ctx, log_path: Path, *, user=None) -> None:
     _append_log(log_path, "Compose CHANGELOG and documentation")
     _append_log(log_path, "CHANGELOG and documentation review recorded")


-def
-    release, ctx, log_path: Path, *, previous_version: str | None = None
-) -> None:
-    previous_version = previous_version or ctx.pop(
-        "release_todo_previous_version",
-        getattr(release, "_repo_version_before_sync", ""),
-    )
-    todo, fixture_path = _ensure_release_todo(
-        release, previous_version=previous_version
-    )
-    fixture_display = _format_path(fixture_path)
-    _append_log(log_path, f"Added TODO: {todo.request}")
-    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
-    subprocess.run(["git", "add", str(fixture_path)], check=True)
-    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
-    fixture_diff = subprocess.run(
-        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
-        check=False,
-    )
-    if fixture_diff.returncode != 0:
-        commit_message = f"chore: add release TODO for {release.package.name}"
-        subprocess.run(["git", "commit", "-m", commit_message], check=True)
-        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
-    else:
-        _append_log(
-            log_path,
-            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
-        )
-
-
-def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
+def _step_pre_release_actions(release, ctx, log_path: Path, *, user=None) -> None:
     _append_log(log_path, "Execute pre-release actions")
     if ctx.get("dry_run"):
         _append_log(log_path, "Dry run: skipping pre-release actions")
@@ -1220,16 +1248,15 @@ def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
     for path in staged_release_fixtures:
         subprocess.run(["git", "reset", "HEAD", str(path)], check=False)
         _append_log(log_path, f"Unstaged release fixture {_format_path(path)}")
-    ctx["release_todo_previous_version"] = repo_version_before_sync
     _append_log(log_path, "Pre-release actions complete")


-def _step_run_tests(release, ctx, log_path: Path) -> None:
+def _step_run_tests(release, ctx, log_path: Path, *, user=None) -> None:
     _append_log(log_path, "Complete test suite with --all flag")
     _append_log(log_path, "Test suite completion acknowledged")


-def _step_promote_build(release, ctx, log_path: Path) -> None:
+def _step_promote_build(release, ctx, log_path: Path, *, user=None) -> None:
     from . import release as release_utils

     _append_log(log_path, "Generating build files")
@@ -1241,7 +1268,7 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
     release_utils.promote(
         package=release.to_package(),
         version=release.version,
-        creds=release.to_credentials(),
+        creds=release.to_credentials(user=user),
     )
     _append_log(
         log_path,
@@ -1271,40 +1298,9 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
                 log_path,
                 f"Committed release metadata for v{release.version}",
             )
-
-            try:
-                branch = _current_branch()
-                if branch is None:
-                    push_cmd = ["git", "push", "origin", "HEAD"]
-                elif _has_upstream(branch):
-                    push_cmd = ["git", "push"]
-                else:
-                    push_cmd = ["git", "push", "--set-upstream", "origin", branch]
-                subprocess.run(push_cmd, check=True, capture_output=True, text=True)
-            except subprocess.CalledProcessError as exc:
-                details = _format_subprocess_error(exc)
-                if _git_authentication_missing(exc):
-                    _append_log(
-                        log_path,
-                        "Authentication is required to push release changes to origin; skipping push",
-                    )
-                    if details:
-                        _append_log(log_path, details)
-                else:
-                    _append_log(
-                        log_path, f"Failed to push release changes to origin: {details}"
-                    )
-                    raise Exception("Failed to push release changes") from exc
-            else:
-                _append_log(log_path, "Pushed release changes to origin")
-        else:
-            _append_log(
-                log_path,
-                "No git remote configured; skipping push of release changes",
-            )
+            _push_release_changes(log_path)
             PackageRelease.dump_fixture()
             _append_log(log_path, "Updated release fixtures")
-            _record_release_todo(release, ctx, log_path)
         except Exception:
             _clean_repo()
             raise
@@ -1320,8 +1316,10 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
     _append_log(new_log, "Build complete")


-def _step_release_manager_approval(
-
+def _step_release_manager_approval(
+    release, ctx, log_path: Path, *, user=None
+) -> None:
+    if release.to_credentials(user=user) is None:
         ctx.pop("release_approval", None)
         if not ctx.get("approval_credentials_missing"):
             _append_log(log_path, "Release manager publishing credentials missing")
@@ -1355,14 +1353,14 @@ def _step_release_manager_approval(release, ctx, log_path: Path) -> None:
     raise ApprovalRequired()


-def _step_publish(release, ctx, log_path: Path) -> None:
+def _step_publish(release, ctx, log_path: Path, *, user=None) -> None:
     from . import release as release_utils

     if ctx.get("dry_run"):
         test_repository_url = os.environ.get(
             "PYPI_TEST_REPOSITORY_URL", "https://test.pypi.org/legacy/"
         )
-        test_creds = release.to_credentials()
+        test_creds = release.to_credentials(user=user)
         if not (test_creds and test_creds.has_auth()):
             test_creds = release_utils.Credentials(
                 token=os.environ.get("PYPI_TEST_API_TOKEN"),
@@ -1402,7 +1400,7 @@ def _step_publish(release, ctx, log_path: Path) -> None:
         release_utils.build(
             package=package,
             version=release.version,
-            creds=release.to_credentials(),
+            creds=release.to_credentials(user=user),
             dist=True,
             tests=False,
             twine=False,
@@ -1431,13 +1429,13 @@ def _step_publish(release, ctx, log_path: Path) -> None:
             release_utils.publish(
                 package=release.to_package(),
                 version=release.version,
-                creds=target.credentials or release.to_credentials(),
+                creds=target.credentials or release.to_credentials(user=user),
                 repositories=[target],
             )
         _append_log(log_path, "Dry run: skipped release metadata updates")
         return

-    targets = release.build_publish_targets()
+    targets = release.build_publish_targets(user=user)
     repo_labels = []
     for target in targets:
         label = target.name
@@ -1451,12 +1449,29 @@ def _step_publish(release, ctx, log_path: Path) -> None:
         )
     else:
         _append_log(log_path, "Uploading distribution")
-        release_utils.
-
-
-
-
-
+        publish_warning: release_utils.PostPublishWarning | None = None
+        try:
+            release_utils.publish(
+                package=release.to_package(),
+                version=release.version,
+                creds=release.to_credentials(user=user),
+                repositories=targets,
+            )
+        except release_utils.PostPublishWarning as warning:
+            publish_warning = warning
+
+        if publish_warning is not None:
+            message = str(publish_warning)
+            followups = _dedupe_preserve_order(publish_warning.followups)
+            warning_entries = ctx.setdefault("warnings", [])
+            if not any(entry.get("message") == message for entry in warning_entries):
+                entry: dict[str, object] = {"message": message}
+                if followups:
+                    entry["followups"] = followups
+                warning_entries.append(entry)
+            _append_log(log_path, message)
+            for note in followups:
+                _append_log(log_path, f"Follow-up: {note}")
     release.pypi_url = (
         f"https://pypi.org/project/{release.package.name}/{release.version}/"
     )
@@ -1475,6 +1490,30 @@ def _step_publish(release, ctx, log_path: Path) -> None:
     _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
     if release.github_url:
         _append_log(log_path, f"Recorded GitHub URL: {release.github_url}")
+    fixture_paths = [
+        str(path) for path in Path("core/fixtures").glob("releases__*.json")
+    ]
+    if fixture_paths:
+        status = subprocess.run(
+            ["git", "status", "--porcelain", "--", *fixture_paths],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        if status.stdout.strip():
+            subprocess.run(["git", "add", *fixture_paths], check=True)
+            _append_log(log_path, "Staged publish metadata updates")
+            commit_message = f"chore: record publish metadata for v{release.version}"
+            subprocess.run(["git", "commit", "-m", commit_message], check=True)
+            _append_log(
+                log_path, f"Committed publish metadata for v{release.version}"
+            )
+            _push_release_changes(log_path)
+        else:
+            _append_log(
+                log_path,
+                "No release metadata updates detected after publish; skipping commit",
+            )
     _append_log(log_path, "Upload complete")


@@ -1668,9 +1707,9 @@ def rfid_batch(request):
         else:
             post_auth_command = post_auth_command.strip()

-        tag, _ = RFID.
-        rfid
-
+        tag, _ = RFID.update_or_create_from_code(
+            rfid,
+            {
                 "allowed": allowed,
                 "color": color,
                 "released": released,
@@ -1784,7 +1823,7 @@ def release_progress(request, pk: int, action: str):
         return redirect(request.path)

     manager = release.release_manager or release.package.release_manager
-    credentials_ready = bool(release.to_credentials())
+    credentials_ready = bool(release.to_credentials(user=request.user))
     if credentials_ready and ctx.get("approval_credentials_missing"):
         ctx.pop("approval_credentials_missing", None)

@@ -1804,8 +1843,16 @@
         ctx["release_approval"] = "approved"
     if request.GET.get("reject"):
         ctx["release_approval"] = "rejected"
+    resume_requested = bool(request.GET.get("resume"))
+
     if request.GET.get("pause") and ctx.get("started"):
         ctx["paused"] = True
+
+    if resume_requested:
+        if not ctx.get("started"):
+            ctx["started"] = True
+        if ctx.get("paused"):
+            ctx["paused"] = False
     restart_count = 0
     if restart_path.exists():
         try:
@@ -1814,26 +1861,42 @@
         restart_count = 0
     step_count = ctx.get("step", 0)
     step_param = request.GET.get("step")
+    if resume_requested and step_param is None:
+        step_param = str(step_count)

     pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
     pending_items = list(pending_qs)
-
-    if
-
-
-
-
-
-
-
-
-
-
-
+    blocking_todos = [
+        todo for todo in pending_items if _todo_blocks_publish(todo, release)
+    ]
+    if not blocking_todos:
+        ctx["todos_ack"] = True
+        ctx["todos_ack_auto"] = True
+    elif ack_todos_requested:
+        failures = []
+        for todo in blocking_todos:
+            result = todo.check_on_done_condition()
+            if not result.passed:
+                failures.append((todo, result))
+        if failures:
+            ctx["todos_ack"] = False
+            ctx.pop("todos_ack_auto", None)
+            for todo, result in failures:
+                messages.error(request, _format_condition_failure(todo, result))
         else:
             ctx["todos_ack"] = True
+            ctx.pop("todos_ack_auto", None)
+    else:
+        if ctx.pop("todos_ack_auto", None):
+            ctx["todos_ack"] = False
+        else:
+            ctx.setdefault("todos_ack", False)

-    if
+    if ctx.get("todos_ack"):
+        ctx.pop("todos_block_logged", None)
+        ctx.pop("todos", None)
+        ctx.pop("todos_required", None)
+    else:
         ctx["todos"] = [
             {
                 "id": todo.pk,
@@ -1841,12 +1904,9 @@
                 "url": todo.url,
                 "request_details": todo.request_details,
             }
-            for todo in
+            for todo in blocking_todos
         ]
         ctx["todos_required"] = True
-    else:
-        ctx.pop("todos", None)
-        ctx.pop("todos_required", None)

     log_name = _release_log_name(release.package.name, release.version)
     if ctx.get("log") != log_name:
@@ -1856,6 +1916,8 @@
             "started": ctx.get("started", False),
         }
         step_count = 0
+        if not blocking_todos:
+            ctx["todos_ack"] = True
     log_path = log_dir / log_name
     ctx.setdefault("log", log_name)
     ctx.setdefault("paused", False)
@@ -1932,7 +1994,7 @@
     if to_run == step_count:
         name, func = steps[to_run]
         try:
-            func(release, ctx, log_path)
+            func(release, ctx, log_path, user=request.user)
         except PendingTodos:
             pass
         except ApprovalRequired:
@@ -2049,6 +2111,14 @@
     )

     is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
+    resume_available = (
+        ctx.get("started")
+        and not paused
+        and not done
+        and not ctx.get("error")
+        and step_count < len(steps)
+        and next_step is None
+    )
     can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
     release_manager_owner = manager.owner_display() if manager else ""
     try:
@@ -2103,9 +2173,11 @@
         "has_release_manager": bool(manager),
         "current_user_admin_url": current_user_admin_url,
         "is_running": is_running,
+        "resume_available": resume_available,
         "can_resume": can_resume,
         "dry_run": dry_run_active,
         "dry_run_toggle_enabled": dry_run_toggle_enabled,
+        "warnings": ctx.get("warnings", []),
     }
     request.session[session_key] = ctx
     if done or ctx.get("error"):
@@ -2303,6 +2375,7 @@ def todo_focus(request, pk: int):
         "focus_auth": focus_auth,
         "next_url": _get_return_url(request),
         "done_url": reverse("todo-done", args=[todo.pk]),
+        "delete_url": reverse("todo-delete", args=[todo.pk]),
         "snapshot_url": reverse("todo-snapshot", args=[todo.pk]),
     }
     return render(request, "core/todo_focus.html", context)
@@ -2321,7 +2394,29 @@ def todo_done(request, pk: int):
         messages.error(request, _format_condition_failure(todo, result))
         return redirect(redirect_to)
     todo.done_on = timezone.now()
-    todo.
+    todo.populate_done_metadata(request.user)
+    todo.save(
+        update_fields=[
+            "done_on",
+            "done_node",
+            "done_version",
+            "done_revision",
+            "done_username",
+        ]
+    )
+    return redirect(redirect_to)
+
+
+@staff_member_required
+@require_POST
+def todo_delete(request, pk: int):
+    redirect_to = reverse("admin:index")
+    try:
+        todo = Todo.objects.get(pk=pk, is_deleted=False)
+    except Todo.DoesNotExist:
+        return redirect(redirect_to)
+    todo.is_deleted = True
+    todo.save(update_fields=["is_deleted"])
     return redirect(redirect_to)
