arthexis 0.1.19__py3-none-any.whl → 0.1.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arthexis-0.1.19.dist-info → arthexis-0.1.20.dist-info}/METADATA +3 -3
- {arthexis-0.1.19.dist-info → arthexis-0.1.20.dist-info}/RECORD +38 -38
- core/admin.py +142 -1
- core/backends.py +8 -2
- core/environment.py +221 -4
- core/models.py +124 -25
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/sigil_builder.py +2 -2
- core/tasks.py +24 -1
- core/tests.py +1 -0
- core/views.py +70 -36
- nodes/admin.py +133 -1
- nodes/models.py +294 -48
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +100 -2
- nodes/tests.py +532 -15
- nodes/urls.py +4 -0
- nodes/views.py +500 -95
- ocpp/admin.py +101 -3
- ocpp/consumers.py +106 -9
- ocpp/models.py +83 -1
- ocpp/tasks.py +4 -0
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +3 -1
- ocpp/tests.py +100 -9
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +3 -3
- ocpp/views.py +101 -28
- pages/context_processors.py +15 -9
- pages/defaults.py +1 -1
- pages/module_defaults.py +5 -5
- pages/tests.py +110 -38
- pages/urls.py +1 -0
- pages/views.py +108 -8
- {arthexis-0.1.19.dist-info → arthexis-0.1.20.dist-info}/WHEEL +0 -0
- {arthexis-0.1.19.dist-info → arthexis-0.1.20.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.19.dist-info → arthexis-0.1.20.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
@@ -5,7 +5,7 @@ from django.contrib.auth.models import (
 )
 from django.db import DatabaseError, IntegrityError, connections, models, transaction
 from django.db.models import Q
-from django.db.models.functions import Lower
+from django.db.models.functions import Lower, Length
 from django.conf import settings
 from django.contrib.auth import get_user_model
 from django.utils.translation import gettext_lazy as _
@@ -1764,6 +1764,7 @@ class RFID(Entity):
     """RFID tag that may be assigned to one account."""

     label_id = models.AutoField(primary_key=True, db_column="label_id")
+    MATCH_PREFIX_LENGTH = 8
     rfid = models.CharField(
         max_length=255,
         unique=True,
@@ -1939,6 +1940,108 @@ class RFID(Entity):
     def __str__(self):  # pragma: no cover - simple representation
         return str(self.label_id)

+    @classmethod
+    def normalize_code(cls, value: str) -> str:
+        """Return ``value`` normalized for comparisons."""
+
+        return "".join((value or "").split()).upper()
+
+    def adopt_rfid(self, candidate: str) -> bool:
+        """Adopt ``candidate`` as the stored RFID if it is a better match."""
+
+        normalized = type(self).normalize_code(candidate)
+        if not normalized:
+            return False
+        current = type(self).normalize_code(self.rfid)
+        if current == normalized:
+            return False
+        if not current:
+            self.rfid = normalized
+            return True
+        reversed_current = type(self).reverse_uid(current)
+        if reversed_current and reversed_current == normalized:
+            self.rfid = normalized
+            return True
+        if len(normalized) < len(current):
+            self.rfid = normalized
+            return True
+        if len(normalized) == len(current) and normalized < current:
+            self.rfid = normalized
+            return True
+        return False
+
+    @classmethod
+    def matching_queryset(cls, value: str) -> models.QuerySet["RFID"]:
+        """Return RFID records matching ``value`` using prefix comparison."""
+
+        normalized = cls.normalize_code(value)
+        if not normalized:
+            return cls.objects.none()
+
+        conditions: list[Q] = []
+        candidate = normalized
+        if candidate:
+            conditions.append(Q(rfid=candidate))
+        alternate = cls.reverse_uid(candidate)
+        if alternate and alternate != candidate:
+            conditions.append(Q(rfid=alternate))
+
+        prefix_length = min(len(candidate), cls.MATCH_PREFIX_LENGTH)
+        if prefix_length:
+            prefix = candidate[:prefix_length]
+            conditions.append(Q(rfid__startswith=prefix))
+            if alternate and alternate != candidate:
+                alt_prefix = alternate[:prefix_length]
+                if alt_prefix:
+                    conditions.append(Q(rfid__startswith=alt_prefix))
+
+        query: Q | None = None
+        for condition in conditions:
+            query = condition if query is None else query | condition
+
+        if query is None:
+            return cls.objects.none()
+
+        queryset = cls.objects.filter(query).distinct()
+        return queryset.annotate(rfid_length=Length("rfid")).order_by(
+            "rfid_length", "rfid", "pk"
+        )
+
+    @classmethod
+    def find_match(cls, value: str) -> "RFID | None":
+        """Return the best matching RFID for ``value`` if it exists."""
+
+        return cls.matching_queryset(value).first()
+
+    @classmethod
+    def update_or_create_from_code(
+        cls, value: str, defaults: dict[str, Any] | None = None
+    ) -> tuple["RFID", bool]:
+        """Update or create an RFID using relaxed matching rules."""
+
+        normalized = cls.normalize_code(value)
+        if not normalized:
+            raise ValueError("RFID value is required")
+
+        defaults_map = defaults.copy() if defaults else {}
+        existing = cls.find_match(normalized)
+        if existing:
+            update_fields: set[str] = set()
+            if existing.adopt_rfid(normalized):
+                update_fields.add("rfid")
+            for field_name, new_value in defaults_map.items():
+                if getattr(existing, field_name) != new_value:
+                    setattr(existing, field_name, new_value)
+                    update_fields.add(field_name)
+            if update_fields:
+                existing.save(update_fields=sorted(update_fields))
+            return existing, False
+
+        create_kwargs = defaults_map
+        create_kwargs["rfid"] = normalized
+        tag = cls.objects.create(**create_kwargs)
+        return tag, True
+
     @classmethod
     def normalize_endianness(cls, value: object) -> str:
         """Return a valid endianness value, defaulting to BIG."""
@@ -2033,25 +2136,12 @@ class RFID(Entity):
     ) -> tuple["RFID", bool]:
         """Return or create an RFID that was detected via scanning."""

-        normalized =
+        normalized = cls.normalize_code(rfid)
         desired_endianness = cls.normalize_endianness(endianness)
-
-        if normalized and len(normalized) % 2 == 0:
-            bytes_list = [normalized[i : i + 2] for i in range(0, len(normalized), 2)]
-            bytes_list.reverse()
-            alternate_candidate = "".join(bytes_list)
-            if alternate_candidate != normalized:
-                alternate = alternate_candidate
-
-        existing = None
-        if normalized:
-            existing = cls.objects.filter(rfid=normalized).first()
-        if not existing and alternate:
-            existing = cls.objects.filter(rfid=alternate).first()
+        existing = cls.find_match(normalized)
         if existing:
             update_fields: list[str] = []
-            if
-                existing.rfid = normalized
+            if existing.adopt_rfid(normalized):
                 update_fields.append("rfid")
             if existing.endianness != desired_endianness:
                 existing.endianness = desired_endianness
@@ -2079,23 +2169,28 @@ class RFID(Entity):
                 tag = cls.objects.create(**create_kwargs)
                 cls._reset_label_sequence()
             except IntegrityError:
-                existing = cls.
+                existing = cls.find_match(normalized)
                 if existing:
                     return existing, False
             else:
                 return tag, True
         raise IntegrityError("Unable to allocate label id for scanned RFID")

-    @
-    def get_account_by_rfid(value):
+    @classmethod
+    def get_account_by_rfid(cls, value):
        """Return the energy account associated with an RFID code if it exists."""
        try:
            EnergyAccount = apps.get_model("core", "EnergyAccount")
        except LookupError:  # pragma: no cover - energy accounts app optional
            return None
-
-
-
+        matches = cls.matching_queryset(value).filter(allowed=True)
+        if not matches.exists():
+            return None
+        return (
+            EnergyAccount.objects.filter(rfids__in=matches)
+            .distinct()
+            .first()
+        )

     class Meta:
         verbose_name = "RFID"
@@ -2795,7 +2890,10 @@ class ClientReport(Entity):
     def build_rows(start_date=None, end_date=None, *, for_display: bool = False):
         from ocpp.models import Transaction

-        qs = Transaction.objects.
+        qs = Transaction.objects.filter(
+            (Q(rfid__isnull=False) & ~Q(rfid=""))
+            | (Q(vid__isnull=False) & ~Q(vid=""))
+        )
         if start_date:
             from datetime import datetime, time, timedelta, timezone as pytimezone

@@ -2841,7 +2939,7 @@ class ClientReport(Entity):
                 subject = str(tag.label_id)

             if subject is None:
-                subject = tx.rfid
+                subject = tx.rfid or tx.vid

             start_value = tx.start_time
             end_value = tx.stop_time
@@ -2853,6 +2951,7 @@ class ClientReport(Entity):
                 {
                     "subject": subject,
                     "rfid": tx.rfid,
+                    "vid": tx.vid,
                     "kw": energy,
                     "start": start_value,
                     "end": end_value,
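Note: the RFID helpers added above form a small relaxed-matching API. A minimal usage sketch, with hypothetical values (reverse_uid already existed on the model; only normalize_code, find_match, and update_or_create_from_code are new in this release):

    # Whitespace is stripped and the code is uppercased before any comparison.
    code = RFID.normalize_code(" 04 a1 b2 c3 d4 e5 f6 ")   # "04A1B2C3D4E5F6"

    # find_match() considers the exact code, its byte-reversed form, and an
    # 8-character prefix of either (MATCH_PREFIX_LENGTH) before giving up.
    tag = RFID.find_match(code)

    # update_or_create_from_code() reuses find_match() and only inserts a new
    # row when nothing comparable exists; otherwise it updates the match.
    tag, created = RFID.update_or_create_from_code(code, {"allowed": True})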
core/notifications.py
CHANGED
@@ -39,7 +39,7 @@ class NotificationManager:
         self.lock_file.parent.mkdir(parents=True, exist_ok=True)
         # ``plyer`` is only available on Windows and can fail when used in
         # a non-interactive environment (e.g. service or CI).
-        # Any failure will
+        # Any failure will fall back to logging quietly.

     def _write_lock_file(self, subject: str, body: str) -> None:
         self.lock_file.write_text(f"{subject}\n{body}\n", encoding="utf-8")
core/reference_utils.py
CHANGED
@@ -70,17 +70,16 @@ def filter_visible_references(
         required_sites = {current_site.pk for current_site in ref.sites.all()}

         if required_roles or required_features or required_sites:
-            allowed =
-            if required_roles
-            allowed =
-
-
-
-
-
-
-
-            allowed = True
+            allowed = True
+            if required_roles:
+                allowed = bool(node_role_id and node_role_id in required_roles)
+            if allowed and required_features:
+                allowed = bool(
+                    node_active_feature_ids
+                    and node_active_feature_ids.intersection(required_features)
+                )
+            if allowed and required_sites:
+                allowed = bool(site_id and site_id in required_sites)

             if not allowed:
                 continue
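The rewritten visibility gate reads as a chain of narrowing checks. A standalone sketch of the same predicate, with the queryset-derived sets replaced by plain arguments (names mirror the diff, but the function itself is illustrative only):

    def is_reference_visible(required_roles, required_features, required_sites,
                             node_role_id, node_active_feature_ids, site_id):
        # Each requirement only narrows the result if the earlier checks still pass.
        allowed = True
        if required_roles:
            allowed = bool(node_role_id and node_role_id in required_roles)
        if allowed and required_features:
            allowed = bool(
                node_active_feature_ids
                and node_active_feature_ids.intersection(required_features)
            )
        if allowed and required_sites:
            allowed = bool(site_id and site_id in required_sites)
        return allowed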
core/sigil_builder.py
CHANGED
@@ -40,12 +40,12 @@ def _sigil_builder_view(request):
         {
             "prefix": "ENV",
             "url": reverse("admin:environment"),
-            "label": _("
+            "label": _("Environment"),
         },
         {
             "prefix": "CONF",
             "url": reverse("admin:config"),
-            "label": _("
+            "label": _("Django Settings"),
         },
         {
             "prefix": "SYS",
core/tasks.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations

 import logging
 import shutil
+import re
 import subprocess
 from pathlib import Path
 import urllib.error
@@ -102,6 +103,21 @@ def _resolve_service_url(base_dir: Path) -> str:
     return f"http://127.0.0.1:{port}/"


+def _parse_major_minor(version: str) -> tuple[int, int] | None:
+    match = re.match(r"^\s*(\d+)\.(\d+)", version)
+    if not match:
+        return None
+    return int(match.group(1)), int(match.group(2))
+
+
+def _shares_stable_series(local: str, remote: str) -> bool:
+    local_parts = _parse_major_minor(local)
+    remote_parts = _parse_major_minor(remote)
+    if not local_parts or not remote_parts:
+        return False
+    return local_parts == remote_parts
+
+
 @shared_task
 def check_github_updates() -> None:
     """Check the GitHub repo for updates and upgrade if needed."""
@@ -196,9 +212,16 @@ def check_github_updates() -> None:
     if startup:
         startup()
     return
+    if mode == "stable" and _shares_stable_series(local, remote):
+        if startup:
+            startup()
+        return
     if notify:
         notify("Upgrading...", upgrade_stamp)
-
+    if mode == "stable":
+        args = ["./upgrade.sh", "--stable", "--no-restart"]
+    else:
+        args = ["./upgrade.sh", "--no-restart"]
     upgrade_was_applied = True

     with log_file.open("a") as fh:
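The new stable-series guard compares only the major.minor portion of the two versions, so a patch-level difference no longer triggers an upgrade in stable mode. Expected behaviour, sketched from the helpers above (inputs are illustrative):

    _parse_major_minor("0.1.20")               # (0, 1)
    _parse_major_minor("not-a-version")        # None
    _shares_stable_series("0.1.19", "0.1.20")  # True  -> stable mode skips the upgrade
    _shares_stable_series("0.1.20", "0.2.0")   # False -> upgrade.sh --stable --no-restart runs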
core/tests.py
CHANGED
core/views.py
CHANGED
@@ -448,8 +448,11 @@ def _resolve_release_log_dir(preferred: Path) -> tuple[Path, str | None]:

     env_override = os.environ.pop("ARTHEXIS_LOG_DIR", None)
     fallback = select_log_dir(Path(settings.BASE_DIR))
-    if env_override
-
+    if env_override is not None:
+        if Path(env_override) == fallback:
+            os.environ["ARTHEXIS_LOG_DIR"] = env_override
+        else:
+            os.environ["ARTHEXIS_LOG_DIR"] = str(fallback)

     if fallback == preferred:
         if error:
@@ -608,6 +611,43 @@ def _git_authentication_missing(exc: subprocess.CalledProcessError) -> bool:
     return any(marker in message for marker in auth_markers)


+def _push_release_changes(log_path: Path) -> bool:
+    """Push release commits to ``origin`` and log the outcome."""
+
+    if not _has_remote("origin"):
+        _append_log(
+            log_path, "No git remote configured; skipping push of release changes"
+        )
+        return False
+
+    try:
+        branch = _current_branch()
+        if branch is None:
+            push_cmd = ["git", "push", "origin", "HEAD"]
+        elif _has_upstream(branch):
+            push_cmd = ["git", "push"]
+        else:
+            push_cmd = ["git", "push", "--set-upstream", "origin", branch]
+        subprocess.run(push_cmd, check=True, capture_output=True, text=True)
+    except subprocess.CalledProcessError as exc:
+        details = _format_subprocess_error(exc)
+        if _git_authentication_missing(exc):
+            _append_log(
+                log_path,
+                "Authentication is required to push release changes to origin; skipping push",
+            )
+            if details:
+                _append_log(log_path, details)
+            return False
+        _append_log(
+            log_path, f"Failed to push release changes to origin: {details}"
+        )
+        raise Exception("Failed to push release changes") from exc
+
+    _append_log(log_path, "Pushed release changes to origin")
+    return True
+
+
 def _ensure_origin_main_unchanged(log_path: Path) -> None:
     """Verify that ``origin/main`` has not advanced during the release."""

@@ -1340,37 +1380,7 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
             log_path,
             f"Committed release metadata for v{release.version}",
         )
-
-        try:
-            branch = _current_branch()
-            if branch is None:
-                push_cmd = ["git", "push", "origin", "HEAD"]
-            elif _has_upstream(branch):
-                push_cmd = ["git", "push"]
-            else:
-                push_cmd = ["git", "push", "--set-upstream", "origin", branch]
-            subprocess.run(push_cmd, check=True, capture_output=True, text=True)
-        except subprocess.CalledProcessError as exc:
-            details = _format_subprocess_error(exc)
-            if _git_authentication_missing(exc):
-                _append_log(
-                    log_path,
-                    "Authentication is required to push release changes to origin; skipping push",
-                )
-                if details:
-                    _append_log(log_path, details)
-            else:
-                _append_log(
-                    log_path, f"Failed to push release changes to origin: {details}"
-                )
-                raise Exception("Failed to push release changes") from exc
-        else:
-            _append_log(log_path, "Pushed release changes to origin")
-    else:
-        _append_log(
-            log_path,
-            "No git remote configured; skipping push of release changes",
-        )
+    _push_release_changes(log_path)
     PackageRelease.dump_fixture()
     _append_log(log_path, "Updated release fixtures")
     _record_release_todo(release, ctx, log_path)
@@ -1561,6 +1571,30 @@ def _step_publish(release, ctx, log_path: Path) -> None:
         _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
     if release.github_url:
         _append_log(log_path, f"Recorded GitHub URL: {release.github_url}")
+    fixture_paths = [
+        str(path) for path in Path("core/fixtures").glob("releases__*.json")
+    ]
+    if fixture_paths:
+        status = subprocess.run(
+            ["git", "status", "--porcelain", "--", *fixture_paths],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        if status.stdout.strip():
+            subprocess.run(["git", "add", *fixture_paths], check=True)
+            _append_log(log_path, "Staged publish metadata updates")
+            commit_message = f"chore: record publish metadata for v{release.version}"
+            subprocess.run(["git", "commit", "-m", commit_message], check=True)
+            _append_log(
+                log_path, f"Committed publish metadata for v{release.version}"
+            )
+            _push_release_changes(log_path)
+        else:
+            _append_log(
+                log_path,
+                "No release metadata updates detected after publish; skipping commit",
+            )
     _append_log(log_path, "Upload complete")


@@ -1754,9 +1788,9 @@ def rfid_batch(request):
     else:
         post_auth_command = post_auth_command.strip()

-    tag, _ = RFID.
-        rfid
-
+    tag, _ = RFID.update_or_create_from_code(
+        rfid,
+        {
             "allowed": allowed,
             "color": color,
             "released": released,
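The push logic previously inlined in the promote step now lives in _push_release_changes and is reused after publish. Its command selection reduces to a small decision, sketched here as a hypothetical standalone helper (the real function also logs the outcome and tolerates missing authentication):

    def build_push_command(branch, has_upstream):
        # Detached HEAD: push the current commit explicitly.
        if branch is None:
            return ["git", "push", "origin", "HEAD"]
        # A tracking branch is already configured: a bare push suffices.
        if has_upstream:
            return ["git", "push"]
        # First push of a new branch: create the upstream as it goes.
        return ["git", "push", "--set-upstream", "origin", branch]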
nodes/admin.py
CHANGED
@@ -8,7 +8,7 @@ from django.contrib.admin import helpers
 from django.contrib.admin.widgets import FilteredSelectMultiple
 from django.core.exceptions import PermissionDenied
 from django.db.models import Count
-from django.http import HttpResponse, JsonResponse
+from django.http import Http404, HttpResponse, JsonResponse
 from django.shortcuts import redirect, render
 from django.template.response import TemplateResponse
 from django.urls import NoReverseMatch, path, reverse
@@ -233,6 +233,7 @@ class NodeAdmin(EntityModelAdmin):
         "role",
         "relation",
         "last_seen",
+        "proxy_link",
     )
     search_fields = ("hostname", "address", "mac_address")
     change_list_template = "admin/nodes/node/change_list.html"
@@ -247,6 +248,7 @@ class NodeAdmin(EntityModelAdmin):
         "address",
         "mac_address",
         "port",
+        "message_queue_length",
         "role",
         "current_relation",
     )
@@ -290,6 +292,16 @@ class NodeAdmin(EntityModelAdmin):
     def relation(self, obj):
         return obj.get_current_relation_display()

+    @admin.display(description=_("Proxy"))
+    def proxy_link(self, obj):
+        if not obj or obj.is_local:
+            return ""
+        try:
+            url = reverse("admin:nodes_node_proxy", args=[obj.pk])
+        except NoReverseMatch:
+            return ""
+        return format_html('<a class="button" href="{}">{}</a>', url, _("Proxy"))
+
     def get_urls(self):
         urls = super().get_urls()
         custom = [
@@ -313,6 +325,11 @@ class NodeAdmin(EntityModelAdmin):
                 self.admin_site.admin_view(self.update_selected_progress),
                 name="nodes_node_update_selected_progress",
             ),
+            path(
+                "<int:node_id>/proxy/",
+                self.admin_site.admin_view(self.proxy_node),
+                name="nodes_node_proxy",
+            ),
         ]
         return custom + urls

@@ -332,6 +349,121 @@ class NodeAdmin(EntityModelAdmin):
         }
         return render(request, "admin/nodes/node/register_remote.html", context)

+    def _load_local_private_key(self, node):
+        security_dir = Path(node.base_path or settings.BASE_DIR) / "security"
+        priv_path = security_dir / f"{node.public_endpoint}"
+        if not priv_path.exists():
+            return None, _("Local node private key not found.")
+        try:
+            return (
+                serialization.load_pem_private_key(
+                    priv_path.read_bytes(), password=None
+                ),
+                "",
+            )
+        except Exception as exc:  # pragma: no cover - unexpected errors
+            return None, str(exc)
+
+    def _build_proxy_payload(self, request, local_node):
+        user = request.user
+        payload = {
+            "requester": str(local_node.uuid),
+            "user": {
+                "username": user.get_username(),
+                "email": user.email or "",
+                "first_name": user.first_name or "",
+                "last_name": user.last_name or "",
+                "is_staff": user.is_staff,
+                "is_superuser": user.is_superuser,
+                "groups": list(user.groups.values_list("name", flat=True)),
+                "permissions": sorted(user.get_all_permissions()),
+            },
+            "target": reverse("admin:index"),
+        }
+        return payload
+
+    def _start_proxy_session(self, request, node):
+        if node.is_local:
+            return {"ok": False, "message": _("Local node cannot be proxied.")}
+
+        local_node = Node.get_local()
+        if local_node is None:
+            try:
+                local_node, _ = Node.register_current()
+            except Exception as exc:  # pragma: no cover - unexpected errors
+                return {"ok": False, "message": str(exc)}
+
+        private_key, error = self._load_local_private_key(local_node)
+        if private_key is None:
+            return {"ok": False, "message": error}
+
+        payload = self._build_proxy_payload(request, local_node)
+        body = json.dumps(payload, separators=(",", ":"), sort_keys=True)
+        try:
+            signature = private_key.sign(
+                body.encode(),
+                padding.PKCS1v15(),
+                hashes.SHA256(),
+            )
+        except Exception as exc:  # pragma: no cover - unexpected errors
+            return {"ok": False, "message": str(exc)}
+
+        headers = {
+            "Content-Type": "application/json",
+            "X-Signature": base64.b64encode(signature).decode(),
+        }
+
+        last_error = ""
+        for url in self._iter_remote_urls(node, "/nodes/proxy/session/"):
+            try:
+                response = requests.post(url, data=body, headers=headers, timeout=5)
+            except RequestException as exc:
+                last_error = str(exc)
+                continue
+            if not response.ok:
+                last_error = f"{response.status_code} {response.text}"
+                continue
+            try:
+                data = response.json()
+            except ValueError:
+                last_error = "Invalid JSON response"
+                continue
+            login_url = data.get("login_url")
+            if not login_url:
+                last_error = "login_url missing"
+                continue
+            return {
+                "ok": True,
+                "login_url": login_url,
+                "expires": data.get("expires"),
+            }

+        return {
+            "ok": False,
+            "message": last_error or "Unable to initiate proxy.",
+        }
+
+    def proxy_node(self, request, node_id):
+        node = self.get_queryset(request).filter(pk=node_id).first()
+        if not node:
+            raise Http404
+        if not self.has_view_permission(request):
+            raise PermissionDenied
+        result = self._start_proxy_session(request, node)
+        if not result.get("ok"):
+            message = result.get("message") or _("Unable to proxy node.")
+            self.message_user(request, message, messages.ERROR)
+            return redirect("admin:nodes_node_changelist")
+
+        context = {
+            **self.admin_site.each_context(request),
+            "opts": self.model._meta,
+            "node": node,
+            "frame_url": result.get("login_url"),
+            "expires": result.get("expires"),
+        }
+        return TemplateResponse(request, "admin/nodes/node/proxy.html", context)
+
     @admin.action(description="Register Visitor")
     def register_visitor(self, request, queryset=None):
         return self.register_visitor_view(request)
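The proxy handshake above signs the compact JSON payload with the local node's RSA private key (PKCS1v15 with SHA-256) and sends the result base64-encoded in an X-Signature header. A minimal sketch of how the receiving node could verify that header, assuming it holds the requester's PEM public key (illustrative only; the actual server-side handler is not shown in this diff):

    import base64

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import padding

    def verify_proxy_request(public_key_pem: bytes, body: bytes, signature_b64: str) -> bool:
        # Load the requesting node's public key and check the detached signature
        # over the exact request body that was signed.
        public_key = serialization.load_pem_public_key(public_key_pem)
        try:
            public_key.verify(
                base64.b64decode(signature_b64),
                body,
                padding.PKCS1v15(),
                hashes.SHA256(),
            )
        except InvalidSignature:
            return False
        return True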