arthexis-0.1.20-py3-none-any.whl → arthexis-0.1.22-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic.
- {arthexis-0.1.20.dist-info → arthexis-0.1.22.dist-info}/METADATA +10 -11
- {arthexis-0.1.20.dist-info → arthexis-0.1.22.dist-info}/RECORD +34 -36
- config/asgi.py +1 -15
- config/settings.py +4 -26
- config/urls.py +5 -1
- core/admin.py +140 -252
- core/apps.py +0 -6
- core/environment.py +2 -220
- core/models.py +425 -77
- core/system.py +76 -0
- core/tests.py +153 -15
- core/views.py +35 -97
- nodes/admin.py +165 -32
- nodes/apps.py +11 -0
- nodes/models.py +26 -6
- nodes/tests.py +263 -1
- nodes/views.py +61 -1
- ocpp/admin.py +68 -7
- ocpp/consumers.py +1 -0
- ocpp/models.py +71 -1
- ocpp/tasks.py +99 -1
- ocpp/tests.py +310 -2
- ocpp/views.py +365 -5
- pages/admin.py +112 -15
- pages/apps.py +32 -0
- pages/context_processors.py +0 -12
- pages/forms.py +31 -8
- pages/models.py +42 -2
- pages/tests.py +361 -63
- pages/urls.py +5 -1
- pages/views.py +264 -16
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.20.dist-info → arthexis-0.1.22.dist-info}/WHEEL +0 -0
- {arthexis-0.1.20.dist-info → arthexis-0.1.22.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.20.dist-info → arthexis-0.1.22.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
@@ -15,14 +15,20 @@ from django.apps import apps
 from django.db.models.signals import m2m_changed, post_delete, post_save
 from django.dispatch import receiver
 from django.views.decorators.debug import sensitive_variables
-from datetime import
+from datetime import (
+    time as datetime_time,
+    timedelta,
+    datetime as datetime_datetime,
+    date as datetime_date,
+    timezone as datetime_timezone,
+)
 import logging
+import json
 from django.contrib.contenttypes.models import ContentType
 import hashlib
 import hmac
 import os
 import subprocess
-import secrets
 import re
 from io import BytesIO
 from django.core.files.base import ContentFile
@@ -33,7 +39,8 @@ import uuid
 from pathlib import Path
 from django.core import serializers
 from django.core.management.color import no_style
-from urllib.parse import quote_plus, urlparse
+from urllib.parse import quote, quote_plus, urlparse
+from zoneinfo import ZoneInfo
 from utils import revision as revision_utils
 from typing import Any, Type
 from defusedxml import xmlrpc as defused_xmlrpc
@@ -518,17 +525,13 @@ class User(Entity, AbstractUser):
     def odoo_profile(self):
         return self._direct_profile("OdooProfile")
 
-    @property
-    def assistant_profile(self):
-        return self._direct_profile("AssistantProfile")
-
     @property
     def social_profile(self):
         return self._direct_profile("SocialProfile")
 
     @property
-    def
-        return self.
+    def google_calendar_profile(self):
+        return self._direct_profile("GoogleCalendarProfile")
 
 
 class UserPhoneNumber(Entity):
@@ -847,6 +850,184 @@ class OpenPayProfile(Profile):
     ]
 
 
+class GoogleCalendarProfile(Profile):
+    """Store Google Calendar configuration for a user or security group."""
+
+    profile_fields = ("calendar_id", "api_key", "display_name", "timezone")
+
+    calendar_id = SigilShortAutoField(max_length=255)
+    api_key = SigilShortAutoField(max_length=255)
+    display_name = models.CharField(max_length=255, blank=True)
+    max_events = models.PositiveIntegerField(
+        default=5,
+        validators=[MinValueValidator(1), MaxValueValidator(20)],
+        help_text=_("Number of upcoming events to display (1-20)."),
+    )
+    timezone = SigilShortAutoField(max_length=100, blank=True)
+
+    GOOGLE_EVENTS_URL = (
+        "https://www.googleapis.com/calendar/v3/calendars/{calendar}/events"
+    )
+    GOOGLE_EMBED_URL = "https://calendar.google.com/calendar/embed?src={calendar}&ctz={tz}"
+
+    class Meta:
+        verbose_name = _("Google Calendar")
+        verbose_name_plural = _("Google Calendars")
+        constraints = [
+            models.CheckConstraint(
+                check=(
+                    (Q(user__isnull=False) & Q(group__isnull=True))
+                    | (Q(user__isnull=True) & Q(group__isnull=False))
+                ),
+                name="googlecalendarprofile_requires_owner",
+            )
+        ]
+
+    def __str__(self):  # pragma: no cover - simple representation
+        label = self.get_display_name()
+        return label or self.resolved_calendar_id()
+
+    def resolved_calendar_id(self) -> str:
+        value = self.resolve_sigils("calendar_id")
+        return value or self.calendar_id or ""
+
+    def resolved_api_key(self) -> str:
+        value = self.resolve_sigils("api_key")
+        return value or self.api_key or ""
+
+    def resolved_timezone(self) -> str:
+        value = self.resolve_sigils("timezone")
+        return value or self.timezone or ""
+
+    def get_timezone(self) -> ZoneInfo:
+        tz_name = self.resolved_timezone() or settings.TIME_ZONE
+        try:
+            return ZoneInfo(tz_name)
+        except Exception:
+            return ZoneInfo("UTC")
+
+    def get_display_name(self) -> str:
+        value = self.resolve_sigils("display_name")
+        if value:
+            return value
+        if self.display_name:
+            return self.display_name
+        return ""
+
+    def build_events_url(self) -> str:
+        calendar = self.resolved_calendar_id().strip()
+        if not calendar:
+            return ""
+        encoded = quote(calendar, safe="@")
+        return self.GOOGLE_EVENTS_URL.format(calendar=encoded)
+
+    def build_calendar_url(self) -> str:
+        calendar = self.resolved_calendar_id().strip()
+        if not calendar:
+            return ""
+        tz = self.get_timezone().key
+        encoded_calendar = quote_plus(calendar)
+        encoded_tz = quote_plus(tz)
+        return self.GOOGLE_EMBED_URL.format(calendar=encoded_calendar, tz=encoded_tz)
+
+    def _parse_event_point(self, data: dict) -> tuple[datetime_datetime | None, bool]:
+        if not isinstance(data, dict):
+            return None, False
+
+        tz_name = data.get("timeZone")
+        default_tz = self.get_timezone()
+        tzinfo = default_tz
+        if tz_name:
+            try:
+                tzinfo = ZoneInfo(tz_name)
+            except Exception:
+                tzinfo = default_tz
+
+        timestamp = data.get("dateTime")
+        if timestamp:
+            dt = parse_datetime(timestamp)
+            if dt is None:
+                try:
+                    dt = datetime_datetime.fromisoformat(
+                        timestamp.replace("Z", "+00:00")
+                    )
+                except ValueError:
+                    dt = None
+            if dt is not None and dt.tzinfo is None:
+                dt = dt.replace(tzinfo=tzinfo)
+            return dt, False
+
+        date_value = data.get("date")
+        if date_value:
+            try:
+                day = datetime_date.fromisoformat(date_value)
+            except ValueError:
+                return None, True
+            dt = datetime_datetime.combine(day, datetime_time.min, tzinfo=tzinfo)
+            return dt, True
+
+        return None, False
+
+    def fetch_events(self, *, max_results: int | None = None) -> list[dict[str, object]]:
+        calendar_id = self.resolved_calendar_id().strip()
+        api_key = self.resolved_api_key().strip()
+        if not calendar_id or not api_key:
+            return []
+
+        url = self.build_events_url()
+        if not url:
+            return []
+
+        now = timezone.now().astimezone(datetime_timezone.utc).replace(microsecond=0)
+        params = {
+            "key": api_key,
+            "singleEvents": "true",
+            "orderBy": "startTime",
+            "timeMin": now.isoformat().replace("+00:00", "Z"),
+            "maxResults": max_results or self.max_events or 5,
+        }
+
+        try:
+            response = requests.get(url, params=params, timeout=10)
+            response.raise_for_status()
+            payload = response.json()
+        except (requests.RequestException, ValueError):
+            logger.warning(
+                "Failed to fetch Google Calendar events for profile %s", self.pk,
+                exc_info=True,
+            )
+            return []
+
+        items = payload.get("items")
+        if not isinstance(items, list):
+            return []
+
+        events: list[dict[str, object]] = []
+        for item in items:
+            if not isinstance(item, dict):
+                continue
+            start, all_day = self._parse_event_point(item.get("start") or {})
+            end, _ = self._parse_event_point(item.get("end") or {})
+            summary = item.get("summary") or ""
+            link = item.get("htmlLink") or ""
+            location = item.get("location") or ""
+            if start is None:
+                continue
+            events.append(
+                {
+                    "summary": summary,
+                    "start": start,
+                    "end": end,
+                    "all_day": all_day,
+                    "html_link": link,
+                    "location": location,
+                }
+            )
+
+        events.sort(key=lambda event: event.get("start") or timezone.now())
+        return events
+
+
 class EmailInbox(Profile):
     """Credentials and configuration for connecting to an email mailbox."""
 
@@ -3423,7 +3604,13 @@ class PackageRelease(Entity):
        for release in cls.objects.all():
            name = f"releases__packagerelease_{release.version.replace('.', '_')}.json"
            path = base / name
-           data = serializers.serialize(
+           data = serializers.serialize(
+               "json",
+               [release],
+               use_natural_foreign_keys=True,
+               use_natural_primary_keys=True,
+           )
+           data = json.dumps(json.loads(data), indent=2) + "\n"
            expected.add(name)
            try:
                current = path.read_text(encoding="utf-8")
@@ -3703,73 +3890,6 @@ def _rfid_unique_energy_account(
            "RFID tags may only be assigned to one energy account."
        )
 
-
-def hash_key(key: str) -> str:
-    """Return a SHA-256 hash for ``key``."""
-
-    return hashlib.sha256(key.encode()).hexdigest()
-
-
-class AssistantProfile(Profile):
-    """Stores a hashed user key used by the assistant for authentication.
-
-    The plain-text ``user_key`` is generated server-side and shown only once.
-    Users must supply this key in the ``Authorization: Bearer <user_key>``
-    header when requesting protected endpoints. Only the hash is stored.
-    """
-
-    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
-    profile_fields = ("assistant_name", "user_key_hash", "scopes", "is_active")
-    assistant_name = models.CharField(max_length=100, default="Assistant")
-    user_key_hash = models.CharField(max_length=64, unique=True)
-    scopes = models.JSONField(default=list, blank=True)
-    created_at = models.DateTimeField(auto_now_add=True)
-    last_used_at = models.DateTimeField(null=True, blank=True)
-    is_active = models.BooleanField(default=True)
-
-    class Meta:
-        db_table = "workgroup_assistantprofile"
-        verbose_name = "Assistant Profile"
-        verbose_name_plural = "Assistant Profiles"
-        constraints = [
-            models.CheckConstraint(
-                check=(
-                    (Q(user__isnull=False) & Q(group__isnull=True))
-                    | (Q(user__isnull=True) & Q(group__isnull=False))
-                ),
-                name="assistantprofile_requires_owner",
-            )
-        ]
-
-    @classmethod
-    def issue_key(cls, user) -> tuple["AssistantProfile", str]:
-        """Create or update a profile and return it with a new plain key."""
-
-        key = secrets.token_hex(32)
-        key_hash = hash_key(key)
-        if user is None:
-            raise ValueError("Assistant profiles require a user instance")
-
-        profile, _ = cls.objects.update_or_create(
-            user=user,
-            defaults={
-                "user_key_hash": key_hash,
-                "last_used_at": None,
-                "is_active": True,
-            },
-        )
-        return profile, key
-
-    def touch(self) -> None:
-        """Record that the key was used."""
-
-        self.last_used_at = timezone.now()
-        self.save(update_fields=["last_used_at"])
-
-    def __str__(self) -> str:  # pragma: no cover - simple representation
-        return self.assistant_name or "AssistantProfile"
-
-
 def validate_relative_url(value: str) -> None:
     if not value:
         return
@@ -3794,7 +3914,38 @@ class Todo(Entity):
     generated_for_version = models.CharField(max_length=20, blank=True, default="")
     generated_for_revision = models.CharField(max_length=40, blank=True, default="")
     done_on = models.DateTimeField(null=True, blank=True)
+    done_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="completed_todos",
+        help_text="Node where this TODO was completed.",
+    )
+    done_version = models.CharField(max_length=20, blank=True, default="")
+    done_revision = models.CharField(max_length=40, blank=True, default="")
+    done_username = models.CharField(max_length=150, blank=True, default="")
     on_done_condition = ConditionTextField(blank=True, default="")
+    origin_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="originated_todos",
+        help_text="Node where this TODO was generated.",
+    )
+    original_user = models.ForeignKey(
+        settings.AUTH_USER_MODEL,
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="originated_todos",
+        help_text="User responsible for creating this TODO.",
+    )
+    original_user_is_authenticated = models.BooleanField(
+        default=False,
+        help_text="Whether the originating user was authenticated during creation.",
+    )
 
     objects = TodoManager()
 
@@ -3835,6 +3986,203 @@ class Todo(Entity):
            return field.evaluate(self)
        return ConditionCheckResult(True, "")
 
+    def save(self, *args, **kwargs):
+        created = self.pk is None
+        tracked_fields = {
+            "done_on",
+            "done_node",
+            "done_node_id",
+            "done_revision",
+            "done_username",
+            "done_version",
+            "is_deleted",
+        }
+        update_fields = kwargs.get("update_fields")
+        monitor_changes = not created and (
+            update_fields is None or tracked_fields.intersection(update_fields)
+        )
+        previous_state = None
+        if monitor_changes:
+            previous_state = (
+                type(self)
+                .all_objects.filter(pk=self.pk)
+                .values(
+                    "done_on",
+                    "done_node_id",
+                    "done_revision",
+                    "done_username",
+                    "done_version",
+                    "is_deleted",
+                )
+                .first()
+            )
+        super().save(*args, **kwargs)
+
+        if created:
+            return
+
+        previous_done_on = previous_state["done_on"] if previous_state else None
+        previous_is_deleted = previous_state["is_deleted"] if previous_state else False
+        previous_done_node = (
+            previous_state["done_node_id"] if previous_state else None
+        )
+        previous_done_revision = (
+            previous_state["done_revision"] if previous_state else ""
+        )
+        previous_done_username = (
+            previous_state["done_username"] if previous_state else ""
+        )
+        previous_done_version = (
+            previous_state["done_version"] if previous_state else ""
+        )
+        if (
+            previous_done_on == self.done_on
+            and previous_is_deleted == self.is_deleted
+            and previous_done_node == getattr(self, "done_node_id", None)
+            and previous_done_revision == self.done_revision
+            and previous_done_username == self.done_username
+            and previous_done_version == self.done_version
+        ):
+            return
+
+        self._update_fixture_state()
+
+    def populate_done_metadata(self, user=None) -> None:
+        """Populate metadata fields for a completed TODO."""
+
+        node = None
+        try:  # pragma: no cover - defensive import guard
+            from nodes.models import Node  # type: ignore
+        except Exception:  # pragma: no cover - when app not ready
+            Node = None
+
+        if Node is not None:
+            try:
+                node = Node.get_local()
+            except Exception:  # pragma: no cover - fallback on errors
+                node = None
+        self.done_node = node if node else None
+
+        version_value = ""
+        revision_value = ""
+        if node is not None:
+            version_value = (node.installed_version or "").strip()
+            revision_value = (node.installed_revision or "").strip()
+
+        if not version_value:
+            version_path = Path(settings.BASE_DIR) / "VERSION"
+            try:
+                version_value = version_path.read_text(encoding="utf-8").strip()
+            except OSError:
+                version_value = ""
+
+        if not revision_value:
+            try:
+                revision_value = revision_utils.get_revision() or ""
+            except Exception:  # pragma: no cover - defensive fallback
+                revision_value = ""
+
+        username_value = ""
+        if user is not None and getattr(user, "is_authenticated", False):
+            try:
+                username_value = user.get_username() or ""
+            except Exception:  # pragma: no cover - fallback to attribute
+                username_value = getattr(user, "username", "") or ""
+
+        self.done_version = version_value
+        self.done_revision = revision_value
+        self.done_username = username_value
+
+    def _update_fixture_state(self) -> None:
+        if not self.is_seed_data:
+            return
+
+        request_text = (self.request or "").strip()
+        if not request_text:
+            return
+
+        slug = self._fixture_slug(request_text)
+        if not slug:
+            return
+
+        base_dir = Path(settings.BASE_DIR)
+        fixture_path = base_dir / "core" / "fixtures" / f"todo__{slug}.json"
+        if not fixture_path.exists():
+            return
+
+        try:
+            with fixture_path.open("r", encoding="utf-8") as handle:
+                data = json.load(handle)
+        except Exception:
+            logger.exception("Failed to read TODO fixture %s", fixture_path)
+            return
+
+        if not isinstance(data, list):
+            return
+
+        updated = False
+        normalized_request = request_text.lower()
+        for item in data:
+            if not isinstance(item, dict):
+                continue
+            fields = item.get("fields")
+            if not isinstance(fields, dict):
+                continue
+            candidate = (fields.get("request") or "").strip().lower()
+            if candidate != normalized_request:
+                continue
+            if self._apply_fixture_fields(fields):
+                updated = True
+
+        if not updated:
+            return
+
+        content = json.dumps(data, indent=2, ensure_ascii=False)
+        if not content.endswith("\n"):
+            content += "\n"
+
+        try:
+            fixture_path.write_text(content, encoding="utf-8")
+        except OSError:
+            logger.exception("Failed to write TODO fixture %s", fixture_path)
+
+    def _apply_fixture_fields(self, fields: dict[str, object]) -> bool:
+        changed = False
+
+        def _assign(key: str, value: object) -> None:
+            nonlocal changed
+            if fields.get(key) != value:
+                fields[key] = value
+                changed = True
+
+        _assign("request", self.request or "")
+        _assign("url", self.url or "")
+        _assign("request_details", self.request_details or "")
+        _assign("done_version", self.done_version or "")
+        _assign("done_revision", self.done_revision or "")
+        _assign("done_username", self.done_username or "")
+
+        if self.done_on:
+            done_value = timezone.localtime(self.done_on)
+            _assign("done_on", done_value.isoformat())
+        else:
+            if fields.get("done_on") is not None:
+                fields["done_on"] = None
+                changed = True
+
+        if self.is_deleted:
+            _assign("is_deleted", True)
+        elif fields.get("is_deleted"):
+            fields["is_deleted"] = False
+            changed = True
+
+        return changed
+
+    @staticmethod
+    def _fixture_slug(value: str) -> str:
+        slug = re.sub(r"[^a-z0-9]+", "_", value.lower()).strip("_")
+        return slug
+
 
 class TOTPDeviceSettings(models.Model):
     """Per-device configuration options for authenticator enrollments."""
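For orientation, a minimal usage sketch of the GoogleCalendarProfile model added above. The class, its fields, and the build_calendar_url/fetch_events methods come from the diff; the user lookup, calendar ID, API key, and timezone values are placeholders, and creating the profile directly with a user owner is an assumption based on the ownership check constraint.

# Hypothetical usage sketch; placeholder values below are not part of the package.
from django.contrib.auth import get_user_model
from core.models import GoogleCalendarProfile

user = get_user_model().objects.get(username="example")  # assumed existing user
profile = GoogleCalendarProfile.objects.create(
    user=user,                                     # owner per the check constraint
    calendar_id="team@group.calendar.google.com",  # placeholder calendar ID
    api_key="google-api-key",                      # placeholder API key
    timezone="America/Mexico_City",                # placeholder timezone
)

print(profile.build_calendar_url())          # public embed URL for the calendar
for event in profile.fetch_events(max_results=3):
    print(event["start"], event["summary"])  # upcoming events, soonest first

Note that fetch_events returns an empty list when the calendar ID or API key is missing or the Google Calendar API request fails, so callers get a quiet fallback rather than an exception.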
core/system.py
CHANGED
@@ -101,6 +101,72 @@ def _open_changelog_entries() -> list[dict[str, str]]:
     return entries
 
 
+def _latest_release_changelog() -> dict[str, object]:
+    """Return the most recent tagged release entries for display."""
+
+    changelog_path = Path("CHANGELOG.rst")
+    try:
+        text = changelog_path.read_text(encoding="utf-8")
+    except (FileNotFoundError, OSError):
+        return {"title": "", "entries": []}
+
+    lines = text.splitlines()
+    state = "before"
+    release_title = ""
+    entries: list[dict[str, str]] = []
+
+    for raw_line in lines:
+        stripped = raw_line.strip()
+
+        if state == "before":
+            if stripped == "Unreleased":
+                state = "unreleased-heading"
+            continue
+
+        if state == "unreleased-heading":
+            if set(stripped) == {"-"}:
+                state = "unreleased-body"
+            else:
+                state = "unreleased-body"
+            continue
+
+        if state == "unreleased-body":
+            if not stripped:
+                state = "after-unreleased"
+            continue
+
+        if state == "after-unreleased":
+            if not stripped:
+                continue
+            release_title = stripped
+            state = "release-heading"
+            continue
+
+        if state == "release-heading":
+            if set(stripped) == {"-"}:
+                state = "release-body"
+            else:
+                state = "release-body"
+            continue
+
+        if state == "release-body":
+            if not stripped:
+                if entries:
+                    break
+                continue
+            if not stripped.startswith("- "):
+                break
+            trimmed = stripped[2:].strip()
+            if not trimmed:
+                continue
+            parts = trimmed.split(" ", 1)
+            sha = parts[0]
+            message = parts[1] if len(parts) > 1 else ""
+            entries.append({"sha": sha, "message": message})
+
+    return {"title": release_title, "entries": entries}
+
+
 def _exclude_changelog_entries(shas: Iterable[str]) -> int:
     """Remove entries matching ``shas`` from the changelog.
 
@@ -1073,6 +1139,7 @@ def _system_changelog_report_view(request):
        {
            "title": _("Changelog Report"),
            "open_changelog_entries": _open_changelog_entries(),
+           "latest_release_changelog": _latest_release_changelog(),
        }
    )
    return TemplateResponse(request, "admin/system_changelog_report.html", context)
@@ -1119,6 +1186,14 @@ class PendingTodoForm(forms.ModelForm):
        for name in ["request_details", "on_done_condition"]:
            self.fields[name].widget.attrs.setdefault("class", "vLargeTextField")
 
+       mark_done_widget = self.fields["mark_done"].widget
+       existing_classes = mark_done_widget.attrs.get("class", "").split()
+       if "approve-checkbox" not in existing_classes:
+           existing_classes.append("approve-checkbox")
+       mark_done_widget.attrs["class"] = " ".join(
+           class_name for class_name in existing_classes if class_name
+       )
+
 
 PendingTodoFormSet = modelformset_factory(Todo, form=PendingTodoForm, extra=0)
 
@@ -1144,6 +1219,7 @@ def _system_pending_todos_report_view(request):
            has_changes = form.has_changed()
            if mark_done and todo.done_on is None:
                todo.done_on = timezone.now()
+               todo.populate_done_metadata(request.user)
                approved_count += 1
                has_changes = True
            if has_changes:
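The pending-TODOs report view above now calls Todo.populate_done_metadata(request.user) when a TODO is marked done, before the formset saves it. A minimal sketch of the same flow outside the admin view, assuming an existing todo instance and user:

# Hypothetical sketch mirroring the view change above; `todo` and `user` are assumed to exist.
from django.utils import timezone

def mark_todo_done(todo, user):
    """Stamp completion time plus node/version/revision/username metadata."""
    if todo.done_on is None:
        todo.done_on = timezone.now()
        # Fills done_node, done_version, done_revision, and done_username from the
        # local node record, the VERSION file / git revision, and the acting user.
        todo.populate_done_metadata(user)
        todo.save()

Per the Todo.save() override in the core/models.py diff, saving with any of these tracked fields changed also resyncs the matching seed fixture through _update_fixture_state().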