arthexis 0.1.16__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic.
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/METADATA +84 -35
- arthexis-0.1.26.dist-info/RECORD +111 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +15 -30
- config/urls.py +53 -1
- core/admin.py +540 -450
- core/apps.py +0 -6
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1566 -203
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +268 -2
- core/tasks.py +174 -48
- core/tests.py +314 -16
- core/user_data.py +42 -2
- core/views.py +278 -183
- nodes/admin.py +557 -65
- nodes/apps.py +11 -0
- nodes/models.py +658 -113
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +97 -2
- nodes/tests.py +1212 -116
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1239 -154
- ocpp/admin.py +979 -152
- ocpp/consumers.py +268 -28
- ocpp/models.py +488 -3
- ocpp/network.py +398 -0
- ocpp/store.py +6 -4
- ocpp/tasks.py +296 -2
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +121 -4
- ocpp/tests.py +950 -11
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +3 -3
- ocpp/views.py +596 -51
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +26 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +77 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +885 -109
- pages/urls.py +13 -2
- pages/utils.py +70 -0
- pages/views.py +558 -55
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
@@ -5,35 +5,42 @@ from django.contrib.auth.models import (
 )
 from django.db import DatabaseError, IntegrityError, connections, models, transaction
 from django.db.models import Q
-from django.db.models.functions import Lower
+from django.db.models.functions import Lower, Length
 from django.conf import settings
 from django.contrib.auth import get_user_model
-from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext_lazy as _, gettext, override
 from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator
 from django.core.exceptions import ValidationError
 from django.apps import apps
 from django.db.models.signals import m2m_changed, post_delete, post_save
 from django.dispatch import receiver
 from django.views.decorators.debug import sensitive_variables
-from datetime import
+from datetime import (
+    time as datetime_time,
+    timedelta,
+    datetime as datetime_datetime,
+    date as datetime_date,
+    timezone as datetime_timezone,
+)
 import logging
+import json
 from django.contrib.contenttypes.models import ContentType
 import hashlib
 import hmac
 import os
 import subprocess
-import secrets
 import re
 from io import BytesIO
 from django.core.files.base import ContentFile
 import qrcode
-from django.utils import timezone
+from django.utils import timezone, formats
 from django.utils.dateparse import parse_datetime
 import uuid
 from pathlib import Path
 from django.core import serializers
 from django.core.management.color import no_style
-from urllib.parse import quote_plus, urlparse
+from urllib.parse import quote, quote_plus, urlparse
+from zoneinfo import ZoneInfo
 from utils import revision as revision_utils
 from typing import Any, Type
 from defusedxml import xmlrpc as defused_xmlrpc
@@ -44,6 +51,51 @@ xmlrpc_client = defused_xmlrpc.xmlrpc_client

 logger = logging.getLogger(__name__)

+
+def _available_language_codes() -> set[str]:
+    return {code.lower() for code, _ in getattr(settings, "LANGUAGES", [])}
+
+
+def default_report_language() -> str:
+    configured = getattr(settings, "LANGUAGE_CODE", "en") or "en"
+    configured = configured.replace("_", "-").lower()
+    base = configured.split("-", 1)[0]
+    available = _available_language_codes()
+    if base in available:
+        return base
+    if configured in available:
+        return configured
+    if available:
+        return next(iter(sorted(available)))
+    return "en"
+
+
+def normalize_report_language(language: str | None) -> str:
+    default = default_report_language()
+    if not language:
+        return default
+    candidate = str(language).strip().lower()
+    if not candidate:
+        return default
+    candidate = candidate.replace("_", "-")
+    available = _available_language_codes()
+    if candidate in available:
+        return candidate
+    base = candidate.split("-", 1)[0]
+    if base in available:
+        return base
+    return default
+
+
+def normalize_report_title(title: str | None) -> str:
+    value = (title or "").strip()
+    if "\r" in value or "\n" in value:
+        raise ValidationError(
+            _("Report title cannot contain control characters."),
+        )
+    return value
+
+
 from .entity import Entity, EntityUserManager, EntityManager
 from .release import (
     Package as ReleasePackage,
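The new report-language helpers resolve a requested locale against settings.LANGUAGES, preferring an exact match, then the base language, then the project default. A minimal sketch of the expected fallback order, assuming an example configuration with LANGUAGES = [("en", "English"), ("es", "Spanish")] and LANGUAGE_CODE = "en-us" (illustrative values, not taken from this package):

    normalize_report_language("es_MX")  # -> "es"   (underscore normalized, base language available)
    normalize_report_language("en-GB")  # -> "en"
    normalize_report_language("fr")     # -> "en"   (unknown locale falls back to the default)
    normalize_report_language(None)     # -> "en"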
@@ -518,17 +570,13 @@ class User(Entity, AbstractUser):
     def odoo_profile(self):
         return self._direct_profile("OdooProfile")

-    @property
-    def assistant_profile(self):
-        return self._direct_profile("AssistantProfile")
-
     @property
     def social_profile(self):
         return self._direct_profile("SocialProfile")

     @property
-    def
-        return self.
+    def google_calendar_profile(self):
+        return self._direct_profile("GoogleCalendarProfile")


 class UserPhoneNumber(Entity):
@@ -591,6 +639,14 @@ class OdooProfile(Profile):
         database = self._resolved_field_value("database")
         return database or ""

+    def _profile_name(self) -> str:
+        """Return the stored name for this profile without database suffix."""
+
+        username = self._resolved_field_value("username")
+        if username:
+            return username
+        return self._resolved_field_value("database")
+
     def save(self, *args, **kwargs):
         if self.pk:
             old = type(self).all_objects.get(pk=self.pk)
@@ -601,7 +657,7 @@ class OdooProfile(Profile):
                 or old.host != self.host
             ):
                 self._clear_verification()
-        computed_name = self.
+        computed_name = self._profile_name()
         update_fields = kwargs.get("update_fields")
         update_fields_set = set(update_fields) if update_fields is not None else None
         if computed_name != self.name:
@@ -636,6 +692,7 @@ class OdooProfile(Profile):
         self.odoo_uid = uid
         self.email = info.get("email", "")
         self.verified_on = timezone.now()
+        self.name = self._profile_name()
         self.save(update_fields=["odoo_uid", "name", "email", "verified_on"])
         return True

@@ -847,6 +904,184 @@ class OpenPayProfile(Profile):
     ]


+class GoogleCalendarProfile(Profile):
+    """Store Google Calendar configuration for a user or security group."""
+
+    profile_fields = ("calendar_id", "api_key", "display_name", "timezone")
+
+    calendar_id = SigilShortAutoField(max_length=255)
+    api_key = SigilShortAutoField(max_length=255)
+    display_name = models.CharField(max_length=255, blank=True)
+    max_events = models.PositiveIntegerField(
+        default=5,
+        validators=[MinValueValidator(1), MaxValueValidator(20)],
+        help_text=_("Number of upcoming events to display (1-20)."),
+    )
+    timezone = SigilShortAutoField(max_length=100, blank=True)
+
+    GOOGLE_EVENTS_URL = (
+        "https://www.googleapis.com/calendar/v3/calendars/{calendar}/events"
+    )
+    GOOGLE_EMBED_URL = "https://calendar.google.com/calendar/embed?src={calendar}&ctz={tz}"
+
+    class Meta:
+        verbose_name = _("Google Calendar")
+        verbose_name_plural = _("Google Calendars")
+        constraints = [
+            models.CheckConstraint(
+                check=(
+                    (Q(user__isnull=False) & Q(group__isnull=True))
+                    | (Q(user__isnull=True) & Q(group__isnull=False))
+                ),
+                name="googlecalendarprofile_requires_owner",
+            )
+        ]
+
+    def __str__(self): # pragma: no cover - simple representation
+        label = self.get_display_name()
+        return label or self.resolved_calendar_id()
+
+    def resolved_calendar_id(self) -> str:
+        value = self.resolve_sigils("calendar_id")
+        return value or self.calendar_id or ""
+
+    def resolved_api_key(self) -> str:
+        value = self.resolve_sigils("api_key")
+        return value or self.api_key or ""
+
+    def resolved_timezone(self) -> str:
+        value = self.resolve_sigils("timezone")
+        return value or self.timezone or ""
+
+    def get_timezone(self) -> ZoneInfo:
+        tz_name = self.resolved_timezone() or settings.TIME_ZONE
+        try:
+            return ZoneInfo(tz_name)
+        except Exception:
+            return ZoneInfo("UTC")
+
+    def get_display_name(self) -> str:
+        value = self.resolve_sigils("display_name")
+        if value:
+            return value
+        if self.display_name:
+            return self.display_name
+        return ""
+
+    def build_events_url(self) -> str:
+        calendar = self.resolved_calendar_id().strip()
+        if not calendar:
+            return ""
+        encoded = quote(calendar, safe="@")
+        return self.GOOGLE_EVENTS_URL.format(calendar=encoded)
+
+    def build_calendar_url(self) -> str:
+        calendar = self.resolved_calendar_id().strip()
+        if not calendar:
+            return ""
+        tz = self.get_timezone().key
+        encoded_calendar = quote_plus(calendar)
+        encoded_tz = quote_plus(tz)
+        return self.GOOGLE_EMBED_URL.format(calendar=encoded_calendar, tz=encoded_tz)
+
+    def _parse_event_point(self, data: dict) -> tuple[datetime_datetime | None, bool]:
+        if not isinstance(data, dict):
+            return None, False
+
+        tz_name = data.get("timeZone")
+        default_tz = self.get_timezone()
+        tzinfo = default_tz
+        if tz_name:
+            try:
+                tzinfo = ZoneInfo(tz_name)
+            except Exception:
+                tzinfo = default_tz
+
+        timestamp = data.get("dateTime")
+        if timestamp:
+            dt = parse_datetime(timestamp)
+            if dt is None:
+                try:
+                    dt = datetime_datetime.fromisoformat(
+                        timestamp.replace("Z", "+00:00")
+                    )
+                except ValueError:
+                    dt = None
+            if dt is not None and dt.tzinfo is None:
+                dt = dt.replace(tzinfo=tzinfo)
+            return dt, False
+
+        date_value = data.get("date")
+        if date_value:
+            try:
+                day = datetime_date.fromisoformat(date_value)
+            except ValueError:
+                return None, True
+            dt = datetime_datetime.combine(day, datetime_time.min, tzinfo=tzinfo)
+            return dt, True
+
+        return None, False
+
+    def fetch_events(self, *, max_results: int | None = None) -> list[dict[str, object]]:
+        calendar_id = self.resolved_calendar_id().strip()
+        api_key = self.resolved_api_key().strip()
+        if not calendar_id or not api_key:
+            return []
+
+        url = self.build_events_url()
+        if not url:
+            return []
+
+        now = timezone.now().astimezone(datetime_timezone.utc).replace(microsecond=0)
+        params = {
+            "key": api_key,
+            "singleEvents": "true",
+            "orderBy": "startTime",
+            "timeMin": now.isoformat().replace("+00:00", "Z"),
+            "maxResults": max_results or self.max_events or 5,
+        }
+
+        try:
+            response = requests.get(url, params=params, timeout=10)
+            response.raise_for_status()
+            payload = response.json()
+        except (requests.RequestException, ValueError):
+            logger.warning(
+                "Failed to fetch Google Calendar events for profile %s", self.pk,
+                exc_info=True,
+            )
+            return []
+
+        items = payload.get("items")
+        if not isinstance(items, list):
+            return []
+
+        events: list[dict[str, object]] = []
+        for item in items:
+            if not isinstance(item, dict):
+                continue
+            start, all_day = self._parse_event_point(item.get("start") or {})
+            end, _ = self._parse_event_point(item.get("end") or {})
+            summary = item.get("summary") or ""
+            link = item.get("htmlLink") or ""
+            location = item.get("location") or ""
+            if start is None:
+                continue
+            events.append(
+                {
+                    "summary": summary,
+                    "start": start,
+                    "end": end,
+                    "all_day": all_day,
+                    "html_link": link,
+                    "location": location,
+                }
+            )
+
+        events.sort(key=lambda event: event.get("start") or timezone.now())
+        return events
+
+
 class EmailInbox(Profile):
     """Credentials and configuration for connecting to an email mailbox."""

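GoogleCalendarProfile.fetch_events queries the public Google Calendar v3 events endpoint with only the stored calendar id and API key, and returns an empty list when configuration is missing or the request fails. A hypothetical consumer using the google_calendar_profile property added earlier in this diff (the surrounding view code is illustrative only, not part of the package):

    profile = request.user.google_calendar_profile  # None when no profile is configured
    events = profile.fetch_events(max_results=5) if profile else []
    for event in events:
        # each entry carries summary, start, end, all_day, html_link and location keys
        print(event["summary"], event["start"], event["all_day"])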
@@ -1764,6 +1999,7 @@ class RFID(Entity):
     """RFID tag that may be assigned to one account."""

     label_id = models.AutoField(primary_key=True, db_column="label_id")
+    MATCH_PREFIX_LENGTH = 8
     rfid = models.CharField(
         max_length=255,
         unique=True,
@@ -1775,6 +2011,14 @@ class RFID(Entity):
            )
        ],
    )
+    reversed_uid = models.CharField(
+        max_length=255,
+        default="",
+        blank=True,
+        editable=False,
+        verbose_name="Reversed UID",
+        help_text="UID value stored with opposite endianness for reference.",
+    )
     custom_label = models.CharField(
         max_length=32,
         blank=True,
@@ -1851,6 +2095,17 @@ class RFID(Entity):
         choices=KIND_CHOICES,
         default=CLASSIC,
     )
+    BIG_ENDIAN = "BIG"
+    LITTLE_ENDIAN = "LITTLE"
+    ENDIANNESS_CHOICES = [
+        (BIG_ENDIAN, _("Big endian")),
+        (LITTLE_ENDIAN, _("Little endian")),
+    ]
+    endianness = models.CharField(
+        max_length=6,
+        choices=ENDIANNESS_CHOICES,
+        default=BIG_ENDIAN,
+    )
     reference = models.ForeignKey(
         "Reference",
         null=True,
@@ -1895,13 +2150,24 @@ class RFID(Entity):
             if self.key_b and old["key_b"] != self.key_b.upper():
                 self.key_b_verified = False
         if self.rfid:
-
+            normalized_rfid = self.rfid.upper()
+            self.rfid = normalized_rfid
+            reversed_uid = self.reverse_uid(normalized_rfid)
+            if reversed_uid != self.reversed_uid:
+                self.reversed_uid = reversed_uid
+                if update_fields:
+                    fields = set(update_fields)
+                    if "reversed_uid" not in fields:
+                        fields.add("reversed_uid")
+                        kwargs["update_fields"] = tuple(fields)
         if self.key_a:
             self.key_a = self.key_a.upper()
         if self.key_b:
             self.key_b = self.key_b.upper()
         if self.kind:
             self.kind = self.kind.upper()
+        if self.endianness:
+            self.endianness = self.normalize_endianness(self.endianness)
         super().save(*args, **kwargs)
         if not self.allowed:
             self.energy_accounts.clear()
@@ -1909,6 +2175,132 @@ class RFID(Entity):
     def __str__(self): # pragma: no cover - simple representation
         return str(self.label_id)

+    @classmethod
+    def normalize_code(cls, value: str) -> str:
+        """Return ``value`` normalized for comparisons."""
+
+        return "".join((value or "").split()).upper()
+
+    def adopt_rfid(self, candidate: str) -> bool:
+        """Adopt ``candidate`` as the stored RFID if it is a better match."""
+
+        normalized = type(self).normalize_code(candidate)
+        if not normalized:
+            return False
+        current = type(self).normalize_code(self.rfid)
+        if current == normalized:
+            return False
+        if not current:
+            self.rfid = normalized
+            return True
+        reversed_current = type(self).reverse_uid(current)
+        if reversed_current and reversed_current == normalized:
+            self.rfid = normalized
+            return True
+        if len(normalized) < len(current):
+            self.rfid = normalized
+            return True
+        if len(normalized) == len(current) and normalized < current:
+            self.rfid = normalized
+            return True
+        return False
+
+    @classmethod
+    def matching_queryset(cls, value: str) -> models.QuerySet["RFID"]:
+        """Return RFID records matching ``value`` using prefix comparison."""
+
+        normalized = cls.normalize_code(value)
+        if not normalized:
+            return cls.objects.none()
+
+        conditions: list[Q] = []
+        candidate = normalized
+        if candidate:
+            conditions.append(Q(rfid=candidate))
+        alternate = cls.reverse_uid(candidate)
+        if alternate and alternate != candidate:
+            conditions.append(Q(rfid=alternate))
+
+        prefix_length = min(len(candidate), cls.MATCH_PREFIX_LENGTH)
+        if prefix_length:
+            prefix = candidate[:prefix_length]
+            conditions.append(Q(rfid__startswith=prefix))
+            if alternate and alternate != candidate:
+                alt_prefix = alternate[:prefix_length]
+                if alt_prefix:
+                    conditions.append(Q(rfid__startswith=alt_prefix))
+
+        query: Q | None = None
+        for condition in conditions:
+            query = condition if query is None else query | condition
+
+        if query is None:
+            return cls.objects.none()
+
+        queryset = cls.objects.filter(query).distinct()
+        return queryset.annotate(rfid_length=Length("rfid")).order_by(
+            "rfid_length", "rfid", "pk"
+        )
+
+    @classmethod
+    def find_match(cls, value: str) -> "RFID | None":
+        """Return the best matching RFID for ``value`` if it exists."""
+
+        return cls.matching_queryset(value).first()
+
+    @classmethod
+    def update_or_create_from_code(
+        cls, value: str, defaults: dict[str, Any] | None = None
+    ) -> tuple["RFID", bool]:
+        """Update or create an RFID using relaxed matching rules."""
+
+        normalized = cls.normalize_code(value)
+        if not normalized:
+            raise ValueError("RFID value is required")
+
+        defaults_map = defaults.copy() if defaults else {}
+        existing = cls.find_match(normalized)
+        if existing:
+            update_fields: set[str] = set()
+            if existing.adopt_rfid(normalized):
+                update_fields.add("rfid")
+            for field_name, new_value in defaults_map.items():
+                if getattr(existing, field_name) != new_value:
+                    setattr(existing, field_name, new_value)
+                    update_fields.add(field_name)
+            if update_fields:
+                existing.save(update_fields=sorted(update_fields))
+            return existing, False
+
+        create_kwargs = defaults_map
+        create_kwargs["rfid"] = normalized
+        tag = cls.objects.create(**create_kwargs)
+        return tag, True
+
+    @classmethod
+    def normalize_endianness(cls, value: object) -> str:
+        """Return a valid endianness value, defaulting to BIG."""
+
+        if isinstance(value, str):
+            candidate = value.strip().upper()
+            valid = {choice[0] for choice in cls.ENDIANNESS_CHOICES}
+            if candidate in valid:
+                return candidate
+        return cls.BIG_ENDIAN
+
+    @staticmethod
+    def reverse_uid(value: str) -> str:
+        """Return ``value`` with reversed byte order for reference storage."""
+
+        normalized = "".join((value or "").split()).upper()
+        if not normalized:
+            return ""
+        if len(normalized) % 2 != 0:
+            return normalized[::-1]
+        bytes_list = [normalized[index : index + 2] for index in range(0, len(normalized), 2)]
+        bytes_list.reverse()
+        return "".join(bytes_list)
+
     @classmethod
     def next_scan_label(
         cls, *, step: int | None = None, start: int | None = None
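reverse_uid underpins the relaxed matching above: a UID read with the opposite endianness is the same hex string with its two-character byte pairs reversed, so both spellings resolve to one stored tag. A small worked example (values are illustrative, not taken from the package):

    RFID.normalize_code(" 04 a1 b2 c3 ")  # -> "04A1B2C3"  (whitespace stripped, upper-cased)
    RFID.reverse_uid("04A1B2C3")          # -> "C3B2A104"  (byte pairs reversed)
    RFID.reverse_uid("ABC")               # -> "CBA"       (odd length: plain character reversal)
    # find_match("C3B2A104") therefore also locates a tag stored as "04A1B2C3"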
@@ -1971,13 +2363,26 @@ class RFID(Entity):

     @classmethod
     def register_scan(
-        cls,
+        cls,
+        rfid: str,
+        *,
+        kind: str | None = None,
+        endianness: str | None = None,
     ) -> tuple["RFID", bool]:
         """Return or create an RFID that was detected via scanning."""

-        normalized = (rfid
-
+        normalized = cls.normalize_code(rfid)
+        desired_endianness = cls.normalize_endianness(endianness)
+        existing = cls.find_match(normalized)
         if existing:
+            update_fields: list[str] = []
+            if existing.adopt_rfid(normalized):
+                update_fields.append("rfid")
+            if existing.endianness != desired_endianness:
+                existing.endianness = desired_endianness
+                update_fields.append("endianness")
+            if update_fields:
+                existing.save(update_fields=update_fields)
             return existing, False

         attempts = 0
@@ -1990,6 +2395,7 @@ class RFID(Entity):
                 "rfid": normalized,
                 "allowed": True,
                 "released": False,
+                "endianness": desired_endianness,
             }
             if kind:
                 create_kwargs["kind"] = kind
@@ -1998,23 +2404,28 @@ class RFID(Entity):
                 tag = cls.objects.create(**create_kwargs)
                 cls._reset_label_sequence()
             except IntegrityError:
-                existing = cls.
+                existing = cls.find_match(normalized)
                 if existing:
                     return existing, False
             else:
                 return tag, True
         raise IntegrityError("Unable to allocate label id for scanned RFID")

-    @
-    def get_account_by_rfid(value):
+    @classmethod
+    def get_account_by_rfid(cls, value):
         """Return the energy account associated with an RFID code if it exists."""
         try:
             EnergyAccount = apps.get_model("core", "EnergyAccount")
         except LookupError: # pragma: no cover - energy accounts app optional
             return None
-
-
-
+        matches = cls.matching_queryset(value).filter(allowed=True)
+        if not matches.exists():
+            return None
+        return (
+            EnergyAccount.objects.filter(rfids__in=matches)
+            .distinct()
+            .first()
+        )

     class Meta:
         verbose_name = "RFID"
@@ -2365,8 +2776,24 @@ class ClientReportSchedule(Entity):
     periodicity = models.CharField(
         max_length=12, choices=PERIODICITY_CHOICES, default=PERIODICITY_NONE
     )
+    language = models.CharField(
+        max_length=12,
+        choices=settings.LANGUAGES,
+        default=default_report_language,
+    )
+    title = models.CharField(
+        max_length=200,
+        blank=True,
+        default="",
+        verbose_name=_("Title"),
+    )
     email_recipients = models.JSONField(default=list, blank=True)
     disable_emails = models.BooleanField(default=False)
+    chargers = models.ManyToManyField(
+        "ocpp.Charger",
+        blank=True,
+        related_name="client_report_schedules",
+    )
     periodic_task = models.OneToOneField(
         "django_celery_beat.PeriodicTask",
         on_delete=models.SET_NULL,
@@ -2380,11 +2807,19 @@ class ClientReportSchedule(Entity):
         verbose_name = "Client Report Schedule"
         verbose_name_plural = "Client Report Schedules"

+    @classmethod
+    def label_for_periodicity(cls, value: str) -> str:
+        lookup = dict(cls.PERIODICITY_CHOICES)
+        return lookup.get(value, value)
+
     def __str__(self) -> str: # pragma: no cover - simple representation
         owner = self.owner.get_username() if self.owner else "Unassigned"
         return f"Client Report Schedule ({owner})"

     def save(self, *args, **kwargs):
+        if self.language:
+            self.language = normalize_report_language(self.language)
+        self.title = normalize_report_title(self.title)
         sync = kwargs.pop("sync_task", True)
         super().save(*args, **kwargs)
         if sync and self.pk:
@@ -2476,6 +2911,78 @@ class ClientReportSchedule(Entity):

         return start, end

+    def _advance_period(
+        self, start: datetime_date, end: datetime_date
+    ) -> tuple[datetime_date, datetime_date]:
+        import calendar as _calendar
+        import datetime as _datetime
+
+        if self.periodicity == self.PERIODICITY_DAILY:
+            delta = _datetime.timedelta(days=1)
+            return start + delta, end + delta
+        if self.periodicity == self.PERIODICITY_WEEKLY:
+            delta = _datetime.timedelta(days=7)
+            return start + delta, end + delta
+        if self.periodicity == self.PERIODICITY_MONTHLY:
+            base_start = start.replace(day=1)
+            year = base_start.year
+            month = base_start.month
+            if month == 12:
+                next_year = year + 1
+                next_month = 1
+            else:
+                next_year = year
+                next_month = month + 1
+            next_start = base_start.replace(year=next_year, month=next_month, day=1)
+            last_day = _calendar.monthrange(next_year, next_month)[1]
+            next_end = next_start.replace(day=last_day)
+            return next_start, next_end
+        raise ValueError("advance_period called for non-recurring schedule")
+
+    def iter_pending_periods(self, reference=None):
+        from django.utils import timezone
+
+        if self.periodicity == self.PERIODICITY_NONE:
+            return []
+
+        ref_date = reference or timezone.localdate()
+        try:
+            target_start, target_end = self.calculate_period(reference=ref_date)
+        except ValueError:
+            return []
+
+        reports = self.reports.order_by("start_date", "end_date")
+        last_report = reports.last()
+        if last_report:
+            current_start, current_end = self._advance_period(
+                last_report.start_date, last_report.end_date
+            )
+        else:
+            current_start, current_end = target_start, target_end
+
+        if current_end < current_start:
+            return []
+
+        pending: list[tuple[datetime.date, datetime.date]] = []
+        safety = 0
+        while current_end <= target_end:
+            exists = reports.filter(
+                start_date=current_start, end_date=current_end
+            ).exists()
+            if not exists:
+                pending.append((current_start, current_end))
+            try:
+                current_start, current_end = self._advance_period(
+                    current_start, current_end
+                )
+            except ValueError:
+                break
+            safety += 1
+            if safety > 400:
+                break
+
+        return pending
+
     def resolve_recipients(self):
         """Return (to, cc) email lists respecting owner fallbacks."""

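For recurring schedules, _advance_period shifts the last covered window forward by one unit (snapping monthly windows to full calendar months), and iter_pending_periods walks those windows up to the current target period, yielding any that have no stored report yet. A worked example for a monthly schedule whose last report covered January 2024 (dates are illustrative):

    (2024-01-01, 2024-01-31)  ->  (2024-02-01, 2024-02-29)   # leap year; last day from calendar.monthrange
                              ->  (2024-03-01, 2024-03-31)
    # each missing window is then generated via run(start=..., end=...), extended below to accept an explicit period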
@@ -2523,38 +3030,27 @@ class ClientReportSchedule(Entity):

         return to, cc

+    def resolve_reply_to(self) -> list[str]:
+        return ClientReport.resolve_reply_to_for_owner(self.owner)
+
     def get_outbox(self):
         """Return the preferred :class:`nodes.models.EmailOutbox` instance."""

-
-
-        if self.owner:
-            try:
-                outbox = self.owner.get_profile(EmailOutbox)
-            except Exception: # pragma: no cover - defensive catch
-                outbox = None
-            if outbox:
-                return outbox
-
-        node = Node.get_local()
-        if node:
-            return getattr(node, "email_outbox", None)
-        return None
+        return ClientReport.resolve_outbox_for_owner(self.owner)

     def notify_failure(self, message: str):
         from nodes.models import NetMessage

         NetMessage.broadcast("Client report delivery issue", message)

-    def run(self):
+    def run(self, *, start: datetime_date | None = None, end: datetime_date | None = None):
         """Generate the report, persist it and deliver notifications."""

-
-
-
-
-
-            return None
+        if start is None or end is None:
+            try:
+                start, end = self.calculate_period()
+            except ValueError:
+                return None

         try:
             report = ClientReport.generate(
@@ -2564,8 +3060,12 @@ class ClientReportSchedule(Entity):
                 schedule=self,
                 recipients=self.email_recipients,
                 disable_emails=self.disable_emails,
+                chargers=list(self.chargers.all()),
+                language=self.language,
+                title=self.title,
             )
-
+            report.chargers.set(self.chargers.all())
+            report.store_local_copy()
         except Exception as exc:
             self.notify_failure(str(exc))
             raise
@@ -2577,32 +3077,12 @@ class ClientReportSchedule(Entity):
             raise RuntimeError("No recipients available for client report")
         else:
             try:
-
-
-                attachments.append((html_name, html_content, "text/html"))
-                json_file = Path(settings.BASE_DIR) / export["json_path"]
-                if json_file.exists():
-                    attachments.append(
-                        (
-                            json_file.name,
-                            json_file.read_text(encoding="utf-8"),
-                            "application/json",
-                        )
-                    )
-                subject = f"Client report {report.start_date} to {report.end_date}"
-                body = (
-                    "Attached is the client report generated for the period "
-                    f"{report.start_date} to {report.end_date}."
-                )
-                mailer.send(
-                    subject,
-                    body,
-                    to,
-                    outbox=self.get_outbox(),
+                delivered = report.send_delivery(
+                    to=to,
                     cc=cc,
-
+                    outbox=self.get_outbox(),
+                    reply_to=self.resolve_reply_to(),
                 )
-                delivered = list(dict.fromkeys(to + (cc or [])))
                 if delivered:
                     type(report).objects.filter(pk=report.pk).update(
                         recipients=delivered
@@ -2617,6 +3097,14 @@ class ClientReportSchedule(Entity):
         self.last_generated_on = now
         return report

+    def generate_missing_reports(self, reference=None):
+        generated: list["ClientReport"] = []
+        for start, end in self.iter_pending_periods(reference=reference):
+            report = self.run(start=start, end=end)
+            if report:
+                generated.append(report)
+        return generated
+

 class ClientReport(Entity):
     """Snapshot of energy usage over a period."""
@@ -2639,15 +3127,70 @@ class ClientReport(Entity):
         blank=True,
         related_name="reports",
     )
+    language = models.CharField(
+        max_length=12,
+        choices=settings.LANGUAGES,
+        default=default_report_language,
+    )
+    title = models.CharField(
+        max_length=200,
+        blank=True,
+        default="",
+        verbose_name=_("Title"),
+    )
     recipients = models.JSONField(default=list, blank=True)
     disable_emails = models.BooleanField(default=False)
+    chargers = models.ManyToManyField(
+        "ocpp.Charger",
+        blank=True,
+        related_name="client_reports",
+    )

     class Meta:
-        verbose_name = "Consumer Report"
-        verbose_name_plural = "Consumer Reports"
+        verbose_name = _("Consumer Report")
+        verbose_name_plural = _("Consumer Reports")
         db_table = "core_client_report"
         ordering = ["-created_on"]

+    def __str__(self) -> str: # pragma: no cover - simple representation
+        period_type = (
+            self.schedule.periodicity
+            if self.schedule
+            else ClientReportSchedule.PERIODICITY_NONE
+        )
+        return f"{self.start_date} - {self.end_date} ({period_type})"
+
+    @staticmethod
+    def default_language() -> str:
+        return default_report_language()
+
+    @staticmethod
+    def normalize_language(language: str | None) -> str:
+        return normalize_report_language(language)
+
+    @staticmethod
+    def normalize_title(title: str | None) -> str:
+        return normalize_report_title(title)
+
+    def save(self, *args, **kwargs):
+        if self.language:
+            self.language = normalize_report_language(self.language)
+        self.title = self.normalize_title(self.title)
+        super().save(*args, **kwargs)
+
+    @property
+    def periodicity_label(self) -> str:
+        if self.schedule:
+            return self.schedule.get_periodicity_display()
+        return ClientReportSchedule.label_for_periodicity(
+            ClientReportSchedule.PERIODICITY_NONE
+        )
+
+    @property
+    def total_kw_period(self) -> float:
+        totals = (self.rows_for_display or {}).get("totals", {})
+        return float(totals.get("total_kw_period", 0.0) or 0.0)
+
     @classmethod
     def generate(
         cls,
@@ -2658,17 +3201,36 @@ class ClientReport(Entity):
         schedule=None,
         recipients: list[str] | None = None,
         disable_emails: bool = False,
+        chargers=None,
+        language: str | None = None,
+        title: str | None = None,
     ):
-
-
+        from collections.abc import Iterable as _Iterable
+
+        charger_list = []
+        if chargers:
+            if isinstance(chargers, _Iterable):
+                charger_list = list(chargers)
+            else:
+                charger_list = [chargers]
+
+        payload = cls.build_rows(start_date, end_date, chargers=charger_list)
+        normalized_language = cls.normalize_language(language)
+        title_value = cls.normalize_title(title)
+        report = cls.objects.create(
             start_date=start_date,
             end_date=end_date,
-            data=
+            data=payload,
             owner=owner,
             schedule=schedule,
             recipients=list(recipients or []),
             disable_emails=disable_emails,
+            language=normalized_language,
+            title=title_value,
         )
+        if charger_list:
+            report.chargers.set(charger_list)
+        return report

     def store_local_copy(self, html: str | None = None):
         """Persist the report data and optional HTML rendering to disk."""
@@ -2682,9 +3244,16 @@ class ClientReport(Entity):
         timestamp = timezone.now().strftime("%Y%m%d%H%M%S")
         identifier = f"client_report_{self.pk}_{timestamp}"

-
-
-
+        language_code = self.normalize_language(self.language)
+        context = {
+            "report": self,
+            "language_code": language_code,
+            "default_language": type(self).default_language(),
+        }
+        with override(language_code):
+            html_content = html or render_to_string(
+                "core/reports/client_report_email.html", context
+            )
         html_path = report_dir / f"{identifier}.html"
         html_path.write_text(html_content, encoding="utf-8")

@@ -2693,15 +3262,13 @@ class ClientReport(Entity):
             _json.dumps(self.data, indent=2, default=str), encoding="utf-8"
         )

-
-
-                return str(path.relative_to(base_dir))
-            except ValueError:
-                return str(path)
+        pdf_path = report_dir / f"{identifier}.pdf"
+        self.render_pdf(pdf_path)

         export = {
-            "html_path":
-            "json_path":
+            "html_path": ClientReport._relative_to_base(html_path, base_dir),
+            "json_path": ClientReport._relative_to_base(json_path, base_dir),
+            "pdf_path": ClientReport._relative_to_base(pdf_path, base_dir),
         }

         updated = dict(self.data)
@@ -2710,27 +3277,113 @@ class ClientReport(Entity):
         self.data = updated
         return export, html_content

+    def send_delivery(
+        self,
+        *,
+        to: list[str] | tuple[str, ...],
+        cc: list[str] | tuple[str, ...] | None = None,
+        outbox=None,
+        reply_to: list[str] | None = None,
+    ) -> list[str]:
+        from core import mailer
+
+        recipients = list(to or [])
+        if not recipients:
+            return []
+
+        pdf_path = self.ensure_pdf()
+        attachments = [
+            (pdf_path.name, pdf_path.read_bytes(), "application/pdf"),
+        ]
+
+        language_code = self.normalize_language(self.language)
+        with override(language_code):
+            totals = self.rows_for_display.get("totals", {})
+            start_display = formats.date_format(
+                self.start_date, format="DATE_FORMAT", use_l10n=True
+            )
+            end_display = formats.date_format(
+                self.end_date, format="DATE_FORMAT", use_l10n=True
+            )
+            total_kw_period_label = gettext("Total kW during period")
+            total_kw_all_label = gettext("Total kW (all time)")
+            report_title = self.normalize_title(self.title) or gettext(
+                "Consumer Report"
+            )
+            body_lines = [
+                gettext("%(title)s for %(start)s through %(end)s.")
+                % {"title": report_title, "start": start_display, "end": end_display},
+                f"{total_kw_period_label}: "
+                f"{formats.number_format(totals.get('total_kw_period', 0.0), decimal_pos=2, use_l10n=True)}.",
+                f"{total_kw_all_label}: "
+                f"{formats.number_format(totals.get('total_kw', 0.0), decimal_pos=2, use_l10n=True)}.",
+            ]
+            message = "\n".join(body_lines)
+            subject = gettext("%(title)s %(start)s - %(end)s") % {
+                "title": report_title,
+                "start": start_display,
+                "end": end_display,
+            }
+
+        kwargs = {}
+        if reply_to:
+            kwargs["reply_to"] = reply_to
+
+        mailer.send(
+            subject,
+            message,
+            recipients,
+            outbox=outbox,
+            cc=list(cc or []),
+            attachments=attachments,
+            **kwargs,
+        )
+
+        delivered = list(dict.fromkeys(recipients + list(cc or [])))
+        return delivered
+
+    @staticmethod
+    def build_rows(
+        start_date=None,
+        end_date=None,
+        *,
+        for_display: bool = False,
+        chargers=None,
+    ):
+        dataset = ClientReport._build_dataset(start_date, end_date, chargers=chargers)
+        if for_display:
+            return ClientReport._normalize_dataset_for_display(dataset)
+        return dataset
+
     @staticmethod
-    def
-        from
+    def _build_dataset(start_date=None, end_date=None, *, chargers=None):
+        from datetime import datetime, time, timedelta, timezone as pytimezone
+        from ocpp.models import Charger, Transaction

-        qs = Transaction.objects.
-        if start_date:
-            from datetime import datetime, time, timedelta, timezone as pytimezone
+        qs = Transaction.objects.all()

+        start_dt = None
+        end_dt = None
+        if start_date:
             start_dt = datetime.combine(start_date, time.min, tzinfo=pytimezone.utc)
             qs = qs.filter(start_time__gte=start_dt)
         if end_date:
-            from datetime import datetime, time, timedelta, timezone as pytimezone
-
             end_dt = datetime.combine(
                 end_date + timedelta(days=1), time.min, tzinfo=pytimezone.utc
             )
             qs = qs.filter(start_time__lt=end_dt)

-
-
-
+        selected_base_ids = None
+        if chargers:
+            selected_base_ids = {
+                charger.charger_id for charger in chargers if charger.charger_id
+            }
+            if selected_base_ids:
+                qs = qs.filter(charger__charger_id__in=selected_base_ids)
+
+        qs = qs.select_related("account", "charger").prefetch_related("meter_values")
+        transactions = list(qs.order_by("start_time", "pk"))
+
         rfid_values = {tx.rfid for tx in transactions if tx.rfid}
         tag_map: dict[str, RFID] = {}
         if rfid_values:
@@ -2741,51 +3394,269 @@ class ClientReport(Entity):
                )
            }

-
+        charger_ids = {
+            tx.charger.charger_id
+            for tx in transactions
+            if getattr(tx, "charger", None) and tx.charger.charger_id
+        }
+        aggregator_map: dict[str, Charger] = {}
+        if charger_ids:
+            aggregator_map = {
+                charger.charger_id: charger
+                for charger in Charger.objects.filter(
+                    charger_id__in=charger_ids, connector_id__isnull=True
+                )
+            }
+
+        groups: dict[str, dict[str, Any]] = {}
         for tx in transactions:
-
-            if
+            charger = getattr(tx, "charger", None)
+            if charger is None:
+                continue
+            base_id = charger.charger_id
+            if selected_base_ids is not None and base_id not in selected_base_ids:
                 continue
+            aggregator = aggregator_map.get(base_id) or charger
+            entry = groups.setdefault(
+                base_id,
+                {"charger": aggregator, "transactions": []},
+            )
+            entry["transactions"].append(tx)
+
+        evcs_entries: list[dict[str, Any]] = []
+        total_all_time = 0.0
+        total_period = 0.0
+
+        def _sort_key(tx):
+            anchor = getattr(tx, "start_time", None)
+            if anchor is None:
+                anchor = datetime.min.replace(tzinfo=pytimezone.utc)
+            return (anchor, tx.pk or 0)
+
+        for base_id, info in sorted(groups.items(), key=lambda item: item[0]):
+            aggregator = info["charger"]
+            txs = sorted(info["transactions"], key=_sort_key)
+            total_kw_all = float(getattr(aggregator, "total_kw", 0.0) or 0.0)
+            total_kw_period = 0.0
+            if hasattr(aggregator, "total_kw_for_range"):
+                total_kw_period = float(
+                    aggregator.total_kw_for_range(start=start_dt, end=end_dt) or 0.0
+                )
+            total_all_time += total_kw_all
+            total_period += total_kw_period

-
-
-
-
-
-            if tag:
-                account = next(iter(tag.energy_accounts.all()), None)
-                if account:
-                    subject = account.name
-                else:
-                    subject = str(tag.label_id)
+            session_rows: list[dict[str, Any]] = []
+            for tx in txs:
+                session_kw = float(getattr(tx, "kw", 0.0) or 0.0)
+                if session_kw <= 0:
+                    continue

-
-                subject = tx.rfid
+                start_kwh, end_kwh = ClientReport._resolve_meter_bounds(tx)

-
-
-
-
-
+                connector_number = (
+                    tx.connector_id
+                    if getattr(tx, "connector_id", None) is not None
+                    else getattr(getattr(tx, "charger", None), "connector_id", None)
+                )

-
+                rfid_value = (tx.rfid or "").strip()
+                tag = tag_map.get(rfid_value)
+                label = None
+                account_name = (
+                    tx.account.name
+                    if tx.account and getattr(tx.account, "name", None)
+                    else None
+                )
+                if tag:
+                    label = tag.custom_label or str(tag.label_id)
+                    if not account_name:
+                        account = next(iter(tag.energy_accounts.all()), None)
+                        if account and getattr(account, "name", None):
+                            account_name = account.name
+                elif rfid_value:
+                    label = rfid_value
+
+                session_rows.append(
+                    {
+                        "connector": connector_number,
+                        "rfid_label": label,
+                        "account_name": account_name,
+                        "start_kwh": start_kwh,
+                        "end_kwh": end_kwh,
+                        "session_kwh": session_kw,
+                        "start": tx.start_time.isoformat()
+                        if getattr(tx, "start_time", None)
+                        else None,
+                        "end": tx.stop_time.isoformat()
+                        if getattr(tx, "stop_time", None)
+                        else None,
+                    }
+                )
+
+            evcs_entries.append(
                 {
-                    "
-                    "
-                    "
-
-
+                    "charger_id": aggregator.pk,
+                    "serial_number": aggregator.charger_id,
+                    "display_name": aggregator.display_name
+                    or aggregator.name
+                    or aggregator.charger_id,
+                    "total_kw": total_kw_all,
+                    "total_kw_period": total_kw_period,
+                    "transactions": session_rows,
                 }
             )

-
+        filters: dict[str, Any] = {}
+        if selected_base_ids:
+            filters["chargers"] = sorted(selected_base_ids)

-
-
-
-
+        return {
+            "schema": "evcs-session/v1",
+            "evcs": evcs_entries,
+            "totals": {
+                "total_kw": total_all_time,
+                "total_kw_period": total_period,
+            },
+            "filters": filters,
+        }
+
+    @staticmethod
+    def _resolve_meter_bounds(tx) -> tuple[float | None, float | None]:
+        def _convert(value):
+            if value in {None, ""}:
+                return None
+            try:
+                return float(value) / 1000.0
+            except (TypeError, ValueError):
+                return None
+
+        start_value = _convert(getattr(tx, "meter_start", None))
+        end_value = _convert(getattr(tx, "meter_stop", None))
+
+        readings_manager = getattr(tx, "meter_values", None)
+        readings = []
+        if readings_manager is not None:
+            readings = [
+                reading
+                for reading in readings_manager.all()
+                if getattr(reading, "energy", None) is not None
+            ]
+        if readings:
+            readings.sort(key=lambda item: item.timestamp)
+            if start_value is None:
+                start_value = float(readings[0].energy or 0)
+            if end_value is None:
+                end_value = float(readings[-1].energy or 0)
+
+        return start_value, end_value
+
+    @staticmethod
+    def _format_session_datetime(value):
+        if not value:
+            return None
+        localized = timezone.localtime(value)
+        date_part = formats.date_format(
+            localized, format="MONTH_DAY_FORMAT", use_l10n=True
+        )
+        time_part = formats.time_format(
+            localized, format="TIME_FORMAT", use_l10n=True
+        )
+        return gettext("%(date)s, %(time)s") % {
+            "date": date_part,
+            "time": time_part,
+        }
+
+    @staticmethod
+    def _calculate_duration_minutes(start, end):
+        if not start or not end:
+            return None
+        total_seconds = (end - start).total_seconds()
+        if total_seconds < 0:
+            return None
+        return int(round(total_seconds / 60.0))
+
+    @staticmethod
+    def _normalize_dataset_for_display(dataset: dict[str, Any]):
+        schema = dataset.get("schema")
+        if schema == "evcs-session/v1":
+            from datetime import datetime
+
+            evcs_entries: list[dict[str, Any]] = []
+            for entry in dataset.get("evcs", []):
+                normalized_rows: list[dict[str, Any]] = []
+                for row in entry.get("transactions", []):
+                    start_val = row.get("start")
+                    end_val = row.get("end")
+
+                    start_dt = None
+                    if start_val:
+                        start_dt = parse_datetime(start_val)
+                        if start_dt and timezone.is_naive(start_dt):
+                            start_dt = timezone.make_aware(start_dt, timezone.utc)
+
+                    end_dt = None
+                    if end_val:
+                        end_dt = parse_datetime(end_val)
+                        if end_dt and timezone.is_naive(end_dt):
+                            end_dt = timezone.make_aware(end_dt, timezone.utc)
+
+                    normalized_rows.append(
+                        {
+                            "connector": row.get("connector"),
+                            "rfid_label": row.get("rfid_label"),
+                            "account_name": row.get("account_name"),
+                            "start_kwh": row.get("start_kwh"),
+                            "end_kwh": row.get("end_kwh"),
+                            "session_kwh": row.get("session_kwh"),
+                            "start": start_dt,
+                            "end": end_dt,
+                            "start_display": ClientReport._format_session_datetime(
+                                start_dt
+                            ),
+                            "end_display": ClientReport._format_session_datetime(
+                                end_dt
+                            ),
+                            "duration_minutes": ClientReport._calculate_duration_minutes(
+                                start_dt, end_dt
+                            ),
+                        }
+                    )
+
+                normalized_rows.sort(
+                    key=lambda item: (
+                        item["start"]
+                        if item["start"] is not None
+                        else datetime.min.replace(tzinfo=timezone.utc),
+                        item.get("connector") or 0,
+                    )
+                )
+
+                evcs_entries.append(
+                    {
+                        "display_name": entry.get("display_name")
+                        or entry.get("serial_number")
+                        or "Charge Point",
+                        "serial_number": entry.get("serial_number"),
+                        "total_kw": entry.get("total_kw", 0.0),
+                        "total_kw_period": entry.get("total_kw_period", 0.0),
+                        "transactions": normalized_rows,
+                    }
+                )
+
+            totals = dataset.get("totals", {})
+            return {
+                "schema": schema,
+                "evcs": evcs_entries,
+                "totals": {
+                    "total_kw": totals.get("total_kw", 0.0),
+                    "total_kw_period": totals.get("total_kw_period", 0.0),
+                },
+                "filters": dataset.get("filters", {}),
+            }
+
+        if schema == "session-list/v1":
             parsed: list[dict[str, Any]] = []
-            for row in rows:
+            for row in dataset.get("rows", []):
                 item = dict(row)
                 start_val = row.get("start")
                 end_val = row.get("end")
@@ -2796,6 +3667,7 @@ class ClientReport(Entity):
                         start_dt = timezone.make_aware(start_dt, timezone.utc)
                     item["start"] = start_dt
                 else:
+                    start_dt = None
                     item["start"] = None

                 if end_val:
@@ -2804,11 +3676,299 @@ class ClientReport(Entity):
                         end_dt = timezone.make_aware(end_dt, timezone.utc)
                     item["end"] = end_dt
                 else:
+                    end_dt = None
                     item["end"] = None

+                item["start_display"] = ClientReport._format_session_datetime(start_dt)
+                item["end_display"] = ClientReport._format_session_datetime(end_dt)
+                item["duration_minutes"] = ClientReport._calculate_duration_minutes(
+                    start_dt, end_dt
+                )
+
                 parsed.append(item)
-
-
+
+            return {"schema": schema, "rows": parsed}
+
+        return {
+            "schema": schema,
+            "rows": dataset.get("rows", []),
+            "filters": dataset.get("filters", {}),
+        }
+
+    @property
+    def rows_for_display(self):
+        data = self.data or {}
+        return ClientReport._normalize_dataset_for_display(data)
+
+    @staticmethod
+    def _relative_to_base(path: Path, base_dir: Path) -> str:
+        try:
+            return str(path.relative_to(base_dir))
+        except ValueError:
+            return str(path)
+
+    @staticmethod
+    def resolve_reply_to_for_owner(owner) -> list[str]:
+        if not owner:
+            return []
+        try:
+            inbox = owner.get_profile(EmailInbox)
+        except Exception:  # pragma: no cover - defensive catch
+            inbox = None
+        if inbox and getattr(inbox, "username", ""):
+            address = inbox.username.strip()
+            if address:
+                return [address]
+        return []
+
+    @staticmethod
+    def resolve_outbox_for_owner(owner):
+        from nodes.models import EmailOutbox, Node
+
+        if owner:
+            try:
+                outbox = owner.get_profile(EmailOutbox)
+            except Exception:  # pragma: no cover - defensive catch
+                outbox = None
+            if outbox:
+                return outbox
+
+        node = Node.get_local()
+        if node:
+            return getattr(node, "email_outbox", None)
+        return None
+
+    def render_pdf(self, target: Path):
+        from reportlab.lib import colors
+        from reportlab.lib.pagesizes import landscape, letter
+        from reportlab.lib.styles import getSampleStyleSheet
+        from reportlab.lib.units import inch
+        from reportlab.platypus import (
+            Paragraph,
+            SimpleDocTemplate,
+            Spacer,
+            Table,
+            TableStyle,
+        )
+
+        target_path = Path(target)
+        target_path.parent.mkdir(parents=True, exist_ok=True)
+
+        dataset = self.rows_for_display
+        schema = dataset.get("schema")
+
+        language_code = self.normalize_language(self.language)
+        with override(language_code):
+            styles = getSampleStyleSheet()
+            title_style = styles["Title"]
+            subtitle_style = styles["Heading2"]
+            normal_style = styles["BodyText"]
+            emphasis_style = styles["Heading3"]
+
+            document = SimpleDocTemplate(
+                str(target_path),
+                pagesize=landscape(letter),
+                leftMargin=0.5 * inch,
+                rightMargin=0.5 * inch,
+                topMargin=0.6 * inch,
+                bottomMargin=0.5 * inch,
+            )
+
+            story: list = []
+
+            report_title = self.normalize_title(self.title) or gettext(
+                "Consumer Report"
+            )
+            story.append(Paragraph(report_title, title_style))
+
+            start_display = formats.date_format(
+                self.start_date, format="DATE_FORMAT", use_l10n=True
+            )
+            end_display = formats.date_format(
+                self.end_date, format="DATE_FORMAT", use_l10n=True
+            )
+            story.append(
+                Paragraph(
+                    gettext("Period: %(start)s to %(end)s")
+                    % {"start": start_display, "end": end_display},
+                    emphasis_style,
+                )
+            )
+            story.append(Spacer(1, 0.25 * inch))
+
+            total_kw_all_time_label = gettext("Total kW (all time)")
+            total_kw_period_label = gettext("Total kW (period)")
+            connector_label = gettext("Connector")
+            account_label = gettext("Account")
+            session_kwh_label = gettext("Session kW")
+            time_label = gettext("Time")
+            no_sessions_period = gettext(
+                "No charging sessions recorded for the selected period."
+            )
+            no_sessions_point = gettext(
+                "No charging sessions recorded for this charge point."
+            )
+            no_structured_data = gettext(
+                "No structured data is available for this report."
+            )
+            report_totals_label = gettext("Report totals")
+            total_kw_period_line = gettext("Total kW during period")
+
+            def format_datetime(value):
+                if not value:
+                    return "—"
+                return ClientReport._format_session_datetime(value) or "—"
+
+            def format_decimal(value):
+                if value is None:
+                    return "—"
+                return formats.number_format(value, decimal_pos=2, use_l10n=True)
+
+            def format_duration(value):
+                if value is None:
+                    return "—"
+                return formats.number_format(value, decimal_pos=0, use_l10n=True)
+
+            if schema == "evcs-session/v1":
+                evcs_entries = dataset.get("evcs", [])
+                if not evcs_entries:
+                    story.append(Paragraph(no_sessions_period, normal_style))
+                for index, evcs in enumerate(evcs_entries):
+                    if index:
+                        story.append(Spacer(1, 0.2 * inch))
+
+                    display_name = evcs.get("display_name") or gettext("Charge Point")
+                    serial_number = evcs.get("serial_number")
+                    if serial_number:
+                        header_text = gettext("%(name)s (Serial: %(serial)s)") % {
+                            "name": display_name,
+                            "serial": serial_number,
+                        }
+                    else:
+                        header_text = display_name
+                    story.append(Paragraph(header_text, subtitle_style))
+
+                    metrics_text = (
+                        f"{total_kw_all_time_label}: "
+                        f"{format_decimal(evcs.get('total_kw', 0.0))} | "
+                        f"{total_kw_period_label}: "
+                        f"{format_decimal(evcs.get('total_kw_period', 0.0))}"
+                    )
+                    story.append(Paragraph(metrics_text, normal_style))
+                    story.append(Spacer(1, 0.1 * inch))
+
+                    transactions = evcs.get("transactions", [])
+                    if transactions:
+                        table_data = [
+                            [
+                                session_kwh_label,
+                                gettext("Session start"),
+                                gettext("Session end"),
+                                time_label,
+                                connector_label,
+                                gettext("RFID label"),
+                                account_label,
+                            ]
+                        ]
+
+                        for row in transactions:
+                            start_dt = row.get("start")
+                            end_dt = row.get("end")
+                            duration_value = row.get("duration_minutes")
+                            table_data.append(
+                                [
+                                    format_decimal(row.get("session_kwh")),
+                                    format_datetime(start_dt),
+                                    format_datetime(end_dt),
+                                    format_duration(duration_value),
+                                    row.get("connector")
+                                    if row.get("connector") is not None
+                                    else "—",
+                                    row.get("rfid_label") or "—",
+                                    row.get("account_name") or "—",
+                                ]
+                            )
+
+                        column_count = len(table_data[0])
+                        col_width = document.width / column_count if column_count else None
+                        table = Table(
+                            table_data,
+                            repeatRows=1,
+                            colWidths=[col_width] * column_count if col_width else None,
+                            hAlign="LEFT",
+                        )
+                        table.setStyle(
+                            TableStyle(
+                                [
+                                    (
+                                        "BACKGROUND",
+                                        (0, 0),
+                                        (-1, 0),
+                                        colors.HexColor("#0f172a"),
+                                    ),
+                                    ("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
+                                    ("ALIGN", (0, 0), (-1, 0), "CENTER"),
+                                    ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
+                                    ("FONTSIZE", (0, 0), (-1, 0), 9),
+                                    (
+                                        "ROWBACKGROUNDS",
+                                        (0, 1),
+                                        (-1, -1),
+                                        [colors.whitesmoke, colors.HexColor("#eef2ff")],
+                                    ),
+                                    ("GRID", (0, 0), (-1, -1), 0.25, colors.grey),
+                                    ("VALIGN", (0, 1), (-1, -1), "MIDDLE"),
+                                ]
+                            )
+                        )
+                        story.append(table)
+                    else:
+                        story.append(Paragraph(no_sessions_point, normal_style))
+            else:
+                story.append(Paragraph(no_structured_data, normal_style))
+
+            totals = dataset.get("totals") or {}
+            story.append(Spacer(1, 0.3 * inch))
+            story.append(Paragraph(report_totals_label, emphasis_style))
+            story.append(
+                Paragraph(
+                    f"{total_kw_all_time_label}: "
+                    f"{format_decimal(totals.get('total_kw', 0.0))}",
+                    emphasis_style,
+                )
+            )
+            story.append(
+                Paragraph(
+                    f"{total_kw_period_line}: "
+                    f"{format_decimal(totals.get('total_kw_period', 0.0))}",
+                    emphasis_style,
+                )
+            )
+
+            document.build(story)
+
+    def ensure_pdf(self) -> Path:
+        base_dir = Path(settings.BASE_DIR)
+        export = dict((self.data or {}).get("export") or {})
+        pdf_relative = export.get("pdf_path")
+        if pdf_relative:
+            candidate = base_dir / pdf_relative
+            if candidate.exists():
+                return candidate
+
+        report_dir = base_dir / "work" / "reports"
+        report_dir.mkdir(parents=True, exist_ok=True)
+        timestamp = timezone.now().strftime("%Y%m%d%H%M%S")
+        identifier = f"client_report_{self.pk}_{timestamp}"
+        pdf_path = report_dir / f"{identifier}.pdf"
+        self.render_pdf(pdf_path)
+
+        export["pdf_path"] = ClientReport._relative_to_base(pdf_path, base_dir)
+        updated = dict(self.data)
+        updated["export"] = export
+        type(self).objects.filter(pk=self.pk).update(data=updated)
+        self.data = updated
+        return pdf_path


 class BrandManager(EntityManager):
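Note: `ensure_pdf` caches the rendered file path under the report's `data["export"]` key and only re-renders when the cached file is missing. A minimal usage sketch, assuming an existing `ClientReport` row; the `report_id` variable and the surrounding view or task wiring are illustrative, not part of this diff:

    # Hypothetical caller: reuse the cached PDF when present, rebuild otherwise.
    report = ClientReport.objects.get(pk=report_id)
    pdf_path = report.ensure_pdf()  # renders into work/reports/ only if needed
    pdf_bytes = pdf_path.read_bytes()  # e.g. to attach to an outgoing email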
@@ -3209,6 +4369,11 @@ class PackageRelease(Entity):
     def natural_key(self):
         return (self.package.name, self.version)

+    class Severity(models.TextChoices):
+        NORMAL = "normal", _("Normal")
+        LOW = "low", _("Low")
+        CRITICAL = "critical", _("Critical")
+
     package = models.ForeignKey(
         Package, on_delete=models.CASCADE, related_name="releases"
     )
@@ -3219,6 +4384,12 @@ class PackageRelease(Entity):
     revision = models.CharField(
         max_length=40, blank=True, default=revision_utils.get_revision, editable=False
     )
+    severity = models.CharField(
+        max_length=16,
+        choices=Severity.choices,
+        default=Severity.NORMAL,
+        help_text=_("Controls the expected urgency for auto-upgrades."),
+    )
     changelog = models.TextField(blank=True, default="")
     pypi_url = models.URLField("PyPI URL", blank=True, editable=False)
     github_url = models.URLField("GitHub URL", blank=True, editable=False)
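Note: the new `severity` field stores one of the `Severity` choices above, so downstream code can branch on plain string values. A hedged sketch of a possible consumer; the upgrade-scheduling policy shown here is illustrative and not part of this release:

    # Hypothetical consumer of PackageRelease.severity
    critical_releases = PackageRelease.objects.filter(
        severity=PackageRelease.Severity.CRITICAL
    )
    for release in critical_releases:
        # get_severity_display() is generated by Django for fields with choices
        print(release.version, release.get_severity_display())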
@@ -3243,7 +4414,13 @@ class PackageRelease(Entity):
         for release in cls.objects.all():
             name = f"releases__packagerelease_{release.version.replace('.', '_')}.json"
             path = base / name
-            data = serializers.serialize(
+            data = serializers.serialize(
+                "json",
+                [release],
+                use_natural_foreign_keys=True,
+                use_natural_primary_keys=True,
+            )
+            data = json.dumps(json.loads(data), indent=2) + "\n"
             expected.add(name)
             try:
                 current = path.read_text(encoding="utf-8")
@@ -3255,6 +4432,10 @@ class PackageRelease(Entity):
         if old_name not in expected and old_path.exists():
             old_path.unlink()

+    def delete(self, using=None, keep_parents=False):
+        user_data.delete_user_fixture(self)
+        super().delete(using=using, keep_parents=keep_parents)
+
     def __str__(self) -> str:  # pragma: no cover - trivial
         return f"{self.package.name} {self.version}"

@@ -3262,10 +4443,27 @@ class PackageRelease(Entity):
         """Return a :class:`ReleasePackage` built from the package."""
         return self.package.to_package()

-    def to_credentials(
-
-
-
+    def to_credentials(
+        self, user: models.Model | None = None
+    ) -> Credentials | None:
+        """Return :class:`Credentials` from available release managers."""
+
+        manager_candidates: list[ReleaseManager] = []
+
+        for candidate in (self.release_manager, self.package.release_manager):
+            if candidate and candidate not in manager_candidates:
+                manager_candidates.append(candidate)
+
+        if user is not None and getattr(user, "is_authenticated", False):
+            try:
+                user_manager = ReleaseManager.objects.get(user=user)
+            except ReleaseManager.DoesNotExist:
+                user_manager = None
+            else:
+                if user_manager not in manager_candidates:
+                    manager_candidates.append(user_manager)
+
+        for manager in manager_candidates:
             creds = manager.to_credentials()
             if creds and creds.has_auth():
                 return creds
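Note: `to_credentials` now also accepts the acting user, so a `ReleaseManager` owned by that user can supply credentials when the release- and package-level managers cannot. A sketch of the calling pattern; the `release` and `request` objects are assumed, only the method signature comes from this diff:

    # Hypothetical call site, e.g. inside a publish view
    creds = release.to_credentials(user=request.user)
    if creds is None:
        raise RuntimeError("no release manager credentials available")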
@@ -3287,7 +4485,9 @@ class PackageRelease(Entity):
             return manager.github_token
         return os.environ.get("GITHUB_TOKEN")

-    def build_publish_targets(
+    def build_publish_targets(
+        self, user: models.Model | None = None
+    ) -> list[RepositoryTarget]:
         """Return repository targets for publishing this release."""

         manager = self.release_manager or self.package.release_manager
@@ -3296,7 +4496,7 @@ class PackageRelease(Entity):
         env_primary = os.environ.get("PYPI_REPOSITORY_URL", "")
         primary_url = env_primary.strip()

-        primary_creds = self.to_credentials()
+        primary_creds = self.to_credentials(user=user)
         targets.append(
             RepositoryTarget(
                 name="PyPI",
@@ -3438,6 +4638,8 @@ class PackageRelease(Entity):
         """

         version = (version or "").strip()
+        if version.endswith("+"):
+            version = version.rstrip("+")
         revision = (revision or "").strip()
         if not version or not revision:
             return True
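Note: the added guard strips a trailing "+" (used to mark a locally modified build) before the version/revision comparison, so for example "0.1.26+" is treated the same as "0.1.26":

    >>> "0.1.26+".rstrip("+")
    '0.1.26'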
@@ -3523,73 +4725,6 @@ def _rfid_unique_energy_account(
             "RFID tags may only be assigned to one energy account."
         )

-
-def hash_key(key: str) -> str:
-    """Return a SHA-256 hash for ``key``."""
-
-    return hashlib.sha256(key.encode()).hexdigest()
-
-
-class AssistantProfile(Profile):
-    """Stores a hashed user key used by the assistant for authentication.
-
-    The plain-text ``user_key`` is generated server-side and shown only once.
-    Users must supply this key in the ``Authorization: Bearer <user_key>``
-    header when requesting protected endpoints. Only the hash is stored.
-    """
-
-    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
-    profile_fields = ("user_key_hash", "scopes", "is_active")
-    user_key_hash = models.CharField(max_length=64, unique=True)
-    scopes = models.JSONField(default=list, blank=True)
-    created_at = models.DateTimeField(auto_now_add=True)
-    last_used_at = models.DateTimeField(null=True, blank=True)
-    is_active = models.BooleanField(default=True)
-
-    class Meta:
-        db_table = "workgroup_assistantprofile"
-        verbose_name = "Assistant Profile"
-        verbose_name_plural = "Assistant Profiles"
-        constraints = [
-            models.CheckConstraint(
-                check=(
-                    (Q(user__isnull=False) & Q(group__isnull=True))
-                    | (Q(user__isnull=True) & Q(group__isnull=False))
-                ),
-                name="assistantprofile_requires_owner",
-            )
-        ]
-
-    @classmethod
-    def issue_key(cls, user) -> tuple["AssistantProfile", str]:
-        """Create or update a profile and return it with a new plain key."""
-
-        key = secrets.token_hex(32)
-        key_hash = hash_key(key)
-        if user is None:
-            raise ValueError("Assistant profiles require a user instance")
-
-        profile, _ = cls.objects.update_or_create(
-            user=user,
-            defaults={
-                "user_key_hash": key_hash,
-                "last_used_at": None,
-                "is_active": True,
-            },
-        )
-        return profile, key
-
-    def touch(self) -> None:
-        """Record that the key was used."""
-
-        self.last_used_at = timezone.now()
-        self.save(update_fields=["last_used_at"])
-
-    def __str__(self) -> str:  # pragma: no cover - simple representation
-        owner = self.owner_display()
-        return f"AssistantProfile for {owner}" if owner else "AssistantProfile"
-
-
 def validate_relative_url(value: str) -> None:
     if not value:
         return
@@ -3614,7 +4749,38 @@ class Todo(Entity):
     generated_for_version = models.CharField(max_length=20, blank=True, default="")
     generated_for_revision = models.CharField(max_length=40, blank=True, default="")
     done_on = models.DateTimeField(null=True, blank=True)
+    done_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="completed_todos",
+        help_text="Node where this TODO was completed.",
+    )
+    done_version = models.CharField(max_length=20, blank=True, default="")
+    done_revision = models.CharField(max_length=40, blank=True, default="")
+    done_username = models.CharField(max_length=150, blank=True, default="")
     on_done_condition = ConditionTextField(blank=True, default="")
+    origin_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="originated_todos",
+        help_text="Node where this TODO was generated.",
+    )
+    original_user = models.ForeignKey(
+        settings.AUTH_USER_MODEL,
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="originated_todos",
+        help_text="User responsible for creating this TODO.",
+    )
+    original_user_is_authenticated = models.BooleanField(
+        default=False,
+        help_text="Whether the originating user was authenticated during creation.",
+    )

     objects = TodoManager()

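Note: the new `done_*` fields record where and by whom a TODO was completed; `populate_done_metadata` (added in the next hunk) fills them from the local node and the acting user. A sketch of the intended completion flow; the `todo` and `request` objects are assumed and not part of this diff:

    # Hypothetical completion handler
    todo.done_on = timezone.now()
    todo.populate_done_metadata(user=request.user)
    todo.save()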
@@ -3655,6 +4821,203 @@ class Todo(Entity):
             return field.evaluate(self)
         return ConditionCheckResult(True, "")

+    def save(self, *args, **kwargs):
+        created = self.pk is None
+        tracked_fields = {
+            "done_on",
+            "done_node",
+            "done_node_id",
+            "done_revision",
+            "done_username",
+            "done_version",
+            "is_deleted",
+        }
+        update_fields = kwargs.get("update_fields")
+        monitor_changes = not created and (
+            update_fields is None or tracked_fields.intersection(update_fields)
+        )
+        previous_state = None
+        if monitor_changes:
+            previous_state = (
+                type(self)
+                .all_objects.filter(pk=self.pk)
+                .values(
+                    "done_on",
+                    "done_node_id",
+                    "done_revision",
+                    "done_username",
+                    "done_version",
+                    "is_deleted",
+                )
+                .first()
+            )
+        super().save(*args, **kwargs)
+
+        if created:
+            return
+
+        previous_done_on = previous_state["done_on"] if previous_state else None
+        previous_is_deleted = previous_state["is_deleted"] if previous_state else False
+        previous_done_node = (
+            previous_state["done_node_id"] if previous_state else None
+        )
+        previous_done_revision = (
+            previous_state["done_revision"] if previous_state else ""
+        )
+        previous_done_username = (
+            previous_state["done_username"] if previous_state else ""
+        )
+        previous_done_version = (
+            previous_state["done_version"] if previous_state else ""
+        )
+        if (
+            previous_done_on == self.done_on
+            and previous_is_deleted == self.is_deleted
+            and previous_done_node == getattr(self, "done_node_id", None)
+            and previous_done_revision == self.done_revision
+            and previous_done_username == self.done_username
+            and previous_done_version == self.done_version
+        ):
+            return
+
+        self._update_fixture_state()
+
+    def populate_done_metadata(self, user=None) -> None:
+        """Populate metadata fields for a completed TODO."""
+
+        node = None
+        try:  # pragma: no cover - defensive import guard
+            from nodes.models import Node  # type: ignore
+        except Exception:  # pragma: no cover - when app not ready
+            Node = None
+
+        if Node is not None:
+            try:
+                node = Node.get_local()
+            except Exception:  # pragma: no cover - fallback on errors
+                node = None
+        self.done_node = node if node else None
+
+        version_value = ""
+        revision_value = ""
+        if node is not None:
+            version_value = (node.installed_version or "").strip()
+            revision_value = (node.installed_revision or "").strip()
+
+        if not version_value:
+            version_path = Path(settings.BASE_DIR) / "VERSION"
+            try:
+                version_value = version_path.read_text(encoding="utf-8").strip()
+            except OSError:
+                version_value = ""
+
+        if not revision_value:
+            try:
+                revision_value = revision_utils.get_revision() or ""
+            except Exception:  # pragma: no cover - defensive fallback
+                revision_value = ""
+
+        username_value = ""
+        if user is not None and getattr(user, "is_authenticated", False):
+            try:
+                username_value = user.get_username() or ""
+            except Exception:  # pragma: no cover - fallback to attribute
+                username_value = getattr(user, "username", "") or ""
+
+        self.done_version = version_value
+        self.done_revision = revision_value
+        self.done_username = username_value
+
+    def _update_fixture_state(self) -> None:
+        if not self.is_seed_data:
+            return
+
+        request_text = (self.request or "").strip()
+        if not request_text:
+            return
+
+        slug = self._fixture_slug(request_text)
+        if not slug:
+            return
+
+        base_dir = Path(settings.BASE_DIR)
+        fixture_path = base_dir / "core" / "fixtures" / f"todo__{slug}.json"
+        if not fixture_path.exists():
+            return
+
+        try:
+            with fixture_path.open("r", encoding="utf-8") as handle:
+                data = json.load(handle)
+        except Exception:
+            logger.exception("Failed to read TODO fixture %s", fixture_path)
+            return
+
+        if not isinstance(data, list):
+            return
+
+        updated = False
+        normalized_request = request_text.lower()
+        for item in data:
+            if not isinstance(item, dict):
+                continue
+            fields = item.get("fields")
+            if not isinstance(fields, dict):
+                continue
+            candidate = (fields.get("request") or "").strip().lower()
+            if candidate != normalized_request:
+                continue
+            if self._apply_fixture_fields(fields):
+                updated = True
+
+        if not updated:
+            return
+
+        content = json.dumps(data, indent=2, ensure_ascii=False)
+        if not content.endswith("\n"):
+            content += "\n"
+
+        try:
+            fixture_path.write_text(content, encoding="utf-8")
+        except OSError:
+            logger.exception("Failed to write TODO fixture %s", fixture_path)
+
+    def _apply_fixture_fields(self, fields: dict[str, object]) -> bool:
+        changed = False
+
+        def _assign(key: str, value: object) -> None:
+            nonlocal changed
+            if fields.get(key) != value:
+                fields[key] = value
+                changed = True
+
+        _assign("request", self.request or "")
+        _assign("url", self.url or "")
+        _assign("request_details", self.request_details or "")
+        _assign("done_version", self.done_version or "")
+        _assign("done_revision", self.done_revision or "")
+        _assign("done_username", self.done_username or "")
+
+        if self.done_on:
+            done_value = timezone.localtime(self.done_on)
+            _assign("done_on", done_value.isoformat())
+        else:
+            if fields.get("done_on") is not None:
+                fields["done_on"] = None
+                changed = True
+
+        if self.is_deleted:
+            _assign("is_deleted", True)
+        elif fields.get("is_deleted"):
+            fields["is_deleted"] = False
+            changed = True
+
+        return changed
+
+    @staticmethod
+    def _fixture_slug(value: str) -> str:
+        slug = re.sub(r"[^a-z0-9]+", "_", value.lower()).strip("_")
+        return slug
+

 class TOTPDeviceSettings(models.Model):
     """Per-device configuration options for authenticator enrollments."""