arthexis 0.1.16__py3-none-any.whl → 0.1.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic. Click here for more details.
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/METADATA +95 -41
- arthexis-0.1.28.dist-info/RECORD +112 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +21 -30
- config/settings_helpers.py +176 -1
- config/urls.py +69 -1
- core/admin.py +805 -473
- core/apps.py +6 -8
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/celery_utils.py +73 -0
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1825 -218
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +285 -4
- core/tasks.py +439 -138
- core/test_system_info.py +43 -5
- core/tests.py +516 -18
- core/user_data.py +94 -21
- core/views.py +348 -186
- nodes/admin.py +904 -67
- nodes/apps.py +12 -1
- nodes/feature_checks.py +30 -0
- nodes/models.py +800 -127
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +98 -3
- nodes/tests.py +1381 -152
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1382 -152
- ocpp/admin.py +1970 -152
- ocpp/consumers.py +839 -34
- ocpp/models.py +968 -17
- ocpp/network.py +398 -0
- ocpp/store.py +411 -43
- ocpp/tasks.py +261 -3
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +194 -6
- ocpp/tests.py +1918 -87
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +8 -3
- ocpp/views.py +700 -53
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +28 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +86 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +1050 -126
- pages/urls.py +14 -2
- pages/utils.py +70 -0
- pages/views.py +622 -56
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
|
@@ -4,37 +4,45 @@ from django.contrib.auth.models import (
|
|
|
4
4
|
UserManager as DjangoUserManager,
|
|
5
5
|
)
|
|
6
6
|
from django.db import DatabaseError, IntegrityError, connections, models, transaction
|
|
7
|
-
from django.db.models import Q
|
|
8
|
-
from django.db.models.functions import Lower
|
|
7
|
+
from django.db.models import Q, F
|
|
8
|
+
from django.db.models.functions import Lower, Length
|
|
9
9
|
from django.conf import settings
|
|
10
10
|
from django.contrib.auth import get_user_model
|
|
11
|
-
from django.utils.translation import gettext_lazy as _
|
|
11
|
+
from django.utils.translation import gettext_lazy as _, gettext, override
|
|
12
12
|
from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator
|
|
13
13
|
from django.core.exceptions import ValidationError
|
|
14
14
|
from django.apps import apps
|
|
15
15
|
from django.db.models.signals import m2m_changed, post_delete, post_save
|
|
16
16
|
from django.dispatch import receiver
|
|
17
17
|
from django.views.decorators.debug import sensitive_variables
|
|
18
|
-
from datetime import
|
|
18
|
+
from datetime import (
|
|
19
|
+
time as datetime_time,
|
|
20
|
+
timedelta,
|
|
21
|
+
datetime as datetime_datetime,
|
|
22
|
+
date as datetime_date,
|
|
23
|
+
timezone as datetime_timezone,
|
|
24
|
+
)
|
|
19
25
|
import logging
|
|
26
|
+
import json
|
|
20
27
|
from django.contrib.contenttypes.models import ContentType
|
|
21
28
|
import hashlib
|
|
22
29
|
import hmac
|
|
23
30
|
import os
|
|
24
31
|
import subprocess
|
|
25
|
-
import secrets
|
|
26
32
|
import re
|
|
27
33
|
from io import BytesIO
|
|
28
34
|
from django.core.files.base import ContentFile
|
|
29
35
|
import qrcode
|
|
30
|
-
from django.utils import timezone
|
|
36
|
+
from django.utils import timezone, formats
|
|
31
37
|
from django.utils.dateparse import parse_datetime
|
|
32
38
|
import uuid
|
|
33
39
|
from pathlib import Path
|
|
34
40
|
from django.core import serializers
|
|
35
41
|
from django.core.management.color import no_style
|
|
36
|
-
from urllib.parse import quote_plus, urlparse
|
|
42
|
+
from urllib.parse import quote, quote_plus, urlparse
|
|
43
|
+
from zoneinfo import ZoneInfo
|
|
37
44
|
from utils import revision as revision_utils
|
|
45
|
+
from core.celery_utils import normalize_periodic_task_name
|
|
38
46
|
from typing import Any, Type
|
|
39
47
|
from defusedxml import xmlrpc as defused_xmlrpc
|
|
40
48
|
import requests
|
|
@@ -44,6 +52,51 @@ xmlrpc_client = defused_xmlrpc.xmlrpc_client
|
|
|
44
52
|
|
|
45
53
|
logger = logging.getLogger(__name__)
|
|
46
54
|
|
|
55
|
+
|
|
56
|
+
def _available_language_codes() -> set[str]:
|
|
57
|
+
return {code.lower() for code, _ in getattr(settings, "LANGUAGES", [])}
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def default_report_language() -> str:
|
|
61
|
+
configured = getattr(settings, "LANGUAGE_CODE", "en") or "en"
|
|
62
|
+
configured = configured.replace("_", "-").lower()
|
|
63
|
+
base = configured.split("-", 1)[0]
|
|
64
|
+
available = _available_language_codes()
|
|
65
|
+
if base in available:
|
|
66
|
+
return base
|
|
67
|
+
if configured in available:
|
|
68
|
+
return configured
|
|
69
|
+
if available:
|
|
70
|
+
return next(iter(sorted(available)))
|
|
71
|
+
return "en"
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def normalize_report_language(language: str | None) -> str:
|
|
75
|
+
default = default_report_language()
|
|
76
|
+
if not language:
|
|
77
|
+
return default
|
|
78
|
+
candidate = str(language).strip().lower()
|
|
79
|
+
if not candidate:
|
|
80
|
+
return default
|
|
81
|
+
candidate = candidate.replace("_", "-")
|
|
82
|
+
available = _available_language_codes()
|
|
83
|
+
if candidate in available:
|
|
84
|
+
return candidate
|
|
85
|
+
base = candidate.split("-", 1)[0]
|
|
86
|
+
if base in available:
|
|
87
|
+
return base
|
|
88
|
+
return default
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def normalize_report_title(title: str | None) -> str:
|
|
92
|
+
value = (title or "").strip()
|
|
93
|
+
if "\r" in value or "\n" in value:
|
|
94
|
+
raise ValidationError(
|
|
95
|
+
_("Report title cannot contain control characters."),
|
|
96
|
+
)
|
|
97
|
+
return value
|
|
98
|
+
|
|
99
|
+
|
|
47
100
|
from .entity import Entity, EntityUserManager, EntityManager
|
|
48
101
|
from .release import (
|
|
49
102
|
Package as ReleasePackage,
|
|
@@ -336,7 +389,6 @@ class User(Entity, AbstractUser):
|
|
|
336
389
|
objects = EntityUserManager()
|
|
337
390
|
all_objects = DjangoUserManager()
|
|
338
391
|
"""Custom user model."""
|
|
339
|
-
birthday = models.DateField(null=True, blank=True)
|
|
340
392
|
data_path = models.CharField(max_length=255, blank=True)
|
|
341
393
|
last_visit_ip_address = models.GenericIPAddressField(null=True, blank=True)
|
|
342
394
|
operate_as = models.ForeignKey(
|
|
@@ -518,17 +570,18 @@ class User(Entity, AbstractUser):
|
|
|
518
570
|
def odoo_profile(self):
|
|
519
571
|
return self._direct_profile("OdooProfile")
|
|
520
572
|
|
|
521
|
-
@property
|
|
522
|
-
def assistant_profile(self):
|
|
523
|
-
return self._direct_profile("AssistantProfile")
|
|
524
|
-
|
|
525
573
|
@property
|
|
526
574
|
def social_profile(self):
|
|
527
575
|
return self._direct_profile("SocialProfile")
|
|
528
576
|
|
|
529
577
|
@property
|
|
530
|
-
def
|
|
531
|
-
return self.
|
|
578
|
+
def google_calendar_profile(self):
|
|
579
|
+
return self._direct_profile("GoogleCalendarProfile")
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
class Meta(AbstractUser.Meta):
|
|
583
|
+
verbose_name = _("User")
|
|
584
|
+
verbose_name_plural = _("Users")
|
|
532
585
|
|
|
533
586
|
|
|
534
587
|
class UserPhoneNumber(Entity):
|
|
@@ -557,11 +610,19 @@ class UserPhoneNumber(Entity):
|
|
|
557
610
|
class OdooProfile(Profile):
|
|
558
611
|
"""Store Odoo API credentials for a user."""
|
|
559
612
|
|
|
613
|
+
class CRM(models.TextChoices):
|
|
614
|
+
ODOO = "odoo", _("Odoo")
|
|
615
|
+
|
|
560
616
|
profile_fields = ("host", "database", "username", "password")
|
|
561
617
|
host = SigilShortAutoField(max_length=255)
|
|
562
618
|
database = SigilShortAutoField(max_length=255)
|
|
563
619
|
username = SigilShortAutoField(max_length=255)
|
|
564
620
|
password = SigilShortAutoField(max_length=255)
|
|
621
|
+
crm = models.CharField(
|
|
622
|
+
max_length=32,
|
|
623
|
+
choices=CRM.choices,
|
|
624
|
+
default=CRM.ODOO,
|
|
625
|
+
)
|
|
565
626
|
verified_on = models.DateTimeField(null=True, blank=True)
|
|
566
627
|
odoo_uid = models.PositiveIntegerField(null=True, blank=True, editable=False)
|
|
567
628
|
name = models.CharField(max_length=255, blank=True, editable=False)
|
|
@@ -591,6 +652,14 @@ class OdooProfile(Profile):
|
|
|
591
652
|
database = self._resolved_field_value("database")
|
|
592
653
|
return database or ""
|
|
593
654
|
|
|
655
|
+
def _profile_name(self) -> str:
|
|
656
|
+
"""Return the stored name for this profile without database suffix."""
|
|
657
|
+
|
|
658
|
+
username = self._resolved_field_value("username")
|
|
659
|
+
if username:
|
|
660
|
+
return username
|
|
661
|
+
return self._resolved_field_value("database")
|
|
662
|
+
|
|
594
663
|
def save(self, *args, **kwargs):
|
|
595
664
|
if self.pk:
|
|
596
665
|
old = type(self).all_objects.get(pk=self.pk)
|
|
@@ -601,7 +670,7 @@ class OdooProfile(Profile):
|
|
|
601
670
|
or old.host != self.host
|
|
602
671
|
):
|
|
603
672
|
self._clear_verification()
|
|
604
|
-
computed_name = self.
|
|
673
|
+
computed_name = self._profile_name()
|
|
605
674
|
update_fields = kwargs.get("update_fields")
|
|
606
675
|
update_fields_set = set(update_fields) if update_fields is not None else None
|
|
607
676
|
if computed_name != self.name:
|
|
@@ -636,6 +705,7 @@ class OdooProfile(Profile):
|
|
|
636
705
|
self.odoo_uid = uid
|
|
637
706
|
self.email = info.get("email", "")
|
|
638
707
|
self.verified_on = timezone.now()
|
|
708
|
+
self.name = self._profile_name()
|
|
639
709
|
self.save(update_fields=["odoo_uid", "name", "email", "verified_on"])
|
|
640
710
|
return True
|
|
641
711
|
|
|
@@ -676,8 +746,8 @@ class OdooProfile(Profile):
|
|
|
676
746
|
return f"{owner} @ {self.host}" if owner else self.host
|
|
677
747
|
|
|
678
748
|
class Meta:
|
|
679
|
-
verbose_name = _("
|
|
680
|
-
verbose_name_plural = _("
|
|
749
|
+
verbose_name = _("CRM Employee")
|
|
750
|
+
verbose_name_plural = _("CRM Employees")
|
|
681
751
|
constraints = [
|
|
682
752
|
models.CheckConstraint(
|
|
683
753
|
check=(
|
|
@@ -690,24 +760,47 @@ class OdooProfile(Profile):
|
|
|
690
760
|
|
|
691
761
|
|
|
692
762
|
class OpenPayProfile(Profile):
|
|
693
|
-
"""Store
|
|
763
|
+
"""Store payment processor credentials for a user or security group."""
|
|
764
|
+
|
|
765
|
+
PROCESSOR_OPENPAY = "openpay"
|
|
766
|
+
PROCESSOR_PAYPAL = "paypal"
|
|
767
|
+
PROCESSOR_CHOICES = (
|
|
768
|
+
(PROCESSOR_OPENPAY, _("OpenPay")),
|
|
769
|
+
(PROCESSOR_PAYPAL, _("PayPal")),
|
|
770
|
+
)
|
|
694
771
|
|
|
695
772
|
SANDBOX_API_URL = "https://sandbox-api.openpay.mx/v1"
|
|
696
773
|
PRODUCTION_API_URL = "https://api.openpay.mx/v1"
|
|
697
774
|
|
|
775
|
+
PAYPAL_SANDBOX_API_URL = "https://api-m.sandbox.paypal.com"
|
|
776
|
+
PAYPAL_PRODUCTION_API_URL = "https://api-m.paypal.com"
|
|
777
|
+
|
|
698
778
|
profile_fields = (
|
|
699
779
|
"merchant_id",
|
|
700
780
|
"private_key",
|
|
701
781
|
"public_key",
|
|
702
782
|
"is_production",
|
|
703
783
|
"webhook_secret",
|
|
784
|
+
"paypal_client_id",
|
|
785
|
+
"paypal_client_secret",
|
|
786
|
+
"paypal_webhook_id",
|
|
787
|
+
"paypal_is_production",
|
|
704
788
|
)
|
|
705
789
|
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
790
|
+
default_processor = models.CharField(
|
|
791
|
+
max_length=20,
|
|
792
|
+
choices=PROCESSOR_CHOICES,
|
|
793
|
+
default=PROCESSOR_OPENPAY,
|
|
794
|
+
)
|
|
795
|
+
merchant_id = SigilShortAutoField(max_length=100, blank=True)
|
|
796
|
+
private_key = SigilShortAutoField(max_length=255, blank=True)
|
|
797
|
+
public_key = SigilShortAutoField(max_length=255, blank=True)
|
|
709
798
|
is_production = models.BooleanField(default=False)
|
|
710
799
|
webhook_secret = SigilShortAutoField(max_length=255, blank=True)
|
|
800
|
+
paypal_client_id = SigilShortAutoField(max_length=255, blank=True)
|
|
801
|
+
paypal_client_secret = SigilShortAutoField(max_length=255, blank=True)
|
|
802
|
+
paypal_webhook_id = SigilShortAutoField(max_length=255, blank=True)
|
|
803
|
+
paypal_is_production = models.BooleanField(default=False)
|
|
711
804
|
verified_on = models.DateTimeField(null=True, blank=True)
|
|
712
805
|
verification_reference = models.CharField(max_length=255, blank=True, editable=False)
|
|
713
806
|
|
|
@@ -724,6 +817,11 @@ class OpenPayProfile(Profile):
|
|
|
724
817
|
or old.public_key != self.public_key
|
|
725
818
|
or old.is_production != self.is_production
|
|
726
819
|
or old.webhook_secret != self.webhook_secret
|
|
820
|
+
or old.default_processor != self.default_processor
|
|
821
|
+
or old.paypal_client_id != self.paypal_client_id
|
|
822
|
+
or old.paypal_client_secret != self.paypal_client_secret
|
|
823
|
+
or old.paypal_webhook_id != self.paypal_webhook_id
|
|
824
|
+
or old.paypal_is_production != self.paypal_is_production
|
|
727
825
|
):
|
|
728
826
|
self._clear_verification()
|
|
729
827
|
super().save(*args, **kwargs)
|
|
@@ -732,6 +830,8 @@ class OpenPayProfile(Profile):
|
|
|
732
830
|
def is_verified(self):
|
|
733
831
|
return self.verified_on is not None
|
|
734
832
|
|
|
833
|
+
# --- OpenPay helpers -------------------------------------------------
|
|
834
|
+
|
|
735
835
|
def get_api_base_url(self) -> str:
|
|
736
836
|
return self.PRODUCTION_API_URL if self.is_production else self.SANDBOX_API_URL
|
|
737
837
|
|
|
@@ -748,6 +848,47 @@ class OpenPayProfile(Profile):
|
|
|
748
848
|
def is_sandbox(self) -> bool:
|
|
749
849
|
return not self.is_production
|
|
750
850
|
|
|
851
|
+
# --- PayPal helpers --------------------------------------------------
|
|
852
|
+
|
|
853
|
+
def get_paypal_api_base_url(self) -> str:
|
|
854
|
+
return (
|
|
855
|
+
self.PAYPAL_PRODUCTION_API_URL
|
|
856
|
+
if self.paypal_is_production
|
|
857
|
+
else self.PAYPAL_SANDBOX_API_URL
|
|
858
|
+
)
|
|
859
|
+
|
|
860
|
+
def get_paypal_auth(self) -> tuple[str, str]:
|
|
861
|
+
return (self.paypal_client_id, self.paypal_client_secret)
|
|
862
|
+
|
|
863
|
+
# --- Processor utilities --------------------------------------------
|
|
864
|
+
|
|
865
|
+
def has_openpay_credentials(self) -> bool:
|
|
866
|
+
return all(
|
|
867
|
+
getattr(self, field)
|
|
868
|
+
for field in ("merchant_id", "private_key", "public_key")
|
|
869
|
+
)
|
|
870
|
+
|
|
871
|
+
def has_paypal_credentials(self) -> bool:
|
|
872
|
+
return all(
|
|
873
|
+
getattr(self, field)
|
|
874
|
+
for field in ("paypal_client_id", "paypal_client_secret")
|
|
875
|
+
)
|
|
876
|
+
|
|
877
|
+
def iter_processors(self):
|
|
878
|
+
preferred = self.default_processor or self.PROCESSOR_OPENPAY
|
|
879
|
+
ordered = [preferred]
|
|
880
|
+
other = (
|
|
881
|
+
self.PROCESSOR_PAYPAL
|
|
882
|
+
if preferred == self.PROCESSOR_OPENPAY
|
|
883
|
+
else self.PROCESSOR_OPENPAY
|
|
884
|
+
)
|
|
885
|
+
ordered.append(other)
|
|
886
|
+
for processor in ordered:
|
|
887
|
+
if processor == self.PROCESSOR_OPENPAY and self.has_openpay_credentials():
|
|
888
|
+
yield processor
|
|
889
|
+
elif processor == self.PROCESSOR_PAYPAL and self.has_paypal_credentials():
|
|
890
|
+
yield processor
|
|
891
|
+
|
|
751
892
|
def sign_webhook(self, payload: bytes | str, timestamp: str | None = None) -> str:
|
|
752
893
|
if not self.webhook_secret:
|
|
753
894
|
raise ValueError("Webhook secret is not configured")
|
|
@@ -780,7 +921,7 @@ class OpenPayProfile(Profile):
|
|
|
780
921
|
self._clear_verification()
|
|
781
922
|
return self
|
|
782
923
|
|
|
783
|
-
def
|
|
924
|
+
def _verify_openpay(self):
|
|
784
925
|
url = self.build_api_url("charges")
|
|
785
926
|
try:
|
|
786
927
|
response = requests.get(
|
|
@@ -829,13 +970,62 @@ class OpenPayProfile(Profile):
|
|
|
829
970
|
self.save(update_fields=["verification_reference", "verified_on"])
|
|
830
971
|
return True
|
|
831
972
|
|
|
973
|
+
def _verify_paypal(self):
|
|
974
|
+
url = f"{self.get_paypal_api_base_url()}/v1/oauth2/token"
|
|
975
|
+
try:
|
|
976
|
+
response = requests.post(
|
|
977
|
+
url,
|
|
978
|
+
auth=self.get_paypal_auth(),
|
|
979
|
+
data={"grant_type": "client_credentials"},
|
|
980
|
+
timeout=10,
|
|
981
|
+
)
|
|
982
|
+
except requests.RequestException as exc: # pragma: no cover - network failure
|
|
983
|
+
self._clear_verification()
|
|
984
|
+
if self.pk:
|
|
985
|
+
self.save(update_fields=["verification_reference", "verified_on"])
|
|
986
|
+
raise ValidationError(
|
|
987
|
+
_("Unable to verify PayPal credentials: %(error)s")
|
|
988
|
+
% {"error": exc}
|
|
989
|
+
) from exc
|
|
990
|
+
if response.status_code != 200:
|
|
991
|
+
self._clear_verification()
|
|
992
|
+
if self.pk:
|
|
993
|
+
self.save(update_fields=["verification_reference", "verified_on"])
|
|
994
|
+
raise ValidationError(_("Invalid PayPal credentials"))
|
|
995
|
+
try:
|
|
996
|
+
payload = response.json() or {}
|
|
997
|
+
except ValueError:
|
|
998
|
+
payload = {}
|
|
999
|
+
scope = ""
|
|
1000
|
+
if isinstance(payload, dict):
|
|
1001
|
+
scope = payload.get("scope") or payload.get("access_token") or ""
|
|
1002
|
+
self.verification_reference = f"PayPal: {scope}" if scope else "PayPal"
|
|
1003
|
+
self.verified_on = timezone.now()
|
|
1004
|
+
self.save(update_fields=["verification_reference", "verified_on"])
|
|
1005
|
+
return True
|
|
1006
|
+
|
|
1007
|
+
def verify(self):
|
|
1008
|
+
errors = []
|
|
1009
|
+
for processor in self.iter_processors():
|
|
1010
|
+
try:
|
|
1011
|
+
if processor == self.PROCESSOR_OPENPAY:
|
|
1012
|
+
return self._verify_openpay()
|
|
1013
|
+
if processor == self.PROCESSOR_PAYPAL:
|
|
1014
|
+
return self._verify_paypal()
|
|
1015
|
+
except ValidationError as exc:
|
|
1016
|
+
errors.append(exc)
|
|
1017
|
+
if errors:
|
|
1018
|
+
raise errors[-1]
|
|
1019
|
+
raise ValidationError(_("No payment processors are configured."))
|
|
1020
|
+
|
|
832
1021
|
def __str__(self): # pragma: no cover - simple representation
|
|
833
1022
|
owner = self.owner_display()
|
|
834
|
-
|
|
1023
|
+
identifier = self.merchant_id or self.paypal_client_id or ""
|
|
1024
|
+
return f"{owner} @ {identifier}" if owner and identifier else (owner or identifier)
|
|
835
1025
|
|
|
836
1026
|
class Meta:
|
|
837
|
-
verbose_name = _("
|
|
838
|
-
verbose_name_plural = _("
|
|
1027
|
+
verbose_name = _("Payment Processor")
|
|
1028
|
+
verbose_name_plural = _("Payment Processors")
|
|
839
1029
|
constraints = [
|
|
840
1030
|
models.CheckConstraint(
|
|
841
1031
|
check=(
|
|
@@ -847,6 +1037,184 @@ class OpenPayProfile(Profile):
|
|
|
847
1037
|
]
|
|
848
1038
|
|
|
849
1039
|
|
|
1040
|
+
class GoogleCalendarProfile(Profile):
|
|
1041
|
+
"""Store Google Calendar configuration for a user or security group."""
|
|
1042
|
+
|
|
1043
|
+
profile_fields = ("calendar_id", "api_key", "display_name", "timezone")
|
|
1044
|
+
|
|
1045
|
+
calendar_id = SigilShortAutoField(max_length=255)
|
|
1046
|
+
api_key = SigilShortAutoField(max_length=255)
|
|
1047
|
+
display_name = models.CharField(max_length=255, blank=True)
|
|
1048
|
+
max_events = models.PositiveIntegerField(
|
|
1049
|
+
default=5,
|
|
1050
|
+
validators=[MinValueValidator(1), MaxValueValidator(20)],
|
|
1051
|
+
help_text=_("Number of upcoming events to display (1-20)."),
|
|
1052
|
+
)
|
|
1053
|
+
timezone = SigilShortAutoField(max_length=100, blank=True)
|
|
1054
|
+
|
|
1055
|
+
GOOGLE_EVENTS_URL = (
|
|
1056
|
+
"https://www.googleapis.com/calendar/v3/calendars/{calendar}/events"
|
|
1057
|
+
)
|
|
1058
|
+
GOOGLE_EMBED_URL = "https://calendar.google.com/calendar/embed?src={calendar}&ctz={tz}"
|
|
1059
|
+
|
|
1060
|
+
class Meta:
|
|
1061
|
+
verbose_name = _("Google Calendar")
|
|
1062
|
+
verbose_name_plural = _("Google Calendars")
|
|
1063
|
+
constraints = [
|
|
1064
|
+
models.CheckConstraint(
|
|
1065
|
+
check=(
|
|
1066
|
+
(Q(user__isnull=False) & Q(group__isnull=True))
|
|
1067
|
+
| (Q(user__isnull=True) & Q(group__isnull=False))
|
|
1068
|
+
),
|
|
1069
|
+
name="googlecalendarprofile_requires_owner",
|
|
1070
|
+
)
|
|
1071
|
+
]
|
|
1072
|
+
|
|
1073
|
+
def __str__(self): # pragma: no cover - simple representation
|
|
1074
|
+
label = self.get_display_name()
|
|
1075
|
+
return label or self.resolved_calendar_id()
|
|
1076
|
+
|
|
1077
|
+
def resolved_calendar_id(self) -> str:
|
|
1078
|
+
value = self.resolve_sigils("calendar_id")
|
|
1079
|
+
return value or self.calendar_id or ""
|
|
1080
|
+
|
|
1081
|
+
def resolved_api_key(self) -> str:
|
|
1082
|
+
value = self.resolve_sigils("api_key")
|
|
1083
|
+
return value or self.api_key or ""
|
|
1084
|
+
|
|
1085
|
+
def resolved_timezone(self) -> str:
|
|
1086
|
+
value = self.resolve_sigils("timezone")
|
|
1087
|
+
return value or self.timezone or ""
|
|
1088
|
+
|
|
1089
|
+
def get_timezone(self) -> ZoneInfo:
|
|
1090
|
+
tz_name = self.resolved_timezone() or settings.TIME_ZONE
|
|
1091
|
+
try:
|
|
1092
|
+
return ZoneInfo(tz_name)
|
|
1093
|
+
except Exception:
|
|
1094
|
+
return ZoneInfo("UTC")
|
|
1095
|
+
|
|
1096
|
+
def get_display_name(self) -> str:
|
|
1097
|
+
value = self.resolve_sigils("display_name")
|
|
1098
|
+
if value:
|
|
1099
|
+
return value
|
|
1100
|
+
if self.display_name:
|
|
1101
|
+
return self.display_name
|
|
1102
|
+
return ""
|
|
1103
|
+
|
|
1104
|
+
def build_events_url(self) -> str:
|
|
1105
|
+
calendar = self.resolved_calendar_id().strip()
|
|
1106
|
+
if not calendar:
|
|
1107
|
+
return ""
|
|
1108
|
+
encoded = quote(calendar, safe="@")
|
|
1109
|
+
return self.GOOGLE_EVENTS_URL.format(calendar=encoded)
|
|
1110
|
+
|
|
1111
|
+
def build_calendar_url(self) -> str:
|
|
1112
|
+
calendar = self.resolved_calendar_id().strip()
|
|
1113
|
+
if not calendar:
|
|
1114
|
+
return ""
|
|
1115
|
+
tz = self.get_timezone().key
|
|
1116
|
+
encoded_calendar = quote_plus(calendar)
|
|
1117
|
+
encoded_tz = quote_plus(tz)
|
|
1118
|
+
return self.GOOGLE_EMBED_URL.format(calendar=encoded_calendar, tz=encoded_tz)
|
|
1119
|
+
|
|
1120
|
+
def _parse_event_point(self, data: dict) -> tuple[datetime_datetime | None, bool]:
|
|
1121
|
+
if not isinstance(data, dict):
|
|
1122
|
+
return None, False
|
|
1123
|
+
|
|
1124
|
+
tz_name = data.get("timeZone")
|
|
1125
|
+
default_tz = self.get_timezone()
|
|
1126
|
+
tzinfo = default_tz
|
|
1127
|
+
if tz_name:
|
|
1128
|
+
try:
|
|
1129
|
+
tzinfo = ZoneInfo(tz_name)
|
|
1130
|
+
except Exception:
|
|
1131
|
+
tzinfo = default_tz
|
|
1132
|
+
|
|
1133
|
+
timestamp = data.get("dateTime")
|
|
1134
|
+
if timestamp:
|
|
1135
|
+
dt = parse_datetime(timestamp)
|
|
1136
|
+
if dt is None:
|
|
1137
|
+
try:
|
|
1138
|
+
dt = datetime_datetime.fromisoformat(
|
|
1139
|
+
timestamp.replace("Z", "+00:00")
|
|
1140
|
+
)
|
|
1141
|
+
except ValueError:
|
|
1142
|
+
dt = None
|
|
1143
|
+
if dt is not None and dt.tzinfo is None:
|
|
1144
|
+
dt = dt.replace(tzinfo=tzinfo)
|
|
1145
|
+
return dt, False
|
|
1146
|
+
|
|
1147
|
+
date_value = data.get("date")
|
|
1148
|
+
if date_value:
|
|
1149
|
+
try:
|
|
1150
|
+
day = datetime_date.fromisoformat(date_value)
|
|
1151
|
+
except ValueError:
|
|
1152
|
+
return None, True
|
|
1153
|
+
dt = datetime_datetime.combine(day, datetime_time.min, tzinfo=tzinfo)
|
|
1154
|
+
return dt, True
|
|
1155
|
+
|
|
1156
|
+
return None, False
|
|
1157
|
+
|
|
1158
|
+
def fetch_events(self, *, max_results: int | None = None) -> list[dict[str, object]]:
|
|
1159
|
+
calendar_id = self.resolved_calendar_id().strip()
|
|
1160
|
+
api_key = self.resolved_api_key().strip()
|
|
1161
|
+
if not calendar_id or not api_key:
|
|
1162
|
+
return []
|
|
1163
|
+
|
|
1164
|
+
url = self.build_events_url()
|
|
1165
|
+
if not url:
|
|
1166
|
+
return []
|
|
1167
|
+
|
|
1168
|
+
now = timezone.now().astimezone(datetime_timezone.utc).replace(microsecond=0)
|
|
1169
|
+
params = {
|
|
1170
|
+
"key": api_key,
|
|
1171
|
+
"singleEvents": "true",
|
|
1172
|
+
"orderBy": "startTime",
|
|
1173
|
+
"timeMin": now.isoformat().replace("+00:00", "Z"),
|
|
1174
|
+
"maxResults": max_results or self.max_events or 5,
|
|
1175
|
+
}
|
|
1176
|
+
|
|
1177
|
+
try:
|
|
1178
|
+
response = requests.get(url, params=params, timeout=10)
|
|
1179
|
+
response.raise_for_status()
|
|
1180
|
+
payload = response.json()
|
|
1181
|
+
except (requests.RequestException, ValueError):
|
|
1182
|
+
logger.warning(
|
|
1183
|
+
"Failed to fetch Google Calendar events for profile %s", self.pk,
|
|
1184
|
+
exc_info=True,
|
|
1185
|
+
)
|
|
1186
|
+
return []
|
|
1187
|
+
|
|
1188
|
+
items = payload.get("items")
|
|
1189
|
+
if not isinstance(items, list):
|
|
1190
|
+
return []
|
|
1191
|
+
|
|
1192
|
+
events: list[dict[str, object]] = []
|
|
1193
|
+
for item in items:
|
|
1194
|
+
if not isinstance(item, dict):
|
|
1195
|
+
continue
|
|
1196
|
+
start, all_day = self._parse_event_point(item.get("start") or {})
|
|
1197
|
+
end, _ = self._parse_event_point(item.get("end") or {})
|
|
1198
|
+
summary = item.get("summary") or ""
|
|
1199
|
+
link = item.get("htmlLink") or ""
|
|
1200
|
+
location = item.get("location") or ""
|
|
1201
|
+
if start is None:
|
|
1202
|
+
continue
|
|
1203
|
+
events.append(
|
|
1204
|
+
{
|
|
1205
|
+
"summary": summary,
|
|
1206
|
+
"start": start,
|
|
1207
|
+
"end": end,
|
|
1208
|
+
"all_day": all_day,
|
|
1209
|
+
"html_link": link,
|
|
1210
|
+
"location": location,
|
|
1211
|
+
}
|
|
1212
|
+
)
|
|
1213
|
+
|
|
1214
|
+
events.sort(key=lambda event: event.get("start") or timezone.now())
|
|
1215
|
+
return events
|
|
1216
|
+
|
|
1217
|
+
|
|
850
1218
|
class EmailInbox(Profile):
|
|
851
1219
|
"""Credentials and configuration for connecting to an email mailbox."""
|
|
852
1220
|
|
|
@@ -1760,10 +2128,16 @@ class Reference(Entity):
|
|
|
1760
2128
|
return (self.alt_text,)
|
|
1761
2129
|
|
|
1762
2130
|
|
|
2131
|
+
class Meta:
|
|
2132
|
+
verbose_name = _("Reference")
|
|
2133
|
+
verbose_name_plural = _("References")
|
|
2134
|
+
|
|
2135
|
+
|
|
1763
2136
|
class RFID(Entity):
|
|
1764
2137
|
"""RFID tag that may be assigned to one account."""
|
|
1765
2138
|
|
|
1766
2139
|
label_id = models.AutoField(primary_key=True, db_column="label_id")
|
|
2140
|
+
MATCH_PREFIX_LENGTH = 8
|
|
1767
2141
|
rfid = models.CharField(
|
|
1768
2142
|
max_length=255,
|
|
1769
2143
|
unique=True,
|
|
@@ -1775,6 +2149,14 @@ class RFID(Entity):
|
|
|
1775
2149
|
)
|
|
1776
2150
|
],
|
|
1777
2151
|
)
|
|
2152
|
+
reversed_uid = models.CharField(
|
|
2153
|
+
max_length=255,
|
|
2154
|
+
default="",
|
|
2155
|
+
blank=True,
|
|
2156
|
+
editable=False,
|
|
2157
|
+
verbose_name="Reversed UID",
|
|
2158
|
+
help_text="UID value stored with opposite endianness for reference.",
|
|
2159
|
+
)
|
|
1778
2160
|
custom_label = models.CharField(
|
|
1779
2161
|
max_length=32,
|
|
1780
2162
|
blank=True,
|
|
@@ -1851,6 +2233,17 @@ class RFID(Entity):
|
|
|
1851
2233
|
choices=KIND_CHOICES,
|
|
1852
2234
|
default=CLASSIC,
|
|
1853
2235
|
)
|
|
2236
|
+
BIG_ENDIAN = "BIG"
|
|
2237
|
+
LITTLE_ENDIAN = "LITTLE"
|
|
2238
|
+
ENDIANNESS_CHOICES = [
|
|
2239
|
+
(BIG_ENDIAN, _("Big endian")),
|
|
2240
|
+
(LITTLE_ENDIAN, _("Little endian")),
|
|
2241
|
+
]
|
|
2242
|
+
endianness = models.CharField(
|
|
2243
|
+
max_length=6,
|
|
2244
|
+
choices=ENDIANNESS_CHOICES,
|
|
2245
|
+
default=BIG_ENDIAN,
|
|
2246
|
+
)
|
|
1854
2247
|
reference = models.ForeignKey(
|
|
1855
2248
|
"Reference",
|
|
1856
2249
|
null=True,
|
|
@@ -1895,13 +2288,24 @@ class RFID(Entity):
|
|
|
1895
2288
|
if self.key_b and old["key_b"] != self.key_b.upper():
|
|
1896
2289
|
self.key_b_verified = False
|
|
1897
2290
|
if self.rfid:
|
|
1898
|
-
|
|
2291
|
+
normalized_rfid = self.rfid.upper()
|
|
2292
|
+
self.rfid = normalized_rfid
|
|
2293
|
+
reversed_uid = self.reverse_uid(normalized_rfid)
|
|
2294
|
+
if reversed_uid != self.reversed_uid:
|
|
2295
|
+
self.reversed_uid = reversed_uid
|
|
2296
|
+
if update_fields:
|
|
2297
|
+
fields = set(update_fields)
|
|
2298
|
+
if "reversed_uid" not in fields:
|
|
2299
|
+
fields.add("reversed_uid")
|
|
2300
|
+
kwargs["update_fields"] = tuple(fields)
|
|
1899
2301
|
if self.key_a:
|
|
1900
2302
|
self.key_a = self.key_a.upper()
|
|
1901
2303
|
if self.key_b:
|
|
1902
2304
|
self.key_b = self.key_b.upper()
|
|
1903
2305
|
if self.kind:
|
|
1904
2306
|
self.kind = self.kind.upper()
|
|
2307
|
+
if self.endianness:
|
|
2308
|
+
self.endianness = self.normalize_endianness(self.endianness)
|
|
1905
2309
|
super().save(*args, **kwargs)
|
|
1906
2310
|
if not self.allowed:
|
|
1907
2311
|
self.energy_accounts.clear()
|
|
@@ -1909,6 +2313,132 @@ class RFID(Entity):
|
|
|
1909
2313
|
def __str__(self): # pragma: no cover - simple representation
|
|
1910
2314
|
return str(self.label_id)
|
|
1911
2315
|
|
|
2316
|
+
@classmethod
|
|
2317
|
+
def normalize_code(cls, value: str) -> str:
|
|
2318
|
+
"""Return ``value`` normalized for comparisons."""
|
|
2319
|
+
|
|
2320
|
+
return "".join((value or "").split()).upper()
|
|
2321
|
+
|
|
2322
|
+
def adopt_rfid(self, candidate: str) -> bool:
|
|
2323
|
+
"""Adopt ``candidate`` as the stored RFID if it is a better match."""
|
|
2324
|
+
|
|
2325
|
+
normalized = type(self).normalize_code(candidate)
|
|
2326
|
+
if not normalized:
|
|
2327
|
+
return False
|
|
2328
|
+
current = type(self).normalize_code(self.rfid)
|
|
2329
|
+
if current == normalized:
|
|
2330
|
+
return False
|
|
2331
|
+
if not current:
|
|
2332
|
+
self.rfid = normalized
|
|
2333
|
+
return True
|
|
2334
|
+
reversed_current = type(self).reverse_uid(current)
|
|
2335
|
+
if reversed_current and reversed_current == normalized:
|
|
2336
|
+
self.rfid = normalized
|
|
2337
|
+
return True
|
|
2338
|
+
if len(normalized) < len(current):
|
|
2339
|
+
self.rfid = normalized
|
|
2340
|
+
return True
|
|
2341
|
+
if len(normalized) == len(current) and normalized < current:
|
|
2342
|
+
self.rfid = normalized
|
|
2343
|
+
return True
|
|
2344
|
+
return False
|
|
2345
|
+
|
|
2346
|
+
@classmethod
|
|
2347
|
+
def matching_queryset(cls, value: str) -> models.QuerySet["RFID"]:
|
|
2348
|
+
"""Return RFID records matching ``value`` using prefix comparison."""
|
|
2349
|
+
|
|
2350
|
+
normalized = cls.normalize_code(value)
|
|
2351
|
+
if not normalized:
|
|
2352
|
+
return cls.objects.none()
|
|
2353
|
+
|
|
2354
|
+
conditions: list[Q] = []
|
|
2355
|
+
candidate = normalized
|
|
2356
|
+
if candidate:
|
|
2357
|
+
conditions.append(Q(rfid=candidate))
|
|
2358
|
+
alternate = cls.reverse_uid(candidate)
|
|
2359
|
+
if alternate and alternate != candidate:
|
|
2360
|
+
conditions.append(Q(rfid=alternate))
|
|
2361
|
+
|
|
2362
|
+
prefix_length = min(len(candidate), cls.MATCH_PREFIX_LENGTH)
|
|
2363
|
+
if prefix_length:
|
|
2364
|
+
prefix = candidate[:prefix_length]
|
|
2365
|
+
conditions.append(Q(rfid__startswith=prefix))
|
|
2366
|
+
if alternate and alternate != candidate:
|
|
2367
|
+
alt_prefix = alternate[:prefix_length]
|
|
2368
|
+
if alt_prefix:
|
|
2369
|
+
conditions.append(Q(rfid__startswith=alt_prefix))
|
|
2370
|
+
|
|
2371
|
+
query: Q | None = None
|
|
2372
|
+
for condition in conditions:
|
|
2373
|
+
query = condition if query is None else query | condition
|
|
2374
|
+
|
|
2375
|
+
if query is None:
|
|
2376
|
+
return cls.objects.none()
|
|
2377
|
+
|
|
2378
|
+
queryset = cls.objects.filter(query).distinct()
|
|
2379
|
+
return queryset.annotate(rfid_length=Length("rfid")).order_by(
|
|
2380
|
+
"rfid_length", "rfid", "pk"
|
|
2381
|
+
)
|
|
2382
|
+
|
|
2383
|
+
@classmethod
|
|
2384
|
+
def find_match(cls, value: str) -> "RFID | None":
|
|
2385
|
+
"""Return the best matching RFID for ``value`` if it exists."""
|
|
2386
|
+
|
|
2387
|
+
return cls.matching_queryset(value).first()
|
|
2388
|
+
|
|
2389
|
+
@classmethod
|
|
2390
|
+
def update_or_create_from_code(
|
|
2391
|
+
cls, value: str, defaults: dict[str, Any] | None = None
|
|
2392
|
+
) -> tuple["RFID", bool]:
|
|
2393
|
+
"""Update or create an RFID using relaxed matching rules."""
|
|
2394
|
+
|
|
2395
|
+
normalized = cls.normalize_code(value)
|
|
2396
|
+
if not normalized:
|
|
2397
|
+
raise ValueError("RFID value is required")
|
|
2398
|
+
|
|
2399
|
+
defaults_map = defaults.copy() if defaults else {}
|
|
2400
|
+
existing = cls.find_match(normalized)
|
|
2401
|
+
if existing:
|
|
2402
|
+
update_fields: set[str] = set()
|
|
2403
|
+
if existing.adopt_rfid(normalized):
|
|
2404
|
+
update_fields.add("rfid")
|
|
2405
|
+
for field_name, new_value in defaults_map.items():
|
|
2406
|
+
if getattr(existing, field_name) != new_value:
|
|
2407
|
+
setattr(existing, field_name, new_value)
|
|
2408
|
+
update_fields.add(field_name)
|
|
2409
|
+
if update_fields:
|
|
2410
|
+
existing.save(update_fields=sorted(update_fields))
|
|
2411
|
+
return existing, False
|
|
2412
|
+
|
|
2413
|
+
create_kwargs = defaults_map
|
|
2414
|
+
create_kwargs["rfid"] = normalized
|
|
2415
|
+
tag = cls.objects.create(**create_kwargs)
|
|
2416
|
+
return tag, True
|
|
2417
|
+
|
|
2418
|
+
@classmethod
|
|
2419
|
+
def normalize_endianness(cls, value: object) -> str:
|
|
2420
|
+
"""Return a valid endianness value, defaulting to BIG."""
|
|
2421
|
+
|
|
2422
|
+
if isinstance(value, str):
|
|
2423
|
+
candidate = value.strip().upper()
|
|
2424
|
+
valid = {choice[0] for choice in cls.ENDIANNESS_CHOICES}
|
|
2425
|
+
if candidate in valid:
|
|
2426
|
+
return candidate
|
|
2427
|
+
return cls.BIG_ENDIAN
|
|
2428
|
+
|
|
2429
|
+
@staticmethod
|
|
2430
|
+
def reverse_uid(value: str) -> str:
|
|
2431
|
+
"""Return ``value`` with reversed byte order for reference storage."""
|
|
2432
|
+
|
|
2433
|
+
normalized = "".join((value or "").split()).upper()
|
|
2434
|
+
if not normalized:
|
|
2435
|
+
return ""
|
|
2436
|
+
if len(normalized) % 2 != 0:
|
|
2437
|
+
return normalized[::-1]
|
|
2438
|
+
bytes_list = [normalized[index : index + 2] for index in range(0, len(normalized), 2)]
|
|
2439
|
+
bytes_list.reverse()
|
|
2440
|
+
return "".join(bytes_list)
|
|
2441
|
+
|
|
1912
2442
|
@classmethod
|
|
1913
2443
|
def next_scan_label(
|
|
1914
2444
|
cls, *, step: int | None = None, start: int | None = None
|
|
@@ -1971,13 +2501,26 @@ class RFID(Entity):
|
|
|
1971
2501
|
|
|
1972
2502
|
@classmethod
|
|
1973
2503
|
def register_scan(
|
|
1974
|
-
cls,
|
|
2504
|
+
cls,
|
|
2505
|
+
rfid: str,
|
|
2506
|
+
*,
|
|
2507
|
+
kind: str | None = None,
|
|
2508
|
+
endianness: str | None = None,
|
|
1975
2509
|
) -> tuple["RFID", bool]:
|
|
1976
2510
|
"""Return or create an RFID that was detected via scanning."""
|
|
1977
2511
|
|
|
1978
|
-
normalized = (rfid
|
|
1979
|
-
|
|
2512
|
+
normalized = cls.normalize_code(rfid)
|
|
2513
|
+
desired_endianness = cls.normalize_endianness(endianness)
|
|
2514
|
+
existing = cls.find_match(normalized)
|
|
1980
2515
|
if existing:
|
|
2516
|
+
update_fields: list[str] = []
|
|
2517
|
+
if existing.adopt_rfid(normalized):
|
|
2518
|
+
update_fields.append("rfid")
|
|
2519
|
+
if existing.endianness != desired_endianness:
|
|
2520
|
+
existing.endianness = desired_endianness
|
|
2521
|
+
update_fields.append("endianness")
|
|
2522
|
+
if update_fields:
|
|
2523
|
+
existing.save(update_fields=update_fields)
|
|
1981
2524
|
return existing, False
|
|
1982
2525
|
|
|
1983
2526
|
attempts = 0
|
|
@@ -1990,6 +2533,7 @@ class RFID(Entity):
|
|
|
1990
2533
|
"rfid": normalized,
|
|
1991
2534
|
"allowed": True,
|
|
1992
2535
|
"released": False,
|
|
2536
|
+
"endianness": desired_endianness,
|
|
1993
2537
|
}
|
|
1994
2538
|
if kind:
|
|
1995
2539
|
create_kwargs["kind"] = kind
|
|
@@ -1998,23 +2542,28 @@ class RFID(Entity):
|
|
|
1998
2542
|
tag = cls.objects.create(**create_kwargs)
|
|
1999
2543
|
cls._reset_label_sequence()
|
|
2000
2544
|
except IntegrityError:
|
|
2001
|
-
existing = cls.
|
|
2545
|
+
existing = cls.find_match(normalized)
|
|
2002
2546
|
if existing:
|
|
2003
2547
|
return existing, False
|
|
2004
2548
|
else:
|
|
2005
2549
|
return tag, True
|
|
2006
2550
|
raise IntegrityError("Unable to allocate label id for scanned RFID")
|
|
2007
2551
|
|
|
2008
|
-
@
|
|
2009
|
-
def get_account_by_rfid(value):
|
|
2552
|
+
@classmethod
|
|
2553
|
+
def get_account_by_rfid(cls, value):
|
|
2010
2554
|
"""Return the energy account associated with an RFID code if it exists."""
|
|
2011
2555
|
try:
|
|
2012
2556
|
EnergyAccount = apps.get_model("core", "EnergyAccount")
|
|
2013
2557
|
except LookupError: # pragma: no cover - energy accounts app optional
|
|
2014
2558
|
return None
|
|
2015
|
-
|
|
2016
|
-
|
|
2017
|
-
|
|
2559
|
+
matches = cls.matching_queryset(value).filter(allowed=True)
|
|
2560
|
+
if not matches.exists():
|
|
2561
|
+
return None
|
|
2562
|
+
return (
|
|
2563
|
+
EnergyAccount.objects.filter(rfids__in=matches)
|
|
2564
|
+
.distinct()
|
|
2565
|
+
.first()
|
|
2566
|
+
)
|
|
2018
2567
|
|
|
2019
2568
|
class Meta:
|
|
2020
2569
|
verbose_name = "RFID"
|
|
@@ -2365,8 +2914,24 @@ class ClientReportSchedule(Entity):
|
|
|
2365
2914
|
periodicity = models.CharField(
|
|
2366
2915
|
max_length=12, choices=PERIODICITY_CHOICES, default=PERIODICITY_NONE
|
|
2367
2916
|
)
|
|
2917
|
+
language = models.CharField(
|
|
2918
|
+
max_length=12,
|
|
2919
|
+
choices=settings.LANGUAGES,
|
|
2920
|
+
default=default_report_language,
|
|
2921
|
+
)
|
|
2922
|
+
title = models.CharField(
|
|
2923
|
+
max_length=200,
|
|
2924
|
+
blank=True,
|
|
2925
|
+
default="",
|
|
2926
|
+
verbose_name=_("Title"),
|
|
2927
|
+
)
|
|
2368
2928
|
email_recipients = models.JSONField(default=list, blank=True)
|
|
2369
2929
|
disable_emails = models.BooleanField(default=False)
|
|
2930
|
+
chargers = models.ManyToManyField(
|
|
2931
|
+
"ocpp.Charger",
|
|
2932
|
+
blank=True,
|
|
2933
|
+
related_name="client_report_schedules",
|
|
2934
|
+
)
|
|
2370
2935
|
periodic_task = models.OneToOneField(
|
|
2371
2936
|
"django_celery_beat.PeriodicTask",
|
|
2372
2937
|
on_delete=models.SET_NULL,
|
|
@@ -2380,11 +2945,19 @@ class ClientReportSchedule(Entity):
|
|
|
2380
2945
|
verbose_name = "Client Report Schedule"
|
|
2381
2946
|
verbose_name_plural = "Client Report Schedules"
|
|
2382
2947
|
|
|
2948
|
+
@classmethod
|
|
2949
|
+
def label_for_periodicity(cls, value: str) -> str:
|
|
2950
|
+
lookup = dict(cls.PERIODICITY_CHOICES)
|
|
2951
|
+
return lookup.get(value, value)
|
|
2952
|
+
|
|
2383
2953
|
def __str__(self) -> str: # pragma: no cover - simple representation
|
|
2384
2954
|
owner = self.owner.get_username() if self.owner else "Unassigned"
|
|
2385
2955
|
return f"Client Report Schedule ({owner})"
|
|
2386
2956
|
|
|
2387
2957
|
def save(self, *args, **kwargs):
|
|
2958
|
+
if self.language:
|
|
2959
|
+
self.language = normalize_report_language(self.language)
|
|
2960
|
+
self.title = normalize_report_title(self.title)
|
|
2388
2961
|
sync = kwargs.pop("sync_task", True)
|
|
2389
2962
|
super().save(*args, **kwargs)
|
|
2390
2963
|
if sync and self.pk:
|
|
@@ -2436,7 +3009,8 @@ class ClientReportSchedule(Entity):
|
|
|
2436
3009
|
month_of_year="*",
|
|
2437
3010
|
)
|
|
2438
3011
|
|
|
2439
|
-
|
|
3012
|
+
raw_name = f"client_report_schedule_{self.pk}"
|
|
3013
|
+
name = normalize_periodic_task_name(PeriodicTask.objects, raw_name)
|
|
2440
3014
|
defaults = {
|
|
2441
3015
|
"crontab": schedule,
|
|
2442
3016
|
"task": "core.tasks.run_client_report_schedule",
|
|
@@ -2476,6 +3050,78 @@ class ClientReportSchedule(Entity):
|
|
|
2476
3050
|
|
|
2477
3051
|
return start, end
|
|
2478
3052
|
|
|
3053
|
+
def _advance_period(
|
|
3054
|
+
self, start: datetime_date, end: datetime_date
|
|
3055
|
+
) -> tuple[datetime_date, datetime_date]:
|
|
3056
|
+
import calendar as _calendar
|
|
3057
|
+
import datetime as _datetime
|
|
3058
|
+
|
|
3059
|
+
if self.periodicity == self.PERIODICITY_DAILY:
|
|
3060
|
+
delta = _datetime.timedelta(days=1)
|
|
3061
|
+
return start + delta, end + delta
|
|
3062
|
+
if self.periodicity == self.PERIODICITY_WEEKLY:
|
|
3063
|
+
delta = _datetime.timedelta(days=7)
|
|
3064
|
+
return start + delta, end + delta
|
|
3065
|
+
if self.periodicity == self.PERIODICITY_MONTHLY:
|
|
3066
|
+
base_start = start.replace(day=1)
|
|
3067
|
+
year = base_start.year
|
|
3068
|
+
month = base_start.month
|
|
3069
|
+
if month == 12:
|
|
3070
|
+
next_year = year + 1
|
|
3071
|
+
next_month = 1
|
|
3072
|
+
else:
|
|
3073
|
+
next_year = year
|
|
3074
|
+
next_month = month + 1
|
|
3075
|
+
next_start = base_start.replace(year=next_year, month=next_month, day=1)
|
|
3076
|
+
last_day = _calendar.monthrange(next_year, next_month)[1]
|
|
3077
|
+
next_end = next_start.replace(day=last_day)
|
|
3078
|
+
return next_start, next_end
|
|
3079
|
+
raise ValueError("advance_period called for non-recurring schedule")
|
|
3080
|
+
|
|
3081
|
+
def iter_pending_periods(self, reference=None):
|
|
3082
|
+
from django.utils import timezone
|
|
3083
|
+
|
|
3084
|
+
if self.periodicity == self.PERIODICITY_NONE:
|
|
3085
|
+
return []
|
|
3086
|
+
|
|
3087
|
+
ref_date = reference or timezone.localdate()
|
|
3088
|
+
try:
|
|
3089
|
+
target_start, target_end = self.calculate_period(reference=ref_date)
|
|
3090
|
+
except ValueError:
|
|
3091
|
+
return []
|
|
3092
|
+
|
|
3093
|
+
reports = self.reports.order_by("start_date", "end_date")
|
|
3094
|
+
last_report = reports.last()
|
|
3095
|
+
if last_report:
|
|
3096
|
+
current_start, current_end = self._advance_period(
|
|
3097
|
+
last_report.start_date, last_report.end_date
|
|
3098
|
+
)
|
|
3099
|
+
else:
|
|
3100
|
+
current_start, current_end = target_start, target_end
|
|
3101
|
+
|
|
3102
|
+
if current_end < current_start:
|
|
3103
|
+
return []
|
|
3104
|
+
|
|
3105
|
+
pending: list[tuple[datetime.date, datetime.date]] = []
|
|
3106
|
+
safety = 0
|
|
3107
|
+
while current_end <= target_end:
|
|
3108
|
+
exists = reports.filter(
|
|
3109
|
+
start_date=current_start, end_date=current_end
|
|
3110
|
+
).exists()
|
|
3111
|
+
if not exists:
|
|
3112
|
+
pending.append((current_start, current_end))
|
|
3113
|
+
try:
|
|
3114
|
+
current_start, current_end = self._advance_period(
|
|
3115
|
+
current_start, current_end
|
|
3116
|
+
)
|
|
3117
|
+
except ValueError:
|
|
3118
|
+
break
|
|
3119
|
+
safety += 1
|
|
3120
|
+
if safety > 400:
|
|
3121
|
+
break
|
|
3122
|
+
|
|
3123
|
+
return pending
|
|
3124
|
+
|
|
2479
3125
|
def resolve_recipients(self):
|
|
2480
3126
|
"""Return (to, cc) email lists respecting owner fallbacks."""
|
|
2481
3127
|
|
|
@@ -2523,38 +3169,27 @@ class ClientReportSchedule(Entity):
|
|
|
2523
3169
|
|
|
2524
3170
|
return to, cc
|
|
2525
3171
|
|
|
3172
|
+
def resolve_reply_to(self) -> list[str]:
|
|
3173
|
+
return ClientReport.resolve_reply_to_for_owner(self.owner)
|
|
3174
|
+
|
|
2526
3175
|
def get_outbox(self):
|
|
2527
3176
|
"""Return the preferred :class:`nodes.models.EmailOutbox` instance."""
|
|
2528
3177
|
|
|
2529
|
-
|
|
2530
|
-
|
|
2531
|
-
if self.owner:
|
|
2532
|
-
try:
|
|
2533
|
-
outbox = self.owner.get_profile(EmailOutbox)
|
|
2534
|
-
except Exception: # pragma: no cover - defensive catch
|
|
2535
|
-
outbox = None
|
|
2536
|
-
if outbox:
|
|
2537
|
-
return outbox
|
|
2538
|
-
|
|
2539
|
-
node = Node.get_local()
|
|
2540
|
-
if node:
|
|
2541
|
-
return getattr(node, "email_outbox", None)
|
|
2542
|
-
return None
|
|
3178
|
+
return ClientReport.resolve_outbox_for_owner(self.owner)
|
|
2543
3179
|
|
|
2544
3180
|
def notify_failure(self, message: str):
|
|
2545
3181
|
from nodes.models import NetMessage
|
|
2546
3182
|
|
|
2547
3183
|
NetMessage.broadcast("Client report delivery issue", message)
|
|
2548
3184
|
|
|
2549
|
-
def run(self):
|
|
3185
|
+
def run(self, *, start: datetime_date | None = None, end: datetime_date | None = None):
|
|
2550
3186
|
"""Generate the report, persist it and deliver notifications."""
|
|
2551
3187
|
|
|
2552
|
-
|
|
2553
|
-
|
|
2554
|
-
|
|
2555
|
-
|
|
2556
|
-
|
|
2557
|
-
return None
|
|
3188
|
+
if start is None or end is None:
|
|
3189
|
+
try:
|
|
3190
|
+
start, end = self.calculate_period()
|
|
3191
|
+
except ValueError:
|
|
3192
|
+
return None
|
|
2558
3193
|
|
|
2559
3194
|
try:
|
|
2560
3195
|
report = ClientReport.generate(
|
|
@@ -2564,8 +3199,12 @@ class ClientReportSchedule(Entity):
|
|
|
2564
3199
|
schedule=self,
|
|
2565
3200
|
recipients=self.email_recipients,
|
|
2566
3201
|
disable_emails=self.disable_emails,
|
|
3202
|
+
chargers=list(self.chargers.all()),
|
|
3203
|
+
language=self.language,
|
|
3204
|
+
title=self.title,
|
|
2567
3205
|
)
|
|
2568
|
-
|
|
3206
|
+
report.chargers.set(self.chargers.all())
|
|
3207
|
+
report.store_local_copy()
|
|
2569
3208
|
except Exception as exc:
|
|
2570
3209
|
self.notify_failure(str(exc))
|
|
2571
3210
|
raise
|
|
@@ -2577,32 +3216,12 @@ class ClientReportSchedule(Entity):
|
|
|
2577
3216
|
raise RuntimeError("No recipients available for client report")
|
|
2578
3217
|
else:
|
|
2579
3218
|
try:
|
|
2580
|
-
|
|
2581
|
-
|
|
2582
|
-
attachments.append((html_name, html_content, "text/html"))
|
|
2583
|
-
json_file = Path(settings.BASE_DIR) / export["json_path"]
|
|
2584
|
-
if json_file.exists():
|
|
2585
|
-
attachments.append(
|
|
2586
|
-
(
|
|
2587
|
-
json_file.name,
|
|
2588
|
-
json_file.read_text(encoding="utf-8"),
|
|
2589
|
-
"application/json",
|
|
2590
|
-
)
|
|
2591
|
-
)
|
|
2592
|
-
subject = f"Client report {report.start_date} to {report.end_date}"
|
|
2593
|
-
body = (
|
|
2594
|
-
"Attached is the client report generated for the period "
|
|
2595
|
-
f"{report.start_date} to {report.end_date}."
|
|
2596
|
-
)
|
|
2597
|
-
mailer.send(
|
|
2598
|
-
subject,
|
|
2599
|
-
body,
|
|
2600
|
-
to,
|
|
2601
|
-
outbox=self.get_outbox(),
|
|
3219
|
+
delivered = report.send_delivery(
|
|
3220
|
+
to=to,
|
|
2602
3221
|
cc=cc,
|
|
2603
|
-
|
|
3222
|
+
outbox=self.get_outbox(),
|
|
3223
|
+
reply_to=self.resolve_reply_to(),
|
|
2604
3224
|
)
|
|
2605
|
-
delivered = list(dict.fromkeys(to + (cc or [])))
|
|
2606
3225
|
if delivered:
|
|
2607
3226
|
type(report).objects.filter(pk=report.pk).update(
|
|
2608
3227
|
recipients=delivered
|
|
@@ -2617,6 +3236,14 @@ class ClientReportSchedule(Entity):
|
|
|
2617
3236
|
self.last_generated_on = now
|
|
2618
3237
|
return report
|
|
2619
3238
|
|
|
3239
|
+
def generate_missing_reports(self, reference=None):
|
|
3240
|
+
generated: list["ClientReport"] = []
|
|
3241
|
+
for start, end in self.iter_pending_periods(reference=reference):
|
|
3242
|
+
report = self.run(start=start, end=end)
|
|
3243
|
+
if report:
|
|
3244
|
+
generated.append(report)
|
|
3245
|
+
return generated
|
|
3246
|
+
|
|
2620
3247
|
|
|
2621
3248
|
class ClientReport(Entity):
|
|
2622
3249
|
"""Snapshot of energy usage over a period."""
|
|
@@ -2639,15 +3266,70 @@ class ClientReport(Entity):
|
|
|
2639
3266
|
blank=True,
|
|
2640
3267
|
related_name="reports",
|
|
2641
3268
|
)
|
|
3269
|
+
language = models.CharField(
|
|
3270
|
+
max_length=12,
|
|
3271
|
+
choices=settings.LANGUAGES,
|
|
3272
|
+
default=default_report_language,
|
|
3273
|
+
)
|
|
3274
|
+
title = models.CharField(
|
|
3275
|
+
max_length=200,
|
|
3276
|
+
blank=True,
|
|
3277
|
+
default="",
|
|
3278
|
+
verbose_name=_("Title"),
|
|
3279
|
+
)
|
|
2642
3280
|
recipients = models.JSONField(default=list, blank=True)
|
|
2643
3281
|
disable_emails = models.BooleanField(default=False)
|
|
3282
|
+
chargers = models.ManyToManyField(
|
|
3283
|
+
"ocpp.Charger",
|
|
3284
|
+
blank=True,
|
|
3285
|
+
related_name="client_reports",
|
|
3286
|
+
)
|
|
2644
3287
|
|
|
2645
3288
|
class Meta:
|
|
2646
|
-
verbose_name = "Consumer Report"
|
|
2647
|
-
verbose_name_plural = "Consumer Reports"
|
|
3289
|
+
verbose_name = _("Consumer Report")
|
|
3290
|
+
verbose_name_plural = _("Consumer Reports")
|
|
2648
3291
|
db_table = "core_client_report"
|
|
2649
3292
|
ordering = ["-created_on"]
|
|
2650
3293
|
|
|
3294
|
+
def __str__(self) -> str: # pragma: no cover - simple representation
|
|
3295
|
+
period_type = (
|
|
3296
|
+
self.schedule.periodicity
|
|
3297
|
+
if self.schedule
|
|
3298
|
+
else ClientReportSchedule.PERIODICITY_NONE
|
|
3299
|
+
)
|
|
3300
|
+
return f"{self.start_date} - {self.end_date} ({period_type})"
|
|
3301
|
+
|
|
3302
|
+
@staticmethod
|
|
3303
|
+
def default_language() -> str:
|
|
3304
|
+
return default_report_language()
|
|
3305
|
+
|
|
3306
|
+
@staticmethod
|
|
3307
|
+
def normalize_language(language: str | None) -> str:
|
|
3308
|
+
return normalize_report_language(language)
|
|
3309
|
+
|
|
3310
|
+
@staticmethod
|
|
3311
|
+
def normalize_title(title: str | None) -> str:
|
|
3312
|
+
return normalize_report_title(title)
|
|
3313
|
+
|
|
3314
|
+
def save(self, *args, **kwargs):
|
|
3315
|
+
if self.language:
|
|
3316
|
+
self.language = normalize_report_language(self.language)
|
|
3317
|
+
self.title = self.normalize_title(self.title)
|
|
3318
|
+
super().save(*args, **kwargs)
|
|
3319
|
+
|
|
3320
|
+
@property
|
|
3321
|
+
def periodicity_label(self) -> str:
|
|
3322
|
+
if self.schedule:
|
|
3323
|
+
return self.schedule.get_periodicity_display()
|
|
3324
|
+
return ClientReportSchedule.label_for_periodicity(
|
|
3325
|
+
ClientReportSchedule.PERIODICITY_NONE
|
|
3326
|
+
)
|
|
3327
|
+
|
|
3328
|
+
@property
|
|
3329
|
+
def total_kw_period(self) -> float:
|
|
3330
|
+
totals = (self.rows_for_display or {}).get("totals", {})
|
|
3331
|
+
return float(totals.get("total_kw_period", 0.0) or 0.0)
|
|
3332
|
+
|
|
2651
3333
|
@classmethod
|
|
2652
3334
|
def generate(
|
|
2653
3335
|
cls,
|
|
@@ -2658,17 +3340,36 @@ class ClientReport(Entity):
|
|
|
2658
3340
|
schedule=None,
|
|
2659
3341
|
recipients: list[str] | None = None,
|
|
2660
3342
|
disable_emails: bool = False,
|
|
3343
|
+
chargers=None,
|
|
3344
|
+
language: str | None = None,
|
|
3345
|
+
title: str | None = None,
|
|
2661
3346
|
):
|
|
2662
|
-
|
|
2663
|
-
|
|
3347
|
+
from collections.abc import Iterable as _Iterable
|
|
3348
|
+
|
|
3349
|
+
charger_list = []
|
|
3350
|
+
if chargers:
|
|
3351
|
+
if isinstance(chargers, _Iterable):
|
|
3352
|
+
charger_list = list(chargers)
|
|
3353
|
+
else:
|
|
3354
|
+
charger_list = [chargers]
|
|
3355
|
+
|
|
3356
|
+
payload = cls.build_rows(start_date, end_date, chargers=charger_list)
|
|
3357
|
+
normalized_language = cls.normalize_language(language)
|
|
3358
|
+
title_value = cls.normalize_title(title)
|
|
3359
|
+
report = cls.objects.create(
|
|
2664
3360
|
start_date=start_date,
|
|
2665
3361
|
end_date=end_date,
|
|
2666
|
-
data=
|
|
3362
|
+
data=payload,
|
|
2667
3363
|
owner=owner,
|
|
2668
3364
|
schedule=schedule,
|
|
2669
3365
|
recipients=list(recipients or []),
|
|
2670
3366
|
disable_emails=disable_emails,
|
|
3367
|
+
language=normalized_language,
|
|
3368
|
+
title=title_value,
|
|
2671
3369
|
)
|
|
3370
|
+
if charger_list:
|
|
3371
|
+
report.chargers.set(charger_list)
|
|
3372
|
+
return report
|
|
2672
3373
|
|
|
2673
3374
|
def store_local_copy(self, html: str | None = None):
|
|
2674
3375
|
"""Persist the report data and optional HTML rendering to disk."""
|
|
@@ -2682,9 +3383,16 @@ class ClientReport(Entity):
|
|
|
2682
3383
|
timestamp = timezone.now().strftime("%Y%m%d%H%M%S")
|
|
2683
3384
|
identifier = f"client_report_{self.pk}_{timestamp}"
|
|
2684
3385
|
|
|
2685
|
-
|
|
2686
|
-
|
|
2687
|
-
|
|
3386
|
+
language_code = self.normalize_language(self.language)
|
|
3387
|
+
context = {
|
|
3388
|
+
"report": self,
|
|
3389
|
+
"language_code": language_code,
|
|
3390
|
+
"default_language": type(self).default_language(),
|
|
3391
|
+
}
|
|
3392
|
+
with override(language_code):
|
|
3393
|
+
html_content = html or render_to_string(
|
|
3394
|
+
"core/reports/client_report_email.html", context
|
|
3395
|
+
)
|
|
2688
3396
|
html_path = report_dir / f"{identifier}.html"
|
|
2689
3397
|
html_path.write_text(html_content, encoding="utf-8")
|
|
2690
3398
|
|
|
@@ -2693,15 +3401,13 @@ class ClientReport(Entity):
|
|
|
2693
3401
|
_json.dumps(self.data, indent=2, default=str), encoding="utf-8"
|
|
2694
3402
|
)
|
|
2695
3403
|
|
|
2696
|
-
|
|
2697
|
-
|
|
2698
|
-
return str(path.relative_to(base_dir))
|
|
2699
|
-
except ValueError:
|
|
2700
|
-
return str(path)
|
|
3404
|
+
pdf_path = report_dir / f"{identifier}.pdf"
|
|
3405
|
+
self.render_pdf(pdf_path)
|
|
2701
3406
|
|
|
2702
3407
|
export = {
|
|
2703
|
-
"html_path":
|
|
2704
|
-
"json_path":
|
|
3408
|
+
"html_path": ClientReport._relative_to_base(html_path, base_dir),
|
|
3409
|
+
"json_path": ClientReport._relative_to_base(json_path, base_dir),
|
|
3410
|
+
"pdf_path": ClientReport._relative_to_base(pdf_path, base_dir),
|
|
2705
3411
|
}
|
|
2706
3412
|
|
|
2707
3413
|
updated = dict(self.data)
|
|
@@ -2710,27 +3416,122 @@ class ClientReport(Entity):
|
|
|
2710
3416
|
self.data = updated
|
|
2711
3417
|
return export, html_content
|
|
2712
3418
|
|
|
3419
|
+
def send_delivery(
|
|
3420
|
+
self,
|
|
3421
|
+
*,
|
|
3422
|
+
to: list[str] | tuple[str, ...],
|
|
3423
|
+
cc: list[str] | tuple[str, ...] | None = None,
|
|
3424
|
+
outbox=None,
|
|
3425
|
+
reply_to: list[str] | None = None,
|
|
3426
|
+
) -> list[str]:
|
|
3427
|
+
from core import mailer
|
|
3428
|
+
|
|
3429
|
+
recipients = list(to or [])
|
|
3430
|
+
if not recipients:
|
|
3431
|
+
return []
|
|
3432
|
+
|
|
3433
|
+
pdf_path = self.ensure_pdf()
|
|
3434
|
+
attachments = [
|
|
3435
|
+
(pdf_path.name, pdf_path.read_bytes(), "application/pdf"),
|
|
3436
|
+
]
|
|
3437
|
+
|
|
3438
|
+
language_code = self.normalize_language(self.language)
|
|
3439
|
+
with override(language_code):
|
|
3440
|
+
totals = self.rows_for_display.get("totals", {})
|
|
3441
|
+
start_display = formats.date_format(
|
|
3442
|
+
self.start_date, format="DATE_FORMAT", use_l10n=True
|
|
3443
|
+
)
|
|
3444
|
+
end_display = formats.date_format(
|
|
3445
|
+
self.end_date, format="DATE_FORMAT", use_l10n=True
|
|
3446
|
+
)
|
|
3447
|
+
total_kw_period_label = gettext("Total kW during period")
|
|
3448
|
+
total_kw_all_label = gettext("Total kW (all time)")
|
|
3449
|
+
report_title = self.normalize_title(self.title) or gettext(
|
|
3450
|
+
"Consumer Report"
|
|
3451
|
+
)
|
|
3452
|
+
body_lines = [
|
|
3453
|
+
gettext("%(title)s for %(start)s through %(end)s.")
|
|
3454
|
+
% {"title": report_title, "start": start_display, "end": end_display},
|
|
3455
|
+
f"{total_kw_period_label}: "
|
|
3456
|
+
f"{formats.number_format(totals.get('total_kw_period', 0.0), decimal_pos=2, use_l10n=True)}.",
|
|
3457
|
+
f"{total_kw_all_label}: "
|
|
3458
|
+
f"{formats.number_format(totals.get('total_kw', 0.0), decimal_pos=2, use_l10n=True)}.",
|
|
3459
|
+
]
|
|
3460
|
+
message = "\n".join(body_lines)
|
|
3461
|
+
subject = gettext("%(title)s %(start)s - %(end)s") % {
|
|
3462
|
+
"title": report_title,
|
|
3463
|
+
"start": start_display,
|
|
3464
|
+
"end": end_display,
|
|
3465
|
+
}
|
|
3466
|
+
|
|
3467
|
+
kwargs = {}
|
|
3468
|
+
if reply_to:
|
|
3469
|
+
kwargs["reply_to"] = reply_to
|
|
3470
|
+
|
|
3471
|
+
mailer.send(
|
|
3472
|
+
subject,
|
|
3473
|
+
message,
|
|
3474
|
+
recipients,
|
|
3475
|
+
outbox=outbox,
|
|
3476
|
+
cc=list(cc or []),
|
|
3477
|
+
attachments=attachments,
|
|
3478
|
+
**kwargs,
|
|
3479
|
+
)
|
|
3480
|
+
|
|
3481
|
+
delivered = list(dict.fromkeys(recipients + list(cc or [])))
|
|
3482
|
+
return delivered
|
|
3483
|
+
|
|
3484
|
+
@staticmethod
|
|
3485
|
+
def build_rows(
|
|
3486
|
+
start_date=None,
|
|
3487
|
+
end_date=None,
|
|
3488
|
+
*,
|
|
3489
|
+
for_display: bool = False,
|
|
3490
|
+
chargers=None,
|
|
3491
|
+
):
|
|
3492
|
+
dataset = ClientReport._build_dataset(start_date, end_date, chargers=chargers)
|
|
3493
|
+
if for_display:
|
|
3494
|
+
return ClientReport._normalize_dataset_for_display(dataset)
|
|
3495
|
+
return dataset
|
|
3496
|
+
|
|
2713
3497
|
@staticmethod
|
|
2714
|
-
def
|
|
2715
|
-
from
|
|
3498
|
+
def _build_dataset(start_date=None, end_date=None, *, chargers=None):
|
|
3499
|
+
from datetime import datetime, time, timedelta, timezone as pytimezone
|
|
3500
|
+
from ocpp.models import (
|
|
3501
|
+
Charger,
|
|
3502
|
+
Transaction,
|
|
3503
|
+
annotate_transaction_energy_bounds,
|
|
3504
|
+
)
|
|
2716
3505
|
|
|
2717
|
-
qs = Transaction.objects.
|
|
2718
|
-
if start_date:
|
|
2719
|
-
from datetime import datetime, time, timedelta, timezone as pytimezone
|
|
3506
|
+
qs = Transaction.objects.all()
|
|
2720
3507
|
|
|
3508
|
+
start_dt = None
|
|
3509
|
+
end_dt = None
|
|
3510
|
+
if start_date:
|
|
2721
3511
|
start_dt = datetime.combine(start_date, time.min, tzinfo=pytimezone.utc)
|
|
2722
3512
|
qs = qs.filter(start_time__gte=start_dt)
|
|
2723
3513
|
if end_date:
|
|
2724
|
-
from datetime import datetime, time, timedelta, timezone as pytimezone
|
|
2725
|
-
|
|
2726
3514
|
end_dt = datetime.combine(
|
|
2727
3515
|
end_date + timedelta(days=1), time.min, tzinfo=pytimezone.utc
|
|
2728
3516
|
)
|
|
2729
3517
|
qs = qs.filter(start_time__lt=end_dt)
|
|
2730
3518
|
|
|
2731
|
-
|
|
2732
|
-
|
|
3519
|
+
selected_base_ids = None
|
|
3520
|
+
if chargers:
|
|
3521
|
+
selected_base_ids = {
|
|
3522
|
+
charger.charger_id for charger in chargers if charger.charger_id
|
|
3523
|
+
}
|
|
3524
|
+
if selected_base_ids:
|
|
3525
|
+
qs = qs.filter(charger__charger_id__in=selected_base_ids)
|
|
3526
|
+
|
|
3527
|
+
qs = qs.select_related("account", "charger")
|
|
3528
|
+
qs = annotate_transaction_energy_bounds(
|
|
3529
|
+
qs,
|
|
3530
|
+
start_field="report_meter_energy_start",
|
|
3531
|
+
end_field="report_meter_energy_end",
|
|
2733
3532
|
)
|
|
3533
|
+
transactions = list(qs.order_by("start_time", "pk"))
|
|
3534
|
+
|
|
2734
3535
|
rfid_values = {tx.rfid for tx in transactions if tx.rfid}
|
|
2735
3536
|
tag_map: dict[str, RFID] = {}
|
|
2736
3537
|
if rfid_values:
|
|
@@ -2741,51 +3542,283 @@ class ClientReport(Entity):
|
|
|
2741
3542
|
)
|
|
2742
3543
|
}
|
|
2743
3544
|
|
|
2744
|
-
|
|
3545
|
+
charger_ids = {
|
|
3546
|
+
tx.charger.charger_id
|
|
3547
|
+
for tx in transactions
|
|
3548
|
+
if getattr(tx, "charger", None) and tx.charger.charger_id
|
|
3549
|
+
}
|
|
3550
|
+
aggregator_map: dict[str, Charger] = {}
|
|
3551
|
+
if charger_ids:
|
|
3552
|
+
aggregator_map = {
|
|
3553
|
+
charger.charger_id: charger
|
|
3554
|
+
for charger in Charger.objects.filter(
|
|
3555
|
+
charger_id__in=charger_ids, connector_id__isnull=True
|
|
3556
|
+
)
|
|
3557
|
+
}
|
|
3558
|
+
|
|
3559
|
+
groups: dict[str, dict[str, Any]] = {}
|
|
2745
3560
|
for tx in transactions:
|
|
2746
|
-
|
|
2747
|
-
if
|
|
3561
|
+
charger = getattr(tx, "charger", None)
|
|
3562
|
+
if charger is None:
|
|
3563
|
+
continue
|
|
3564
|
+
base_id = charger.charger_id
|
|
3565
|
+
if selected_base_ids is not None and base_id not in selected_base_ids:
|
|
2748
3566
|
continue
|
|
3567
|
+
aggregator = aggregator_map.get(base_id) or charger
|
|
3568
|
+
entry = groups.setdefault(
|
|
3569
|
+
base_id,
|
|
3570
|
+
{"charger": aggregator, "transactions": []},
|
|
3571
|
+
)
|
|
3572
|
+
entry["transactions"].append(tx)
|
|
3573
|
+
|
|
3574
|
+
evcs_entries: list[dict[str, Any]] = []
|
|
3575
|
+
total_all_time = 0.0
|
|
3576
|
+
total_period = 0.0
|
|
3577
|
+
|
|
3578
|
+
def _sort_key(tx):
|
|
3579
|
+
anchor = getattr(tx, "start_time", None)
|
|
3580
|
+
if anchor is None:
|
|
3581
|
+
anchor = datetime.min.replace(tzinfo=pytimezone.utc)
|
|
3582
|
+
return (anchor, tx.pk or 0)
|
|
3583
|
+
|
|
3584
|
+
for base_id, info in sorted(groups.items(), key=lambda item: item[0]):
|
|
3585
|
+
aggregator = info["charger"]
|
|
3586
|
+
txs = sorted(info["transactions"], key=_sort_key)
|
|
3587
|
+
total_kw_all = float(getattr(aggregator, "total_kw", 0.0) or 0.0)
|
|
3588
|
+
total_kw_period = 0.0
|
|
3589
|
+
if hasattr(aggregator, "total_kw_for_range"):
|
|
3590
|
+
total_kw_period = float(
|
|
3591
|
+
aggregator.total_kw_for_range(start=start_dt, end=end_dt) or 0.0
|
|
3592
|
+
)
|
|
3593
|
+
total_all_time += total_kw_all
|
|
3594
|
+
total_period += total_kw_period
|
|
2749
3595
|
|
|
2750
|
-
|
|
2751
|
-
|
|
2752
|
-
|
|
2753
|
-
|
|
2754
|
-
|
|
2755
|
-
if tag:
|
|
2756
|
-
account = next(iter(tag.energy_accounts.all()), None)
|
|
2757
|
-
if account:
|
|
2758
|
-
subject = account.name
|
|
2759
|
-
else:
|
|
2760
|
-
subject = str(tag.label_id)
|
|
3596
|
+
session_rows: list[dict[str, Any]] = []
|
|
3597
|
+
for tx in txs:
|
|
3598
|
+
session_kw = float(getattr(tx, "kw", 0.0) or 0.0)
|
|
3599
|
+
if session_kw <= 0:
|
|
3600
|
+
continue
|
|
2761
3601
|
|
|
2762
|
-
|
|
2763
|
-
subject = tx.rfid
|
|
3602
|
+
start_kwh, end_kwh = ClientReport._resolve_meter_bounds(tx)
|
|
2764
3603
|
|
|
2765
|
-
|
|
2766
|
-
|
|
2767
|
-
|
|
2768
|
-
|
|
2769
|
-
|
|
3604
|
+
connector_number = (
|
|
3605
|
+
tx.connector_id
|
|
3606
|
+
if getattr(tx, "connector_id", None) is not None
|
|
3607
|
+
else getattr(getattr(tx, "charger", None), "connector_id", None)
|
|
3608
|
+
)
|
|
3609
|
+
|
|
3610
|
+
rfid_value = (tx.rfid or "").strip()
|
|
3611
|
+
tag = tag_map.get(rfid_value)
|
|
3612
|
+
label = None
|
|
3613
|
+
account_name = (
|
|
3614
|
+
tx.account.name
|
|
3615
|
+
if tx.account and getattr(tx.account, "name", None)
|
|
3616
|
+
else None
|
|
3617
|
+
)
|
|
3618
|
+
if tag:
|
|
3619
|
+
label = tag.custom_label or str(tag.label_id)
|
|
3620
|
+
if not account_name:
|
|
3621
|
+
account = next(iter(tag.energy_accounts.all()), None)
|
|
3622
|
+
if account and getattr(account, "name", None):
|
|
3623
|
+
account_name = account.name
|
|
3624
|
+
elif rfid_value:
|
|
3625
|
+
label = rfid_value
|
|
3626
|
+
|
|
3627
|
+
session_rows.append(
|
|
3628
|
+
{
|
|
3629
|
+
"connector": connector_number,
|
|
3630
|
+
"rfid_label": label,
|
|
3631
|
+
"account_name": account_name,
|
|
3632
|
+
"start_kwh": start_kwh,
|
|
3633
|
+
"end_kwh": end_kwh,
|
|
3634
|
+
"session_kwh": session_kw,
|
|
3635
|
+
"start": tx.start_time.isoformat()
|
|
3636
|
+
if getattr(tx, "start_time", None)
|
|
3637
|
+
else None,
|
|
3638
|
+
"end": tx.stop_time.isoformat()
|
|
3639
|
+
if getattr(tx, "stop_time", None)
|
|
3640
|
+
else None,
|
|
3641
|
+
}
|
|
3642
|
+
)
|
|
2770
3643
|
|
|
2771
|
-
|
|
3644
|
+
evcs_entries.append(
|
|
2772
3645
|
{
|
|
2773
|
-
"
|
|
2774
|
-
"
|
|
2775
|
-
"
|
|
2776
|
-
|
|
2777
|
-
|
|
3646
|
+
"charger_id": aggregator.pk,
|
|
3647
|
+
"serial_number": aggregator.charger_id,
|
|
3648
|
+
"display_name": aggregator.display_name
|
|
3649
|
+
or aggregator.name
|
|
3650
|
+
or aggregator.charger_id,
|
|
3651
|
+
"total_kw": total_kw_all,
|
|
3652
|
+
"total_kw_period": total_kw_period,
|
|
3653
|
+
"transactions": session_rows,
|
|
2778
3654
|
}
|
|
2779
3655
|
)
|
|
2780
3656
|
|
|
2781
|
-
|
|
3657
|
+
filters: dict[str, Any] = {}
|
|
3658
|
+
if selected_base_ids:
|
|
3659
|
+
filters["chargers"] = sorted(selected_base_ids)
|
|
2782
3660
|
|
|
2783
|
-
|
|
2784
|
-
|
|
2785
|
-
|
|
2786
|
-
|
|
3661
|
+
return {
|
|
3662
|
+
"schema": "evcs-session/v1",
|
|
3663
|
+
"evcs": evcs_entries,
|
|
3664
|
+
"totals": {
|
|
3665
|
+
"total_kw": total_all_time,
|
|
3666
|
+
"total_kw_period": total_period,
|
|
3667
|
+
},
|
|
3668
|
+
"filters": filters,
|
|
3669
|
+
}
|
|
3670
|
+
|
|
3671
|
+
@staticmethod
|
|
3672
|
+
def _resolve_meter_bounds(tx) -> tuple[float | None, float | None]:
|
|
3673
|
+
def _convert(value):
|
|
3674
|
+
if value in {None, ""}:
|
|
3675
|
+
return None
|
|
3676
|
+
try:
|
|
3677
|
+
return float(value) / 1000.0
|
|
3678
|
+
except (TypeError, ValueError):
|
|
3679
|
+
return None
|
|
3680
|
+
|
|
3681
|
+
start_value = _convert(getattr(tx, "meter_start", None))
|
|
3682
|
+
end_value = _convert(getattr(tx, "meter_stop", None))
|
|
3683
|
+
|
|
3684
|
+
def _coerce_energy(value):
|
|
3685
|
+
if value in {None, ""}:
|
|
3686
|
+
return None
|
|
3687
|
+
try:
|
|
3688
|
+
return float(value)
|
|
3689
|
+
except (TypeError, ValueError):
|
|
3690
|
+
return None
|
|
3691
|
+
|
|
3692
|
+
if start_value is None:
|
|
3693
|
+
annotated_start = getattr(tx, "report_meter_energy_start", None)
|
|
3694
|
+
start_value = _coerce_energy(annotated_start)
|
|
3695
|
+
|
|
3696
|
+
if end_value is None:
|
|
3697
|
+
annotated_end = getattr(tx, "report_meter_energy_end", None)
|
|
3698
|
+
end_value = _coerce_energy(annotated_end)
|
|
3699
|
+
|
|
3700
|
+
if start_value is None or end_value is None:
|
|
3701
|
+
readings_manager = getattr(tx, "meter_values", None)
|
|
3702
|
+
if readings_manager is not None:
|
|
3703
|
+
qs = readings_manager.filter(energy__isnull=False).order_by("timestamp")
|
|
3704
|
+
if start_value is None:
|
|
3705
|
+
first_energy = qs.values_list("energy", flat=True).first()
|
|
3706
|
+
start_value = _coerce_energy(first_energy)
|
|
3707
|
+
if end_value is None:
|
|
3708
|
+
last_energy = qs.order_by("-timestamp").values_list(
|
|
3709
|
+
"energy", flat=True
|
|
3710
|
+
).first()
|
|
3711
|
+
end_value = _coerce_energy(last_energy)
|
|
3712
|
+
|
|
3713
|
+
return start_value, end_value
|
|
3714
|
+
|
|
3715
|
+
@staticmethod
|
|
3716
|
+
def _format_session_datetime(value):
|
|
3717
|
+
if not value:
|
|
3718
|
+
return None
|
|
3719
|
+
localized = timezone.localtime(value)
|
|
3720
|
+
date_part = formats.date_format(
|
|
3721
|
+
localized, format="MONTH_DAY_FORMAT", use_l10n=True
|
|
3722
|
+
)
|
|
3723
|
+
time_part = formats.time_format(
|
|
3724
|
+
localized, format="TIME_FORMAT", use_l10n=True
|
|
3725
|
+
)
|
|
3726
|
+
return gettext("%(date)s, %(time)s") % {
|
|
3727
|
+
"date": date_part,
|
|
3728
|
+
"time": time_part,
|
|
3729
|
+
}
|
|
3730
|
+
|
|
3731
|
+
@staticmethod
|
|
3732
|
+
def _calculate_duration_minutes(start, end):
|
|
3733
|
+
if not start or not end:
|
|
3734
|
+
return None
|
|
3735
|
+
total_seconds = (end - start).total_seconds()
|
|
3736
|
+
if total_seconds < 0:
|
|
3737
|
+
return None
|
|
3738
|
+
return int(round(total_seconds / 60.0))
|
|
3739
|
+
|
|
3740
|
+
@staticmethod
|
|
3741
|
+
def _normalize_dataset_for_display(dataset: dict[str, Any]):
|
|
3742
|
+
schema = dataset.get("schema")
|
|
3743
|
+
if schema == "evcs-session/v1":
|
|
3744
|
+
from datetime import datetime
|
|
3745
|
+
|
|
3746
|
+
evcs_entries: list[dict[str, Any]] = []
|
|
3747
|
+
for entry in dataset.get("evcs", []):
|
|
3748
|
+
normalized_rows: list[dict[str, Any]] = []
|
|
3749
|
+
for row in entry.get("transactions", []):
|
|
3750
|
+
start_val = row.get("start")
|
|
3751
|
+
end_val = row.get("end")
|
|
3752
|
+
|
|
3753
|
+
start_dt = None
|
|
3754
|
+
if start_val:
|
|
3755
|
+
start_dt = parse_datetime(start_val)
|
|
3756
|
+
if start_dt and timezone.is_naive(start_dt):
|
|
3757
|
+
start_dt = timezone.make_aware(start_dt, timezone.utc)
|
|
3758
|
+
|
|
3759
|
+
end_dt = None
|
|
3760
|
+
if end_val:
|
|
3761
|
+
end_dt = parse_datetime(end_val)
|
|
3762
|
+
if end_dt and timezone.is_naive(end_dt):
|
|
3763
|
+
end_dt = timezone.make_aware(end_dt, timezone.utc)
|
|
3764
|
+
|
|
3765
|
+
normalized_rows.append(
|
|
3766
|
+
{
|
|
3767
|
+
"connector": row.get("connector"),
|
|
3768
|
+
"rfid_label": row.get("rfid_label"),
|
|
3769
|
+
"account_name": row.get("account_name"),
|
|
3770
|
+
"start_kwh": row.get("start_kwh"),
|
|
3771
|
+
"end_kwh": row.get("end_kwh"),
|
|
3772
|
+
"session_kwh": row.get("session_kwh"),
|
|
3773
|
+
"start": start_dt,
|
|
3774
|
+
"end": end_dt,
|
|
3775
|
+
"start_display": ClientReport._format_session_datetime(
|
|
3776
|
+
start_dt
|
|
3777
|
+
),
|
|
3778
|
+
"end_display": ClientReport._format_session_datetime(
|
|
3779
|
+
end_dt
|
|
3780
|
+
),
|
|
3781
|
+
"duration_minutes": ClientReport._calculate_duration_minutes(
|
|
3782
|
+
start_dt, end_dt
|
|
3783
|
+
),
|
|
3784
|
+
}
|
|
3785
|
+
)
|
|
3786
|
+
|
|
3787
|
+
normalized_rows.sort(
|
|
3788
|
+
key=lambda item: (
|
|
3789
|
+
item["start"]
|
|
3790
|
+
if item["start"] is not None
|
|
3791
|
+
else datetime.min.replace(tzinfo=timezone.utc),
|
|
3792
|
+
item.get("connector") or 0,
|
|
3793
|
+
)
|
|
3794
|
+
)
|
|
3795
|
+
|
|
3796
|
+
evcs_entries.append(
|
|
3797
|
+
{
|
|
3798
|
+
"display_name": entry.get("display_name")
|
|
3799
|
+
or entry.get("serial_number")
|
|
3800
|
+
or "Charge Point",
|
|
3801
|
+
"serial_number": entry.get("serial_number"),
|
|
3802
|
+
"total_kw": entry.get("total_kw", 0.0),
|
|
3803
|
+
"total_kw_period": entry.get("total_kw_period", 0.0),
|
|
3804
|
+
"transactions": normalized_rows,
|
|
3805
|
+
}
|
|
3806
|
+
)
|
|
3807
|
+
|
|
3808
|
+
totals = dataset.get("totals", {})
|
|
3809
|
+
return {
|
|
3810
|
+
"schema": schema,
|
|
3811
|
+
"evcs": evcs_entries,
|
|
3812
|
+
"totals": {
|
|
3813
|
+
"total_kw": totals.get("total_kw", 0.0),
|
|
3814
|
+
"total_kw_period": totals.get("total_kw_period", 0.0),
|
|
3815
|
+
},
|
|
3816
|
+
"filters": dataset.get("filters", {}),
|
|
3817
|
+
}
|
|
3818
|
+
|
|
3819
|
+
if schema == "session-list/v1":
|
|
2787
3820
|
parsed: list[dict[str, Any]] = []
|
|
2788
|
-
for row in rows:
|
|
3821
|
+
for row in dataset.get("rows", []):
|
|
2789
3822
|
item = dict(row)
|
|
2790
3823
|
start_val = row.get("start")
|
|
2791
3824
|
end_val = row.get("end")
|
|
@@ -2796,6 +3829,7 @@ class ClientReport(Entity):
|
|
|
2796
3829
|
start_dt = timezone.make_aware(start_dt, timezone.utc)
|
|
2797
3830
|
item["start"] = start_dt
|
|
2798
3831
|
else:
|
|
3832
|
+
start_dt = None
|
|
2799
3833
|
item["start"] = None
|
|
2800
3834
|
|
|
2801
3835
|
if end_val:
|
|
@@ -2804,11 +3838,377 @@ class ClientReport(Entity):
|
|
|
2804
3838
|
end_dt = timezone.make_aware(end_dt, timezone.utc)
|
|
2805
3839
|
item["end"] = end_dt
|
|
2806
3840
|
else:
|
|
3841
|
+
end_dt = None
|
|
2807
3842
|
item["end"] = None
|
|
2808
3843
|
|
|
3844
|
+
item["start_display"] = ClientReport._format_session_datetime(start_dt)
|
|
3845
|
+
item["end_display"] = ClientReport._format_session_datetime(end_dt)
|
|
3846
|
+
item["duration_minutes"] = ClientReport._calculate_duration_minutes(
|
|
3847
|
+
start_dt, end_dt
|
|
3848
|
+
)
|
|
3849
|
+
|
|
2809
3850
|
parsed.append(item)
|
|
2810
|
-
|
|
2811
|
-
|
|
3851
|
+
|
|
3852
|
+
return {"schema": schema, "rows": parsed}
|
|
3853
|
+
|
|
3854
|
+
return {
|
|
3855
|
+
"schema": schema,
|
|
3856
|
+
"rows": dataset.get("rows", []),
|
|
3857
|
+
"filters": dataset.get("filters", {}),
|
|
3858
|
+
}
|
|
3859
|
+
|
|
3860
|
+
@property
|
|
3861
|
+
def rows_for_display(self):
|
|
3862
|
+
data = self.data or {}
|
|
3863
|
+
return ClientReport._normalize_dataset_for_display(data)
|
|
3864
|
+
|
|
3865
|
+
@staticmethod
|
|
3866
|
+
def _relative_to_base(path: Path, base_dir: Path) -> str:
|
|
3867
|
+
try:
|
|
3868
|
+
return str(path.relative_to(base_dir))
|
|
3869
|
+
except ValueError:
|
|
3870
|
+
return str(path)
|
|
3871
|
+
|
|
3872
|
+
@classmethod
|
|
3873
|
+
def _load_pdf_template(cls, language_code: str | None) -> dict[str, str]:
|
|
3874
|
+
from django.template import TemplateDoesNotExist
|
|
3875
|
+
from django.template.loader import render_to_string
|
|
3876
|
+
|
|
3877
|
+
candidates: list[str] = []
|
|
3878
|
+
normalized = cls.normalize_language(language_code)
|
|
3879
|
+
if normalized:
|
|
3880
|
+
candidates.append(normalized)
|
|
3881
|
+
|
|
3882
|
+
default_code = default_report_language()
|
|
3883
|
+
if default_code and default_code not in candidates:
|
|
3884
|
+
candidates.append(default_code)
|
|
3885
|
+
|
|
3886
|
+
if "en" not in candidates:
|
|
3887
|
+
candidates.append("en")
|
|
3888
|
+
|
|
3889
|
+
for code in dict.fromkeys(candidates):
|
|
3890
|
+
template_name = f"core/reports/client_report_pdf/{code}.json"
|
|
3891
|
+
try:
|
|
3892
|
+
rendered = render_to_string(template_name)
|
|
3893
|
+
except TemplateDoesNotExist:
|
|
3894
|
+
continue
|
|
3895
|
+
if not rendered:
|
|
3896
|
+
continue
|
|
3897
|
+
try:
|
|
3898
|
+
data = json.loads(rendered)
|
|
3899
|
+
except json.JSONDecodeError:
|
|
3900
|
+
logger.warning(
|
|
3901
|
+
"Invalid client report PDF template %s", template_name, exc_info=True
|
|
3902
|
+
)
|
|
3903
|
+
continue
|
|
3904
|
+
if isinstance(data, dict):
|
|
3905
|
+
return data
|
|
3906
|
+
|
|
3907
|
+
return {}
|
|
3908
|
+
|
|
3909
|
+
@staticmethod
|
|
3910
|
+
def resolve_reply_to_for_owner(owner) -> list[str]:
|
|
3911
|
+
if not owner:
|
|
3912
|
+
return []
|
|
3913
|
+
try:
|
|
3914
|
+
inbox = owner.get_profile(EmailInbox)
|
|
3915
|
+
except Exception: # pragma: no cover - defensive catch
|
|
3916
|
+
inbox = None
|
|
3917
|
+
if inbox and getattr(inbox, "username", ""):
|
|
3918
|
+
address = inbox.username.strip()
|
|
3919
|
+
if address:
|
|
3920
|
+
return [address]
|
|
3921
|
+
return []
|
|
3922
|
+
|
|
3923
|
+
@staticmethod
|
|
3924
|
+
def resolve_outbox_for_owner(owner):
|
|
3925
|
+
from nodes.models import EmailOutbox, Node
|
|
3926
|
+
|
|
3927
|
+
if owner:
|
|
3928
|
+
try:
|
|
3929
|
+
outbox = owner.get_profile(EmailOutbox)
|
|
3930
|
+
except Exception: # pragma: no cover - defensive catch
|
|
3931
|
+
outbox = None
|
|
3932
|
+
if outbox:
|
|
3933
|
+
return outbox
|
|
3934
|
+
|
|
3935
|
+
node = Node.get_local()
|
|
3936
|
+
if node:
|
|
3937
|
+
return getattr(node, "email_outbox", None)
|
|
3938
|
+
return None
|
|
3939
|
+
|
|
3940
|
+
def render_pdf(self, target: Path):
|
|
3941
|
+
from reportlab.lib import colors
|
|
3942
|
+
from reportlab.lib.pagesizes import landscape, letter
|
|
3943
|
+
from reportlab.lib.styles import getSampleStyleSheet
|
|
3944
|
+
from reportlab.lib.units import inch
|
|
3945
|
+
from reportlab.platypus import (
|
|
3946
|
+
Paragraph,
|
|
3947
|
+
SimpleDocTemplate,
|
|
3948
|
+
Spacer,
|
|
3949
|
+
Table,
|
|
3950
|
+
TableStyle,
|
|
3951
|
+
)
|
|
3952
|
+
|
|
3953
|
+
target_path = Path(target)
|
|
3954
|
+
target_path.parent.mkdir(parents=True, exist_ok=True)
|
|
3955
|
+
|
|
3956
|
+
dataset = self.rows_for_display
|
|
3957
|
+
schema = dataset.get("schema")
|
|
3958
|
+
|
|
3959
|
+
language_code = self.normalize_language(self.language)
|
|
3960
|
+
with override(language_code):
|
|
3961
|
+
styles = getSampleStyleSheet()
|
|
3962
|
+
title_style = styles["Title"]
|
|
3963
|
+
subtitle_style = styles["Heading2"]
|
|
3964
|
+
normal_style = styles["BodyText"]
|
|
3965
|
+
emphasis_style = styles["Heading3"]
|
|
3966
|
+
|
|
3967
|
+
document = SimpleDocTemplate(
|
|
3968
|
+
str(target_path),
|
|
3969
|
+
pagesize=landscape(letter),
|
|
3970
|
+
leftMargin=0.5 * inch,
|
|
3971
|
+
rightMargin=0.5 * inch,
|
|
3972
|
+
topMargin=0.6 * inch,
|
|
3973
|
+
bottomMargin=0.5 * inch,
|
|
3974
|
+
)
|
|
3975
|
+
|
|
3976
|
+
story: list = []
|
|
3977
|
+
labels = self._load_pdf_template(language_code)
|
|
3978
|
+
|
|
3979
|
+
def label(key: str, default: str) -> str:
|
|
3980
|
+
value = labels.get(key) if isinstance(labels, dict) else None
|
|
3981
|
+
if isinstance(value, str) and value.strip():
|
|
3982
|
+
return value
|
|
3983
|
+
return gettext(default)
|
|
3984
|
+
|
|
3985
|
+
report_title = self.normalize_title(self.title) or label(
|
|
3986
|
+
"title", "Consumer Report"
|
|
3987
|
+
)
|
|
3988
|
+
story.append(Paragraph(report_title, title_style))
|
|
3989
|
+
|
|
3990
|
+
start_display = formats.date_format(
|
|
3991
|
+
self.start_date, format="DATE_FORMAT", use_l10n=True
|
|
3992
|
+
)
|
|
3993
|
+
end_display = formats.date_format(
|
|
3994
|
+
self.end_date, format="DATE_FORMAT", use_l10n=True
|
|
3995
|
+
)
|
|
3996
|
+
default_period_text = gettext("Period: %(start)s to %(end)s") % {
|
|
3997
|
+
"start": start_display,
|
|
3998
|
+
"end": end_display,
|
|
3999
|
+
}
|
|
4000
|
+
period_template = labels.get("period") if isinstance(labels, dict) else None
|
|
4001
|
+
if isinstance(period_template, str):
|
|
4002
|
+
try:
|
|
4003
|
+
period_text = period_template.format(
|
|
4004
|
+
start=start_display, end=end_display
|
|
4005
|
+
)
|
|
4006
|
+
except (KeyError, IndexError, ValueError):
|
|
4007
|
+
logger.warning(
|
|
4008
|
+
"Invalid period template for client report PDF: %s",
|
|
4009
|
+
period_template,
|
|
4010
|
+
)
|
|
4011
|
+
period_text = default_period_text
|
|
4012
|
+
else:
|
|
4013
|
+
period_text = default_period_text
|
|
4014
|
+
story.append(Paragraph(period_text, emphasis_style))
|
|
4015
|
+
story.append(Spacer(1, 0.25 * inch))
|
|
4016
|
+
|
|
4017
|
+
total_kw_all_time_label = label("total_kw_all_time", "Total kW (all time)")
|
|
4018
|
+
total_kw_period_label = label("total_kw_period", "Total kW (period)")
|
|
4019
|
+
connector_label = label("connector", "Connector")
|
|
4020
|
+
account_label = label("account", "Account")
|
|
4021
|
+
session_kwh_label = label("session_kwh", "Session kW")
|
|
4022
|
+
session_start_label = label("session_start", "Session start")
|
|
4023
|
+
session_end_label = label("session_end", "Session end")
|
|
4024
|
+
time_label = label("time", "Time")
|
|
4025
|
+
rfid_label = label("rfid_label", "RFID label")
|
|
4026
|
+
no_sessions_period = label(
|
|
4027
|
+
"no_sessions_period",
|
|
4028
|
+
"No charging sessions recorded for the selected period.",
|
|
4029
|
+
)
|
|
4030
|
+
no_sessions_point = label(
|
|
4031
|
+
"no_sessions_point",
|
|
4032
|
+
"No charging sessions recorded for this charge point.",
|
|
4033
|
+
)
|
|
4034
|
+
no_structured_data = label(
|
|
4035
|
+
"no_structured_data",
|
|
4036
|
+
"No structured data is available for this report.",
|
|
4037
|
+
)
|
|
4038
|
+
report_totals_label = label("report_totals", "Report totals")
|
|
4039
|
+
total_kw_period_line = label(
|
|
4040
|
+
"total_kw_period_line", "Total kW during period"
|
|
4041
|
+
)
|
|
4042
|
+
charge_point_label = label("charge_point", "Charge Point")
|
|
4043
|
+
serial_template = (
|
|
4044
|
+
labels.get("charge_point_serial")
|
|
4045
|
+
if isinstance(labels, dict)
|
|
4046
|
+
else None
|
|
4047
|
+
)
|
|
4048
|
+
|
|
4049
|
+
def format_datetime(value):
|
|
4050
|
+
if not value:
|
|
4051
|
+
return "—"
|
|
4052
|
+
return ClientReport._format_session_datetime(value) or "—"
|
|
4053
|
+
|
|
4054
|
+
def format_decimal(value):
|
|
4055
|
+
if value is None:
|
|
4056
|
+
return "—"
|
|
4057
|
+
return formats.number_format(value, decimal_pos=2, use_l10n=True)
|
|
4058
|
+
|
|
4059
|
+
def format_duration(value):
|
|
4060
|
+
if value is None:
|
|
4061
|
+
return "—"
|
|
4062
|
+
return formats.number_format(value, decimal_pos=0, use_l10n=True)
|
|
4063
|
+
|
|
4064
|
+
if schema == "evcs-session/v1":
|
|
4065
|
+
evcs_entries = dataset.get("evcs", [])
|
|
4066
|
+
if not evcs_entries:
|
|
4067
|
+
story.append(Paragraph(no_sessions_period, normal_style))
|
|
4068
|
+
for index, evcs in enumerate(evcs_entries):
|
|
4069
|
+
if index:
|
|
4070
|
+
story.append(Spacer(1, 0.2 * inch))
|
|
4071
|
+
|
|
4072
|
+
display_name = evcs.get("display_name") or charge_point_label
|
|
4073
|
+
serial_number = evcs.get("serial_number")
|
|
4074
|
+
if serial_number:
|
|
4075
|
+
if isinstance(serial_template, str):
|
|
4076
|
+
try:
|
|
4077
|
+
header_text = serial_template.format(
|
|
4078
|
+
name=display_name, serial=serial_number
|
|
4079
|
+
)
|
|
4080
|
+
except (KeyError, IndexError, ValueError):
|
|
4081
|
+
header_text = serial_template
|
|
4082
|
+
else:
|
|
4083
|
+
header_text = gettext("%(name)s (Serial: %(serial)s)") % {
|
|
4084
|
+
"name": display_name,
|
|
4085
|
+
"serial": serial_number,
|
|
4086
|
+
}
|
|
4087
|
+
else:
|
|
4088
|
+
header_text = display_name
|
|
4089
|
+
story.append(Paragraph(header_text, subtitle_style))
|
|
4090
|
+
|
|
4091
|
+
metrics_text = (
|
|
4092
|
+
f"{total_kw_all_time_label}: "
|
|
4093
|
+
f"{format_decimal(evcs.get('total_kw', 0.0))} | "
|
|
4094
|
+
f"{total_kw_period_label}: "
|
|
4095
|
+
f"{format_decimal(evcs.get('total_kw_period', 0.0))}"
|
|
4096
|
+
)
|
|
4097
|
+
story.append(Paragraph(metrics_text, normal_style))
|
|
4098
|
+
story.append(Spacer(1, 0.1 * inch))
|
|
4099
|
+
|
|
4100
|
+
transactions = evcs.get("transactions", [])
|
|
4101
|
+
if transactions:
|
|
4102
|
+
table_data = [
|
|
4103
|
+
[
|
|
4104
|
+
session_kwh_label,
|
|
4105
|
+
session_start_label,
|
|
4106
|
+
session_end_label,
|
|
4107
|
+
time_label,
|
|
4108
|
+
connector_label,
|
|
4109
|
+
rfid_label,
|
|
4110
|
+
account_label,
|
|
4111
|
+
]
|
|
4112
|
+
]
|
|
4113
|
+
|
|
4114
|
+
for row in transactions:
|
|
4115
|
+
start_dt = row.get("start")
|
|
4116
|
+
end_dt = row.get("end")
|
|
4117
|
+
duration_value = row.get("duration_minutes")
|
|
4118
|
+
table_data.append(
|
|
4119
|
+
[
|
|
4120
|
+
format_decimal(row.get("session_kwh")),
|
|
4121
|
+
format_datetime(start_dt),
|
|
4122
|
+
format_datetime(end_dt),
|
|
4123
|
+
format_duration(duration_value),
|
|
4124
|
+
row.get("connector")
|
|
4125
|
+
if row.get("connector") is not None
|
|
4126
|
+
else "—",
|
|
4127
|
+
row.get("rfid_label") or "—",
|
|
4128
|
+
row.get("account_name") or "—",
|
|
4129
|
+
]
|
|
4130
|
+
)
|
|
4131
|
+
|
|
4132
|
+
column_count = len(table_data[0])
|
|
4133
|
+
col_width = document.width / column_count if column_count else None
|
|
4134
|
+
table = Table(
|
|
4135
|
+
table_data,
|
|
4136
|
+
repeatRows=1,
|
|
4137
|
+
colWidths=[col_width] * column_count if col_width else None,
|
|
4138
|
+
hAlign="LEFT",
|
|
4139
|
+
)
|
|
4140
|
+
table.setStyle(
|
|
4141
|
+
TableStyle(
|
|
4142
|
+
[
|
|
4143
|
+
(
|
|
4144
|
+
"BACKGROUND",
|
|
4145
|
+
(0, 0),
|
|
4146
|
+
(-1, 0),
|
|
4147
|
+
colors.HexColor("#0f172a"),
|
|
4148
|
+
),
|
|
4149
|
+
("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
|
|
4150
|
+
("ALIGN", (0, 0), (-1, 0), "CENTER"),
|
|
4151
|
+
("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
|
|
4152
|
+
("FONTSIZE", (0, 0), (-1, 0), 9),
|
|
4153
|
+
(
|
|
4154
|
+
"ROWBACKGROUNDS",
|
|
4155
|
+
(0, 1),
|
|
4156
|
+
(-1, -1),
|
|
4157
|
+
[colors.whitesmoke, colors.HexColor("#eef2ff")],
|
|
4158
|
+
),
|
|
4159
|
+
("GRID", (0, 0), (-1, -1), 0.25, colors.grey),
|
|
4160
|
+
("VALIGN", (0, 1), (-1, -1), "MIDDLE"),
|
|
4161
|
+
]
|
|
4162
|
+
)
|
|
4163
|
+
)
|
|
4164
|
+
story.append(table)
|
|
4165
|
+
else:
|
|
4166
|
+
story.append(Paragraph(no_sessions_point, normal_style))
|
|
4167
|
+
else:
|
|
4168
|
+
story.append(Paragraph(no_structured_data, normal_style))
|
|
4169
|
+
|
|
4170
|
+
totals = dataset.get("totals") or {}
|
|
4171
|
+
story.append(Spacer(1, 0.3 * inch))
|
|
4172
|
+
story.append(Paragraph(report_totals_label, emphasis_style))
|
|
4173
|
+
story.append(
|
|
4174
|
+
Paragraph(
|
|
4175
|
+
f"{total_kw_all_time_label}: "
|
|
4176
|
+
f"{format_decimal(totals.get('total_kw', 0.0))}",
|
|
4177
|
+
emphasis_style,
|
|
4178
|
+
)
|
|
4179
|
+
)
|
|
4180
|
+
story.append(
|
|
4181
|
+
Paragraph(
|
|
4182
|
+
f"{total_kw_period_line}: "
|
|
4183
|
+
f"{format_decimal(totals.get('total_kw_period', 0.0))}",
|
|
4184
|
+
emphasis_style,
|
|
4185
|
+
)
|
|
4186
|
+
)
|
|
4187
|
+
|
|
4188
|
+
document.build(story)
|
|
4189
|
+
|
|
4190
|
+
def ensure_pdf(self) -> Path:
|
|
4191
|
+
base_dir = Path(settings.BASE_DIR)
|
|
4192
|
+
export = dict((self.data or {}).get("export") or {})
|
|
4193
|
+
pdf_relative = export.get("pdf_path")
|
|
4194
|
+
if pdf_relative:
|
|
4195
|
+
candidate = base_dir / pdf_relative
|
|
4196
|
+
if candidate.exists():
|
|
4197
|
+
return candidate
|
|
4198
|
+
|
|
4199
|
+
report_dir = base_dir / "work" / "reports"
|
|
4200
|
+
report_dir.mkdir(parents=True, exist_ok=True)
|
|
4201
|
+
timestamp = timezone.now().strftime("%Y%m%d%H%M%S")
|
|
4202
|
+
identifier = f"client_report_{self.pk}_{timestamp}"
|
|
4203
|
+
pdf_path = report_dir / f"{identifier}.pdf"
|
|
4204
|
+
self.render_pdf(pdf_path)
|
|
4205
|
+
|
|
4206
|
+
export["pdf_path"] = ClientReport._relative_to_base(pdf_path, base_dir)
|
|
4207
|
+
updated = dict(self.data)
|
|
4208
|
+
updated["export"] = export
|
|
4209
|
+
type(self).objects.filter(pk=self.pk).update(data=updated)
|
|
4210
|
+
self.data = updated
|
|
4211
|
+
return pdf_path
|
|
2812
4212
|
|
|
2813
4213
|
|
|
2814
4214
|
class BrandManager(EntityManager):
|
|
@@ -2968,6 +4368,11 @@ class Product(Entity):
|
|
|
2968
4368
|
return self.name
|
|
2969
4369
|
|
|
2970
4370
|
|
|
4371
|
+
class Meta:
|
|
4372
|
+
verbose_name = _("Product")
|
|
4373
|
+
verbose_name_plural = _("Products")
|
|
4374
|
+
|
|
4375
|
+
|
|
2971
4376
|
class AdminHistory(Entity):
|
|
2972
4377
|
"""Record of recently visited admin changelists for a user."""
|
|
2973
4378
|
|
|
@@ -3209,6 +4614,11 @@ class PackageRelease(Entity):
|
|
|
3209
4614
|
def natural_key(self):
|
|
3210
4615
|
return (self.package.name, self.version)
|
|
3211
4616
|
|
|
4617
|
+
class Severity(models.TextChoices):
|
|
4618
|
+
NORMAL = "normal", _("Normal")
|
|
4619
|
+
LOW = "low", _("Low")
|
|
4620
|
+
CRITICAL = "critical", _("Critical")
|
|
4621
|
+
|
|
3212
4622
|
package = models.ForeignKey(
|
|
3213
4623
|
Package, on_delete=models.CASCADE, related_name="releases"
|
|
3214
4624
|
)
|
|
@@ -3219,6 +4629,12 @@ class PackageRelease(Entity):
|
|
|
3219
4629
|
revision = models.CharField(
|
|
3220
4630
|
max_length=40, blank=True, default=revision_utils.get_revision, editable=False
|
|
3221
4631
|
)
|
|
4632
|
+
severity = models.CharField(
|
|
4633
|
+
max_length=16,
|
|
4634
|
+
choices=Severity.choices,
|
|
4635
|
+
default=Severity.NORMAL,
|
|
4636
|
+
help_text=_("Controls the expected urgency for auto-upgrades."),
|
|
4637
|
+
)
|
|
3222
4638
|
changelog = models.TextField(blank=True, default="")
|
|
3223
4639
|
pypi_url = models.URLField("PyPI URL", blank=True, editable=False)
|
|
3224
4640
|
github_url = models.URLField("GitHub URL", blank=True, editable=False)
|
|
@@ -3243,7 +4659,13 @@ class PackageRelease(Entity):
|
|
|
3243
4659
|
for release in cls.objects.all():
|
|
3244
4660
|
name = f"releases__packagerelease_{release.version.replace('.', '_')}.json"
|
|
3245
4661
|
path = base / name
|
|
3246
|
-
data = serializers.serialize(
|
|
4662
|
+
data = serializers.serialize(
|
|
4663
|
+
"json",
|
|
4664
|
+
[release],
|
|
4665
|
+
use_natural_foreign_keys=True,
|
|
4666
|
+
use_natural_primary_keys=True,
|
|
4667
|
+
)
|
|
4668
|
+
data = json.dumps(json.loads(data), indent=2) + "\n"
|
|
3247
4669
|
expected.add(name)
|
|
3248
4670
|
try:
|
|
3249
4671
|
current = path.read_text(encoding="utf-8")
|
|
@@ -3255,6 +4677,10 @@ class PackageRelease(Entity):
|
|
|
3255
4677
|
if old_name not in expected and old_path.exists():
|
|
3256
4678
|
old_path.unlink()
|
|
3257
4679
|
|
|
4680
|
+
def delete(self, using=None, keep_parents=False):
|
|
4681
|
+
user_data.delete_user_fixture(self)
|
|
4682
|
+
super().delete(using=using, keep_parents=keep_parents)
|
|
4683
|
+
|
|
3258
4684
|
def __str__(self) -> str: # pragma: no cover - trivial
|
|
3259
4685
|
return f"{self.package.name} {self.version}"
|
|
3260
4686
|
|
|
@@ -3262,10 +4688,27 @@ class PackageRelease(Entity):
|
|
|
3262
4688
|
"""Return a :class:`ReleasePackage` built from the package."""
|
|
3263
4689
|
return self.package.to_package()
|
|
3264
4690
|
|
|
3265
|
-
def to_credentials(
|
|
3266
|
-
|
|
3267
|
-
|
|
3268
|
-
|
|
4691
|
+
def to_credentials(
|
|
4692
|
+
self, user: models.Model | None = None
|
|
4693
|
+
) -> Credentials | None:
|
|
4694
|
+
"""Return :class:`Credentials` from available release managers."""
|
|
4695
|
+
|
|
4696
|
+
manager_candidates: list[ReleaseManager] = []
|
|
4697
|
+
|
|
4698
|
+
for candidate in (self.release_manager, self.package.release_manager):
|
|
4699
|
+
if candidate and candidate not in manager_candidates:
|
|
4700
|
+
manager_candidates.append(candidate)
|
|
4701
|
+
|
|
4702
|
+
if user is not None and getattr(user, "is_authenticated", False):
|
|
4703
|
+
try:
|
|
4704
|
+
user_manager = ReleaseManager.objects.get(user=user)
|
|
4705
|
+
except ReleaseManager.DoesNotExist:
|
|
4706
|
+
user_manager = None
|
|
4707
|
+
else:
|
|
4708
|
+
if user_manager not in manager_candidates:
|
|
4709
|
+
manager_candidates.append(user_manager)
|
|
4710
|
+
|
|
4711
|
+
for manager in manager_candidates:
|
|
3269
4712
|
creds = manager.to_credentials()
|
|
3270
4713
|
if creds and creds.has_auth():
|
|
3271
4714
|
return creds
|
|
@@ -3287,7 +4730,9 @@ class PackageRelease(Entity):
|
|
|
3287
4730
|
return manager.github_token
|
|
3288
4731
|
return os.environ.get("GITHUB_TOKEN")
|
|
3289
4732
|
|
|
3290
|
-
def build_publish_targets(
|
|
4733
|
+
def build_publish_targets(
|
|
4734
|
+
self, user: models.Model | None = None
|
|
4735
|
+
) -> list[RepositoryTarget]:
|
|
3291
4736
|
"""Return repository targets for publishing this release."""
|
|
3292
4737
|
|
|
3293
4738
|
manager = self.release_manager or self.package.release_manager
|
|
@@ -3296,7 +4741,7 @@ class PackageRelease(Entity):
|
|
|
3296
4741
|
env_primary = os.environ.get("PYPI_REPOSITORY_URL", "")
|
|
3297
4742
|
primary_url = env_primary.strip()
|
|
3298
4743
|
|
|
3299
|
-
primary_creds = self.to_credentials()
|
|
4744
|
+
primary_creds = self.to_credentials(user=user)
|
|
3300
4745
|
targets.append(
|
|
3301
4746
|
RepositoryTarget(
|
|
3302
4747
|
name="PyPI",
|
|
@@ -3438,6 +4883,8 @@ class PackageRelease(Entity):
|
|
|
3438
4883
|
"""
|
|
3439
4884
|
|
|
3440
4885
|
version = (version or "").strip()
|
|
4886
|
+
if version.endswith("+"):
|
|
4887
|
+
version = version.rstrip("+")
|
|
3441
4888
|
revision = (revision or "").strip()
|
|
3442
4889
|
if not version or not revision:
|
|
3443
4890
|
return True
|
|
@@ -3523,73 +4970,6 @@ def _rfid_unique_energy_account(
|
|
|
3523
4970
|
"RFID tags may only be assigned to one energy account."
|
|
3524
4971
|
)
|
|
3525
4972
|
|
|
3526
|
-
|
|
3527
|
-
def hash_key(key: str) -> str:
|
|
3528
|
-
"""Return a SHA-256 hash for ``key``."""
|
|
3529
|
-
|
|
3530
|
-
return hashlib.sha256(key.encode()).hexdigest()
|
|
3531
|
-
|
|
3532
|
-
|
|
3533
|
-
class AssistantProfile(Profile):
|
|
3534
|
-
"""Stores a hashed user key used by the assistant for authentication.
|
|
3535
|
-
|
|
3536
|
-
The plain-text ``user_key`` is generated server-side and shown only once.
|
|
3537
|
-
Users must supply this key in the ``Authorization: Bearer <user_key>``
|
|
3538
|
-
header when requesting protected endpoints. Only the hash is stored.
|
|
3539
|
-
"""
|
|
3540
|
-
|
|
3541
|
-
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
|
3542
|
-
profile_fields = ("user_key_hash", "scopes", "is_active")
|
|
3543
|
-
user_key_hash = models.CharField(max_length=64, unique=True)
|
|
3544
|
-
scopes = models.JSONField(default=list, blank=True)
|
|
3545
|
-
created_at = models.DateTimeField(auto_now_add=True)
|
|
3546
|
-
last_used_at = models.DateTimeField(null=True, blank=True)
|
|
3547
|
-
is_active = models.BooleanField(default=True)
|
|
3548
|
-
|
|
3549
|
-
class Meta:
|
|
3550
|
-
db_table = "workgroup_assistantprofile"
|
|
3551
|
-
verbose_name = "Assistant Profile"
|
|
3552
|
-
verbose_name_plural = "Assistant Profiles"
|
|
3553
|
-
constraints = [
|
|
3554
|
-
models.CheckConstraint(
|
|
3555
|
-
check=(
|
|
3556
|
-
(Q(user__isnull=False) & Q(group__isnull=True))
|
|
3557
|
-
| (Q(user__isnull=True) & Q(group__isnull=False))
|
|
3558
|
-
),
|
|
3559
|
-
name="assistantprofile_requires_owner",
|
|
3560
|
-
)
|
|
3561
|
-
]
|
|
3562
|
-
|
|
3563
|
-
@classmethod
|
|
3564
|
-
def issue_key(cls, user) -> tuple["AssistantProfile", str]:
|
|
3565
|
-
"""Create or update a profile and return it with a new plain key."""
|
|
3566
|
-
|
|
3567
|
-
key = secrets.token_hex(32)
|
|
3568
|
-
key_hash = hash_key(key)
|
|
3569
|
-
if user is None:
|
|
3570
|
-
raise ValueError("Assistant profiles require a user instance")
|
|
3571
|
-
|
|
3572
|
-
profile, _ = cls.objects.update_or_create(
|
|
3573
|
-
user=user,
|
|
3574
|
-
defaults={
|
|
3575
|
-
"user_key_hash": key_hash,
|
|
3576
|
-
"last_used_at": None,
|
|
3577
|
-
"is_active": True,
|
|
3578
|
-
},
|
|
3579
|
-
)
|
|
3580
|
-
return profile, key
|
|
3581
|
-
|
|
3582
|
-
def touch(self) -> None:
|
|
3583
|
-
"""Record that the key was used."""
|
|
3584
|
-
|
|
3585
|
-
self.last_used_at = timezone.now()
|
|
3586
|
-
self.save(update_fields=["last_used_at"])
|
|
3587
|
-
|
|
3588
|
-
def __str__(self) -> str: # pragma: no cover - simple representation
|
|
3589
|
-
owner = self.owner_display()
|
|
3590
|
-
return f"AssistantProfile for {owner}" if owner else "AssistantProfile"
|
|
3591
|
-
|
|
3592
|
-
|
|
3593
4973
|
def validate_relative_url(value: str) -> None:
|
|
3594
4974
|
if not value:
|
|
3595
4975
|
return
|
|
@@ -3602,7 +4982,6 @@ class TodoManager(EntityManager):
|
|
|
3602
4982
|
def get_by_natural_key(self, request: str):
|
|
3603
4983
|
return self.get(request=request)
|
|
3604
4984
|
|
|
3605
|
-
|
|
3606
4985
|
class Todo(Entity):
|
|
3607
4986
|
"""Tasks requested for the Release Manager."""
|
|
3608
4987
|
|
|
@@ -3614,7 +4993,38 @@ class Todo(Entity):
|
|
|
3614
4993
|
generated_for_version = models.CharField(max_length=20, blank=True, default="")
|
|
3615
4994
|
generated_for_revision = models.CharField(max_length=40, blank=True, default="")
|
|
3616
4995
|
done_on = models.DateTimeField(null=True, blank=True)
|
|
4996
|
+
done_node = models.ForeignKey(
|
|
4997
|
+
"nodes.Node",
|
|
4998
|
+
null=True,
|
|
4999
|
+
blank=True,
|
|
5000
|
+
on_delete=models.SET_NULL,
|
|
5001
|
+
related_name="completed_todos",
|
|
5002
|
+
help_text="Node where this TODO was completed.",
|
|
5003
|
+
)
|
|
5004
|
+
done_version = models.CharField(max_length=20, blank=True, default="")
|
|
5005
|
+
done_revision = models.CharField(max_length=40, blank=True, default="")
|
|
5006
|
+
done_username = models.CharField(max_length=150, blank=True, default="")
|
|
3617
5007
|
on_done_condition = ConditionTextField(blank=True, default="")
|
|
5008
|
+
origin_node = models.ForeignKey(
|
|
5009
|
+
"nodes.Node",
|
|
5010
|
+
null=True,
|
|
5011
|
+
blank=True,
|
|
5012
|
+
on_delete=models.SET_NULL,
|
|
5013
|
+
related_name="originated_todos",
|
|
5014
|
+
help_text="Node where this TODO was generated.",
|
|
5015
|
+
)
|
|
5016
|
+
original_user = models.ForeignKey(
|
|
5017
|
+
settings.AUTH_USER_MODEL,
|
|
5018
|
+
null=True,
|
|
5019
|
+
blank=True,
|
|
5020
|
+
on_delete=models.SET_NULL,
|
|
5021
|
+
related_name="originated_todos",
|
|
5022
|
+
help_text="User responsible for creating this TODO.",
|
|
5023
|
+
)
|
|
5024
|
+
original_user_is_authenticated = models.BooleanField(
|
|
5025
|
+
default=False,
|
|
5026
|
+
help_text="Whether the originating user was authenticated during creation.",
|
|
5027
|
+
)
|
|
3618
5028
|
|
|
3619
5029
|
objects = TodoManager()
|
|
3620
5030
|
|
|
@@ -3655,6 +5065,203 @@ class Todo(Entity):
|
|
|
3655
5065
|
return field.evaluate(self)
|
|
3656
5066
|
return ConditionCheckResult(True, "")
|
|
3657
5067
|
|
|
5068
|
+
def save(self, *args, **kwargs):
|
|
5069
|
+
created = self.pk is None
|
|
5070
|
+
tracked_fields = {
|
|
5071
|
+
"done_on",
|
|
5072
|
+
"done_node",
|
|
5073
|
+
"done_node_id",
|
|
5074
|
+
"done_revision",
|
|
5075
|
+
"done_username",
|
|
5076
|
+
"done_version",
|
|
5077
|
+
"is_deleted",
|
|
5078
|
+
}
|
|
5079
|
+
update_fields = kwargs.get("update_fields")
|
|
5080
|
+
monitor_changes = not created and (
|
|
5081
|
+
update_fields is None or tracked_fields.intersection(update_fields)
|
|
5082
|
+
)
|
|
5083
|
+
previous_state = None
|
|
5084
|
+
if monitor_changes:
|
|
5085
|
+
previous_state = (
|
|
5086
|
+
type(self)
|
|
5087
|
+
.all_objects.filter(pk=self.pk)
|
|
5088
|
+
.values(
|
|
5089
|
+
"done_on",
|
|
5090
|
+
"done_node_id",
|
|
5091
|
+
"done_revision",
|
|
5092
|
+
"done_username",
|
|
5093
|
+
"done_version",
|
|
5094
|
+
"is_deleted",
|
|
5095
|
+
)
|
|
5096
|
+
.first()
|
|
5097
|
+
)
|
|
5098
|
+
super().save(*args, **kwargs)
|
|
5099
|
+
|
|
5100
|
+
if created:
|
|
5101
|
+
return
|
|
5102
|
+
|
|
5103
|
+
previous_done_on = previous_state["done_on"] if previous_state else None
|
|
5104
|
+
previous_is_deleted = previous_state["is_deleted"] if previous_state else False
|
|
5105
|
+
previous_done_node = (
|
|
5106
|
+
previous_state["done_node_id"] if previous_state else None
|
|
5107
|
+
)
|
|
5108
|
+
previous_done_revision = (
|
|
5109
|
+
previous_state["done_revision"] if previous_state else ""
|
|
5110
|
+
)
|
|
5111
|
+
previous_done_username = (
|
|
5112
|
+
previous_state["done_username"] if previous_state else ""
|
|
5113
|
+
)
|
|
5114
|
+
previous_done_version = (
|
|
5115
|
+
previous_state["done_version"] if previous_state else ""
|
|
5116
|
+
)
|
|
5117
|
+
if (
|
|
5118
|
+
previous_done_on == self.done_on
|
|
5119
|
+
and previous_is_deleted == self.is_deleted
|
|
5120
|
+
and previous_done_node == getattr(self, "done_node_id", None)
|
|
5121
|
+
and previous_done_revision == self.done_revision
|
|
5122
|
+
and previous_done_username == self.done_username
|
|
5123
|
+
and previous_done_version == self.done_version
|
|
5124
|
+
):
|
|
5125
|
+
return
|
|
5126
|
+
|
|
5127
|
+
self._update_fixture_state()
|
|
5128
|
+
|
|
5129
|
+
def populate_done_metadata(self, user=None) -> None:
    """Record node, version, revision and username for a completed TODO.

    Every lookup is best-effort: the local node, the VERSION file, the
    git revision and the username all degrade to an empty value instead
    of raising, so this method is safe to call from any context.
    """

    local_node = None
    try:  # pragma: no cover - defensive import guard
        from nodes.models import Node  # type: ignore
    except Exception:  # pragma: no cover - when app not ready
        Node = None

    if Node is not None:
        try:
            local_node = Node.get_local()
        except Exception:  # pragma: no cover - fallback on errors
            local_node = None
    self.done_node = local_node if local_node else None

    version = ""
    revision = ""
    if local_node is not None:
        version = (local_node.installed_version or "").strip()
        revision = (local_node.installed_revision or "").strip()

    # Fall back to the repository VERSION file when the node record
    # does not carry a version.
    if not version:
        try:
            version = (
                (Path(settings.BASE_DIR) / "VERSION")
                .read_text(encoding="utf-8")
                .strip()
            )
        except OSError:
            version = ""

    # Fall back to the working tree's revision when the node record
    # does not carry one.
    if not revision:
        try:
            revision = revision_utils.get_revision() or ""
        except Exception:  # pragma: no cover - defensive fallback
            revision = ""

    username = ""
    if user is not None and getattr(user, "is_authenticated", False):
        try:
            username = user.get_username() or ""
        except Exception:  # pragma: no cover - fallback to attribute
            username = getattr(user, "username", "") or ""

    self.done_version = version
    self.done_revision = revision
    self.done_username = username
5175
|
+
def _update_fixture_state(self) -> None:
    """Sync this seed-data TODO's fixture file with its current state.

    Silently returns when the row is not seed data, the request text is
    empty or unslugifiable, or no matching fixture file exists. Read and
    write failures are logged, never raised.
    """

    if not self.is_seed_data:
        return

    request_text = (self.request or "").strip()
    if not request_text:
        return

    slug = self._fixture_slug(request_text)
    if not slug:
        return

    fixture_path = (
        Path(settings.BASE_DIR) / "core" / "fixtures" / f"todo__{slug}.json"
    )
    if not fixture_path.exists():
        return

    try:
        data = json.loads(fixture_path.read_text(encoding="utf-8"))
    except Exception:
        logger.exception("Failed to read TODO fixture %s", fixture_path)
        return

    if not isinstance(data, list):
        return

    wanted = request_text.lower()
    changed_any = False
    for entry in data:
        if not isinstance(entry, dict):
            continue
        fields = entry.get("fields")
        if not isinstance(fields, dict):
            continue
        # Match fixture entries by their (case-insensitive) request text.
        if (fields.get("request") or "").strip().lower() != wanted:
            continue
        if self._apply_fixture_fields(fields):
            changed_any = True

    if not changed_any:
        return

    payload = json.dumps(data, indent=2, ensure_ascii=False)
    if not payload.endswith("\n"):
        payload += "\n"

    try:
        fixture_path.write_text(payload, encoding="utf-8")
    except OSError:
        logger.exception("Failed to write TODO fixture %s", fixture_path)
|
5228
|
+
def _apply_fixture_fields(self, fields: dict[str, object]) -> bool:
    """Copy this TODO's state into *fields*; return True if anything changed."""

    dirty = False

    def _set(key: str, value: object) -> None:
        nonlocal dirty
        if fields.get(key) != value:
            fields[key] = value
            dirty = True

    for key, value in (
        ("request", self.request or ""),
        ("url", self.url or ""),
        ("request_details", self.request_details or ""),
        ("done_version", self.done_version or ""),
        ("done_revision", self.done_revision or ""),
        ("done_username", self.done_username or ""),
    ):
        _set(key, value)

    if self.done_on:
        _set("done_on", timezone.localtime(self.done_on).isoformat())
    elif fields.get("done_on") is not None:
        # Only clear an existing timestamp; never insert an absent key.
        fields["done_on"] = None
        dirty = True

    if self.is_deleted:
        _set("is_deleted", True)
    elif fields.get("is_deleted"):
        # Only downgrade a previously-set deletion flag; never add the key.
        fields["is_deleted"] = False
        dirty = True

    return dirty
5260
|
+
@staticmethod
def _fixture_slug(value: str) -> str:
    """Lower-case *value*, collapse non-alphanumeric runs to ``_`` and trim."""

    return re.sub(r"[^a-z0-9]+", "_", value.lower()).strip("_")
|
3658
5265
|
|
|
3659
5266
|
class TOTPDeviceSettings(models.Model):
|
|
3660
5267
|
"""Per-device configuration options for authenticator enrollments."""
|
|
@@ -3674,5 +5281,5 @@ class TOTPDeviceSettings(models.Model):
|
|
|
3674
5281
|
is_user_data = models.BooleanField(default=False)
|
|
3675
5282
|
|
|
3676
5283
|
class Meta:
|
|
3677
|
-
verbose_name = _("Authenticator
|
|
3678
|
-
verbose_name_plural = _("Authenticator
|
|
5284
|
+
verbose_name = _("Authenticator Device Setting")
|
|
5285
|
+
verbose_name_plural = _("Authenticator Device Settings")
|