arthexis 0.1.12__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
Potentially problematic release.
This version of arthexis might be problematic.
- {arthexis-0.1.12.dist-info → arthexis-0.1.13.dist-info}/METADATA +2 -2
- {arthexis-0.1.12.dist-info → arthexis-0.1.13.dist-info}/RECORD +37 -34
- config/asgi.py +15 -1
- config/celery.py +8 -1
- config/settings.py +42 -76
- config/settings_helpers.py +109 -0
- core/admin.py +47 -10
- core/auto_upgrade.py +2 -2
- core/form_fields.py +75 -0
- core/models.py +182 -59
- core/release.py +38 -20
- core/tests.py +11 -1
- core/views.py +47 -12
- core/widgets.py +43 -0
- nodes/admin.py +277 -14
- nodes/apps.py +15 -0
- nodes/models.py +224 -43
- nodes/tests.py +629 -10
- nodes/urls.py +1 -0
- nodes/views.py +173 -5
- ocpp/admin.py +146 -2
- ocpp/consumers.py +125 -8
- ocpp/evcs.py +7 -94
- ocpp/models.py +2 -0
- ocpp/routing.py +4 -2
- ocpp/simulator.py +29 -8
- ocpp/status_display.py +26 -0
- ocpp/tests.py +625 -16
- ocpp/transactions_io.py +10 -0
- ocpp/views.py +122 -22
- pages/admin.py +3 -0
- pages/forms.py +30 -1
- pages/tests.py +118 -1
- pages/views.py +12 -4
- {arthexis-0.1.12.dist-info → arthexis-0.1.13.dist-info}/WHEEL +0 -0
- {arthexis-0.1.12.dist-info → arthexis-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.12.dist-info → arthexis-0.1.13.dist-info}/top_level.txt +0 -0
core/form_fields.py
ADDED
@@ -0,0 +1,75 @@
+"""Custom form fields for the Arthexis admin."""
+
+from __future__ import annotations
+
+import base64
+from typing import Any
+
+from django.core.exceptions import ValidationError
+from django.forms.fields import FileField
+from django.forms.widgets import FILE_INPUT_CONTRADICTION
+from django.utils.translation import gettext_lazy as _
+
+from .widgets import AdminBase64FileWidget
+
+
+class Base64FileField(FileField):
+    """Form field storing uploaded files as base64 encoded strings.
+
+    The field behaves like :class:`~django.forms.FileField` from the user's
+    perspective. Uploaded files are converted to base64 and returned as text so
+    they can be stored in ``TextField`` columns. When no new file is uploaded the
+    initial base64 value is preserved, while clearing the field stores an empty
+    string.
+    """
+
+    widget = AdminBase64FileWidget
+    default_error_messages = {
+        **FileField.default_error_messages,
+        "contradiction": _(
+            "Please either submit a file or check the clear checkbox, not both."
+        ),
+    }
+
+    def __init__(
+        self,
+        *,
+        download_name: str | None = None,
+        content_type: str = "application/octet-stream",
+        **kwargs: Any,
+    ) -> None:
+        widget = kwargs.pop("widget", None) or self.widget()
+        if download_name:
+            widget.download_name = download_name
+        if content_type:
+            widget.content_type = content_type
+        super().__init__(widget=widget, **kwargs)
+
+    def to_python(self, data: Any) -> str | None:
+        """Convert uploaded data to a base64 string."""
+
+        if isinstance(data, str):
+            return data
+        uploaded = super().to_python(data)
+        if uploaded is None:
+            return None
+        content = uploaded.read()
+        if hasattr(uploaded, "seek"):
+            uploaded.seek(0)
+        return base64.b64encode(content).decode("ascii")
+
+    def clean(self, data: Any, initial: str | None = None) -> str:
+        if data is FILE_INPUT_CONTRADICTION:
+            raise ValidationError(
+                self.error_messages["contradiction"], code="contradiction"
+            )
+        cleaned = super().clean(data, initial)
+        if cleaned in {None, False}:
+            return ""
+        return cleaned
+
+    def bound_data(self, data: Any, initial: str | None) -> str | None:
+        return initial
+
+    def has_changed(self, initial: str | None, data: Any) -> bool:
+        return not self.disabled and data is not None
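The new field drops straight into a regular Django form. A minimal usage sketch (the form and field names below are hypothetical; only Base64FileField and its download_name/content_type options come from this diff):

from django import forms

from core.form_fields import Base64FileField


class FirmwareForm(forms.Form):  # hypothetical form for illustration
    blob = Base64FileField(
        required=False,
        download_name="firmware.bin",             # forwarded to the widget
        content_type="application/octet-stream",  # forwarded to the widget
    )

# bound = FirmwareForm(data={}, files={"blob": uploaded_file})
# if bound.is_valid():
#     obj.blob = bound.cleaned_data["blob"]  # base64 text, or "" when the clear box was ticked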
core/models.py
CHANGED
@@ -3,7 +3,7 @@ from django.contrib.auth.models import (
     Group,
     UserManager as DjangoUserManager,
 )
-from django.db import models
+from django.db import DatabaseError, models
 from django.db.models import Q
 from django.db.models.functions import Lower
 from django.conf import settings
@@ -16,6 +16,7 @@ from django.db.models.signals import m2m_changed, post_delete, post_save
 from django.dispatch import receiver
 from django.views.decorators.debug import sensitive_variables
 from datetime import time as datetime_time, timedelta
+import logging
 from django.contrib.contenttypes.models import ContentType
 import hashlib
 import os
@@ -37,8 +38,20 @@ from defusedxml import xmlrpc as defused_xmlrpc
 defused_xmlrpc.monkey_patch()
 xmlrpc_client = defused_xmlrpc.xmlrpc_client

+logger = logging.getLogger(__name__)
+
 from .entity import Entity, EntityUserManager, EntityManager
-from .release import
+from .release import (
+    Package as ReleasePackage,
+    Credentials,
+    DEFAULT_PACKAGE,
+)
+
+
+def default_package_modules() -> list[str]:
+    """Return the default package module list."""
+
+    return list(DEFAULT_PACKAGE.packages)
 from . import temp_passwords
 from . import user_data  # noqa: F401 - ensure signal registration
 from .fields import (
@@ -593,6 +606,15 @@ class OdooProfile(Profile):
                 kwargs,
             )
         except Exception:
+            logger.exception(
+                "Odoo RPC %s.%s failed for profile %s (host=%s, database=%s, username=%s)",
+                model,
+                method,
+                self.pk,
+                self.host,
+                self.database,
+                self.username,
+            )
             self._clear_verification()
             self.save(update_fields=["verified_on"])
             raise
@@ -753,66 +775,93 @@ class EmailInbox(Profile):
         import imaplib
         import email

+        def _decode_imap_bytes(value):
+            if isinstance(value, bytes):
+                return value.decode("utf-8", errors="ignore")
+            return str(value)
+
         conn = (
             imaplib.IMAP4_SSL(self.host, self.port)
             if self.use_ssl
             else imaplib.IMAP4(self.host, self.port)
         )
-
-
-
-
-
-
-
-
-
-
-
-
-
-            try:
-                value.encode("ascii")
-                encoded_value = value
-            except UnicodeEncodeError:
-                charset = charset or "UTF-8"
-                encoded_value = value.encode("utf-8")
-            criteria.extend([term, encoded_value])
-
-        _append("SUBJECT", subject)
-        _append("FROM", from_address)
-        _append("TEXT", body)
-
-        if not criteria:
+        try:
+            conn.login(self.username, self.password)
+            typ, data = conn.select("INBOX")
+            if typ != "OK":
+                message = " ".join(_decode_imap_bytes(item) for item in data or [])
+                if not message:
+                    message = "Unable to select INBOX"
+                raise ValidationError(message)
+
+            fetch_limit = (
+                limit if not use_regular_expressions else max(limit * 5, limit)
+            )
+            if use_regular_expressions:
                 typ, data = conn.search(None, "ALL")
             else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                criteria = []
+                charset = None
+
+                def _append(term: str, value: str):
+                    nonlocal charset
+                    if not value:
+                        return
+                    try:
+                        value.encode("ascii")
+                        encoded_value = value
+                    except UnicodeEncodeError:
+                        charset = charset or "UTF-8"
+                        encoded_value = value.encode("utf-8")
+                    criteria.extend([term, encoded_value])
+
+                _append("SUBJECT", subject)
+                _append("FROM", from_address)
+                _append("TEXT", body)
+
+                if not criteria:
+                    typ, data = conn.search(None, "ALL")
+                else:
+                    typ, data = conn.search(charset, *criteria)
+
+            if typ != "OK":
+                message = " ".join(_decode_imap_bytes(item) for item in data or [])
+                if not message:
+                    message = "Unable to search mailbox"
+                raise ValidationError(message)
+
+            ids = data[0].split()[-fetch_limit:]
+            messages = []
+            for mid in ids:
+                typ, msg_data = conn.fetch(mid, "(RFC822)")
+                if typ != "OK" or not msg_data:
+                    continue
+                msg = email.message_from_bytes(msg_data[0][1])
+                body_text = _get_body(msg)
+                subj_value = msg.get("Subject", "")
+                from_value = msg.get("From", "")
+                if not (
+                    _matches(subj_value, subject, subject_regex)
+                    and _matches(from_value, from_address, sender_regex)
+                    and _matches(body_text, body, body_regex)
+                ):
+                    continue
+                messages.append(
+                    {
+                        "subject": subj_value,
+                        "from": from_value,
+                        "body": body_text,
+                        "date": msg.get("Date", ""),
+                    }
+                )
+                if len(messages) >= limit:
+                    break
+            return list(reversed(messages))
+        finally:
+            try:
+                conn.logout()
+            except Exception:  # pragma: no cover - best effort cleanup
+                pass

         import poplib
         import email
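The interesting part of the new IMAP path is how search criteria are assembled: ASCII-only terms are passed through as plain text, while any non-ASCII value switches the search charset to UTF-8 and is sent as encoded bytes, which is what imaplib's search(charset, *criteria) accepts. A standalone sketch of that logic (the helper name is illustrative):

def build_search_criteria(subject="", from_address="", body=""):
    criteria, charset = [], None

    def append(term, value):
        nonlocal charset
        if not value:
            return
        try:
            value.encode("ascii")
            encoded = value
        except UnicodeEncodeError:
            charset = charset or "UTF-8"
            encoded = value.encode("utf-8")
        criteria.extend([term, encoded])

    append("SUBJECT", subject)
    append("FROM", from_address)
    append("TEXT", body)
    return charset, criteria


assert build_search_criteria(subject="hello") == (None, ["SUBJECT", "hello"])
assert build_search_criteria(subject="niño")[0] == "UTF-8"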
@@ -1260,11 +1309,34 @@ class RFID(Entity):
         related_name="rfids",
         help_text="Optional reference for this RFID.",
     )
+    origin_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="created_rfids",
+        help_text="Node where this RFID record was created.",
+    )
     released = models.BooleanField(default=False)
     added_on = models.DateTimeField(auto_now_add=True)
     last_seen_on = models.DateTimeField(null=True, blank=True)

     def save(self, *args, **kwargs):
+        update_fields = kwargs.get("update_fields")
+        if not self.origin_node_id:
+            try:
+                from nodes.models import Node  # imported lazily to avoid circular import
+            except Exception:  # pragma: no cover - nodes app may be unavailable
+                node = None
+            else:
+                node = Node.get_local()
+            if node:
+                self.origin_node = node
+                if update_fields:
+                    fields = set(update_fields)
+                    if "origin_node" not in fields:
+                        fields.add("origin_node")
+                    kwargs["update_fields"] = tuple(fields)
         if self.pk:
             old = type(self).objects.filter(pk=self.pk).values("key_a", "key_b").first()
             if old:
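The update_fields handling in the new save() guards a real pitfall: a partial save such as rfid.save(update_fields=["last_seen_on"]) would otherwise silently skip the freshly assigned origin_node. A small sketch of that extension step in isolation (values are illustrative):

update_fields = ("last_seen_on",)   # what the caller asked to persist
fields = set(update_fields)
if "origin_node" not in fields:
    fields.add("origin_node")       # make sure the new assignment is written too
update_fields = tuple(fields)
assert set(update_fields) == {"last_seen_on", "origin_node"}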
@@ -2314,6 +2386,9 @@ class Package(Entity):
     license = models.CharField(max_length=100, default=DEFAULT_PACKAGE.license)
     repository_url = models.URLField(default=DEFAULT_PACKAGE.repository_url)
     homepage_url = models.URLField(default=DEFAULT_PACKAGE.homepage_url)
+    version_path = models.CharField(max_length=255, blank=True, default="")
+    dependencies_path = models.CharField(max_length=255, blank=True, default="")
+    test_command = models.TextField(blank=True, default="")
     release_manager = models.ForeignKey(
         ReleaseManager, on_delete=models.SET_NULL, null=True, blank=True
     )
@@ -2352,12 +2427,22 @@ class Package(Entity):
             license=self.license,
             repository_url=self.repository_url,
             homepage_url=self.homepage_url,
+            version_path=self.version_path or None,
+            dependencies_path=self.dependencies_path or None,
+            test_command=self.test_command or None,
         )


 class PackageRelease(Entity):
     """Store metadata for a specific package version."""

+    _PATCH_BITS = 12
+    _MINOR_BITS = 12
+    _PATCH_MASK = (1 << _PATCH_BITS) - 1
+    _MINOR_MASK = (1 << _MINOR_BITS) - 1
+    _MINOR_SHIFT = _PATCH_BITS
+    _MAJOR_SHIFT = _PATCH_BITS + _MINOR_BITS
+
     objects = PackageReleaseManager()

     def natural_key(self):
@@ -2425,14 +2510,18 @@ class PackageRelease(Entity):
         from packaging.version import Version

         v = Version(self.version)
-        return (
+        return (
+            (v.major << self._MAJOR_SHIFT)
+            | (v.minor << self._MINOR_SHIFT)
+            | v.micro
+        )

     @staticmethod
     def version_from_migration(number: int) -> str:
         """Return version string encoded by ``number``."""
-        major =
-        minor = (number >>
-        patch = number &
+        major = number >> PackageRelease._MAJOR_SHIFT
+        minor = (number >> PackageRelease._MINOR_SHIFT) & PackageRelease._MINOR_MASK
+        patch = number & PackageRelease._PATCH_MASK
         return f"{major}.{minor}.{patch}"

     @property
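A worked example of the packing scheme above: minor and patch each get 12 bits, so a version encodes as (major << 24) | (minor << 12) | patch and decodes with the matching shifts and masks.

PATCH_BITS = 12
MINOR_BITS = 12
MINOR_SHIFT = PATCH_BITS               # 12
MAJOR_SHIFT = PATCH_BITS + MINOR_BITS  # 24

# Encode version 0.1.13 into a single migration number.
number = (0 << MAJOR_SHIFT) | (1 << MINOR_SHIFT) | 13
assert number == 4109

# Decode it again, mirroring version_from_migration().
major = number >> MAJOR_SHIFT
minor = (number >> MINOR_SHIFT) & ((1 << MINOR_BITS) - 1)
patch = number & ((1 << PATCH_BITS) - 1)
assert f"{major}.{minor}.{patch}" == "0.1.13"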
@@ -2460,6 +2549,40 @@ class PackageRelease(Entity):
             return None
         return max(releases, key=lambda r: Version(r.version))

+    @classmethod
+    def matches_revision(cls, version: str, revision: str) -> bool:
+        """Return ``True`` when *revision* matches the stored release revision.
+
+        When the release metadata cannot be retrieved (for example during
+        database initialization), the method optimistically returns ``True`` so
+        callers continue operating without raising secondary errors.
+        """
+
+        version = (version or "").strip()
+        revision = (revision or "").strip()
+        if not version or not revision:
+            return True
+
+        try:
+            queryset = cls.objects.filter(version=version)
+            release_revision = (
+                queryset.filter(package__is_active=True)
+                .values_list("revision", flat=True)
+                .first()
+            )
+            if release_revision is None:
+                release_revision = queryset.values_list("revision", flat=True).first()
+        except DatabaseError:  # pragma: no cover - depends on DB availability
+            logger.debug(
+                "PackageRelease.matches_revision skipped: database unavailable",
+                exc_info=True,
+            )
+            return True
+
+        if not release_revision:
+            return True
+        return release_revision.strip() == revision
+
     def build(self, **kwargs) -> None:
         """Wrapper around :func:`core.release.build` for convenience."""
         from . import release as release_utils
core/release.py
CHANGED
@@ -1,12 +1,13 @@
 from __future__ import annotations

 import os
+import shlex
+import shutil
 import subprocess
 import sys
-import shutil
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Sequence

 try:  # pragma: no cover - optional dependency
     import toml  # type: ignore
@@ -16,6 +17,15 @@ except Exception:  # pragma: no cover - fallback when missing
 from config.offline import requires_network, network_available


+DEFAULT_PACKAGE_MODULES = [
+    "core",
+    "config",
+    "nodes",
+    "ocpp",
+    "pages",
+]
+
+
 @dataclass
 class Package:
     """Metadata for building a distributable package."""
@@ -28,6 +38,10 @@ class Package:
     license: str
     repository_url: str = "https://github.com/arthexis/arthexis"
    homepage_url: str = "https://arthexis.com"
+    packages: Sequence[str] = tuple(DEFAULT_PACKAGE_MODULES)
+    version_path: Optional[Path | str] = None
+    dependencies_path: Optional[Path | str] = None
+    test_command: Optional[str] = None


 @dataclass
@@ -104,7 +118,10 @@ def _manager_credentials() -> Optional[Credentials]:
     return None


-def run_tests(log_path: Optional[Path] = None) -> subprocess.CompletedProcess:
+def run_tests(
+    log_path: Optional[Path] = None,
+    command: Optional[Sequence[str]] = None,
+) -> subprocess.CompletedProcess:
     """Run the project's test suite and write output to ``log_path``.

     The log file is stored separately from regular application logs to avoid
@@ -112,11 +129,8 @@ def run_tests(log_path: Optional[Path] = None) -> subprocess.CompletedProcess:
     """

     log_path = log_path or Path("logs/test.log")
-
-
-        capture_output=True,
-        text=True,
-    )
+    cmd = list(command) if command is not None else [sys.executable, "manage.py", "test"]
+    proc = subprocess.run(cmd, capture_output=True, text=True)
     log_path.parent.mkdir(parents=True, exist_ok=True)
     log_path.write_text(proc.stdout + proc.stderr, encoding="utf-8")
     return proc
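A minimal sketch (the test command string is hypothetical) of how a Package.test_command reaches the reworked run_tests(): build() shlex-splits the string into an argv list, and None falls back to the default manage.py invocation.

import shlex
import sys

test_command = "pytest -x --maxfail=1"   # hypothetical per-package override
cmd = shlex.split(test_command) if test_command else None
assert cmd == ["pytest", "-x", "--maxfail=1"]

# run_tests(command=cmd) executes that argv; with command=None it falls back to:
default_cmd = [sys.executable, "manage.py", "test"]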
@@ -147,15 +161,7 @@ def _write_pyproject(package: Package, version: str, requirements: list[str]) ->
             },
         },
         "tool": {
-            "setuptools": {
-                "packages": [
-                    "core",
-                    "config",
-                    "nodes",
-                    "ocpp",
-                    "pages",
-                ]
-            }
+            "setuptools": {"packages": list(package.packages)}
         },
     }

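A small sketch of what the simplified table produces, using the optional toml dependency the module already tries to import; the [tool.setuptools] packages list is now driven by Package.packages rather than a hard-coded module list.

import toml  # the same optional dependency core/release.py imports above

packages = ["core", "config", "nodes", "ocpp", "pages"]  # DEFAULT_PACKAGE_MODULES
print(toml.dumps({"tool": {"setuptools": {"packages": packages}}}))
# Emits a [tool.setuptools] table whose "packages" key lists the modules above.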
@@ -198,7 +204,7 @@ def build(
             "Git repository is not clean. Commit, stash, or enable auto stash before building."
         )

-    version_path = Path("VERSION")
+    version_path = Path(package.version_path) if package.version_path else Path("VERSION")
     if version is None:
         if not version_path.exists():
             raise ReleaseError("VERSION file not found")
@@ -210,17 +216,29 @@
         version_path.write_text(version + "\n")
     else:
         # Ensure the VERSION file reflects the provided release version
+        if version_path.parent != Path("."):
+            version_path.parent.mkdir(parents=True, exist_ok=True)
         version_path.write_text(version + "\n")

+    requirements_path = (
+        Path(package.dependencies_path)
+        if package.dependencies_path
+        else Path("requirements.txt")
+    )
     requirements = [
         line.strip()
-        for line in
+        for line in requirements_path.read_text().splitlines()
         if line.strip() and not line.startswith("#")
     ]

     if tests:
         log_path = Path("logs/test.log")
-
+        test_command = (
+            shlex.split(package.test_command)
+            if package.test_command
+            else None
+        )
+        proc = run_tests(log_path=log_path, command=test_command)
         if proc.returncode != 0:
             raise TestsFailed(log_path, proc.stdout + proc.stderr)

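A minimal sketch (paths and command are hypothetical) of how the new Package fields feed build(): dataclasses.replace() keeps DEFAULT_PACKAGE's metadata while redirecting the version file, requirements file, and test runner used above.

from dataclasses import replace

from core.release import DEFAULT_PACKAGE

custom = replace(
    DEFAULT_PACKAGE,
    version_path="plugins/acme/VERSION",                # read/written instead of ./VERSION
    dependencies_path="plugins/acme/requirements.txt",  # parsed instead of requirements.txt
    test_command="pytest plugins/acme -q",              # shlex-split and handed to run_tests()
)
# Passing this Package to core.release.build() would apply the overrides above.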
core/tests.py
CHANGED
@@ -1046,7 +1046,9 @@ class ReleaseProgressFixtureVisibilityTests(TestCase):
             package=package, version=current_version
         )
         self.session_key = f"release_publish_{self.release.pk}"
-        self.log_name =
+        self.log_name = core_views._release_log_name(
+            self.release.package.name, self.release.version
+        )
         self.lock_path = Path("locks") / f"{self.session_key}.json"
         self.restart_path = Path("locks") / f"{self.session_key}.restarts"
         self.log_path = Path("logs") / self.log_name
@@ -1328,6 +1330,14 @@ class TodoDoneTests(TestCase):
         self.assertIsNotNone(todo.done_on)
         self.assertFalse(todo.is_deleted)

+    def test_mark_done_missing_task_refreshes(self):
+        todo = Todo.objects.create(request="Task", is_seed_data=True)
+        todo.delete()
+        resp = self.client.post(reverse("todo-done", args=[todo.pk]))
+        self.assertRedirects(resp, reverse("admin:index"))
+        messages = [m.message for m in get_messages(resp.wsgi_request)]
+        self.assertFalse(messages)
+
     def test_mark_done_condition_failure_shows_message(self):
         todo = Todo.objects.create(
             request="Task",