arthexis 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/METADATA +2 -2
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/RECORD +50 -44
- config/asgi.py +15 -1
- config/celery.py +8 -1
- config/settings.py +49 -78
- config/settings_helpers.py +109 -0
- core/admin.py +293 -78
- core/apps.py +21 -0
- core/auto_upgrade.py +2 -2
- core/form_fields.py +75 -0
- core/models.py +203 -47
- core/reference_utils.py +1 -1
- core/release.py +42 -20
- core/system.py +6 -3
- core/tasks.py +92 -40
- core/tests.py +75 -1
- core/views.py +178 -29
- core/widgets.py +43 -0
- nodes/admin.py +583 -10
- nodes/apps.py +15 -0
- nodes/feature_checks.py +133 -0
- nodes/models.py +287 -49
- nodes/reports.py +411 -0
- nodes/tests.py +990 -42
- nodes/urls.py +1 -0
- nodes/utils.py +32 -0
- nodes/views.py +173 -5
- ocpp/admin.py +424 -17
- ocpp/consumers.py +630 -15
- ocpp/evcs.py +7 -94
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +236 -4
- ocpp/routing.py +4 -2
- ocpp/simulator.py +346 -26
- ocpp/status_display.py +26 -0
- ocpp/store.py +110 -2
- ocpp/tests.py +1425 -33
- ocpp/transactions_io.py +27 -3
- ocpp/views.py +344 -38
- pages/admin.py +138 -3
- pages/context_processors.py +15 -1
- pages/defaults.py +1 -2
- pages/forms.py +67 -0
- pages/models.py +136 -1
- pages/tests.py +379 -4
- pages/urls.py +1 -0
- pages/views.py +64 -7
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/WHEEL +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
```diff
@@ -3,7 +3,7 @@ from django.contrib.auth.models import (
     Group,
     UserManager as DjangoUserManager,
 )
-from django.db import models
+from django.db import DatabaseError, models
 from django.db.models import Q
 from django.db.models.functions import Lower
 from django.conf import settings
```
```diff
@@ -16,6 +16,7 @@ from django.db.models.signals import m2m_changed, post_delete, post_save
 from django.dispatch import receiver
 from django.views.decorators.debug import sensitive_variables
 from datetime import time as datetime_time, timedelta
+import logging
 from django.contrib.contenttypes.models import ContentType
 import hashlib
 import os
```
```diff
@@ -37,8 +38,20 @@ from defusedxml import xmlrpc as defused_xmlrpc
 defused_xmlrpc.monkey_patch()
 xmlrpc_client = defused_xmlrpc.xmlrpc_client
 
+logger = logging.getLogger(__name__)
+
 from .entity import Entity, EntityUserManager, EntityManager
-from .release import …
+from .release import (
+    Package as ReleasePackage,
+    Credentials,
+    DEFAULT_PACKAGE,
+)
+
+
+def default_package_modules() -> list[str]:
+    """Return the default package module list."""
+
+    return list(DEFAULT_PACKAGE.packages)
 from . import temp_passwords
 from . import user_data  # noqa: F401 - ensure signal registration
 from .fields import (
```
```diff
@@ -593,6 +606,15 @@ class OdooProfile(Profile):
                 kwargs,
             )
         except Exception:
+            logger.exception(
+                "Odoo RPC %s.%s failed for profile %s (host=%s, database=%s, username=%s)",
+                model,
+                method,
+                self.pk,
+                self.host,
+                self.database,
+                self.username,
+            )
             self._clear_verification()
             self.save(update_fields=["verified_on"])
             raise
```
```diff
@@ -753,53 +775,93 @@ class EmailInbox(Profile):
         import imaplib
         import email
 
+        def _decode_imap_bytes(value):
+            if isinstance(value, bytes):
+                return value.decode("utf-8", errors="ignore")
+            return str(value)
+
         conn = (
             imaplib.IMAP4_SSL(self.host, self.port)
             if self.use_ssl
             else imaplib.IMAP4(self.host, self.port)
         )
-        …
-        if body:
-            criteria.extend(["TEXT", f'"{body}"'])
-        if not criteria:
-            criteria = ["ALL"]
-        typ, data = conn.search(None, *criteria)
-        ids = data[0].split()[-fetch_limit:]
-        messages = []
-        for mid in ids:
-            typ, msg_data = conn.fetch(mid, "(RFC822)")
-            msg = email.message_from_bytes(msg_data[0][1])
-            body_text = _get_body(msg)
-            subj_value = msg.get("Subject", "")
-            from_value = msg.get("From", "")
-            if not (
-                _matches(subj_value, subject, subject_regex)
-                and _matches(from_value, from_address, sender_regex)
-                and _matches(body_text, body, body_regex)
-            ):
-                continue
-            messages.append(
-                {
-                    "subject": subj_value,
-                    "from": from_value,
-                    "body": body_text,
-                    "date": msg.get("Date", ""),
-                }
+        try:
+            conn.login(self.username, self.password)
+            typ, data = conn.select("INBOX")
+            if typ != "OK":
+                message = " ".join(_decode_imap_bytes(item) for item in data or [])
+                if not message:
+                    message = "Unable to select INBOX"
+                raise ValidationError(message)
+
+            fetch_limit = (
+                limit if not use_regular_expressions else max(limit * 5, limit)
             )
-        if …
-        …
+            if use_regular_expressions:
+                typ, data = conn.search(None, "ALL")
+            else:
+                criteria = []
+                charset = None
+
+                def _append(term: str, value: str):
+                    nonlocal charset
+                    if not value:
+                        return
+                    try:
+                        value.encode("ascii")
+                        encoded_value = value
+                    except UnicodeEncodeError:
+                        charset = charset or "UTF-8"
+                        encoded_value = value.encode("utf-8")
+                    criteria.extend([term, encoded_value])
+
+                _append("SUBJECT", subject)
+                _append("FROM", from_address)
+                _append("TEXT", body)
+
+                if not criteria:
+                    typ, data = conn.search(None, "ALL")
+                else:
+                    typ, data = conn.search(charset, *criteria)
+
+            if typ != "OK":
+                message = " ".join(_decode_imap_bytes(item) for item in data or [])
+                if not message:
+                    message = "Unable to search mailbox"
+                raise ValidationError(message)
+
+            ids = data[0].split()[-fetch_limit:]
+            messages = []
+            for mid in ids:
+                typ, msg_data = conn.fetch(mid, "(RFC822)")
+                if typ != "OK" or not msg_data:
+                    continue
+                msg = email.message_from_bytes(msg_data[0][1])
+                body_text = _get_body(msg)
+                subj_value = msg.get("Subject", "")
+                from_value = msg.get("From", "")
+                if not (
+                    _matches(subj_value, subject, subject_regex)
+                    and _matches(from_value, from_address, sender_regex)
+                    and _matches(body_text, body, body_regex)
+                ):
+                    continue
+                messages.append(
+                    {
+                        "subject": subj_value,
+                        "from": from_value,
+                        "body": body_text,
+                        "date": msg.get("Date", ""),
+                    }
+                )
+                if len(messages) >= limit:
+                    break
+            return list(reversed(messages))
+        finally:
+            try:
+                conn.logout()
+            except Exception:  # pragma: no cover - best effort cleanup
+                pass
 
         import poplib
         import email
```
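The `_append` helper above decides per criterion whether the IMAP search can stay ASCII or needs a UTF-8 charset. A standalone sketch of that decision (the subject strings are hypothetical examples, outside the model):

```python
def encode_criterion(term: str, value: str, charset: str | None):
    """Mimic the _append logic above for a single search criterion."""
    try:
        value.encode("ascii")
        return term, value, charset  # plain ASCII: no charset needed
    except UnicodeEncodeError:
        return term, value.encode("utf-8"), charset or "UTF-8"


# ASCII subject: the criterion stays a str and the charset stays None.
assert encode_criterion("SUBJECT", "invoice", None) == ("SUBJECT", "invoice", None)

# Non-ASCII subject: the criterion becomes UTF-8 bytes and the charset flips.
term, value, charset = encode_criterion("SUBJECT", "facturación", None)
assert value == "facturación".encode("utf-8") and charset == "UTF-8"
```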
```diff
@@ -1247,11 +1309,34 @@ class RFID(Entity):
         related_name="rfids",
         help_text="Optional reference for this RFID.",
     )
+    origin_node = models.ForeignKey(
+        "nodes.Node",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        related_name="created_rfids",
+        help_text="Node where this RFID record was created.",
+    )
     released = models.BooleanField(default=False)
     added_on = models.DateTimeField(auto_now_add=True)
     last_seen_on = models.DateTimeField(null=True, blank=True)
 
     def save(self, *args, **kwargs):
+        update_fields = kwargs.get("update_fields")
+        if not self.origin_node_id:
+            try:
+                from nodes.models import Node  # imported lazily to avoid circular import
+            except Exception:  # pragma: no cover - nodes app may be unavailable
+                node = None
+            else:
+                node = Node.get_local()
+            if node:
+                self.origin_node = node
+                if update_fields:
+                    fields = set(update_fields)
+                    if "origin_node" not in fields:
+                        fields.add("origin_node")
+                        kwargs["update_fields"] = tuple(fields)
         if self.pk:
             old = type(self).objects.filter(pk=self.pk).values("key_a", "key_b").first()
             if old:
```
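A partial save such as `rfid.save(update_fields=["last_seen_on"])` would otherwise discard the lazily assigned `origin_node`; the override's bookkeeping boils down to widening the field set, roughly like this standalone sketch:

```python
def widen_update_fields(update_fields, extra="origin_node"):
    """Keep an explicit update_fields, but make sure the lazily
    assigned field is included so it is actually persisted."""
    if not update_fields:
        return update_fields  # a full save already writes every field
    fields = set(update_fields)
    fields.add(extra)
    return tuple(fields)


assert sorted(widen_update_fields(["last_seen_on"])) == ["last_seen_on", "origin_node"]
assert widen_update_fields(None) is None
```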
```diff
@@ -2301,6 +2386,9 @@ class Package(Entity):
     license = models.CharField(max_length=100, default=DEFAULT_PACKAGE.license)
     repository_url = models.URLField(default=DEFAULT_PACKAGE.repository_url)
     homepage_url = models.URLField(default=DEFAULT_PACKAGE.homepage_url)
+    version_path = models.CharField(max_length=255, blank=True, default="")
+    dependencies_path = models.CharField(max_length=255, blank=True, default="")
+    test_command = models.TextField(blank=True, default="")
     release_manager = models.ForeignKey(
         ReleaseManager, on_delete=models.SET_NULL, null=True, blank=True
     )
```
```diff
@@ -2339,12 +2427,22 @@
             license=self.license,
             repository_url=self.repository_url,
             homepage_url=self.homepage_url,
+            version_path=self.version_path or None,
+            dependencies_path=self.dependencies_path or None,
+            test_command=self.test_command or None,
         )
 
 
 class PackageRelease(Entity):
     """Store metadata for a specific package version."""
 
+    _PATCH_BITS = 12
+    _MINOR_BITS = 12
+    _PATCH_MASK = (1 << _PATCH_BITS) - 1
+    _MINOR_MASK = (1 << _MINOR_BITS) - 1
+    _MINOR_SHIFT = _PATCH_BITS
+    _MAJOR_SHIFT = _PATCH_BITS + _MINOR_BITS
+
     objects = PackageReleaseManager()
 
     def natural_key(self):
```
```diff
@@ -2412,14 +2510,18 @@
         from packaging.version import Version
 
         v = Version(self.version)
-        return ( …
+        return (
+            (v.major << self._MAJOR_SHIFT)
+            | (v.minor << self._MINOR_SHIFT)
+            | v.micro
+        )
 
     @staticmethod
     def version_from_migration(number: int) -> str:
         """Return version string encoded by ``number``."""
-        major = …
-        minor = (number >> …
-        patch = number & …
+        major = number >> PackageRelease._MAJOR_SHIFT
+        minor = (number >> PackageRelease._MINOR_SHIFT) & PackageRelease._MINOR_MASK
+        patch = number & PackageRelease._PATCH_MASK
        return f"{major}.{minor}.{patch}"
 
     @property
```
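The 12-bit fields above give minor and patch a 0–4095 range each. For reference, a standalone sketch of the same packing in plain Python (mirroring the constants from the diff, not part of the package):

```python
# Mirror of the shift/mask constants introduced on PackageRelease.
_PATCH_BITS = 12
_MINOR_BITS = 12
_PATCH_MASK = (1 << _PATCH_BITS) - 1      # 0xFFF
_MINOR_MASK = (1 << _MINOR_BITS) - 1      # 0xFFF
_MINOR_SHIFT = _PATCH_BITS                # 12
_MAJOR_SHIFT = _PATCH_BITS + _MINOR_BITS  # 24


def encode(major: int, minor: int, patch: int) -> int:
    """Pack a version into a single migration number."""
    return (major << _MAJOR_SHIFT) | (minor << _MINOR_SHIFT) | patch


def decode(number: int) -> str:
    """Unpack a migration number back into a version string."""
    major = number >> _MAJOR_SHIFT
    minor = (number >> _MINOR_SHIFT) & _MINOR_MASK
    patch = number & _PATCH_MASK
    return f"{major}.{minor}.{patch}"


assert encode(0, 1, 13) == 4109  # 0.1.13 -> (0 << 24) | (1 << 12) | 13
assert decode(4109) == "0.1.13"
```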
```diff
@@ -2447,6 +2549,40 @@
             return None
         return max(releases, key=lambda r: Version(r.version))
 
+    @classmethod
+    def matches_revision(cls, version: str, revision: str) -> bool:
+        """Return ``True`` when *revision* matches the stored release revision.
+
+        When the release metadata cannot be retrieved (for example during
+        database initialization), the method optimistically returns ``True`` so
+        callers continue operating without raising secondary errors.
+        """
+
+        version = (version or "").strip()
+        revision = (revision or "").strip()
+        if not version or not revision:
+            return True
+
+        try:
+            queryset = cls.objects.filter(version=version)
+            release_revision = (
+                queryset.filter(package__is_active=True)
+                .values_list("revision", flat=True)
+                .first()
+            )
+            if release_revision is None:
+                release_revision = queryset.values_list("revision", flat=True).first()
+        except DatabaseError:  # pragma: no cover - depends on DB availability
+            logger.debug(
+                "PackageRelease.matches_revision skipped: database unavailable",
+                exc_info=True,
+            )
+            return True
+
+        if not release_revision:
+            return True
+        return release_revision.strip() == revision
+
     def build(self, **kwargs) -> None:
         """Wrapper around :func:`core.release.build` for convenience."""
         from . import release as release_utils
```
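A sketch of how a caller might use the new classmethod during a startup check, inside a configured Django context (the `installed_version`/`installed_revision` values are hypothetical placeholders):

```python
from core.models import PackageRelease

installed_version = "0.1.13"    # e.g. read from the VERSION file
installed_revision = "abc1234"  # e.g. the currently checked-out git revision

# Returns True when the revisions agree, when either value is blank,
# or when the database cannot be queried yet (optimistic fallback).
if not PackageRelease.matches_revision(installed_version, installed_revision):
    print("Installed code does not match the recorded release revision.")
```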
```diff
@@ -2637,3 +2773,23 @@ class Todo(Entity):
         if isinstance(field, ConditionTextField):
             return field.evaluate(self)
         return ConditionCheckResult(True, "")
+
+
+class TOTPDeviceSettings(models.Model):
+    """Per-device configuration options for authenticator enrollments."""
+
+    device = models.OneToOneField(
+        "otp_totp.TOTPDevice",
+        on_delete=models.CASCADE,
+        related_name="custom_settings",
+    )
+    issuer = models.CharField(
+        max_length=64,
+        blank=True,
+        default="",
+        help_text=_("Label shown in authenticator apps. Leave blank to use Arthexis."),
+    )
+
+    class Meta:
+        verbose_name = _("Authenticator device settings")
+        verbose_name_plural = _("Authenticator device settings")
```
core/reference_utils.py
CHANGED
core/release.py
CHANGED
```diff
@@ -1,12 +1,13 @@
 from __future__ import annotations
 
 import os
+import shlex
+import shutil
 import subprocess
 import sys
-import shutil
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Sequence
 
 try:  # pragma: no cover - optional dependency
     import toml  # type: ignore
```
```diff
@@ -16,6 +17,15 @@ except Exception:  # pragma: no cover - fallback when missing
 from config.offline import requires_network, network_available
 
 
+DEFAULT_PACKAGE_MODULES = [
+    "core",
+    "config",
+    "nodes",
+    "ocpp",
+    "pages",
+]
+
+
 @dataclass
 class Package:
     """Metadata for building a distributable package."""
```
```diff
@@ -28,6 +38,10 @@ class Package:
     license: str
     repository_url: str = "https://github.com/arthexis/arthexis"
     homepage_url: str = "https://arthexis.com"
+    packages: Sequence[str] = tuple(DEFAULT_PACKAGE_MODULES)
+    version_path: Optional[Path | str] = None
+    dependencies_path: Optional[Path | str] = None
+    test_command: Optional[str] = None
 
 
 @dataclass
```
```diff
@@ -104,7 +118,10 @@ def _manager_credentials() -> Optional[Credentials]:
     return None
 
 
-def run_tests(log_path: Optional[Path] = None) -> subprocess.CompletedProcess:
+def run_tests(
+    log_path: Optional[Path] = None,
+    command: Optional[Sequence[str]] = None,
+) -> subprocess.CompletedProcess:
     """Run the project's test suite and write output to ``log_path``.
 
     The log file is stored separately from regular application logs to avoid
```
```diff
@@ -112,11 +129,8 @@ def run_tests(log_path: Optional[Path] = None) -> subprocess.CompletedProcess:
     """
 
     log_path = log_path or Path("logs/test.log")
-    proc = subprocess.run(
-        [sys.executable, "manage.py", "test"],
-        capture_output=True,
-        text=True,
-    )
+    cmd = list(command) if command is not None else [sys.executable, "manage.py", "test"]
+    proc = subprocess.run(cmd, capture_output=True, text=True)
     log_path.parent.mkdir(parents=True, exist_ok=True)
     log_path.write_text(proc.stdout + proc.stderr, encoding="utf-8")
     return proc
```
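With the new `command` parameter, callers can swap the default `manage.py test` invocation for any argv sequence. A usage sketch (the `pytest` command here is only an illustrative choice, not something the package mandates):

```python
from pathlib import Path

from core.release import run_tests

# Default behaviour: runs `python manage.py test` and writes logs/test.log.
proc = run_tests()

# Custom runner: any argv sequence works; output still lands in the log file.
proc = run_tests(
    log_path=Path("logs/test.log"),
    command=["pytest", "-q"],  # illustrative only
)
if proc.returncode != 0:
    print(proc.stdout + proc.stderr)
```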
```diff
@@ -147,15 +161,7 @@ def _write_pyproject(package: Package, version: str, requirements: list[str]) ->
             },
         },
         "tool": {
-            "setuptools": {
-                "packages": [
-                    "core",
-                    "config",
-                    "nodes",
-                    "ocpp",
-                    "pages",
-                ]
-            }
+            "setuptools": {"packages": list(package.packages)}
         },
     }
```
```diff
@@ -198,7 +204,7 @@ def build(
             "Git repository is not clean. Commit, stash, or enable auto stash before building."
         )
 
-    version_path = Path("VERSION")
+    version_path = Path(package.version_path) if package.version_path else Path("VERSION")
     if version is None:
         if not version_path.exists():
             raise ReleaseError("VERSION file not found")
```
```diff
@@ -210,17 +216,29 @@
         version_path.write_text(version + "\n")
     else:
         # Ensure the VERSION file reflects the provided release version
+        if version_path.parent != Path("."):
+            version_path.parent.mkdir(parents=True, exist_ok=True)
         version_path.write_text(version + "\n")
 
+    requirements_path = (
+        Path(package.dependencies_path)
+        if package.dependencies_path
+        else Path("requirements.txt")
+    )
     requirements = [
         line.strip()
-        for line in Path("requirements.txt").read_text().splitlines()
+        for line in requirements_path.read_text().splitlines()
         if line.strip() and not line.startswith("#")
     ]
 
     if tests:
         log_path = Path("logs/test.log")
-        …
+        test_command = (
+            shlex.split(package.test_command)
+            if package.test_command
+            else None
+        )
+        proc = run_tests(log_path=log_path, command=test_command)
         if proc.returncode != 0:
             raise TestsFailed(log_path, proc.stdout + proc.stderr)
 
```
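The configured `test_command` string is split with `shlex.split` before being handed to `run_tests`, so shell-style quoting works. A quick sketch of that conversion (the command strings are made-up examples):

```python
import shlex

# An empty or unset test_command keeps the default manage.py test runner.
assert shlex.split("pytest -x --maxfail=1") == ["pytest", "-x", "--maxfail=1"]

# Shell-style quoting survives the split, so arguments with spaces are safe.
assert shlex.split('python -m pytest -k "charger and not slow"') == [
    "python", "-m", "pytest", "-k", "charger and not slow",
]
```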
```diff
@@ -344,3 +362,7 @@
     proc = subprocess.run(cmd, capture_output=True, text=True)
     if proc.returncode != 0:
         raise ReleaseError(proc.stdout + proc.stderr)
+
+    tag_name = f"v{version}"
+    _run(["git", "tag", tag_name])
+    _run(["git", "push", "origin", tag_name])
```
core/system.py
CHANGED
```diff
@@ -119,7 +119,7 @@ def _auto_upgrade_next_check() -> str:
 
 
 def _resolve_auto_upgrade_namespace(key: str) -> str | None:
-    """Resolve sigils within the ``AUTO-UPGRADE`` namespace."""
+    """Resolve sigils within the legacy ``AUTO-UPGRADE`` namespace."""
 
     normalized = key.replace("-", "_").upper()
     if normalized == "NEXT_CHECK":
```
```diff
@@ -137,6 +137,9 @@ def resolve_system_namespace_value(key: str) -> str | None:
 
     if not key:
         return None
+    normalized_key = key.replace("-", "_").upper()
+    if normalized_key == "NEXT_VER_CHECK":
+        return _auto_upgrade_next_check()
     namespace, _, remainder = key.partition(".")
     if not remainder:
         return None
```
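With this change the system namespace answers the new spelling directly, before any `namespace.key` parsing. A usage sketch (assuming the function is imported from `core.system`, the file shown here):

```python
from core.system import resolve_system_namespace_value

# "NEXT-VER-CHECK" is normalized to NEXT_VER_CHECK and short-circuits to the
# auto-upgrade schedule lookup.
next_check = resolve_system_namespace_value("NEXT-VER-CHECK")
print(next_check or "no auto-upgrade check scheduled")
```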
```diff
@@ -218,8 +221,8 @@ def _build_system_fields(info: dict[str, object]) -> list[SystemField]:
     )
 
     add_field(
-        _("Next …
-        "…
+        _("Next version check"),
+        "NEXT-VER-CHECK",
         info.get("auto_upgrade_next_check", ""),
     )
```