arthexis 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic. Click here for more details.
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/METADATA +36 -26
- arthexis-0.1.12.dist-info/RECORD +102 -0
- config/context_processors.py +1 -0
- config/settings.py +31 -5
- config/urls.py +5 -4
- core/admin.py +430 -90
- core/apps.py +48 -2
- core/backends.py +38 -0
- core/environment.py +23 -5
- core/mailer.py +3 -1
- core/models.py +303 -31
- core/reference_utils.py +20 -9
- core/release.py +4 -0
- core/sigil_builder.py +7 -2
- core/sigil_resolver.py +35 -4
- core/system.py +250 -1
- core/tasks.py +92 -40
- core/temp_passwords.py +181 -0
- core/test_system_info.py +62 -2
- core/tests.py +169 -3
- core/user_data.py +51 -8
- core/views.py +371 -20
- nodes/admin.py +453 -8
- nodes/backends.py +21 -6
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/models.py +374 -31
- nodes/reports.py +411 -0
- nodes/tests.py +677 -38
- nodes/utils.py +32 -0
- nodes/views.py +14 -0
- ocpp/admin.py +278 -15
- ocpp/consumers.py +517 -16
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +237 -4
- ocpp/reference_utils.py +42 -0
- ocpp/simulator.py +321 -22
- ocpp/store.py +110 -2
- ocpp/test_rfid.py +169 -7
- ocpp/tests.py +819 -6
- ocpp/transactions_io.py +17 -3
- ocpp/views.py +233 -19
- pages/admin.py +144 -4
- pages/context_processors.py +21 -7
- pages/defaults.py +13 -0
- pages/forms.py +38 -0
- pages/models.py +189 -15
- pages/tests.py +281 -8
- pages/urls.py +4 -0
- pages/views.py +137 -21
- arthexis-0.1.10.dist-info/RECORD +0 -95
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/WHEEL +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/top_level.txt +0 -0
core/temp_passwords.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"""Utilities for temporary password lock files."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import re
|
|
8
|
+
import secrets
|
|
9
|
+
import string
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
from django.conf import settings
|
|
16
|
+
from django.contrib.auth.hashers import check_password, make_password
|
|
17
|
+
from django.utils import timezone
|
|
18
|
+
from django.utils.dateparse import parse_datetime
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
DEFAULT_PASSWORD_LENGTH = 16
|
|
22
|
+
DEFAULT_EXPIRATION = timedelta(hours=1)
|
|
23
|
+
_SAFE_COMPONENT_RE = re.compile(r"[^A-Za-z0-9_.-]+")
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _base_lock_dir() -> Path:
    """Return the root directory used for temporary password lock files.

    Honours the ``TEMP_PASSWORD_LOCK_DIR`` setting when present; otherwise
    falls back to ``<BASE_DIR>/locks/temp-passwords``.  The directory is
    created on demand so callers can write into it immediately.
    """
    override = getattr(settings, "TEMP_PASSWORD_LOCK_DIR", None)
    root = (
        Path(override)
        if override
        else Path(settings.BASE_DIR) / "locks" / "temp-passwords"
    )
    root.mkdir(parents=True, exist_ok=True)
    return root
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _safe_component(value: str) -> str:
|
|
39
|
+
"""Return a filesystem safe component derived from ``value``."""
|
|
40
|
+
|
|
41
|
+
if not value:
|
|
42
|
+
return ""
|
|
43
|
+
safe = _SAFE_COMPONENT_RE.sub("_", value)
|
|
44
|
+
safe = safe.strip("._")
|
|
45
|
+
return safe[:64]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _lockfile_name(username: str) -> str:
    """Return the filename used for the provided ``username``.

    Combines a sanitized form of the username with a short SHA-256 digest so
    distinct usernames never collide even when sanitizing erases everything;
    in that case the generic ``user`` prefix is used.
    """
    digest = hashlib.sha256(username.encode("utf-8")).hexdigest()[:12]
    prefix = _safe_component(username) or "user"
    return f"{prefix}-{digest}.json"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _lockfile_path(username: str) -> Path:
    """Return the lockfile path for ``username`` inside the lock directory."""
    directory = _base_lock_dir()
    return directory / _lockfile_name(username)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _parse_timestamp(value: str | None) -> Optional[datetime]:
    """Return a timezone aware datetime parsed from ``value``.

    ``None`` is returned for empty input or strings Django cannot parse.
    Naive datetimes are promoted to aware ones via ``timezone.make_aware``.
    """
    if not value:
        return None
    stamp = parse_datetime(value)
    if stamp is None:
        return None
    return timezone.make_aware(stamp) if timezone.is_naive(stamp) else stamp
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@dataclass(frozen=True)
class TempPasswordEntry:
    """Details for a temporary password stored on disk.

    Mirrors the JSON payload written by ``store_temp_password``: the owning
    username, the hashed password, the creation and expiration timestamps,
    the lockfile location, and whether the user may change the password.
    """

    # Field order is part of the public constructor signature — keep stable.
    username: str
    password_hash: str
    expires_at: datetime
    created_at: datetime
    path: Path
    allow_change: bool = False

    @property
    def is_expired(self) -> bool:
        """Return ``True`` once the expiration moment has been reached."""
        now = timezone.now()
        return now >= self.expires_at

    def check_password(self, raw_password: str) -> bool:
        """Return ``True`` if ``raw_password`` matches this entry's hash."""
        return check_password(raw_password, self.password_hash)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def generate_password(length: int = DEFAULT_PASSWORD_LENGTH) -> str:
    """Return a random password composed of letters and digits.

    Args:
        length: Number of characters to generate; must be positive.

    Raises:
        ValueError: If ``length`` is zero or negative.
    """
    if length <= 0:
        raise ValueError("length must be a positive integer")
    pool = string.ascii_letters + string.digits
    # secrets (not random) keeps the password cryptographically unpredictable.
    picks = [secrets.choice(pool) for _ in range(length)]
    return "".join(picks)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def store_temp_password(
    username: str,
    raw_password: str,
    expires_at: Optional[datetime] = None,
    *,
    allow_change: bool = False,
) -> TempPasswordEntry:
    """Persist a temporary password for ``username`` and return the entry.

    Args:
        username: Account the temporary password belongs to.
        raw_password: Plain-text password; only its hash is written to disk.
        expires_at: Expiration moment; defaults to now plus
            ``DEFAULT_EXPIRATION``.  Naive values are promoted to aware ones.
        allow_change: Whether the user may change the password afterwards.
    """
    if expires_at is None:
        # timezone.now() is already aware, so no make_aware needed here.
        expires_at = timezone.now() + DEFAULT_EXPIRATION
    elif timezone.is_naive(expires_at):
        expires_at = timezone.make_aware(expires_at)
    created_at = timezone.now()
    target = _lockfile_path(username)
    payload = {
        "allow_change": allow_change,
        "created_at": created_at.isoformat(),
        "expires_at": expires_at.isoformat(),
        # Only the hash ever touches disk; the raw password is discarded.
        "password_hash": make_password(raw_password),
        "username": username,
    }
    target.write_text(json.dumps(payload, indent=2, sort_keys=True))
    return TempPasswordEntry(
        username=username,
        password_hash=payload["password_hash"],
        expires_at=expires_at,
        created_at=created_at,
        path=target,
        allow_change=allow_change,
    )
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def load_temp_password(username: str) -> Optional[TempPasswordEntry]:
    """Return the stored temporary password for ``username``, if any.

    Corrupt or incomplete lockfiles (bad JSON, non-object payload, missing
    hash or expiration) are deleted and treated as absent.  Returns ``None``
    when no usable entry exists.
    """
    path = _lockfile_path(username)
    # EAFP: read directly instead of exists()+read_text(), which is racy when
    # another process discards the lockfile between the two calls.
    try:
        raw = path.read_text()
    except FileNotFoundError:
        return None
    except OSError:
        # Unreadable file (permissions, transient I/O error): treat as absent
        # but keep the file, so a valid entry is not lost to a passing fault.
        return None
    try:
        data = json.loads(raw)
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Corrupt payload: discard so it cannot shadow a future password.
        path.unlink(missing_ok=True)
        return None
    if not isinstance(data, dict):
        # A JSON array/scalar would crash the .get() calls below — treat any
        # non-object payload as corrupt and remove it.
        path.unlink(missing_ok=True)
        return None

    expires_at = _parse_timestamp(data.get("expires_at"))
    created_at = _parse_timestamp(data.get("created_at")) or timezone.now()
    password_hash = data.get("password_hash")
    if not expires_at or not password_hash:
        # Required fields missing or unparsable: the entry is unusable.
        path.unlink(missing_ok=True)
        return None

    username = data.get("username") or username
    allow_change_value = data.get("allow_change", False)
    if isinstance(allow_change_value, str):
        # Accept common truthy spellings so hand-edited lockfiles behave.
        allow_change = allow_change_value.lower() in {"1", "true", "yes", "on"}
    else:
        allow_change = bool(allow_change_value)

    return TempPasswordEntry(
        username=username,
        password_hash=password_hash,
        expires_at=expires_at,
        created_at=created_at,
        path=path,
        allow_change=allow_change,
    )
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def discard_temp_password(username: str) -> None:
    """Remove any stored temporary password for ``username``.

    A no-op when no lockfile exists (``missing_ok=True``).
    """
    _lockfile_path(username).unlink(missing_ok=True)
|
|
181
|
+
|
core/test_system_info.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import json
|
|
1
2
|
import os
|
|
2
3
|
from pathlib import Path
|
|
3
4
|
from subprocess import CompletedProcess
|
|
@@ -11,9 +12,9 @@ import django
|
|
|
11
12
|
django.setup()
|
|
12
13
|
|
|
13
14
|
from django.conf import settings
|
|
14
|
-
from django.test import SimpleTestCase,
|
|
15
|
+
from django.test import SimpleTestCase, override_settings
|
|
15
16
|
from nodes.models import Node, NodeFeature, NodeRole
|
|
16
|
-
from core.system import _gather_info
|
|
17
|
+
from core.system import _gather_info, get_system_sigil_values
|
|
17
18
|
|
|
18
19
|
|
|
19
20
|
class SystemInfoRoleTests(SimpleTestCase):
|
|
@@ -55,6 +56,27 @@ class SystemInfoRevisionTests(SimpleTestCase):
|
|
|
55
56
|
mock_revision.assert_called_once()
|
|
56
57
|
|
|
57
58
|
|
|
59
|
+
class SystemInfoDatabaseTests(SimpleTestCase):
|
|
60
|
+
def test_collects_database_definitions(self):
|
|
61
|
+
info = _gather_info()
|
|
62
|
+
self.assertIn("databases", info)
|
|
63
|
+
aliases = {entry["alias"] for entry in info["databases"]}
|
|
64
|
+
self.assertIn("default", aliases)
|
|
65
|
+
|
|
66
|
+
@override_settings(
|
|
67
|
+
DATABASES={
|
|
68
|
+
"default": {
|
|
69
|
+
"ENGINE": "django.db.backends.sqlite3",
|
|
70
|
+
"NAME": Path("/tmp/db.sqlite3"),
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
)
|
|
74
|
+
def test_serializes_path_database_names(self):
|
|
75
|
+
info = _gather_info()
|
|
76
|
+
databases = info["databases"]
|
|
77
|
+
self.assertEqual(databases[0]["name"], "/tmp/db.sqlite3")
|
|
78
|
+
|
|
79
|
+
|
|
58
80
|
class SystemInfoRunserverDetectionTests(SimpleTestCase):
|
|
59
81
|
@patch("core.system.subprocess.run")
|
|
60
82
|
def test_detects_runserver_process_port(self, mock_run):
|
|
@@ -77,3 +99,41 @@ class SystemInfoRunserverDetectionTests(SimpleTestCase):
|
|
|
77
99
|
self.assertTrue(info["running"])
|
|
78
100
|
self.assertEqual(info["port"], 8000)
|
|
79
101
|
|
|
102
|
+
|
|
103
|
+
class SystemSigilValueTests(SimpleTestCase):
|
|
104
|
+
def test_exports_values_for_sigil_resolution(self):
|
|
105
|
+
sample_info = {
|
|
106
|
+
"installed": True,
|
|
107
|
+
"revision": "abcdef",
|
|
108
|
+
"service": "gunicorn",
|
|
109
|
+
"mode": "internal",
|
|
110
|
+
"port": 8888,
|
|
111
|
+
"role": "Terminal",
|
|
112
|
+
"screen_mode": "",
|
|
113
|
+
"features": [
|
|
114
|
+
{"display": "Feature", "expected": True, "actual": False, "slug": "feature"}
|
|
115
|
+
],
|
|
116
|
+
"running": True,
|
|
117
|
+
"service_status": "active",
|
|
118
|
+
"hostname": "example.local",
|
|
119
|
+
"ip_addresses": ["127.0.0.1"],
|
|
120
|
+
"databases": [
|
|
121
|
+
{
|
|
122
|
+
"alias": "default",
|
|
123
|
+
"engine": "django.db.backends.sqlite3",
|
|
124
|
+
"name": "db.sqlite3",
|
|
125
|
+
}
|
|
126
|
+
],
|
|
127
|
+
}
|
|
128
|
+
with patch("core.system._gather_info", return_value=sample_info):
|
|
129
|
+
values = get_system_sigil_values()
|
|
130
|
+
|
|
131
|
+
self.assertEqual(values["REVISION"], "abcdef")
|
|
132
|
+
self.assertEqual(values["RUNNING"], "True")
|
|
133
|
+
self.assertEqual(values["NGINX_MODE"], "internal (8888)")
|
|
134
|
+
self.assertEqual(values["IP_ADDRESSES"], "127.0.0.1")
|
|
135
|
+
features = json.loads(values["FEATURES"])
|
|
136
|
+
self.assertEqual(features[0]["display"], "Feature")
|
|
137
|
+
databases = json.loads(values["DATABASES"])
|
|
138
|
+
self.assertEqual(databases[0]["alias"], "default")
|
|
139
|
+
|
core/tests.py
CHANGED
|
@@ -15,7 +15,7 @@ from unittest.mock import patch
|
|
|
15
15
|
from pathlib import Path
|
|
16
16
|
import subprocess
|
|
17
17
|
from glob import glob
|
|
18
|
-
from datetime import timedelta
|
|
18
|
+
from datetime import datetime, timedelta, timezone as datetime_timezone
|
|
19
19
|
import tempfile
|
|
20
20
|
from urllib.parse import quote
|
|
21
21
|
|
|
@@ -770,6 +770,50 @@ class ReleaseProcessTests(TestCase):
|
|
|
770
770
|
self.assertFalse(proc.stdout.strip())
|
|
771
771
|
self.assertEqual(version_path.read_text(encoding="utf-8"), original)
|
|
772
772
|
|
|
773
|
+
@mock.patch("core.views.requests.get")
|
|
774
|
+
@mock.patch("core.views.release_utils.network_available", return_value=True)
|
|
775
|
+
@mock.patch("core.views.release_utils._git_clean", return_value=True)
|
|
776
|
+
def test_step_check_ignores_yanked_release(
|
|
777
|
+
self, git_clean, network_available, requests_get
|
|
778
|
+
):
|
|
779
|
+
response = mock.Mock()
|
|
780
|
+
response.ok = True
|
|
781
|
+
response.json.return_value = {
|
|
782
|
+
"releases": {
|
|
783
|
+
"0.1.12": [
|
|
784
|
+
{"filename": "pkg.whl", "yanked": True},
|
|
785
|
+
{"filename": "pkg.tar.gz", "yanked": True},
|
|
786
|
+
]
|
|
787
|
+
}
|
|
788
|
+
}
|
|
789
|
+
requests_get.return_value = response
|
|
790
|
+
self.release.version = "0.1.12"
|
|
791
|
+
_step_check_version(self.release, {}, Path("rel.log"))
|
|
792
|
+
requests_get.assert_called_once()
|
|
793
|
+
|
|
794
|
+
@mock.patch("core.views.requests.get")
|
|
795
|
+
@mock.patch("core.views.release_utils.network_available", return_value=True)
|
|
796
|
+
@mock.patch("core.views.release_utils._git_clean", return_value=True)
|
|
797
|
+
def test_step_check_blocks_available_release(
|
|
798
|
+
self, git_clean, network_available, requests_get
|
|
799
|
+
):
|
|
800
|
+
response = mock.Mock()
|
|
801
|
+
response.ok = True
|
|
802
|
+
response.json.return_value = {
|
|
803
|
+
"releases": {
|
|
804
|
+
"0.1.12": [
|
|
805
|
+
{"filename": "pkg.whl", "yanked": False},
|
|
806
|
+
{"filename": "pkg.tar.gz"},
|
|
807
|
+
]
|
|
808
|
+
}
|
|
809
|
+
}
|
|
810
|
+
requests_get.return_value = response
|
|
811
|
+
self.release.version = "0.1.12"
|
|
812
|
+
with self.assertRaises(Exception) as exc:
|
|
813
|
+
_step_check_version(self.release, {}, Path("rel.log"))
|
|
814
|
+
self.assertIn("already on PyPI", str(exc.exception))
|
|
815
|
+
requests_get.assert_called_once()
|
|
816
|
+
|
|
773
817
|
@mock.patch("core.models.PackageRelease.dump_fixture")
|
|
774
818
|
def test_save_does_not_dump_fixture(self, dump):
|
|
775
819
|
self.release.pypi_url = "https://example.com"
|
|
@@ -831,15 +875,21 @@ class ReleaseProcessTests(TestCase):
|
|
|
831
875
|
)
|
|
832
876
|
version_path.write_text(original, encoding="utf-8")
|
|
833
877
|
|
|
878
|
+
@mock.patch("core.views.timezone.now")
|
|
834
879
|
@mock.patch("core.views.PackageRelease.dump_fixture")
|
|
835
880
|
@mock.patch("core.views.release_utils.publish")
|
|
836
|
-
def test_publish_sets_pypi_url(self, publish, dump_fixture):
|
|
881
|
+
def test_publish_sets_pypi_url(self, publish, dump_fixture, now):
|
|
882
|
+
now.return_value = datetime(2025, 3, 4, 5, 6, tzinfo=datetime_timezone.utc)
|
|
837
883
|
_step_publish(self.release, {}, Path("rel.log"))
|
|
838
884
|
self.release.refresh_from_db()
|
|
839
885
|
self.assertEqual(
|
|
840
886
|
self.release.pypi_url,
|
|
841
887
|
f"https://pypi.org/project/{self.package.name}/{self.release.version}/",
|
|
842
888
|
)
|
|
889
|
+
self.assertEqual(
|
|
890
|
+
self.release.release_on,
|
|
891
|
+
datetime(2025, 3, 4, 5, 6, tzinfo=datetime_timezone.utc),
|
|
892
|
+
)
|
|
843
893
|
dump_fixture.assert_called_once()
|
|
844
894
|
|
|
845
895
|
@mock.patch("core.views.PackageRelease.dump_fixture")
|
|
@@ -849,8 +899,33 @@ class ReleaseProcessTests(TestCase):
|
|
|
849
899
|
_step_publish(self.release, {}, Path("rel.log"))
|
|
850
900
|
self.release.refresh_from_db()
|
|
851
901
|
self.assertEqual(self.release.pypi_url, "")
|
|
902
|
+
self.assertIsNone(self.release.release_on)
|
|
852
903
|
dump_fixture.assert_not_called()
|
|
853
904
|
|
|
905
|
+
def test_new_todo_does_not_reset_pending_flow(self):
|
|
906
|
+
user = User.objects.create_superuser("admin", "admin@example.com", "pw")
|
|
907
|
+
url = reverse("release-progress", args=[self.release.pk, "publish"])
|
|
908
|
+
Todo.objects.create(request="Initial checklist item")
|
|
909
|
+
steps = [("Confirm release TODO completion", core_views._step_check_todos)]
|
|
910
|
+
with mock.patch("core.views.PUBLISH_STEPS", steps):
|
|
911
|
+
self.client.force_login(user)
|
|
912
|
+
response = self.client.get(url)
|
|
913
|
+
self.assertTrue(response.context["has_pending_todos"])
|
|
914
|
+
self.client.get(f"{url}?ack_todos=1")
|
|
915
|
+
self.client.get(f"{url}?start=1")
|
|
916
|
+
self.client.get(f"{url}?step=0")
|
|
917
|
+
Todo.objects.create(request="Follow-up checklist item")
|
|
918
|
+
response = self.client.get(url)
|
|
919
|
+
self.assertEqual(
|
|
920
|
+
Todo.objects.filter(is_deleted=False, done_on__isnull=True).count(),
|
|
921
|
+
1,
|
|
922
|
+
)
|
|
923
|
+
self.assertIsNone(response.context["todos"])
|
|
924
|
+
self.assertFalse(response.context["has_pending_todos"])
|
|
925
|
+
session = self.client.session
|
|
926
|
+
ctx = session.get(f"release_publish_{self.release.pk}")
|
|
927
|
+
self.assertTrue(ctx.get("todos_ack"))
|
|
928
|
+
|
|
854
929
|
def test_release_progress_uses_lockfile(self):
|
|
855
930
|
run = []
|
|
856
931
|
|
|
@@ -1044,6 +1119,8 @@ class PackageReleaseAdminActionTests(TestCase):
|
|
|
1044
1119
|
self.admin = PackageReleaseAdmin(PackageRelease, self.site)
|
|
1045
1120
|
self.admin.message_user = lambda *args, **kwargs: None
|
|
1046
1121
|
self.package = Package.objects.create(name="pkg")
|
|
1122
|
+
self.package.is_active = True
|
|
1123
|
+
self.package.save(update_fields=["is_active"])
|
|
1047
1124
|
self.release = PackageRelease.objects.create(
|
|
1048
1125
|
package=self.package,
|
|
1049
1126
|
version="1.0.0",
|
|
@@ -1072,11 +1149,64 @@ class PackageReleaseAdminActionTests(TestCase):
|
|
|
1072
1149
|
def test_refresh_from_pypi_creates_releases(self, mock_get, dump):
|
|
1073
1150
|
mock_get.return_value.raise_for_status.return_value = None
|
|
1074
1151
|
mock_get.return_value.json.return_value = {
|
|
1075
|
-
"releases": {
|
|
1152
|
+
"releases": {
|
|
1153
|
+
"1.0.0": [
|
|
1154
|
+
{"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
|
|
1155
|
+
],
|
|
1156
|
+
"1.1.0": [
|
|
1157
|
+
{"upload_time_iso_8601": "2024-02-02T15:45:00.000000Z"}
|
|
1158
|
+
],
|
|
1159
|
+
}
|
|
1076
1160
|
}
|
|
1077
1161
|
self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
|
|
1078
1162
|
new_release = PackageRelease.objects.get(version="1.1.0")
|
|
1079
1163
|
self.assertEqual(new_release.revision, "")
|
|
1164
|
+
self.assertEqual(
|
|
1165
|
+
new_release.release_on,
|
|
1166
|
+
datetime(2024, 2, 2, 15, 45, tzinfo=datetime_timezone.utc),
|
|
1167
|
+
)
|
|
1168
|
+
dump.assert_called_once()
|
|
1169
|
+
|
|
1170
|
+
@mock.patch("core.admin.PackageRelease.dump_fixture")
|
|
1171
|
+
@mock.patch("core.admin.requests.get")
|
|
1172
|
+
def test_refresh_from_pypi_updates_release_date(self, mock_get, dump):
|
|
1173
|
+
self.release.release_on = None
|
|
1174
|
+
self.release.save(update_fields=["release_on"])
|
|
1175
|
+
mock_get.return_value.raise_for_status.return_value = None
|
|
1176
|
+
mock_get.return_value.json.return_value = {
|
|
1177
|
+
"releases": {
|
|
1178
|
+
"1.0.0": [
|
|
1179
|
+
{"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
|
|
1180
|
+
]
|
|
1181
|
+
}
|
|
1182
|
+
}
|
|
1183
|
+
self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
|
|
1184
|
+
self.release.refresh_from_db()
|
|
1185
|
+
self.assertEqual(
|
|
1186
|
+
self.release.release_on,
|
|
1187
|
+
datetime(2024, 1, 1, 12, 30, tzinfo=datetime_timezone.utc),
|
|
1188
|
+
)
|
|
1189
|
+
dump.assert_called_once()
|
|
1190
|
+
|
|
1191
|
+
@mock.patch("core.admin.PackageRelease.dump_fixture")
|
|
1192
|
+
@mock.patch("core.admin.requests.get")
|
|
1193
|
+
def test_refresh_from_pypi_restores_deleted_release(self, mock_get, dump):
|
|
1194
|
+
self.release.is_deleted = True
|
|
1195
|
+
self.release.save(update_fields=["is_deleted"])
|
|
1196
|
+
mock_get.return_value.raise_for_status.return_value = None
|
|
1197
|
+
mock_get.return_value.json.return_value = {
|
|
1198
|
+
"releases": {
|
|
1199
|
+
"1.0.0": [
|
|
1200
|
+
{"upload_time_iso_8601": "2024-01-01T12:30:00.000000Z"}
|
|
1201
|
+
]
|
|
1202
|
+
}
|
|
1203
|
+
}
|
|
1204
|
+
|
|
1205
|
+
self.admin.refresh_from_pypi(self.request, PackageRelease.objects.none())
|
|
1206
|
+
|
|
1207
|
+
self.assertTrue(
|
|
1208
|
+
PackageRelease.objects.filter(version="1.0.0").exists()
|
|
1209
|
+
)
|
|
1080
1210
|
dump.assert_called_once()
|
|
1081
1211
|
|
|
1082
1212
|
|
|
@@ -1283,6 +1413,13 @@ class TodoFocusViewTests(TestCase):
|
|
|
1283
1413
|
change_url = reverse("admin:core_todo_change", args=[todo.pk])
|
|
1284
1414
|
self.assertContains(resp, f'src="{change_url}"')
|
|
1285
1415
|
|
|
1416
|
+
def test_focus_view_includes_open_target_button(self):
|
|
1417
|
+
todo = Todo.objects.create(request="Task", url="/docs/")
|
|
1418
|
+
resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
|
|
1419
|
+
self.assertContains(resp, 'class="todo-button todo-button-open"')
|
|
1420
|
+
self.assertContains(resp, 'target="_blank"')
|
|
1421
|
+
self.assertContains(resp, 'href="/docs/"')
|
|
1422
|
+
|
|
1286
1423
|
def test_focus_view_sanitizes_loopback_absolute_url(self):
|
|
1287
1424
|
todo = Todo.objects.create(
|
|
1288
1425
|
request="Task",
|
|
@@ -1300,6 +1437,35 @@ class TodoFocusViewTests(TestCase):
|
|
|
1300
1437
|
change_url = reverse("admin:core_todo_change", args=[todo.pk])
|
|
1301
1438
|
self.assertContains(resp, f'src="{change_url}"')
|
|
1302
1439
|
|
|
1440
|
+
def test_focus_view_avoids_recursive_focus_url(self):
|
|
1441
|
+
todo = Todo.objects.create(request="Task")
|
|
1442
|
+
focus_url = reverse("todo-focus", args=[todo.pk])
|
|
1443
|
+
Todo.objects.filter(pk=todo.pk).update(url=focus_url)
|
|
1444
|
+
resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
|
|
1445
|
+
change_url = reverse("admin:core_todo_change", args=[todo.pk])
|
|
1446
|
+
self.assertContains(resp, f'src="{change_url}"')
|
|
1447
|
+
|
|
1448
|
+
def test_focus_view_avoids_recursive_focus_absolute_url(self):
|
|
1449
|
+
todo = Todo.objects.create(request="Task")
|
|
1450
|
+
focus_url = reverse("todo-focus", args=[todo.pk])
|
|
1451
|
+
Todo.objects.filter(pk=todo.pk).update(url=f"http://testserver{focus_url}")
|
|
1452
|
+
resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
|
|
1453
|
+
change_url = reverse("admin:core_todo_change", args=[todo.pk])
|
|
1454
|
+
self.assertContains(resp, f'src="{change_url}"')
|
|
1455
|
+
|
|
1456
|
+
def test_focus_view_parses_auth_directives(self):
|
|
1457
|
+
todo = Todo.objects.create(
|
|
1458
|
+
request="Task",
|
|
1459
|
+
url="/docs/?section=chart&_todo_auth=logout&_todo_auth=user:demo&_todo_auth=perm:core.view_user&_todo_auth=extra",
|
|
1460
|
+
)
|
|
1461
|
+
resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
|
|
1462
|
+
self.assertContains(resp, 'src="/docs/?section=chart"')
|
|
1463
|
+
self.assertContains(resp, 'href="/docs/?section=chart"')
|
|
1464
|
+
self.assertContains(resp, "logged out")
|
|
1465
|
+
self.assertContains(resp, "Sign in using: demo")
|
|
1466
|
+
self.assertContains(resp, "Required permissions: core.view_user")
|
|
1467
|
+
self.assertContains(resp, "Additional authentication notes: extra")
|
|
1468
|
+
|
|
1303
1469
|
def test_focus_view_redirects_if_todo_completed(self):
|
|
1304
1470
|
todo = Todo.objects.create(request="Task")
|
|
1305
1471
|
todo.done_on = timezone.now()
|
core/user_data.py
CHANGED
|
@@ -238,6 +238,36 @@ def _mark_fixture_user_data(path: Path) -> None:
|
|
|
238
238
|
model.all_objects.filter(pk=pk).update(is_user_data=True)
|
|
239
239
|
|
|
240
240
|
|
|
241
|
+
def _fixture_targets_installed_apps(data) -> bool:
|
|
242
|
+
"""Return ``True`` when *data* only targets installed apps and models."""
|
|
243
|
+
|
|
244
|
+
if not isinstance(data, list):
|
|
245
|
+
return True
|
|
246
|
+
|
|
247
|
+
labels = {
|
|
248
|
+
obj.get("model")
|
|
249
|
+
for obj in data
|
|
250
|
+
if isinstance(obj, dict) and obj.get("model")
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
for label in labels:
|
|
254
|
+
if not isinstance(label, str):
|
|
255
|
+
continue
|
|
256
|
+
if "." not in label:
|
|
257
|
+
continue
|
|
258
|
+
app_label, model_name = label.split(".", 1)
|
|
259
|
+
if not app_label or not model_name:
|
|
260
|
+
continue
|
|
261
|
+
if not apps.is_installed(app_label):
|
|
262
|
+
return False
|
|
263
|
+
try:
|
|
264
|
+
apps.get_model(label)
|
|
265
|
+
except LookupError:
|
|
266
|
+
return False
|
|
267
|
+
|
|
268
|
+
return True
|
|
269
|
+
|
|
270
|
+
|
|
241
271
|
def _load_fixture(path: Path, *, mark_user_data: bool = True) -> bool:
|
|
242
272
|
"""Load a fixture from *path* and optionally flag loaded entities."""
|
|
243
273
|
|
|
@@ -261,9 +291,12 @@ def _load_fixture(path: Path, *, mark_user_data: bool = True) -> bool:
|
|
|
261
291
|
except Exception:
|
|
262
292
|
data = None
|
|
263
293
|
else:
|
|
264
|
-
if isinstance(data, list)
|
|
265
|
-
|
|
266
|
-
|
|
294
|
+
if isinstance(data, list):
|
|
295
|
+
if not data:
|
|
296
|
+
path.unlink(missing_ok=True)
|
|
297
|
+
return False
|
|
298
|
+
if not _fixture_targets_installed_apps(data):
|
|
299
|
+
return False
|
|
267
300
|
|
|
268
301
|
try:
|
|
269
302
|
call_command("loaddata", str(path), ignorenonexistent=True)
|
|
@@ -484,11 +517,23 @@ def patch_admin_user_datum() -> None:
|
|
|
484
517
|
admin.site._user_datum_patched = True
|
|
485
518
|
|
|
486
519
|
|
|
487
|
-
def
|
|
488
|
-
|
|
520
|
+
def _iter_entity_admin_models():
|
|
521
|
+
"""Yield registered :class:`Entity` admin models without proxy duplicates."""
|
|
522
|
+
|
|
523
|
+
seen: set[type] = set()
|
|
489
524
|
for model, model_admin in admin.site._registry.items():
|
|
490
525
|
if not issubclass(model, Entity):
|
|
491
526
|
continue
|
|
527
|
+
concrete_model = model._meta.concrete_model
|
|
528
|
+
if concrete_model in seen:
|
|
529
|
+
continue
|
|
530
|
+
seen.add(concrete_model)
|
|
531
|
+
yield model, model_admin
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
def _seed_data_view(request):
|
|
535
|
+
sections = []
|
|
536
|
+
for model, model_admin in _iter_entity_admin_models():
|
|
492
537
|
objs = model.objects.filter(is_seed_data=True)
|
|
493
538
|
if not objs.exists():
|
|
494
539
|
continue
|
|
@@ -508,9 +553,7 @@ def _seed_data_view(request):
|
|
|
508
553
|
|
|
509
554
|
def _user_data_view(request):
|
|
510
555
|
sections = []
|
|
511
|
-
for model, model_admin in
|
|
512
|
-
if not issubclass(model, Entity):
|
|
513
|
-
continue
|
|
556
|
+
for model, model_admin in _iter_entity_admin_models():
|
|
514
557
|
objs = model.objects.filter(is_user_data=True)
|
|
515
558
|
if not objs.exists():
|
|
516
559
|
continue
|