arthexis 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic. Click here for more details.
- {arthexis-0.1.10.dist-info → arthexis-0.1.11.dist-info}/METADATA +36 -26
- {arthexis-0.1.10.dist-info → arthexis-0.1.11.dist-info}/RECORD +42 -38
- config/context_processors.py +1 -0
- config/settings.py +24 -3
- config/urls.py +5 -4
- core/admin.py +184 -22
- core/apps.py +27 -2
- core/backends.py +38 -0
- core/environment.py +23 -5
- core/mailer.py +3 -1
- core/models.py +270 -31
- core/reference_utils.py +19 -8
- core/sigil_builder.py +7 -2
- core/sigil_resolver.py +35 -4
- core/system.py +247 -1
- core/temp_passwords.py +181 -0
- core/test_system_info.py +62 -2
- core/tests.py +105 -3
- core/user_data.py +51 -8
- core/views.py +245 -8
- nodes/admin.py +137 -2
- nodes/backends.py +21 -6
- nodes/dns.py +203 -0
- nodes/models.py +293 -7
- nodes/tests.py +312 -2
- nodes/views.py +14 -0
- ocpp/consumers.py +11 -8
- ocpp/models.py +3 -0
- ocpp/reference_utils.py +42 -0
- ocpp/test_rfid.py +169 -7
- ocpp/tests.py +30 -0
- ocpp/views.py +8 -0
- pages/admin.py +9 -1
- pages/context_processors.py +6 -6
- pages/defaults.py +14 -0
- pages/models.py +53 -14
- pages/tests.py +19 -4
- pages/urls.py +3 -0
- pages/views.py +86 -19
- {arthexis-0.1.10.dist-info → arthexis-0.1.11.dist-info}/WHEEL +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.11.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.11.dist-info}/top_level.txt +0 -0
core/system.py
CHANGED
|
@@ -1,25 +1,262 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
from contextlib import closing
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from datetime import datetime
|
|
4
6
|
from pathlib import Path
|
|
7
|
+
import json
|
|
5
8
|
import re
|
|
6
9
|
import socket
|
|
7
10
|
import subprocess
|
|
8
11
|
import shutil
|
|
12
|
+
from typing import Callable, Iterable, Optional
|
|
9
13
|
|
|
10
14
|
from django.conf import settings
|
|
11
15
|
from django.contrib import admin
|
|
12
16
|
from django.template.response import TemplateResponse
|
|
13
17
|
from django.urls import path
|
|
18
|
+
from django.utils import timezone
|
|
19
|
+
from django.utils.formats import date_format
|
|
14
20
|
from django.utils.translation import gettext_lazy as _
|
|
15
21
|
|
|
22
|
+
from core.auto_upgrade import AUTO_UPGRADE_TASK_NAME
|
|
16
23
|
from utils import revision
|
|
17
24
|
|
|
18
25
|
|
|
26
|
+
@dataclass(frozen=True)
class SystemField:
    """Metadata describing a single entry on the system admin page."""

    # Human-readable, translated label shown in the admin UI.
    label: str
    # Key under which the value is published for sigil resolution (may use
    # dot notation for namespaced entries, e.g. "AUTO-UPGRADE.NEXT-CHECK").
    sigil_key: str
    # Raw value; rendering depends on ``field_type``.
    value: object
    # One of "text", "boolean", "features", or "databases".
    field_type: str = "text"

    @property
    def sigil(self) -> str:
        """Return the fully-qualified sigil name in the ``SYS`` namespace."""
        return f"SYS.{self.sigil_key}"
|
|
38
|
+
|
|
39
|
+
|
|
19
40
|
_RUNSERVER_PORT_PATTERN = re.compile(r":(\d{2,5})(?:\D|$)")
|
|
20
41
|
_RUNSERVER_PORT_FLAG_PATTERN = re.compile(r"--port(?:=|\s+)(\d{2,5})", re.IGNORECASE)
|
|
21
42
|
|
|
22
43
|
|
|
44
|
+
def _format_timestamp(dt: datetime | None) -> str:
    """Return ``dt`` formatted using the active ``DATETIME_FORMAT``.

    ``None`` yields an empty string.  Localization is best-effort: if the
    conversion to local time fails for any reason, the raw datetime is
    formatted instead.
    """

    if dt is None:
        return ""
    try:
        value = timezone.localtime(dt)
    except Exception:
        # Best effort — fall back to the unconverted value.
        value = dt
    return date_format(value, "DATETIME_FORMAT")
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _auto_upgrade_next_check() -> str:
    """Return the human-readable timestamp for the next auto-upgrade check.

    Returns an empty string whenever the answer cannot be determined:
    django-celery-beat is not installed, the task does not exist, the
    database is unavailable, the task has no schedule, or the schedule
    cannot produce an estimate.
    """

    # django-celery-beat is an optional dependency; degrade gracefully.
    try: # pragma: no cover - optional dependency failures
        from django_celery_beat.models import PeriodicTask
    except Exception:
        return ""

    try:
        task = (
            PeriodicTask.objects.select_related(
                "interval", "crontab", "solar", "clocked"
            )
            .only("enabled", "last_run_at", "start_time", "name")
            .get(name=AUTO_UPGRADE_TASK_NAME)
        )
    except PeriodicTask.DoesNotExist:
        return ""
    except Exception: # pragma: no cover - database unavailable
        return ""

    if not task.enabled:
        return str(_("Disabled"))

    schedule = task.schedule
    if schedule is None:
        return ""

    # "Now" according to the schedule's own clock/timezone handling.
    now = schedule.maybe_make_aware(schedule.now())

    # If the task has a future start time, that is the next check.
    start_time = task.start_time
    if start_time is not None:
        try:
            candidate_start = schedule.maybe_make_aware(start_time)
        except Exception:
            # Fallback when the schedule cannot normalize the value itself.
            candidate_start = (
                timezone.make_aware(start_time)
                if timezone.is_naive(start_time)
                else start_time
            )
        if candidate_start and candidate_start > now:
            return _format_timestamp(candidate_start)

    # Estimate from the last run when available, otherwise from "now".
    last_run_at = task.last_run_at
    if last_run_at is not None:
        try:
            reference = schedule.maybe_make_aware(last_run_at)
        except Exception:
            reference = (
                timezone.make_aware(last_run_at)
                if timezone.is_naive(last_run_at)
                else last_run_at
            )
    else:
        reference = now

    try:
        remaining = schedule.remaining_estimate(reference)
    except Exception:
        return ""

    next_run = now + remaining
    return _format_timestamp(next_run)
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def _resolve_auto_upgrade_namespace(key: str) -> str | None:
    """Resolve sigils within the ``AUTO-UPGRADE`` namespace.

    Returns ``None`` for keys this namespace does not recognize.
    """

    # Keys arrive in either dash or underscore spelling; canonicalize first.
    canonical = key.upper().replace("-", "_")
    return _auto_upgrade_next_check() if canonical == "NEXT_CHECK" else None
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
# Registry of dynamic ``SYS.<NAMESPACE>.<KEY>`` resolvers, keyed by the
# normalized (upper-case, underscore-separated) namespace name.  Each handler
# receives the remainder of the key and returns a string or ``None``.
_SYSTEM_SIGIL_NAMESPACES: dict[str, Callable[[str], Optional[str]]] = {
    "AUTO_UPGRADE": _resolve_auto_upgrade_namespace,
}
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def resolve_system_namespace_value(key: str) -> str | None:
    """Resolve dot-notation sigils mapped to dynamic ``SYS`` namespaces.

    ``key`` looks like ``"NAMESPACE.REMAINDER"``; the namespace selects a
    handler from ``_SYSTEM_SIGIL_NAMESPACES`` and the remainder is passed to
    it.  Returns ``None`` for empty keys, keys without a dot, or unknown
    namespaces.
    """

    if not key:
        return None
    # NOTE: the discarded separator must not be named ``_`` here — that would
    # shadow the module-level gettext alias.
    namespace, _sep, remainder = key.partition(".")
    if not remainder:
        return None
    handler = _SYSTEM_SIGIL_NAMESPACES.get(namespace.replace("-", "_").upper())
    if handler is None:
        return None
    return handler(remainder)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _database_configurations() -> list[dict[str, str]]:
    """Return a normalized list of configured database connections.

    Each entry carries the connection alias plus stringified ``ENGINE`` and
    ``NAME`` values (``NAME`` may be a ``Path`` in settings).  Entries are
    ordered case-insensitively by alias for stable display.
    """

    def _text(value: object) -> str:
        # Only an explicit ``None`` collapses to the empty string; every
        # other value is rendered via ``str``.
        return "" if value is None else str(value)

    entries = [
        {
            "alias": alias,
            "engine": _text(cfg.get("ENGINE", "")),
            "name": _text(cfg.get("NAME", "")),
        }
        for alias, cfg in settings.DATABASES.items()
    ]
    return sorted(entries, key=lambda entry: entry["alias"].lower())
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _build_system_fields(info: dict[str, object]) -> list[SystemField]:
    """Convert gathered system information into renderable rows.

    ``info`` is the dict produced by ``_gather_info``; missing keys fall back
    to benign defaults, so partially populated dicts are accepted.  Row order
    here determines display order on the admin page.
    """

    fields: list[SystemField] = []

    # Small helper so conditional rows can be skipped via ``visible``.
    def add_field(label: str, key: str, value: object, *, field_type: str = "text", visible: bool = True) -> None:
        if not visible:
            return
        fields.append(SystemField(label=label, sigil_key=key, value=value, field_type=field_type))

    add_field(_("Suite installed"), "INSTALLED", info.get("installed", False), field_type="boolean")
    add_field(_("Revision"), "REVISION", info.get("revision", ""))

    # An empty/missing service name is replaced with a translated placeholder.
    service_value = info.get("service") or _("not installed")
    add_field(_("Service"), "SERVICE", service_value)

    # Combine mode and port into one display value, e.g. "internal (8888)".
    nginx_mode = info.get("mode", "")
    port = info.get("port", "")
    nginx_display = f"{nginx_mode} ({port})" if port else nginx_mode
    add_field(_("Nginx mode"), "NGINX_MODE", nginx_display)

    add_field(_("Node role"), "NODE_ROLE", info.get("role", ""))
    # Display mode row is hidden entirely when no screen mode is reported.
    add_field(
        _("Display mode"),
        "DISPLAY_MODE",
        info.get("screen_mode", ""),
        visible=bool(info.get("screen_mode")),
    )

    add_field(_("Features"), "FEATURES", info.get("features", []), field_type="features")
    add_field(_("Running"), "RUNNING", info.get("running", False), field_type="boolean")
    # Service status only makes sense when a service is installed.
    add_field(
        _("Service status"),
        "SERVICE_STATUS",
        info.get("service_status", ""),
        visible=bool(info.get("service")),
    )

    add_field(_("Hostname"), "HOSTNAME", info.get("hostname", ""))

    # Addresses are joined into one space-separated string for display.
    ip_addresses: Iterable[str] = info.get("ip_addresses", [])  # type: ignore[assignment]
    add_field(_("IP addresses"), "IP_ADDRESSES", " ".join(ip_addresses))

    add_field(
        _("Databases"),
        "DATABASES",
        info.get("databases", []),
        field_type="databases",
    )

    # Dotted key: resolved dynamically via the AUTO-UPGRADE sigil namespace.
    add_field(
        _("Next auto-upgrade check"),
        "AUTO-UPGRADE.NEXT-CHECK",
        info.get("auto_upgrade_next_check", ""),
    )

    return fields
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _export_field_value(field: SystemField) -> str:
|
|
230
|
+
"""Serialize a ``SystemField`` value for sigil resolution."""
|
|
231
|
+
|
|
232
|
+
if field.field_type in {"features", "databases"}:
|
|
233
|
+
return json.dumps(field.value)
|
|
234
|
+
if field.field_type == "boolean":
|
|
235
|
+
return "True" if field.value else "False"
|
|
236
|
+
if field.value is None:
|
|
237
|
+
return ""
|
|
238
|
+
return str(field.value)
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def get_system_sigil_values() -> dict[str, str]:
    """Expose system information in a format suitable for sigil lookups.

    Each field is published under both its literal upper-cased key and an
    underscore alias (dashes replaced), so lookups succeed regardless of
    dash/underscore spelling.  Fields with blank keys are skipped.
    """

    values: dict[str, str] = {}
    for field in _build_system_fields(_gather_info()):
        key = (field.sigil_key or "").strip()
        if not key:
            continue
        serialized = _export_field_value(field)
        for alias in {key.upper(), key.replace("-", "_").upper()}:
            values[alias] = serialized
    return values
|
|
258
|
+
|
|
259
|
+
|
|
23
260
|
def _parse_runserver_port(command_line: str) -> int | None:
|
|
24
261
|
"""Extract the HTTP port from a runserver command line."""
|
|
25
262
|
|
|
@@ -219,6 +456,9 @@ def _gather_info() -> dict:
|
|
|
219
456
|
info["hostname"] = hostname
|
|
220
457
|
info["ip_addresses"] = ip_list
|
|
221
458
|
|
|
459
|
+
info["databases"] = _database_configurations()
|
|
460
|
+
info["auto_upgrade_next_check"] = _auto_upgrade_next_check()
|
|
461
|
+
|
|
222
462
|
return info
|
|
223
463
|
|
|
224
464
|
|
|
@@ -226,7 +466,13 @@ def _system_view(request):
|
|
|
226
466
|
info = _gather_info()
|
|
227
467
|
|
|
228
468
|
context = admin.site.each_context(request)
|
|
229
|
-
context.update(
|
|
469
|
+
context.update(
|
|
470
|
+
{
|
|
471
|
+
"title": _("System"),
|
|
472
|
+
"info": info,
|
|
473
|
+
"system_fields": _build_system_fields(info),
|
|
474
|
+
}
|
|
475
|
+
)
|
|
230
476
|
return TemplateResponse(request, "admin/system.html", context)
|
|
231
477
|
|
|
232
478
|
|
core/temp_passwords.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"""Utilities for temporary password lock files."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import re
|
|
8
|
+
import secrets
|
|
9
|
+
import string
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
from django.conf import settings
|
|
16
|
+
from django.contrib.auth.hashers import check_password, make_password
|
|
17
|
+
from django.utils import timezone
|
|
18
|
+
from django.utils.dateparse import parse_datetime
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# Length of generated passwords (letters + digits) when not overridden.
DEFAULT_PASSWORD_LENGTH = 16
# Lifetime applied when ``store_temp_password`` receives no expiry.
DEFAULT_EXPIRATION = timedelta(hours=1)
# Runs of characters outside this safe set are collapsed when deriving a
# filesystem-safe filename component.
_SAFE_COMPONENT_RE = re.compile(r"[^A-Za-z0-9_.-]+")
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _base_lock_dir() -> Path:
    """Return the root directory used for temporary password lock files.

    Honors ``settings.TEMP_PASSWORD_LOCK_DIR`` when set, otherwise falls back
    to ``BASE_DIR/locks/temp-passwords``.  The directory is created on demand.
    """

    configured = getattr(settings, "TEMP_PASSWORD_LOCK_DIR", None)
    if configured:
        base = Path(configured)
    else:
        base = Path(settings.BASE_DIR) / "locks" / "temp-passwords"
    base.mkdir(parents=True, exist_ok=True)
    return base
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _safe_component(value: str) -> str:
    """Return a filesystem safe component derived from ``value``.

    Unsafe character runs become ``_``, leading/trailing dots and
    underscores are trimmed, and the result is capped at 64 characters.
    """

    if not value:
        return ""
    cleaned = _SAFE_COMPONENT_RE.sub("_", value).strip("._")
    return cleaned[:64]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _lockfile_name(username: str) -> str:
    """Return the filename used for the provided ``username``.

    The short SHA-256 digest disambiguates usernames that sanitize to the
    same prefix; when sanitization yields nothing, a generic ``user-`` prefix
    is used.
    """

    digest = hashlib.sha256(username.encode("utf-8")).hexdigest()[:12]
    prefix = _safe_component(username)
    return f"{prefix}-{digest}.json" if prefix else f"user-{digest}.json"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _lockfile_path(username: str) -> Path:
    """Return the lockfile path for ``username``."""

    directory = _base_lock_dir()
    return directory / _lockfile_name(username)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _parse_timestamp(value: str | None) -> Optional[datetime]:
    """Return a timezone aware datetime parsed from ``value``.

    Returns ``None`` for empty input or unparseable strings; naive results
    are promoted to aware datetimes.
    """

    if not value:
        return None
    parsed = parse_datetime(value)
    if parsed is None:
        return None
    return timezone.make_aware(parsed) if timezone.is_naive(parsed) else parsed
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@dataclass(frozen=True)
class TempPasswordEntry:
    """Details for a temporary password stored on disk."""

    # Account the temporary password belongs to.
    username: str
    # Django password hash (from ``make_password``); never the plain text.
    password_hash: str
    # Aware datetime after which the password is no longer valid.
    expires_at: datetime
    # Aware datetime when the entry was written.
    created_at: datetime
    # Location of the backing JSON lockfile.
    path: Path
    # Whether the user may change their password while this entry is active.
    allow_change: bool = False

    @property
    def is_expired(self) -> bool:
        """Return ``True`` once ``expires_at`` has been reached."""
        return timezone.now() >= self.expires_at

    def check_password(self, raw_password: str) -> bool:
        """Return ``True`` if ``raw_password`` matches this entry."""

        return check_password(raw_password, self.password_hash)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def generate_password(length: int = DEFAULT_PASSWORD_LENGTH) -> str:
    """Return a random password composed of letters and digits.

    Raises ``ValueError`` when ``length`` is not positive.
    """

    if length <= 0:
        raise ValueError("length must be a positive integer")
    # ``secrets`` draws from the OS CSPRNG, as required for credentials.
    pool = string.ascii_letters + string.digits
    chars = [secrets.choice(pool) for _ in range(length)]
    return "".join(chars)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def store_temp_password(
    username: str,
    raw_password: str,
    expires_at: Optional[datetime] = None,
    *,
    allow_change: bool = False,
) -> TempPasswordEntry:
    """Persist a temporary password for ``username`` and return the entry.

    Only the password hash is written to disk.  ``expires_at`` defaults to
    ``DEFAULT_EXPIRATION`` from now and is promoted to an aware datetime when
    naive.  ``allow_change`` records whether the user may change their
    password while the temporary one is active.
    """

    import os  # local import: only needed for the restricted-permission write

    if expires_at is None:
        expires_at = timezone.now() + DEFAULT_EXPIRATION
    if timezone.is_naive(expires_at):
        expires_at = timezone.make_aware(expires_at)
    created_at = timezone.now()
    path = _lockfile_path(username)
    data = {
        "username": username,
        "password_hash": make_password(raw_password),
        "expires_at": expires_at.isoformat(),
        "created_at": created_at.isoformat(),
        "allow_change": allow_change,
    }
    payload = json.dumps(data, indent=2, sort_keys=True)
    # Security: the file holds a credential hash, so create it owner-only
    # (0o600) instead of relying on the process umask.  Pre-existing files
    # keep their mode; O_TRUNC replaces their content.
    fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "w") as handle:
        handle.write(payload)
    return TempPasswordEntry(
        username=username,
        password_hash=data["password_hash"],
        expires_at=expires_at,
        created_at=created_at,
        path=path,
        allow_change=allow_change,
    )
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def load_temp_password(username: str) -> Optional[TempPasswordEntry]:
    """Return the stored temporary password for ``username``, if any.

    Corrupt or incomplete lockfiles are deleted and treated as absent.  The
    returned entry may still be expired — callers should check
    ``is_expired`` themselves.
    """

    path = _lockfile_path(username)
    if not path.exists():
        return None
    try:
        data = json.loads(path.read_text())
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Unreadable lockfile: discard it rather than fail every lookup.
        path.unlink(missing_ok=True)
        return None

    expires_at = _parse_timestamp(data.get("expires_at"))
    created_at = _parse_timestamp(data.get("created_at")) or timezone.now()
    password_hash = data.get("password_hash")
    if not expires_at or not password_hash:
        # Mandatory fields missing — the file is unusable, remove it.
        path.unlink(missing_ok=True)
        return None

    # Prefer the username recorded in the file over the lookup key.
    username = data.get("username") or username
    # ``allow_change`` may have been serialized as a bool or a string flag.
    allow_change_value = data.get("allow_change", False)
    if isinstance(allow_change_value, str):
        allow_change = allow_change_value.lower() in {"1", "true", "yes", "on"}
    else:
        allow_change = bool(allow_change_value)

    return TempPasswordEntry(
        username=username,
        password_hash=password_hash,
        expires_at=expires_at,
        created_at=created_at,
        path=path,
        allow_change=allow_change,
    )
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def discard_temp_password(username: str) -> None:
    """Remove any stored temporary password for ``username``.

    A no-op when no lockfile exists (``missing_ok``).
    """

    _lockfile_path(username).unlink(missing_ok=True)
|
|
181
|
+
|
core/test_system_info.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import json
|
|
1
2
|
import os
|
|
2
3
|
from pathlib import Path
|
|
3
4
|
from subprocess import CompletedProcess
|
|
@@ -11,9 +12,9 @@ import django
|
|
|
11
12
|
django.setup()
|
|
12
13
|
|
|
13
14
|
from django.conf import settings
|
|
14
|
-
from django.test import SimpleTestCase,
|
|
15
|
+
from django.test import SimpleTestCase, override_settings
|
|
15
16
|
from nodes.models import Node, NodeFeature, NodeRole
|
|
16
|
-
from core.system import _gather_info
|
|
17
|
+
from core.system import _gather_info, get_system_sigil_values
|
|
17
18
|
|
|
18
19
|
|
|
19
20
|
class SystemInfoRoleTests(SimpleTestCase):
|
|
@@ -55,6 +56,27 @@ class SystemInfoRevisionTests(SimpleTestCase):
|
|
|
55
56
|
mock_revision.assert_called_once()
|
|
56
57
|
|
|
57
58
|
|
|
59
|
+
class SystemInfoDatabaseTests(SimpleTestCase):
    """Checks for the ``databases`` entry produced by ``_gather_info``."""

    def test_collects_database_definitions(self):
        """The configured ``default`` connection must always be reported."""
        info = _gather_info()
        self.assertIn("databases", info)
        aliases = {entry["alias"] for entry in info["databases"]}
        self.assertIn("default", aliases)

    @override_settings(
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
                # ``NAME`` as a Path exercises the str() normalization.
                "NAME": Path("/tmp/db.sqlite3"),
            }
        }
    )
    def test_serializes_path_database_names(self):
        """``Path`` database names must be exported as plain strings."""
        info = _gather_info()
        databases = info["databases"]
        self.assertEqual(databases[0]["name"], "/tmp/db.sqlite3")
|
|
78
|
+
|
|
79
|
+
|
|
58
80
|
class SystemInfoRunserverDetectionTests(SimpleTestCase):
|
|
59
81
|
@patch("core.system.subprocess.run")
|
|
60
82
|
def test_detects_runserver_process_port(self, mock_run):
|
|
@@ -77,3 +99,41 @@ class SystemInfoRunserverDetectionTests(SimpleTestCase):
|
|
|
77
99
|
self.assertTrue(info["running"])
|
|
78
100
|
self.assertEqual(info["port"], 8000)
|
|
79
101
|
|
|
102
|
+
|
|
103
|
+
class SystemSigilValueTests(SimpleTestCase):
    """End-to-end checks of ``get_system_sigil_values`` serialization."""

    def test_exports_values_for_sigil_resolution(self):
        """Each info field must be exported as a string keyed by sigil name."""
        # Fixed stand-in for ``_gather_info`` so the assertions are stable.
        sample_info = {
            "installed": True,
            "revision": "abcdef",
            "service": "gunicorn",
            "mode": "internal",
            "port": 8888,
            "role": "Terminal",
            "screen_mode": "",
            "features": [
                {"display": "Feature", "expected": True, "actual": False, "slug": "feature"}
            ],
            "running": True,
            "service_status": "active",
            "hostname": "example.local",
            "ip_addresses": ["127.0.0.1"],
            "databases": [
                {
                    "alias": "default",
                    "engine": "django.db.backends.sqlite3",
                    "name": "db.sqlite3",
                }
            ],
        }
        with patch("core.system._gather_info", return_value=sample_info):
            values = get_system_sigil_values()

        self.assertEqual(values["REVISION"], "abcdef")
        # Booleans are exported as the strings "True"/"False".
        self.assertEqual(values["RUNNING"], "True")
        # Mode and port are combined into one display value.
        self.assertEqual(values["NGINX_MODE"], "internal (8888)")
        self.assertEqual(values["IP_ADDRESSES"], "127.0.0.1")
        # Structured values round-trip through JSON.
        features = json.loads(values["FEATURES"])
        self.assertEqual(features[0]["display"], "Feature")
        databases = json.loads(values["DATABASES"])
        self.assertEqual(databases[0]["alias"], "default")
|
|
139
|
+
|