arthexis 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic. Click here for more details.
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/METADATA +76 -23
- arthexis-0.1.11.dist-info/RECORD +99 -0
- config/context_processors.py +1 -0
- config/settings.py +245 -26
- config/urls.py +11 -4
- core/admin.py +585 -57
- core/apps.py +29 -1
- core/auto_upgrade.py +57 -0
- core/backends.py +115 -3
- core/environment.py +23 -5
- core/fields.py +93 -0
- core/mailer.py +3 -1
- core/models.py +482 -38
- core/reference_utils.py +108 -0
- core/sigil_builder.py +23 -5
- core/sigil_resolver.py +35 -4
- core/system.py +400 -140
- core/tasks.py +151 -8
- core/temp_passwords.py +181 -0
- core/test_system_info.py +97 -1
- core/tests.py +393 -15
- core/user_data.py +154 -16
- core/views.py +499 -20
- nodes/admin.py +149 -6
- nodes/backends.py +125 -18
- nodes/dns.py +203 -0
- nodes/models.py +498 -9
- nodes/tests.py +682 -3
- nodes/views.py +154 -7
- ocpp/admin.py +63 -3
- ocpp/consumers.py +255 -41
- ocpp/evcs.py +6 -3
- ocpp/models.py +52 -7
- ocpp/reference_utils.py +42 -0
- ocpp/simulator.py +62 -5
- ocpp/store.py +30 -0
- ocpp/test_rfid.py +169 -7
- ocpp/tests.py +414 -8
- ocpp/views.py +109 -76
- pages/admin.py +9 -1
- pages/context_processors.py +24 -4
- pages/defaults.py +14 -0
- pages/forms.py +131 -0
- pages/models.py +53 -14
- pages/tests.py +450 -14
- pages/urls.py +4 -0
- pages/views.py +419 -110
- arthexis-0.1.9.dist-info/RECORD +0 -92
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/top_level.txt +0 -0
core/tasks.py
CHANGED
|
@@ -2,8 +2,9 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
import logging
|
|
4
4
|
import subprocess
|
|
5
|
-
from datetime import datetime
|
|
6
5
|
from pathlib import Path
|
|
6
|
+
import urllib.error
|
|
7
|
+
import urllib.request
|
|
7
8
|
|
|
8
9
|
from celery import shared_task
|
|
9
10
|
from django.conf import settings
|
|
@@ -15,6 +16,10 @@ from django.utils import timezone
|
|
|
15
16
|
from nodes.models import NetMessage
|
|
16
17
|
|
|
17
18
|
|
|
19
|
+
AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
|
|
20
|
+
AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS = 3
|
|
21
|
+
|
|
22
|
+
|
|
18
23
|
logger = logging.getLogger(__name__)
|
|
19
24
|
|
|
20
25
|
|
|
@@ -41,6 +46,41 @@ def birthday_greetings() -> None:
|
|
|
41
46
|
)
|
|
42
47
|
|
|
43
48
|
|
|
49
|
+
def _auto_upgrade_log_path(base_dir: Path) -> Path:
|
|
50
|
+
"""Return the log file used for auto-upgrade events."""
|
|
51
|
+
|
|
52
|
+
log_dir = base_dir / "logs"
|
|
53
|
+
log_dir.mkdir(parents=True, exist_ok=True)
|
|
54
|
+
return log_dir / "auto-upgrade.log"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
    """Append ``message`` to the auto-upgrade log, swallowing all errors.

    Logging here is strictly best-effort: a failure to write the file must
    never break the upgrade flow, so any exception only produces a warning.
    """

    try:
        stamp = timezone.now().isoformat()
        target = _auto_upgrade_log_path(base_dir)
        with target.open("a") as handle:
            handle.write(f"{stamp} {message}\n")
    except Exception:  # pragma: no cover - best effort logging only
        logger.warning("Failed to append auto-upgrade log entry: %s", message)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _resolve_service_url(base_dir: Path) -> str:
|
|
70
|
+
"""Return the local URL used to probe the Django suite."""
|
|
71
|
+
|
|
72
|
+
lock_dir = base_dir / "locks"
|
|
73
|
+
mode_file = lock_dir / "nginx_mode.lck"
|
|
74
|
+
mode = "internal"
|
|
75
|
+
if mode_file.exists():
|
|
76
|
+
try:
|
|
77
|
+
mode = mode_file.read_text().strip() or "internal"
|
|
78
|
+
except OSError:
|
|
79
|
+
mode = "internal"
|
|
80
|
+
port = 8000 if mode == "public" else 8888
|
|
81
|
+
return f"http://127.0.0.1:{port}/"
|
|
82
|
+
|
|
83
|
+
|
|
44
84
|
@shared_task
|
|
45
85
|
def check_github_updates() -> None:
|
|
46
86
|
"""Check the GitHub repo for updates and upgrade if needed."""
|
|
@@ -53,11 +93,11 @@ def check_github_updates() -> None:
|
|
|
53
93
|
branch = "main"
|
|
54
94
|
subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)
|
|
55
95
|
|
|
56
|
-
|
|
57
|
-
log_dir.mkdir(parents=True, exist_ok=True)
|
|
58
|
-
log_file = log_dir / "auto-upgrade.log"
|
|
96
|
+
log_file = _auto_upgrade_log_path(base_dir)
|
|
59
97
|
with log_file.open("a") as fh:
|
|
60
|
-
fh.write(
|
|
98
|
+
fh.write(
|
|
99
|
+
f"{timezone.now().isoformat()} check_github_updates triggered\n"
|
|
100
|
+
)
|
|
61
101
|
|
|
62
102
|
notify = None
|
|
63
103
|
startup = None
|
|
@@ -70,6 +110,10 @@ def check_github_updates() -> None:
|
|
|
70
110
|
except Exception:
|
|
71
111
|
startup = None
|
|
72
112
|
|
|
113
|
+
upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
|
|
114
|
+
|
|
115
|
+
upgrade_was_applied = False
|
|
116
|
+
|
|
73
117
|
if mode == "latest":
|
|
74
118
|
local = (
|
|
75
119
|
subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
|
|
@@ -93,8 +137,9 @@ def check_github_updates() -> None:
|
|
|
93
137
|
startup()
|
|
94
138
|
return
|
|
95
139
|
if notify:
|
|
96
|
-
notify("Upgrading...",
|
|
140
|
+
notify("Upgrading...", upgrade_stamp)
|
|
97
141
|
args = ["./upgrade.sh", "--latest", "--no-restart"]
|
|
142
|
+
upgrade_was_applied = True
|
|
98
143
|
else:
|
|
99
144
|
local = "0"
|
|
100
145
|
version_file = base_dir / "VERSION"
|
|
@@ -117,11 +162,14 @@ def check_github_updates() -> None:
|
|
|
117
162
|
startup()
|
|
118
163
|
return
|
|
119
164
|
if notify:
|
|
120
|
-
notify("Upgrading...",
|
|
165
|
+
notify("Upgrading...", upgrade_stamp)
|
|
121
166
|
args = ["./upgrade.sh", "--no-restart"]
|
|
167
|
+
upgrade_was_applied = True
|
|
122
168
|
|
|
123
169
|
with log_file.open("a") as fh:
|
|
124
|
-
fh.write(
|
|
170
|
+
fh.write(
|
|
171
|
+
f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
|
|
172
|
+
)
|
|
125
173
|
|
|
126
174
|
subprocess.run(args, cwd=base_dir, check=True)
|
|
127
175
|
|
|
@@ -140,6 +188,16 @@ def check_github_updates() -> None:
|
|
|
140
188
|
else:
|
|
141
189
|
subprocess.run(["pkill", "-f", "manage.py runserver"])
|
|
142
190
|
|
|
191
|
+
if upgrade_was_applied:
|
|
192
|
+
_append_auto_upgrade_log(
|
|
193
|
+
base_dir,
|
|
194
|
+
(
|
|
195
|
+
"Scheduled post-upgrade health check in %s seconds"
|
|
196
|
+
% AUTO_UPGRADE_HEALTH_DELAY_SECONDS
|
|
197
|
+
),
|
|
198
|
+
)
|
|
199
|
+
_schedule_health_check(1)
|
|
200
|
+
|
|
143
201
|
|
|
144
202
|
@shared_task
|
|
145
203
|
def poll_email_collectors() -> None:
|
|
@@ -181,6 +239,91 @@ def report_runtime_issue(
|
|
|
181
239
|
return response
|
|
182
240
|
|
|
183
241
|
|
|
242
|
+
def _record_health_check_result(
    base_dir: Path, attempt: int, status: int | None, detail: str
) -> None:
    """Write one health-check outcome to the auto-upgrade log.

    ``status`` is the HTTP status code or ``None`` when the service never
    answered, in which case the log shows "unreachable" instead.
    """

    shown = "unreachable" if status is None else status
    _append_auto_upgrade_log(
        base_dir, f"Health check attempt {attempt} {detail} ({shown})"
    )
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def _schedule_health_check(next_attempt: int) -> None:
    """Queue ``verify_auto_upgrade_health`` to run again after the delay.

    ``apply_async`` with ``countdown`` lets the Celery broker honour the
    AUTO_UPGRADE_HEALTH_DELAY_SECONDS wait instead of blocking this worker.
    """

    verify_auto_upgrade_health.apply_async(
        kwargs={"attempt": next_attempt},
        countdown=AUTO_UPGRADE_HEALTH_DELAY_SECONDS,
    )
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
@shared_task
def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
    """Verify the upgraded suite responds successfully.

    When the check fails three times in a row the upgrade is rolled back by
    invoking ``upgrade.sh --revert``.

    Returns ``True`` on an HTTP 200, ``False`` when the final failed attempt
    triggers a revert, and ``None`` when another attempt has been scheduled
    (or the unexpected-error path finished).
    """

    base_dir = Path(__file__).resolve().parent.parent
    url = _resolve_service_url(base_dir)
    request = urllib.request.Request(
        url,
        headers={"User-Agent": "Arthexis-AutoUpgrade/1.0"},
    )

    status: int | None = None
    try:
        with urllib.request.urlopen(request, timeout=10) as response:
            # ``status`` attribute with a ``getcode()`` fallback for older
            # response objects.
            status = getattr(response, "status", response.getcode())
    except urllib.error.HTTPError as exc:
        # An HTTP error still proves the server answered; keep its code so
        # the normal success/failure handling below runs.
        status = exc.code
        logger.warning(
            "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
        )
    except urllib.error.URLError as exc:
        # Connection-level failure: ``status`` stays None ("unreachable").
        logger.warning(
            "Auto-upgrade health check attempt %s failed: %s", attempt, exc
        )
    except Exception as exc:  # pragma: no cover - unexpected network error
        logger.exception(
            "Unexpected error probing suite during auto-upgrade attempt %s", attempt
        )
        # Unexpected errors are handled inline: record, then either revert
        # (final attempt) or reschedule, and stop here.
        detail = f"failed with {exc}"
        _record_health_check_result(base_dir, attempt, status, detail)
        if attempt >= AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS:
            _append_auto_upgrade_log(
                base_dir,
                "Health check raised unexpected error; reverting upgrade",
            )
            subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
        else:
            _schedule_health_check(attempt + 1)
        return None

    if status == 200:
        _record_health_check_result(base_dir, attempt, status, "succeeded")
        logger.info(
            "Auto-upgrade health check succeeded on attempt %s with HTTP %s",
            attempt,
            status,
        )
        return True

    # Any non-200 (including unreachable) counts as a failed attempt.
    _record_health_check_result(base_dir, attempt, status, "failed")

    if attempt >= AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS:
        logger.error(
            "Auto-upgrade health check failed after %s attempts; reverting", attempt
        )
        _append_auto_upgrade_log(
            base_dir,
            "Health check failed three times; reverting upgrade",
        )
        subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
        return False

    _schedule_health_check(attempt + 1)
    return None
|
|
325
|
+
|
|
326
|
+
|
|
184
327
|
@shared_task
|
|
185
328
|
def run_client_report_schedule(schedule_id: int) -> None:
|
|
186
329
|
"""Execute a :class:`core.models.ClientReportSchedule` run."""
|
core/temp_passwords.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"""Utilities for temporary password lock files."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import re
|
|
8
|
+
import secrets
|
|
9
|
+
import string
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
from django.conf import settings
|
|
16
|
+
from django.contrib.auth.hashers import check_password, make_password
|
|
17
|
+
from django.utils import timezone
|
|
18
|
+
from django.utils.dateparse import parse_datetime
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
DEFAULT_PASSWORD_LENGTH = 16
|
|
22
|
+
DEFAULT_EXPIRATION = timedelta(hours=1)
|
|
23
|
+
_SAFE_COMPONENT_RE = re.compile(r"[^A-Za-z0-9_.-]+")
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _base_lock_dir() -> Path:
    """Return (and create) the root directory for temp-password lock files.

    ``settings.TEMP_PASSWORD_LOCK_DIR`` overrides the default location of
    ``<BASE_DIR>/locks/temp-passwords`` when set to a truthy value.
    """

    override = getattr(settings, "TEMP_PASSWORD_LOCK_DIR", None)
    if override:
        root = Path(override)
    else:
        root = Path(settings.BASE_DIR) / "locks" / "temp-passwords"
    root.mkdir(parents=True, exist_ok=True)
    return root
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _safe_component(value: str) -> str:
|
|
39
|
+
"""Return a filesystem safe component derived from ``value``."""
|
|
40
|
+
|
|
41
|
+
if not value:
|
|
42
|
+
return ""
|
|
43
|
+
safe = _SAFE_COMPONENT_RE.sub("_", value)
|
|
44
|
+
safe = safe.strip("._")
|
|
45
|
+
return safe[:64]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _lockfile_name(username: str) -> str:
    """Return the lock filename used for ``username``.

    A 12-hex-digit SHA-256 prefix keeps names unique even when sanitising
    collapses distinct usernames to the same component; a username that
    sanitises to nothing falls back to the generic ``user`` stem.
    """

    fingerprint = hashlib.sha256(username.encode("utf-8")).hexdigest()[:12]
    stem = _safe_component(username) or "user"
    return f"{stem}-{fingerprint}.json"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _lockfile_path(username: str) -> Path:
    """Return the full lockfile path for ``username`` under the lock root."""

    return _base_lock_dir() / _lockfile_name(username)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _parse_timestamp(value: str | None) -> Optional[datetime]:
    """Parse ``value`` into an aware datetime, or ``None`` when invalid.

    Falsy inputs and strings ``parse_datetime`` cannot understand both
    yield ``None``; naive results are promoted with ``make_aware``.
    """

    if not value:
        return None
    moment = parse_datetime(value)
    if moment is not None and timezone.is_naive(moment):
        moment = timezone.make_aware(moment)
    return moment
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@dataclass(frozen=True)
class TempPasswordEntry:
    """Immutable snapshot of a temporary password lock file on disk."""

    username: str
    password_hash: str
    expires_at: datetime
    created_at: datetime
    path: Path
    allow_change: bool = False

    @property
    def is_expired(self) -> bool:
        """Return ``True`` once the current time reaches ``expires_at``."""
        return timezone.now() >= self.expires_at

    def check_password(self, raw_password: str) -> bool:
        """Return ``True`` if ``raw_password`` matches this entry's hash."""

        return check_password(raw_password, self.password_hash)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def generate_password(length: int = DEFAULT_PASSWORD_LENGTH) -> str:
    """Return a cryptographically random alphanumeric password.

    Raises ``ValueError`` when ``length`` is not positive.  ``secrets``
    is used (not ``random``) because these passwords guard logins.
    """

    if length <= 0:
        raise ValueError("length must be a positive integer")
    pool = string.ascii_letters + string.digits
    chars = [secrets.choice(pool) for _ in range(length)]
    return "".join(chars)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def store_temp_password(
    username: str,
    raw_password: str,
    expires_at: Optional[datetime] = None,
    *,
    allow_change: bool = False,
) -> TempPasswordEntry:
    """Persist a temporary password for ``username`` and return the entry.

    Parameters:
        username: Account the password belongs to; also keys the lockfile.
        raw_password: Plain-text password; only its hash is written to disk.
        expires_at: Expiry timestamp; defaults to one hour from now and is
            promoted to an aware datetime when naive.
        allow_change: Stored verbatim alongside the hash for callers to read
            back via ``load_temp_password``.

    The lockfile is JSON with sorted keys so repeated writes diff cleanly.
    Because it contains a password hash and account metadata it is
    restricted to owner read/write (0o600) rather than the default umask.
    """

    if expires_at is None:
        expires_at = timezone.now() + DEFAULT_EXPIRATION
    if timezone.is_naive(expires_at):
        expires_at = timezone.make_aware(expires_at)
    created_at = timezone.now()
    path = _lockfile_path(username)
    data = {
        "username": username,
        "password_hash": make_password(raw_password),
        "expires_at": expires_at.isoformat(),
        "created_at": created_at.isoformat(),
        "allow_change": allow_change,
    }
    # Create with owner-only permissions before writing the hash; chmod as
    # well in case the file already existed with a looser mode.
    path.touch(mode=0o600, exist_ok=True)
    path.chmod(0o600)
    path.write_text(json.dumps(data, indent=2, sort_keys=True))
    return TempPasswordEntry(
        username=username,
        password_hash=data["password_hash"],
        expires_at=expires_at,
        created_at=created_at,
        path=path,
        allow_change=allow_change,
    )
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def load_temp_password(username: str) -> Optional[TempPasswordEntry]:
    """Return the stored temporary password entry for ``username``, if any.

    Corrupt or incomplete lockfiles are deleted and treated as absent so a
    bad write can never wedge authentication.  Expired entries are still
    returned; callers are expected to consult ``is_expired`` themselves.
    """

    path = _lockfile_path(username)
    if not path.exists():
        return None

    try:
        payload = json.loads(path.read_text())
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Unreadable file: discard it rather than failing every attempt.
        path.unlink(missing_ok=True)
        return None

    expires_at = _parse_timestamp(payload.get("expires_at"))
    password_hash = payload.get("password_hash")
    if not expires_at or not password_hash:
        # Without an expiry and a hash the entry is useless; clean it up.
        path.unlink(missing_ok=True)
        return None

    created_at = _parse_timestamp(payload.get("created_at")) or timezone.now()
    stored_name = payload.get("username") or username

    raw_flag = payload.get("allow_change", False)
    if isinstance(raw_flag, str):
        allow_change = raw_flag.lower() in {"1", "true", "yes", "on"}
    else:
        allow_change = bool(raw_flag)

    return TempPasswordEntry(
        username=stored_name,
        password_hash=password_hash,
        expires_at=expires_at,
        created_at=created_at,
        path=path,
        allow_change=allow_change,
    )
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def discard_temp_password(username: str) -> None:
    """Delete ``username``'s temporary password lockfile, if one exists."""

    _lockfile_path(username).unlink(missing_ok=True)
|
|
181
|
+
|
core/test_system_info.py
CHANGED
|
@@ -1,5 +1,9 @@
|
|
|
1
|
+
import json
|
|
1
2
|
import os
|
|
2
3
|
from pathlib import Path
|
|
4
|
+
from subprocess import CompletedProcess
|
|
5
|
+
from unittest.mock import patch
|
|
6
|
+
|
|
3
7
|
|
|
4
8
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
|
|
5
9
|
|
|
@@ -9,7 +13,8 @@ django.setup()
|
|
|
9
13
|
|
|
10
14
|
from django.conf import settings
|
|
11
15
|
from django.test import SimpleTestCase, override_settings
|
|
12
|
-
from
|
|
16
|
+
from nodes.models import Node, NodeFeature, NodeRole
|
|
17
|
+
from core.system import _gather_info, get_system_sigil_values
|
|
13
18
|
|
|
14
19
|
|
|
15
20
|
class SystemInfoRoleTests(SimpleTestCase):
|
|
@@ -41,3 +46,94 @@ class SystemInfoScreenModeTests(SimpleTestCase):
|
|
|
41
46
|
lock_file.unlink()
|
|
42
47
|
if not any(lock_dir.iterdir()):
|
|
43
48
|
lock_dir.rmdir()
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class SystemInfoRevisionTests(SimpleTestCase):
    """Checks that ``_gather_info`` reports the repository revision."""

    @patch("core.system.revision.get_revision", return_value="abcdef1234567890")
    def test_includes_full_revision(self, mock_revision):
        info = _gather_info()
        # The complete revision string is exposed, not a shortened prefix.
        self.assertEqual(info["revision"], "abcdef1234567890")
        mock_revision.assert_called_once()
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class SystemInfoDatabaseTests(SimpleTestCase):
    """Checks the database metadata collected by ``_gather_info``."""

    def test_collects_database_definitions(self):
        info = _gather_info()
        self.assertIn("databases", info)
        # The "default" alias always exists in a Django settings module.
        aliases = {entry["alias"] for entry in info["databases"]}
        self.assertIn("default", aliases)

    @override_settings(
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
                # A Path (not str) NAME exercises serialisation of path objects.
                "NAME": Path("/tmp/db.sqlite3"),
            }
        }
    )
    def test_serializes_path_database_names(self):
        info = _gather_info()
        databases = info["databases"]
        # Path values must come back as plain strings.
        self.assertEqual(databases[0]["name"], "/tmp/db.sqlite3")
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class SystemInfoRunserverDetectionTests(SimpleTestCase):
    """Checks detection of a local ``manage.py runserver`` process."""

    @patch("core.system.subprocess.run")
    def test_detects_runserver_process_port(self, mock_run):
        # Simulate pgrep output listing a runserver bound to port 8000.
        mock_run.return_value = CompletedProcess(
            args=["pgrep"],
            returncode=0,
            stdout="123 python manage.py runserver 0.0.0.0:8000 --noreload\n",
        )

        info = _gather_info()

        self.assertTrue(info["running"])
        self.assertEqual(info["port"], 8000)

    @patch("core.system._probe_ports", return_value=(True, 8000))
    @patch("core.system.subprocess.run", side_effect=FileNotFoundError)
    def test_falls_back_to_port_probe_when_pgrep_missing(self, mock_run, mock_probe):
        # When pgrep is unavailable the port probe should supply the answer.
        info = _gather_info()

        self.assertTrue(info["running"])
        self.assertEqual(info["port"], 8000)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class SystemSigilValueTests(SimpleTestCase):
    """Checks the flattened values exported for sigil resolution."""

    def test_exports_values_for_sigil_resolution(self):
        # Fully populated fake system-info payload so no probing happens.
        sample_info = {
            "installed": True,
            "revision": "abcdef",
            "service": "gunicorn",
            "mode": "internal",
            "port": 8888,
            "role": "Terminal",
            "screen_mode": "",
            "features": [
                {"display": "Feature", "expected": True, "actual": False, "slug": "feature"}
            ],
            "running": True,
            "service_status": "active",
            "hostname": "example.local",
            "ip_addresses": ["127.0.0.1"],
            "databases": [
                {
                    "alias": "default",
                    "engine": "django.db.backends.sqlite3",
                    "name": "db.sqlite3",
                }
            ],
        }
        with patch("core.system._gather_info", return_value=sample_info):
            values = get_system_sigil_values()

        self.assertEqual(values["REVISION"], "abcdef")
        # Booleans are exported as their string representation.
        self.assertEqual(values["RUNNING"], "True")
        # Mode and port are combined into a single display value.
        self.assertEqual(values["NGINX_MODE"], "internal (8888)")
        self.assertEqual(values["IP_ADDRESSES"], "127.0.0.1")
        # Structured entries (features, databases) round-trip through JSON.
        features = json.loads(values["FEATURES"])
        self.assertEqual(features[0]["display"], "Feature")
        databases = json.loads(values["DATABASES"])
        self.assertEqual(databases[0]["alias"], "default")
|
|
139
|
+
|