arthexis 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their public registry.

Files changed (54)
  1. {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/METADATA +36 -26
  2. arthexis-0.1.12.dist-info/RECORD +102 -0
  3. config/context_processors.py +1 -0
  4. config/settings.py +31 -5
  5. config/urls.py +5 -4
  6. core/admin.py +430 -90
  7. core/apps.py +48 -2
  8. core/backends.py +38 -0
  9. core/environment.py +23 -5
  10. core/mailer.py +3 -1
  11. core/models.py +303 -31
  12. core/reference_utils.py +20 -9
  13. core/release.py +4 -0
  14. core/sigil_builder.py +7 -2
  15. core/sigil_resolver.py +35 -4
  16. core/system.py +250 -1
  17. core/tasks.py +92 -40
  18. core/temp_passwords.py +181 -0
  19. core/test_system_info.py +62 -2
  20. core/tests.py +169 -3
  21. core/user_data.py +51 -8
  22. core/views.py +371 -20
  23. nodes/admin.py +453 -8
  24. nodes/backends.py +21 -6
  25. nodes/dns.py +203 -0
  26. nodes/feature_checks.py +133 -0
  27. nodes/models.py +374 -31
  28. nodes/reports.py +411 -0
  29. nodes/tests.py +677 -38
  30. nodes/utils.py +32 -0
  31. nodes/views.py +14 -0
  32. ocpp/admin.py +278 -15
  33. ocpp/consumers.py +517 -16
  34. ocpp/evcs_discovery.py +158 -0
  35. ocpp/models.py +237 -4
  36. ocpp/reference_utils.py +42 -0
  37. ocpp/simulator.py +321 -22
  38. ocpp/store.py +110 -2
  39. ocpp/test_rfid.py +169 -7
  40. ocpp/tests.py +819 -6
  41. ocpp/transactions_io.py +17 -3
  42. ocpp/views.py +233 -19
  43. pages/admin.py +144 -4
  44. pages/context_processors.py +21 -7
  45. pages/defaults.py +13 -0
  46. pages/forms.py +38 -0
  47. pages/models.py +189 -15
  48. pages/tests.py +281 -8
  49. pages/urls.py +4 -0
  50. pages/views.py +137 -21
  51. arthexis-0.1.10.dist-info/RECORD +0 -95
  52. {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/WHEEL +0 -0
  53. {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/licenses/LICENSE +0 -0
  54. {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/top_level.txt +0 -0
core/reference_utils.py CHANGED
@@ -30,7 +30,7 @@ def filter_visible_references(
     if host:
         site = Site.objects.filter(domain__iexact=host).first()

-        site_id = site.pk if site else None
+        site_id = getattr(site, "pk", None)

     if node is None:
         try:
@@ -41,16 +41,27 @@ def filter_visible_references(
             node = None

     node_role_id = getattr(node, "role_id", None)
-    node_feature_ids: set[int] = set()
+    node_active_feature_ids: set[int] = set()
     if node is not None:
-        features_manager = getattr(node, "features", None)
-        if features_manager is not None:
+        assignments_manager = getattr(node, "feature_assignments", None)
+        if assignments_manager is not None:
             try:
-                node_feature_ids = set(
-                    features_manager.values_list("pk", flat=True)
+                assignments = list(
+                    assignments_manager.filter(is_deleted=False).select_related(
+                        "feature"
+                    )
                 )
             except Exception:
-                node_feature_ids = set()
+                assignments = []
+            for assignment in assignments:
+                feature = getattr(assignment, "feature", None)
+                if feature is None or getattr(feature, "is_deleted", False):
+                    continue
+                try:
+                    if feature.is_enabled:
+                        node_active_feature_ids.add(feature.pk)
+                except Exception:
+                    continue

     visible_refs: list["Reference"] = []
     for ref in refs:
@@ -64,8 +75,8 @@ def filter_visible_references(
             allowed = True
         elif (
             required_features
-            and node_feature_ids
-            and node_feature_ids.intersection(required_features)
+            and node_active_feature_ids
+            and node_active_feature_ids.intersection(required_features)
         ):
             allowed = True
         elif required_sites and site_id and site_id in required_sites:
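The practical effect: a feature now has to clear three checks before it unlocks a reference. Its assignment must not be soft-deleted, the feature itself must not be soft-deleted, and is_enabled must come back true. A minimal sketch of that filter, assuming hypothetical models shaped like the fields this diff touches, not the actual arthexis schema:

def active_feature_ids(node) -> set[int]:
    # Sketch: mirrors the new visibility rule; names are illustrative.
    ids: set[int] = set()
    assignments = node.feature_assignments.filter(
        is_deleted=False
    ).select_related("feature")
    for assignment in assignments:
        feature = assignment.feature
        if feature is None or feature.is_deleted:
            continue  # soft-deleted features no longer grant visibility
        try:
            if feature.is_enabled:  # guarded: the check may probe the runtime
                ids.add(feature.pk)
        except Exception:
            continue
    return ids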
core/release.py CHANGED
@@ -344,3 +344,7 @@ def publish(
     proc = subprocess.run(cmd, capture_output=True, text=True)
     if proc.returncode != 0:
         raise ReleaseError(proc.stdout + proc.stderr)
+
+    tag_name = f"v{version}"
+    _run(["git", "tag", tag_name])
+    _run(["git", "push", "origin", tag_name])
core/sigil_builder.py CHANGED
@@ -17,7 +17,7 @@ from .sigil_resolver import (
 def generate_model_sigils(**kwargs) -> None:
     """Ensure built-in configuration SigilRoot entries exist."""
     SigilRoot = apps.get_model("core", "SigilRoot")
-    for prefix in ["ENV", "SYS"]:
+    for prefix in ["ENV", "CONF", "SYS"]:
         # Ensure built-in configuration roots exist without violating the
         # unique ``prefix`` constraint, even if older databases already have
         # entries with a different ``context_type``.
@@ -40,7 +40,12 @@ def _sigil_builder_view(request):
         {
             "prefix": "ENV",
             "url": reverse("admin:environment"),
-            "label": _("Environment"),
+            "label": _("Environ"),
+        },
+        {
+            "prefix": "CONF",
+            "url": reverse("admin:config"),
+            "label": _("Config"),
         },
         {
             "prefix": "SYS",
core/sigil_resolver.py CHANGED
@@ -11,6 +11,7 @@ from django.core import serializers
 from django.db import models

 from .sigil_context import get_context
+from .system import get_system_sigil_values, resolve_system_namespace_value

 logger = logging.getLogger("core.entity")

@@ -150,6 +151,18 @@ def _resolve_token(token: str, current: Optional[models.Model] = None) -> str:
     SigilRoot = apps.get_model("core", "SigilRoot")
     try:
         root = SigilRoot.objects.get(prefix__iexact=lookup_root)
+    except SigilRoot.DoesNotExist:
+        logger.warning("Unknown sigil root [%s]", lookup_root)
+        return _failed_resolution(original_token)
+    except Exception:
+        logger.exception(
+            "Error resolving sigil [%s.%s]",
+            lookup_root,
+            key_upper or normalized_key or raw_key,
+        )
+        return _failed_resolution(original_token)
+
+    try:
         if root.context_type == SigilRoot.Context.CONFIG:
             if not normalized_key:
                 return ""
@@ -176,7 +189,7 @@ def _resolve_token(token: str, current: Optional[models.Model] = None) -> str:
                 key_upper or normalized_key or raw_key or "",
             )
             return _failed_resolution(original_token)
-            if root.prefix.upper() == "SYS":
+            if root.prefix.upper() == "CONF":
                 for candidate in [normalized_key, key_upper, key_lower]:
                     if not candidate:
                         continue
@@ -188,6 +201,26 @@ def _resolve_token(token: str, current: Optional[models.Model] = None) -> str:
                     if fallback is not None:
                         return fallback
                 return ""
+            if root.prefix.upper() == "SYS":
+                values = get_system_sigil_values()
+                candidates = {
+                    key_upper,
+                    normalized_key.upper() if normalized_key else None,
+                    (raw_key or "").upper(),
+                }
+                for candidate in candidates:
+                    if not candidate:
+                        continue
+                    if candidate in values:
+                        return values[candidate]
+                    resolved = resolve_system_namespace_value(candidate)
+                    if resolved is not None:
+                        return resolved
+                logger.warning(
+                    "Missing system information for sigil [SYS.%s]",
+                    key_upper or normalized_key or raw_key or "",
+                )
+                return _failed_resolution(original_token)
         elif root.context_type == SigilRoot.Context.ENTITY:
             model = root.content_type.model_class() if root.content_type else None
             instance = None
@@ -243,15 +276,13 @@ def _resolve_token(token: str, current: Optional[models.Model] = None) -> str:
                     return _failed_resolution(original_token)
                 return serializers.serialize("json", [instance])
         return _failed_resolution(original_token)
-    except SigilRoot.DoesNotExist:
-        logger.warning("Unknown sigil root [%s]", lookup_root)
     except Exception:
         logger.exception(
             "Error resolving sigil [%s.%s]",
             lookup_root,
             key_upper or normalized_key or raw_key,
         )
-        return _failed_resolution(original_token)
+    return _failed_resolution(original_token)


 def resolve_sigils(text: str, current: Optional[models.Model] = None) -> str:
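With the root lookup hoisted into its own try block, an unknown prefix now fails fast, and the remaining resolution logic shares a single fallback return. The new SYS branch consults the precomputed value table first, then the namespace handlers. Illustrative calls, assuming the bracketed [ROOT.KEY] token syntax the log messages suggest:

from core.sigil_resolver import resolve_sigils

resolve_sigils("Running on [SYS.HOSTNAME]")      # direct hit in get_system_sigil_values()
resolve_sigils("[SYS.NEXT-VER-CHECK]")           # hyphens normalize to NEXT_VER_CHECK
resolve_sigils("[SYS.AUTO-UPGRADE.NEXT-CHECK]")  # dotted key routed to the AUTO_UPGRADE handler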
core/system.py CHANGED
@@ -1,25 +1,265 @@
 from __future__ import annotations

 from contextlib import closing
+from dataclasses import dataclass
+from datetime import datetime
 from pathlib import Path
+import json
 import re
 import socket
 import subprocess
 import shutil
+from typing import Callable, Iterable, Optional

 from django.conf import settings
 from django.contrib import admin
 from django.template.response import TemplateResponse
 from django.urls import path
+from django.utils import timezone
+from django.utils.formats import date_format
 from django.utils.translation import gettext_lazy as _

+from core.auto_upgrade import AUTO_UPGRADE_TASK_NAME
 from utils import revision


+@dataclass(frozen=True)
+class SystemField:
+    """Metadata describing a single entry on the system admin page."""
+
+    label: str
+    sigil_key: str
+    value: object
+    field_type: str = "text"
+
+    @property
+    def sigil(self) -> str:
+        return f"SYS.{self.sigil_key}"
+
+
 _RUNSERVER_PORT_PATTERN = re.compile(r":(\d{2,5})(?:\D|$)")
 _RUNSERVER_PORT_FLAG_PATTERN = re.compile(r"--port(?:=|\s+)(\d{2,5})", re.IGNORECASE)


+def _format_timestamp(dt: datetime | None) -> str:
+    """Return ``dt`` formatted using the active ``DATETIME_FORMAT``."""
+
+    if dt is None:
+        return ""
+    try:
+        localized = timezone.localtime(dt)
+    except Exception:
+        localized = dt
+    return date_format(localized, "DATETIME_FORMAT")
+
+
+def _auto_upgrade_next_check() -> str:
+    """Return the human-readable timestamp for the next auto-upgrade check."""
+
+    try:  # pragma: no cover - optional dependency failures
+        from django_celery_beat.models import PeriodicTask
+    except Exception:
+        return ""
+
+    try:
+        task = (
+            PeriodicTask.objects.select_related(
+                "interval", "crontab", "solar", "clocked"
+            )
+            .only("enabled", "last_run_at", "start_time", "name")
+            .get(name=AUTO_UPGRADE_TASK_NAME)
+        )
+    except PeriodicTask.DoesNotExist:
+        return ""
+    except Exception:  # pragma: no cover - database unavailable
+        return ""
+
+    if not task.enabled:
+        return str(_("Disabled"))
+
+    schedule = task.schedule
+    if schedule is None:
+        return ""
+
+    now = schedule.maybe_make_aware(schedule.now())
+
+    start_time = task.start_time
+    if start_time is not None:
+        try:
+            candidate_start = schedule.maybe_make_aware(start_time)
+        except Exception:
+            candidate_start = (
+                timezone.make_aware(start_time)
+                if timezone.is_naive(start_time)
+                else start_time
+            )
+        if candidate_start and candidate_start > now:
+            return _format_timestamp(candidate_start)
+
+    last_run_at = task.last_run_at
+    if last_run_at is not None:
+        try:
+            reference = schedule.maybe_make_aware(last_run_at)
+        except Exception:
+            reference = (
+                timezone.make_aware(last_run_at)
+                if timezone.is_naive(last_run_at)
+                else last_run_at
+            )
+    else:
+        reference = now
+
+    try:
+        remaining = schedule.remaining_estimate(reference)
+    except Exception:
+        return ""
+
+    next_run = now + remaining
+    return _format_timestamp(next_run)
+
+
+def _resolve_auto_upgrade_namespace(key: str) -> str | None:
+    """Resolve sigils within the legacy ``AUTO-UPGRADE`` namespace."""
+
+    normalized = key.replace("-", "_").upper()
+    if normalized == "NEXT_CHECK":
+        return _auto_upgrade_next_check()
+    return None
+
+
+_SYSTEM_SIGIL_NAMESPACES: dict[str, Callable[[str], Optional[str]]] = {
+    "AUTO_UPGRADE": _resolve_auto_upgrade_namespace,
+}
+
+
+def resolve_system_namespace_value(key: str) -> str | None:
+    """Resolve dot-notation sigils mapped to dynamic ``SYS`` namespaces."""
+
+    if not key:
+        return None
+    normalized_key = key.replace("-", "_").upper()
+    if normalized_key == "NEXT_VER_CHECK":
+        return _auto_upgrade_next_check()
+    namespace, _, remainder = key.partition(".")
+    if not remainder:
+        return None
+    normalized = namespace.replace("-", "_").upper()
+    handler = _SYSTEM_SIGIL_NAMESPACES.get(normalized)
+    if not handler:
+        return None
+    return handler(remainder)
+
+
+def _database_configurations() -> list[dict[str, str]]:
+    """Return a normalized list of configured database connections."""
+
+    databases: list[dict[str, str]] = []
+    for alias, config in settings.DATABASES.items():
+        engine = config.get("ENGINE", "")
+        name = config.get("NAME", "")
+        if engine is None:
+            engine = ""
+        if name is None:
+            name = ""
+        databases.append({
+            "alias": alias,
+            "engine": str(engine),
+            "name": str(name),
+        })
+    databases.sort(key=lambda entry: entry["alias"].lower())
+    return databases
+
+
+def _build_system_fields(info: dict[str, object]) -> list[SystemField]:
+    """Convert gathered system information into renderable rows."""
+
+    fields: list[SystemField] = []
+
+    def add_field(label: str, key: str, value: object, *, field_type: str = "text", visible: bool = True) -> None:
+        if not visible:
+            return
+        fields.append(SystemField(label=label, sigil_key=key, value=value, field_type=field_type))
+
+    add_field(_("Suite installed"), "INSTALLED", info.get("installed", False), field_type="boolean")
+    add_field(_("Revision"), "REVISION", info.get("revision", ""))
+
+    service_value = info.get("service") or _("not installed")
+    add_field(_("Service"), "SERVICE", service_value)
+
+    nginx_mode = info.get("mode", "")
+    port = info.get("port", "")
+    nginx_display = f"{nginx_mode} ({port})" if port else nginx_mode
+    add_field(_("Nginx mode"), "NGINX_MODE", nginx_display)
+
+    add_field(_("Node role"), "NODE_ROLE", info.get("role", ""))
+    add_field(
+        _("Display mode"),
+        "DISPLAY_MODE",
+        info.get("screen_mode", ""),
+        visible=bool(info.get("screen_mode")),
+    )
+
+    add_field(_("Features"), "FEATURES", info.get("features", []), field_type="features")
+    add_field(_("Running"), "RUNNING", info.get("running", False), field_type="boolean")
+    add_field(
+        _("Service status"),
+        "SERVICE_STATUS",
+        info.get("service_status", ""),
+        visible=bool(info.get("service")),
+    )
+
+    add_field(_("Hostname"), "HOSTNAME", info.get("hostname", ""))
+
+    ip_addresses: Iterable[str] = info.get("ip_addresses", [])  # type: ignore[assignment]
+    add_field(_("IP addresses"), "IP_ADDRESSES", " ".join(ip_addresses))
+
+    add_field(
+        _("Databases"),
+        "DATABASES",
+        info.get("databases", []),
+        field_type="databases",
+    )
+
+    add_field(
+        _("Next version check"),
+        "NEXT-VER-CHECK",
+        info.get("auto_upgrade_next_check", ""),
+    )
+
+    return fields
+
+
+def _export_field_value(field: SystemField) -> str:
+    """Serialize a ``SystemField`` value for sigil resolution."""
+
+    if field.field_type in {"features", "databases"}:
+        return json.dumps(field.value)
+    if field.field_type == "boolean":
+        return "True" if field.value else "False"
+    if field.value is None:
+        return ""
+    return str(field.value)
+
+
+def get_system_sigil_values() -> dict[str, str]:
+    """Expose system information in a format suitable for sigil lookups."""
+
+    info = _gather_info()
+    values: dict[str, str] = {}
+    for field in _build_system_fields(info):
+        exported = _export_field_value(field)
+        raw_key = (field.sigil_key or "").strip()
+        if not raw_key:
+            continue
+        variants = {
+            raw_key.upper(),
+            raw_key.replace("-", "_").upper(),
+        }
+        for variant in variants:
+            values[variant] = exported
+    return values
+
+
 def _parse_runserver_port(command_line: str) -> int | None:
     """Extract the HTTP port from a runserver command line."""

@@ -219,6 +459,9 @@ def _gather_info() -> dict:
     info["hostname"] = hostname
     info["ip_addresses"] = ip_list

+    info["databases"] = _database_configurations()
+    info["auto_upgrade_next_check"] = _auto_upgrade_next_check()
+
     return info


@@ -226,7 +469,13 @@ def _system_view(request):
     info = _gather_info()

     context = admin.site.each_context(request)
-    context.update({"title": _("System"), "info": info})
+    context.update(
+        {
+            "title": _("System"),
+            "info": info,
+            "system_fields": _build_system_fields(info),
+        }
+    )
     return TemplateResponse(request, "admin/system.html", context)

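For a concrete sense of the shapes involved: get_system_sigil_values() flattens each visible SystemField into string values under upper-cased key variants, so a hyphenated key such as NEXT-VER-CHECK is reachable under both spellings. An illustrative result on a hypothetical host:

{
    "INSTALLED": "True",        # booleans export as "True"/"False"
    "REVISION": "abc1234",
    "SERVICE": "arthexis.service",
    "HOSTNAME": "node-01",
    "IP_ADDRESSES": "192.168.1.10 10.0.0.5",
    "DATABASES": '[{"alias": "default", "engine": "django.db.backends.sqlite3", "name": "db.sqlite3"}]',
    "NEXT-VER-CHECK": "Jan. 1, 2025, 3 a.m.",
    "NEXT_VER_CHECK": "Jan. 1, 2025, 3 a.m.",
}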
core/tasks.py CHANGED
@@ -17,7 +17,7 @@ from nodes.models import NetMessage


 AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
-AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS = 3
+AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"


 logger = logging.getLogger(__name__)
@@ -66,6 +66,46 @@ def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
         logger.warning("Failed to append auto-upgrade log entry: %s", message)


+def _skip_lock_path(base_dir: Path) -> Path:
+    return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
+
+
+def _load_skipped_revisions(base_dir: Path) -> set[str]:
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        return {
+            line.strip()
+            for line in skip_file.read_text().splitlines()
+            if line.strip()
+        }
+    except FileNotFoundError:
+        return set()
+    except OSError:
+        logger.warning("Failed to read auto-upgrade skip lockfile")
+        return set()
+
+
+def _add_skipped_revision(base_dir: Path, revision: str) -> None:
+    if not revision:
+        return
+
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        skip_file.parent.mkdir(parents=True, exist_ok=True)
+        existing = _load_skipped_revisions(base_dir)
+        if revision in existing:
+            return
+        with skip_file.open("a", encoding="utf-8") as fh:
+            fh.write(f"{revision}\n")
+        _append_auto_upgrade_log(
+            base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
+        )
+    except OSError:
+        logger.warning(
+            "Failed to update auto-upgrade skip lockfile with revision %s", revision
+        )
+
+
 def _resolve_service_url(base_dir: Path) -> str:
     """Return the local URL used to probe the Django suite."""

@@ -110,6 +150,23 @@ def check_github_updates() -> None:
     except Exception:
         startup = None

+    remote_revision = (
+        subprocess.check_output(
+            ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
+        )
+        .decode()
+        .strip()
+    )
+
+    skipped_revisions = _load_skipped_revisions(base_dir)
+    if remote_revision in skipped_revisions:
+        _append_auto_upgrade_log(
+            base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
+        )
+        if startup:
+            startup()
+        return
+
     upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")

     upgrade_was_applied = False
@@ -120,19 +177,7 @@
             .decode()
             .strip()
         )
-        remote = (
-            subprocess.check_output(
-                [
-                    "git",
-                    "rev-parse",
-                    f"origin/{branch}",
-                ],
-                cwd=base_dir,
-            )
-            .decode()
-            .strip()
-        )
-        if local == remote:
+        if local == remote_revision:
             if startup:
                 startup()
             return
@@ -254,12 +299,29 @@ def _schedule_health_check(next_attempt: int) -> None:
     )


+def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
+    revision = ""
+    try:
+        revision = (
+            subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
+            .decode()
+            .strip()
+        )
+    except Exception:  # pragma: no cover - best effort capture
+        logger.warning("Failed to determine revision during auto-upgrade revert")
+
+    _add_skipped_revision(base_dir, revision)
+    _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
+    subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
+
+
 @shared_task
 def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     """Verify the upgraded suite responds successfully.

-    When the check fails three times in a row the upgrade is rolled back by
-    invoking ``upgrade.sh --revert``.
+    After the post-upgrade delay the site is probed once; any response other
+    than HTTP 200 triggers an automatic revert and records the failing
+    revision so future upgrade attempts skip it.
     """

     base_dir = Path(__file__).resolve().parent.parent
@@ -270,33 +332,29 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     )

     status: int | None = None
+    detail = "succeeded"
     try:
         with urllib.request.urlopen(request, timeout=10) as response:
             status = getattr(response, "status", response.getcode())
     except urllib.error.HTTPError as exc:
         status = exc.code
+        detail = f"returned HTTP {exc.code}"
         logger.warning(
             "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
         )
     except urllib.error.URLError as exc:
+        detail = f"failed with {exc}"
         logger.warning(
             "Auto-upgrade health check attempt %s failed: %s", attempt, exc
         )
     except Exception as exc:  # pragma: no cover - unexpected network error
+        detail = f"failed with {exc}"
         logger.exception(
             "Unexpected error probing suite during auto-upgrade attempt %s", attempt
         )
-        detail = f"failed with {exc}"
         _record_health_check_result(base_dir, attempt, status, detail)
-        if attempt >= AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS:
-            _append_auto_upgrade_log(
-                base_dir,
-                "Health check raised unexpected error; reverting upgrade",
-            )
-            subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-        else:
-            _schedule_health_check(attempt + 1)
-        return None
+        _handle_failed_health_check(base_dir, detail)
+        return False

     if status == 200:
         _record_health_check_result(base_dir, attempt, status, "succeeded")
@@ -307,21 +365,15 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
         )
         return True

-    _record_health_check_result(base_dir, attempt, status, "failed")
-
-    if attempt >= AUTO_UPGRADE_HEALTH_MAX_ATTEMPTS:
-        logger.error(
-            "Auto-upgrade health check failed after %s attempts; reverting", attempt
-        )
-        _append_auto_upgrade_log(
-            base_dir,
-            "Health check failed three times; reverting upgrade",
-        )
-        subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-        return False
+    if detail == "succeeded":
+        if status is not None:
+            detail = f"returned HTTP {status}"
+        else:
+            detail = "failed with unknown status"

-    _schedule_health_check(attempt + 1)
-    return None
+    _record_health_check_result(base_dir, attempt, status, detail)
+    _handle_failed_health_check(base_dir, detail)
+    return False


 @shared_task
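The revert path now leaves an audit trail: every failed health check records the reverted revision in locks/auto_upgrade_skip_revisions.lck under the project root, and check_github_updates refuses to re-apply any revision listed there. An operator can unblock a revision by removing its line; a hypothetical inspection:

from pathlib import Path

base_dir = Path("/opt/arthexis")  # hypothetical install location
skip_file = base_dir / "locks" / "auto_upgrade_skip_revisions.lck"
print(skip_file.read_text())
# 1f4e9c2ab8...  one full commit hash per line, appended at revert time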