arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis might be problematic; consult the registry's advisory listing for more details.

Files changed (112)
  1. arthexis-0.1.26.dist-info/METADATA +272 -0
  2. arthexis-0.1.26.dist-info/RECORD +111 -0
  3. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +29 -29
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -25
  9. config/context_processors.py +67 -68
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +71 -25
  14. config/offline.py +49 -49
  15. config/settings.py +676 -492
  16. config/settings_helpers.py +109 -0
  17. config/urls.py +228 -159
  18. config/wsgi.py +17 -17
  19. core/admin.py +4052 -2066
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +192 -151
  22. core/apps.py +350 -223
  23. core/auto_upgrade.py +72 -0
  24. core/backends.py +311 -124
  25. core/changelog.py +403 -0
  26. core/entity.py +149 -133
  27. core/environment.py +60 -43
  28. core/fields.py +168 -75
  29. core/form_fields.py +75 -0
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +183 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +89 -83
  37. core/middleware.py +91 -91
  38. core/models.py +5041 -2195
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +107 -0
  42. core/release.py +940 -346
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -131
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +250 -284
  47. core/system.py +1425 -230
  48. core/tasks.py +538 -199
  49. core/temp_passwords.py +181 -0
  50. core/test_system_info.py +202 -43
  51. core/tests.py +2673 -1069
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +681 -495
  55. core/views.py +2484 -789
  56. core/widgets.py +213 -51
  57. nodes/admin.py +2236 -445
  58. nodes/apps.py +98 -70
  59. nodes/backends.py +160 -53
  60. nodes/dns.py +203 -0
  61. nodes/feature_checks.py +133 -0
  62. nodes/lcd.py +165 -165
  63. nodes/models.py +2375 -870
  64. nodes/reports.py +411 -0
  65. nodes/rfid_sync.py +210 -0
  66. nodes/signals.py +18 -0
  67. nodes/tasks.py +141 -46
  68. nodes/tests.py +5045 -1489
  69. nodes/urls.py +29 -13
  70. nodes/utils.py +172 -73
  71. nodes/views.py +1768 -304
  72. ocpp/admin.py +1775 -481
  73. ocpp/apps.py +25 -25
  74. ocpp/consumers.py +1843 -630
  75. ocpp/evcs.py +844 -928
  76. ocpp/evcs_discovery.py +158 -0
  77. ocpp/models.py +1417 -640
  78. ocpp/network.py +398 -0
  79. ocpp/reference_utils.py +42 -0
  80. ocpp/routing.py +11 -9
  81. ocpp/simulator.py +745 -368
  82. ocpp/status_display.py +26 -0
  83. ocpp/store.py +603 -403
  84. ocpp/tasks.py +479 -31
  85. ocpp/test_export_import.py +131 -130
  86. ocpp/test_rfid.py +1072 -540
  87. ocpp/tests.py +5494 -2296
  88. ocpp/transactions_io.py +197 -165
  89. ocpp/urls.py +50 -50
  90. ocpp/views.py +2024 -912
  91. pages/admin.py +1123 -396
  92. pages/apps.py +45 -10
  93. pages/checks.py +40 -40
  94. pages/context_processors.py +151 -85
  95. pages/defaults.py +13 -0
  96. pages/forms.py +221 -0
  97. pages/middleware.py +213 -153
  98. pages/models.py +720 -252
  99. pages/module_defaults.py +156 -0
  100. pages/site_config.py +137 -0
  101. pages/tasks.py +74 -0
  102. pages/tests.py +4009 -1389
  103. pages/urls.py +38 -20
  104. pages/utils.py +93 -12
  105. pages/views.py +1736 -762
  106. arthexis-0.1.9.dist-info/METADATA +0 -168
  107. arthexis-0.1.9.dist-info/RECORD +0 -92
  108. core/workgroup_urls.py +0 -17
  109. core/workgroup_views.py +0 -94
  110. nodes/actions.py +0 -70
  111. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
  112. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/tasks.py CHANGED
@@ -1,199 +1,538 @@
1
- from __future__ import annotations
2
-
3
- import logging
4
- import subprocess
5
- from datetime import datetime
6
- from pathlib import Path
7
-
8
- from celery import shared_task
9
- from django.conf import settings
10
- from django.contrib.auth import get_user_model
11
- from core import mailer
12
- from core import github_issues
13
- from django.utils import timezone
14
-
15
- from nodes.models import NetMessage
16
-
17
-
18
- logger = logging.getLogger(__name__)
19
-
20
-
21
- @shared_task
22
- def heartbeat() -> None:
23
- """Log a simple heartbeat message."""
24
- logger.info("Heartbeat task executed")
25
-
26
-
27
- @shared_task
28
- def birthday_greetings() -> None:
29
- """Send birthday greetings to users via Net Message and email."""
30
- User = get_user_model()
31
- today = timezone.localdate()
32
- for user in User.objects.filter(birthday=today):
33
- NetMessage.broadcast("Happy bday!", user.username)
34
- if user.email:
35
- mailer.send(
36
- "Happy bday!",
37
- f"Happy bday! {user.username}",
38
- [user.email],
39
- settings.DEFAULT_FROM_EMAIL,
40
- fail_silently=True,
41
- )
42
-
43
-
44
- @shared_task
45
- def check_github_updates() -> None:
46
- """Check the GitHub repo for updates and upgrade if needed."""
47
- base_dir = Path(__file__).resolve().parent.parent
48
- mode_file = base_dir / "locks" / "auto_upgrade.lck"
49
- mode = "version"
50
- if mode_file.exists():
51
- mode = mode_file.read_text().strip()
52
-
53
- branch = "main"
54
- subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)
55
-
56
- log_dir = base_dir / "logs"
57
- log_dir.mkdir(parents=True, exist_ok=True)
58
- log_file = log_dir / "auto-upgrade.log"
59
- with log_file.open("a") as fh:
60
- fh.write(f"{datetime.utcnow().isoformat()} check_github_updates triggered\n")
61
-
62
- notify = None
63
- startup = None
64
- try: # pragma: no cover - optional dependency
65
- from core.notifications import notify # type: ignore
66
- except Exception:
67
- notify = None
68
- try: # pragma: no cover - optional dependency
69
- from nodes.apps import _startup_notification as startup # type: ignore
70
- except Exception:
71
- startup = None
72
-
73
- if mode == "latest":
74
- local = (
75
- subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
76
- .decode()
77
- .strip()
78
- )
79
- remote = (
80
- subprocess.check_output(
81
- [
82
- "git",
83
- "rev-parse",
84
- f"origin/{branch}",
85
- ],
86
- cwd=base_dir,
87
- )
88
- .decode()
89
- .strip()
90
- )
91
- if local == remote:
92
- if startup:
93
- startup()
94
- return
95
- if notify:
96
- notify("Upgrading...", "")
97
- args = ["./upgrade.sh", "--latest", "--no-restart"]
98
- else:
99
- local = "0"
100
- version_file = base_dir / "VERSION"
101
- if version_file.exists():
102
- local = version_file.read_text().strip()
103
- remote = (
104
- subprocess.check_output(
105
- [
106
- "git",
107
- "show",
108
- f"origin/{branch}:VERSION",
109
- ],
110
- cwd=base_dir,
111
- )
112
- .decode()
113
- .strip()
114
- )
115
- if local == remote:
116
- if startup:
117
- startup()
118
- return
119
- if notify:
120
- notify("Upgrading...", "")
121
- args = ["./upgrade.sh", "--no-restart"]
122
-
123
- with log_file.open("a") as fh:
124
- fh.write(f"{datetime.utcnow().isoformat()} running: {' '.join(args)}\n")
125
-
126
- subprocess.run(args, cwd=base_dir, check=True)
127
-
128
- service_file = base_dir / "locks/service.lck"
129
- if service_file.exists():
130
- service = service_file.read_text().strip()
131
- subprocess.run(
132
- [
133
- "sudo",
134
- "systemctl",
135
- "kill",
136
- "--signal=TERM",
137
- service,
138
- ]
139
- )
140
- else:
141
- subprocess.run(["pkill", "-f", "manage.py runserver"])
142
-
143
-
144
- @shared_task
145
- def poll_email_collectors() -> None:
146
- """Poll all configured email collectors for new messages."""
147
- try:
148
- from .models import EmailCollector
149
- except Exception: # pragma: no cover - app not ready
150
- return
151
-
152
- for collector in EmailCollector.objects.all():
153
- collector.collect()
154
-
155
-
156
- @shared_task
157
- def report_runtime_issue(
158
- title: str,
159
- body: str,
160
- labels: list[str] | None = None,
161
- fingerprint: str | None = None,
162
- ):
163
- """Report a runtime issue to GitHub using :mod:`core.github_issues`."""
164
-
165
- try:
166
- response = github_issues.create_issue(
167
- title,
168
- body,
169
- labels=labels,
170
- fingerprint=fingerprint,
171
- )
172
- except Exception:
173
- logger.exception("Failed to report runtime issue '%s'", title)
174
- raise
175
-
176
- if response is None:
177
- logger.info("Skipped GitHub issue creation for fingerprint %s", fingerprint)
178
- else:
179
- logger.info("Reported runtime issue '%s' to GitHub", title)
180
-
181
- return response
182
-
183
-
184
- @shared_task
185
- def run_client_report_schedule(schedule_id: int) -> None:
186
- """Execute a :class:`core.models.ClientReportSchedule` run."""
187
-
188
- from core.models import ClientReportSchedule
189
-
190
- schedule = ClientReportSchedule.objects.filter(pk=schedule_id).first()
191
- if not schedule:
192
- logger.warning("ClientReportSchedule %s no longer exists", schedule_id)
193
- return
194
-
195
- try:
196
- schedule.run()
197
- except Exception:
198
- logger.exception("ClientReportSchedule %s failed", schedule_id)
199
- raise
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import shutil
5
+ import re
6
+ import subprocess
7
+ from pathlib import Path
8
+ import urllib.error
9
+ import urllib.request
10
+
11
+ from celery import shared_task
12
+ from core import github_issues
13
+ from django.db import DatabaseError
14
+ from django.utils import timezone
15
+
16
+
17
+ AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
18
+ AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
19
+
20
+ SEVERITY_NORMAL = "normal"
21
+ SEVERITY_LOW = "low"
22
+ SEVERITY_CRITICAL = "critical"
23
+
24
+ _PackageReleaseModel = None
25
+
26
+
27
+ def _get_package_release_model():
28
+ """Return the :class:`core.models.PackageRelease` model when available."""
29
+
30
+ global _PackageReleaseModel
31
+
32
+ if _PackageReleaseModel is not None:
33
+ return _PackageReleaseModel
34
+
35
+ try:
36
+ from core.models import PackageRelease # noqa: WPS433 - runtime import
37
+ except Exception: # pragma: no cover - app registry not ready
38
+ return None
39
+
40
+ _PackageReleaseModel = PackageRelease
41
+ return PackageRelease
42
+
43
+
44
+ model = _get_package_release_model()
45
+ if model is not None: # pragma: no branch - runtime constant setup
46
+ SEVERITY_NORMAL = model.Severity.NORMAL
47
+ SEVERITY_LOW = model.Severity.LOW
48
+ SEVERITY_CRITICAL = model.Severity.CRITICAL
49
+
50
+
51
+ logger = logging.getLogger(__name__)
52
+
53
+
54
+ @shared_task
55
+ def heartbeat() -> None:
56
+ """Log a simple heartbeat message."""
57
+ logger.info("Heartbeat task executed")
58
+
59
+
60
+ def _auto_upgrade_log_path(base_dir: Path) -> Path:
61
+ """Return the log file used for auto-upgrade events."""
62
+
63
+ log_dir = base_dir / "logs"
64
+ log_dir.mkdir(parents=True, exist_ok=True)
65
+ return log_dir / "auto-upgrade.log"
66
+
67
+
68
+ def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
69
+ """Append ``message`` to the auto-upgrade log, ignoring errors."""
70
+
71
+ try:
72
+ log_file = _auto_upgrade_log_path(base_dir)
73
+ timestamp = timezone.now().isoformat()
74
+ with log_file.open("a") as fh:
75
+ fh.write(f"{timestamp} {message}\n")
76
+ except Exception: # pragma: no cover - best effort logging only
77
+ logger.warning("Failed to append auto-upgrade log entry: %s", message)
78
+
79
+
80
+ def _resolve_release_severity(version: str | None) -> str:
81
+ """Return the stored severity for *version*, defaulting to normal."""
82
+
83
+ if not version:
84
+ return SEVERITY_NORMAL
85
+
86
+ model = _get_package_release_model()
87
+ if model is None:
88
+ return SEVERITY_NORMAL
89
+
90
+ try:
91
+ queryset = model.objects.filter(version=version)
92
+ release = (
93
+ queryset.filter(package__is_active=True).first() or queryset.first()
94
+ )
95
+ except DatabaseError: # pragma: no cover - depends on DB availability
96
+ return SEVERITY_NORMAL
97
+
98
+ if not release:
99
+ return SEVERITY_NORMAL
100
+
101
+ severity = getattr(release, "severity", None)
102
+ if not severity:
103
+ return SEVERITY_NORMAL
104
+ return severity
105
+
106
+
107
+ def _read_local_version(base_dir: Path) -> str | None:
108
+ """Return the local VERSION file contents when readable."""
109
+
110
+ version_path = base_dir / "VERSION"
111
+ if not version_path.exists():
112
+ return None
113
+ try:
114
+ return version_path.read_text().strip()
115
+ except OSError: # pragma: no cover - filesystem error
116
+ return None
117
+
118
+
119
+ def _read_remote_version(base_dir: Path, branch: str) -> str | None:
120
+ """Return the VERSION file from ``origin/<branch>`` when available."""
121
+
122
+ try:
123
+ return (
124
+ subprocess.check_output(
125
+ [
126
+ "git",
127
+ "show",
128
+ f"origin/{branch}:VERSION",
129
+ ],
130
+ cwd=base_dir,
131
+ )
132
+ .decode()
133
+ .strip()
134
+ )
135
+ except subprocess.CalledProcessError: # pragma: no cover - git failure
136
+ return None
137
+
138
+
139
+ def _skip_lock_path(base_dir: Path) -> Path:
140
+ return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
141
+
142
+
143
+ def _load_skipped_revisions(base_dir: Path) -> set[str]:
144
+ skip_file = _skip_lock_path(base_dir)
145
+ try:
146
+ return {
147
+ line.strip()
148
+ for line in skip_file.read_text().splitlines()
149
+ if line.strip()
150
+ }
151
+ except FileNotFoundError:
152
+ return set()
153
+ except OSError:
154
+ logger.warning("Failed to read auto-upgrade skip lockfile")
155
+ return set()
156
+
157
+
158
+ def _add_skipped_revision(base_dir: Path, revision: str) -> None:
159
+ if not revision:
160
+ return
161
+
162
+ skip_file = _skip_lock_path(base_dir)
163
+ try:
164
+ skip_file.parent.mkdir(parents=True, exist_ok=True)
165
+ existing = _load_skipped_revisions(base_dir)
166
+ if revision in existing:
167
+ return
168
+ with skip_file.open("a", encoding="utf-8") as fh:
169
+ fh.write(f"{revision}\n")
170
+ _append_auto_upgrade_log(
171
+ base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
172
+ )
173
+ except OSError:
174
+ logger.warning(
175
+ "Failed to update auto-upgrade skip lockfile with revision %s", revision
176
+ )
177
+
178
+
179
+ def _resolve_service_url(base_dir: Path) -> str:
180
+ """Return the local URL used to probe the Django suite."""
181
+
182
+ lock_dir = base_dir / "locks"
183
+ mode_file = lock_dir / "nginx_mode.lck"
184
+ mode = "internal"
185
+ if mode_file.exists():
186
+ try:
187
+ value = mode_file.read_text(encoding="utf-8").strip()
188
+ except OSError:
189
+ value = ""
190
+ if value:
191
+ mode = value.lower()
192
+ port = 8000 if mode == "public" else 8888
193
+ return f"http://127.0.0.1:{port}/"
194
+
195
+
196
+ def _parse_major_minor(version: str) -> tuple[int, int] | None:
197
+ match = re.match(r"^\s*(\d+)\.(\d+)", version)
198
+ if not match:
199
+ return None
200
+ return int(match.group(1)), int(match.group(2))
201
+
202
+
203
+ def _shares_stable_series(local: str, remote: str) -> bool:
204
+ local_parts = _parse_major_minor(local)
205
+ remote_parts = _parse_major_minor(remote)
206
+ if not local_parts or not remote_parts:
207
+ return False
208
+ return local_parts == remote_parts
209
+
210
+
211
+ @shared_task
212
+ def check_github_updates() -> None:
213
+ """Check the GitHub repo for updates and upgrade if needed."""
214
+ base_dir = Path(__file__).resolve().parent.parent
215
+ mode_file = base_dir / "locks" / "auto_upgrade.lck"
216
+ mode = "version"
217
+ if mode_file.exists():
218
+ try:
219
+ raw_mode = mode_file.read_text().strip()
220
+ except (OSError, UnicodeDecodeError):
221
+ logger.warning(
222
+ "Failed to read auto-upgrade mode lockfile", exc_info=True
223
+ )
224
+ else:
225
+ cleaned_mode = raw_mode.lower()
226
+ if cleaned_mode:
227
+ mode = cleaned_mode
228
+
229
+ branch = "main"
230
+ subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)
231
+
232
+ log_file = _auto_upgrade_log_path(base_dir)
233
+ with log_file.open("a") as fh:
234
+ fh.write(
235
+ f"{timezone.now().isoformat()} check_github_updates triggered\n"
236
+ )
237
+
238
+ notify = None
239
+ startup = None
240
+ try: # pragma: no cover - optional dependency
241
+ from core.notifications import notify # type: ignore
242
+ except Exception:
243
+ notify = None
244
+ try: # pragma: no cover - optional dependency
245
+ from nodes.apps import _startup_notification as startup # type: ignore
246
+ except Exception:
247
+ startup = None
248
+
249
+ remote_revision = (
250
+ subprocess.check_output(
251
+ ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
252
+ )
253
+ .decode()
254
+ .strip()
255
+ )
256
+
257
+ skipped_revisions = _load_skipped_revisions(base_dir)
258
+ if remote_revision in skipped_revisions:
259
+ _append_auto_upgrade_log(
260
+ base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
261
+ )
262
+ if startup:
263
+ startup()
264
+ return
265
+
266
+ remote_version = _read_remote_version(base_dir, branch)
267
+ local_version = _read_local_version(base_dir)
268
+ remote_severity = _resolve_release_severity(remote_version)
269
+
270
+ upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
271
+
272
+ upgrade_was_applied = False
273
+
274
+ if mode == "latest":
275
+ local_revision = (
276
+ subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
277
+ .decode()
278
+ .strip()
279
+ )
280
+ if local_revision == remote_revision:
281
+ if startup:
282
+ startup()
283
+ return
284
+
285
+ if (
286
+ remote_version
287
+ and local_version
288
+ and remote_version != local_version
289
+ and remote_severity == SEVERITY_LOW
290
+ and _shares_stable_series(local_version, remote_version)
291
+ ):
292
+ _append_auto_upgrade_log(
293
+ base_dir,
294
+ f"Skipping auto-upgrade for low severity patch {remote_version}",
295
+ )
296
+ if startup:
297
+ startup()
298
+ return
299
+
300
+ if notify:
301
+ notify("Upgrading...", upgrade_stamp)
302
+ args = ["./upgrade.sh", "--latest", "--no-restart"]
303
+ upgrade_was_applied = True
304
+ else:
305
+ local_value = local_version or "0"
306
+ remote_value = remote_version or local_value
307
+
308
+ if local_value == remote_value:
309
+ if startup:
310
+ startup()
311
+ return
312
+
313
+ if (
314
+ mode == "stable"
315
+ and local_version
316
+ and remote_version
317
+ and remote_version != local_version
318
+ and _shares_stable_series(local_version, remote_version)
319
+ and remote_severity != SEVERITY_CRITICAL
320
+ ):
321
+ if startup:
322
+ startup()
323
+ return
324
+
325
+ if notify:
326
+ notify("Upgrading...", upgrade_stamp)
327
+ if mode == "stable":
328
+ args = ["./upgrade.sh", "--stable", "--no-restart"]
329
+ else:
330
+ args = ["./upgrade.sh", "--no-restart"]
331
+ upgrade_was_applied = True
332
+
333
+ with log_file.open("a") as fh:
334
+ fh.write(
335
+ f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
336
+ )
337
+
338
+ subprocess.run(args, cwd=base_dir, check=True)
339
+
340
+ service_file = base_dir / "locks/service.lck"
341
+ if service_file.exists():
342
+ service = service_file.read_text().strip()
343
+ subprocess.run(
344
+ [
345
+ "sudo",
346
+ "systemctl",
347
+ "kill",
348
+ "--signal=TERM",
349
+ service,
350
+ ]
351
+ )
352
+ else:
353
+ subprocess.run(["pkill", "-f", "manage.py runserver"])
354
+
355
+ if upgrade_was_applied:
356
+ _append_auto_upgrade_log(
357
+ base_dir,
358
+ (
359
+ "Scheduled post-upgrade health check in %s seconds"
360
+ % AUTO_UPGRADE_HEALTH_DELAY_SECONDS
361
+ ),
362
+ )
363
+ _schedule_health_check(1)
364
+
365
+
366
+ @shared_task
367
+ def poll_email_collectors() -> None:
368
+ """Poll all configured email collectors for new messages."""
369
+ try:
370
+ from .models import EmailCollector
371
+ except Exception: # pragma: no cover - app not ready
372
+ return
373
+
374
+ for collector in EmailCollector.objects.all():
375
+ collector.collect()
376
+
377
+
378
+ @shared_task
379
+ def report_runtime_issue(
380
+ title: str,
381
+ body: str,
382
+ labels: list[str] | None = None,
383
+ fingerprint: str | None = None,
384
+ ):
385
+ """Report a runtime issue to GitHub using :mod:`core.github_issues`."""
386
+
387
+ try:
388
+ response = github_issues.create_issue(
389
+ title,
390
+ body,
391
+ labels=labels,
392
+ fingerprint=fingerprint,
393
+ )
394
+ except Exception:
395
+ logger.exception("Failed to report runtime issue '%s'", title)
396
+ raise
397
+
398
+ if response is None:
399
+ logger.info("Skipped GitHub issue creation for fingerprint %s", fingerprint)
400
+ else:
401
+ logger.info("Reported runtime issue '%s' to GitHub", title)
402
+
403
+ return response
404
+
405
+
406
+ def _record_health_check_result(
407
+ base_dir: Path, attempt: int, status: int | None, detail: str
408
+ ) -> None:
409
+ status_display = status if status is not None else "unreachable"
410
+ message = "Health check attempt %s %s (%s)" % (attempt, detail, status_display)
411
+ _append_auto_upgrade_log(base_dir, message)
412
+
413
+
414
+ def _schedule_health_check(next_attempt: int) -> None:
415
+ verify_auto_upgrade_health.apply_async(
416
+ kwargs={"attempt": next_attempt},
417
+ countdown=AUTO_UPGRADE_HEALTH_DELAY_SECONDS,
418
+ )
419
+
420
+
421
+ def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
422
+ revision = ""
423
+ try:
424
+ revision = (
425
+ subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
426
+ .decode()
427
+ .strip()
428
+ )
429
+ except Exception: # pragma: no cover - best effort capture
430
+ logger.warning("Failed to determine revision during auto-upgrade revert")
431
+
432
+ _add_skipped_revision(base_dir, revision)
433
+ _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
434
+ subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
435
+
436
+
437
+ @shared_task
438
+ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
439
+ """Verify the upgraded suite responds successfully.
440
+
441
+ After the post-upgrade delay the site is probed once; any response other
442
+ than HTTP 200 triggers an automatic revert and records the failing
443
+ revision so future upgrade attempts skip it.
444
+ """
445
+
446
+ base_dir = Path(__file__).resolve().parent.parent
447
+ url = _resolve_service_url(base_dir)
448
+ request = urllib.request.Request(
449
+ url,
450
+ headers={"User-Agent": "Arthexis-AutoUpgrade/1.0"},
451
+ )
452
+
453
+ status: int | None = None
454
+ detail = "succeeded"
455
+ try:
456
+ with urllib.request.urlopen(request, timeout=10) as response:
457
+ status = getattr(response, "status", response.getcode())
458
+ except urllib.error.HTTPError as exc:
459
+ status = exc.code
460
+ detail = f"returned HTTP {exc.code}"
461
+ logger.warning(
462
+ "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
463
+ )
464
+ except urllib.error.URLError as exc:
465
+ detail = f"failed with {exc}"
466
+ logger.warning(
467
+ "Auto-upgrade health check attempt %s failed: %s", attempt, exc
468
+ )
469
+ except Exception as exc: # pragma: no cover - unexpected network error
470
+ detail = f"failed with {exc}"
471
+ logger.exception(
472
+ "Unexpected error probing suite during auto-upgrade attempt %s", attempt
473
+ )
474
+ _record_health_check_result(base_dir, attempt, status, detail)
475
+ _handle_failed_health_check(base_dir, detail)
476
+ return False
477
+
478
+ if status == 200:
479
+ _record_health_check_result(base_dir, attempt, status, "succeeded")
480
+ logger.info(
481
+ "Auto-upgrade health check succeeded on attempt %s with HTTP %s",
482
+ attempt,
483
+ status,
484
+ )
485
+ return True
486
+
487
+ if detail == "succeeded":
488
+ if status is not None:
489
+ detail = f"returned HTTP {status}"
490
+ else:
491
+ detail = "failed with unknown status"
492
+
493
+ _record_health_check_result(base_dir, attempt, status, detail)
494
+ _handle_failed_health_check(base_dir, detail)
495
+ return False
496
+
497
+
498
+ @shared_task
499
+ def run_client_report_schedule(schedule_id: int) -> None:
500
+ """Execute a :class:`core.models.ClientReportSchedule` run."""
501
+
502
+ from core.models import ClientReportSchedule
503
+
504
+ schedule = ClientReportSchedule.objects.filter(pk=schedule_id).first()
505
+ if not schedule:
506
+ logger.warning("ClientReportSchedule %s no longer exists", schedule_id)
507
+ return
508
+
509
+ try:
510
+ schedule.run()
511
+ except Exception:
512
+ logger.exception("ClientReportSchedule %s failed", schedule_id)
513
+ raise
514
+
515
+
516
+ @shared_task
517
+ def ensure_recurring_client_reports() -> None:
518
+ """Ensure scheduled consumer reports run for the current period."""
519
+
520
+ from core.models import ClientReportSchedule
521
+
522
+ reference = timezone.localdate()
523
+ schedules = ClientReportSchedule.objects.filter(
524
+ periodicity__in=[
525
+ ClientReportSchedule.PERIODICITY_DAILY,
526
+ ClientReportSchedule.PERIODICITY_WEEKLY,
527
+ ClientReportSchedule.PERIODICITY_MONTHLY,
528
+ ]
529
+ ).prefetch_related("chargers")
530
+
531
+ for schedule in schedules:
532
+ try:
533
+ schedule.generate_missing_reports(reference=reference)
534
+ except Exception:
535
+ logger.exception(
536
+ "Automatic consumer report generation failed for schedule %s",
537
+ schedule.pk,
538
+ )