arthexis 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/METADATA +224 -221
  2. arthexis-0.1.15.dist-info/RECORD +110 -0
  3. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +43 -43
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -32
  9. config/context_processors.py +67 -69
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +25 -25
  14. config/offline.py +49 -49
  15. config/settings.py +691 -682
  16. config/settings_helpers.py +109 -109
  17. config/urls.py +171 -166
  18. config/wsgi.py +17 -17
  19. core/admin.py +3795 -2809
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +151 -151
  22. core/apps.py +356 -272
  23. core/auto_upgrade.py +57 -57
  24. core/backends.py +265 -236
  25. core/changelog.py +342 -0
  26. core/entity.py +149 -133
  27. core/environment.py +61 -61
  28. core/fields.py +168 -168
  29. core/form_fields.py +75 -75
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +178 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +85 -85
  37. core/middleware.py +91 -91
  38. core/models.py +3637 -2795
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +108 -108
  42. core/release.py +840 -368
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -149
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +315 -315
  47. core/system.py +952 -493
  48. core/tasks.py +408 -394
  49. core/temp_passwords.py +181 -181
  50. core/test_system_info.py +186 -139
  51. core/tests.py +2168 -1521
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +641 -633
  55. core/views.py +2201 -1417
  56. core/widgets.py +213 -94
  57. core/workgroup_urls.py +17 -17
  58. core/workgroup_views.py +94 -94
  59. nodes/admin.py +1720 -1161
  60. nodes/apps.py +87 -85
  61. nodes/backends.py +160 -160
  62. nodes/dns.py +203 -203
  63. nodes/feature_checks.py +133 -133
  64. nodes/lcd.py +165 -165
  65. nodes/models.py +1764 -1597
  66. nodes/reports.py +411 -411
  67. nodes/rfid_sync.py +195 -0
  68. nodes/signals.py +18 -0
  69. nodes/tasks.py +46 -46
  70. nodes/tests.py +3830 -3116
  71. nodes/urls.py +15 -14
  72. nodes/utils.py +121 -105
  73. nodes/views.py +683 -619
  74. ocpp/admin.py +948 -948
  75. ocpp/apps.py +25 -25
  76. ocpp/consumers.py +1565 -1459
  77. ocpp/evcs.py +844 -844
  78. ocpp/evcs_discovery.py +158 -158
  79. ocpp/models.py +917 -917
  80. ocpp/reference_utils.py +42 -42
  81. ocpp/routing.py +11 -11
  82. ocpp/simulator.py +745 -745
  83. ocpp/status_display.py +26 -26
  84. ocpp/store.py +601 -541
  85. ocpp/tasks.py +31 -31
  86. ocpp/test_export_import.py +130 -130
  87. ocpp/test_rfid.py +913 -702
  88. ocpp/tests.py +4445 -4094
  89. ocpp/transactions_io.py +189 -189
  90. ocpp/urls.py +50 -50
  91. ocpp/views.py +1479 -1251
  92. pages/admin.py +769 -539
  93. pages/apps.py +10 -10
  94. pages/checks.py +40 -40
  95. pages/context_processors.py +127 -119
  96. pages/defaults.py +13 -13
  97. pages/forms.py +198 -198
  98. pages/middleware.py +209 -153
  99. pages/models.py +643 -426
  100. pages/tasks.py +74 -0
  101. pages/tests.py +3025 -2200
  102. pages/urls.py +26 -25
  103. pages/utils.py +23 -12
  104. pages/views.py +1176 -1128
  105. arthexis-0.1.13.dist-info/RECORD +0 -105
  106. nodes/actions.py +0 -70
  107. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/WHEEL +0 -0
  108. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/top_level.txt +0 -0
core/tasks.py CHANGED
@@ -1,394 +1,408 @@
1
- from __future__ import annotations
2
-
3
- import logging
4
- import subprocess
5
- from pathlib import Path
6
- import urllib.error
7
- import urllib.request
8
-
9
- from celery import shared_task
10
- from django.conf import settings
11
- from django.contrib.auth import get_user_model
12
- from core import mailer
13
- from core import github_issues
14
- from django.utils import timezone
15
-
16
- from nodes.models import NetMessage
17
-
18
-
19
- AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
20
- AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
21
-
22
-
23
- logger = logging.getLogger(__name__)
24
-
25
-
26
- @shared_task
27
- def heartbeat() -> None:
28
- """Log a simple heartbeat message."""
29
- logger.info("Heartbeat task executed")
30
-
31
-
32
- @shared_task
33
- def birthday_greetings() -> None:
34
- """Send birthday greetings to users via Net Message and email."""
35
- User = get_user_model()
36
- today = timezone.localdate()
37
- for user in User.objects.filter(birthday=today):
38
- NetMessage.broadcast("Happy bday!", user.username)
39
- if user.email:
40
- mailer.send(
41
- "Happy bday!",
42
- f"Happy bday! {user.username}",
43
- [user.email],
44
- settings.DEFAULT_FROM_EMAIL,
45
- fail_silently=True,
46
- )
47
-
48
-
49
- def _auto_upgrade_log_path(base_dir: Path) -> Path:
50
- """Return the log file used for auto-upgrade events."""
51
-
52
- log_dir = base_dir / "logs"
53
- log_dir.mkdir(parents=True, exist_ok=True)
54
- return log_dir / "auto-upgrade.log"
55
-
56
-
57
- def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
58
- """Append ``message`` to the auto-upgrade log, ignoring errors."""
59
-
60
- try:
61
- log_file = _auto_upgrade_log_path(base_dir)
62
- timestamp = timezone.now().isoformat()
63
- with log_file.open("a") as fh:
64
- fh.write(f"{timestamp} {message}\n")
65
- except Exception: # pragma: no cover - best effort logging only
66
- logger.warning("Failed to append auto-upgrade log entry: %s", message)
67
-
68
-
69
- def _skip_lock_path(base_dir: Path) -> Path:
70
- return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
71
-
72
-
73
- def _load_skipped_revisions(base_dir: Path) -> set[str]:
74
- skip_file = _skip_lock_path(base_dir)
75
- try:
76
- return {
77
- line.strip()
78
- for line in skip_file.read_text().splitlines()
79
- if line.strip()
80
- }
81
- except FileNotFoundError:
82
- return set()
83
- except OSError:
84
- logger.warning("Failed to read auto-upgrade skip lockfile")
85
- return set()
86
-
87
-
88
- def _add_skipped_revision(base_dir: Path, revision: str) -> None:
89
- if not revision:
90
- return
91
-
92
- skip_file = _skip_lock_path(base_dir)
93
- try:
94
- skip_file.parent.mkdir(parents=True, exist_ok=True)
95
- existing = _load_skipped_revisions(base_dir)
96
- if revision in existing:
97
- return
98
- with skip_file.open("a", encoding="utf-8") as fh:
99
- fh.write(f"{revision}\n")
100
- _append_auto_upgrade_log(
101
- base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
102
- )
103
- except OSError:
104
- logger.warning(
105
- "Failed to update auto-upgrade skip lockfile with revision %s", revision
106
- )
107
-
108
-
109
- def _resolve_service_url(base_dir: Path) -> str:
110
- """Return the local URL used to probe the Django suite."""
111
-
112
- lock_dir = base_dir / "locks"
113
- mode_file = lock_dir / "nginx_mode.lck"
114
- mode = "internal"
115
- if mode_file.exists():
116
- try:
117
- mode = mode_file.read_text().strip() or "internal"
118
- except OSError:
119
- mode = "internal"
120
- port = 8000 if mode == "public" else 8888
121
- return f"http://127.0.0.1:{port}/"
122
-
123
-
124
- @shared_task
125
- def check_github_updates() -> None:
126
- """Check the GitHub repo for updates and upgrade if needed."""
127
- base_dir = Path(__file__).resolve().parent.parent
128
- mode_file = base_dir / "locks" / "auto_upgrade.lck"
129
- mode = "version"
130
- if mode_file.exists():
131
- mode = mode_file.read_text().strip()
132
-
133
- branch = "main"
134
- subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)
135
-
136
- log_file = _auto_upgrade_log_path(base_dir)
137
- with log_file.open("a") as fh:
138
- fh.write(
139
- f"{timezone.now().isoformat()} check_github_updates triggered\n"
140
- )
141
-
142
- notify = None
143
- startup = None
144
- try: # pragma: no cover - optional dependency
145
- from core.notifications import notify # type: ignore
146
- except Exception:
147
- notify = None
148
- try: # pragma: no cover - optional dependency
149
- from nodes.apps import _startup_notification as startup # type: ignore
150
- except Exception:
151
- startup = None
152
-
153
- remote_revision = (
154
- subprocess.check_output(
155
- ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
156
- )
157
- .decode()
158
- .strip()
159
- )
160
-
161
- skipped_revisions = _load_skipped_revisions(base_dir)
162
- if remote_revision in skipped_revisions:
163
- _append_auto_upgrade_log(
164
- base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
165
- )
166
- if startup:
167
- startup()
168
- return
169
-
170
- upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
171
-
172
- upgrade_was_applied = False
173
-
174
- if mode == "latest":
175
- local = (
176
- subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
177
- .decode()
178
- .strip()
179
- )
180
- if local == remote_revision:
181
- if startup:
182
- startup()
183
- return
184
- if notify:
185
- notify("Upgrading...", upgrade_stamp)
186
- args = ["./upgrade.sh", "--latest", "--no-restart"]
187
- upgrade_was_applied = True
188
- else:
189
- local = "0"
190
- version_file = base_dir / "VERSION"
191
- if version_file.exists():
192
- local = version_file.read_text().strip()
193
- remote = (
194
- subprocess.check_output(
195
- [
196
- "git",
197
- "show",
198
- f"origin/{branch}:VERSION",
199
- ],
200
- cwd=base_dir,
201
- )
202
- .decode()
203
- .strip()
204
- )
205
- if local == remote:
206
- if startup:
207
- startup()
208
- return
209
- if notify:
210
- notify("Upgrading...", upgrade_stamp)
211
- args = ["./upgrade.sh", "--no-restart"]
212
- upgrade_was_applied = True
213
-
214
- with log_file.open("a") as fh:
215
- fh.write(
216
- f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
217
- )
218
-
219
- subprocess.run(args, cwd=base_dir, check=True)
220
-
221
- service_file = base_dir / "locks/service.lck"
222
- if service_file.exists():
223
- service = service_file.read_text().strip()
224
- subprocess.run(
225
- [
226
- "sudo",
227
- "systemctl",
228
- "kill",
229
- "--signal=TERM",
230
- service,
231
- ]
232
- )
233
- else:
234
- subprocess.run(["pkill", "-f", "manage.py runserver"])
235
-
236
- if upgrade_was_applied:
237
- _append_auto_upgrade_log(
238
- base_dir,
239
- (
240
- "Scheduled post-upgrade health check in %s seconds"
241
- % AUTO_UPGRADE_HEALTH_DELAY_SECONDS
242
- ),
243
- )
244
- _schedule_health_check(1)
245
-
246
-
247
- @shared_task
248
- def poll_email_collectors() -> None:
249
- """Poll all configured email collectors for new messages."""
250
- try:
251
- from .models import EmailCollector
252
- except Exception: # pragma: no cover - app not ready
253
- return
254
-
255
- for collector in EmailCollector.objects.all():
256
- collector.collect()
257
-
258
-
259
- @shared_task
260
- def report_runtime_issue(
261
- title: str,
262
- body: str,
263
- labels: list[str] | None = None,
264
- fingerprint: str | None = None,
265
- ):
266
- """Report a runtime issue to GitHub using :mod:`core.github_issues`."""
267
-
268
- try:
269
- response = github_issues.create_issue(
270
- title,
271
- body,
272
- labels=labels,
273
- fingerprint=fingerprint,
274
- )
275
- except Exception:
276
- logger.exception("Failed to report runtime issue '%s'", title)
277
- raise
278
-
279
- if response is None:
280
- logger.info("Skipped GitHub issue creation for fingerprint %s", fingerprint)
281
- else:
282
- logger.info("Reported runtime issue '%s' to GitHub", title)
283
-
284
- return response
285
-
286
-
287
- def _record_health_check_result(
288
- base_dir: Path, attempt: int, status: int | None, detail: str
289
- ) -> None:
290
- status_display = status if status is not None else "unreachable"
291
- message = "Health check attempt %s %s (%s)" % (attempt, detail, status_display)
292
- _append_auto_upgrade_log(base_dir, message)
293
-
294
-
295
- def _schedule_health_check(next_attempt: int) -> None:
296
- verify_auto_upgrade_health.apply_async(
297
- kwargs={"attempt": next_attempt},
298
- countdown=AUTO_UPGRADE_HEALTH_DELAY_SECONDS,
299
- )
300
-
301
-
302
- def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
303
- revision = ""
304
- try:
305
- revision = (
306
- subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
307
- .decode()
308
- .strip()
309
- )
310
- except Exception: # pragma: no cover - best effort capture
311
- logger.warning("Failed to determine revision during auto-upgrade revert")
312
-
313
- _add_skipped_revision(base_dir, revision)
314
- _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
315
- subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
316
-
317
-
318
- @shared_task
319
- def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
320
- """Verify the upgraded suite responds successfully.
321
-
322
- After the post-upgrade delay the site is probed once; any response other
323
- than HTTP 200 triggers an automatic revert and records the failing
324
- revision so future upgrade attempts skip it.
325
- """
326
-
327
- base_dir = Path(__file__).resolve().parent.parent
328
- url = _resolve_service_url(base_dir)
329
- request = urllib.request.Request(
330
- url,
331
- headers={"User-Agent": "Arthexis-AutoUpgrade/1.0"},
332
- )
333
-
334
- status: int | None = None
335
- detail = "succeeded"
336
- try:
337
- with urllib.request.urlopen(request, timeout=10) as response:
338
- status = getattr(response, "status", response.getcode())
339
- except urllib.error.HTTPError as exc:
340
- status = exc.code
341
- detail = f"returned HTTP {exc.code}"
342
- logger.warning(
343
- "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
344
- )
345
- except urllib.error.URLError as exc:
346
- detail = f"failed with {exc}"
347
- logger.warning(
348
- "Auto-upgrade health check attempt %s failed: %s", attempt, exc
349
- )
350
- except Exception as exc: # pragma: no cover - unexpected network error
351
- detail = f"failed with {exc}"
352
- logger.exception(
353
- "Unexpected error probing suite during auto-upgrade attempt %s", attempt
354
- )
355
- _record_health_check_result(base_dir, attempt, status, detail)
356
- _handle_failed_health_check(base_dir, detail)
357
- return False
358
-
359
- if status == 200:
360
- _record_health_check_result(base_dir, attempt, status, "succeeded")
361
- logger.info(
362
- "Auto-upgrade health check succeeded on attempt %s with HTTP %s",
363
- attempt,
364
- status,
365
- )
366
- return True
367
-
368
- if detail == "succeeded":
369
- if status is not None:
370
- detail = f"returned HTTP {status}"
371
- else:
372
- detail = "failed with unknown status"
373
-
374
- _record_health_check_result(base_dir, attempt, status, detail)
375
- _handle_failed_health_check(base_dir, detail)
376
- return False
377
-
378
-
379
- @shared_task
380
- def run_client_report_schedule(schedule_id: int) -> None:
381
- """Execute a :class:`core.models.ClientReportSchedule` run."""
382
-
383
- from core.models import ClientReportSchedule
384
-
385
- schedule = ClientReportSchedule.objects.filter(pk=schedule_id).first()
386
- if not schedule:
387
- logger.warning("ClientReportSchedule %s no longer exists", schedule_id)
388
- return
389
-
390
- try:
391
- schedule.run()
392
- except Exception:
393
- logger.exception("ClientReportSchedule %s failed", schedule_id)
394
- raise
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import shutil
5
+ import subprocess
6
+ from pathlib import Path
7
+ import urllib.error
8
+ import urllib.request
9
+
10
+ from celery import shared_task
11
+ from django.conf import settings
12
+ from django.contrib.auth import get_user_model
13
+ from core import mailer
14
+ from core import github_issues
15
+ from django.utils import timezone
16
+
17
+ from nodes.models import NetMessage
18
+
19
+
20
+ AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
21
+ AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
22
+
23
+
24
+ logger = logging.getLogger(__name__)
25
+
26
+
27
@shared_task
def heartbeat() -> None:
    """Emit a periodic log entry proving the Celery worker is alive."""
    # Scheduled beat task; the message itself is the observable output.
    logger.info("Heartbeat task executed")
31
+
32
+
33
@shared_task
def birthday_greetings() -> None:
    """Send birthday greetings to users via Net Message and email."""
    user_model = get_user_model()
    celebrants = user_model.objects.filter(birthday=timezone.localdate())
    for celebrant in celebrants:
        # Always broadcast; email only when an address is on file.
        NetMessage.broadcast("Happy bday!", celebrant.username)
        if not celebrant.email:
            continue
        mailer.send(
            "Happy bday!",
            f"Happy bday! {celebrant.username}",
            [celebrant.email],
            settings.DEFAULT_FROM_EMAIL,
            fail_silently=True,
        )
48
+
49
+
50
+ def _auto_upgrade_log_path(base_dir: Path) -> Path:
51
+ """Return the log file used for auto-upgrade events."""
52
+
53
+ log_dir = base_dir / "logs"
54
+ log_dir.mkdir(parents=True, exist_ok=True)
55
+ return log_dir / "auto-upgrade.log"
56
+
57
+
58
def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
    """Append ``message`` to the auto-upgrade log, ignoring errors."""
    try:
        target = _auto_upgrade_log_path(base_dir)
        entry = f"{timezone.now().isoformat()} {message}\n"
        with target.open("a") as handle:
            handle.write(entry)
    except Exception:  # pragma: no cover - best effort logging only
        # Logging must never break the upgrade flow itself.
        logger.warning("Failed to append auto-upgrade log entry: %s", message)
68
+
69
+
70
def _skip_lock_path(base_dir: Path) -> Path:
    """Return the lockfile that records revisions excluded from auto-upgrade."""
    locks_dir = base_dir / "locks"
    return locks_dir / AUTO_UPGRADE_SKIP_LOCK_NAME
72
+
73
+
74
def _load_skipped_revisions(base_dir: Path) -> set[str]:
    """Return the git revisions currently blocked from auto-upgrade.

    A missing lockfile means nothing is blocked; unreadable lockfiles are
    logged and treated the same way.
    """
    try:
        contents = _skip_lock_path(base_dir).read_text()
    except FileNotFoundError:
        return set()
    except OSError:
        logger.warning("Failed to read auto-upgrade skip lockfile")
        return set()
    revisions: set[str] = set()
    for line in contents.splitlines():
        candidate = line.strip()
        if candidate:
            revisions.add(candidate)
    return revisions
87
+
88
+
89
def _add_skipped_revision(base_dir: Path, revision: str) -> None:
    """Record ``revision`` in the skip lockfile so it is never retried.

    No-op for empty revisions or ones already recorded; filesystem errors
    are logged rather than raised.
    """
    if not revision:
        return

    skip_file = _skip_lock_path(base_dir)
    try:
        skip_file.parent.mkdir(parents=True, exist_ok=True)
        if revision in _load_skipped_revisions(base_dir):
            # Already recorded; avoid duplicate lines in the lockfile.
            return
        with skip_file.open("a", encoding="utf-8") as handle:
            handle.write(f"{revision}\n")
        _append_auto_upgrade_log(
            base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
        )
    except OSError:
        logger.warning(
            "Failed to update auto-upgrade skip lockfile with revision %s", revision
        )
108
+
109
+
110
+ def _resolve_service_url(base_dir: Path) -> str:
111
+ """Return the local URL used to probe the Django suite."""
112
+
113
+ lock_dir = base_dir / "locks"
114
+ mode_file = lock_dir / "nginx_mode.lck"
115
+ mode = "internal"
116
+ if mode_file.exists():
117
+ try:
118
+ value = mode_file.read_text(encoding="utf-8").strip()
119
+ except OSError:
120
+ value = ""
121
+ if value:
122
+ mode = value.lower()
123
+ port = 8000 if mode == "public" else 8888
124
+ return f"http://127.0.0.1:{port}/"
125
+
126
+
127
@shared_task
def check_github_updates() -> None:
    """Check the GitHub repo for updates and upgrade if needed.

    Reads ``locks/auto_upgrade.lck`` to pick the mode ("latest" follows the
    remote branch head; anything else compares VERSION files), runs
    ``./upgrade.sh`` when out of date, restarts the service, and schedules a
    post-upgrade health check.
    """
    # Project root: this file lives at <base_dir>/core/tasks.py.
    base_dir = Path(__file__).resolve().parent.parent
    mode_file = base_dir / "locks" / "auto_upgrade.lck"
    mode = "version"
    if mode_file.exists():
        try:
            # Empty lockfile falls back to the default "version" mode.
            mode = mode_file.read_text().strip() or "version"
        except (OSError, UnicodeDecodeError):
            logger.warning(
                "Failed to read auto-upgrade mode lockfile", exc_info=True
            )

    branch = "main"
    # Refresh origin/<branch> so the revision comparisons below are current.
    subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)

    log_file = _auto_upgrade_log_path(base_dir)
    with log_file.open("a") as fh:
        fh.write(
            f"{timezone.now().isoformat()} check_github_updates triggered\n"
        )

    # Both hooks are optional; missing ones degrade to silent no-ops.
    notify = None
    startup = None
    try:  # pragma: no cover - optional dependency
        from core.notifications import notify  # type: ignore
    except Exception:
        notify = None
    try:  # pragma: no cover - optional dependency
        from nodes.apps import _startup_notification as startup  # type: ignore
    except Exception:
        startup = None

    remote_revision = (
        subprocess.check_output(
            ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
        )
        .decode()
        .strip()
    )

    # Revisions that previously failed the health check are never retried.
    skipped_revisions = _load_skipped_revisions(base_dir)
    if remote_revision in skipped_revisions:
        _append_auto_upgrade_log(
            base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
        )
        if startup:
            startup()
        return

    upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")

    upgrade_was_applied = False

    if mode == "latest":
        # "latest" mode: compare local and remote commit hashes directly.
        local = (
            subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
            .decode()
            .strip()
        )
        if local == remote_revision:
            if startup:
                startup()
            return
        if notify:
            notify("Upgrading...", upgrade_stamp)
        args = ["./upgrade.sh", "--latest", "--no-restart"]
        upgrade_was_applied = True
    else:
        # Default "version" mode: compare the local VERSION file against the
        # remote branch's copy; "0" stands in for a missing local file.
        local = "0"
        version_file = base_dir / "VERSION"
        if version_file.exists():
            local = version_file.read_text().strip()
        remote = (
            subprocess.check_output(
                [
                    "git",
                    "show",
                    f"origin/{branch}:VERSION",
                ],
                cwd=base_dir,
            )
            .decode()
            .strip()
        )
        if local == remote:
            if startup:
                startup()
            return
        if notify:
            notify("Upgrading...", upgrade_stamp)
        args = ["./upgrade.sh", "--no-restart"]
        upgrade_was_applied = True

    with log_file.open("a") as fh:
        fh.write(
            f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
        )

    # Apply the upgrade; check=True aborts the task on a non-zero exit.
    subprocess.run(args, cwd=base_dir, check=True)

    # Upgrade the companion "gway" tool when present; failure is non-fatal.
    if shutil.which("gway"):
        try:
            subprocess.run(["gway", "upgrade"], check=True)
        except subprocess.CalledProcessError:
            logger.warning("gway upgrade failed; continuing anyway", exc_info=True)

    # Restart: signal the managed systemd unit when one is recorded,
    # otherwise kill the dev-server process directly.
    service_file = base_dir / "locks/service.lck"
    if service_file.exists():
        service = service_file.read_text().strip()
        subprocess.run(
            [
                "sudo",
                "systemctl",
                "kill",
                "--signal=TERM",
                service,
            ]
        )
    else:
        subprocess.run(["pkill", "-f", "manage.py runserver"])

    if upgrade_was_applied:
        _append_auto_upgrade_log(
            base_dir,
            (
                "Scheduled post-upgrade health check in %s seconds"
                % AUTO_UPGRADE_HEALTH_DELAY_SECONDS
            ),
        )
        _schedule_health_check(1)
259
+
260
+
261
@shared_task
def poll_email_collectors() -> None:
    """Poll all configured email collectors for new messages."""
    try:
        from .models import EmailCollector
    except Exception:  # pragma: no cover - app not ready
        # Model layer unavailable (e.g. during startup); try again next run.
        return

    collectors = EmailCollector.objects.all()
    for collector in collectors:
        collector.collect()
271
+
272
+
273
@shared_task
def report_runtime_issue(
    title: str,
    body: str,
    labels: list[str] | None = None,
    fingerprint: str | None = None,
):
    """Report a runtime issue to GitHub using :mod:`core.github_issues`.

    Returns whatever ``create_issue`` returns; ``None`` means creation was
    skipped (deduplicated by ``fingerprint``). Re-raises on failure so the
    task is visibly errored.
    """
    try:
        result = github_issues.create_issue(
            title,
            body,
            labels=labels,
            fingerprint=fingerprint,
        )
    except Exception:
        logger.exception("Failed to report runtime issue '%s'", title)
        raise

    if result is None:
        logger.info("Skipped GitHub issue creation for fingerprint %s", fingerprint)
    else:
        logger.info("Reported runtime issue '%s' to GitHub", title)

    return result
299
+
300
+
301
def _record_health_check_result(
    base_dir: Path, attempt: int, status: int | None, detail: str
) -> None:
    """Log one health-check attempt to the auto-upgrade log."""
    # A ``None`` status means the probe never got an HTTP response.
    shown = "unreachable" if status is None else status
    _append_auto_upgrade_log(
        base_dir, "Health check attempt %s %s (%s)" % (attempt, detail, shown)
    )
307
+
308
+
309
def _schedule_health_check(next_attempt: int) -> None:
    """Queue the next post-upgrade health probe after the standard delay."""
    verify_auto_upgrade_health.apply_async(
        countdown=AUTO_UPGRADE_HEALTH_DELAY_SECONDS,
        kwargs={"attempt": next_attempt},
    )
314
+
315
+
316
def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
    """Block the current revision and roll the deployment back."""
    revision = ""
    try:
        output = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
        revision = output.decode().strip()
    except Exception:  # pragma: no cover - best effort capture
        # Still revert even if we cannot record which revision failed.
        logger.warning("Failed to determine revision during auto-upgrade revert")

    _add_skipped_revision(base_dir, revision)
    _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
    subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
330
+
331
+
332
@shared_task
def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
    """Verify the upgraded suite responds successfully.

    After the post-upgrade delay the site is probed once; any response other
    than HTTP 200 triggers an automatic revert and records the failing
    revision so future upgrade attempts skip it.

    Args:
        attempt: 1-based probe counter, used only in log/audit messages.

    Returns:
        ``True`` when the probe returned HTTP 200, ``False`` after any
        failure (which also triggers the revert path).
    """

    # Project root: this file lives at <base_dir>/core/tasks.py.
    base_dir = Path(__file__).resolve().parent.parent
    url = _resolve_service_url(base_dir)
    request = urllib.request.Request(
        url,
        # Identify the probe so it is distinguishable in access logs.
        headers={"User-Agent": "Arthexis-AutoUpgrade/1.0"},
    )

    status: int | None = None
    detail = "succeeded"
    try:
        with urllib.request.urlopen(request, timeout=10) as response:
            # http.client responses expose .status; fall back to getcode()
            # for openers that only provide the older accessor.
            status = getattr(response, "status", response.getcode())
    except urllib.error.HTTPError as exc:
        # Server answered with a non-2xx code; record it and fall through
        # to the status check below.
        status = exc.code
        detail = f"returned HTTP {exc.code}"
        logger.warning(
            "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
        )
    except urllib.error.URLError as exc:
        # Connection-level failure (refused/DNS/timeout); status stays None.
        detail = f"failed with {exc}"
        logger.warning(
            "Auto-upgrade health check attempt %s failed: %s", attempt, exc
        )
    except Exception as exc:  # pragma: no cover - unexpected network error
        # Unknown failure: record the attempt and revert immediately.
        detail = f"failed with {exc}"
        logger.exception(
            "Unexpected error probing suite during auto-upgrade attempt %s", attempt
        )
        _record_health_check_result(base_dir, attempt, status, detail)
        _handle_failed_health_check(base_dir, detail)
        return False

    if status == 200:
        _record_health_check_result(base_dir, attempt, status, "succeeded")
        logger.info(
            "Auto-upgrade health check succeeded on attempt %s with HTTP %s",
            attempt,
            status,
        )
        return True

    # Non-200 outcome: derive a readable detail when the request itself did
    # not raise (detail still holds its initial "succeeded" value).
    if detail == "succeeded":
        if status is not None:
            detail = f"returned HTTP {status}"
        else:
            detail = "failed with unknown status"

    _record_health_check_result(base_dir, attempt, status, detail)
    _handle_failed_health_check(base_dir, detail)
    return False
391
+
392
+
393
@shared_task
def run_client_report_schedule(schedule_id: int) -> None:
    """Execute a :class:`core.models.ClientReportSchedule` run."""

    # Imported lazily so the task module does not depend on model import order.
    from core.models import ClientReportSchedule

    schedule = ClientReportSchedule.objects.filter(pk=schedule_id).first()
    if not schedule:
        # Deleted between scheduling and execution; nothing to do.
        logger.warning("ClientReportSchedule %s no longer exists", schedule_id)
        return

    try:
        schedule.run()
    except Exception:
        logger.exception("ClientReportSchedule %s failed", schedule_id)
        raise