arthexis 0.1.16__py3-none-any.whl → 0.1.28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (67):
  1. {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/METADATA +95 -41
  2. arthexis-0.1.28.dist-info/RECORD +112 -0
  3. config/asgi.py +1 -15
  4. config/middleware.py +47 -1
  5. config/settings.py +21 -30
  6. config/settings_helpers.py +176 -1
  7. config/urls.py +69 -1
  8. core/admin.py +805 -473
  9. core/apps.py +6 -8
  10. core/auto_upgrade.py +19 -4
  11. core/backends.py +13 -3
  12. core/celery_utils.py +73 -0
  13. core/changelog.py +66 -5
  14. core/environment.py +4 -5
  15. core/models.py +1825 -218
  16. core/notifications.py +1 -1
  17. core/reference_utils.py +10 -11
  18. core/release.py +55 -7
  19. core/sigil_builder.py +2 -2
  20. core/sigil_resolver.py +1 -66
  21. core/system.py +285 -4
  22. core/tasks.py +439 -138
  23. core/test_system_info.py +43 -5
  24. core/tests.py +516 -18
  25. core/user_data.py +94 -21
  26. core/views.py +348 -186
  27. nodes/admin.py +904 -67
  28. nodes/apps.py +12 -1
  29. nodes/feature_checks.py +30 -0
  30. nodes/models.py +800 -127
  31. nodes/rfid_sync.py +1 -1
  32. nodes/tasks.py +98 -3
  33. nodes/tests.py +1381 -152
  34. nodes/urls.py +15 -1
  35. nodes/utils.py +51 -3
  36. nodes/views.py +1382 -152
  37. ocpp/admin.py +1970 -152
  38. ocpp/consumers.py +839 -34
  39. ocpp/models.py +968 -17
  40. ocpp/network.py +398 -0
  41. ocpp/store.py +411 -43
  42. ocpp/tasks.py +261 -3
  43. ocpp/test_export_import.py +1 -0
  44. ocpp/test_rfid.py +194 -6
  45. ocpp/tests.py +1918 -87
  46. ocpp/transactions_io.py +9 -1
  47. ocpp/urls.py +8 -3
  48. ocpp/views.py +700 -53
  49. pages/admin.py +262 -30
  50. pages/apps.py +35 -0
  51. pages/context_processors.py +28 -21
  52. pages/defaults.py +1 -1
  53. pages/forms.py +31 -8
  54. pages/middleware.py +6 -2
  55. pages/models.py +86 -2
  56. pages/module_defaults.py +5 -5
  57. pages/site_config.py +137 -0
  58. pages/tests.py +1050 -126
  59. pages/urls.py +14 -2
  60. pages/utils.py +70 -0
  61. pages/views.py +622 -56
  62. arthexis-0.1.16.dist-info/RECORD +0 -111
  63. core/workgroup_urls.py +0 -17
  64. core/workgroup_views.py +0 -94
  65. {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/WHEEL +0 -0
  66. {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/licenses/LICENSE +0 -0
  67. {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/top_level.txt +0 -0
core/tasks.py CHANGED
@@ -2,23 +2,64 @@ from __future__ import annotations
2
2
 
3
3
  import logging
4
4
  import shutil
5
+ import re
5
6
  import subprocess
6
7
  from pathlib import Path
7
8
  import urllib.error
8
9
  import urllib.request
9
10
 
10
11
  from celery import shared_task
11
- from django.conf import settings
12
- from django.contrib.auth import get_user_model
13
- from core import mailer
14
12
  from core import github_issues
13
+ from django.db import DatabaseError
15
14
  from django.utils import timezone
16
15
 
17
- from nodes.models import NetMessage
18
-
19
16
 
20
17
  AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
21
18
  AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
19
+ AUTO_UPGRADE_NETWORK_FAILURE_LOCK_NAME = "auto_upgrade_network_failures.lck"
20
+ AUTO_UPGRADE_NETWORK_FAILURE_THRESHOLD = 3
21
+
22
+ _NETWORK_FAILURE_PATTERNS = (
23
+ "could not resolve host",
24
+ "couldn't resolve host",
25
+ "failed to connect",
26
+ "connection timed out",
27
+ "network is unreachable",
28
+ "temporary failure in name resolution",
29
+ "name or service not known",
30
+ "could not resolve proxy",
31
+ "no route to host",
32
+ )
33
+
34
+ SEVERITY_NORMAL = "normal"
35
+ SEVERITY_LOW = "low"
36
+ SEVERITY_CRITICAL = "critical"
37
+
38
+ _PackageReleaseModel = None
39
+
40
+
41
+ def _get_package_release_model():
42
+ """Return the :class:`core.models.PackageRelease` model when available."""
43
+
44
+ global _PackageReleaseModel
45
+
46
+ if _PackageReleaseModel is not None:
47
+ return _PackageReleaseModel
48
+
49
+ try:
50
+ from core.models import PackageRelease # noqa: WPS433 - runtime import
51
+ except Exception: # pragma: no cover - app registry not ready
52
+ return None
53
+
54
+ _PackageReleaseModel = PackageRelease
55
+ return PackageRelease
56
+
57
+
58
+ model = _get_package_release_model()
59
+ if model is not None: # pragma: no branch - runtime constant setup
60
+ SEVERITY_NORMAL = model.Severity.NORMAL
61
+ SEVERITY_LOW = model.Severity.LOW
62
+ SEVERITY_CRITICAL = model.Severity.CRITICAL
22
63
 
23
64
 
24
65
  logger = logging.getLogger(__name__)
@@ -30,23 +71,6 @@ def heartbeat() -> None:
30
71
  logger.info("Heartbeat task executed")
31
72
 
32
73
 
33
- @shared_task
34
- def birthday_greetings() -> None:
35
- """Send birthday greetings to users via Net Message and email."""
36
- User = get_user_model()
37
- today = timezone.localdate()
38
- for user in User.objects.filter(birthday=today):
39
- NetMessage.broadcast("Happy bday!", user.username)
40
- if user.email:
41
- mailer.send(
42
- "Happy bday!",
43
- f"Happy bday! {user.username}",
44
- [user.email],
45
- settings.DEFAULT_FROM_EMAIL,
46
- fail_silently=True,
47
- )
48
-
49
-
50
74
  def _auto_upgrade_log_path(base_dir: Path) -> Path:
51
75
  """Return the log file used for auto-upgrade events."""
52
76
 
@@ -67,6 +91,66 @@ def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
67
91
  logger.warning("Failed to append auto-upgrade log entry: %s", message)
68
92
 
69
93
 
94
+ def _resolve_release_severity(version: str | None) -> str:
95
+ """Return the stored severity for *version*, defaulting to normal."""
96
+
97
+ if not version:
98
+ return SEVERITY_NORMAL
99
+
100
+ model = _get_package_release_model()
101
+ if model is None:
102
+ return SEVERITY_NORMAL
103
+
104
+ try:
105
+ queryset = model.objects.filter(version=version)
106
+ release = (
107
+ queryset.filter(package__is_active=True).first() or queryset.first()
108
+ )
109
+ except DatabaseError: # pragma: no cover - depends on DB availability
110
+ return SEVERITY_NORMAL
111
+
112
+ if not release:
113
+ return SEVERITY_NORMAL
114
+
115
+ severity = getattr(release, "severity", None)
116
+ if not severity:
117
+ return SEVERITY_NORMAL
118
+ return severity
119
+
120
+
121
+ def _read_local_version(base_dir: Path) -> str | None:
122
+ """Return the local VERSION file contents when readable."""
123
+
124
+ version_path = base_dir / "VERSION"
125
+ if not version_path.exists():
126
+ return None
127
+ try:
128
+ return version_path.read_text().strip()
129
+ except OSError: # pragma: no cover - filesystem error
130
+ return None
131
+
132
+
133
+ def _read_remote_version(base_dir: Path, branch: str) -> str | None:
134
+ """Return the VERSION file from ``origin/<branch>`` when available."""
135
+
136
+ try:
137
+ return (
138
+ subprocess.check_output(
139
+ [
140
+ "git",
141
+ "show",
142
+ f"origin/{branch}:VERSION",
143
+ ],
144
+ cwd=base_dir,
145
+ stderr=subprocess.STDOUT,
146
+ text=True,
147
+ )
148
+ .strip()
149
+ )
150
+ except (subprocess.CalledProcessError, FileNotFoundError): # pragma: no cover - git failure
151
+ return None
152
+
153
+
70
154
  def _skip_lock_path(base_dir: Path) -> Path:
71
155
  return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
72
156
 
@@ -107,6 +191,141 @@ def _add_skipped_revision(base_dir: Path, revision: str) -> None:
107
191
  )
108
192
 
109
193
 
194
+ def _network_failure_lock_path(base_dir: Path) -> Path:
195
+ return base_dir / "locks" / AUTO_UPGRADE_NETWORK_FAILURE_LOCK_NAME
196
+
197
+
198
+ def _read_network_failure_count(base_dir: Path) -> int:
199
+ lock_path = _network_failure_lock_path(base_dir)
200
+ try:
201
+ raw_value = lock_path.read_text(encoding="utf-8").strip()
202
+ except FileNotFoundError:
203
+ return 0
204
+ except OSError:
205
+ logger.warning("Failed to read auto-upgrade network failure lockfile")
206
+ return 0
207
+ if not raw_value:
208
+ return 0
209
+ try:
210
+ return int(raw_value)
211
+ except ValueError:
212
+ logger.warning(
213
+ "Invalid auto-upgrade network failure lockfile contents: %s", raw_value
214
+ )
215
+ return 0
216
+
217
+
218
+ def _write_network_failure_count(base_dir: Path, count: int) -> None:
219
+ lock_path = _network_failure_lock_path(base_dir)
220
+ try:
221
+ lock_path.parent.mkdir(parents=True, exist_ok=True)
222
+ lock_path.write_text(str(count), encoding="utf-8")
223
+ except OSError:
224
+ logger.warning("Failed to update auto-upgrade network failure lockfile")
225
+
226
+
227
+ def _reset_network_failure_count(base_dir: Path) -> None:
228
+ lock_path = _network_failure_lock_path(base_dir)
229
+ try:
230
+ if lock_path.exists():
231
+ lock_path.unlink()
232
+ except OSError:
233
+ logger.warning("Failed to remove auto-upgrade network failure lockfile")
234
+
235
+
236
+ def _extract_error_output(exc: subprocess.CalledProcessError) -> str:
237
+ parts: list[str] = []
238
+ for attr in ("stderr", "stdout", "output"):
239
+ value = getattr(exc, attr, None)
240
+ if not value:
241
+ continue
242
+ if isinstance(value, bytes):
243
+ try:
244
+ value = value.decode()
245
+ except Exception: # pragma: no cover - best effort decoding
246
+ value = value.decode(errors="ignore")
247
+ parts.append(str(value))
248
+ detail = " ".join(part.strip() for part in parts if part)
249
+ if not detail:
250
+ detail = str(exc)
251
+ return detail
252
+
253
+
254
+ def _is_network_failure(exc: subprocess.CalledProcessError) -> bool:
255
+ command = exc.cmd
256
+ if isinstance(command, (list, tuple)):
257
+ if not command:
258
+ return False
259
+ first = str(command[0])
260
+ else:
261
+ command_str = str(command)
262
+ first = command_str.split()[0] if command_str else ""
263
+ if "git" not in first:
264
+ return False
265
+ detail = _extract_error_output(exc).lower()
266
+ return any(pattern in detail for pattern in _NETWORK_FAILURE_PATTERNS)
267
+
268
+
269
+ def _record_network_failure(base_dir: Path, detail: str) -> int:
270
+ count = _read_network_failure_count(base_dir) + 1
271
+ _write_network_failure_count(base_dir, count)
272
+ _append_auto_upgrade_log(
273
+ base_dir,
274
+ f"Auto-upgrade network failure {count}: {detail}",
275
+ )
276
+ return count
277
+
278
+
279
+ def _charge_point_active(base_dir: Path) -> bool:
280
+ lock_path = base_dir / "locks" / "charging.lck"
281
+ if lock_path.exists():
282
+ return True
283
+ try:
284
+ from ocpp import store # type: ignore
285
+ except Exception:
286
+ return False
287
+ try:
288
+ connections = getattr(store, "connections", {})
289
+ except Exception: # pragma: no cover - defensive
290
+ return False
291
+ return bool(connections)
292
+
293
+
294
+ def _trigger_auto_upgrade_reboot(base_dir: Path) -> None:
295
+ try:
296
+ subprocess.run(["sudo", "systemctl", "reboot"], check=False)
297
+ except Exception: # pragma: no cover - best effort reboot command
298
+ logger.exception(
299
+ "Failed to trigger reboot after repeated auto-upgrade network failures"
300
+ )
301
+
302
+
303
+ def _reboot_if_no_charge_point(base_dir: Path) -> None:
304
+ if _charge_point_active(base_dir):
305
+ _append_auto_upgrade_log(
306
+ base_dir,
307
+ "Skipping reboot after repeated auto-upgrade network failures; a charge point is active",
308
+ )
309
+ return
310
+ _append_auto_upgrade_log(
311
+ base_dir,
312
+ "Rebooting due to repeated auto-upgrade network failures",
313
+ )
314
+ _trigger_auto_upgrade_reboot(base_dir)
315
+
316
+
317
+ def _handle_network_failure_if_applicable(
318
+ base_dir: Path, exc: subprocess.CalledProcessError
319
+ ) -> bool:
320
+ if not _is_network_failure(exc):
321
+ return False
322
+ detail = _extract_error_output(exc)
323
+ failure_count = _record_network_failure(base_dir, detail)
324
+ if failure_count >= AUTO_UPGRADE_NETWORK_FAILURE_THRESHOLD:
325
+ _reboot_if_no_charge_point(base_dir)
326
+ return True
327
+
328
+
110
329
  def _resolve_service_url(base_dir: Path) -> str:
111
330
  """Return the local URL used to probe the Django suite."""
112
331
 
@@ -120,146 +339,203 @@ def _resolve_service_url(base_dir: Path) -> str:
120
339
  value = ""
121
340
  if value:
122
341
  mode = value.lower()
123
- port = 8000 if mode == "public" else 8888
342
+ port = 8888
124
343
  return f"http://127.0.0.1:{port}/"
125
344
 
126
345
 
346
+ def _parse_major_minor(version: str) -> tuple[int, int] | None:
347
+ match = re.match(r"^\s*(\d+)\.(\d+)", version)
348
+ if not match:
349
+ return None
350
+ return int(match.group(1)), int(match.group(2))
351
+
352
+
353
+ def _shares_stable_series(local: str, remote: str) -> bool:
354
+ local_parts = _parse_major_minor(local)
355
+ remote_parts = _parse_major_minor(remote)
356
+ if not local_parts or not remote_parts:
357
+ return False
358
+ return local_parts == remote_parts
359
+
360
+
127
361
  @shared_task
128
362
  def check_github_updates() -> None:
129
363
  """Check the GitHub repo for updates and upgrade if needed."""
130
364
  base_dir = Path(__file__).resolve().parent.parent
131
365
  mode_file = base_dir / "locks" / "auto_upgrade.lck"
132
366
  mode = "version"
133
- if mode_file.exists():
367
+ reset_network_failures = True
368
+ try:
369
+ if mode_file.exists():
370
+ try:
371
+ raw_mode = mode_file.read_text().strip()
372
+ except (OSError, UnicodeDecodeError):
373
+ logger.warning(
374
+ "Failed to read auto-upgrade mode lockfile", exc_info=True
375
+ )
376
+ else:
377
+ cleaned_mode = raw_mode.lower()
378
+ if cleaned_mode:
379
+ mode = cleaned_mode
380
+
381
+ branch = "main"
134
382
  try:
135
- raw_mode = mode_file.read_text().strip()
136
- except (OSError, UnicodeDecodeError):
137
- logger.warning(
138
- "Failed to read auto-upgrade mode lockfile", exc_info=True
383
+ subprocess.run(
384
+ ["git", "fetch", "origin", branch],
385
+ cwd=base_dir,
386
+ check=True,
387
+ capture_output=True,
388
+ text=True,
139
389
  )
140
- else:
141
- cleaned_mode = raw_mode.lower()
142
- if cleaned_mode:
143
- mode = cleaned_mode
144
-
145
- branch = "main"
146
- subprocess.run(["git", "fetch", "origin", branch], cwd=base_dir, check=True)
390
+ except subprocess.CalledProcessError as exc:
391
+ if _handle_network_failure_if_applicable(base_dir, exc):
392
+ reset_network_failures = False
393
+ raise
147
394
 
148
- log_file = _auto_upgrade_log_path(base_dir)
149
- with log_file.open("a") as fh:
150
- fh.write(
151
- f"{timezone.now().isoformat()} check_github_updates triggered\n"
152
- )
395
+ log_file = _auto_upgrade_log_path(base_dir)
396
+ with log_file.open("a") as fh:
397
+ fh.write(
398
+ f"{timezone.now().isoformat()} check_github_updates triggered\n"
399
+ )
153
400
 
154
- notify = None
155
- startup = None
156
- try: # pragma: no cover - optional dependency
157
- from core.notifications import notify # type: ignore
158
- except Exception:
159
401
  notify = None
160
- try: # pragma: no cover - optional dependency
161
- from nodes.apps import _startup_notification as startup # type: ignore
162
- except Exception:
163
402
  startup = None
403
+ try: # pragma: no cover - optional dependency
404
+ from core.notifications import notify # type: ignore
405
+ except Exception:
406
+ notify = None
407
+ try: # pragma: no cover - optional dependency
408
+ from nodes.apps import _startup_notification as startup # type: ignore
409
+ except Exception:
410
+ startup = None
164
411
 
165
- remote_revision = (
166
- subprocess.check_output(
167
- ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
168
- )
169
- .decode()
170
- .strip()
171
- )
172
-
173
- skipped_revisions = _load_skipped_revisions(base_dir)
174
- if remote_revision in skipped_revisions:
175
- _append_auto_upgrade_log(
176
- base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
177
- )
178
- if startup:
179
- startup()
180
- return
181
-
182
- upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
183
-
184
- upgrade_was_applied = False
185
-
186
- if mode == "latest":
187
- local = (
188
- subprocess.check_output(["git", "rev-parse", branch], cwd=base_dir)
189
- .decode()
190
- .strip()
191
- )
192
- if local == remote_revision:
193
- if startup:
194
- startup()
195
- return
196
- if notify:
197
- notify("Upgrading...", upgrade_stamp)
198
- args = ["./upgrade.sh", "--latest", "--no-restart"]
199
- upgrade_was_applied = True
200
- else:
201
- local = "0"
202
- version_file = base_dir / "VERSION"
203
- if version_file.exists():
204
- local = version_file.read_text().strip()
205
- remote = (
206
- subprocess.check_output(
207
- [
208
- "git",
209
- "show",
210
- f"origin/{branch}:VERSION",
211
- ],
412
+ try:
413
+ remote_revision = subprocess.check_output(
414
+ ["git", "rev-parse", f"origin/{branch}"],
212
415
  cwd=base_dir,
416
+ stderr=subprocess.STDOUT,
417
+ text=True,
418
+ ).strip()
419
+ except subprocess.CalledProcessError as exc:
420
+ if _handle_network_failure_if_applicable(base_dir, exc):
421
+ reset_network_failures = False
422
+ raise
423
+
424
+ skipped_revisions = _load_skipped_revisions(base_dir)
425
+ if remote_revision in skipped_revisions:
426
+ _append_auto_upgrade_log(
427
+ base_dir,
428
+ f"Skipping auto-upgrade for blocked revision {remote_revision}",
213
429
  )
214
- .decode()
215
- .strip()
216
- )
217
- if local == remote:
218
430
  if startup:
219
431
  startup()
220
432
  return
221
- if notify:
222
- notify("Upgrading...", upgrade_stamp)
223
- args = ["./upgrade.sh", "--no-restart"]
224
- upgrade_was_applied = True
225
-
226
- with log_file.open("a") as fh:
227
- fh.write(
228
- f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
229
- )
230
433
 
231
- subprocess.run(args, cwd=base_dir, check=True)
434
+ remote_version = _read_remote_version(base_dir, branch)
435
+ local_version = _read_local_version(base_dir)
436
+ remote_severity = _resolve_release_severity(remote_version)
232
437
 
233
- if shutil.which("gway"):
234
- try:
235
- subprocess.run(["gway", "upgrade"], check=True)
236
- except subprocess.CalledProcessError:
237
- logger.warning("gway upgrade failed; continuing anyway", exc_info=True)
238
-
239
- service_file = base_dir / "locks/service.lck"
240
- if service_file.exists():
241
- service = service_file.read_text().strip()
242
- subprocess.run(
243
- [
244
- "sudo",
245
- "systemctl",
246
- "kill",
247
- "--signal=TERM",
248
- service,
249
- ]
250
- )
251
- else:
252
- subprocess.run(["pkill", "-f", "manage.py runserver"])
438
+ upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
253
439
 
254
- if upgrade_was_applied:
255
- _append_auto_upgrade_log(
256
- base_dir,
257
- (
258
- "Scheduled post-upgrade health check in %s seconds"
259
- % AUTO_UPGRADE_HEALTH_DELAY_SECONDS
260
- ),
261
- )
262
- _schedule_health_check(1)
440
+ upgrade_was_applied = False
441
+
442
+ if mode == "latest":
443
+ local_revision = (
444
+ subprocess.check_output(
445
+ ["git", "rev-parse", branch],
446
+ cwd=base_dir,
447
+ stderr=subprocess.STDOUT,
448
+ text=True,
449
+ )
450
+ .strip()
451
+ )
452
+ if local_revision == remote_revision:
453
+ if startup:
454
+ startup()
455
+ return
456
+
457
+ if (
458
+ remote_version
459
+ and local_version
460
+ and remote_version != local_version
461
+ and remote_severity == SEVERITY_LOW
462
+ and _shares_stable_series(local_version, remote_version)
463
+ ):
464
+ _append_auto_upgrade_log(
465
+ base_dir,
466
+ f"Skipping auto-upgrade for low severity patch {remote_version}",
467
+ )
468
+ if startup:
469
+ startup()
470
+ return
471
+
472
+ if notify:
473
+ notify("Upgrading...", upgrade_stamp)
474
+ args = ["./upgrade.sh", "--latest", "--no-restart"]
475
+ upgrade_was_applied = True
476
+ else:
477
+ local_value = local_version or "0"
478
+ remote_value = remote_version or local_value
479
+
480
+ if local_value == remote_value:
481
+ if startup:
482
+ startup()
483
+ return
484
+
485
+ if (
486
+ mode == "stable"
487
+ and local_version
488
+ and remote_version
489
+ and remote_version != local_version
490
+ and _shares_stable_series(local_version, remote_version)
491
+ and remote_severity != SEVERITY_CRITICAL
492
+ ):
493
+ if startup:
494
+ startup()
495
+ return
496
+
497
+ if notify:
498
+ notify("Upgrading...", upgrade_stamp)
499
+ if mode == "stable":
500
+ args = ["./upgrade.sh", "--stable", "--no-restart"]
501
+ else:
502
+ args = ["./upgrade.sh", "--no-restart"]
503
+ upgrade_was_applied = True
504
+
505
+ with log_file.open("a") as fh:
506
+ fh.write(
507
+ f"{timezone.now().isoformat()} running: {' '.join(args)}\n"
508
+ )
509
+
510
+ subprocess.run(args, cwd=base_dir, check=True)
511
+
512
+ service_file = base_dir / "locks/service.lck"
513
+ if service_file.exists():
514
+ service = service_file.read_text().strip()
515
+ subprocess.run(
516
+ [
517
+ "sudo",
518
+ "systemctl",
519
+ "kill",
520
+ "--signal=TERM",
521
+ service,
522
+ ]
523
+ )
524
+ else:
525
+ subprocess.run(["pkill", "-f", "manage.py runserver"])
526
+
527
+ if upgrade_was_applied:
528
+ _append_auto_upgrade_log(
529
+ base_dir,
530
+ (
531
+ "Scheduled post-upgrade health check in %s seconds"
532
+ % AUTO_UPGRADE_HEALTH_DELAY_SECONDS
533
+ ),
534
+ )
535
+ _schedule_health_check(1)
536
+ finally:
537
+ if reset_network_failures:
538
+ _reset_network_failure_count(base_dir)
263
539
 
264
540
 
265
541
  @shared_task
@@ -410,3 +686,28 @@ def run_client_report_schedule(schedule_id: int) -> None:
410
686
  except Exception:
411
687
  logger.exception("ClientReportSchedule %s failed", schedule_id)
412
688
  raise
689
+
690
+
691
+ @shared_task
692
+ def ensure_recurring_client_reports() -> None:
693
+ """Ensure scheduled consumer reports run for the current period."""
694
+
695
+ from core.models import ClientReportSchedule
696
+
697
+ reference = timezone.localdate()
698
+ schedules = ClientReportSchedule.objects.filter(
699
+ periodicity__in=[
700
+ ClientReportSchedule.PERIODICITY_DAILY,
701
+ ClientReportSchedule.PERIODICITY_WEEKLY,
702
+ ClientReportSchedule.PERIODICITY_MONTHLY,
703
+ ]
704
+ ).prefetch_related("chargers")
705
+
706
+ for schedule in schedules:
707
+ try:
708
+ schedule.generate_missing_reports(reference=reference)
709
+ except Exception:
710
+ logger.exception(
711
+ "Automatic consumer report generation failed for schedule %s",
712
+ schedule.pk,
713
+ )