arthexis 0.1.15__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.


core/views.py CHANGED
@@ -1,3 +1,5 @@
+import base64
+import binascii
 import json
 import logging
 import os
@@ -16,6 +18,7 @@ from django.http import Http404, JsonResponse, HttpResponse
 from django.shortcuts import get_object_or_404, redirect, render, resolve_url
 from django.template.response import TemplateResponse
 from django.utils import timezone
+from django.utils.html import strip_tags
 from django.utils.text import slugify
 from django.utils.translation import gettext as _
 from django.urls import NoReverseMatch, reverse
@@ -32,6 +35,7 @@ from django.template.loader import get_template
 from django.test import signals
 
 from utils import revision
+from nodes.utils import save_screenshot
 from utils.api import api_login_required
 
 logger = logging.getLogger(__name__)
@@ -633,6 +637,8 @@ def _write_todo_fixture(todo: Todo) -> Path:
                 "request": todo.request,
                 "url": todo.url,
                 "request_details": todo.request_details,
+                "generated_for_version": todo.generated_for_version,
+                "generated_for_revision": todo.generated_for_revision,
             },
         }
     ]
@@ -666,9 +672,16 @@ def _ensure_release_todo(
     previous_version = (previous_version or "").strip()
     target_version = _next_patch_version(release.version)
     if previous_version:
-        incremented_previous = _next_patch_version(previous_version)
-        if incremented_previous == release.version:
-            target_version = release.version
+        try:
+            from packaging.version import InvalidVersion, Version
+
+            parsed_previous = Version(previous_version)
+            parsed_target = Version(target_version)
+        except InvalidVersion:
+            pass
+        else:
+            if parsed_target <= parsed_previous:
+                target_version = _next_patch_version(previous_version)
     request = f"Create release {release.package.name} {target_version}"
     try:
         url = reverse("admin:core_packagerelease_changelist")
@@ -680,6 +693,8 @@ def _ensure_release_todo(
             "request": request,
             "url": url,
             "request_details": "",
+            "generated_for_version": release.version or "",
+            "generated_for_revision": release.revision or "",
             "is_seed_data": True,
            "is_deleted": False,
            "is_user_data": False,
@@ -815,14 +830,77 @@ def _get_return_url(request) -> str:
     return resolve_url("admin:index")
 
 
+def _refresh_changelog_once(ctx, log_path: Path) -> None:
+    """Regenerate the changelog a single time per release run."""
+
+    if ctx.get("changelog_refreshed"):
+        return
+
+    _append_log(log_path, "Refreshing changelog before TODO review")
+    try:
+        subprocess.run(["scripts/generate-changelog.sh"], check=True)
+    except OSError as exc:
+        if _should_use_python_changelog(exc):
+            _append_log(
+                log_path,
+                f"scripts/generate-changelog.sh failed: {exc}",
+            )
+            _generate_changelog_with_python(log_path)
+        else:  # pragma: no cover - unexpected OSError
+            raise
+    else:
+        _append_log(
+            log_path,
+            "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh",
+        )
+
+    staged_paths: list[str] = []
+    changelog_path = Path("CHANGELOG.rst")
+    if changelog_path.exists():
+        staged_paths.append(str(changelog_path))
+
+    release_fixtures = sorted(Path("core/fixtures").glob("releases__*.json"))
+    staged_paths.extend(str(path) for path in release_fixtures)
+
+    if staged_paths:
+        subprocess.run(["git", "add", *staged_paths], check=True)
+
+    diff = subprocess.run(
+        ["git", "diff", "--cached", "--name-only"],
+        check=True,
+        capture_output=True,
+        text=True,
+    )
+    changed_paths = [line.strip() for line in diff.stdout.splitlines() if line.strip()]
+
+    if changed_paths:
+        changelog_dirty = "CHANGELOG.rst" in changed_paths
+        fixtures_dirty = any(path.startswith("core/fixtures/") for path in changed_paths)
+        if changelog_dirty and fixtures_dirty:
+            message = "chore: sync release fixtures and changelog"
+        elif changelog_dirty:
+            message = "docs: refresh changelog"
+        else:
+            message = "chore: update release fixtures"
+        subprocess.run(["git", "commit", "-m", message], check=True)
+        _append_log(log_path, f"Committed changelog refresh ({message})")
+    else:
+        _append_log(log_path, "Changelog already up to date")
+
+    ctx["changelog_refreshed"] = True
+
+
 def _step_check_todos(release, ctx, log_path: Path) -> None:
+    _refresh_changelog_once(ctx, log_path)
+
     pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
-    if pending_qs.exists():
-        ctx["todos"] = list(
-            pending_qs.values("id", "request", "url", "request_details")
-        )
-        if not ctx.get("todos_ack"):
-            raise PendingTodos()
+    pending_values = list(
+        pending_qs.values("id", "request", "url", "request_details")
+    )
+    if not ctx.get("todos_ack"):
+        ctx["todos"] = pending_values
+        ctx["todos_required"] = True
+        raise PendingTodos()
     todos = list(Todo.objects.filter(is_deleted=False))
     for todo in todos:
         todo.delete()
@@ -837,6 +915,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
         check=False,
     )
     ctx.pop("todos", None)
+    ctx.pop("todos_required", None)
     ctx["todos_ack"] = True
 
 
@@ -860,9 +939,12 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
         if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
     ]
     changelog_dirty = "CHANGELOG.rst" in files
+    version_dirty = "VERSION" in files
     allowed_dirty_files = set(fixture_files)
     if changelog_dirty:
         allowed_dirty_files.add("CHANGELOG.rst")
+    if version_dirty:
+        allowed_dirty_files.add("VERSION")
 
     if files and len(allowed_dirty_files) == len(files):
         summary = []
@@ -895,6 +977,8 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
         commit_paths = [*fixture_files]
         if changelog_dirty:
             commit_paths.append("CHANGELOG.rst")
+        if version_dirty:
+            commit_paths.append("VERSION")
 
         log_fragments = []
         if fixture_files:
@@ -903,6 +987,8 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
             )
         if changelog_dirty:
             log_fragments.append("CHANGELOG.rst")
+        if version_dirty:
+            log_fragments.append("VERSION")
         details = ", ".join(log_fragments) if log_fragments else "changes"
         _append_log(
             log_path,
@@ -910,8 +996,16 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
         )
         subprocess.run(["git", "add", *commit_paths], check=True)
 
-        if changelog_dirty and fixture_files:
+        if changelog_dirty and version_dirty and fixture_files:
+            commit_message = "chore: sync release metadata"
+        elif changelog_dirty and version_dirty:
+            commit_message = "chore: update version and changelog"
+        elif version_dirty and fixture_files:
+            commit_message = "chore: update version and fixtures"
+        elif changelog_dirty and fixture_files:
             commit_message = "chore: sync release fixtures and changelog"
+        elif version_dirty:
+            commit_message = "chore: update version"
         elif changelog_dirty:
             commit_message = "docs: refresh changelog"
         else:
@@ -1023,6 +1117,36 @@ def _step_changelog_docs(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "CHANGELOG and documentation review recorded")
 
 
+def _record_release_todo(
+    release, ctx, log_path: Path, *, previous_version: str | None = None
+) -> None:
+    previous_version = previous_version or ctx.pop(
+        "release_todo_previous_version",
+        getattr(release, "_repo_version_before_sync", ""),
+    )
+    todo, fixture_path = _ensure_release_todo(
+        release, previous_version=previous_version
+    )
+    fixture_display = _format_path(fixture_path)
+    _append_log(log_path, f"Added TODO: {todo.request}")
+    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
+    subprocess.run(["git", "add", str(fixture_path)], check=True)
+    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
+    fixture_diff = subprocess.run(
+        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
+        check=False,
+    )
+    if fixture_diff.returncode != 0:
+        commit_message = f"chore: add release TODO for {release.package.name}"
+        subprocess.run(["git", "commit", "-m", commit_message], check=True)
+        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
+    else:
+        _append_log(
+            log_path,
+            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
+        )
+
+
 def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "Execute pre-release actions")
     if ctx.get("dry_run"):
@@ -1096,27 +1220,7 @@ def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
     for path in staged_release_fixtures:
         subprocess.run(["git", "reset", "HEAD", str(path)], check=False)
         _append_log(log_path, f"Unstaged release fixture {_format_path(path)}")
-    todo, fixture_path = _ensure_release_todo(
-        release, previous_version=repo_version_before_sync
-    )
-    fixture_display = _format_path(fixture_path)
-    _append_log(log_path, f"Added TODO: {todo.request}")
-    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
-    subprocess.run(["git", "add", str(fixture_path)], check=True)
-    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
-    fixture_diff = subprocess.run(
-        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
-        check=False,
-    )
-    if fixture_diff.returncode != 0:
-        commit_message = f"chore: add release TODO for {release.package.name}"
-        subprocess.run(["git", "commit", "-m", commit_message], check=True)
-        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
-    else:
-        _append_log(
-            log_path,
-            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
-        )
+    ctx["release_todo_previous_version"] = repo_version_before_sync
     _append_log(log_path, "Pre-release actions complete")
 
 
@@ -1200,6 +1304,7 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
         )
         PackageRelease.dump_fixture()
         _append_log(log_path, "Updated release fixtures")
+        _record_release_todo(release, ctx, log_path)
     except Exception:
         _clean_repo()
         raise
@@ -1521,6 +1626,7 @@ def rfid_batch(request):
                 "custom_label": t.custom_label,
                 "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
                 "external_command": t.external_command,
+                "post_auth_command": t.post_auth_command,
                 "allowed": t.allowed,
                 "color": t.color,
                 "released": t.released,
@@ -1556,6 +1662,11 @@ def rfid_batch(request):
                external_command = ""
            else:
                external_command = external_command.strip()
+            post_auth_command = row.get("post_auth_command")
+            if not isinstance(post_auth_command, str):
+                post_auth_command = ""
+            else:
+                post_auth_command = post_auth_command.strip()
 
            tag, _ = RFID.objects.update_or_create(
                rfid=rfid.upper(),
@@ -1565,6 +1676,7 @@ def rfid_batch(request):
                    "released": released,
                    "custom_label": custom_label,
                    "external_command": external_command,
+                    "post_auth_command": post_auth_command,
                },
            )
            if energy_accounts:
@@ -1648,6 +1760,12 @@ def release_progress(request, pk: int, action: str):
     else:
         log_dir_warning_message = ctx.get("log_dir_warning_message")
 
+    if "changelog_report_url" not in ctx:
+        try:
+            ctx["changelog_report_url"] = reverse("admin:system-changelog-report")
+        except NoReverseMatch:
+            ctx["changelog_report_url"] = ""
+
     steps = PUBLISH_STEPS
     total_steps = len(steps)
     step_count = ctx.get("step", 0)
@@ -1715,7 +1833,7 @@
     else:
         ctx["todos_ack"] = True
 
-    if pending_items and not ctx.get("todos_ack"):
+    if not ctx.get("todos_ack"):
         ctx["todos"] = [
             {
                 "id": todo.pk,
@@ -1725,8 +1843,10 @@
             }
             for todo in pending_items
         ]
+        ctx["todos_required"] = True
     else:
         ctx.pop("todos", None)
+        ctx.pop("todos_required", None)
 
     log_name = _release_log_name(release.package.name, release.version)
     if ctx.get("log") != log_name:
@@ -1847,7 +1967,9 @@
         and not ctx.get("error")
         else None
     )
-    has_pending_todos = bool(ctx.get("todos") and not ctx.get("todos_ack"))
+    has_pending_todos = bool(
+        ctx.get("todos_required") and not ctx.get("todos_ack")
+    )
     if has_pending_todos:
         next_step = None
     dirty_files = ctx.get("dirty_files")
@@ -1964,6 +2086,7 @@
         "cert_log": ctx.get("cert_log"),
         "fixtures": fixtures_summary,
         "todos": todos_display,
+        "changelog_report_url": ctx.get("changelog_report_url", ""),
         "dirty_files": dirty_files,
         "dirty_commit_message": ctx.get("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE),
         "dirty_commit_error": ctx.get("dirty_commit_error"),
@@ -2180,6 +2303,7 @@ def todo_focus(request, pk: int):
         "focus_auth": focus_auth,
         "next_url": _get_return_url(request),
         "done_url": reverse("todo-done", args=[todo.pk]),
+        "snapshot_url": reverse("todo-snapshot", args=[todo.pk]),
     }
     return render(request, "core/todo_focus.html", context)
 
@@ -2199,3 +2323,67 @@ def todo_done(request, pk: int):
     todo.done_on = timezone.now()
     todo.save(update_fields=["done_on"])
     return redirect(redirect_to)
+
+
+@staff_member_required
+@require_POST
+def todo_snapshot(request, pk: int):
+    todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
+    if todo.done_on:
+        return JsonResponse({"detail": _("This TODO has already been completed.")}, status=400)
+
+    try:
+        payload = json.loads(request.body.decode("utf-8") or "{}")
+    except json.JSONDecodeError:
+        return JsonResponse({"detail": _("Invalid JSON payload.")}, status=400)
+
+    image_data = payload.get("image", "") if isinstance(payload, dict) else ""
+    if not isinstance(image_data, str) or not image_data.startswith("data:image/png;base64,"):
+        return JsonResponse({"detail": _("A PNG data URL is required.")}, status=400)
+
+    try:
+        encoded = image_data.split(",", 1)[1]
+    except IndexError:
+        return JsonResponse({"detail": _("Screenshot data is incomplete.")}, status=400)
+
+    try:
+        image_bytes = base64.b64decode(encoded, validate=True)
+    except (ValueError, binascii.Error):
+        return JsonResponse({"detail": _("Unable to decode screenshot data.")}, status=400)
+
+    if not image_bytes:
+        return JsonResponse({"detail": _("Screenshot data is empty.")}, status=400)
+
+    max_size = 5 * 1024 * 1024
+    if len(image_bytes) > max_size:
+        return JsonResponse({"detail": _("Screenshot is too large to store.")}, status=400)
+
+    relative_path = Path("screenshots") / f"todo-{todo.pk}-{uuid.uuid4().hex}.png"
+    full_path = settings.LOG_DIR / relative_path
+    full_path.parent.mkdir(parents=True, exist_ok=True)
+    with full_path.open("wb") as fh:
+        fh.write(image_bytes)
+
+    primary_text = strip_tags(todo.request or "").strip()
+    details_text = strip_tags(todo.request_details or "").strip()
+    alt_parts = [part for part in (primary_text, details_text) if part]
+    if alt_parts:
+        alt_text = " — ".join(alt_parts)
+    else:
+        alt_text = _("TODO %(id)s snapshot") % {"id": todo.pk}
+
+    sample = save_screenshot(
+        relative_path,
+        method="TODO_QA",
+        content=alt_text,
+        user=request.user if request.user.is_authenticated else None,
+    )
+
+    if sample is None:
+        try:
+            full_path.unlink()
+        except FileNotFoundError:
+            pass
+        return JsonResponse({"detail": _("Duplicate snapshot ignored.")})
+
+    return JsonResponse({"detail": _("Snapshot saved."), "sample": str(sample.pk)})
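
The new todo_snapshot view above expects a JSON body whose "image" key holds a PNG wrapped in a "data:image/png;base64," data URL, reachable through the todo-snapshot URL name that todo_focus exposes as snapshot_url. A minimal sketch of what a caller would send; the helper below is illustrative and not part of the package:

import base64
import json


def build_snapshot_payload(png_bytes: bytes) -> str:
    # Illustrative helper: encode raw PNG bytes the way todo_snapshot expects them.
    encoded = base64.b64encode(png_bytes).decode("ascii")
    return json.dumps({"image": f"data:image/png;base64,{encoded}"})

The view rejects payloads over 5 MiB, strips HTML from the TODO text to build the screenshot caption, and treats a None return from save_screenshot as a duplicate, removing the file it just wrote.
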
nodes/admin.py CHANGED
@@ -1217,6 +1217,11 @@ class NodeFeatureAdmin(EntityModelAdmin):
                 self.admin_site.admin_view(self.celery_report),
                 name="nodes_nodefeature_celery_report",
             ),
+            path(
+                "view-waveform/",
+                self.admin_site.admin_view(self.view_waveform),
+                name="nodes_nodefeature_view_waveform",
+            ),
             path(
                 "take-screenshot/",
                 self.admin_site.admin_view(self.take_screenshot),
@@ -1291,6 +1296,24 @@ class NodeFeatureAdmin(EntityModelAdmin):
             return None
         return feature
 
+    def view_waveform(self, request):
+        feature = self._ensure_feature_enabled(
+            request, "audio-capture", "View Waveform"
+        )
+        if not feature:
+            return redirect("..")
+
+        context = {
+            **self.admin_site.each_context(request),
+            "title": _("Audio Capture Waveform"),
+            "feature": feature,
+        }
+        return TemplateResponse(
+            request,
+            "admin/nodes/nodefeature/view_waveform.html",
+            context,
+        )
+
     def take_screenshot(self, request):
         feature = self._ensure_feature_enabled(
             request, "screenshot-poll", "Take Screenshot"
@@ -1542,7 +1565,7 @@ class NetMessageAdmin(EntityModelAdmin):
     search_fields = ("subject", "body")
     list_filter = ("complete", "filter_node_role", "filter_current_relation")
     ordering = ("-created",)
-    readonly_fields = ("complete",)
+    readonly_fields = ("complete", "confirmed_peers")
     actions = ["send_messages"]
     fieldsets = (
         (None, {"fields": ("subject", "body")}),
@@ -1567,6 +1590,7 @@
                     "node_origin",
                    "target_limit",
                    "propagated_to",
+                    "confirmed_peers",
                    "complete",
                )
            },
nodes/models.py CHANGED
@@ -98,6 +98,12 @@ class NodeFeature(Entity):
                 url_name="admin:nodes_nodefeature_celery_report",
             ),
         ),
+        "audio-capture": (
+            NodeFeatureDefaultAction(
+                label="View Waveform",
+                url_name="admin:nodes_nodefeature_view_waveform",
+            ),
+        ),
         "screenshot-poll": (
             NodeFeatureDefaultAction(
                 label="Take Screenshot",
@@ -244,7 +250,7 @@ class Node(Entity):
         "ap-router",
         "gway-runner",
     }
-    MANUAL_FEATURE_SLUGS = {"clipboard-poll", "screenshot-poll"}
+    MANUAL_FEATURE_SLUGS = {"clipboard-poll", "screenshot-poll", "audio-capture"}
 
     def __str__(self) -> str:  # pragma: no cover - simple representation
         return f"{self.hostname}:{self.port}"
@@ -747,6 +753,7 @@
         self._sync_clipboard_task(clipboard_enabled)
         self._sync_screenshot_task(screenshot_enabled)
         self._sync_landing_lead_task(celery_enabled)
+        self._sync_ocpp_session_report_task(celery_enabled)
 
     def _sync_clipboard_task(self, enabled: bool):
         from django_celery_beat.models import IntervalSchedule, PeriodicTask
@@ -819,6 +826,39 @@
         else:
             PeriodicTask.objects.filter(name=task_name).delete()
 
+    def _sync_ocpp_session_report_task(self, celery_enabled: bool):
+        from django_celery_beat.models import CrontabSchedule, PeriodicTask
+        from django.db.utils import OperationalError, ProgrammingError
+
+        task_name = "ocpp_send_daily_session_report"
+
+        if not self.is_local:
+            return
+
+        if not celery_enabled or not mailer.can_send_email():
+            PeriodicTask.objects.filter(name=task_name).delete()
+            return
+
+        try:
+            schedule, _ = CrontabSchedule.objects.get_or_create(
+                minute="0",
+                hour="18",
+                day_of_week="*",
+                day_of_month="*",
+                month_of_year="*",
+            )
+            PeriodicTask.objects.update_or_create(
+                name=task_name,
+                defaults={
+                    "crontab": schedule,
+                    "interval": None,
+                    "task": "ocpp.tasks.send_daily_session_report",
+                    "enabled": True,
+                },
+            )
+        except (OperationalError, ProgrammingError):
+            logger.debug("Skipping OCPP session report task sync; tables not ready")
+
     def send_mail(
         self,
         subject: str,
@@ -955,15 +995,18 @@ class NodeManager(Profile):
     )
     api_key = SigilShortAutoField(
         max_length=255,
+        verbose_name="API key",
         help_text="API key issued by the DNS provider.",
     )
     api_secret = SigilShortAutoField(
         max_length=255,
+        verbose_name="API secret",
         help_text="API secret issued by the DNS provider.",
     )
     customer_id = SigilShortAutoField(
         max_length=100,
         blank=True,
+        verbose_name="Customer ID",
         help_text="Optional GoDaddy customer identifier for the account.",
     )
     default_domain = SigilShortAutoField(
@@ -1367,6 +1410,7 @@ class NetMessage(Entity):
     propagated_to = models.ManyToManyField(
         Node, blank=True, related_name="received_net_messages"
     )
+    confirmed_peers = models.JSONField(default=dict, blank=True)
     created = models.DateTimeField(auto_now_add=True)
     complete = models.BooleanField(default=False, editable=False)
 
@@ -1596,7 +1640,10 @@
         seen_list = seen.copy()
         selected_ids = [str(n.uuid) for n in selected]
         payload_seen = seen_list + selected_ids
+        confirmed_peers = dict(self.confirmed_peers or {})
+
         for node in selected:
+            now = timezone.now().isoformat()
             payload = {
                 "uuid": str(self.uuid),
                 "subject": self.subject,
@@ -1632,20 +1679,39 @@
                 headers["X-Signature"] = base64.b64encode(signature).decode()
             except Exception:
                 pass
+            status_entry = {
+                "status": "pending",
+                "status_code": None,
+                "updated": now,
+            }
             try:
-                requests.post(
+                response = requests.post(
                     f"http://{node.address}:{node.port}/nodes/net-message/",
                     data=payload_json,
                     headers=headers,
                     timeout=1,
                 )
+                status_entry["status_code"] = getattr(response, "status_code", None)
+                if getattr(response, "ok", False):
+                    status_entry["status"] = "acknowledged"
+                else:
+                    status_entry["status"] = "failed"
             except Exception:
-                pass
+                status_entry["status"] = "error"
             self.propagated_to.add(node)
+            confirmed_peers[str(node.uuid)] = status_entry
+
+        save_fields: list[str] = []
+        if confirmed_peers != (self.confirmed_peers or {}):
+            self.confirmed_peers = confirmed_peers
+            save_fields.append("confirmed_peers")
 
         if total_known and self.propagated_to.count() >= total_known:
             self.complete = True
-            self.save(update_fields=["complete"] if self.complete else [])
+            save_fields.append("complete")
+
+        if save_fields:
+            self.save(update_fields=save_fields)
 
 
 class ContentSample(Entity):
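
The confirmed_peers JSONField introduced on NetMessage keeps one entry per peer, keyed by str(node.uuid) and built from the status_entry dict in the propagation loop. A sketch of the resulting shape; the key and values below are invented for illustration:

# Illustrative example of NetMessage.confirmed_peers after one propagation pass.
confirmed_peers = {
    "9b1c5e7a-0000-0000-0000-examplenode1": {
        "status": "acknowledged",           # or "pending", "failed", "error"
        "status_code": 200,                 # None when the request never completed
        "updated": "2024-01-01T18:00:00",   # timezone.now().isoformat() at send time
    },
}

With the new save_fields bookkeeping, complete and confirmed_peers are written together in one update_fields save, and a pass that changes neither field skips the save() call entirely.
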
nodes/rfid_sync.py CHANGED
@@ -45,6 +45,8 @@ def serialize_rfid(tag: RFID) -> dict[str, Any]:
         "color": tag.color,
         "kind": tag.kind,
         "released": tag.released,
+        "external_command": tag.external_command,
+        "post_auth_command": tag.post_auth_command,
         "last_seen_on": tag.last_seen_on.isoformat() if tag.last_seen_on else None,
         "energy_accounts": [account.id for account in accounts],
         "energy_account_names": [
@@ -64,6 +66,17 @@ def apply_rfid_payload(
         outcome.error = "Missing RFID value"
         return outcome
 
+    external_command = entry.get("external_command")
+    if not isinstance(external_command, str):
+        external_command = ""
+    else:
+        external_command = external_command.strip()
+    post_auth_command = entry.get("post_auth_command")
+    if not isinstance(post_auth_command, str):
+        post_auth_command = ""
+    else:
+        post_auth_command = post_auth_command.strip()
+
     defaults: dict[str, Any] = {
         "custom_label": entry.get("custom_label", ""),
         "key_a": entry.get("key_a", RFID._meta.get_field("key_a").default),
@@ -75,6 +88,8 @@
         "color": entry.get("color", RFID.BLACK),
         "kind": entry.get("kind", RFID.CLASSIC),
         "released": bool(entry.get("released", False)),
+        "external_command": external_command,
+        "post_auth_command": post_auth_command,
     }
 
     if origin_node is not None:
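
Both rfid_batch and apply_rfid_payload normalize the new command fields the same way: any non-string value collapses to an empty string, and strings are stripped. A tiny sketch of that rule as a standalone helper; the name _clean_command is hypothetical and not part of the package:

def _clean_command(value: object) -> str:
    # Hypothetical helper mirroring the normalization applied to
    # external_command and post_auth_command in the diff above.
    if not isinstance(value, str):
        return ""
    return value.strip()


assert _clean_command(None) == ""
assert _clean_command("  beep twice  ") == "beep twice"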