arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of arthexis might be problematic.

Files changed (112)
  1. arthexis-0.1.26.dist-info/METADATA +272 -0
  2. arthexis-0.1.26.dist-info/RECORD +111 -0
  3. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +29 -29
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -25
  9. config/context_processors.py +67 -68
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +71 -25
  14. config/offline.py +49 -49
  15. config/settings.py +676 -492
  16. config/settings_helpers.py +109 -0
  17. config/urls.py +228 -159
  18. config/wsgi.py +17 -17
  19. core/admin.py +4052 -2066
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +192 -151
  22. core/apps.py +350 -223
  23. core/auto_upgrade.py +72 -0
  24. core/backends.py +311 -124
  25. core/changelog.py +403 -0
  26. core/entity.py +149 -133
  27. core/environment.py +60 -43
  28. core/fields.py +168 -75
  29. core/form_fields.py +75 -0
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +183 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +89 -83
  37. core/middleware.py +91 -91
  38. core/models.py +5041 -2195
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +107 -0
  42. core/release.py +940 -346
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -131
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +250 -284
  47. core/system.py +1425 -230
  48. core/tasks.py +538 -199
  49. core/temp_passwords.py +181 -0
  50. core/test_system_info.py +202 -43
  51. core/tests.py +2673 -1069
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +681 -495
  55. core/views.py +2484 -789
  56. core/widgets.py +213 -51
  57. nodes/admin.py +2236 -445
  58. nodes/apps.py +98 -70
  59. nodes/backends.py +160 -53
  60. nodes/dns.py +203 -0
  61. nodes/feature_checks.py +133 -0
  62. nodes/lcd.py +165 -165
  63. nodes/models.py +2375 -870
  64. nodes/reports.py +411 -0
  65. nodes/rfid_sync.py +210 -0
  66. nodes/signals.py +18 -0
  67. nodes/tasks.py +141 -46
  68. nodes/tests.py +5045 -1489
  69. nodes/urls.py +29 -13
  70. nodes/utils.py +172 -73
  71. nodes/views.py +1768 -304
  72. ocpp/admin.py +1775 -481
  73. ocpp/apps.py +25 -25
  74. ocpp/consumers.py +1843 -630
  75. ocpp/evcs.py +844 -928
  76. ocpp/evcs_discovery.py +158 -0
  77. ocpp/models.py +1417 -640
  78. ocpp/network.py +398 -0
  79. ocpp/reference_utils.py +42 -0
  80. ocpp/routing.py +11 -9
  81. ocpp/simulator.py +745 -368
  82. ocpp/status_display.py +26 -0
  83. ocpp/store.py +603 -403
  84. ocpp/tasks.py +479 -31
  85. ocpp/test_export_import.py +131 -130
  86. ocpp/test_rfid.py +1072 -540
  87. ocpp/tests.py +5494 -2296
  88. ocpp/transactions_io.py +197 -165
  89. ocpp/urls.py +50 -50
  90. ocpp/views.py +2024 -912
  91. pages/admin.py +1123 -396
  92. pages/apps.py +45 -10
  93. pages/checks.py +40 -40
  94. pages/context_processors.py +151 -85
  95. pages/defaults.py +13 -0
  96. pages/forms.py +221 -0
  97. pages/middleware.py +213 -153
  98. pages/models.py +720 -252
  99. pages/module_defaults.py +156 -0
  100. pages/site_config.py +137 -0
  101. pages/tasks.py +74 -0
  102. pages/tests.py +4009 -1389
  103. pages/urls.py +38 -20
  104. pages/utils.py +93 -12
  105. pages/views.py +1736 -762
  106. arthexis-0.1.9.dist-info/METADATA +0 -168
  107. arthexis-0.1.9.dist-info/RECORD +0 -92
  108. core/workgroup_urls.py +0 -17
  109. core/workgroup_views.py +0 -94
  110. nodes/actions.py +0 -70
  111. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
  112. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
nodes/reports.py ADDED
@@ -0,0 +1,411 @@
+"""Utilities for generating Celery-focused admin reports."""
+
+from __future__ import annotations
+
+from collections import deque
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone as dt_timezone
+import numbers
+import re
+from pathlib import Path
+from typing import Iterable, Iterator
+
+from django.conf import settings
+from django.utils import timezone
+from django.utils.translation import gettext_lazy as _
+
+
+@dataclass(frozen=True)
+class ReportPeriod:
+    """Representation of an available reporting window."""
+
+    key: str
+    label: str
+    delta: timedelta
+
+
+REPORT_PERIOD_ORDER = ("1d", "7d", "30d")
+REPORT_PERIODS: dict[str, ReportPeriod] = {
+    "1d": ReportPeriod("1d", _("Single day"), timedelta(days=1)),
+    "7d": ReportPeriod("7d", _("Seven days"), timedelta(days=7)),
+    "30d": ReportPeriod("30d", _("Monthly"), timedelta(days=30)),
+}
+
+
+@dataclass(frozen=True)
+class ScheduledTaskSummary:
+    """Human-friendly representation of a Celery scheduled task."""
+
+    name: str
+    task: str
+    schedule_type: str
+    schedule_description: str
+    next_run: datetime | None
+    enabled: bool
+    source: str
+
+
+@dataclass(frozen=True)
+class CeleryLogEntry:
+    """A parsed log entry relevant to Celery activity."""
+
+    timestamp: datetime
+    level: str
+    logger: str
+    message: str
+    source: str
+
+
+@dataclass(frozen=True)
+class CeleryLogCollection:
+    """Container for log entries and the sources scanned."""
+
+    entries: list[CeleryLogEntry]
+    checked_sources: list[str]
+
+
+_LOG_LINE_PATTERN = re.compile(
+    r"^(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(?:,\d{1,6})?) "
+    r"\[(?P<level>[A-Z]+)\] (?P<logger>[^:]+): (?P<message>.*)$"
+)
+
+
+def iter_report_periods() -> Iterator[ReportPeriod]:
+    """Yield configured reporting periods in display order."""
+
+    for key in REPORT_PERIOD_ORDER:
+        period = REPORT_PERIODS[key]
+        yield period
+
+
+def resolve_period(period_key: str | None) -> ReportPeriod:
+    """Return the requested reporting period or fall back to the default."""
+
+    if not period_key:
+        return REPORT_PERIODS[REPORT_PERIOD_ORDER[0]]
+    return REPORT_PERIODS.get(period_key, REPORT_PERIODS[REPORT_PERIOD_ORDER[0]])
+
+
+def collect_scheduled_tasks(now: datetime, window_end: datetime) -> list[ScheduledTaskSummary]:
+    """Return Celery tasks scheduled to run before ``window_end``.
+
+    Tasks with unknown scheduling information are included to avoid omitting
+    potentially important configuration.
+    """
+
+    summaries: list[ScheduledTaskSummary] = []
+    summaries.extend(_collect_db_tasks(now))
+    summaries.extend(_collect_settings_tasks(now))
+
+    filtered: list[ScheduledTaskSummary] = []
+    for summary in summaries:
+        if summary.next_run is None or summary.next_run <= window_end:
+            filtered.append(summary)
+
+    far_future = datetime.max.replace(tzinfo=dt_timezone.utc)
+    filtered.sort(
+        key=lambda item: (
+            item.next_run or far_future,
+            item.name.lower(),
+        )
+    )
+    return filtered
+
+
+def collect_celery_log_entries(
+    start: datetime, end: datetime, *, max_lines: int = 500
+) -> CeleryLogCollection:
+    """Return Celery-related log entries within ``start`` and ``end``."""
+
+    entries: list[CeleryLogEntry] = []
+    checked_sources: list[str] = []
+
+    for path in _candidate_log_files():
+        checked_sources.append(path.name)
+        for entry in _read_log_entries(path, max_lines=max_lines):
+            if entry.timestamp < start or entry.timestamp > end:
+                continue
+            entries.append(entry)
+
+    entries.sort(key=lambda item: item.timestamp, reverse=True)
+    return CeleryLogCollection(entries=entries, checked_sources=checked_sources)
+
+
+def _collect_db_tasks(now: datetime) -> list[ScheduledTaskSummary]:
+    try: # pragma: no cover - optional dependency guard
+        from django_celery_beat.models import PeriodicTask
+    except Exception:
+        return []
+
+    try:
+        tasks = list(
+            PeriodicTask.objects.select_related(
+                "interval", "crontab", "solar", "clocked"
+            )
+        )
+    except Exception: # pragma: no cover - database unavailable
+        return []
+
+    summaries: list[ScheduledTaskSummary] = []
+    for task in tasks:
+        schedule = getattr(task, "schedule", None)
+        next_run = _estimate_next_run(now, schedule, task.last_run_at, task.start_time)
+        schedule_type = _determine_schedule_type(task)
+        schedule_description = _describe_db_schedule(task)
+        summaries.append(
+            ScheduledTaskSummary(
+                name=task.name,
+                task=task.task,
+                schedule_type=schedule_type,
+                schedule_description=schedule_description,
+                next_run=next_run,
+                enabled=bool(task.enabled),
+                source=str(_("Database")),
+            )
+        )
+    return summaries
+
+
+def _collect_settings_tasks(now: datetime) -> list[ScheduledTaskSummary]:
+    schedule_config = getattr(settings, "CELERY_BEAT_SCHEDULE", {})
+    summaries: list[ScheduledTaskSummary] = []
+
+    for name, config in schedule_config.items():
+        task_name = str(config.get("task", ""))
+        schedule = config.get("schedule")
+        next_run = _estimate_next_run(now, schedule, None, None)
+        schedule_type = _describe_schedule_type(schedule)
+        schedule_description = _describe_settings_schedule(schedule)
+        summaries.append(
+            ScheduledTaskSummary(
+                name=name,
+                task=task_name,
+                schedule_type=schedule_type,
+                schedule_description=schedule_description,
+                next_run=next_run,
+                enabled=True,
+                source=str(_("Settings")),
+            )
+        )
+
+    return summaries
+
+
+def _determine_schedule_type(task) -> str:
+    if getattr(task, "clocked_id", None):
+        return "clocked"
+    if getattr(task, "solar_id", None):
+        return "solar"
+    if getattr(task, "crontab_id", None):
+        return "crontab"
+    if getattr(task, "interval_id", None):
+        return "interval"
+    return "unknown"
+
+
+def _estimate_next_run(
+    now: datetime,
+    schedule,
+    last_run_at: datetime | None,
+    start_time: datetime | None,
+) -> datetime | None:
+    if schedule is None:
+        return None
+
+    if isinstance(schedule, timedelta):
+        return now + schedule
+
+    if isinstance(schedule, numbers.Real):
+        return now + timedelta(seconds=float(schedule))
+
+    if isinstance(schedule, datetime):
+        candidate = _make_aware(schedule)
+        if candidate and candidate >= now:
+            return candidate
+        return candidate
+
+    schedule_now = _schedule_now(schedule, now)
+    candidate_start = _coerce_with_schedule(schedule, start_time)
+    if candidate_start and candidate_start > schedule_now:
+        return candidate_start
+
+    reference = _coerce_with_schedule(schedule, last_run_at) or schedule_now
+
+    try:
+        remaining = schedule.remaining_estimate(reference)
+        if remaining is None:
+            return None
+        return schedule_now + remaining
+    except Exception:
+        try:
+            due, next_time_to_run = schedule.is_due(reference)
+        except Exception:
+            return None
+        if due:
+            return schedule_now
+        try:
+            seconds = float(next_time_to_run)
+        except (TypeError, ValueError):
+            return None
+        return schedule_now + timedelta(seconds=seconds)
+
+
+def _schedule_now(schedule, fallback: datetime) -> datetime:
+    if hasattr(schedule, "now") and hasattr(schedule, "maybe_make_aware"):
+        try:
+            current = schedule.maybe_make_aware(schedule.now())
+            if isinstance(current, datetime):
+                return current
+        except Exception:
+            pass
+    return fallback
+
+
+def _coerce_with_schedule(schedule, value: datetime | None) -> datetime | None:
+    if value is None:
+        return None
+    if hasattr(schedule, "maybe_make_aware"):
+        try:
+            coerced = schedule.maybe_make_aware(value)
+            if isinstance(coerced, datetime):
+                return coerced
+        except Exception:
+            pass
+    return _make_aware(value)
+
+
+def _make_aware(value: datetime) -> datetime:
+    if timezone.is_naive(value):
+        try:
+            return timezone.make_aware(value)
+        except Exception:
+            return value
+    return value
+
+
+def _describe_db_schedule(task) -> str:
+    schedule = getattr(task, "schedule", None)
+    if schedule is None:
+        return ""
+
+    try:
+        human_readable = getattr(schedule, "human_readable", None)
+        if callable(human_readable):
+            return str(human_readable())
+        if isinstance(human_readable, str):
+            return human_readable
+    except Exception:
+        pass
+
+    for attr in ("clocked", "solar", "crontab", "interval"):
+        obj = getattr(task, attr, None)
+        if obj is not None:
+            return str(obj)
+    return str(schedule)
+
+
+def _describe_schedule_type(schedule) -> str:
+    type_name = type(schedule).__name__ if schedule is not None else "unknown"
+    return type_name.replace("Schedule", "").lower()
+
+
+def _describe_settings_schedule(schedule) -> str:
+    if schedule is None:
+        return ""
+
+    try:
+        human_readable = getattr(schedule, "human_readable", None)
+        if callable(human_readable):
+            return str(human_readable())
+        if isinstance(human_readable, str):
+            return human_readable
+    except Exception:
+        pass
+
+    if isinstance(schedule, timedelta):
+        return str(schedule)
+    if isinstance(schedule, numbers.Real):
+        return _("Every %(seconds)s seconds") % {"seconds": schedule}
+    return str(schedule)
+
+
+def _candidate_log_files() -> Iterable[Path]:
+    log_dir = Path(settings.LOG_DIR)
+    candidates = [
+        log_dir / "celery.log",
+        log_dir / "celery-worker.log",
+        log_dir / "celery-beat.log",
+        log_dir / getattr(settings, "LOG_FILE_NAME", ""),
+    ]
+
+    seen: set[Path] = set()
+    for path in candidates:
+        if not path:
+            continue
+        if path in seen:
+            continue
+        seen.add(path)
+        if path.exists():
+            yield path
+
+
+def _read_log_entries(path: Path, *, max_lines: int) -> Iterator[CeleryLogEntry]:
+    try:
+        with path.open("r", encoding="utf-8", errors="ignore") as handle:
+            lines = deque(handle, maxlen=max_lines)
+    except OSError: # pragma: no cover - filesystem errors
+        return iter(())
+
+    return (
+        entry
+        for entry in (_parse_log_line(line, path.name) for line in lines)
+        if entry is not None
+    )
+
+
+def _parse_log_line(line: str, source: str) -> CeleryLogEntry | None:
+    match = _LOG_LINE_PATTERN.match(line)
+    if not match:
+        return None
+
+    timestamp = _parse_timestamp(match.group("timestamp"))
+    if timestamp is None:
+        return None
+
+    logger_name = match.group("logger").strip()
+    message = match.group("message").strip()
+    level = match.group("level").strip()
+
+    if not _is_celery_related(logger_name, message):
+        return None
+
+    return CeleryLogEntry(
+        timestamp=timestamp,
+        level=level,
+        logger=logger_name,
+        message=message,
+        source=source,
+    )
+
+
+def _parse_timestamp(value: str) -> datetime | None:
+    for fmt in ("%Y-%m-%d %H:%M:%S,%f", "%Y-%m-%d %H:%M:%S"):
+        try:
+            dt = datetime.strptime(value, fmt)
+            return _make_aware(dt)
+        except ValueError:
+            continue
+    try:
+        dt = datetime.fromisoformat(value)
+    except ValueError:
+        return None
+    return _make_aware(dt)
+
+
+def _is_celery_related(logger_name: str, message: str) -> bool:
+    logger_lower = logger_name.lower()
+    message_lower = message.lower()
+    if any(keyword in logger_lower for keyword in ("celery", "task", "beat")):
+        return True
+    return "celery" in message_lower or "task" in message_lower
+
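The module's public surface is iter_report_periods, resolve_period, collect_scheduled_tasks, and collect_celery_log_entries; everything else is a private helper. A minimal sketch of how an admin view could stitch them together (the function name and context shape here are hypothetical, not taken from this release):

    from django.utils import timezone

    from nodes import reports


    def celery_report_context(period_key: str | None) -> dict:
        # Resolve "1d"/"7d"/"30d", falling back to the first configured period.
        period = reports.resolve_period(period_key)
        now = timezone.now()

        # Tasks due before the window closes, plus recent Celery log lines.
        tasks = reports.collect_scheduled_tasks(now, now + period.delta)
        logs = reports.collect_celery_log_entries(now - period.delta, now)

        return {
            "period": period,
            "tasks": tasks,
            "log_entries": logs.entries,
            "log_sources": logs.checked_sources,
        }

Per _LOG_LINE_PATTERN, only lines shaped like "2025-01-15 08:30:00,123 [INFO] celery.app.trace: Task succeeded" are parsed, and _is_celery_related then keeps entries whose logger mentions celery, task, or beat, or whose message mentions celery or task.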
nodes/rfid_sync.py ADDED
@@ -0,0 +1,210 @@
+"""Helper utilities for synchronizing RFID records between nodes."""
+
+from __future__ import annotations
+
+from collections import OrderedDict
+from collections.abc import Iterable, Mapping
+from dataclasses import dataclass, field
+from typing import Any, TYPE_CHECKING
+
+from django.utils.dateparse import parse_datetime
+
+from core.models import EnergyAccount, RFID
+
+if TYPE_CHECKING: # pragma: no cover - imported only for type checking
+    from nodes.models import Node
+
+
+@dataclass(slots=True)
+class RFIDSyncOutcome:
+    """Result of applying an RFID payload to the local database."""
+
+    instance: RFID | None = None
+    created: bool = False
+    updated: bool = False
+    accounts_linked: int = 0
+    missing_accounts: list[str] = field(default_factory=list)
+    account_data_provided: bool = False
+    ok: bool = False
+    error: str | None = None
+
+
+def serialize_rfid(tag: RFID) -> dict[str, Any]:
+    """Return a dictionary representation suitable for the node API."""
+
+    accounts = list(tag.energy_accounts.all())
+    return {
+        "rfid": tag.rfid,
+        "custom_label": tag.custom_label,
+        "key_a": tag.key_a,
+        "key_b": tag.key_b,
+        "data": tag.data,
+        "key_a_verified": tag.key_a_verified,
+        "key_b_verified": tag.key_b_verified,
+        "allowed": tag.allowed,
+        "color": tag.color,
+        "kind": tag.kind,
+        "released": tag.released,
+        "external_command": tag.external_command,
+        "post_auth_command": tag.post_auth_command,
+        "last_seen_on": tag.last_seen_on.isoformat() if tag.last_seen_on else None,
+        "energy_accounts": [account.id for account in accounts],
+        "energy_account_names": [
+            account.name for account in accounts if account.name
+        ],
+    }
+
+
+def apply_rfid_payload(
+    entry: Mapping[str, Any], *, origin_node: Node | None = None
+) -> RFIDSyncOutcome:
+    """Create or update an :class:`RFID` instance from API payload data."""
+
+    outcome = RFIDSyncOutcome()
+    rfid_value = str(entry.get("rfid") or "").strip()
+    if not rfid_value:
+        outcome.error = "Missing RFID value"
+        return outcome
+
+    external_command = entry.get("external_command")
+    if not isinstance(external_command, str):
+        external_command = ""
+    else:
+        external_command = external_command.strip()
+    post_auth_command = entry.get("post_auth_command")
+    if not isinstance(post_auth_command, str):
+        post_auth_command = ""
+    else:
+        post_auth_command = post_auth_command.strip()
+
+    defaults: dict[str, Any] = {
+        "custom_label": entry.get("custom_label", ""),
+        "key_a": entry.get("key_a", RFID._meta.get_field("key_a").default),
+        "key_b": entry.get("key_b", RFID._meta.get_field("key_b").default),
+        "data": entry.get("data", []) or [],
+        "key_a_verified": bool(entry.get("key_a_verified", False)),
+        "key_b_verified": bool(entry.get("key_b_verified", False)),
+        "allowed": bool(entry.get("allowed", True)),
+        "color": entry.get("color", RFID.BLACK),
+        "kind": entry.get("kind", RFID.CLASSIC),
+        "released": bool(entry.get("released", False)),
+        "external_command": external_command,
+        "post_auth_command": post_auth_command,
+    }
+
+    if origin_node is not None:
+        defaults["origin_node"] = origin_node
+
+    if "last_seen_on" in entry:
+        last_seen = entry.get("last_seen_on")
+        defaults["last_seen_on"] = parse_datetime(last_seen) if last_seen else None
+
+    obj, created = RFID.update_or_create_from_code(rfid_value, defaults=defaults)
+
+    outcome.instance = obj
+    outcome.created = created
+    outcome.updated = not created
+    outcome.ok = True
+
+    accounts, missing, provided = _resolve_accounts(entry)
+    outcome.account_data_provided = provided
+    if provided:
+        obj.energy_accounts.set(accounts)
+        outcome.accounts_linked = len(accounts)
+    else:
+        outcome.accounts_linked = 0
+    outcome.missing_accounts = missing
+
+    return outcome
+
+
+def _resolve_accounts(
+    entry: Mapping[str, Any]
+) -> tuple[list[EnergyAccount], list[str], bool]:
+    """Return matching accounts and missing identifiers from payload data."""
+
+    has_account_data = "energy_accounts" in entry or "energy_account_names" in entry
+    if not has_account_data:
+        return [], [], False
+
+    accounts: list[EnergyAccount] = []
+    missing: list[str] = []
+    seen_ids: set[int] = set()
+    matched_names: "OrderedDict[str, None]" = OrderedDict()
+
+    # Resolve by numeric identifiers first to preserve ordering.
+    id_values = _coerce_values(entry.get("energy_accounts"))
+    parsed_ids: list[tuple[str, int]] = []
+    invalid_ids: list[str] = []
+    for raw in id_values:
+        try:
+            parsed_ids.append((raw, int(raw)))
+        except (TypeError, ValueError):
+            invalid_ids.append(raw)
+
+    existing_by_id = (
+        EnergyAccount.objects.in_bulk([pk for _, pk in parsed_ids])
+        if parsed_ids
+        else {}
+    )
+
+    for raw, pk in parsed_ids:
+        account = existing_by_id.get(pk)
+        if account and account.id not in seen_ids:
+            accounts.append(account)
+            seen_ids.add(account.id)
+            if account.name:
+                matched_names[account.name.strip().upper()] = None
+        else:
+            missing.append(raw)
+
+    missing.extend(invalid_ids)
+
+    # Resolve remaining accounts by name.
+    name_values = _coerce_values(entry.get("energy_account_names"))
+    processed_names: "OrderedDict[str, None]" = OrderedDict()
+    for raw in name_values:
+        normalized = raw.strip().upper()
+        if not normalized or normalized in processed_names:
+            continue
+        processed_names[normalized] = None
+        if normalized in matched_names:
+            continue
+        account = (
+            EnergyAccount.objects.filter(name__iexact=raw.strip())
+            .order_by("pk")
+            .first()
+        )
+        if account and account.id not in seen_ids:
+            accounts.append(account)
+            seen_ids.add(account.id)
+            if account.name:
+                matched_names[account.name.strip().upper()] = None
+        else:
+            missing.append(raw)
+
+    # Deduplicate missing entries while preserving order.
+    missing_unique = list(OrderedDict.fromkeys(raw for raw in missing if raw))
+
+    return accounts, missing_unique, True
+
+
+def _coerce_values(values: Any) -> list[str]:
+    """Return a list of trimmed string values from the payload field."""
+
+    if values is None:
+        return []
+    if isinstance(values, str):
+        values = values.split(",")
+    if isinstance(values, Mapping):
+        values = list(values.values())
+    if not isinstance(values, Iterable) or isinstance(values, (bytes, bytearray)):
+        return []
+
+    result: list[str] = []
+    for value in values:
+        text = str(value or "").strip()
+        if text:
+            result.append(text)
+    return result
+
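serialize_rfid and apply_rfid_payload form a round trip: the exporting node flattens a tag into a plain dict, the importing node applies that payload and reports what happened through RFIDSyncOutcome. A minimal sketch of the exchange (the tag code is hypothetical, and in practice the payload would travel over the node API rather than in-process):

    from core.models import RFID
    from nodes.rfid_sync import apply_rfid_payload, serialize_rfid

    # Sending side: flatten a tag into a plain dict for transport.
    tag = RFID.objects.get(rfid="04A1B2C3")  # hypothetical tag code
    payload = serialize_rfid(tag)

    # Receiving side: create or update the local record from the payload.
    outcome = apply_rfid_payload(payload)
    if outcome.ok:
        action = "created" if outcome.created else "updated"
        print(f"{action}; {outcome.accounts_linked} accounts linked; "
              f"missing: {outcome.missing_accounts}")
    else:
        print(f"sync failed: {outcome.error}")

Note that account links are only touched when the payload actually carries energy_accounts or energy_account_names; omitting both keys leaves existing links alone, which lets partial payloads be applied safely.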
nodes/signals.py ADDED
@@ -0,0 +1,18 @@
+"""Signal handlers for the :mod:`nodes` application."""
+
+from __future__ import annotations
+
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+
+from .classifiers import run_default_classifiers, should_skip_default_classifiers
+from .models import ContentSample
+
+
+@receiver(post_save, sender=ContentSample)
+def run_classifiers_on_sample_creation(sender, instance: ContentSample, created: bool, **_: object):
+    """Execute default classifiers whenever a new sample is stored."""
+
+    if not created or should_skip_default_classifiers():
+        return
+    run_default_classifiers(instance)
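
A post_save receiver only fires if its module is imported, so the new file is presumably wired up during app startup; the conventional place is AppConfig.ready() in nodes/apps.py (also changed in this release). A sketch of what that hookup usually looks like (the exact code in the package may differ):

    # Hypothetical excerpt in the usual Django style, not copied from the diff.
    from django.apps import AppConfig


    class NodesConfig(AppConfig):
        name = "nodes"

        def ready(self):
            # Importing the module registers the post_save receiver above.
            from . import signals  # noqa: F401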