arthexis 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/METADATA +222 -221
- arthexis-0.1.14.dist-info/RECORD +109 -0
- {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +43 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -69
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +25 -25
- config/offline.py +49 -49
- config/settings.py +691 -716
- config/settings_helpers.py +109 -0
- config/urls.py +171 -166
- config/wsgi.py +17 -17
- core/admin.py +3771 -2772
- core/admin_history.py +50 -50
- core/admindocs.py +151 -151
- core/apps.py +356 -272
- core/auto_upgrade.py +57 -57
- core/backends.py +265 -236
- core/changelog.py +342 -0
- core/entity.py +133 -133
- core/environment.py +61 -61
- core/fields.py +168 -168
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +178 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +100 -100
- core/mailer.py +85 -85
- core/middleware.py +91 -91
- core/models.py +3609 -2672
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +108 -108
- core/release.py +721 -350
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -149
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +315 -315
- core/system.py +752 -493
- core/tasks.py +408 -394
- core/temp_passwords.py +181 -181
- core/test_system_info.py +186 -139
- core/tests.py +2095 -1511
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +641 -633
- core/views.py +2175 -1382
- core/widgets.py +213 -51
- core/workgroup_urls.py +17 -17
- core/workgroup_views.py +94 -94
- nodes/admin.py +1720 -898
- nodes/apps.py +87 -70
- nodes/backends.py +160 -160
- nodes/dns.py +203 -203
- nodes/feature_checks.py +133 -133
- nodes/lcd.py +165 -165
- nodes/models.py +1737 -1416
- nodes/reports.py +411 -411
- nodes/rfid_sync.py +195 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +46 -46
- nodes/tests.py +3810 -2497
- nodes/urls.py +15 -13
- nodes/utils.py +121 -105
- nodes/views.py +683 -451
- ocpp/admin.py +948 -804
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1565 -1342
- ocpp/evcs.py +844 -931
- ocpp/evcs_discovery.py +158 -158
- ocpp/models.py +917 -915
- ocpp/reference_utils.py +42 -42
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -724
- ocpp/status_display.py +26 -0
- ocpp/store.py +601 -541
- ocpp/tasks.py +31 -31
- ocpp/test_export_import.py +130 -130
- ocpp/test_rfid.py +913 -702
- ocpp/tests.py +4445 -3485
- ocpp/transactions_io.py +189 -179
- ocpp/urls.py +50 -50
- ocpp/views.py +1479 -1151
- pages/admin.py +708 -536
- pages/apps.py +10 -10
- pages/checks.py +40 -40
- pages/context_processors.py +127 -119
- pages/defaults.py +13 -13
- pages/forms.py +198 -169
- pages/middleware.py +205 -153
- pages/models.py +607 -426
- pages/tests.py +2612 -2083
- pages/urls.py +25 -25
- pages/utils.py +12 -12
- pages/views.py +1165 -1120
- arthexis-0.1.12.dist-info/RECORD +0 -102
- nodes/actions.py +0 -70
- {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/WHEEL +0 -0
- {arthexis-0.1.12.dist-info → arthexis-0.1.14.dist-info}/top_level.txt +0 -0
nodes/rfid_sync.py
ADDED
@@ -0,0 +1,195 @@

"""Helper utilities for synchronizing RFID records between nodes."""

from __future__ import annotations

from collections import OrderedDict
from collections.abc import Iterable, Mapping
from dataclasses import dataclass, field
from typing import Any, TYPE_CHECKING

from django.utils.dateparse import parse_datetime

from core.models import EnergyAccount, RFID

if TYPE_CHECKING:  # pragma: no cover - imported only for type checking
    from nodes.models import Node


@dataclass(slots=True)
class RFIDSyncOutcome:
    """Result of applying an RFID payload to the local database."""

    instance: RFID | None = None
    created: bool = False
    updated: bool = False
    accounts_linked: int = 0
    missing_accounts: list[str] = field(default_factory=list)
    account_data_provided: bool = False
    ok: bool = False
    error: str | None = None


def serialize_rfid(tag: RFID) -> dict[str, Any]:
    """Return a dictionary representation suitable for the node API."""

    accounts = list(tag.energy_accounts.all())
    return {
        "rfid": tag.rfid,
        "custom_label": tag.custom_label,
        "key_a": tag.key_a,
        "key_b": tag.key_b,
        "data": tag.data,
        "key_a_verified": tag.key_a_verified,
        "key_b_verified": tag.key_b_verified,
        "allowed": tag.allowed,
        "color": tag.color,
        "kind": tag.kind,
        "released": tag.released,
        "last_seen_on": tag.last_seen_on.isoformat() if tag.last_seen_on else None,
        "energy_accounts": [account.id for account in accounts],
        "energy_account_names": [
            account.name for account in accounts if account.name
        ],
    }


def apply_rfid_payload(
    entry: Mapping[str, Any], *, origin_node: Node | None = None
) -> RFIDSyncOutcome:
    """Create or update an :class:`RFID` instance from API payload data."""

    outcome = RFIDSyncOutcome()
    rfid_value = str(entry.get("rfid") or "").strip()
    if not rfid_value:
        outcome.error = "Missing RFID value"
        return outcome

    defaults: dict[str, Any] = {
        "custom_label": entry.get("custom_label", ""),
        "key_a": entry.get("key_a", RFID._meta.get_field("key_a").default),
        "key_b": entry.get("key_b", RFID._meta.get_field("key_b").default),
        "data": entry.get("data", []) or [],
        "key_a_verified": bool(entry.get("key_a_verified", False)),
        "key_b_verified": bool(entry.get("key_b_verified", False)),
        "allowed": bool(entry.get("allowed", True)),
        "color": entry.get("color", RFID.BLACK),
        "kind": entry.get("kind", RFID.CLASSIC),
        "released": bool(entry.get("released", False)),
    }

    if origin_node is not None:
        defaults["origin_node"] = origin_node

    if "last_seen_on" in entry:
        last_seen = entry.get("last_seen_on")
        defaults["last_seen_on"] = parse_datetime(last_seen) if last_seen else None

    obj, created = RFID.objects.update_or_create(rfid=rfid_value, defaults=defaults)

    outcome.instance = obj
    outcome.created = created
    outcome.updated = not created
    outcome.ok = True

    accounts, missing, provided = _resolve_accounts(entry)
    outcome.account_data_provided = provided
    if provided:
        obj.energy_accounts.set(accounts)
        outcome.accounts_linked = len(accounts)
    else:
        outcome.accounts_linked = 0
    outcome.missing_accounts = missing

    return outcome


def _resolve_accounts(
    entry: Mapping[str, Any]
) -> tuple[list[EnergyAccount], list[str], bool]:
    """Return matching accounts and missing identifiers from payload data."""

    has_account_data = "energy_accounts" in entry or "energy_account_names" in entry
    if not has_account_data:
        return [], [], False

    accounts: list[EnergyAccount] = []
    missing: list[str] = []
    seen_ids: set[int] = set()
    matched_names: "OrderedDict[str, None]" = OrderedDict()

    # Resolve by numeric identifiers first to preserve ordering.
    id_values = _coerce_values(entry.get("energy_accounts"))
    parsed_ids: list[tuple[str, int]] = []
    invalid_ids: list[str] = []
    for raw in id_values:
        try:
            parsed_ids.append((raw, int(raw)))
        except (TypeError, ValueError):
            invalid_ids.append(raw)

    existing_by_id = (
        EnergyAccount.objects.in_bulk([pk for _, pk in parsed_ids])
        if parsed_ids
        else {}
    )

    for raw, pk in parsed_ids:
        account = existing_by_id.get(pk)
        if account and account.id not in seen_ids:
            accounts.append(account)
            seen_ids.add(account.id)
            if account.name:
                matched_names[account.name.strip().upper()] = None
        else:
            missing.append(raw)

    missing.extend(invalid_ids)

    # Resolve remaining accounts by name.
    name_values = _coerce_values(entry.get("energy_account_names"))
    processed_names: "OrderedDict[str, None]" = OrderedDict()
    for raw in name_values:
        normalized = raw.strip().upper()
        if not normalized or normalized in processed_names:
            continue
        processed_names[normalized] = None
        if normalized in matched_names:
            continue
        account = (
            EnergyAccount.objects.filter(name__iexact=raw.strip())
            .order_by("pk")
            .first()
        )
        if account and account.id not in seen_ids:
            accounts.append(account)
            seen_ids.add(account.id)
            if account.name:
                matched_names[account.name.strip().upper()] = None
        else:
            missing.append(raw)

    # Deduplicate missing entries while preserving order.
    missing_unique = list(OrderedDict.fromkeys(raw for raw in missing if raw))

    return accounts, missing_unique, True


def _coerce_values(values: Any) -> list[str]:
    """Return a list of trimmed string values from the payload field."""

    if values is None:
        return []
    if isinstance(values, str):
        values = values.split(",")
    if isinstance(values, Mapping):
        values = list(values.values())
    if not isinstance(values, Iterable) or isinstance(values, (bytes, bytearray)):
        return []

    result: list[str] = []
    for value in values:
        text = str(value or "").strip()
        if text:
            result.append(text)
    return result
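These helpers form a serialize/apply round trip for the node sync API. The sketch below is a minimal, non-authoritative usage example, assuming a configured Django environment with the arthexis apps installed; the payload loop and the Node.objects.first() lookup are illustrative placeholders, not part of the package's documented API.

# Illustrative round trip; requires Django settings for the arthexis project.
from core.models import RFID
from nodes.models import Node
from nodes.rfid_sync import apply_rfid_payload, serialize_rfid

# Sending side: build the payload (the released=True filter is chosen for illustration).
payload = [serialize_rfid(tag) for tag in RFID.objects.filter(released=True)]

# Receiving side: apply each entry and inspect the outcome dataclass.
origin = Node.objects.first()  # placeholder; real code would resolve the peer node
for entry in payload:
    outcome = apply_rfid_payload(entry, origin_node=origin)
    if not outcome.ok:
        print(f"skipped: {outcome.error}")
    elif outcome.missing_accounts:
        print(f"{outcome.instance.rfid}: unmatched accounts {outcome.missing_accounts}")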
nodes/signals.py
ADDED
@@ -0,0 +1,18 @@

"""Signal handlers for the :mod:`nodes` application."""

from __future__ import annotations

from django.db.models.signals import post_save
from django.dispatch import receiver

from .classifiers import run_default_classifiers, should_skip_default_classifiers
from .models import ContentSample


@receiver(post_save, sender=ContentSample)
def run_classifiers_on_sample_creation(sender, instance: ContentSample, created: bool, **_: object):
    """Execute default classifiers whenever a new sample is stored."""

    if not created or should_skip_default_classifiers():
        return
    run_default_classifiers(instance)
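A post_save receiver only fires if its module is imported at startup. Django's usual wiring imports the signals module from the app config's ready() hook; the sketch below shows that generic pattern only and is not copied from this package's nodes/apps.py (which also changed in this release but is not reproduced in this section).

# Generic Django pattern, assumed rather than taken from nodes/apps.py.
from django.apps import AppConfig


class NodesConfig(AppConfig):  # hypothetical config class name
    name = "nodes"

    def ready(self):
        # Importing the module registers the post_save receiver defined in signals.py.
        from . import signals  # noqa: F401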
nodes/tasks.py
CHANGED
@@ -1,46 +1,46 @@

(The hunk removes and re-adds all 46 lines with identical content as rendered here; the resulting file is shown once below.)

import logging
from pathlib import Path

import pyperclip
from pyperclip import PyperclipException
from celery import shared_task

from .models import ContentSample, Node
from .utils import capture_screenshot, save_screenshot

logger = logging.getLogger(__name__)


@shared_task
def sample_clipboard() -> None:
    """Save current clipboard contents to a :class:`ContentSample` entry."""
    try:
        content = pyperclip.paste()
    except PyperclipException as exc:  # pragma: no cover - depends on OS clipboard
        logger.error("Clipboard error: %s", exc)
        return
    if not content:
        logger.info("Clipboard is empty")
        return
    if ContentSample.objects.filter(content=content, kind=ContentSample.TEXT).exists():
        logger.info("Duplicate clipboard content; sample not created")
        return
    node = Node.get_local()
    ContentSample.objects.create(content=content, node=node, kind=ContentSample.TEXT)


@shared_task
def capture_node_screenshot(
    url: str | None = None, port: int = 8000, method: str = "TASK"
) -> str:
    """Capture a screenshot of ``url`` and record it as a :class:`ContentSample`."""
    if url is None:
        url = f"http://localhost:{port}"
    try:
        path: Path = capture_screenshot(url)
    except Exception as exc:  # pragma: no cover - depends on selenium setup
        logger.error("Screenshot capture failed: %s", exc)
        return ""
    node = Node.get_local()
    save_screenshot(path, node=node, method=method)
    return str(path)
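Both functions are plain Celery shared tasks, so they can be queued ad hoc or placed on a beat schedule. The sketch below is illustrative only, assuming a running Celery worker bound to the project's app; the port value, schedule interval, and schedule entry name are made up for the example.

# Illustrative invocation; requires a Celery worker for the arthexis project.
from nodes.tasks import capture_node_screenshot, sample_clipboard

# Ad-hoc execution through the worker queue.
sample_clipboard.delay()
result = capture_node_screenshot.delay(port=8888, method="TASK")

# A periodic variant could go in the project's Celery configuration, e.g.:
# app.conf.beat_schedule = {
#     "sample-clipboard-every-5-minutes": {
#         "task": "nodes.tasks.sample_clipboard",
#         "schedule": 300.0,
#     },
# }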