arthexis 0.1.16__py3-none-any.whl → 0.1.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/METADATA +95 -41
- arthexis-0.1.28.dist-info/RECORD +112 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +21 -30
- config/settings_helpers.py +176 -1
- config/urls.py +69 -1
- core/admin.py +805 -473
- core/apps.py +6 -8
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/celery_utils.py +73 -0
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1825 -218
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +285 -4
- core/tasks.py +439 -138
- core/test_system_info.py +43 -5
- core/tests.py +516 -18
- core/user_data.py +94 -21
- core/views.py +348 -186
- nodes/admin.py +904 -67
- nodes/apps.py +12 -1
- nodes/feature_checks.py +30 -0
- nodes/models.py +800 -127
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +98 -3
- nodes/tests.py +1381 -152
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1382 -152
- ocpp/admin.py +1970 -152
- ocpp/consumers.py +839 -34
- ocpp/models.py +968 -17
- ocpp/network.py +398 -0
- ocpp/store.py +411 -43
- ocpp/tasks.py +261 -3
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +194 -6
- ocpp/tests.py +1918 -87
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +8 -3
- ocpp/views.py +700 -53
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +28 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +86 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +1050 -126
- pages/urls.py +14 -2
- pages/utils.py +70 -0
- pages/views.py +622 -56
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/top_level.txt +0 -0
nodes/views.py
CHANGED
@@ -1,37 +1,222 @@
 import base64
 import ipaddress
 import json
+import re
+import secrets
 import socket
+import uuid
 from collections.abc import Mapping
+from datetime import timedelta
 
-from django.
-from django.http.request import split_domain_port
-from django.views.decorators.csrf import csrf_exempt
-from django.shortcuts import get_object_or_404
+from django.apps import apps
 from django.conf import settings
+from django.contrib.auth import authenticate, get_user_model, login
+from django.contrib.auth.models import Group, Permission
+from django.core import serializers
+from django.core.cache import cache
+from django.core.signing import BadSignature, SignatureExpired, TimestampSigner
+from django.http import HttpResponse, JsonResponse
+from django.http.request import split_domain_port
+from django.shortcuts import get_object_or_404, redirect
 from django.urls import reverse
-from
+from django.utils import timezone
+from django.utils.dateparse import parse_datetime
 from django.utils.cache import patch_vary_headers
+from django.utils.http import url_has_allowed_host_and_scheme
+from django.views.decorators.csrf import csrf_exempt
+from pathlib import Path
+from urllib.parse import urlsplit
 
 from utils.api import api_login_required
 
 from cryptography.hazmat.primitives import serialization, hashes
 from cryptography.hazmat.primitives.asymmetric import padding
 
+from django.db.models import Q
+
 from core.models import RFID
+from ocpp import store
+from ocpp.models import Charger
+from ocpp.network import (
+    apply_remote_charger_payload,
+    serialize_charger_for_network,
+    sync_transactions_payload,
+)
+from ocpp.transactions_io import export_transactions
+from asgiref.sync import async_to_sync
 
 from .rfid_sync import apply_rfid_payload, serialize_rfid
 
 from .models import (
     Node,
     NetMessage,
-
+    PendingNetMessage,
     NodeRole,
     node_information_updated,
 )
 from .utils import capture_screenshot, save_screenshot
 
 
+PROXY_TOKEN_SALT = "nodes.proxy.session"
+PROXY_TOKEN_TIMEOUT = 300
+PROXY_CACHE_PREFIX = "nodes:proxy-session:"
+
+
+def _load_signed_node(
+    request,
+    requester_id: str,
+    *,
+    mac_address: str | None = None,
+    public_key: str | None = None,
+):
+    signature = request.headers.get("X-Signature")
+    if not signature:
+        return None, JsonResponse({"detail": "signature required"}, status=403)
+    try:
+        signature_bytes = base64.b64decode(signature)
+    except Exception:
+        return None, JsonResponse({"detail": "invalid signature"}, status=403)
+
+    candidates: list[Node] = []
+    seen: set[int] = set()
+
+    lookup_values: list[tuple[str, str]] = []
+    if requester_id:
+        lookup_values.append(("uuid", requester_id))
+    if mac_address:
+        lookup_values.append(("mac_address__iexact", mac_address))
+    if public_key:
+        lookup_values.append(("public_key", public_key))
+
+    for field, value in lookup_values:
+        node = Node.objects.filter(**{field: value}).first()
+        if not node or not node.public_key:
+            continue
+        if node.pk is not None and node.pk in seen:
+            continue
+        if node.pk is not None:
+            seen.add(node.pk)
+        candidates.append(node)
+
+    if not candidates:
+        return None, JsonResponse({"detail": "unknown requester"}, status=403)
+
+    for node in candidates:
+        try:
+            loaded_key = serialization.load_pem_public_key(node.public_key.encode())
+            loaded_key.verify(
+                signature_bytes,
+                request.body,
+                padding.PKCS1v15(),
+                hashes.SHA256(),
+            )
+        except Exception:
+            continue
+        return node, None
+
+    return None, JsonResponse({"detail": "invalid signature"}, status=403)
+
+
+def _clean_requester_hint(value, *, strip: bool = True) -> str | None:
+    if not isinstance(value, str):
+        return None
+    cleaned = value.strip() if strip else value
+    if not cleaned:
+        return None
+    return cleaned
+
+
+def _sanitize_proxy_target(target: str | None, request) -> str:
+    default_target = reverse("admin:index")
+    if not target:
+        return default_target
+    candidate = str(target).strip()
+    if not candidate:
+        return default_target
+    if candidate.startswith(("http://", "https://")):
+        parsed = urlsplit(candidate)
+        if not parsed.path:
+            return default_target
+        allowed = url_has_allowed_host_and_scheme(
+            candidate,
+            allowed_hosts={request.get_host()},
+            require_https=request.is_secure(),
+        )
+        if not allowed:
+            return default_target
+        path = parsed.path
+        if parsed.query:
+            path = f"{path}?{parsed.query}"
+        return path
+    if not candidate.startswith("/"):
+        candidate = f"/{candidate}"
+    return candidate
+
+
+def _assign_groups_and_permissions(user, payload: Mapping) -> None:
+    groups = payload.get("groups", [])
+    group_objs: list[Group] = []
+    if isinstance(groups, (list, tuple)):
+        for name in groups:
+            if not isinstance(name, str):
+                continue
+            cleaned = name.strip()
+            if not cleaned:
+                continue
+            group, _ = Group.objects.get_or_create(name=cleaned)
+            group_objs.append(group)
+    if group_objs or user.groups.exists():
+        user.groups.set(group_objs)
+
+    permissions = payload.get("permissions", [])
+    perm_objs: list[Permission] = []
+    if isinstance(permissions, (list, tuple)):
+        for label in permissions:
+            if not isinstance(label, str):
+                continue
+            app_label, _, codename = label.partition(".")
+            if not app_label or not codename:
+                continue
+            perm = Permission.objects.filter(
+                content_type__app_label=app_label, codename=codename
+            ).first()
+            if perm:
+                perm_objs.append(perm)
+    if perm_objs:
+        user.user_permissions.set(perm_objs)
+
+
+def _normalize_requested_chargers(values) -> list[tuple[str, int | None, object]]:
+    if not isinstance(values, list):
+        return []
+
+    normalized: list[tuple[str, int | None, object]] = []
+    for entry in values:
+        if not isinstance(entry, Mapping):
+            continue
+        serial = Charger.normalize_serial(entry.get("charger_id"))
+        if not serial or Charger.is_placeholder_serial(serial):
+            continue
+        connector = entry.get("connector_id")
+        if connector in ("", None):
+            connector_value = None
+        elif isinstance(connector, int):
+            connector_value = connector
+        else:
+            try:
+                connector_value = int(str(connector))
+            except (TypeError, ValueError):
+                connector_value = None
+        since_raw = entry.get("since")
+        since_dt = None
+        if isinstance(since_raw, str):
+            since_dt = parse_datetime(since_raw)
+            if since_dt is not None and timezone.is_naive(since_dt):
+                since_dt = timezone.make_aware(since_dt, timezone.get_current_timezone())
+        normalized.append((serial, connector_value, since_dt))
+    return normalized
+
+
 def _get_client_ip(request):
     """Return the client IP from the request headers."""
 
@@ -104,6 +289,8 @@ def _get_host_domain(request) -> str:
     domain, _ = split_domain_port(host)
     if not domain:
         return ""
+    if domain.lower() == "localhost":
+        return ""
     try:
         ipaddress.ip_address(domain)
     except ValueError:
@@ -111,6 +298,60 @@ def _get_host_domain(request) -> str:
         return ""
 
 
+def _normalize_port(value: str | int | None) -> int | None:
+    """Return ``value`` as an integer port number when valid."""
+
+    if value in (None, ""):
+        return None
+    try:
+        port = int(value)
+    except (TypeError, ValueError):
+        return None
+    if port <= 0 or port > 65535:
+        return None
+    return port
+
+
+def _get_host_port(request) -> int | None:
+    """Return the port implied by the current request if available."""
+
+    forwarded_port = request.headers.get("X-Forwarded-Port") or request.META.get(
+        "HTTP_X_FORWARDED_PORT"
+    )
+    port = _normalize_port(forwarded_port)
+    if port:
+        return port
+
+    try:
+        host = request.get_host()
+    except Exception:  # pragma: no cover - defensive
+        host = ""
+    if host:
+        _, host_port = split_domain_port(host)
+        port = _normalize_port(host_port)
+        if port:
+            return port
+
+    forwarded_proto = request.headers.get("X-Forwarded-Proto", "")
+    if forwarded_proto:
+        scheme = forwarded_proto.split(",")[0].strip().lower()
+        if scheme == "https":
+            return 443
+        if scheme == "http":
+            return 80
+
+    if request.is_secure():
+        return 443
+
+    scheme = getattr(request, "scheme", "")
+    if scheme.lower() == "https":
+        return 443
+    if scheme.lower() == "http":
+        return 80
+
+    return None
+
+
 def _get_advertised_address(request, node) -> str:
     """Return the best address for the client to reach this node."""
 
@@ -121,7 +362,7 @@ def _get_advertised_address(request, node) -> str:
     host_ip = _get_host_ip(request)
     if host_ip:
         return host_ip
-    return node.address
+    return node.get_primary_contact() or node.address or node.hostname
 
 
 @api_login_required
@@ -131,7 +372,10 @@ def node_list(request):
     nodes = [
         {
             "hostname": node.hostname,
+            "network_hostname": node.network_hostname,
             "address": node.address,
+            "ipv4_address": node.ipv4_address,
+            "ipv6_address": node.ipv6_address,
             "port": node.port,
             "last_seen": node.last_seen,
             "features": list(node.features.values_list("slug", flat=True)),
@@ -152,22 +396,42 @@ def node_info(request):
     token = request.GET.get("token", "")
     host_domain = _get_host_domain(request)
     advertised_address = _get_advertised_address(request, node)
+    advertised_port = node.port
+    if host_domain:
+        host_port = _get_host_port(request)
+        if host_port:
+            advertised_port = host_port
     if host_domain:
         hostname = host_domain
-
+        local_aliases = {
+            value
+            for value in (
+                node.hostname,
+                node.network_hostname,
+                node.address,
+                node.public_endpoint,
+            )
+            if value
+        }
+        if advertised_address and advertised_address not in local_aliases:
             address = advertised_address
         else:
             address = host_domain
     else:
         hostname = node.hostname
-        address = advertised_address
+        address = advertised_address or node.address or node.network_hostname or ""
     data = {
         "hostname": hostname,
+        "network_hostname": node.network_hostname,
         "address": address,
-        "
+        "ipv4_address": node.ipv4_address,
+        "ipv6_address": node.ipv6_address,
+        "port": advertised_port,
         "mac_address": node.mac_address,
         "public_key": node.public_key,
         "features": list(node.features.values_list("slug", flat=True)),
+        "role": node.role.name if node.role_id else "",
+        "contact_hosts": node.get_remote_host_candidates(),
     }
 
     if token:
@@ -211,7 +475,14 @@ def _add_cors_headers(request, response):
 def _node_display_name(node: Node) -> str:
     """Return a human-friendly name for ``node`` suitable for messaging."""
 
-    for attr in (
+    for attr in (
+        "hostname",
+        "network_hostname",
+        "public_endpoint",
+        "address",
+        "ipv6_address",
+        "ipv4_address",
+    ):
         value = getattr(node, attr, "") or ""
         value = value.strip()
         if value:
@@ -263,10 +534,13 @@ def register_node(request):
     else:
         features = data.get("features")
 
-    hostname = data.get("hostname")
-    address = data.get("address")
-
-
+    hostname = (data.get("hostname") or "").strip()
+    address = (data.get("address") or "").strip()
+    network_hostname = (data.get("network_hostname") or "").strip()
+    ipv4_address = (data.get("ipv4_address") or "").strip()
+    ipv6_address = (data.get("ipv6_address") or "").strip()
+    port = data.get("port", 8888)
+    mac_address = (data.get("mac_address") or "").strip()
     public_key = data.get("public_key")
     token = data.get("token")
     signature = data.get("signature")
@@ -282,12 +556,27 @@ def register_node(request):
         Node.normalize_relation(raw_relation) if relation_present else None
     )
 
-    if not hostname or not
+    if not hostname or not mac_address:
         response = JsonResponse(
-            {"detail": "hostname
+            {"detail": "hostname and mac_address required"}, status=400
         )
         return _add_cors_headers(request, response)
 
+    if not any([address, network_hostname, ipv4_address, ipv6_address]):
+        response = JsonResponse(
+            {
+                "detail": "at least one of address, network_hostname, "
+                "ipv4_address or ipv6_address must be provided",
+            },
+            status=400,
+        )
+        return _add_cors_headers(request, response)
+
+    try:
+        port = int(port)
+    except (TypeError, ValueError):
+        port = 8888
+
     verified = False
     if public_key and token and signature:
         try:
@@ -308,11 +597,36 @@ def register_node(request):
         return _add_cors_headers(request, response)
 
     mac_address = mac_address.lower()
+    address_value = address or None
+    ipv4_value = ipv4_address or None
+    ipv6_value = ipv6_address or None
+
+    for candidate in (address, network_hostname, hostname):
+        candidate = (candidate or "").strip()
+        if not candidate:
+            continue
+        try:
+            parsed_ip = ipaddress.ip_address(candidate)
+        except ValueError:
+            continue
+        if parsed_ip.version == 4 and not ipv4_value:
+            ipv4_value = str(parsed_ip)
+        elif parsed_ip.version == 6 and not ipv6_value:
+            ipv6_value = str(parsed_ip)
     defaults = {
         "hostname": hostname,
-        "
+        "network_hostname": network_hostname,
+        "address": address_value,
+        "ipv4_address": ipv4_value,
+        "ipv6_address": ipv6_value,
         "port": port,
     }
+    role_name = str(data.get("role") or data.get("role_name") or "").strip()
+    desired_role = None
+    if role_name and (verified or request.user.is_authenticated):
+        desired_role = NodeRole.objects.filter(name=role_name).first()
+        if desired_role:
+            defaults["role"] = desired_role
     if verified:
         defaults["public_key"] = public_key
     if installed_version is not None:
@@ -329,10 +643,18 @@ def register_node(request):
     if not created:
         previous_version = (node.installed_version or "").strip()
         previous_revision = (node.installed_revision or "").strip()
-
-
-
-
+        update_fields = []
+        for field, value in (
+            ("hostname", hostname),
+            ("network_hostname", network_hostname),
+            ("address", address_value),
+            ("ipv4_address", ipv4_value),
+            ("ipv6_address", ipv6_value),
+            ("port", port),
+        ):
+            if getattr(node, field) != value:
+                setattr(node, field, value)
+                update_fields.append(field)
         if verified:
             node.public_key = public_key
             update_fields.append("public_key")
@@ -347,7 +669,11 @@ def register_node(request):
         if relation_value is not None and node.current_relation != relation_value:
             node.current_relation = relation_value
             update_fields.append("current_relation")
-        node.
+        if desired_role and node.role_id != desired_role.id:
+            node.role = desired_role
+            update_fields.append("role")
+        if update_fields:
+            node.save(update_fields=update_fields)
         current_version = (node.installed_version or "").strip()
         current_revision = (node.installed_revision or "").strip()
         node_information_updated.send(
@@ -366,7 +692,11 @@ def register_node(request):
         feature_list = list(features)
         node.update_manual_features(feature_list)
         response = JsonResponse(
-            {
+            {
+                "id": node.id,
+                "uuid": str(node.uuid),
+                "detail": f"Node already exists (id: {node.id})",
+            }
         )
         return _add_cors_headers(request, response)
 
@@ -391,7 +721,7 @@ def register_node(request):
 
     _announce_visitor_join(node, relation_value)
 
-    response = JsonResponse({"id": node.id})
+    response = JsonResponse({"id": node.id, "uuid": str(node.uuid)})
     return _add_cors_headers(request, response)
 
 
@@ -423,26 +753,21 @@ def export_rfids(request):
         return JsonResponse({"detail": "invalid json"}, status=400)
 
     requester = payload.get("requester")
-    signature = request.headers.get("X-Signature")
     if not requester:
         return JsonResponse({"detail": "requester required"}, status=400)
-    if not signature:
-        return JsonResponse({"detail": "signature required"}, status=403)
 
-
-
-
-
-
-
-
-
-
-
-
-
-    except Exception:
-        return JsonResponse({"detail": "invalid signature"}, status=403)
+    requester_mac = _clean_requester_hint(payload.get("requester_mac"))
+    requester_public_key = _clean_requester_hint(
+        payload.get("requester_public_key"), strip=False
+    )
+    node, error_response = _load_signed_node(
+        request,
+        requester,
+        mac_address=requester_mac,
+        public_key=requester_public_key,
+    )
+    if error_response is not None:
+        return error_response
 
     tags = [serialize_rfid(tag) for tag in RFID.objects.all().order_by("label_id")]
 
@@ -462,26 +787,21 @@ def import_rfids(request):
         return JsonResponse({"detail": "invalid json"}, status=400)
 
     requester = payload.get("requester")
-    signature = request.headers.get("X-Signature")
     if not requester:
         return JsonResponse({"detail": "requester required"}, status=400)
-    if not signature:
-        return JsonResponse({"detail": "signature required"}, status=403)
 
-
-
-
-
-
-
-
-
-
-
-
-
-    except Exception:
-        return JsonResponse({"detail": "invalid signature"}, status=403)
+    requester_mac = _clean_requester_hint(payload.get("requester_mac"))
+    requester_public_key = _clean_requester_hint(
+        payload.get("requester_public_key"), strip=False
+    )
+    node, error_response = _load_signed_node(
+        request,
+        requester,
+        mac_address=requester_mac,
+        public_key=requester_public_key,
+    )
+    if error_response is not None:
+        return error_response
 
     rfids = payload.get("rfids", [])
     if not isinstance(rfids, list):
@@ -522,6 +842,914 @@ def import_rfids(request):
|
|
|
522
842
|
)
|
|
523
843
|
|
|
524
844
|
|
|
845
|
+
@csrf_exempt
|
|
846
|
+
def network_chargers(request):
|
|
847
|
+
"""Return serialized charger information for trusted peers."""
|
|
848
|
+
|
|
849
|
+
if request.method != "POST":
|
|
850
|
+
return JsonResponse({"detail": "POST required"}, status=405)
|
|
851
|
+
|
|
852
|
+
try:
|
|
853
|
+
body = json.loads(request.body.decode() or "{}")
|
|
854
|
+
except json.JSONDecodeError:
|
|
855
|
+
return JsonResponse({"detail": "invalid json"}, status=400)
|
|
856
|
+
|
|
857
|
+
requester = body.get("requester")
|
|
858
|
+
if not requester:
|
|
859
|
+
return JsonResponse({"detail": "requester required"}, status=400)
|
|
860
|
+
|
|
861
|
+
requester_mac = _clean_requester_hint(body.get("requester_mac"))
|
|
862
|
+
requester_public_key = _clean_requester_hint(
|
|
863
|
+
body.get("requester_public_key"), strip=False
|
|
864
|
+
)
|
|
865
|
+
|
|
866
|
+
node, error_response = _load_signed_node(
|
|
867
|
+
request,
|
|
868
|
+
requester,
|
|
869
|
+
mac_address=requester_mac,
|
|
870
|
+
public_key=requester_public_key,
|
|
871
|
+
)
|
|
872
|
+
if error_response is not None:
|
|
873
|
+
return error_response
|
|
874
|
+
|
|
875
|
+
requested = _normalize_requested_chargers(body.get("chargers") or [])
|
|
876
|
+
|
|
877
|
+
qs = Charger.objects.all()
|
|
878
|
+
local_node = Node.get_local()
|
|
879
|
+
if local_node:
|
|
880
|
+
qs = qs.filter(Q(node_origin=local_node) | Q(node_origin__isnull=True))
|
|
881
|
+
|
|
882
|
+
if requested:
|
|
883
|
+
filters = Q()
|
|
884
|
+
for serial, connector_value, _ in requested:
|
|
885
|
+
if connector_value is None:
|
|
886
|
+
filters |= Q(charger_id=serial, connector_id__isnull=True)
|
|
887
|
+
else:
|
|
888
|
+
filters |= Q(charger_id=serial, connector_id=connector_value)
|
|
889
|
+
qs = qs.filter(filters)
|
|
890
|
+
|
|
891
|
+
chargers = [serialize_charger_for_network(charger) for charger in qs]
|
|
892
|
+
|
|
893
|
+
include_transactions = bool(body.get("include_transactions"))
|
|
894
|
+
response_data: dict[str, object] = {"chargers": chargers}
|
|
895
|
+
|
|
896
|
+
if include_transactions:
|
|
897
|
+
serials = [serial for serial, _, _ in requested] or list(
|
|
898
|
+
{charger["charger_id"] for charger in chargers}
|
|
899
|
+
)
|
|
900
|
+
since_values = [since for _, _, since in requested if since]
|
|
901
|
+
start = min(since_values) if since_values else None
|
|
902
|
+
tx_payload = export_transactions(start=start, chargers=serials or None)
|
|
903
|
+
response_data["transactions"] = tx_payload
|
|
904
|
+
|
|
905
|
+
return JsonResponse(response_data)
|
|
906
|
+
|
|
907
|
+
|
|
908
|
+
@csrf_exempt
|
|
909
|
+
def forward_chargers(request):
|
|
910
|
+
"""Receive forwarded charger metadata and transactions from trusted peers."""
|
|
911
|
+
|
|
912
|
+
if request.method != "POST":
|
|
913
|
+
return JsonResponse({"detail": "POST required"}, status=405)
|
|
914
|
+
|
|
915
|
+
try:
|
|
916
|
+
body = json.loads(request.body.decode() or "{}")
|
|
917
|
+
except json.JSONDecodeError:
|
|
918
|
+
return JsonResponse({"detail": "invalid json"}, status=400)
|
|
919
|
+
|
|
920
|
+
requester = body.get("requester")
|
|
921
|
+
if not requester:
|
|
922
|
+
return JsonResponse({"detail": "requester required"}, status=400)
|
|
923
|
+
|
|
924
|
+
requester_mac = _clean_requester_hint(body.get("requester_mac"))
|
|
925
|
+
requester_public_key = _clean_requester_hint(
|
|
926
|
+
body.get("requester_public_key"), strip=False
|
|
927
|
+
)
|
|
928
|
+
|
|
929
|
+
node, error_response = _load_signed_node(
|
|
930
|
+
request,
|
|
931
|
+
requester,
|
|
932
|
+
mac_address=requester_mac,
|
|
933
|
+
public_key=requester_public_key,
|
|
934
|
+
)
|
|
935
|
+
if error_response is not None:
|
|
936
|
+
return error_response
|
|
937
|
+
|
|
938
|
+
processed = 0
|
|
939
|
+
chargers_payload = body.get("chargers", [])
|
|
940
|
+
if not isinstance(chargers_payload, list):
|
|
941
|
+
chargers_payload = []
|
|
942
|
+
for entry in chargers_payload:
|
|
943
|
+
if not isinstance(entry, Mapping):
|
|
944
|
+
continue
|
|
945
|
+
charger = apply_remote_charger_payload(node, entry)
|
|
946
|
+
if charger:
|
|
947
|
+
processed += 1
|
|
948
|
+
|
|
949
|
+
imported = 0
|
|
950
|
+
transactions_payload = body.get("transactions")
|
|
951
|
+
if isinstance(transactions_payload, Mapping):
|
|
952
|
+
imported = sync_transactions_payload(transactions_payload)
|
|
953
|
+
|
|
954
|
+
return JsonResponse({"status": "ok", "chargers": processed, "transactions": imported})
|
|
955
|
+
|
|
956
|
+
|
|
957
|
+
def _require_local_origin(charger: Charger) -> bool:
|
|
958
|
+
local = Node.get_local()
|
|
959
|
+
if not local:
|
|
960
|
+
return charger.node_origin_id is None
|
|
961
|
+
if charger.node_origin_id is None:
|
|
962
|
+
return True
|
|
963
|
+
return charger.node_origin_id == local.pk
|
|
964
|
+
|
|
965
|
+
|
|
966
|
+
def _send_trigger_status(
|
|
967
|
+
charger: Charger, payload: Mapping | None = None
|
|
968
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
969
|
+
connector_value = charger.connector_id
|
|
970
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
971
|
+
if ws is None:
|
|
972
|
+
return False, "no active connection", {}
|
|
973
|
+
payload: dict[str, object] = {"requestedMessage": "StatusNotification"}
|
|
974
|
+
if connector_value is not None:
|
|
975
|
+
payload["connectorId"] = connector_value
|
|
976
|
+
message_id = uuid.uuid4().hex
|
|
977
|
+
msg = json.dumps([2, message_id, "TriggerMessage", payload])
|
|
978
|
+
try:
|
|
979
|
+
async_to_sync(ws.send)(msg)
|
|
980
|
+
except Exception as exc:
|
|
981
|
+
return False, f"failed to send TriggerMessage ({exc})", {}
|
|
982
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
983
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
984
|
+
store.register_pending_call(
|
|
985
|
+
message_id,
|
|
986
|
+
{
|
|
987
|
+
"action": "TriggerMessage",
|
|
988
|
+
"charger_id": charger.charger_id,
|
|
989
|
+
"connector_id": connector_value,
|
|
990
|
+
"log_key": log_key,
|
|
991
|
+
"trigger_target": "StatusNotification",
|
|
992
|
+
"trigger_connector": connector_value,
|
|
993
|
+
"requested_at": timezone.now(),
|
|
994
|
+
},
|
|
995
|
+
)
|
|
996
|
+
store.schedule_call_timeout(
|
|
997
|
+
message_id,
|
|
998
|
+
timeout=5.0,
|
|
999
|
+
action="TriggerMessage",
|
|
1000
|
+
log_key=log_key,
|
|
1001
|
+
message="TriggerMessage StatusNotification timed out",
|
|
1002
|
+
)
|
|
1003
|
+
return True, "requested status update", {}
|
|
1004
|
+
|
|
1005
|
+
|
|
1006
|
+
def _send_get_configuration(
|
|
1007
|
+
charger: Charger, payload: Mapping | None = None
|
|
1008
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1009
|
+
connector_value = charger.connector_id
|
|
1010
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1011
|
+
if ws is None:
|
|
1012
|
+
return False, "no active connection", {}
|
|
1013
|
+
message_id = uuid.uuid4().hex
|
|
1014
|
+
msg = json.dumps([2, message_id, "GetConfiguration", {}])
|
|
1015
|
+
try:
|
|
1016
|
+
async_to_sync(ws.send)(msg)
|
|
1017
|
+
except Exception as exc:
|
|
1018
|
+
return False, f"failed to send GetConfiguration ({exc})", {}
|
|
1019
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1020
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1021
|
+
store.register_pending_call(
|
|
1022
|
+
message_id,
|
|
1023
|
+
{
|
|
1024
|
+
"action": "GetConfiguration",
|
|
1025
|
+
"charger_id": charger.charger_id,
|
|
1026
|
+
"connector_id": connector_value,
|
|
1027
|
+
"log_key": log_key,
|
|
1028
|
+
"requested_at": timezone.now(),
|
|
1029
|
+
},
|
|
1030
|
+
)
|
|
1031
|
+
store.schedule_call_timeout(
|
|
1032
|
+
message_id,
|
|
1033
|
+
timeout=5.0,
|
|
1034
|
+
action="GetConfiguration",
|
|
1035
|
+
log_key=log_key,
|
|
1036
|
+
message=(
|
|
1037
|
+
"GetConfiguration timed out: charger did not respond"
|
|
1038
|
+
" (operation may not be supported)"
|
|
1039
|
+
),
|
|
1040
|
+
)
|
|
1041
|
+
return True, "requested configuration update", {}
|
|
1042
|
+
|
|
1043
|
+
|
|
1044
|
+
def _send_reset(
|
|
1045
|
+
charger: Charger, payload: Mapping | None = None
|
|
1046
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1047
|
+
connector_value = charger.connector_id
|
|
1048
|
+
tx = store.get_transaction(charger.charger_id, connector_value)
|
|
1049
|
+
if tx:
|
|
1050
|
+
return False, "active session in progress", {}
|
|
1051
|
+
message_id = uuid.uuid4().hex
|
|
1052
|
+
reset_type = None
|
|
1053
|
+
if payload:
|
|
1054
|
+
reset_type = payload.get("reset_type")
|
|
1055
|
+
msg = json.dumps(
|
|
1056
|
+
[2, message_id, "Reset", {"type": (reset_type or "Soft")}]
|
|
1057
|
+
)
|
|
1058
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1059
|
+
if ws is None:
|
|
1060
|
+
return False, "no active connection", {}
|
|
1061
|
+
try:
|
|
1062
|
+
async_to_sync(ws.send)(msg)
|
|
1063
|
+
except Exception as exc:
|
|
1064
|
+
return False, f"failed to send Reset ({exc})", {}
|
|
1065
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1066
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1067
|
+
store.register_pending_call(
|
|
1068
|
+
message_id,
|
|
1069
|
+
{
|
|
1070
|
+
"action": "Reset",
|
|
1071
|
+
"charger_id": charger.charger_id,
|
|
1072
|
+
"connector_id": connector_value,
|
|
1073
|
+
"log_key": log_key,
|
|
1074
|
+
"requested_at": timezone.now(),
|
|
1075
|
+
},
|
|
1076
|
+
)
|
|
1077
|
+
store.schedule_call_timeout(
|
|
1078
|
+
message_id,
|
|
1079
|
+
timeout=5.0,
|
|
1080
|
+
action="Reset",
|
|
1081
|
+
log_key=log_key,
|
|
1082
|
+
message="Reset timed out: charger did not respond",
|
|
1083
|
+
)
|
|
1084
|
+
return True, "reset requested", {}
|
|
1085
|
+
|
|
1086
|
+
|
|
1087
|
+
def _toggle_rfid(
|
|
1088
|
+
charger: Charger, payload: Mapping | None = None
|
|
1089
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1090
|
+
enable = None
|
|
1091
|
+
if payload is not None:
|
|
1092
|
+
enable = payload.get("enable")
|
|
1093
|
+
if isinstance(enable, str):
|
|
1094
|
+
enable = enable.lower() in {"1", "true", "yes", "on"}
|
|
1095
|
+
elif isinstance(enable, (int, bool)):
|
|
1096
|
+
enable = bool(enable)
|
|
1097
|
+
if enable is None:
|
|
1098
|
+
enable = not charger.require_rfid
|
|
1099
|
+
enable_bool = bool(enable)
|
|
1100
|
+
Charger.objects.filter(pk=charger.pk).update(require_rfid=enable_bool)
|
|
1101
|
+
charger.require_rfid = enable_bool
|
|
1102
|
+
detail = "RFID authentication enabled" if enable_bool else "RFID authentication disabled"
|
|
1103
|
+
return True, detail, {"require_rfid": enable_bool}
|
|
1104
|
+
|
|
1105
|
+
|
|
1106
|
+
def _send_local_rfid_list_remote(
|
|
1107
|
+
charger: Charger, payload: Mapping | None = None
|
|
1108
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1109
|
+
connector_value = charger.connector_id
|
|
1110
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1111
|
+
if ws is None:
|
|
1112
|
+
return False, "no active connection", {}
|
|
1113
|
+
authorization_list = []
|
|
1114
|
+
if payload is not None:
|
|
1115
|
+
authorization_list = payload.get("local_authorization_list", []) or []
|
|
1116
|
+
if not isinstance(authorization_list, list):
|
|
1117
|
+
return False, "local_authorization_list must be a list", {}
|
|
1118
|
+
list_version = None
|
|
1119
|
+
if payload is not None:
|
|
1120
|
+
list_version = payload.get("list_version")
|
|
1121
|
+
if list_version is None:
|
|
1122
|
+
list_version_value = (charger.local_auth_list_version or 0) + 1
|
|
1123
|
+
else:
|
|
1124
|
+
try:
|
|
1125
|
+
list_version_value = int(list_version)
|
|
1126
|
+
except (TypeError, ValueError):
|
|
1127
|
+
return False, "invalid list_version", {}
|
|
1128
|
+
if list_version_value <= 0:
|
|
1129
|
+
return False, "invalid list_version", {}
|
|
1130
|
+
update_type = "Full"
|
|
1131
|
+
if payload is not None and payload.get("update_type"):
|
|
1132
|
+
update_type = str(payload.get("update_type") or "").strip() or "Full"
|
|
1133
|
+
message_id = uuid.uuid4().hex
|
|
1134
|
+
msg_payload = {
|
|
1135
|
+
"listVersion": list_version_value,
|
|
1136
|
+
"updateType": update_type,
|
|
1137
|
+
"localAuthorizationList": authorization_list,
|
|
1138
|
+
}
|
|
1139
|
+
msg = json.dumps([2, message_id, "SendLocalList", msg_payload])
|
|
1140
|
+
try:
|
|
1141
|
+
async_to_sync(ws.send)(msg)
|
|
1142
|
+
except Exception as exc:
|
|
1143
|
+
return False, f"failed to send SendLocalList ({exc})", {}
|
|
1144
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1145
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1146
|
+
store.register_pending_call(
|
|
1147
|
+
message_id,
|
|
1148
|
+
{
|
|
1149
|
+
"action": "SendLocalList",
|
|
1150
|
+
"charger_id": charger.charger_id,
|
|
1151
|
+
"connector_id": connector_value,
|
|
1152
|
+
"log_key": log_key,
|
|
1153
|
+
"list_version": list_version_value,
|
|
1154
|
+
"list_size": len(authorization_list),
|
|
1155
|
+
"requested_at": timezone.now(),
|
|
1156
|
+
},
|
|
1157
|
+
)
|
|
1158
|
+
store.schedule_call_timeout(
|
|
1159
|
+
message_id,
|
|
1160
|
+
action="SendLocalList",
|
|
1161
|
+
log_key=log_key,
|
|
1162
|
+
message="SendLocalList request timed out",
|
|
1163
|
+
)
|
|
1164
|
+
return True, "SendLocalList dispatched", {}
|
|
1165
|
+
|
|
1166
|
+
|
|
1167
|
+
def _get_local_list_version_remote(
|
|
1168
|
+
charger: Charger, payload: Mapping | None = None
|
|
1169
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1170
|
+
connector_value = charger.connector_id
|
|
1171
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1172
|
+
if ws is None:
|
|
1173
|
+
return False, "no active connection", {}
|
|
1174
|
+
message_id = uuid.uuid4().hex
|
|
1175
|
+
msg = json.dumps([2, message_id, "GetLocalListVersion", {}])
|
|
1176
|
+
try:
|
|
1177
|
+
async_to_sync(ws.send)(msg)
|
|
1178
|
+
except Exception as exc:
|
|
1179
|
+
return False, f"failed to send GetLocalListVersion ({exc})", {}
|
|
1180
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1181
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1182
|
+
store.register_pending_call(
|
|
1183
|
+
message_id,
|
|
1184
|
+
{
|
|
1185
|
+
"action": "GetLocalListVersion",
|
|
1186
|
+
"charger_id": charger.charger_id,
|
|
1187
|
+
"connector_id": connector_value,
|
|
1188
|
+
"log_key": log_key,
|
|
1189
|
+
"requested_at": timezone.now(),
|
|
1190
|
+
},
|
|
1191
|
+
)
|
|
1192
|
+
store.schedule_call_timeout(
|
|
1193
|
+
message_id,
|
|
1194
|
+
action="GetLocalListVersion",
|
|
1195
|
+
log_key=log_key,
|
|
1196
|
+
message="GetLocalListVersion request timed out",
|
|
1197
|
+
)
|
|
1198
|
+
return True, "GetLocalListVersion requested", {}
|
|
1199
|
+
|
|
1200
|
+
|
|
1201
|
+
def _change_availability_remote(
|
|
1202
|
+
charger: Charger, payload: Mapping | None = None
|
|
1203
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1204
|
+
availability_type = None
|
|
1205
|
+
if payload is not None:
|
|
1206
|
+
availability_type = payload.get("availability_type")
|
|
1207
|
+
availability_label = str(availability_type or "").strip()
|
|
1208
|
+
if availability_label not in {"Operative", "Inoperative"}:
|
|
1209
|
+
return False, "invalid availability type", {}
|
|
1210
|
+
connector_value = charger.connector_id
|
|
1211
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1212
|
+
if ws is None:
|
|
1213
|
+
return False, "no active connection", {}
|
|
1214
|
+
connector_id = connector_value if connector_value is not None else 0
|
|
1215
|
+
message_id = uuid.uuid4().hex
|
|
1216
|
+
msg = json.dumps(
|
|
1217
|
+
[
|
|
1218
|
+
2,
|
|
1219
|
+
message_id,
|
|
1220
|
+
"ChangeAvailability",
|
|
1221
|
+
{"connectorId": connector_id, "type": availability_label},
|
|
1222
|
+
]
|
|
1223
|
+
)
|
|
1224
|
+
try:
|
|
1225
|
+
async_to_sync(ws.send)(msg)
|
|
1226
|
+
except Exception as exc:
|
|
1227
|
+
return False, f"failed to send ChangeAvailability ({exc})", {}
|
|
1228
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1229
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1230
|
+
timestamp = timezone.now()
|
|
1231
|
+
store.register_pending_call(
|
|
1232
|
+
message_id,
|
|
1233
|
+
{
|
|
1234
|
+
"action": "ChangeAvailability",
|
|
1235
|
+
"charger_id": charger.charger_id,
|
|
1236
|
+
"connector_id": connector_value,
|
|
1237
|
+
"availability_type": availability_label,
|
|
1238
|
+
"requested_at": timestamp,
|
|
1239
|
+
},
|
|
1240
|
+
)
|
|
1241
|
+
updates = {
|
|
1242
|
+
"availability_requested_state": availability_label,
|
|
1243
|
+
"availability_requested_at": timestamp,
|
|
1244
|
+
"availability_request_status": "",
|
|
1245
|
+
"availability_request_status_at": None,
|
|
1246
|
+
"availability_request_details": "",
|
|
1247
|
+
}
|
|
1248
|
+
Charger.objects.filter(pk=charger.pk).update(**updates)
|
|
1249
|
+
for field, value in updates.items():
|
|
1250
|
+
setattr(charger, field, value)
|
|
1251
|
+
return True, f"requested ChangeAvailability {availability_label}", updates
|
|
1252
|
+
|
|
1253
|
+
|
|
1254
|
+
def _clear_cache_remote(
|
|
1255
|
+
charger: Charger, payload: Mapping | None = None
|
|
1256
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1257
|
+
connector_value = charger.connector_id
|
|
1258
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1259
|
+
if ws is None:
|
|
1260
|
+
return False, "no active connection", {}
|
|
1261
|
+
message_id = uuid.uuid4().hex
|
|
1262
|
+
msg = json.dumps([2, message_id, "ClearCache", {}])
|
|
1263
|
+
try:
|
|
1264
|
+
async_to_sync(ws.send)(msg)
|
|
1265
|
+
except Exception as exc:
|
|
1266
|
+
return False, f"failed to send ClearCache ({exc})", {}
|
|
1267
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1268
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1269
|
+
requested_at = timezone.now()
|
|
1270
|
+
store.register_pending_call(
|
|
1271
|
+
message_id,
|
|
1272
|
+
{
|
|
1273
|
+
"action": "ClearCache",
|
|
1274
|
+
"charger_id": charger.charger_id,
|
|
1275
|
+
"connector_id": connector_value,
|
|
1276
|
+
"log_key": log_key,
|
|
1277
|
+
"requested_at": requested_at,
|
|
1278
|
+
},
|
|
1279
|
+
)
|
|
1280
|
+
store.schedule_call_timeout(
|
|
1281
|
+
message_id,
|
|
1282
|
+
action="ClearCache",
|
|
1283
|
+
log_key=log_key,
|
|
1284
|
+
)
|
|
1285
|
+
return True, "requested ClearCache", {}
|
|
1286
|
+
|
|
1287
|
+
|
|
1288
|
+
def _set_availability_state_remote(
|
|
1289
|
+
charger: Charger, payload: Mapping | None = None
|
|
1290
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1291
|
+
availability_state = None
|
|
1292
|
+
if payload is not None:
|
|
1293
|
+
availability_state = payload.get("availability_state")
|
|
1294
|
+
availability_label = str(availability_state or "").strip()
|
|
1295
|
+
if availability_label not in {"Operative", "Inoperative"}:
|
|
1296
|
+
return False, "invalid availability state", {}
|
|
1297
|
+
timestamp = timezone.now()
|
|
1298
|
+
updates = {
|
|
1299
|
+
"availability_state": availability_label,
|
|
1300
|
+
"availability_state_updated_at": timestamp,
|
|
1301
|
+
}
|
|
1302
|
+
Charger.objects.filter(pk=charger.pk).update(**updates)
|
|
1303
|
+
for field, value in updates.items():
|
|
1304
|
+
setattr(charger, field, value)
|
|
1305
|
+
return True, f"availability marked {availability_label}", updates
|
|
1306
|
+
|
|
1307
|
+
|
|
1308
|
+
def _remote_stop_transaction_remote(
|
|
1309
|
+
charger: Charger, payload: Mapping | None = None
|
|
1310
|
+
) -> tuple[bool, str, dict[str, object]]:
|
|
1311
|
+
connector_value = charger.connector_id
|
|
1312
|
+
ws = store.get_connection(charger.charger_id, connector_value)
|
|
1313
|
+
if ws is None:
|
|
1314
|
+
return False, "no active connection", {}
|
|
1315
|
+
tx_obj = store.get_transaction(charger.charger_id, connector_value)
|
|
1316
|
+
if tx_obj is None:
|
|
1317
|
+
return False, "no active transaction", {}
|
|
1318
|
+
message_id = uuid.uuid4().hex
|
|
1319
|
+
msg = json.dumps(
|
|
1320
|
+
[
|
|
1321
|
+
2,
|
|
1322
|
+
message_id,
|
|
1323
|
+
"RemoteStopTransaction",
|
|
1324
|
+
{"transactionId": tx_obj.pk},
|
|
1325
|
+
]
|
|
1326
|
+
)
|
|
1327
|
+
try:
|
|
1328
|
+
async_to_sync(ws.send)(msg)
|
|
1329
|
+
except Exception as exc:
|
|
1330
|
+
return False, f"failed to send RemoteStopTransaction ({exc})", {}
|
|
1331
|
+
log_key = store.identity_key(charger.charger_id, connector_value)
|
|
1332
|
+
store.add_log(log_key, f"< {msg}", log_type="charger")
|
|
1333
|
+
store.register_pending_call(
|
|
1334
|
+
message_id,
|
|
1335
|
+
{
|
|
1336
|
+
"action": "RemoteStopTransaction",
|
|
1337
|
+
"charger_id": charger.charger_id,
|
|
1338
|
+
"connector_id": connector_value,
|
|
1339
|
+
"transaction_id": tx_obj.pk,
|
|
1340
|
+
"log_key": log_key,
|
|
1341
|
+
"requested_at": timezone.now(),
|
|
1342
|
+
},
|
|
1343
|
+
)
|
|
1344
|
+
return True, "remote stop requested", {}
|
|
1345
|
+
|
|
1346
|
+
|
|
1347
|
+
REMOTE_ACTIONS = {
|
|
1348
|
+
"trigger-status": _send_trigger_status,
|
|
1349
|
+
"get-configuration": _send_get_configuration,
|
|
1350
|
+
"reset": _send_reset,
|
|
1351
|
+
"toggle-rfid": _toggle_rfid,
|
|
1352
|
+
"send-local-rfid-list": _send_local_rfid_list_remote,
|
|
1353
|
+
"get-local-list-version": _get_local_list_version_remote,
|
|
1354
|
+
"change-availability": _change_availability_remote,
|
|
1355
|
+
"clear-cache": _clear_cache_remote,
|
|
1356
|
+
"set-availability-state": _set_availability_state_remote,
|
|
1357
|
+
"remote-stop": _remote_stop_transaction_remote,
|
|
1358
|
+
}
|
|
1359
|
+
|
|
1360
|
+
|
|
1361
|
+
@csrf_exempt
|
|
1362
|
+
def network_charger_action(request):
|
|
1363
|
+
"""Execute remote admin actions on behalf of trusted nodes."""
|
|
1364
|
+
|
|
1365
|
+
if request.method != "POST":
|
|
1366
|
+
return JsonResponse({"detail": "POST required"}, status=405)
|
|
1367
|
+
|
|
1368
|
+
try:
|
|
1369
|
+
body = json.loads(request.body.decode() or "{}")
|
|
1370
|
+
except json.JSONDecodeError:
|
|
1371
|
+
return JsonResponse({"detail": "invalid json"}, status=400)
|
|
1372
|
+
|
|
1373
|
+
requester = body.get("requester")
|
|
1374
|
+
if not requester:
|
|
1375
|
+
return JsonResponse({"detail": "requester required"}, status=400)
|
|
1376
|
+
|
|
1377
|
+
requester_mac = _clean_requester_hint(body.get("requester_mac"))
|
|
1378
|
+
requester_public_key = _clean_requester_hint(
|
|
1379
|
+
body.get("requester_public_key"), strip=False
|
|
1380
|
+
)
|
|
1381
|
+
|
|
1382
|
+
node, error_response = _load_signed_node(
|
|
1383
|
+
request,
|
|
1384
|
+
requester,
|
|
1385
|
+
mac_address=requester_mac,
|
|
1386
|
+
public_key=requester_public_key,
|
|
1387
|
+
)
|
|
1388
|
+
if error_response is not None:
|
|
1389
|
+
return error_response
|
|
1390
|
+
|
|
1391
|
+
serial = Charger.normalize_serial(body.get("charger_id"))
|
|
1392
|
+
if not serial or Charger.is_placeholder_serial(serial):
|
|
1393
|
+
return JsonResponse({"detail": "invalid charger"}, status=400)
|
|
1394
|
+
|
|
1395
|
+
connector = body.get("connector_id")
|
|
1396
|
+
if connector in ("", None):
|
|
1397
|
+
connector_value = None
|
|
1398
|
+
elif isinstance(connector, int):
|
|
1399
|
+
connector_value = connector
|
|
1400
|
+
else:
|
|
1401
|
+
try:
|
|
1402
|
+
connector_value = int(str(connector))
|
|
1403
|
+
except (TypeError, ValueError):
|
|
1404
|
+
return JsonResponse({"detail": "invalid connector"}, status=400)
|
|
1405
|
+
|
|
1406
|
+
charger = Charger.objects.filter(
|
|
1407
|
+
charger_id=serial, connector_id=connector_value
|
|
1408
|
+
).first()
|
|
1409
|
+
if not charger:
|
|
1410
|
+
return JsonResponse({"detail": "charger not found"}, status=404)
|
|
1411
|
+
|
|
1412
|
+
if not charger.allow_remote:
|
|
1413
|
+
return JsonResponse({"detail": "remote actions disabled"}, status=403)
|
|
1414
|
+
|
|
1415
|
+
if not _require_local_origin(charger):
|
|
1416
|
+
return JsonResponse({"detail": "charger is not managed by this node"}, status=403)
|
|
1417
|
+
|
|
1418
|
+
authorized_node_ids = {
|
|
1419
|
+
pk for pk in (charger.manager_node_id, charger.node_origin_id) if pk
|
|
1420
|
+
}
|
|
1421
|
+
if authorized_node_ids and node and node.pk not in authorized_node_ids:
|
|
1422
|
+
return JsonResponse(
|
|
1423
|
+
{"detail": "requester does not manage this charger"}, status=403
|
|
1424
|
+
)
|
|
1425
|
+
|
|
1426
|
+
action = body.get("action")
|
|
1427
|
+
handler = REMOTE_ACTIONS.get(action or "")
|
|
1428
|
+
if handler is None:
|
|
1429
|
+
return JsonResponse({"detail": "unsupported action"}, status=400)
|
|
1430
|
+
|
|
1431
|
+
success, message, updates = handler(charger, body)
|
|
1432
|
+
|
|
1433
|
+
status_code = 200 if success else 409
|
|
1434
|
+
status_label = "ok" if success else "error"
|
|
1435
|
+
serialized_updates: dict[str, object] = {}
|
|
1436
|
+
if isinstance(updates, Mapping):
|
|
1437
|
+
for key, value in updates.items():
|
|
1438
|
+
if hasattr(value, "isoformat"):
|
|
1439
|
+
serialized_updates[key] = value.isoformat()
|
|
1440
|
+
else:
|
|
1441
|
+
serialized_updates[key] = value
|
|
1442
|
+
return JsonResponse(
|
|
1443
|
+
{"status": status_label, "detail": message, "updates": serialized_updates},
|
|
1444
|
+
status=status_code,
|
|
1445
|
+
)
|
|
1446
|
+
|
|
1447
|
+
|
|
1448
|
+
@csrf_exempt
|
|
1449
|
+
def proxy_session(request):
|
|
1450
|
+
"""Create a proxy login session for a remote administrator."""
|
|
1451
|
+
|
|
1452
|
+
if request.method != "POST":
|
|
1453
|
+
return JsonResponse({"detail": "POST required"}, status=405)
|
|
1454
|
+
|
|
1455
|
+
try:
|
|
1456
|
+
payload = json.loads(request.body.decode() or "{}")
|
|
1457
|
+
except json.JSONDecodeError:
|
|
1458
|
+
return JsonResponse({"detail": "invalid json"}, status=400)
|
|
1459
|
+
|
|
1460
|
+
requester = payload.get("requester")
|
|
1461
|
+
if not requester:
|
|
1462
|
+
return JsonResponse({"detail": "requester required"}, status=400)
|
|
1463
|
+
|
|
1464
|
+
requester_mac = _clean_requester_hint(payload.get("requester_mac"))
|
|
1465
|
+
requester_public_key = _clean_requester_hint(
|
|
1466
|
+
payload.get("requester_public_key"), strip=False
|
|
1467
|
+
)
|
|
1468
|
+
node, error_response = _load_signed_node(
|
|
1469
|
+
request,
|
|
1470
|
+
requester,
|
|
1471
|
+
mac_address=requester_mac,
|
|
1472
|
+
public_key=requester_public_key,
|
|
1473
|
+
)
|
|
1474
|
+
if error_response is not None:
|
|
1475
|
+
return error_response
|
|
1476
|
+
|
|
1477
|
+
user_payload = payload.get("user") or {}
|
|
1478
|
+
username = str(user_payload.get("username", "")).strip()
|
|
1479
|
+
if not username:
|
|
1480
|
+
return JsonResponse({"detail": "username required"}, status=400)
|
|
1481
|
+
|
|
1482
|
+
User = get_user_model()
|
|
1483
|
+
user, created = User.objects.get_or_create(
|
|
1484
|
+
username=username,
|
|
1485
|
+
defaults={
|
|
1486
|
+
"email": user_payload.get("email", ""),
|
|
1487
|
+
"first_name": user_payload.get("first_name", ""),
|
|
1488
|
+
"last_name": user_payload.get("last_name", ""),
|
|
1489
|
+
},
|
|
1490
|
+
)
|
|
1491
|
+
|
|
1492
|
+
updates: list[str] = []
|
|
1493
|
+
for field in ("first_name", "last_name", "email"):
|
|
1494
|
+
value = user_payload.get(field)
|
|
1495
|
+
if isinstance(value, str) and getattr(user, field) != value:
|
|
1496
|
+
setattr(user, field, value)
|
|
1497
|
+
updates.append(field)
|
|
1498
|
+
|
|
1499
|
+
if created:
|
|
1500
|
+
user.set_unusable_password()
|
|
1501
|
+
updates.append("password")
|
|
1502
|
+
|
|
1503
|
+
staff_flag = user_payload.get("is_staff")
|
|
1504
|
+
if staff_flag is not None:
|
|
1505
|
+
is_staff = bool(staff_flag)
|
|
1506
|
+
else:
|
|
1507
|
+
is_staff = True
|
|
1508
|
+
if user.is_staff != is_staff:
|
|
1509
|
+
user.is_staff = is_staff
|
|
1510
|
+
updates.append("is_staff")
|
|
1511
|
+
|
|
1512
|
+
superuser_flag = user_payload.get("is_superuser")
|
|
1513
|
+
if superuser_flag is not None:
|
|
1514
|
+
is_superuser = bool(superuser_flag)
|
|
1515
|
+
if user.is_superuser != is_superuser:
|
|
1516
|
+
user.is_superuser = is_superuser
|
|
1517
|
+
updates.append("is_superuser")
|
|
1518
|
+
|
|
1519
|
+
if not user.is_active:
|
|
1520
|
+
user.is_active = True
|
|
1521
|
+
updates.append("is_active")
|
|
1522
|
+
|
|
1523
|
+
if updates:
|
|
1524
|
+
user.save(update_fields=updates)
|
|
1525
|
+
|
|
1526
|
+
_assign_groups_and_permissions(user, user_payload)
|
|
1527
|
+
|
|
1528
|
+
target_path = _sanitize_proxy_target(payload.get("target"), request)
|
|
1529
|
+
nonce = secrets.token_urlsafe(24)
|
|
1530
|
+
cache_key = f"{PROXY_CACHE_PREFIX}{nonce}"
|
|
1531
|
+
cache.set(cache_key, {"user_id": user.pk}, PROXY_TOKEN_TIMEOUT)
|
|
1532
|
+
|
|
1533
|
+
signer = TimestampSigner(salt=PROXY_TOKEN_SALT)
|
|
1534
|
+
token = signer.sign_object({"user": user.pk, "next": target_path, "nonce": nonce})
|
|
1535
|
+
login_url = request.build_absolute_uri(
|
|
1536
|
+
reverse("node-proxy-login", args=[token])
|
|
1537
|
+
)
|
|
1538
|
+
expires = timezone.now() + timedelta(seconds=PROXY_TOKEN_TIMEOUT)
|
|
1539
|
+
|
|
1540
|
+
return JsonResponse({"login_url": login_url, "expires": expires.isoformat()})
|
|
1541
|
+
|
|
1542
|
+
|
|
1543
|
+
@csrf_exempt
|
|
1544
|
+
def proxy_login(request, token):
|
|
1545
|
+
"""Redeem a proxy login token and redirect to the target path."""
|
|
1546
|
+
|
|
1547
|
+
signer = TimestampSigner(salt=PROXY_TOKEN_SALT)
|
|
1548
|
+
try:
|
|
1549
|
+
payload = signer.unsign_object(token, max_age=PROXY_TOKEN_TIMEOUT)
|
|
1550
|
+
except SignatureExpired:
|
|
1551
|
+
return HttpResponse(status=410)
|
|
1552
|
+
except BadSignature:
|
|
1553
|
+
return HttpResponse(status=400)
|
|
1554
|
+
|
|
1555
|
+
nonce = payload.get("nonce")
|
|
1556
|
+
if not nonce:
|
|
1557
|
+
return HttpResponse(status=400)
|
|
1558
|
+
|
|
1559
|
+
cache_key = f"{PROXY_CACHE_PREFIX}{nonce}"
|
|
1560
|
+
cache_payload = cache.get(cache_key)
|
|
1561
|
+
if not cache_payload:
|
|
1562
|
+
return HttpResponse(status=410)
|
|
1563
|
+
cache.delete(cache_key)
|
|
1564
|
+
|
|
1565
|
+
user_id = cache_payload.get("user_id")
|
|
1566
|
+
if not user_id:
|
|
1567
|
+
return HttpResponse(status=403)
|
|
1568
|
+
|
|
1569
|
+
User = get_user_model()
|
|
1570
|
+
user = User.objects.filter(pk=user_id).first()
|
|
1571
|
+
if not user or not user.is_active:
|
|
1572
|
+
return HttpResponse(status=403)
|
|
1573
|
+
|
|
1574
|
+
backend = getattr(user, "backend", "")
|
|
1575
|
+
if not backend:
|
|
1576
|
+
backends = getattr(settings, "AUTHENTICATION_BACKENDS", None) or ()
|
|
1577
|
+
backend = backends[0] if backends else "django.contrib.auth.backends.ModelBackend"
|
|
1578
|
+
login(request, user, backend=backend)
|
|
1579
|
+
|
|
1580
|
+
next_path = payload.get("next") or reverse("admin:index")
|
|
1581
|
+
if not url_has_allowed_host_and_scheme(
|
|
1582
|
+
next_path,
|
|
1583
|
+
allowed_hosts={request.get_host()},
|
|
1584
|
+
require_https=request.is_secure(),
|
|
1585
|
+
):
|
|
1586
|
+
next_path = reverse("admin:index")
|
|
1587
|
+
|
|
1588
|
+
return redirect(next_path)
|
|
1589
|
+
|
|
1590
|
+
|
|
+def _suite_model_name(meta) -> str:
+    base = str(meta.verbose_name_plural or meta.verbose_name or meta.object_name)
+    normalized = re.sub(r"[^0-9A-Za-z]+", " ", base).title().replace(" ", "")
+    return normalized or meta.object_name
+
+
+@csrf_exempt
+def proxy_execute(request):
+    """Execute model operations on behalf of a remote interface node."""
+
+    if request.method != "POST":
+        return JsonResponse({"detail": "POST required"}, status=405)
+
+    try:
+        payload = json.loads(request.body.decode() or "{}")
+    except json.JSONDecodeError:
+        return JsonResponse({"detail": "invalid json"}, status=400)
+
+    requester = payload.get("requester")
+    if not requester:
+        return JsonResponse({"detail": "requester required"}, status=400)
+
+    requester_mac = _clean_requester_hint(payload.get("requester_mac"))
+    requester_public_key = _clean_requester_hint(
+        payload.get("requester_public_key"), strip=False
+    )
+    node, error_response = _load_signed_node(
+        request,
+        requester,
+        mac_address=requester_mac,
+        public_key=requester_public_key,
+    )
+    if error_response is not None:
+        return error_response
+
+    action = str(payload.get("action", "")).strip().lower()
+    if not action:
+        return JsonResponse({"detail": "action required"}, status=400)
+
+    credentials = payload.get("credentials") or {}
+    username = str(credentials.get("username", "")).strip()
+    password_value = credentials.get("password")
+    password = password_value if isinstance(password_value, str) else str(password_value or "")
+    if not username or not password:
+        return JsonResponse({"detail": "credentials required"}, status=401)
+
+    User = get_user_model()
+    existing_user = User.objects.filter(username=username).first()
+    auth_user = authenticate(request=None, username=username, password=password)
+
+    if auth_user is None:
+        if existing_user is not None:
+            return JsonResponse({"detail": "authentication failed"}, status=403)
+        auth_user = User.objects.create_user(
+            username=username,
+            password=password,
+            email=str(credentials.get("email", "")),
+        )
+        auth_user.is_staff = True
+        auth_user.is_superuser = True
+        auth_user.first_name = str(credentials.get("first_name", ""))
+        auth_user.last_name = str(credentials.get("last_name", ""))
+        auth_user.save()
+    else:
+        updates: list[str] = []
+        for field in ("first_name", "last_name", "email"):
+            value = credentials.get(field)
+            if isinstance(value, str) and getattr(auth_user, field) != value:
+                setattr(auth_user, field, value)
+                updates.append(field)
+        for flag in ("is_staff", "is_superuser"):
+            if flag in credentials:
+                desired = bool(credentials.get(flag))
+                if getattr(auth_user, flag) != desired:
+                    setattr(auth_user, flag, desired)
+                    updates.append(flag)
+        if updates:
+            auth_user.save(update_fields=updates)
+
+    if not auth_user.is_active:
+        return JsonResponse({"detail": "user inactive"}, status=403)
+
+    _assign_groups_and_permissions(auth_user, credentials)
+
+    model_label = payload.get("model")
+    model = None
+    if action != "schema":
+        if not isinstance(model_label, str) or "." not in model_label:
+            return JsonResponse({"detail": "model required"}, status=400)
+        app_label, model_name = model_label.split(".", 1)
+        model = apps.get_model(app_label, model_name)
+        if model is None:
+            return JsonResponse({"detail": "model not found"}, status=404)
+
+    if action == "schema":
+        models_payload = []
+        for registered_model in apps.get_models():
+            meta = registered_model._meta
+            models_payload.append(
+                {
+                    "app_label": meta.app_label,
+                    "model": meta.model_name,
+                    "object_name": meta.object_name,
+                    "verbose_name": str(meta.verbose_name),
+                    "verbose_name_plural": str(meta.verbose_name_plural),
+                    "suite_name": _suite_model_name(meta),
+                }
+            )
+        return JsonResponse({"models": models_payload})
+
+    action_perm = {
+        "list": "view",
+        "get": "view",
+        "create": "add",
+        "update": "change",
+        "delete": "delete",
+    }.get(action)
+
+    if action_perm and not auth_user.is_superuser:
+        perm_codename = f"{model._meta.app_label}.{action_perm}_{model._meta.model_name}"
+        if not auth_user.has_perm(perm_codename):
+            return JsonResponse({"detail": "forbidden"}, status=403)
+
+    try:
+        if action == "list":
+            filters = payload.get("filters") or {}
+            if filters and not isinstance(filters, Mapping):
+                return JsonResponse({"detail": "filters must be a mapping"}, status=400)
+            queryset = model._default_manager.all()
+            if filters:
+                queryset = queryset.filter(**filters)
+            limit = payload.get("limit")
+            if limit is not None:
+                try:
+                    limit_value = int(limit)
+                    if limit_value > 0:
+                        queryset = queryset[:limit_value]
+                except (TypeError, ValueError):
+                    pass
+            data = serializers.serialize("python", queryset)
+            return JsonResponse({"objects": data})
+
+        if action == "get":
+            filters = payload.get("filters") or {}
+            if filters and not isinstance(filters, Mapping):
+                return JsonResponse({"detail": "filters must be a mapping"}, status=400)
+            lookup = dict(filters)
+            if not lookup and "pk" in payload:
+                lookup = {"pk": payload.get("pk")}
+            if not lookup:
+                return JsonResponse({"detail": "lookup required"}, status=400)
+            obj = model._default_manager.get(**lookup)
+            data = serializers.serialize("python", [obj])[0]
+            return JsonResponse({"object": data})
+    except model.DoesNotExist:
+        return JsonResponse({"detail": "not found"}, status=404)
+    except Exception as exc:
+        return JsonResponse({"detail": str(exc)}, status=400)
+
+    return JsonResponse({"detail": "unsupported action"}, status=400)
+
+
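Note (not part of the diff): proxy_execute takes a JSON body whose node identity is checked by _load_signed_node and whose credentials are authenticated before the requested action runs; a username that does not yet exist is created as a staff/superuser account with the supplied password. Only the schema, list, and get actions are handled above, and non-superusers are gated by the matching Django model permission. An illustrative request payload (a sketch only: the endpoint path is a placeholder and any request signing expected by _load_signed_node is omitted):

    import requests

    payload = {
        "requester": "<node-uuid>",                 # UUID of the calling node
        "requester_mac": "aa:bb:cc:dd:ee:ff",       # optional hint cleaned above
        "action": "list",
        "model": "auth.user",                       # "app_label.model" as parsed above
        "filters": {"is_active": True},             # must be a mapping
        "limit": 10,
        "credentials": {"username": "admin", "password": "secret"},
    }
    response = requests.post("https://<host>/nodes/proxy/execute/", json=payload, timeout=10)
    print(response.status_code, response.json())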
 @csrf_exempt
 @api_login_required
 def public_node_endpoint(request, endpoint):
@@ -536,7 +1764,10 @@ def public_node_endpoint(request, endpoint):
     if request.method == "GET":
         data = {
             "hostname": node.hostname,
-            "
+            "network_hostname": node.network_hostname,
+            "address": node.address or node.get_primary_contact(),
+            "ipv4_address": node.ipv4_address,
+            "ipv6_address": node.ipv6_address,
             "port": node.port,
             "badge_color": node.badge_color,
             "last_seen": node.last_seen,
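Note (not part of the diff): the GET branch of public_node_endpoint now reports the node's network identity alongside its hostname. An illustrative fragment of the resulting data dict (field names come from the diff above; the values are invented):

    {
        "hostname": "node-1",
        "network_hostname": "node-1.local",
        "address": "203.0.113.10",
        "ipv4_address": "203.0.113.10",
        "ipv6_address": "",
        "port": 8888,
        "badge_color": "#28a745",
        "last_seen": "2024-05-01T12:00:00Z",
    }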
@@ -584,100 +1815,99 @@ def net_message(request):
     except Exception:
         return JsonResponse({"detail": "invalid signature"}, status=403)

-
-
-
-
-    reach_name = data.get("reach")
-    reach_role = None
-    if reach_name:
-        reach_role = NodeRole.objects.filter(name=reach_name).first()
-    filter_node_uuid = data.get("filter_node")
-    filter_node = None
-    if filter_node_uuid:
-        filter_node = Node.objects.filter(uuid=filter_node_uuid).first()
-    filter_feature_slug = data.get("filter_node_feature")
-    filter_feature = None
-    if filter_feature_slug:
-        filter_feature = NodeFeature.objects.filter(slug=filter_feature_slug).first()
-    filter_role_name = data.get("filter_node_role")
-    filter_role = None
-    if filter_role_name:
-        filter_role = NodeRole.objects.filter(name=filter_role_name).first()
-    filter_relation_value = data.get("filter_current_relation")
-    filter_relation = ""
-    if filter_relation_value:
-        relation = Node.normalize_relation(filter_relation_value)
-        filter_relation = relation.value if relation else ""
-    filter_installed_version = (data.get("filter_installed_version") or "")[:20]
-    filter_installed_revision = (data.get("filter_installed_revision") or "")[:40]
-    seen = data.get("seen", [])
-    origin_id = data.get("origin")
-    origin_node = None
-    if origin_id:
-        origin_node = Node.objects.filter(uuid=origin_id).first()
-    if not origin_node:
-        origin_node = node
-    if not msg_uuid:
-        return JsonResponse({"detail": "uuid required"}, status=400)
-    msg, created = NetMessage.objects.get_or_create(
-        uuid=msg_uuid,
-        defaults={
-            "subject": subject[:64],
-            "body": body[:256],
-            "reach": reach_role,
-            "node_origin": origin_node,
-            "attachments": attachments or None,
-            "filter_node": filter_node,
-            "filter_node_feature": filter_feature,
-            "filter_node_role": filter_role,
-            "filter_current_relation": filter_relation,
-            "filter_installed_version": filter_installed_version,
-            "filter_installed_revision": filter_installed_revision,
-        },
-    )
-    if not created:
-        msg.subject = subject[:64]
-        msg.body = body[:256]
-        update_fields = ["subject", "body"]
-        if reach_role and msg.reach_id != reach_role.id:
-            msg.reach = reach_role
-            update_fields.append("reach")
-        if msg.node_origin_id is None and origin_node:
-            msg.node_origin = origin_node
-            update_fields.append("node_origin")
-        if attachments and msg.attachments != attachments:
-            msg.attachments = attachments
-            update_fields.append("attachments")
-        field_updates = {
-            "filter_node": filter_node,
-            "filter_node_feature": filter_feature,
-            "filter_node_role": filter_role,
-            "filter_current_relation": filter_relation,
-            "filter_installed_version": filter_installed_version,
-            "filter_installed_revision": filter_installed_revision,
-        }
-        for field, value in field_updates.items():
-            if getattr(msg, field) != value:
-                setattr(msg, field, value)
-                update_fields.append(field)
-        msg.save(update_fields=update_fields)
-    if attachments:
-        msg.apply_attachments(attachments)
-    msg.propagate(seen=seen)
+    try:
+        msg = NetMessage.receive_payload(data, sender=node)
+    except ValueError as exc:
+        return JsonResponse({"detail": str(exc)}, status=400)
     return JsonResponse({"status": "propagated", "complete": msg.complete})


-
-
+@csrf_exempt
+def net_message_pull(request):
+    """Allow downstream nodes to retrieve queued network messages."""

-
-
-
-
-
-
-
-
-
+    if request.method != "POST":
+        return JsonResponse({"detail": "POST required"}, status=405)
+    try:
+        data = json.loads(request.body.decode() or "{}")
+    except json.JSONDecodeError:
+        return JsonResponse({"detail": "invalid json"}, status=400)
+
+    requester = data.get("requester")
+    if not requester:
+        return JsonResponse({"detail": "requester required"}, status=400)
+    signature = request.headers.get("X-Signature")
+    if not signature:
+        return JsonResponse({"detail": "signature required"}, status=403)
+
+    node = Node.objects.filter(uuid=requester).first()
+    if not node or not node.public_key:
+        return JsonResponse({"detail": "unknown requester"}, status=403)
+    try:
+        public_key = serialization.load_pem_public_key(node.public_key.encode())
+        public_key.verify(
+            base64.b64decode(signature),
+            request.body,
+            padding.PKCS1v15(),
+            hashes.SHA256(),
+        )
+    except Exception:
+        return JsonResponse({"detail": "invalid signature"}, status=403)
+
+    local = Node.get_local()
+    if not local:
+        return JsonResponse({"detail": "local node unavailable"}, status=503)
+    private_key = local.get_private_key()
+    if not private_key:
+        return JsonResponse({"detail": "signing unavailable"}, status=503)
+
+    entries = (
+        PendingNetMessage.objects.select_related(
+            "message",
+            "message__filter_node",
+            "message__filter_node_feature",
+            "message__filter_node_role",
+            "message__node_origin",
+        )
+        .filter(node=node)
+        .order_by("queued_at")
     )
+    messages: list[dict[str, object]] = []
+    expired_ids: list[int] = []
+    delivered_ids: list[int] = []
+
+    origin_fallback = str(local.uuid)
+
+    for entry in entries:
+        if entry.is_stale:
+            expired_ids.append(entry.pk)
+            continue
+        message = entry.message
+        reach_source = message.filter_node_role or message.reach
+        reach_name = reach_source.name if reach_source else None
+        origin_node = message.node_origin
+        origin_uuid = str(origin_node.uuid) if origin_node else origin_fallback
+        sender_id = str(local.uuid)
+        seen = [str(value) for value in entry.seen]
+        payload = message._build_payload(
+            sender_id=sender_id,
+            origin_uuid=origin_uuid,
+            reach_name=reach_name,
+            seen=seen,
+        )
+        payload_json = message._serialize_payload(payload)
+        payload_signature = message._sign_payload(payload_json, private_key)
+        if not payload_signature:
+            logger.warning(
+                "Unable to sign queued NetMessage %s for node %s", message.pk, node.pk
+            )
+            continue
+        messages.append({"payload": payload, "signature": payload_signature})
+        delivered_ids.append(entry.pk)
+
+    if expired_ids:
+        PendingNetMessage.objects.filter(pk__in=expired_ids).delete()
+    if delivered_ids:
+        PendingNetMessage.objects.filter(pk__in=delivered_ids).delete()
+
+    return JsonResponse({"messages": messages})
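Note (not part of the diff): net_message_pull lets a downstream node drain its PendingNetMessage queue. The caller POSTs a body containing its requester UUID, signed with its private key and sent in the X-Signature header; the view verifies the signature against the stored public key, then returns each queued payload re-signed by the local node, deleting stale and delivered entries. A minimal downstream-node sketch (the endpoint path is a placeholder and the client code is an assumption; only the signature scheme mirrors the view):

    import base64
    import json

    import requests
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import padding

    def pull_messages(base_url: str, node_uuid: str, private_key_pem: bytes) -> list[dict]:
        # Sign the raw request body exactly as the view verifies it:
        # RSA PKCS#1 v1.5 + SHA-256, base64-encoded into the X-Signature header.
        body = json.dumps({"requester": node_uuid}).encode()
        key = serialization.load_pem_private_key(private_key_pem, password=None)
        signature = base64.b64encode(key.sign(body, padding.PKCS1v15(), hashes.SHA256()))
        response = requests.post(
            f"{base_url}/nodes/net-message/pull/",  # placeholder path
            data=body,
            headers={"X-Signature": signature.decode(), "Content-Type": "application/json"},
            timeout=10,
        )
        response.raise_for_status()
        return response.json().get("messages", [])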