arthexis 0.1.16__py3-none-any.whl → 0.1.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/METADATA +95 -41
- arthexis-0.1.28.dist-info/RECORD +112 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +21 -30
- config/settings_helpers.py +176 -1
- config/urls.py +69 -1
- core/admin.py +805 -473
- core/apps.py +6 -8
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/celery_utils.py +73 -0
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1825 -218
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +285 -4
- core/tasks.py +439 -138
- core/test_system_info.py +43 -5
- core/tests.py +516 -18
- core/user_data.py +94 -21
- core/views.py +348 -186
- nodes/admin.py +904 -67
- nodes/apps.py +12 -1
- nodes/feature_checks.py +30 -0
- nodes/models.py +800 -127
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +98 -3
- nodes/tests.py +1381 -152
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1382 -152
- ocpp/admin.py +1970 -152
- ocpp/consumers.py +839 -34
- ocpp/models.py +968 -17
- ocpp/network.py +398 -0
- ocpp/store.py +411 -43
- ocpp/tasks.py +261 -3
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +194 -6
- ocpp/tests.py +1918 -87
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +8 -3
- ocpp/views.py +700 -53
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +28 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +86 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +1050 -126
- pages/urls.py +14 -2
- pages/utils.py +70 -0
- pages/views.py +622 -56
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/top_level.txt +0 -0
nodes/models.py
CHANGED
@@ -4,6 +4,8 @@ from collections.abc import Iterable
 from copy import deepcopy
 from dataclasses import dataclass
 from django.db import models
+from django.apps import apps
+from django.db.models import Q
 from django.db.utils import DatabaseError
 from django.db.models.signals import post_delete
 from django.dispatch import Signal, receiver
@@ -13,19 +15,25 @@ from core.fields import SigilLongAutoField, SigilShortAutoField
 import re
 import json
 import base64
+import ipaddress
 from django.utils import timezone
 from django.utils.text import slugify
 from django.conf import settings
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone as datetime_timezone
 import uuid
 import os
-import shutil
 import socket
 import stat
 import subprocess
+import shutil
 from pathlib import Path
+from urllib.parse import urlparse, urlunsplit
 from utils import revision
 from core.notifications import notify_async
+from core.celery_utils import (
+    normalize_periodic_task_name,
+    periodic_task_name_variants,
+)
 from django.core.exceptions import ValidationError
 from cryptography.hazmat.primitives.asymmetric import rsa
 from cryptography.hazmat.primitives import serialization, hashes
@@ -41,6 +49,9 @@ import logging
 logger = logging.getLogger(__name__)
 
 
+ROLE_RENAMES: dict[str, str] = {"Constellation": "Watchtower"}
+
+
 class NodeRoleManager(models.Manager):
     def get_by_natural_key(self, name: str):
         return self.get(name=name)
@@ -143,8 +154,6 @@ class NodeFeature(Entity):
             return False
         if node.features.filter(pk=self.pk).exists():
             return True
-        if self.slug == "gway-runner":
-            return Node._has_gway_runner()
         if self.slug == "gui-toast":
             from core.notifications import supports_gui_toast
 
@@ -188,8 +197,10 @@ class Node(Entity):
 
     DEFAULT_BADGE_COLOR = "#28a745"
     ROLE_BADGE_COLORS = {
-        "
+        "Watchtower": "#daa520", # goldenrod
+        "Constellation": "#daa520", # legacy alias
         "Control": "#673ab7", # deep purple
+        "Interface": "#0dcaf0", # cyan
    }
 
     class Relation(models.TextChoices):
@@ -199,9 +210,20 @@ class Node(Entity):
         SELF = "SELF", "Self"
 
     hostname = models.CharField(max_length=100)
-
+    network_hostname = models.CharField(max_length=253, blank=True)
+    ipv4_address = models.GenericIPAddressField(
+        protocol="IPv4", blank=True, null=True
+    )
+    ipv6_address = models.GenericIPAddressField(
+        protocol="IPv6", blank=True, null=True
+    )
+    address = models.GenericIPAddressField(blank=True, null=True)
     mac_address = models.CharField(max_length=17, unique=True, null=True, blank=True)
-    port = models.PositiveIntegerField(default=
+    port = models.PositiveIntegerField(default=8888)
+    message_queue_length = models.PositiveSmallIntegerField(
+        default=10,
+        help_text="Maximum queued NetMessages to retain for this peer.",
+    )
     badge_color = models.CharField(max_length=7, default=DEFAULT_BADGE_COLOR)
     role = models.ForeignKey(NodeRole, on_delete=models.SET_NULL, null=True, blank=True)
     current_relation = models.CharField(
@@ -241,20 +263,246 @@ class Node(Entity):
     RPI_CAMERA_DEVICE = Path("/dev/video0")
     RPI_CAMERA_BINARIES = ("rpicam-hello", "rpicam-still", "rpicam-vid")
     AP_ROUTER_SSID = "gelectriic-ap"
+    AUDIO_CAPTURE_PCM_PATH = Path("/proc/asound/pcm")
     NMCLI_TIMEOUT = 5
-    GWAY_RUNNER_COMMAND = "gway"
-    GWAY_RUNNER_CANDIDATES = ("~/.local/bin/gway", "/usr/local/bin/gway")
     AUTO_MANAGED_FEATURES = set(FEATURE_LOCK_MAP.keys()) | {
         "gui-toast",
         "rpi-camera",
         "ap-router",
-        "gway-runner",
     }
     MANUAL_FEATURE_SLUGS = {"clipboard-poll", "screenshot-poll", "audio-capture"}
 
     def __str__(self) -> str: # pragma: no cover - simple representation
         return f"{self.hostname}:{self.port}"
 
+    @staticmethod
+    def _ip_preference(ip_value: str) -> tuple[int, str]:
+        """Return a sort key favouring globally routable addresses."""
+
+        try:
+            parsed = ipaddress.ip_address(ip_value)
+        except ValueError:
+            return (3, ip_value)
+
+        if parsed.is_global:
+            return (0, ip_value)
+
+        if parsed.is_loopback or parsed.is_link_local:
+            return (2, ip_value)
+
+        if parsed.is_private:
+            return (2, ip_value)
+
+        return (1, ip_value)
+
+    @classmethod
+    def _select_preferred_ip(cls, addresses: Iterable[str]) -> str | None:
+        """Return the preferred IP from ``addresses`` when available."""
+
+        best: tuple[int, str] | None = None
+        for candidate in addresses:
+            candidate = (candidate or "").strip()
+            if not candidate:
+                continue
+            score = cls._ip_preference(candidate)
+            if best is None or score < best:
+                best = score
+        return best[1] if best else None
+
+    @classmethod
+    def _resolve_ip_addresses(
+        cls, *hosts: str, include_ipv4: bool = True, include_ipv6: bool = True
+    ) -> tuple[list[str], list[str]]:
+        """Resolve ``hosts`` into IPv4 and IPv6 address lists."""
+
+        ipv4: list[str] = []
+        ipv6: list[str] = []
+
+        for host in hosts:
+            host = (host or "").strip()
+            if not host:
+                continue
+            try:
+                info = socket.getaddrinfo(
+                    host,
+                    None,
+                    socket.AF_UNSPEC,
+                    socket.SOCK_STREAM,
+                )
+            except OSError:
+                continue
+            for family, _, _, _, sockaddr in info:
+                if family == socket.AF_INET and include_ipv4:
+                    value = sockaddr[0]
+                    if value not in ipv4:
+                        ipv4.append(value)
+                elif family == socket.AF_INET6 and include_ipv6:
+                    value = sockaddr[0]
+                    if value not in ipv6:
+                        ipv6.append(value)
+
+        return ipv4, ipv6
+
+    def get_remote_host_candidates(self) -> list[str]:
+        """Return host strings that may reach this node."""
+
+        values: list[str] = []
+        for attr in (
+            "network_hostname",
+            "hostname",
+            "ipv6_address",
+            "ipv4_address",
+            "address",
+            "public_endpoint",
+        ):
+            value = getattr(self, attr, "") or ""
+            value = value.strip()
+            if value and value not in values:
+                values.append(value)
+
+        resolved_ipv6: list[str] = []
+        resolved_ipv4: list[str] = []
+        for host in list(values):
+            if host.startswith("http://") or host.startswith("https://"):
+                continue
+            try:
+                ipaddress.ip_address(host)
+            except ValueError:
+                ipv4, ipv6 = self._resolve_ip_addresses(host)
+                for candidate in ipv6:
+                    if candidate not in values and candidate not in resolved_ipv6:
+                        resolved_ipv6.append(candidate)
+                for candidate in ipv4:
+                    if candidate not in values and candidate not in resolved_ipv4:
+                        resolved_ipv4.append(candidate)
+        values.extend(resolved_ipv6)
+        values.extend(resolved_ipv4)
+        return values
+
+    def get_primary_contact(self) -> str:
+        """Return the first reachable host for this node."""
+
+        for host in self.get_remote_host_candidates():
+            if host:
+                return host
+        return ""
+
+    def get_best_ip(self) -> str:
+        """Return the preferred IP address for this node if known."""
+
+        candidates: list[str] = []
+        for value in (
+            getattr(self, "address", "") or "",
+            getattr(self, "ipv4_address", "") or "",
+            getattr(self, "ipv6_address", "") or "",
+        ):
+            value = value.strip()
+            if not value:
+                continue
+            try:
+                ipaddress.ip_address(value)
+            except ValueError:
+                continue
+            candidates.append(value)
+        if not candidates:
+            return ""
+        selected = self._select_preferred_ip(candidates)
+        return selected or ""
+
+    def iter_remote_urls(self, path: str):
+        """Yield potential remote URLs for ``path`` on this node."""
+
+        host_candidates = self.get_remote_host_candidates()
+        default_port = self.port or 8888
+        normalized_path = path if path.startswith("/") else f"/{path}"
+        seen: set[str] = set()
+
+        for host in host_candidates:
+            host = host.strip()
+            if not host:
+                continue
+            base_path = ""
+            formatted_host = host
+            port_override: int | None = None
+
+            if "://" in host:
+                parsed = urlparse(host)
+                netloc = parsed.netloc or parsed.path
+                base_path = (parsed.path or "").rstrip("/")
+                combined_path = (
+                    f"{base_path}{normalized_path}" if base_path else normalized_path
+                )
+                primary = urlunsplit((parsed.scheme, netloc, combined_path, "", ""))
+                if primary not in seen:
+                    seen.add(primary)
+                    yield primary
+                if parsed.scheme == "https":
+                    fallback = urlunsplit(("http", netloc, combined_path, "", ""))
+                    if fallback not in seen:
+                        seen.add(fallback)
+                        yield fallback
+                elif parsed.scheme == "http":
+                    alternate = urlunsplit(("https", netloc, combined_path, "", ""))
+                    if alternate not in seen:
+                        seen.add(alternate)
+                        yield alternate
+                continue
+
+            if host.startswith("[") and "]" in host:
+                end = host.index("]")
+                core_host = host[1:end]
+                remainder = host[end + 1 :]
+                if remainder.startswith(":"):
+                    remainder = remainder[1:]
+                    port_part, sep, path_tail = remainder.partition("/")
+                    if port_part:
+                        try:
+                            port_override = int(port_part)
+                        except ValueError:
+                            port_override = None
+                    if sep:
+                        base_path = f"/{path_tail}".rstrip("/")
+                elif "/" in remainder:
+                    _, _, path_tail = remainder.partition("/")
+                    base_path = f"/{path_tail}".rstrip("/")
+                formatted_host = f"[{core_host}]"
+            else:
+                if "/" in host:
+                    host_only, _, path_tail = host.partition("/")
+                    formatted_host = host_only or host
+                    base_path = f"/{path_tail}".rstrip("/")
+                try:
+                    ip_obj = ipaddress.ip_address(formatted_host)
+                except ValueError:
+                    parts = formatted_host.rsplit(":", 1)
+                    if len(parts) == 2 and parts[1].isdigit():
+                        formatted_host = parts[0]
+                        port_override = int(parts[1])
+                    try:
+                        ip_obj = ipaddress.ip_address(formatted_host)
+                    except ValueError:
+                        ip_obj = None
+                else:
+                    if ip_obj.version == 6 and not formatted_host.startswith("["):
+                        formatted_host = f"[{formatted_host}]"
+
+            effective_port = port_override if port_override is not None else default_port
+            combined_path = f"{base_path}{normalized_path}" if base_path else normalized_path
+
+            for scheme, scheme_default_port in (("https", 443), ("http", 80)):
+                base = f"{scheme}://{formatted_host}"
+                if effective_port and (
+                    port_override is not None or effective_port != scheme_default_port
+                ):
+                    explicit = f"{base}:{effective_port}{combined_path}"
+                    if explicit not in seen:
+                        seen.add(explicit)
+                        yield explicit
+                candidate = f"{base}{combined_path}"
+                if candidate not in seen:
+                    seen.add(candidate)
+                    yield candidate
+
     @staticmethod
     def get_current_mac() -> str:
         """Return the MAC address of the current host."""
@@ -285,7 +533,14 @@ class Node(Entity):
         """Return the node representing the current host if it exists."""
         mac = cls.get_current_mac()
         try:
-
+            node = cls.objects.filter(mac_address__iexact=mac).first()
+            if node:
+                return node
+            return (
+                cls.objects.filter(current_relation=cls.Relation.SELF)
+                .filter(Q(mac_address__isnull=True) | Q(mac_address=""))
+                .first()
+            )
         except DatabaseError:
             logger.debug("nodes.Node.get_local skipped: database unavailable", exc_info=True)
             return None
@@ -293,25 +548,87 @@ class Node(Entity):
     @classmethod
     def register_current(cls):
         """Create or update the :class:`Node` entry for this host."""
-
+        hostname_override = (
+            os.environ.get("NODE_HOSTNAME")
+            or os.environ.get("HOSTNAME")
+            or ""
+        )
+        hostname_override = hostname_override.strip()
+        hostname = hostname_override or socket.gethostname()
+
+        network_hostname = os.environ.get("NODE_PUBLIC_HOSTNAME", "").strip()
+        if not network_hostname:
+            fqdn = socket.getfqdn(hostname)
+            if fqdn and "." in fqdn:
+                network_hostname = fqdn
+
+        ipv4_override = os.environ.get("NODE_PUBLIC_IPV4", "").strip()
+        ipv6_override = os.environ.get("NODE_PUBLIC_IPV6", "").strip()
+
+        ipv4_candidates: list[str] = []
+        ipv6_candidates: list[str] = []
+
+        for override, version in ((ipv4_override, 4), (ipv6_override, 6)):
+            override = override.strip()
+            if not override:
+                continue
+            try:
+                parsed = ipaddress.ip_address(override)
+            except ValueError:
+                continue
+            if parsed.version == version:
+                if version == 4 and override not in ipv4_candidates:
+                    ipv4_candidates.append(override)
+                elif version == 6 and override not in ipv6_candidates:
+                    ipv6_candidates.append(override)
+
+        resolve_hosts: list[str] = []
+        for value in (network_hostname, hostname_override, hostname):
+            value = (value or "").strip()
+            if value and value not in resolve_hosts:
+                resolve_hosts.append(value)
+
+        resolved_ipv4, resolved_ipv6 = cls._resolve_ip_addresses(*resolve_hosts)
+        for ip_value in resolved_ipv4:
+            if ip_value not in ipv4_candidates:
+                ipv4_candidates.append(ip_value)
+        for ip_value in resolved_ipv6:
+            if ip_value not in ipv6_candidates:
+                ipv6_candidates.append(ip_value)
+
         try:
-
+            direct_address = socket.gethostbyname(hostname)
         except OSError:
-
-
+            direct_address = ""
+
+        if direct_address and direct_address not in ipv4_candidates:
+            ipv4_candidates.append(direct_address)
+
+        ipv4_address = cls._select_preferred_ip(ipv4_candidates)
+        ipv6_address = cls._select_preferred_ip(ipv6_candidates)
+
+        preferred_contact = ipv4_address or ipv6_address or direct_address or "127.0.0.1"
+        port = int(os.environ.get("PORT", 8888))
         base_path = str(settings.BASE_DIR)
         ver_path = Path(settings.BASE_DIR) / "VERSION"
         installed_version = ver_path.read_text().strip() if ver_path.exists() else ""
         rev_value = revision.get_revision()
         installed_revision = rev_value if rev_value else ""
         mac = cls.get_current_mac()
-
+        endpoint_override = os.environ.get("NODE_PUBLIC_ENDPOINT", "").strip()
+        slug_source = endpoint_override or hostname
+        slug = slugify(slug_source)
+        if not slug:
+            slug = cls._generate_unique_public_endpoint(hostname or mac)
         node = cls.objects.filter(mac_address=mac).first()
         if not node:
             node = cls.objects.filter(public_endpoint=slug).first()
         defaults = {
             "hostname": hostname,
-            "
+            "network_hostname": network_hostname,
+            "ipv4_address": ipv4_address,
+            "ipv6_address": ipv6_address,
+            "address": preferred_contact,
             "port": port,
             "base_path": base_path,
             "installed_version": installed_version,
@@ -322,6 +639,7 @@ class Node(Entity):
         }
         role_lock = Path(settings.BASE_DIR) / "locks" / "role.lck"
         role_name = role_lock.read_text().strip() if role_lock.exists() else "Terminal"
+        role_name = ROLE_RENAMES.get(role_name, role_name)
         desired_role = NodeRole.objects.filter(name=role_name).first()
 
         if node:
@@ -388,7 +706,10 @@ class Node(Entity):
 
         payload = {
             "hostname": self.hostname,
+            "network_hostname": self.network_hostname,
             "address": self.address,
+            "ipv4_address": self.ipv4_address,
+            "ipv6_address": self.ipv6_address,
             "port": self.port,
             "mac_address": self.mac_address,
             "public_key": self.public_key,
@@ -405,17 +726,18 @@ class Node(Entity):
 
         peers = Node.objects.exclude(pk=self.pk)
         for peer in peers:
-            host_candidates
-
-            host_candidates.append(peer.address)
-            if peer.hostname and peer.hostname not in host_candidates:
-                host_candidates.append(peer.hostname)
-            port = peer.port or 8000
+            host_candidates = peer.get_remote_host_candidates()
+            port = peer.port or 8888
             urls: list[str] = []
             for host in host_candidates:
                 host = host.strip()
                 if not host:
                     continue
+                if host.startswith("http://") or host.startswith("https://"):
+                    normalized = host.rstrip("/")
+                    if normalized not in urls:
+                        urls.append(normalized)
+                    continue
                 if ":" in host and not host.startswith("["):
                     host = f"[{host}]"
                 http_url = (
@@ -461,7 +783,24 @@ class Node(Entity):
         security_dir.mkdir(parents=True, exist_ok=True)
         priv_path = security_dir / f"{self.public_endpoint}"
         pub_path = security_dir / f"{self.public_endpoint}.pub"
-
+        regenerate = not priv_path.exists() or not pub_path.exists()
+        if not regenerate:
+            key_max_age = getattr(settings, "NODE_KEY_MAX_AGE", timedelta(days=90))
+            if key_max_age is not None:
+                try:
+                    priv_mtime = datetime.fromtimestamp(
+                        priv_path.stat().st_mtime, tz=datetime_timezone.utc
+                    )
+                    pub_mtime = datetime.fromtimestamp(
+                        pub_path.stat().st_mtime, tz=datetime_timezone.utc
+                    )
+                except OSError:
+                    regenerate = True
+                else:
+                    cutoff = timezone.now() - key_max_age
+                    if priv_mtime < cutoff or pub_mtime < cutoff:
+                        regenerate = True
+        if regenerate:
             private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
             private_bytes = private_key.private_bytes(
                 encoding=serialization.Encoding.PEM,
@@ -474,16 +813,45 @@ class Node(Entity):
             )
             priv_path.write_bytes(private_bytes)
             pub_path.write_bytes(public_bytes)
-
-            self.
+            public_text = public_bytes.decode()
+            if self.public_key != public_text:
+                self.public_key = public_text
+                self.save(update_fields=["public_key"])
         elif not self.public_key:
             self.public_key = pub_path.read_text()
             self.save(update_fields=["public_key"])
 
+    def get_private_key(self):
+        """Return the private key for this node if available."""
+
+        if not self.public_endpoint:
+            return None
+        try:
+            self.ensure_keys()
+        except Exception:
+            return None
+        priv_path = (
+            Path(self.base_path or settings.BASE_DIR)
+            / "security"
+            / f"{self.public_endpoint}"
+        )
+        try:
+            return serialization.load_pem_private_key(
+                priv_path.read_bytes(), password=None
+            )
+        except Exception:
+            return None
+
     @property
     def is_local(self):
         """Determine if this node represents the current host."""
-
+        current_mac = self.get_current_mac()
+        stored_mac = (self.mac_address or "").strip()
+        if stored_mac:
+            normalized_stored = stored_mac.replace("-", ":").lower()
+            normalized_current = current_mac.replace("-", ":").lower()
+            return normalized_stored == normalized_current
+        return self.current_relation == self.Relation.SELF
 
     @classmethod
     def _generate_unique_public_endpoint(
@@ -563,13 +931,41 @@ class Node(Entity):
         if self.public_endpoint != endpoint_value:
             self.public_endpoint = endpoint_value
             include_update_field("public_endpoint")
+        is_new = self.pk is None
         super().save(*args, **kwargs)
         if self.pk:
+            if is_new:
+                self._apply_role_manual_features()
             self.refresh_features()
 
     def has_feature(self, slug: str) -> bool:
         return self.features.filter(slug=slug).exists()
 
+    def _apply_role_manual_features(self) -> None:
+        """Enable manual features configured as defaults for this node's role."""
+
+        if not self.role_id:
+            return
+
+        role_features = self.role.features.filter(
+            slug__in=self.MANUAL_FEATURE_SLUGS
+        ).values_list("slug", flat=True)
+        desired = set(role_features)
+        if not desired:
+            return
+
+        existing = set(
+            self.features.filter(slug__in=desired).values_list("slug", flat=True)
+        )
+        missing = desired - existing
+        if not missing:
+            return
+
+        for feature in NodeFeature.objects.filter(slug__in=missing):
+            NodeFeatureAssignment.objects.update_or_create(
+                node=self, feature=feature
+            )
+
     @classmethod
     def _has_rpi_camera(cls) -> bool:
         """Return ``True`` when the Raspberry Pi camera stack is available."""
@@ -604,6 +1000,29 @@ class Node(Entity):
             return False
         return True
 
+    @classmethod
+    def _has_audio_capture_device(cls) -> bool:
+        """Return ``True`` when an audio capture device is available."""
+
+        pcm_path = cls.AUDIO_CAPTURE_PCM_PATH
+        try:
+            contents = pcm_path.read_text(errors="ignore")
+        except OSError:
+            return False
+        for line in contents.splitlines():
+            candidate = line.strip()
+            if not candidate:
+                continue
+            lower_candidate = candidate.lower()
+            if "capture" not in lower_candidate:
+                continue
+            match = re.search(r"capture\s+(\d+)", lower_candidate)
+            if not match:
+                continue
+            if int(match.group(1)) > 0:
+                return True
+        return False
+
     @classmethod
     def _hosts_gelectriic_ap(cls) -> bool:
         """Return ``True`` when the node is hosting the gelectriic access point."""
@@ -671,21 +1090,6 @@ class Node(Entity):
             return True
         return False
 
-    @classmethod
-    def _find_gway_runner_command(cls) -> str | None:
-        command = shutil.which(cls.GWAY_RUNNER_COMMAND)
-        if command:
-            return command
-        for candidate in cls.GWAY_RUNNER_CANDIDATES:
-            expanded = os.path.expanduser(candidate)
-            if os.path.isfile(expanded) and os.access(expanded, os.X_OK):
-                return expanded
-        return None
-
-    @classmethod
-    def _has_gway_runner(cls) -> bool:
-        return cls._find_gway_runner_command() is not None
-
     def refresh_features(self):
         if not self.pk:
             return
@@ -700,8 +1104,6 @@ class Node(Entity):
                 detected_slugs.add(slug)
         if self._has_rpi_camera():
             detected_slugs.add("rpi-camera")
-        if self._has_gway_runner():
-            detected_slugs.add("gway-runner")
         if self._hosts_gelectriic_ap():
             detected_slugs.add("ap-router")
         try:
@@ -754,14 +1156,18 @@ class Node(Entity):
         self._sync_screenshot_task(screenshot_enabled)
         self._sync_landing_lead_task(celery_enabled)
         self._sync_ocpp_session_report_task(celery_enabled)
+        self._sync_upstream_poll_task(celery_enabled)
 
     def _sync_clipboard_task(self, enabled: bool):
        from django_celery_beat.models import IntervalSchedule, PeriodicTask
 
-
+        raw_task_name = f"poll_clipboard_node_{self.pk}"
        if enabled:
            schedule, _ = IntervalSchedule.objects.get_or_create(
-                every=
+                every=10, period=IntervalSchedule.SECONDS
+            )
+            task_name = normalize_periodic_task_name(
+                PeriodicTask.objects, raw_task_name
            )
            PeriodicTask.objects.update_or_create(
                name=task_name,
@@ -771,17 +1177,22 @@ class Node(Entity):
                 },
             )
         else:
-            PeriodicTask.objects.filter(
+            PeriodicTask.objects.filter(
+                name__in=periodic_task_name_variants(raw_task_name)
+            ).delete()
 
     def _sync_screenshot_task(self, enabled: bool):
         from django_celery_beat.models import IntervalSchedule, PeriodicTask
         import json
 
-
+        raw_task_name = f"capture_screenshot_node_{self.pk}"
         if enabled:
             schedule, _ = IntervalSchedule.objects.get_or_create(
                 every=1, period=IntervalSchedule.MINUTES
             )
+            task_name = normalize_periodic_task_name(
+                PeriodicTask.objects, raw_task_name
+            )
             PeriodicTask.objects.update_or_create(
                 name=task_name,
                 defaults={
@@ -797,7 +1208,9 @@ class Node(Entity):
                 },
             )
         else:
-            PeriodicTask.objects.filter(
+            PeriodicTask.objects.filter(
+                name__in=periodic_task_name_variants(raw_task_name)
+            ).delete()
 
     def _sync_landing_lead_task(self, enabled: bool):
         if not self.is_local:
@@ -805,7 +1218,10 @@ class Node(Entity):
 
         from django_celery_beat.models import CrontabSchedule, PeriodicTask
 
-
+        raw_task_name = "pages_purge_landing_leads"
+        task_name = normalize_periodic_task_name(
+            PeriodicTask.objects, raw_task_name
+        )
         if enabled:
             schedule, _ = CrontabSchedule.objects.get_or_create(
                 minute="0",
@@ -824,19 +1240,26 @@ class Node(Entity):
                 },
             )
         else:
-            PeriodicTask.objects.filter(
+            PeriodicTask.objects.filter(
+                name__in=periodic_task_name_variants(raw_task_name)
+            ).delete()
 
     def _sync_ocpp_session_report_task(self, celery_enabled: bool):
         from django_celery_beat.models import CrontabSchedule, PeriodicTask
         from django.db.utils import OperationalError, ProgrammingError
 
-
+        raw_task_name = "ocpp_send_daily_session_report"
+        task_name = normalize_periodic_task_name(
+            PeriodicTask.objects, raw_task_name
+        )
 
         if not self.is_local:
             return
 
         if not celery_enabled or not mailer.can_send_email():
-            PeriodicTask.objects.filter(
+            PeriodicTask.objects.filter(
+                name__in=periodic_task_name_variants(raw_task_name)
+            ).delete()
             return
 
         try:
@@ -859,6 +1282,33 @@ class Node(Entity):
         except (OperationalError, ProgrammingError):
             logger.debug("Skipping OCPP session report task sync; tables not ready")
 
+    def _sync_upstream_poll_task(self, celery_enabled: bool):
+        if not self.is_local:
+            return
+
+        from django_celery_beat.models import IntervalSchedule, PeriodicTask
+
+        raw_task_name = "nodes_poll_upstream_messages"
+        task_name = normalize_periodic_task_name(
+            PeriodicTask.objects, raw_task_name
+        )
+        if celery_enabled:
+            schedule, _ = IntervalSchedule.objects.get_or_create(
+                every=5, period=IntervalSchedule.MINUTES
+            )
+            PeriodicTask.objects.update_or_create(
+                name=task_name,
+                defaults={
+                    "interval": schedule,
+                    "task": "nodes.tasks.poll_unreachable_upstream",
+                    "enabled": True,
+                },
+            )
+        else:
+            PeriodicTask.objects.filter(
+                name__in=periodic_task_name_variants(raw_task_name)
+            ).delete()
+
     def send_mail(
         self,
         subject: str,
@@ -884,6 +1334,10 @@ class Node(Entity):
             **kwargs,
         )
 
+    class Meta:
+        verbose_name = "Node"
+        verbose_name_plural = "Nodes"
+
 
 node_information_updated = Signal()
 
@@ -1024,8 +1478,8 @@ class NodeManager(Profile):
     )
 
     class Meta:
-        verbose_name = "Node
-        verbose_name_plural = "Node
+        verbose_name = "Node Profile"
+        verbose_name_plural = "Node Profiles"
 
     def __str__(self) -> str:
         owner = self.owner_display()
@@ -1336,7 +1790,6 @@ class EmailOutbox(Profile):
             return owner
         return str(self.node) if self.node_id else ""
 
-
 class NetMessage(Entity):
     """Message propagated across nodes."""
 
@@ -1410,7 +1863,6 @@ class NetMessage(Entity):
     propagated_to = models.ManyToManyField(
         Node, blank=True, related_name="received_net_messages"
     )
-    confirmed_peers = models.JSONField(default=dict, blank=True)
     created = models.DateTimeField(auto_now_add=True)
     complete = models.BooleanField(default=False, editable=False)
 
@@ -1447,9 +1899,44 @@ class NetMessage(Entity):
         )
         if normalized_attachments:
             msg.apply_attachments(normalized_attachments)
+        msg.notify_slack()
         msg.propagate(seen=seen or [])
         return msg
 
+    def notify_slack(self):
+        """Send this Net Message to any Slack chatbots owned by the origin node."""
+
+        try:
+            SlackBotProfile = apps.get_model("teams", "SlackBotProfile")
+        except (LookupError, ValueError):
+            return
+        if SlackBotProfile is None:
+            return
+
+        origin = self.node_origin
+        if origin is None:
+            origin = Node.get_local()
+        if not origin:
+            return
+
+        try:
+            bots = SlackBotProfile.objects.filter(node=origin, is_enabled=True)
+        except Exception: # pragma: no cover - database errors surfaced in logs
+            logger.exception(
+                "Failed to load Slack chatbots for node %s", getattr(origin, "pk", None)
+            )
+            return
+
+        for bot in bots:
+            try:
+                bot.broadcast_net_message(self)
+            except Exception: # pragma: no cover - network errors logged for diagnosis
+                logger.exception(
+                    "Slack bot %s failed to broadcast NetMessage %s",
+                    getattr(bot, "pk", None),
+                    getattr(self, "pk", None),
+                )
+
     @staticmethod
     def normalize_attachments(
         attachments: object,
@@ -1479,6 +1966,7 @@ class NetMessage(Entity):
         payload = attachments if attachments is not None else self.attachments or []
         if not payload:
             return
+
         try:
             objects = list(
                 serializers.deserialize(
@@ -1498,6 +1986,193 @@ class NetMessage(Entity):
                 self.pk,
             )
 
+    def _build_payload(
+        self,
+        *,
+        sender_id: str | None,
+        origin_uuid: str | None,
+        reach_name: str | None,
+        seen: list[str],
+    ) -> dict[str, object]:
+        payload: dict[str, object] = {
+            "uuid": str(self.uuid),
+            "subject": self.subject,
+            "body": self.body,
+            "seen": list(seen),
+            "reach": reach_name,
+            "sender": sender_id,
+            "origin": origin_uuid,
+        }
+        if self.attachments:
+            payload["attachments"] = self.attachments
+        if self.filter_node:
+            payload["filter_node"] = str(self.filter_node.uuid)
+        if self.filter_node_feature:
+            payload["filter_node_feature"] = self.filter_node_feature.slug
+        if self.filter_node_role:
+            payload["filter_node_role"] = self.filter_node_role.name
+        if self.filter_current_relation:
+            payload["filter_current_relation"] = self.filter_current_relation
+        if self.filter_installed_version:
+            payload["filter_installed_version"] = self.filter_installed_version
+        if self.filter_installed_revision:
+            payload["filter_installed_revision"] = self.filter_installed_revision
+        return payload
+
+    @staticmethod
+    def _serialize_payload(payload: dict[str, object]) -> str:
+        return json.dumps(payload, separators=(",", ":"), sort_keys=True)
+
+    @staticmethod
+    def _sign_payload(payload_json: str, private_key) -> str | None:
+        if not private_key:
+            return None
+        try:
+            signature = private_key.sign(
+                payload_json.encode(),
+                padding.PKCS1v15(),
+                hashes.SHA256(),
+            )
+        except Exception:
+            return None
+        return base64.b64encode(signature).decode()
+
+    def queue_for_node(self, node: "Node", seen: list[str]) -> None:
+        """Queue this message for later delivery to ``node``."""
+
+        if node.current_relation != Node.Relation.DOWNSTREAM:
+            return
+
+        now = timezone.now()
+        expires_at = now + timedelta(hours=1)
+        normalized_seen = [str(value) for value in seen]
+        entry, created = PendingNetMessage.objects.get_or_create(
+            node=node,
+            message=self,
+            defaults={
+                "seen": normalized_seen,
+                "stale_at": expires_at,
+            },
+        )
+        if created:
+            entry.queued_at = now
+            entry.save(update_fields=["queued_at"])
+        else:
+            entry.seen = normalized_seen
+            entry.stale_at = expires_at
+            entry.queued_at = now
+            entry.save(update_fields=["seen", "stale_at", "queued_at"])
+        self._trim_queue(node)
+
+    def clear_queue_for_node(self, node: "Node") -> None:
+        PendingNetMessage.objects.filter(node=node, message=self).delete()
+
+    def _trim_queue(self, node: "Node") -> None:
+        limit = max(int(node.message_queue_length or 0), 0)
+        if limit == 0:
+            PendingNetMessage.objects.filter(node=node).delete()
+            return
+        qs = PendingNetMessage.objects.filter(node=node).order_by("-queued_at")
+        keep_ids = list(qs.values_list("pk", flat=True)[:limit])
+        if keep_ids:
+            PendingNetMessage.objects.filter(node=node).exclude(pk__in=keep_ids).delete()
+        else:
+            qs.delete()
+
+    @classmethod
+    def receive_payload(
+        cls,
+        data: dict[str, object],
+        *,
+        sender: "Node",
+    ) -> "NetMessage":
+        msg_uuid = data.get("uuid")
+        if not msg_uuid:
+            raise ValueError("uuid required")
+        subject = (data.get("subject") or "")[:64]
+        body = (data.get("body") or "")[:256]
+        attachments = cls.normalize_attachments(data.get("attachments"))
+        reach_name = data.get("reach")
+        reach_role = None
+        if reach_name:
+            reach_role = NodeRole.objects.filter(name=reach_name).first()
+        filter_node_uuid = data.get("filter_node")
+        filter_node = None
+        if filter_node_uuid:
+            filter_node = Node.objects.filter(uuid=filter_node_uuid).first()
+        filter_feature_slug = data.get("filter_node_feature")
+        filter_feature = None
+        if filter_feature_slug:
+            filter_feature = NodeFeature.objects.filter(slug=filter_feature_slug).first()
+        filter_role_name = data.get("filter_node_role")
+        filter_role = None
+        if filter_role_name:
+            filter_role = NodeRole.objects.filter(name=filter_role_name).first()
+        filter_relation_value = data.get("filter_current_relation")
+        filter_relation = ""
+        if filter_relation_value:
+            relation = Node.normalize_relation(filter_relation_value)
+            filter_relation = relation.value if relation else ""
+        filter_installed_version = (data.get("filter_installed_version") or "")[:20]
+        filter_installed_revision = (data.get("filter_installed_revision") or "")[:40]
+        seen_values = data.get("seen", [])
+        if not isinstance(seen_values, list):
+            seen_values = list(seen_values) # type: ignore[arg-type]
+        normalized_seen = [str(value) for value in seen_values if value is not None]
+        origin_id = data.get("origin")
+        origin_node = None
+        if origin_id:
+            origin_node = Node.objects.filter(uuid=origin_id).first()
+        if not origin_node:
+            origin_node = sender
+        msg, created = cls.objects.get_or_create(
+            uuid=msg_uuid,
+            defaults={
+                "subject": subject,
+                "body": body,
+                "reach": reach_role,
+                "node_origin": origin_node,
+                "attachments": attachments or None,
+                "filter_node": filter_node,
+                "filter_node_feature": filter_feature,
+                "filter_node_role": filter_role,
+                "filter_current_relation": filter_relation,
+                "filter_installed_version": filter_installed_version,
+                "filter_installed_revision": filter_installed_revision,
+            },
+        )
+        if not created:
+            msg.subject = subject
+            msg.body = body
+            update_fields = ["subject", "body"]
+            if reach_role and msg.reach_id != reach_role.id:
+                msg.reach = reach_role
+                update_fields.append("reach")
+            if msg.node_origin_id is None and origin_node:
+                msg.node_origin = origin_node
+                update_fields.append("node_origin")
+            if attachments and msg.attachments != attachments:
+                msg.attachments = attachments
+                update_fields.append("attachments")
+            field_updates = {
+                "filter_node": filter_node,
+                "filter_node_feature": filter_feature,
+                "filter_node_role": filter_role,
+                "filter_current_relation": filter_relation,
+                "filter_installed_version": filter_installed_version,
+                "filter_installed_revision": filter_installed_revision,
+            }
+            for field, value in field_updates.items():
+                if getattr(msg, field) != value:
+                    setattr(msg, field, value)
+                    update_fields.append(field)
+            if update_fields:
+                msg.save(update_fields=update_fields)
+        if attachments:
+            msg.apply_attachments(attachments)
+        msg.propagate(seen=normalized_seen)
+        return msg
+
     def propagate(self, seen: list[str] | None = None):
         from core.notifications import notify
         import random
@@ -1532,17 +2207,7 @@ class NetMessage(Entity):
         local_id = str(local.uuid)
         if local_id not in seen:
             seen.append(local_id)
-
-            Path(local.base_path or settings.BASE_DIR)
-            / "security"
-            / f"{local.public_endpoint}"
-        )
-        try:
-            private_key = serialization.load_pem_private_key(
-                priv_path.read_bytes(), password=None
-            )
-        except Exception:
-            private_key = None
+        private_key = local.get_private_key()
         for node_id in seen:
             node = Node.objects.filter(uuid=node_id).first()
             if node and (not local or node.pk != local.pk):
@@ -1592,11 +2257,18 @@ class NetMessage(Entity):
         reach_source = self.filter_node_role or self.reach
         reach_name = reach_source.name if reach_source else None
         role_map = {
+            "Interface": ["Interface", "Terminal"],
             "Terminal": ["Terminal"],
             "Control": ["Control", "Terminal"],
             "Satellite": ["Satellite", "Control", "Terminal"],
+            "Watchtower": [
+                "Watchtower",
+                "Satellite",
+                "Control",
+                "Terminal",
+            ],
             "Constellation": [
-                "
+                "Watchtower",
                 "Satellite",
                 "Control",
                 "Terminal",
@@ -1640,72 +2312,45 @@ class NetMessage(Entity):
         seen_list = seen.copy()
         selected_ids = [str(n.uuid) for n in selected]
         payload_seen = seen_list + selected_ids
-        confirmed_peers = dict(self.confirmed_peers or {})
-
         for node in selected:
-
-
-
-
-
-
-
-                "sender": local_id,
-                "origin": origin_uuid,
-            }
-            if self.attachments:
-                payload["attachments"] = self.attachments
-            if self.filter_node:
-                payload["filter_node"] = str(self.filter_node.uuid)
-            if self.filter_node_feature:
-                payload["filter_node_feature"] = self.filter_node_feature.slug
-            if self.filter_node_role:
-                payload["filter_node_role"] = self.filter_node_role.name
-            if self.filter_current_relation:
-                payload["filter_current_relation"] = self.filter_current_relation
-            if self.filter_installed_version:
-                payload["filter_installed_version"] = self.filter_installed_version
-            if self.filter_installed_revision:
-                payload["filter_installed_revision"] = self.filter_installed_revision
-            payload_json = json.dumps(payload, separators=(",", ":"), sort_keys=True)
+            payload = self._build_payload(
+                sender_id=local_id,
+                origin_uuid=origin_uuid,
+                reach_name=reach_name,
+                seen=payload_seen,
+            )
+            payload_json = self._serialize_payload(payload)
             headers = {"Content-Type": "application/json"}
-
+            signature = self._sign_payload(payload_json, private_key)
+            if signature:
+                headers["X-Signature"] = signature
+            success = False
+            for url in node.iter_remote_urls("/nodes/net-message/"):
                 try:
-
-
-
-
+                    response = requests.post(
+                        url,
+                        data=payload_json,
+                        headers=headers,
+                        timeout=1,
                    )
-
+                    success = bool(response.ok)
                 except Exception:
-
-
-
-
-
-
-
-
-
-
-
-
-                )
-                status_entry["status_code"] = getattr(response, "status_code", None)
-                if getattr(response, "ok", False):
-                    status_entry["status"] = "acknowledged"
-                else:
-                    status_entry["status"] = "failed"
-            except Exception:
-                status_entry["status"] = "error"
+                    logger.exception(
+                        "Failed to propagate NetMessage %s to node %s via %s",
+                        self.pk,
+                        node.pk,
+                        url,
+                    )
+                    continue
+                if success:
+                    break
+            if success:
+                self.clear_queue_for_node(node)
+            else:
+                self.queue_for_node(node, payload_seen)
             self.propagated_to.add(node)
-            confirmed_peers[str(node.uuid)] = status_entry
 
         save_fields: list[str] = []
-        if confirmed_peers != (self.confirmed_peers or {}):
-            self.confirmed_peers = confirmed_peers
-            save_fields.append("confirmed_peers")
-
         if total_known and self.propagated_to.count() >= total_known:
             self.complete = True
             save_fields.append("complete")
@@ -1714,6 +2359,34 @@ class NetMessage(Entity):
             self.save(update_fields=save_fields)
 
 
+class PendingNetMessage(models.Model):
+    """Queued :class:`NetMessage` awaiting delivery to a downstream node."""
+
+    node = models.ForeignKey(
+        Node, on_delete=models.CASCADE, related_name="pending_net_messages"
+    )
+    message = models.ForeignKey(
+        NetMessage,
+        on_delete=models.CASCADE,
+        related_name="pending_deliveries",
+    )
+    seen = models.JSONField(default=list)
+    queued_at = models.DateTimeField(auto_now_add=True)
+    stale_at = models.DateTimeField()
+
+    class Meta:
+        unique_together = ("node", "message")
+        ordering = ("queued_at",)
+        verbose_name = "Pending Net Message"
+        verbose_name_plural = "Pending Net Messages"
+
+    def __str__(self) -> str: # pragma: no cover - simple representation
+        return f"{self.message_id} → {self.node_id}"
+
+    @property
+    def is_stale(self) -> bool:
+        return self.stale_at <= timezone.now()
+
 class ContentSample(Entity):
     """Collected content such as text snippets or screenshots."""
 