ReticulumTelemetryHub 0.1.0__py3-none-any.whl → 0.143.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reticulum_telemetry_hub/api/__init__.py +23 -0
- reticulum_telemetry_hub/api/models.py +323 -0
- reticulum_telemetry_hub/api/service.py +836 -0
- reticulum_telemetry_hub/api/storage.py +528 -0
- reticulum_telemetry_hub/api/storage_base.py +156 -0
- reticulum_telemetry_hub/api/storage_models.py +118 -0
- reticulum_telemetry_hub/atak_cot/__init__.py +49 -0
- reticulum_telemetry_hub/atak_cot/base.py +277 -0
- reticulum_telemetry_hub/atak_cot/chat.py +506 -0
- reticulum_telemetry_hub/atak_cot/detail.py +235 -0
- reticulum_telemetry_hub/atak_cot/event.py +181 -0
- reticulum_telemetry_hub/atak_cot/pytak_client.py +569 -0
- reticulum_telemetry_hub/atak_cot/tak_connector.py +848 -0
- reticulum_telemetry_hub/config/__init__.py +25 -0
- reticulum_telemetry_hub/config/constants.py +7 -0
- reticulum_telemetry_hub/config/manager.py +515 -0
- reticulum_telemetry_hub/config/models.py +215 -0
- reticulum_telemetry_hub/embedded_lxmd/__init__.py +5 -0
- reticulum_telemetry_hub/embedded_lxmd/embedded.py +418 -0
- reticulum_telemetry_hub/internal_api/__init__.py +21 -0
- reticulum_telemetry_hub/internal_api/bus.py +344 -0
- reticulum_telemetry_hub/internal_api/core.py +690 -0
- reticulum_telemetry_hub/internal_api/v1/__init__.py +74 -0
- reticulum_telemetry_hub/internal_api/v1/enums.py +109 -0
- reticulum_telemetry_hub/internal_api/v1/manifest.json +8 -0
- reticulum_telemetry_hub/internal_api/v1/schemas.py +478 -0
- reticulum_telemetry_hub/internal_api/versioning.py +63 -0
- reticulum_telemetry_hub/lxmf_daemon/Handlers.py +122 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMF.py +252 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMPeer.py +898 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMRouter.py +4227 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMessage.py +1006 -0
- reticulum_telemetry_hub/lxmf_daemon/LXStamper.py +490 -0
- reticulum_telemetry_hub/lxmf_daemon/__init__.py +10 -0
- reticulum_telemetry_hub/lxmf_daemon/_version.py +1 -0
- reticulum_telemetry_hub/lxmf_daemon/lxmd.py +1655 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/fields/field_telemetry_stream.py +6 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/__init__.py +3 -0
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/appearance.py +19 -19
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/peer.py +17 -13
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/__init__.py +65 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/acceleration.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/ambient_light.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/angular_velocity.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/battery.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/connection_map.py +258 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/generic.py +841 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/gravity.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/humidity.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/information.py +42 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/location.py +110 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/lxmf_propagation.py +429 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/magnetic_field.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/physical_link.py +53 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/pressure.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/proximity.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/received.py +75 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/rns_transport.py +209 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor.py +65 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor_enum.py +27 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor_mapping.py +58 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/temperature.py +37 -0
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/sensors/time.py +36 -32
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/telemeter.py +26 -23
- reticulum_telemetry_hub/lxmf_telemetry/sampler.py +229 -0
- reticulum_telemetry_hub/lxmf_telemetry/telemeter_manager.py +409 -0
- reticulum_telemetry_hub/lxmf_telemetry/telemetry_controller.py +804 -0
- reticulum_telemetry_hub/northbound/__init__.py +5 -0
- reticulum_telemetry_hub/northbound/app.py +195 -0
- reticulum_telemetry_hub/northbound/auth.py +119 -0
- reticulum_telemetry_hub/northbound/gateway.py +310 -0
- reticulum_telemetry_hub/northbound/internal_adapter.py +302 -0
- reticulum_telemetry_hub/northbound/models.py +213 -0
- reticulum_telemetry_hub/northbound/routes_chat.py +123 -0
- reticulum_telemetry_hub/northbound/routes_files.py +119 -0
- reticulum_telemetry_hub/northbound/routes_rest.py +345 -0
- reticulum_telemetry_hub/northbound/routes_subscribers.py +150 -0
- reticulum_telemetry_hub/northbound/routes_topics.py +178 -0
- reticulum_telemetry_hub/northbound/routes_ws.py +107 -0
- reticulum_telemetry_hub/northbound/serializers.py +72 -0
- reticulum_telemetry_hub/northbound/services.py +373 -0
- reticulum_telemetry_hub/northbound/websocket.py +855 -0
- reticulum_telemetry_hub/reticulum_server/__main__.py +2237 -0
- reticulum_telemetry_hub/reticulum_server/command_manager.py +1268 -0
- reticulum_telemetry_hub/reticulum_server/command_text.py +399 -0
- reticulum_telemetry_hub/reticulum_server/constants.py +1 -0
- reticulum_telemetry_hub/reticulum_server/event_log.py +357 -0
- reticulum_telemetry_hub/reticulum_server/internal_adapter.py +358 -0
- reticulum_telemetry_hub/reticulum_server/outbound_queue.py +312 -0
- reticulum_telemetry_hub/reticulum_server/services.py +422 -0
- reticulumtelemetryhub-0.143.0.dist-info/METADATA +181 -0
- reticulumtelemetryhub-0.143.0.dist-info/RECORD +97 -0
- {reticulumtelemetryhub-0.1.0.dist-info → reticulumtelemetryhub-0.143.0.dist-info}/WHEEL +1 -1
- reticulumtelemetryhub-0.143.0.dist-info/licenses/LICENSE +277 -0
- lxmf_telemetry/model/fields/field_telemetry_stream.py +0 -7
- lxmf_telemetry/model/persistance/__init__.py +0 -3
- lxmf_telemetry/model/persistance/sensors/location.py +0 -69
- lxmf_telemetry/model/persistance/sensors/magnetic_field.py +0 -36
- lxmf_telemetry/model/persistance/sensors/sensor.py +0 -44
- lxmf_telemetry/model/persistance/sensors/sensor_enum.py +0 -24
- lxmf_telemetry/model/persistance/sensors/sensor_mapping.py +0 -9
- lxmf_telemetry/telemetry_controller.py +0 -124
- reticulum_server/main.py +0 -182
- reticulumtelemetryhub-0.1.0.dist-info/METADATA +0 -15
- reticulumtelemetryhub-0.1.0.dist-info/RECORD +0 -19
- {lxmf_telemetry → reticulum_telemetry_hub}/__init__.py +0 -0
- {lxmf_telemetry/model/persistance/sensors → reticulum_telemetry_hub/lxmf_telemetry}/__init__.py +0 -0
- {reticulum_server → reticulum_telemetry_hub/reticulum_server}/__init__.py +0 -0
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"""SQLAlchemy model for the Gravity sensor."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any, Optional
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import Float, ForeignKey
|
|
8
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
9
|
+
|
|
10
|
+
from .sensor import Sensor
|
|
11
|
+
from .sensor_enum import SID_GRAVITY
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Gravity(Sensor):
    """Persisted three-axis gravity-vector sensor (Sideband-compatible)."""

    __tablename__ = "Gravity"

    # Joined-table inheritance: shares its primary key with the Sensor row.
    id: Mapped[int] = mapped_column(ForeignKey("Sensor.id"), primary_key=True)
    x: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    y: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    z: Mapped[Optional[float]] = mapped_column(Float, nullable=True)

    __mapper_args__ = {
        "polymorphic_identity": SID_GRAVITY,
        "with_polymorphic": "*",
    }

    def __init__(
        self,
        stale_time: float | None = 1,
        data: Any | None = None,
        active: bool = False,
        synthesized: bool = False,
        last_update: float = 0,
        last_read: float = 0,
    ) -> None:
        """Initialise the base Sensor state and tag this row as gravity."""
        super().__init__(
            stale_time=stale_time,
            data=data,
            active=active,
            synthesized=synthesized,
            last_update=last_update,
            last_read=last_read,
        )
        self.sid = SID_GRAVITY

    def pack(self):  # type: ignore[override]
        """Return ``[x, y, z]``, or ``None`` when no axis has a value."""
        axes = (self.x, self.y, self.z)
        if all(axis is None for axis in axes):
            return None
        return list(axes)

    def unpack(self, packed: Any):  # type: ignore[override]
        """Load axis values from *packed*; return a dict view or ``None``."""
        if packed is None:
            self.x = self.y = self.z = None
            return None

        try:
            # Evaluate all three indices before assigning so a short or
            # non-indexable payload leaves the axes cleanly reset below.
            self.x, self.y, self.z = packed[0], packed[1], packed[2]
        except (IndexError, TypeError):
            self.x = self.y = self.z = None
            return None

        return {"x": self.x, "y": self.y, "z": self.z}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"""SQLAlchemy model for the Humidity sensor."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any, Optional
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import Float, ForeignKey
|
|
8
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
9
|
+
|
|
10
|
+
from .sensor import Sensor
|
|
11
|
+
from .sensor_enum import SID_HUMIDITY
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Humidity(Sensor):
    """Persisted relative-humidity sensor (Sideband-compatible)."""

    __tablename__ = "Humidity"

    # Joined-table inheritance: shares its primary key with the Sensor row.
    id: Mapped[int] = mapped_column(ForeignKey("Sensor.id"), primary_key=True)
    percent_relative: Mapped[Optional[float]] = mapped_column(Float, nullable=True)

    __mapper_args__ = {
        "polymorphic_identity": SID_HUMIDITY,
        "with_polymorphic": "*",
    }

    def __init__(self) -> None:
        """Initialise the base Sensor state and tag this row as humidity."""
        super().__init__(stale_time=5)
        self.sid = SID_HUMIDITY

    def pack(self):  # type: ignore[override]
        """Return the raw relative-humidity reading (may be ``None``)."""
        return self.percent_relative

    def unpack(self, packed: Any):  # type: ignore[override]
        """Store *packed* as the reading; return a dict view or ``None``."""
        self.percent_relative = packed
        if packed is None:
            return None
        return {"percent_relative": self.percent_relative}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"""SQLAlchemy model for the Information sensor."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import ForeignKey, Text
|
|
8
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
9
|
+
|
|
10
|
+
from .sensor import Sensor
|
|
11
|
+
from .sensor_enum import SID_INFORMATION
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Information(Sensor):
    """Persisted representation of Sideband's information sensor."""

    __tablename__ = "Information"

    # Joined-table inheritance: shares its primary key with the Sensor row.
    id: Mapped[int] = mapped_column(ForeignKey("Sensor.id"), primary_key=True)
    contents: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    __mapper_args__ = {
        "polymorphic_identity": SID_INFORMATION,
        "with_polymorphic": "*",
    }

    def __init__(self, contents: Optional[str] = "") -> None:
        """Initialise with optional free-text *contents* (falsy → '')."""
        super().__init__(stale_time=5)
        self.sid = SID_INFORMATION
        self.contents = contents if contents else ""

    def pack(self):  # type: ignore[override]
        """Return the contents as a string, or ``None`` when unset."""
        return None if self.contents is None else str(self.contents)

    def unpack(self, packed):  # type: ignore[override]
        """Store *packed* (stringified) as contents; dict view or ``None``."""
        if packed is None:
            self.contents = None
            return None
        self.contents = str(packed)
        return {"contents": self.contents}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
from sqlalchemy import Column
|
|
2
|
+
from reticulum_telemetry_hub.lxmf_telemetry.model.persistance.sensors.sensor import (
|
|
3
|
+
Sensor,
|
|
4
|
+
)
|
|
5
|
+
from .sensor_enum import SID_LOCATION
|
|
6
|
+
import struct
|
|
7
|
+
import RNS
|
|
8
|
+
from sqlalchemy import Integer, ForeignKey, Float, DateTime
|
|
9
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
10
|
+
from typing import Optional
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Location(Sensor):
    """GNSS position fix persisted as a Sensor row.

    ``pack``/``unpack`` mirror Sideband's location wire format: six
    fixed-point big-endian struct fields followed by a unix timestamp,
    so the exact rounding/scaling expressions below must not change.
    """

    __tablename__ = "Location"

    # Joined-table inheritance: shares its primary key with the Sensor row.
    id: Mapped[int] = mapped_column(ForeignKey("Sensor.id"), primary_key=True)
    latitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    longitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    altitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    speed: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    bearing: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    accuracy: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    last_update: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)

    def __init__(self) -> None:
        """Start with an empty (un-fixed) location; stale after 15s."""
        super().__init__(stale_time=15)
        self.sid = SID_LOCATION
        self.latitude = None
        self.longitude = None
        self.altitude = None
        self.speed = None
        self.bearing = None
        self.accuracy = None
        self.last_update = None

    def pack(self):
        """Serialize the fix to the list-of-structs wire format.

        Returns ``None`` (after logging via RNS) when any required field
        is unset or cannot be encoded.
        """
        try:
            latitude = self._require_float(self.latitude, "latitude")
            longitude = self._require_float(self.longitude, "longitude")
            altitude = self._normalize_altitude(self.altitude)
            speed = self._require_float(self.speed, "speed")
            bearing = self._require_float(self.bearing, "bearing")
            accuracy = self._require_float(self.accuracy, "accuracy")
            # Fixed-point encoding: lat/lon are signed degrees * 1e6;
            # altitude/speed/bearing/accuracy are unsigned values * 1e2.
            return [
                struct.pack("!i", int(round(latitude, 6) * 1e6)),
                struct.pack("!i", int(round(longitude, 6) * 1e6)),
                struct.pack("!I", int(round(altitude, 2) * 1e2)),
                struct.pack("!I", int(round(speed, 2) * 1e2)),
                struct.pack("!I", int(round(bearing, 2) * 1e2)),
                struct.pack("!H", int(round(accuracy, 2) * 1e2)),
                self._serialize_last_update(),
            ]
        except (KeyError, ValueError, struct.error, TypeError) as e:
            RNS.log(
                "An error occurred while packing location sensor data. "
                "The contained exception was: " + str(e),
                RNS.LOG_ERROR,
            )
            return None

    def unpack(self, packed):
        """Restore fields from the wire format produced by :meth:`pack`.

        Returns a dict view of the decoded fix, or ``None`` on missing or
        malformed input. NOTE(review): a failure mid-decode leaves earlier
        fields already assigned — callers appear to treat ``None`` as
        "discard"; confirm before relying on partial state.
        """
        try:
            if packed is None:
                return None
            else:
                # Inverse of pack(): divide out the fixed-point scaling.
                self.latitude = struct.unpack("!i", packed[0])[0] / 1e6
                self.longitude = struct.unpack("!i", packed[1])[0] / 1e6
                self.altitude = struct.unpack("!I", packed[2])[0] / 1e2
                self.speed = struct.unpack("!I", packed[3])[0] / 1e2
                self.bearing = struct.unpack("!I", packed[4])[0] / 1e2
                self.accuracy = struct.unpack("!H", packed[5])[0] / 1e2
                # packed[6] is a unix timestamp (see _serialize_last_update).
                self.last_update = datetime.fromtimestamp(packed[6])
                return {
                    "latitude": self.latitude,
                    "longitude": self.longitude,
                    "altitude": self.altitude,
                    "speed": self.speed,
                    "bearing": self.bearing,
                    "accuracy": self.accuracy,
                    "last_update_iso": self.last_update.isoformat(),
                    "last_update_timestamp": self.last_update.timestamp(),
                }
        except (struct.error, IndexError):
            return None

    def _require_float(self, value: Optional[float], field_name: str) -> float:
        """Return *value* as float, raising ValueError when it is unset."""
        if value is None:
            raise ValueError(f"{field_name} is not set on Location sensor")
        return float(value)

    def _serialize_last_update(self) -> float:
        """Return last_update as a unix timestamp for the wire format."""
        if self.last_update is None:
            raise ValueError("last_update is not set on Location sensor")
        if isinstance(self.last_update, datetime):
            return self.last_update.timestamp()
        # Tolerate callers that stored a raw unix timestamp directly.
        if isinstance(self.last_update, (int, float)):
            return float(self.last_update)
        raise TypeError("last_update must be datetime or a unix timestamp")

    def _normalize_altitude(self, value: Optional[float]) -> float:
        """Return a safe altitude value, replacing invalid sentinels with 0."""
        altitude = self._require_float(value, "altitude")
        # Sideband sometimes surfaces the 0xffffffff sentinel as 42949672.95;
        # treat anything in that range as "no altitude" to avoid absurd UI values.
        if altitude >= 4.294e7:
            return 0.0
        return altitude

    __mapper_args__ = {"polymorphic_identity": SID_LOCATION, "with_polymorphic": "*"}
|
|
@@ -0,0 +1,429 @@
|
|
|
1
|
+
"""SQLAlchemy model for LXMF propagation telemetry data."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import Boolean, Float, ForeignKey, Integer, LargeBinary, String
|
|
8
|
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
9
|
+
|
|
10
|
+
from .. import Base
|
|
11
|
+
from .sensor import Sensor
|
|
12
|
+
from .sensor_enum import SID_LXMF_PROPAGATION
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _decode_hash(value: Any) -> bytes | None:
    """Coerce a Sideband-supplied hash value into raw ``bytes``.

    Accepts byte-like objects and strings (hex-decoded when they look
    like hex); anything else yields ``None``.
    """

    if value is None:
        return None
    if isinstance(value, bytes):
        return value
    if isinstance(value, bytearray):
        return bytes(value)
    if isinstance(value, memoryview):
        return value.tobytes()
    if isinstance(value, str):
        text = value.strip()
        if not text:
            return None
        # Sideband transports hashes as hex-encoded strings in some cases:
        # an even-length string made only of hex digits is decoded as hex.
        if len(text) % 2 == 0 and set(text) <= set("0123456789abcdefABCDEF"):
            try:
                return bytes.fromhex(text)
            except ValueError:
                pass
        return text.encode()
    return None
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _encode_hash(value: bytes | bytearray | memoryview | None) -> bytes | None:
    """Return a plain ``bytes`` copy of a byte-like hash, else ``None``."""
    if isinstance(value, memoryview):
        return value.tobytes()
    if isinstance(value, (bytes, bytearray)):
        return bytes(value)
    # None and any non-byte-like object are treated as "no hash".
    return None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _maybe_int(value: Any) -> int | None:
    """Best-effort int conversion; ``None`` when missing or unconvertible."""
    try:
        return int(value) if value is not None else None
    except (TypeError, ValueError):
        return None
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _maybe_float(value: Any) -> float | None:
    """Best-effort float conversion; ``None`` when missing or unconvertible."""
    try:
        return float(value) if value is not None else None
    except (TypeError, ValueError):
        return None
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def _maybe_bool(value: Any) -> bool | None:
    """Lenient boolean parser for telemetry payload values.

    ``None`` passes through; numbers and known true/false words map to
    booleans; anything else falls back to Python truthiness.
    """
    if value is None:
        return None
    if isinstance(value, bool):
        return value
    if isinstance(value, (int, float)):
        return bool(value)
    if isinstance(value, str):
        word = value.strip().lower()
        if word in {"", "0", "false", "no", "off"}:
            return False
        if word in {"1", "true", "yes", "on"}:
            return True
    # Unrecognised strings and arbitrary objects: plain truthiness.
    return bool(value)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class LXMFPropagationPeer(Base):
    """Per-peer telemetry as reported by the LXMF propagation daemon."""

    __tablename__ = "LXMFPropagationPeer"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Owning LXMFPropagation snapshot; rows are deleted with their parent.
    propagation_id: Mapped[int] = mapped_column(
        ForeignKey("LXMFPropagation.id", ondelete="CASCADE")
    )
    propagation: Mapped["LXMFPropagation"] = relationship(
        "LXMFPropagation", back_populates="peers"
    )

    # Peer identity and link state as delivered by the daemon payload.
    peer_hash: Mapped[bytes] = mapped_column(LargeBinary, nullable=False)
    peer_type: Mapped[str | None] = mapped_column(String, nullable=True)
    state: Mapped[str | None] = mapped_column(String, nullable=True)
    alive: Mapped[bool | None] = mapped_column(Boolean, nullable=True)
    last_heard: Mapped[float | None] = mapped_column(Float, nullable=True)
    next_sync_attempt: Mapped[float | None] = mapped_column(Float, nullable=True)
    last_sync_attempt: Mapped[float | None] = mapped_column(Float, nullable=True)
    sync_backoff: Mapped[float | None] = mapped_column(Float, nullable=True)
    peering_timebase: Mapped[float | None] = mapped_column(Float, nullable=True)
    ler: Mapped[float | None] = mapped_column(Float, nullable=True)
    # Stored under SQL column name "str" (payload key) — attribute renamed
    # to avoid shadowing the builtin.
    str_value: Mapped[float | None] = mapped_column("str", Float, nullable=True)
    transfer_limit: Mapped[int | None] = mapped_column(Integer, nullable=True)
    network_distance: Mapped[int | None] = mapped_column(Integer, nullable=True)
    rx_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
    tx_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Flattened view of the payload's nested "messages" dict.
    messages_offered: Mapped[int | None] = mapped_column(Integer, nullable=True)
    messages_outgoing: Mapped[int | None] = mapped_column(Integer, nullable=True)
    messages_incoming: Mapped[int | None] = mapped_column(Integer, nullable=True)
    messages_unhandled: Mapped[int | None] = mapped_column(Integer, nullable=True)

    def to_payload(self) -> dict[str, Any]:
        """Re-assemble the daemon-shaped payload dict for this peer.

        Inverse of :meth:`update_from_payload`; ``alive`` is coerced to a
        concrete bool (``None`` becomes ``False``).
        """
        messages = {
            "offered": self.messages_offered,
            "outgoing": self.messages_outgoing,
            "incoming": self.messages_incoming,
            "unhandled": self.messages_unhandled,
        }
        return {
            "type": self.peer_type,
            "state": self.state,
            "alive": bool(self.alive) if self.alive is not None else False,
            "last_heard": self.last_heard,
            "next_sync_attempt": self.next_sync_attempt,
            "last_sync_attempt": self.last_sync_attempt,
            "sync_backoff": self.sync_backoff,
            "peering_timebase": self.peering_timebase,
            "ler": self.ler,
            "str": self.str_value,
            "transfer_limit": self.transfer_limit,
            "network_distance": self.network_distance,
            "rx_bytes": self.rx_bytes,
            "tx_bytes": self.tx_bytes,
            "messages": messages,
        }

    def update_from_payload(self, payload: Any) -> None:
        """Overwrite this row's fields from a daemon payload dict.

        Non-dict payloads are ignored. ``type``/``state`` are only updated
        when present; the numeric fields and ``alive`` are overwritten
        unconditionally (missing keys reset them to ``None``).
        """
        if not isinstance(payload, dict):
            return

        peer_type = payload.get("type")
        if peer_type is not None:
            self.peer_type = str(peer_type)

        state = payload.get("state")
        if state is not None:
            self.state = str(state)

        # NOTE: unlike type/state, alive is always overwritten — a payload
        # without the key clears any previously stored value.
        alive = _maybe_bool(payload.get("alive"))
        self.alive = alive

        self.last_heard = _maybe_float(payload.get("last_heard"))
        self.next_sync_attempt = _maybe_float(payload.get("next_sync_attempt"))
        self.last_sync_attempt = _maybe_float(payload.get("last_sync_attempt"))
        self.sync_backoff = _maybe_float(payload.get("sync_backoff"))
        self.peering_timebase = _maybe_float(payload.get("peering_timebase"))
        self.ler = _maybe_float(payload.get("ler"))
        self.str_value = _maybe_float(payload.get("str"))
        self.transfer_limit = _maybe_int(payload.get("transfer_limit"))
        self.network_distance = _maybe_int(payload.get("network_distance"))
        self.rx_bytes = _maybe_int(payload.get("rx_bytes"))
        self.tx_bytes = _maybe_int(payload.get("tx_bytes"))

        messages = payload.get("messages")
        if isinstance(messages, dict):
            self.messages_offered = _maybe_int(messages.get("offered"))
            self.messages_outgoing = _maybe_int(messages.get("outgoing"))
            self.messages_incoming = _maybe_int(messages.get("incoming"))
            self.messages_unhandled = _maybe_int(messages.get("unhandled"))
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class LXMFPropagation(Sensor):
|
|
178
|
+
"""Telemetry sensor describing LXMF propagation state."""
|
|
179
|
+
|
|
180
|
+
__tablename__ = "LXMFPropagation"
|
|
181
|
+
|
|
182
|
+
SID = SID_LXMF_PROPAGATION
|
|
183
|
+
|
|
184
|
+
id: Mapped[int] = mapped_column(
|
|
185
|
+
ForeignKey("Sensor.id", ondelete="CASCADE"), primary_key=True
|
|
186
|
+
)
|
|
187
|
+
destination_hash: Mapped[bytes | None] = mapped_column(LargeBinary, nullable=True)
|
|
188
|
+
identity_hash: Mapped[bytes | None] = mapped_column(LargeBinary, nullable=True)
|
|
189
|
+
uptime: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
190
|
+
delivery_limit: Mapped[float | None] = mapped_column(Float, nullable=True)
|
|
191
|
+
propagation_limit: Mapped[float | None] = mapped_column(Float, nullable=True)
|
|
192
|
+
autopeer_maxdepth: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
193
|
+
from_static_only: Mapped[bool | None] = mapped_column(Boolean, nullable=True)
|
|
194
|
+
|
|
195
|
+
message_store_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
196
|
+
message_store_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
197
|
+
message_store_limit: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
198
|
+
|
|
199
|
+
client_messages_received: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
200
|
+
client_messages_served: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
201
|
+
|
|
202
|
+
unpeered_incoming: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
203
|
+
unpeered_rx_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
204
|
+
|
|
205
|
+
static_peers: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
206
|
+
total_peers: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
207
|
+
max_peers: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
208
|
+
active_peers: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
209
|
+
unreachable_peers: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
210
|
+
|
|
211
|
+
peered_rx_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
212
|
+
peered_tx_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
213
|
+
peered_offered: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
214
|
+
peered_outgoing: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
215
|
+
peered_incoming: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
216
|
+
peered_unhandled: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
217
|
+
peered_max_unhandled: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
218
|
+
|
|
219
|
+
peers: Mapped[list[LXMFPropagationPeer]] = relationship(
|
|
220
|
+
LXMFPropagationPeer,
|
|
221
|
+
back_populates="propagation",
|
|
222
|
+
cascade="all, delete-orphan",
|
|
223
|
+
order_by="LXMFPropagationPeer.id",
|
|
224
|
+
)
|
|
225
|
+
|
|
226
|
+
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
227
|
+
if "stale_time" not in kwargs:
|
|
228
|
+
kwargs["stale_time"] = 300
|
|
229
|
+
super().__init__(*args, **kwargs)
|
|
230
|
+
self.sid = self.SID
|
|
231
|
+
|
|
232
|
+
def _ensure_peer_aggregates(self) -> dict[str, int]:
|
|
233
|
+
total = len(self.peers)
|
|
234
|
+
active = sum(1 for peer in self.peers if peer.alive)
|
|
235
|
+
rx_sum = sum(peer.rx_bytes or 0 for peer in self.peers)
|
|
236
|
+
tx_sum = sum(peer.tx_bytes or 0 for peer in self.peers)
|
|
237
|
+
offered_sum = sum(peer.messages_offered or 0 for peer in self.peers)
|
|
238
|
+
outgoing_sum = sum(peer.messages_outgoing or 0 for peer in self.peers)
|
|
239
|
+
incoming_sum = sum(peer.messages_incoming or 0 for peer in self.peers)
|
|
240
|
+
unhandled_sum = sum(peer.messages_unhandled or 0 for peer in self.peers)
|
|
241
|
+
max_unhandled = (
|
|
242
|
+
max((peer.messages_unhandled or 0 for peer in self.peers), default=0)
|
|
243
|
+
if self.peers
|
|
244
|
+
else 0
|
|
245
|
+
)
|
|
246
|
+
|
|
247
|
+
self.total_peers = total
|
|
248
|
+
self.active_peers = active
|
|
249
|
+
self.unreachable_peers = total - active
|
|
250
|
+
self.peered_rx_bytes = rx_sum
|
|
251
|
+
self.peered_tx_bytes = tx_sum
|
|
252
|
+
self.peered_offered = offered_sum
|
|
253
|
+
self.peered_outgoing = outgoing_sum
|
|
254
|
+
self.peered_incoming = incoming_sum
|
|
255
|
+
self.peered_unhandled = unhandled_sum
|
|
256
|
+
self.peered_max_unhandled = max_unhandled
|
|
257
|
+
|
|
258
|
+
return {
|
|
259
|
+
"total_peers": total,
|
|
260
|
+
"active_peers": active,
|
|
261
|
+
"unreachable_peers": total - active,
|
|
262
|
+
"peered_rx_bytes": rx_sum,
|
|
263
|
+
"peered_tx_bytes": tx_sum,
|
|
264
|
+
"peered_offered": offered_sum,
|
|
265
|
+
"peered_outgoing": outgoing_sum,
|
|
266
|
+
"peered_incoming": incoming_sum,
|
|
267
|
+
"peered_unhandled": unhandled_sum,
|
|
268
|
+
"peered_max_unhandled": max_unhandled,
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
def _pack_message_store(self) -> dict[str, Any] | None:
|
|
272
|
+
payload = {
|
|
273
|
+
"count": self.message_store_count,
|
|
274
|
+
"bytes": self.message_store_bytes,
|
|
275
|
+
"limit": self.message_store_limit,
|
|
276
|
+
}
|
|
277
|
+
if any(value is not None for value in payload.values()):
|
|
278
|
+
return payload
|
|
279
|
+
return None
|
|
280
|
+
|
|
281
|
+
def _pack_clients(self) -> dict[str, Any] | None:
|
|
282
|
+
payload = {
|
|
283
|
+
"client_propagation_messages_received": self.client_messages_received,
|
|
284
|
+
"client_propagation_messages_served": self.client_messages_served,
|
|
285
|
+
}
|
|
286
|
+
if any(value is not None for value in payload.values()):
|
|
287
|
+
return payload
|
|
288
|
+
return None
|
|
289
|
+
|
|
290
|
+
def _pack_peers(self) -> dict[bytes, dict[str, Any]]:
|
|
291
|
+
peers: dict[bytes, dict[str, Any]] = {}
|
|
292
|
+
for peer in self.peers:
|
|
293
|
+
key = _encode_hash(peer.peer_hash)
|
|
294
|
+
if key is None:
|
|
295
|
+
continue
|
|
296
|
+
peers[key] = peer.to_payload()
|
|
297
|
+
return peers
|
|
298
|
+
|
|
299
|
+
def pack(self) -> dict[str, Any] | None:  # type: ignore[override]
    """Serialise the propagation-node state into a payload dict.

    Scalar fields, peer aggregates, and the packed peer map are always
    included; the ``messagestore`` and ``clients`` sections are added
    only when they carry data. Returns ``None`` when every scalar field
    is empty and no peers or optional sections are present.
    """
    aggregates = self._ensure_peer_aggregates()
    packed_peers = self._pack_peers()

    static_flag = self.from_static_only
    result: dict[str, Any] = {
        "destination_hash": _encode_hash(self.destination_hash),
        "identity_hash": _encode_hash(self.identity_hash),
        "uptime": self.uptime,
        "delivery_limit": self.delivery_limit,
        "propagation_limit": self.propagation_limit,
        "autopeer_maxdepth": self.autopeer_maxdepth,
        # Normalise to a real bool, but keep None as "unknown".
        "from_static_only": None if static_flag is None else bool(static_flag),
        "unpeered_propagation_incoming": self.unpeered_incoming,
        "unpeered_propagation_rx_bytes": self.unpeered_rx_bytes,
        "static_peers": self.static_peers,
        "total_peers": aggregates["total_peers"],
        "active_peers": aggregates["active_peers"],
        "unreachable_peers": aggregates["unreachable_peers"],
        "max_peers": self.max_peers,
        "peered_propagation_rx_bytes": aggregates["peered_rx_bytes"],
        "peered_propagation_tx_bytes": aggregates["peered_tx_bytes"],
        "peered_propagation_offered": aggregates["peered_offered"],
        "peered_propagation_outgoing": aggregates["peered_outgoing"],
        "peered_propagation_incoming": aggregates["peered_incoming"],
        "peered_propagation_unhandled": aggregates["peered_unhandled"],
        "peered_propagation_max_unhandled": aggregates["peered_max_unhandled"],
        "peers": packed_peers,
    }

    store_section = self._pack_message_store()
    if store_section is not None:
        result["messagestore"] = store_section

    client_section = self._pack_clients()
    if client_section is not None:
        result["clients"] = client_section

    # Suppress the whole payload when every scalar field is empty and
    # no peer data or optional section carries anything.
    scalar_fields_empty = all(
        value in (None, {}, [])
        for key, value in result.items()
        if key not in {"peers", "messagestore", "clients"}
    )
    if (
        scalar_fields_empty
        and not packed_peers
        and store_section is None
        and client_section is None
    ):
        return None

    return result
|
|
353
|
+
|
|
354
|
+
def unpack(self, packed: Any) -> Any:  # type: ignore[override]
    """Rehydrate this record from a packed payload dict.

    Non-dict input (including ``None``) is rejected and ``None`` is
    returned. Otherwise every known field is refreshed, the peer list
    is rebuilt in place, peer aggregates are recomputed, and the
    original dict is returned unchanged.
    """
    # isinstance(None, dict) is False, so this also rejects None.
    if not isinstance(packed, dict):
        return None

    read = packed.get

    # Identity and node-level configuration.
    self.destination_hash = _decode_hash(read("destination_hash"))
    self.identity_hash = _decode_hash(read("identity_hash"))
    self.uptime = _maybe_int(read("uptime"))
    self.delivery_limit = _maybe_float(read("delivery_limit"))
    self.propagation_limit = _maybe_float(read("propagation_limit"))
    self.autopeer_maxdepth = _maybe_int(read("autopeer_maxdepth"))
    self.from_static_only = _maybe_bool(read("from_static_only"))

    # Optional message-store section.
    store_section = read("messagestore")
    if isinstance(store_section, dict):
        self.message_store_count = _maybe_int(store_section.get("count"))
        self.message_store_bytes = _maybe_int(store_section.get("bytes"))
        self.message_store_limit = _maybe_int(store_section.get("limit"))

    # Optional client-counters section.
    client_section = read("clients")
    if isinstance(client_section, dict):
        self.client_messages_received = _maybe_int(
            client_section.get("client_propagation_messages_received")
        )
        self.client_messages_served = _maybe_int(
            client_section.get("client_propagation_messages_served")
        )

    self.unpeered_incoming = _maybe_int(read("unpeered_propagation_incoming"))
    self.unpeered_rx_bytes = _maybe_int(read("unpeered_propagation_rx_bytes"))

    self.static_peers = _maybe_int(read("static_peers"))
    self.max_peers = _maybe_int(read("max_peers"))

    # Aggregated values are recomputed below but preserved if provided.
    self.total_peers = _maybe_int(read("total_peers"))
    self.active_peers = _maybe_int(read("active_peers"))
    self.unreachable_peers = _maybe_int(read("unreachable_peers"))
    self.peered_rx_bytes = _maybe_int(read("peered_propagation_rx_bytes"))
    self.peered_tx_bytes = _maybe_int(read("peered_propagation_tx_bytes"))
    self.peered_offered = _maybe_int(read("peered_propagation_offered"))
    self.peered_outgoing = _maybe_int(read("peered_propagation_outgoing"))
    self.peered_incoming = _maybe_int(read("peered_propagation_incoming"))
    self.peered_unhandled = _maybe_int(read("peered_propagation_unhandled"))
    self.peered_max_unhandled = _maybe_int(
        read("peered_propagation_max_unhandled")
    )

    # Rebuild the peer list in place, reusing existing entries whose
    # hash matches so previously created rows are kept; entries whose
    # key cannot be decoded are skipped.
    incoming_peers = read("peers")
    if isinstance(incoming_peers, dict):
        by_hash = {entry.peer_hash: entry for entry in self.peers}
        rebuilt: list[LXMFPropagationPeer] = []
        for raw_key, peer_data in incoming_peers.items():
            decoded = _decode_hash(raw_key)
            if decoded is None:
                continue
            row = by_hash.pop(decoded, None)
            if row is None:
                row = LXMFPropagationPeer(peer_hash=decoded)
            row.propagation = self
            row.update_from_payload(peer_data)
            rebuilt.append(row)
        self.peers[:] = rebuilt
    else:
        self.peers[:] = []

    self._ensure_peer_aggregates()

    return packed
|
|
422
|
+
|
|
423
|
+
# ORM polymorphic configuration: rows of this subtype are tagged with
# the LXMF-propagation sensor identity and loaded with "*" polymorphism.
__mapper_args__ = {
    "with_polymorphic": "*",
    "polymorphic_identity": SID_LXMF_PROPAGATION,
}
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
# Public API of this module.
__all__ = [
    "LXMFPropagation",
    "LXMFPropagationPeer",
]
|