ReticulumTelemetryHub 0.1.0__py3-none-any.whl → 0.143.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reticulum_telemetry_hub/api/__init__.py +23 -0
- reticulum_telemetry_hub/api/models.py +323 -0
- reticulum_telemetry_hub/api/service.py +836 -0
- reticulum_telemetry_hub/api/storage.py +528 -0
- reticulum_telemetry_hub/api/storage_base.py +156 -0
- reticulum_telemetry_hub/api/storage_models.py +118 -0
- reticulum_telemetry_hub/atak_cot/__init__.py +49 -0
- reticulum_telemetry_hub/atak_cot/base.py +277 -0
- reticulum_telemetry_hub/atak_cot/chat.py +506 -0
- reticulum_telemetry_hub/atak_cot/detail.py +235 -0
- reticulum_telemetry_hub/atak_cot/event.py +181 -0
- reticulum_telemetry_hub/atak_cot/pytak_client.py +569 -0
- reticulum_telemetry_hub/atak_cot/tak_connector.py +848 -0
- reticulum_telemetry_hub/config/__init__.py +25 -0
- reticulum_telemetry_hub/config/constants.py +7 -0
- reticulum_telemetry_hub/config/manager.py +515 -0
- reticulum_telemetry_hub/config/models.py +215 -0
- reticulum_telemetry_hub/embedded_lxmd/__init__.py +5 -0
- reticulum_telemetry_hub/embedded_lxmd/embedded.py +418 -0
- reticulum_telemetry_hub/internal_api/__init__.py +21 -0
- reticulum_telemetry_hub/internal_api/bus.py +344 -0
- reticulum_telemetry_hub/internal_api/core.py +690 -0
- reticulum_telemetry_hub/internal_api/v1/__init__.py +74 -0
- reticulum_telemetry_hub/internal_api/v1/enums.py +109 -0
- reticulum_telemetry_hub/internal_api/v1/manifest.json +8 -0
- reticulum_telemetry_hub/internal_api/v1/schemas.py +478 -0
- reticulum_telemetry_hub/internal_api/versioning.py +63 -0
- reticulum_telemetry_hub/lxmf_daemon/Handlers.py +122 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMF.py +252 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMPeer.py +898 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMRouter.py +4227 -0
- reticulum_telemetry_hub/lxmf_daemon/LXMessage.py +1006 -0
- reticulum_telemetry_hub/lxmf_daemon/LXStamper.py +490 -0
- reticulum_telemetry_hub/lxmf_daemon/__init__.py +10 -0
- reticulum_telemetry_hub/lxmf_daemon/_version.py +1 -0
- reticulum_telemetry_hub/lxmf_daemon/lxmd.py +1655 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/fields/field_telemetry_stream.py +6 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/__init__.py +3 -0
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/appearance.py +19 -19
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/peer.py +17 -13
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/__init__.py +65 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/acceleration.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/ambient_light.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/angular_velocity.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/battery.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/connection_map.py +258 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/generic.py +841 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/gravity.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/humidity.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/information.py +42 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/location.py +110 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/lxmf_propagation.py +429 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/magnetic_field.py +68 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/physical_link.py +53 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/pressure.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/proximity.py +37 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/received.py +75 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/rns_transport.py +209 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor.py +65 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor_enum.py +27 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/sensor_mapping.py +58 -0
- reticulum_telemetry_hub/lxmf_telemetry/model/persistance/sensors/temperature.py +37 -0
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/sensors/time.py +36 -32
- {lxmf_telemetry → reticulum_telemetry_hub/lxmf_telemetry}/model/persistance/telemeter.py +26 -23
- reticulum_telemetry_hub/lxmf_telemetry/sampler.py +229 -0
- reticulum_telemetry_hub/lxmf_telemetry/telemeter_manager.py +409 -0
- reticulum_telemetry_hub/lxmf_telemetry/telemetry_controller.py +804 -0
- reticulum_telemetry_hub/northbound/__init__.py +5 -0
- reticulum_telemetry_hub/northbound/app.py +195 -0
- reticulum_telemetry_hub/northbound/auth.py +119 -0
- reticulum_telemetry_hub/northbound/gateway.py +310 -0
- reticulum_telemetry_hub/northbound/internal_adapter.py +302 -0
- reticulum_telemetry_hub/northbound/models.py +213 -0
- reticulum_telemetry_hub/northbound/routes_chat.py +123 -0
- reticulum_telemetry_hub/northbound/routes_files.py +119 -0
- reticulum_telemetry_hub/northbound/routes_rest.py +345 -0
- reticulum_telemetry_hub/northbound/routes_subscribers.py +150 -0
- reticulum_telemetry_hub/northbound/routes_topics.py +178 -0
- reticulum_telemetry_hub/northbound/routes_ws.py +107 -0
- reticulum_telemetry_hub/northbound/serializers.py +72 -0
- reticulum_telemetry_hub/northbound/services.py +373 -0
- reticulum_telemetry_hub/northbound/websocket.py +855 -0
- reticulum_telemetry_hub/reticulum_server/__main__.py +2237 -0
- reticulum_telemetry_hub/reticulum_server/command_manager.py +1268 -0
- reticulum_telemetry_hub/reticulum_server/command_text.py +399 -0
- reticulum_telemetry_hub/reticulum_server/constants.py +1 -0
- reticulum_telemetry_hub/reticulum_server/event_log.py +357 -0
- reticulum_telemetry_hub/reticulum_server/internal_adapter.py +358 -0
- reticulum_telemetry_hub/reticulum_server/outbound_queue.py +312 -0
- reticulum_telemetry_hub/reticulum_server/services.py +422 -0
- reticulumtelemetryhub-0.143.0.dist-info/METADATA +181 -0
- reticulumtelemetryhub-0.143.0.dist-info/RECORD +97 -0
- {reticulumtelemetryhub-0.1.0.dist-info → reticulumtelemetryhub-0.143.0.dist-info}/WHEEL +1 -1
- reticulumtelemetryhub-0.143.0.dist-info/licenses/LICENSE +277 -0
- lxmf_telemetry/model/fields/field_telemetry_stream.py +0 -7
- lxmf_telemetry/model/persistance/__init__.py +0 -3
- lxmf_telemetry/model/persistance/sensors/location.py +0 -69
- lxmf_telemetry/model/persistance/sensors/magnetic_field.py +0 -36
- lxmf_telemetry/model/persistance/sensors/sensor.py +0 -44
- lxmf_telemetry/model/persistance/sensors/sensor_enum.py +0 -24
- lxmf_telemetry/model/persistance/sensors/sensor_mapping.py +0 -9
- lxmf_telemetry/telemetry_controller.py +0 -124
- reticulum_server/main.py +0 -182
- reticulumtelemetryhub-0.1.0.dist-info/METADATA +0 -15
- reticulumtelemetryhub-0.1.0.dist-info/RECORD +0 -19
- {lxmf_telemetry → reticulum_telemetry_hub}/__init__.py +0 -0
- {lxmf_telemetry/model/persistance/sensors → reticulum_telemetry_hub/lxmf_telemetry}/__init__.py +0 -0
- {reticulum_server → reticulum_telemetry_hub/reticulum_server}/__init__.py +0 -0
reticulum_telemetry_hub/reticulum_server/event_log.py
@@ -0,0 +1,357 @@
+"""Event log helpers for Reticulum Telemetry Hub runtime."""
+
+from __future__ import annotations
+
+from collections import deque
+from datetime import datetime, timezone
+import hashlib
+import json
+from pathlib import Path
+import threading
+import time
+import uuid
+from typing import Callable
+from typing import Deque
+from typing import Dict
+from typing import List
+from typing import Optional
+
+
+DEFAULT_EVENT_LOG_FILENAME = "events.jsonl"
+DEFAULT_TAIL_INTERVAL_SECONDS = 0.5
+
+
+def resolve_event_log_path(storage_path: Path | str) -> Path:
+    """Return the default event log file path for a storage directory."""
+
+    return Path(storage_path) / DEFAULT_EVENT_LOG_FILENAME
+
+
+def _utcnow() -> datetime:
+    """Return the current UTC timestamp."""
+
+    return datetime.now(timezone.utc)
+
+
+class EventLog:
+    """Event buffer with optional shared-file persistence."""
+
+    def __init__(
+        self,
+        max_entries: int = 200,
+        *,
+        event_path: Path | str | None = None,
+        tail: bool = False,
+        tail_interval: float = DEFAULT_TAIL_INTERVAL_SECONDS,
+    ) -> None:
+        """Initialize the event log with a fixed-size buffer.
+
+        Args:
+            max_entries (int): Maximum number of events to retain.
+            event_path (Path | str | None): Optional path for shared event storage.
+            tail (bool): When True, tail the shared log file for new entries.
+            tail_interval (float): Seconds between tail polling attempts.
+        """
+
+        self._events: Deque[Dict[str, object]] = deque(maxlen=max_entries)
+        self._listeners: List[Callable[[Dict[str, object]], None]] = []
+        self._lock = threading.Lock()
+        self._origin_id = uuid.uuid4().hex
+        self._event_path = Path(event_path) if event_path else None
+        self._tail_interval = max(tail_interval, 0.05)
+        self._seen_limit = max(max_entries * 4, 200)
+        self._seen_queue: Deque[str] = deque()
+        self._seen_lookup: set[str] = set()
+        self._tail_stop = threading.Event()
+        self._tail_thread: threading.Thread | None = None
+        self._tail_offset = 0
+
+        if self._event_path:
+            self._event_path.parent.mkdir(parents=True, exist_ok=True)
+            self._event_path.touch(exist_ok=True)
+            self._tail_offset = self._load_existing_events()
+            if tail:
+                self._start_tailer()
+
+    def add_listener(
+        self, listener: Callable[[Dict[str, object]], None]
+    ) -> Callable[[], None]:
+        """Register an event listener.
+
+        Args:
+            listener (Callable[[Dict[str, object]], None]): Callback invoked
+                with newly recorded events.
+
+        Returns:
+            Callable[[], None]: Callback that unregisters the listener.
+        """
+
+        with self._lock:
+            self._listeners.append(listener)
+
+        def _remove_listener() -> None:
+            """Remove the registered listener.
+
+            Returns:
+                None: Removes the listener if registered.
+            """
+
+            with self._lock:
+                if listener in self._listeners:
+                    self._listeners.remove(listener)
+
+        return _remove_listener
+
+    def add_event(
+        self,
+        event_type: str,
+        message: str,
+        *,
+        metadata: Optional[Dict[str, object]] = None,
+    ) -> Dict[str, object]:
+        """Append an event entry and return the stored representation.
+
+        Args:
+            event_type (str): Short category label for the event.
+            message (str): Human readable description of the event.
+            metadata (Optional[Dict[str, object]]): Optional structured details.
+
+        Returns:
+            Dict[str, object]: The recorded event entry.
+        """
+
+        entry = {
+            "id": uuid.uuid4().hex,
+            "timestamp": _utcnow().isoformat(),
+            "type": event_type,
+            "message": message,
+            "metadata": metadata or {},
+            "origin": self._origin_id,
+        }
+        self._ingest_entry(entry, notify=True, allow_origin=True)
+        self._write_entry(entry)
+        return entry
+
+    def list_events(self, limit: int | None = None) -> List[Dict[str, object]]:
+        """Return the most recent events, newest first.
+
+        Args:
+            limit (int | None): Maximum number of events to return.
+
+        Returns:
+            List[Dict[str, object]]: Event entries in reverse chronological order.
+        """
+
+        with self._lock:
+            entries = [self._normalize_entry(entry) for entry in self._events]
+        if limit is None:
+            return list(reversed(entries))
+        return list(reversed(entries[-limit:]))
+
+    def close(self) -> None:
+        """Stop the tailer thread when enabled."""
+
+        self._tail_stop.set()
+        if self._tail_thread is not None:
+            self._tail_thread.join(timeout=1.0)
+            self._tail_thread = None
+
+    def _write_entry(self, entry: Dict[str, object]) -> None:
+        """Append an entry to the shared event log file when configured."""
+
+        if not self._event_path:
+            return
+        try:
+            payload = json.dumps(entry, ensure_ascii=True, default=str)
+            with self._event_path.open("a", encoding="utf-8") as handle:
+                handle.write(payload + "\n")
+        except (OSError, TypeError, ValueError):
+            # Reason: event logging should never break event recording.
+            return
+
+    def _load_existing_events(self) -> int:
+        """Load existing events from the shared log file."""
+
+        if not self._event_path or not self._event_path.exists():
+            return 0
+        offset = 0
+        try:
+            with self._event_path.open("r", encoding="utf-8") as handle:
+                for line in handle:
+                    self._ingest_line(line, notify=False, allow_origin=True)
+                offset = handle.tell()
+        except OSError:
+            return 0
+        return offset
+
+    def _start_tailer(self) -> None:
+        """Start a background thread that tails the shared log file."""
+
+        if self._tail_thread is not None or not self._event_path:
+            return
+        self._tail_thread = threading.Thread(target=self._tail_loop, daemon=True)
+        self._tail_thread.start()
+
+    def _tail_loop(self) -> None:
+        """Continuously tail the shared log file for new entries."""
+
+        if not self._event_path:
+            return
+        try:
+            with self._event_path.open("r", encoding="utf-8") as handle:
+                handle.seek(self._tail_offset)
+                while not self._tail_stop.is_set():
+                    line = handle.readline()
+                    if not line:
+                        time.sleep(self._tail_interval)
+                        continue
+                    self._ingest_line(line, notify=True, allow_origin=False)
+        except OSError:
+            return
+
+    def _ingest_line(
+        self, line: str, *, notify: bool, allow_origin: bool
+    ) -> None:
+        """Parse and record a raw JSON line."""
+
+        payload = line.strip()
+        if not payload:
+            return
+        try:
+            entry = json.loads(payload)
+        except json.JSONDecodeError:
+            return
+        if not isinstance(entry, dict):
+            return
+        entry_id = entry.get("id")
+        if not isinstance(entry_id, str):
+            entry_id = self._hash_payload(payload)
+            entry["id"] = entry_id
+        self._ingest_entry(entry, notify=notify, allow_origin=allow_origin)
+
+    def _hash_payload(self, payload: str) -> str:
+        """Return a stable hash for a raw payload string."""
+
+        return hashlib.sha1(payload.encode("utf-8")).hexdigest()
+
+    def _ingest_entry(
+        self,
+        entry: Dict[str, object],
+        *,
+        notify: bool,
+        allow_origin: bool,
+    ) -> None:
+        """Append a parsed entry to the buffer and notify listeners."""
+
+        normalized = self._normalize_entry(entry)
+        entry_id = normalized.get("id")
+        if not isinstance(entry_id, str):
+            entry_id = uuid.uuid4().hex
+            normalized["id"] = entry_id
+        if not allow_origin and normalized.get("origin") == self._origin_id:
+            return
+        if self._is_duplicate(entry_id):
+            return
+        self._remember_id(entry_id)
+        with self._lock:
+            self._events.append(normalized)
+            listeners = list(self._listeners)
+        if notify:
+            for listener in listeners:
+                try:
+                    listener(normalized)
+                except Exception:  # pragma: no cover - defensive logging
+                    # Reason: event listeners should never break event recording.
+                    continue
+
+    def _is_duplicate(self, entry_id: str) -> bool:
+        """Return True when the entry ID has already been processed."""
+
+        return entry_id in self._seen_lookup
+
+    def _remember_id(self, entry_id: str) -> None:
+        """Track the entry ID to avoid duplicate processing."""
+
+        if entry_id in self._seen_lookup:
+            return
+        if len(self._seen_queue) >= self._seen_limit:
+            oldest = self._seen_queue.popleft()
+            self._seen_lookup.discard(oldest)
+        self._seen_queue.append(entry_id)
+        self._seen_lookup.add(entry_id)
+
+    def _normalize_entry(self, entry: Dict[str, object]) -> Dict[str, object]:
+        """Return a JSON-safe event entry."""
+
+        normalized: Dict[str, object] = {}
+        for key, value in entry.items():
+            if key == "metadata":
+                continue
+            safe_key = self._json_safe_key(key)
+            if key == "id":
+                normalized["id"] = self._coerce_id(value)
+            elif key == "type":
+                normalized["type"] = "" if value is None else str(value)
+            elif key == "message":
+                normalized["message"] = "" if value is None else str(value)
+            elif key == "timestamp":
+                normalized["timestamp"] = self._json_safe_value(value)
+            elif key == "origin":
+                normalized["origin"] = self._json_safe_value(value)
+            else:
+                normalized[safe_key] = self._json_safe_value(value)
+
+        if "id" not in normalized:
+            normalized["id"] = uuid.uuid4().hex
+        if "type" not in normalized:
+            normalized["type"] = ""
+        if "message" not in normalized:
+            normalized["message"] = ""
+        if "timestamp" not in normalized:
+            normalized["timestamp"] = _utcnow().isoformat()
+        if "origin" not in normalized:
+            normalized["origin"] = None
+
+        metadata = entry.get("metadata")
+        if metadata is None:
+            normalized["metadata"] = {}
+        elif isinstance(metadata, dict):
+            normalized["metadata"] = self._json_safe_value(metadata)
+        else:
+            normalized["metadata"] = {"value": self._json_safe_value(metadata)}
+        return normalized
+
+    def _coerce_id(self, value: object) -> str:
+        """Return a safe string ID."""
+
+        if isinstance(value, (bytes, bytearray, memoryview)):
+            return bytes(value).hex()
+        if value is None:
+            return uuid.uuid4().hex
+        return str(value)
+
+    def _json_safe_key(self, key: object) -> str:
+        """Return a JSON-safe dictionary key."""
+
+        if isinstance(key, (bytes, bytearray, memoryview)):
+            return bytes(key).hex()
+        if key is None:
+            return "null"
+        return str(key)
+
+    def _json_safe_value(self, value: object) -> object:
+        """Return a JSON-safe value."""
+
+        if isinstance(value, dict):
+            return {self._json_safe_key(k): self._json_safe_value(v) for k, v in value.items()}
+        if isinstance(value, (list, tuple, set)):
+            return [self._json_safe_value(item) for item in value]
+        if isinstance(value, (bytes, bytearray, memoryview)):
+            return bytes(value).hex()
+        if isinstance(value, datetime):
+            return value.isoformat()
+        if isinstance(value, Path):
+            return str(value)
+        if isinstance(value, (str, int, float, bool)) or value is None:
+            return value
+        return str(value)