aiohomematic 2026.1.29__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiohomematic/__init__.py +110 -0
- aiohomematic/_log_context_protocol.py +29 -0
- aiohomematic/api.py +410 -0
- aiohomematic/async_support.py +250 -0
- aiohomematic/backend_detection.py +462 -0
- aiohomematic/central/__init__.py +103 -0
- aiohomematic/central/async_rpc_server.py +760 -0
- aiohomematic/central/central_unit.py +1152 -0
- aiohomematic/central/config.py +463 -0
- aiohomematic/central/config_builder.py +772 -0
- aiohomematic/central/connection_state.py +160 -0
- aiohomematic/central/coordinators/__init__.py +38 -0
- aiohomematic/central/coordinators/cache.py +414 -0
- aiohomematic/central/coordinators/client.py +480 -0
- aiohomematic/central/coordinators/connection_recovery.py +1141 -0
- aiohomematic/central/coordinators/device.py +1166 -0
- aiohomematic/central/coordinators/event.py +514 -0
- aiohomematic/central/coordinators/hub.py +532 -0
- aiohomematic/central/decorators.py +184 -0
- aiohomematic/central/device_registry.py +229 -0
- aiohomematic/central/events/__init__.py +104 -0
- aiohomematic/central/events/bus.py +1392 -0
- aiohomematic/central/events/integration.py +424 -0
- aiohomematic/central/events/types.py +194 -0
- aiohomematic/central/health.py +762 -0
- aiohomematic/central/rpc_server.py +353 -0
- aiohomematic/central/scheduler.py +794 -0
- aiohomematic/central/state_machine.py +391 -0
- aiohomematic/client/__init__.py +203 -0
- aiohomematic/client/_rpc_errors.py +187 -0
- aiohomematic/client/backends/__init__.py +48 -0
- aiohomematic/client/backends/base.py +335 -0
- aiohomematic/client/backends/capabilities.py +138 -0
- aiohomematic/client/backends/ccu.py +487 -0
- aiohomematic/client/backends/factory.py +116 -0
- aiohomematic/client/backends/homegear.py +294 -0
- aiohomematic/client/backends/json_ccu.py +252 -0
- aiohomematic/client/backends/protocol.py +316 -0
- aiohomematic/client/ccu.py +1857 -0
- aiohomematic/client/circuit_breaker.py +459 -0
- aiohomematic/client/config.py +64 -0
- aiohomematic/client/handlers/__init__.py +40 -0
- aiohomematic/client/handlers/backup.py +157 -0
- aiohomematic/client/handlers/base.py +79 -0
- aiohomematic/client/handlers/device_ops.py +1085 -0
- aiohomematic/client/handlers/firmware.py +144 -0
- aiohomematic/client/handlers/link_mgmt.py +199 -0
- aiohomematic/client/handlers/metadata.py +436 -0
- aiohomematic/client/handlers/programs.py +144 -0
- aiohomematic/client/handlers/sysvars.py +100 -0
- aiohomematic/client/interface_client.py +1304 -0
- aiohomematic/client/json_rpc.py +2068 -0
- aiohomematic/client/request_coalescer.py +282 -0
- aiohomematic/client/rpc_proxy.py +629 -0
- aiohomematic/client/state_machine.py +324 -0
- aiohomematic/const.py +2207 -0
- aiohomematic/context.py +275 -0
- aiohomematic/converter.py +270 -0
- aiohomematic/decorators.py +390 -0
- aiohomematic/exceptions.py +185 -0
- aiohomematic/hmcli.py +997 -0
- aiohomematic/i18n.py +193 -0
- aiohomematic/interfaces/__init__.py +407 -0
- aiohomematic/interfaces/central.py +1067 -0
- aiohomematic/interfaces/client.py +1096 -0
- aiohomematic/interfaces/coordinators.py +63 -0
- aiohomematic/interfaces/model.py +1921 -0
- aiohomematic/interfaces/operations.py +217 -0
- aiohomematic/logging_context.py +134 -0
- aiohomematic/metrics/__init__.py +125 -0
- aiohomematic/metrics/_protocols.py +140 -0
- aiohomematic/metrics/aggregator.py +534 -0
- aiohomematic/metrics/dataclasses.py +489 -0
- aiohomematic/metrics/emitter.py +292 -0
- aiohomematic/metrics/events.py +183 -0
- aiohomematic/metrics/keys.py +300 -0
- aiohomematic/metrics/observer.py +563 -0
- aiohomematic/metrics/stats.py +172 -0
- aiohomematic/model/__init__.py +189 -0
- aiohomematic/model/availability.py +65 -0
- aiohomematic/model/calculated/__init__.py +89 -0
- aiohomematic/model/calculated/climate.py +276 -0
- aiohomematic/model/calculated/data_point.py +315 -0
- aiohomematic/model/calculated/field.py +147 -0
- aiohomematic/model/calculated/operating_voltage_level.py +286 -0
- aiohomematic/model/calculated/support.py +232 -0
- aiohomematic/model/custom/__init__.py +214 -0
- aiohomematic/model/custom/capabilities/__init__.py +67 -0
- aiohomematic/model/custom/capabilities/climate.py +41 -0
- aiohomematic/model/custom/capabilities/light.py +87 -0
- aiohomematic/model/custom/capabilities/lock.py +44 -0
- aiohomematic/model/custom/capabilities/siren.py +63 -0
- aiohomematic/model/custom/climate.py +1130 -0
- aiohomematic/model/custom/cover.py +722 -0
- aiohomematic/model/custom/data_point.py +360 -0
- aiohomematic/model/custom/definition.py +300 -0
- aiohomematic/model/custom/field.py +89 -0
- aiohomematic/model/custom/light.py +1174 -0
- aiohomematic/model/custom/lock.py +322 -0
- aiohomematic/model/custom/mixins.py +445 -0
- aiohomematic/model/custom/profile.py +945 -0
- aiohomematic/model/custom/registry.py +251 -0
- aiohomematic/model/custom/siren.py +462 -0
- aiohomematic/model/custom/switch.py +195 -0
- aiohomematic/model/custom/text_display.py +289 -0
- aiohomematic/model/custom/valve.py +78 -0
- aiohomematic/model/data_point.py +1416 -0
- aiohomematic/model/device.py +1840 -0
- aiohomematic/model/event.py +216 -0
- aiohomematic/model/generic/__init__.py +327 -0
- aiohomematic/model/generic/action.py +40 -0
- aiohomematic/model/generic/action_select.py +62 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +31 -0
- aiohomematic/model/generic/data_point.py +177 -0
- aiohomematic/model/generic/dummy.py +150 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +56 -0
- aiohomematic/model/generic/sensor.py +76 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +33 -0
- aiohomematic/model/hub/__init__.py +100 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/connectivity.py +190 -0
- aiohomematic/model/hub/data_point.py +342 -0
- aiohomematic/model/hub/hub.py +864 -0
- aiohomematic/model/hub/inbox.py +135 -0
- aiohomematic/model/hub/install_mode.py +393 -0
- aiohomematic/model/hub/metrics.py +208 -0
- aiohomematic/model/hub/number.py +42 -0
- aiohomematic/model/hub/select.py +52 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +43 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/hub/update.py +221 -0
- aiohomematic/model/support.py +592 -0
- aiohomematic/model/update.py +140 -0
- aiohomematic/model/week_profile.py +1827 -0
- aiohomematic/property_decorators.py +719 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/accept_device_in_inbox.fn +51 -0
- aiohomematic/rega_scripts/create_backup_start.fn +28 -0
- aiohomematic/rega_scripts/create_backup_status.fn +89 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +97 -0
- aiohomematic/rega_scripts/get_backend_info.fn +25 -0
- aiohomematic/rega_scripts/get_inbox_devices.fn +61 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +31 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_service_messages.fn +83 -0
- aiohomematic/rega_scripts/get_system_update_info.fn +39 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +31 -0
- aiohomematic/rega_scripts/set_program_state.fn +17 -0
- aiohomematic/rega_scripts/set_system_variable.fn +19 -0
- aiohomematic/rega_scripts/trigger_firmware_update.fn +67 -0
- aiohomematic/schemas.py +256 -0
- aiohomematic/store/__init__.py +55 -0
- aiohomematic/store/dynamic/__init__.py +43 -0
- aiohomematic/store/dynamic/command.py +250 -0
- aiohomematic/store/dynamic/data.py +175 -0
- aiohomematic/store/dynamic/details.py +187 -0
- aiohomematic/store/dynamic/ping_pong.py +416 -0
- aiohomematic/store/persistent/__init__.py +71 -0
- aiohomematic/store/persistent/base.py +285 -0
- aiohomematic/store/persistent/device.py +233 -0
- aiohomematic/store/persistent/incident.py +380 -0
- aiohomematic/store/persistent/paramset.py +241 -0
- aiohomematic/store/persistent/session.py +556 -0
- aiohomematic/store/serialization.py +150 -0
- aiohomematic/store/storage.py +689 -0
- aiohomematic/store/types.py +526 -0
- aiohomematic/store/visibility/__init__.py +40 -0
- aiohomematic/store/visibility/parser.py +141 -0
- aiohomematic/store/visibility/registry.py +722 -0
- aiohomematic/store/visibility/rules.py +307 -0
- aiohomematic/strings.json +237 -0
- aiohomematic/support.py +706 -0
- aiohomematic/tracing.py +236 -0
- aiohomematic/translations/de.json +237 -0
- aiohomematic/translations/en.json +237 -0
- aiohomematic/type_aliases.py +51 -0
- aiohomematic/validator.py +128 -0
- aiohomematic-2026.1.29.dist-info/METADATA +296 -0
- aiohomematic-2026.1.29.dist-info/RECORD +188 -0
- aiohomematic-2026.1.29.dist-info/WHEEL +5 -0
- aiohomematic-2026.1.29.dist-info/entry_points.txt +2 -0
- aiohomematic-2026.1.29.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2026.1.29.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,556 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2021-2026
|
|
3
|
+
"""
|
|
4
|
+
Session recorder for persisting RPC method calls and responses.
|
|
5
|
+
|
|
6
|
+
This module provides SessionRecorder which records RPC method calls and responses
|
|
7
|
+
for test playback, enabling deterministic testing without a live CCU backend.
|
|
8
|
+
|
|
9
|
+
Data structure (4-level nested dict):
|
|
10
|
+
store[rpc_type][method][frozen_params][timestamp_ms] = response
|
|
11
|
+
|
|
12
|
+
TTL mechanism:
|
|
13
|
+
- Each entry has a timestamp when it was recorded
|
|
14
|
+
- Entries expire after ttl seconds
|
|
15
|
+
- Expiration is lazy: checked on access/update
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
from __future__ import annotations

import asyncio
from collections import defaultdict
from datetime import UTC, datetime
import logging
import random
import re
from typing import TYPE_CHECKING, Any, Final, Self, cast

import orjson
from slugify import slugify

from aiohomematic import i18n
from aiohomematic.const import FILE_NAME_TS_PATTERN, FILE_SESSION_RECORDER, SUB_DIRECTORY_SESSION, RPCType
from aiohomematic.property_decorators import DelegatedProperty
from aiohomematic.store.serialization import cleanup_params_for_session, freeze_params, unfreeze_params
from aiohomematic.support import extract_exc_args
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from aiohomematic.interfaces import (
|
|
38
|
+
CentralInfoProtocol,
|
|
39
|
+
ConfigProviderProtocol,
|
|
40
|
+
DeviceProviderProtocol,
|
|
41
|
+
TaskSchedulerProtocol,
|
|
42
|
+
)
|
|
43
|
+
from aiohomematic.store import StorageFactoryProtocol
|
|
44
|
+
|
|
45
|
+
# Module-level logger, named after this module per logging convention.
_LOGGER: Final = logging.getLogger(__name__)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _now() -> int:
|
|
49
|
+
"""Return current UTC time as epoch seconds (int)."""
|
|
50
|
+
return int(datetime.now(tz=UTC).timestamp())
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class SessionRecorder:
    """
    Session recorder for central unit.

    Purpose:
        Records RPC method calls and responses for test playback.
        This enables deterministic testing without a live CCU backend.

    Data structure (4-level nested dict):
        store[rpc_type][method][frozen_params][timestamp] = response

        - rpc_type: "xml" or "json" (the RPC protocol used)
        - method: RPC method name (e.g., "listDevices", "getValue")
        - frozen_params: Parameters frozen to string via freeze_params()
        - timestamp: Integer epoch seconds as produced by _now() (for TTL tracking)
        - response: The actual RPC response to replay

    TTL (Time-To-Live) mechanism:
        - Each entry has a timestamp when it was recorded
        - Entries expire after _ttl seconds
        - Expiration is lazy: checked on access/update, not via background task
        - Optional refresh_on_get: Reading an entry extends its TTL

    Why nested defaultdicts?
        Avoids explicit bucket creation when recording new entries.
        store[rpc_type][method][params] automatically creates intermediate dicts.

    Cleanup strategy:
        _purge_expired_at() removes expired entries and cleans up empty buckets.
        Important: Uses .get() chains to avoid creating buckets as side effect.
    """

    # Fixed attribute set: avoids a per-instance __dict__ and catches typos early.
    __slots__ = (
        "_active",
        "_central_info",
        "_config_provider",
        "_device_provider",
        "_is_recording",
        "_refresh_on_get",
        "_storage_factory",
        "_store",
        "_task_scheduler",
        "_ttl",
    )
|
|
97
|
+
|
|
98
|
+
def __init__(
|
|
99
|
+
self,
|
|
100
|
+
*,
|
|
101
|
+
central_info: CentralInfoProtocol,
|
|
102
|
+
config_provider: ConfigProviderProtocol,
|
|
103
|
+
device_provider: DeviceProviderProtocol,
|
|
104
|
+
task_scheduler: TaskSchedulerProtocol,
|
|
105
|
+
storage_factory: StorageFactoryProtocol,
|
|
106
|
+
active: bool,
|
|
107
|
+
ttl_seconds: float,
|
|
108
|
+
refresh_on_get: bool = False,
|
|
109
|
+
):
|
|
110
|
+
"""
|
|
111
|
+
Initialize the session recorder.
|
|
112
|
+
|
|
113
|
+
Args:
|
|
114
|
+
central_info: Provider for central system information.
|
|
115
|
+
config_provider: Provider for configuration access.
|
|
116
|
+
device_provider: Provider for device registry access.
|
|
117
|
+
task_scheduler: Scheduler for background tasks.
|
|
118
|
+
storage_factory: Factory for creating storage instances.
|
|
119
|
+
active: Whether recording is initially active.
|
|
120
|
+
ttl_seconds: Time-to-live for recorded entries (0 = no expiry).
|
|
121
|
+
refresh_on_get: Whether to extend TTL on read access.
|
|
122
|
+
|
|
123
|
+
"""
|
|
124
|
+
self._active = active
|
|
125
|
+
if ttl_seconds < 0:
|
|
126
|
+
raise ValueError(i18n.tr(key="exception.store.session_recorder.ttl_positive"))
|
|
127
|
+
self._ttl: Final = float(ttl_seconds)
|
|
128
|
+
self._is_recording: bool = False
|
|
129
|
+
self._refresh_on_get: Final = refresh_on_get
|
|
130
|
+
# Nested defaultdicts auto-create intermediate buckets on write.
|
|
131
|
+
# Structure: rpc_type -> method -> frozen_params -> ts(ms) -> response
|
|
132
|
+
self._store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
|
|
133
|
+
lambda: defaultdict(lambda: defaultdict(dict))
|
|
134
|
+
)
|
|
135
|
+
self._central_info: Final = central_info
|
|
136
|
+
self._config_provider: Final = config_provider
|
|
137
|
+
self._device_provider: Final = device_provider
|
|
138
|
+
self._task_scheduler: Final = task_scheduler
|
|
139
|
+
self._storage_factory: Final = storage_factory
|
|
140
|
+
|
|
141
|
+
def __repr__(self) -> str:
|
|
142
|
+
"""Return the representation."""
|
|
143
|
+
self.cleanup()
|
|
144
|
+
return f"{self.__class__.__name__}({self._store})"
|
|
145
|
+
|
|
146
|
+
active: Final = DelegatedProperty[bool](path="_active")
|
|
147
|
+
|
|
148
|
+
@property
|
|
149
|
+
def _should_save(self) -> bool:
|
|
150
|
+
"""Determine if save operation should proceed."""
|
|
151
|
+
self.cleanup()
|
|
152
|
+
return len(self._store.items()) > 0
|
|
153
|
+
|
|
154
|
+
async def activate(
|
|
155
|
+
self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
|
|
156
|
+
) -> bool:
|
|
157
|
+
"""Activate the session recorder. Disable after on_time(seconds)."""
|
|
158
|
+
if self._is_recording:
|
|
159
|
+
_LOGGER.info(i18n.tr(key="log.store.session_recorder.activate.already_running"))
|
|
160
|
+
return False
|
|
161
|
+
self._store.clear()
|
|
162
|
+
self._active = True
|
|
163
|
+
if on_time > 0:
|
|
164
|
+
self._task_scheduler.create_task(
|
|
165
|
+
target=self._deactivate_after_delay(
|
|
166
|
+
delay=on_time,
|
|
167
|
+
auto_save=auto_save,
|
|
168
|
+
randomize_output=randomize_output,
|
|
169
|
+
use_ts_in_file_name=use_ts_in_file_name,
|
|
170
|
+
),
|
|
171
|
+
name=f"session_recorder_{self._central_info.name}",
|
|
172
|
+
)
|
|
173
|
+
return True
|
|
174
|
+
|
|
175
|
+
def add_json_rpc_session(
|
|
176
|
+
self,
|
|
177
|
+
*,
|
|
178
|
+
method: str,
|
|
179
|
+
params: dict[str, Any],
|
|
180
|
+
response: dict[str, Any] | None = None,
|
|
181
|
+
session_exc: Exception | None = None,
|
|
182
|
+
) -> None:
|
|
183
|
+
"""Add json rpc session to content."""
|
|
184
|
+
try:
|
|
185
|
+
if session_exc:
|
|
186
|
+
self.set(
|
|
187
|
+
rpc_type=str(RPCType.JSON_RPC),
|
|
188
|
+
method=method,
|
|
189
|
+
params=params,
|
|
190
|
+
response=extract_exc_args(exc=session_exc),
|
|
191
|
+
)
|
|
192
|
+
return
|
|
193
|
+
self.set(rpc_type=str(RPCType.JSON_RPC), method=method, params=params, response=response)
|
|
194
|
+
except Exception as exc:
|
|
195
|
+
_LOGGER.debug("ADD_JSON_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
|
|
196
|
+
|
|
197
|
+
def add_xml_rpc_session(
|
|
198
|
+
self, *, method: str, params: tuple[Any, ...], response: Any | None = None, session_exc: Exception | None = None
|
|
199
|
+
) -> None:
|
|
200
|
+
"""Add rpc session to content."""
|
|
201
|
+
try:
|
|
202
|
+
if session_exc:
|
|
203
|
+
self.set(
|
|
204
|
+
rpc_type=str(RPCType.XML_RPC),
|
|
205
|
+
method=method,
|
|
206
|
+
params=params,
|
|
207
|
+
response=extract_exc_args(exc=session_exc),
|
|
208
|
+
)
|
|
209
|
+
return
|
|
210
|
+
self.set(rpc_type=str(RPCType.XML_RPC), method=method, params=params, response=response)
|
|
211
|
+
except Exception as exc:
|
|
212
|
+
_LOGGER.debug("ADD_XML_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
|
|
213
|
+
|
|
214
|
+
def cleanup(self) -> None:
|
|
215
|
+
"""Purge all expired entries globally."""
|
|
216
|
+
for rpc_type in list(self._store.keys()):
|
|
217
|
+
for method in list(self._store[rpc_type].keys()):
|
|
218
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
219
|
+
|
|
220
|
+
async def clear(self) -> None:
|
|
221
|
+
"""Clear all stored session data."""
|
|
222
|
+
self._store.clear()
|
|
223
|
+
|
|
224
|
+
async def deactivate(
|
|
225
|
+
self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
|
|
226
|
+
) -> bool:
|
|
227
|
+
"""Deactivate the session recorder. Optionally after a delay(seconds)."""
|
|
228
|
+
if self._is_recording:
|
|
229
|
+
_LOGGER.info(i18n.tr(key="log.store.session_recorder.deactivate.already_running"))
|
|
230
|
+
return False
|
|
231
|
+
if delay > 0:
|
|
232
|
+
self._task_scheduler.create_task(
|
|
233
|
+
target=self._deactivate_after_delay(
|
|
234
|
+
delay=delay,
|
|
235
|
+
auto_save=auto_save,
|
|
236
|
+
randomize_output=randomize_output,
|
|
237
|
+
use_ts_in_file_name=use_ts_in_file_name,
|
|
238
|
+
),
|
|
239
|
+
name=f"session_recorder_{self._central_info.name}",
|
|
240
|
+
)
|
|
241
|
+
else:
|
|
242
|
+
self._active = False
|
|
243
|
+
self._is_recording = False
|
|
244
|
+
return True
|
|
245
|
+
|
|
246
|
+
def delete(self, *, rpc_type: str, method: str, params: Any) -> bool:
|
|
247
|
+
"""
|
|
248
|
+
Delete an entry if it exists. Return True if removed.
|
|
249
|
+
|
|
250
|
+
Avoid creating buckets when the target does not exist.
|
|
251
|
+
Clean up empty parent buckets on successful deletion.
|
|
252
|
+
"""
|
|
253
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
254
|
+
return False
|
|
255
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
256
|
+
return False
|
|
257
|
+
if (frozen_param := freeze_params(params=cleanup_params_for_session(params=params))) not in bucket_by_parameter:
|
|
258
|
+
return False
|
|
259
|
+
# Perform deletion
|
|
260
|
+
bucket_by_parameter.pop(frozen_param, None)
|
|
261
|
+
if not bucket_by_parameter:
|
|
262
|
+
bucket_by_method.pop(method, None)
|
|
263
|
+
if not bucket_by_method:
|
|
264
|
+
self._store.pop(rpc_type, None)
|
|
265
|
+
return True
|
|
266
|
+
|
|
267
|
+
def get(
|
|
268
|
+
self,
|
|
269
|
+
*,
|
|
270
|
+
rpc_type: str,
|
|
271
|
+
method: str,
|
|
272
|
+
params: Any,
|
|
273
|
+
default: Any = None,
|
|
274
|
+
) -> Any:
|
|
275
|
+
"""
|
|
276
|
+
Return a cached response if still valid, else default.
|
|
277
|
+
|
|
278
|
+
Algorithm:
|
|
279
|
+
1. Purge expired entries for this method (lazy cleanup)
|
|
280
|
+
2. Navigate the nested dict safely using .get() to avoid bucket creation
|
|
281
|
+
3. Find the response at the latest timestamp (most recent recording)
|
|
282
|
+
4. Optionally extend TTL by adding a new timestamp (refresh_on_get)
|
|
283
|
+
|
|
284
|
+
Why use .get() chains instead of direct indexing?
|
|
285
|
+
Using self._store[rpc_type][method] would auto-create buckets due to
|
|
286
|
+
defaultdict behavior. This is a read operation, so we must not modify
|
|
287
|
+
the store when the entry doesn't exist. The .get() method returns None
|
|
288
|
+
without creating the missing key.
|
|
289
|
+
|
|
290
|
+
Latest timestamp selection:
|
|
291
|
+
Multiple timestamps can exist for the same params (from TTL refresh).
|
|
292
|
+
We always return the response at max(timestamps) to get the most recent.
|
|
293
|
+
"""
|
|
294
|
+
# Step 1: Remove expired entries before lookup
|
|
295
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
296
|
+
|
|
297
|
+
# Step 2: Navigate safely without creating buckets (read-only access)
|
|
298
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
299
|
+
return default
|
|
300
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
301
|
+
return default
|
|
302
|
+
frozen_param = freeze_params(params=cleanup_params_for_session(params=params))
|
|
303
|
+
if not (bucket_by_ts := bucket_by_parameter.get(frozen_param)):
|
|
304
|
+
return default
|
|
305
|
+
|
|
306
|
+
# Step 3: Get response at latest timestamp
|
|
307
|
+
try:
|
|
308
|
+
latest_ts = max(bucket_by_ts.keys())
|
|
309
|
+
except ValueError:
|
|
310
|
+
# Empty bucket (all entries expired)
|
|
311
|
+
return default
|
|
312
|
+
resp = bucket_by_ts[latest_ts]
|
|
313
|
+
|
|
314
|
+
# Step 4: TTL refresh - add new timestamp to extend expiry
|
|
315
|
+
if self._refresh_on_get:
|
|
316
|
+
bucket_by_ts[_now()] = resp
|
|
317
|
+
return resp
|
|
318
|
+
|
|
319
|
+
def get_latest_response_by_method(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
|
|
320
|
+
"""Return latest non-expired responses for a given (rpc_type, method)."""
|
|
321
|
+
# Purge expired entries first without creating any new buckets.
|
|
322
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
323
|
+
result: list[Any] = []
|
|
324
|
+
# Access store safely to avoid side effects from creating buckets.
|
|
325
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
326
|
+
return result
|
|
327
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
328
|
+
return result
|
|
329
|
+
# For each parameter, choose the response at the latest timestamp.
|
|
330
|
+
for frozen_params, bucket_by_ts in bucket_by_parameter.items():
|
|
331
|
+
if not bucket_by_ts:
|
|
332
|
+
continue
|
|
333
|
+
try:
|
|
334
|
+
latest_ts = max(bucket_by_ts.keys())
|
|
335
|
+
except ValueError:
|
|
336
|
+
continue
|
|
337
|
+
resp = bucket_by_ts[latest_ts]
|
|
338
|
+
params = unfreeze_params(frozen_params=frozen_params)
|
|
339
|
+
|
|
340
|
+
result.append((params, resp))
|
|
341
|
+
return result
|
|
342
|
+
|
|
343
|
+
def get_latest_response_by_params(
|
|
344
|
+
self,
|
|
345
|
+
*,
|
|
346
|
+
rpc_type: str,
|
|
347
|
+
method: str,
|
|
348
|
+
params: Any,
|
|
349
|
+
) -> Any:
|
|
350
|
+
"""Return latest non-expired responses for a given (rpc_type, method, params)."""
|
|
351
|
+
# Purge expired entries first without creating any new buckets.
|
|
352
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
353
|
+
|
|
354
|
+
# Access store safely to avoid side effects from creating buckets.
|
|
355
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
356
|
+
return None
|
|
357
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
358
|
+
return None
|
|
359
|
+
frozen_params = freeze_params(params=cleanup_params_for_session(params=params))
|
|
360
|
+
|
|
361
|
+
# For each parameter, choose the response at the latest timestamp.
|
|
362
|
+
if (bucket_by_ts := bucket_by_parameter.get(frozen_params)) is None:
|
|
363
|
+
return None
|
|
364
|
+
|
|
365
|
+
try:
|
|
366
|
+
latest_ts = max(bucket_by_ts.keys())
|
|
367
|
+
return bucket_by_ts[latest_ts]
|
|
368
|
+
except ValueError:
|
|
369
|
+
return None
|
|
370
|
+
|
|
371
|
+
def peek_ts(self, *, rpc_type: str, method: str, params: Any) -> datetime | None:
|
|
372
|
+
"""
|
|
373
|
+
Return the most recent timestamp for a live entry, else None.
|
|
374
|
+
|
|
375
|
+
This method must not create buckets as a side effect. It purges expired
|
|
376
|
+
entries first and then returns the newest timestamp for the given
|
|
377
|
+
(rpc_type, method, params) if present.
|
|
378
|
+
"""
|
|
379
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
380
|
+
# Do NOT create buckets here — use .get chaining only.
|
|
381
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
382
|
+
return None
|
|
383
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
384
|
+
return None
|
|
385
|
+
frozen_param = freeze_params(params=cleanup_params_for_session(params=params))
|
|
386
|
+
if (bucket_by_ts := bucket_by_parameter.get(frozen_param)) is None or not bucket_by_ts:
|
|
387
|
+
return None
|
|
388
|
+
# After purge, remaining entries are alive; return the latest timestamp.
|
|
389
|
+
try:
|
|
390
|
+
latest_ts_int = max(bucket_by_ts.keys())
|
|
391
|
+
except ValueError:
|
|
392
|
+
# bucket was empty (shouldn't happen due to check), be safe
|
|
393
|
+
return None
|
|
394
|
+
return datetime.fromtimestamp(latest_ts_int, tz=UTC)
|
|
395
|
+
|
|
396
|
+
async def save(self, *, randomize_output: bool, use_ts_in_file_name: bool) -> None:
|
|
397
|
+
"""
|
|
398
|
+
Save the session data to storage.
|
|
399
|
+
|
|
400
|
+
Args:
|
|
401
|
+
randomize_output: Whether to randomize device addresses in output.
|
|
402
|
+
use_ts_in_file_name: Whether to include timestamp in the filename.
|
|
403
|
+
|
|
404
|
+
"""
|
|
405
|
+
if not self._should_save:
|
|
406
|
+
return
|
|
407
|
+
|
|
408
|
+
# Build storage key with optional timestamp
|
|
409
|
+
ts = datetime.now(tz=UTC) if use_ts_in_file_name else None
|
|
410
|
+
key = self._build_storage_key(ts=ts)
|
|
411
|
+
|
|
412
|
+
# Prepare data for storage
|
|
413
|
+
data = self._prepare_save_data(randomize_output=randomize_output)
|
|
414
|
+
|
|
415
|
+
# Create storage and save
|
|
416
|
+
storage = self._storage_factory.create_storage(
|
|
417
|
+
key=key,
|
|
418
|
+
sub_directory=SUB_DIRECTORY_SESSION,
|
|
419
|
+
formatted=False,
|
|
420
|
+
as_zip=True,
|
|
421
|
+
)
|
|
422
|
+
await storage.save(data=data)
|
|
423
|
+
_LOGGER.debug("Saved session recording to %s", key)
|
|
424
|
+
|
|
425
|
+
def set(
|
|
426
|
+
self,
|
|
427
|
+
*,
|
|
428
|
+
rpc_type: str,
|
|
429
|
+
method: str,
|
|
430
|
+
params: Any,
|
|
431
|
+
response: Any,
|
|
432
|
+
ts: int | datetime | None = None,
|
|
433
|
+
) -> Self:
|
|
434
|
+
"""Insert or update an entry."""
|
|
435
|
+
self._purge_expired_at(rpc_type=rpc_type, method=method)
|
|
436
|
+
frozen_param = freeze_params(params=params)
|
|
437
|
+
# Normalize timestamp to int epoch seconds
|
|
438
|
+
if isinstance(ts, datetime):
|
|
439
|
+
ts_int = int(ts.timestamp())
|
|
440
|
+
elif isinstance(ts, int):
|
|
441
|
+
ts_int = ts
|
|
442
|
+
else:
|
|
443
|
+
ts_int = _now()
|
|
444
|
+
self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = response
|
|
445
|
+
return self
|
|
446
|
+
|
|
447
|
+
def _bucket(self, *, rpc_type: str, method: str) -> dict[str, dict[int, tuple[Any, float]]]:
|
|
448
|
+
"""Ensure and return the innermost bucket."""
|
|
449
|
+
return self._store[rpc_type][method]
|
|
450
|
+
|
|
451
|
+
def _build_storage_key(self, *, ts: datetime | None = None) -> str:
|
|
452
|
+
"""Build the storage key for saving session data."""
|
|
453
|
+
key = f"{slugify(self._central_info.name)}_{FILE_SESSION_RECORDER}"
|
|
454
|
+
if ts:
|
|
455
|
+
key += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
|
|
456
|
+
return key
|
|
457
|
+
|
|
458
|
+
async def _deactivate_after_delay(
|
|
459
|
+
self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
|
|
460
|
+
) -> None:
|
|
461
|
+
"""Change the state of the session recorder after a delay."""
|
|
462
|
+
self._is_recording = True
|
|
463
|
+
await asyncio.sleep(delay)
|
|
464
|
+
self._active = False
|
|
465
|
+
self._is_recording = False
|
|
466
|
+
if auto_save:
|
|
467
|
+
await self.save(randomize_output=randomize_output, use_ts_in_file_name=use_ts_in_file_name)
|
|
468
|
+
_LOGGER.debug("Deactivated session recorder after %s seconds", {delay})
|
|
469
|
+
|
|
470
|
+
def _is_expired(self, *, ts: int, now: int | None = None) -> bool:
|
|
471
|
+
"""Check whether an entry has expired given epoch seconds."""
|
|
472
|
+
if self._ttl == 0:
|
|
473
|
+
return False
|
|
474
|
+
now = now if now is not None else _now()
|
|
475
|
+
return (now - ts) > self._ttl
|
|
476
|
+
|
|
477
|
+
def _prepare_save_data(self, *, randomize_output: bool) -> dict[str, Any]:
|
|
478
|
+
"""Prepare the data for saving, optionally randomizing device addresses."""
|
|
479
|
+
data: dict[str, Any] = dict(self._store)
|
|
480
|
+
|
|
481
|
+
if not randomize_output:
|
|
482
|
+
return data
|
|
483
|
+
|
|
484
|
+
# Collect all device addresses for randomization
|
|
485
|
+
if not (device_addresses := [device.address for device in self._device_provider.devices]):
|
|
486
|
+
return data
|
|
487
|
+
|
|
488
|
+
# Create randomized address mapping
|
|
489
|
+
randomized = device_addresses.copy()
|
|
490
|
+
random.shuffle(randomized)
|
|
491
|
+
address_map = dict(zip(device_addresses, randomized, strict=True))
|
|
492
|
+
|
|
493
|
+
# Replace addresses in the serialized data
|
|
494
|
+
json_str = orjson.dumps(data).decode("utf-8")
|
|
495
|
+
for original, replacement in address_map.items():
|
|
496
|
+
json_str = json_str.replace(original, replacement)
|
|
497
|
+
|
|
498
|
+
return cast(dict[str, Any], orjson.loads(json_str))
|
|
499
|
+
|
|
500
|
+
def _purge_expired_at(
|
|
501
|
+
self,
|
|
502
|
+
*,
|
|
503
|
+
rpc_type: str,
|
|
504
|
+
method: str,
|
|
505
|
+
) -> None:
|
|
506
|
+
"""
|
|
507
|
+
Remove expired entries for a given (rpc_type, method) bucket.
|
|
508
|
+
|
|
509
|
+
Multi-level cleanup algorithm:
|
|
510
|
+
This method cleans up the 4-level nested structure from bottom to top:
|
|
511
|
+
1. Remove expired timestamps from each params bucket
|
|
512
|
+
2. Remove empty params buckets from the method bucket
|
|
513
|
+
3. Remove empty method bucket from the rpc_type bucket
|
|
514
|
+
4. Remove empty rpc_type bucket from the store
|
|
515
|
+
|
|
516
|
+
Critical: No bucket creation
|
|
517
|
+
Uses .get() instead of direct indexing to avoid defaultdict's
|
|
518
|
+
auto-creation of missing buckets. A read/cleanup operation should
|
|
519
|
+
never modify the structure except to remove entries.
|
|
520
|
+
|
|
521
|
+
Two-pass deletion pattern:
|
|
522
|
+
For each level, we first collect items to delete, then delete them.
|
|
523
|
+
This avoids "dictionary changed size during iteration" errors.
|
|
524
|
+
"""
|
|
525
|
+
# TTL of 0 means entries never expire
|
|
526
|
+
if self._ttl == 0:
|
|
527
|
+
return
|
|
528
|
+
|
|
529
|
+
# Navigate safely without creating buckets
|
|
530
|
+
if not (bucket_by_method := self._store.get(rpc_type)):
|
|
531
|
+
return
|
|
532
|
+
if not (bucket_by_parameter := bucket_by_method.get(method)):
|
|
533
|
+
return
|
|
534
|
+
|
|
535
|
+
now = _now()
|
|
536
|
+
empty_params: list[str] = []
|
|
537
|
+
|
|
538
|
+
# Level 1: Remove expired timestamps from each params bucket
|
|
539
|
+
for p, bucket_by_ts in bucket_by_parameter.items():
|
|
540
|
+
# Collect expired timestamps (two-pass: collect then delete)
|
|
541
|
+
expired_ts = [ts for ts, _r in list(bucket_by_ts.items()) if self._is_expired(ts=ts, now=now)]
|
|
542
|
+
for ts in expired_ts:
|
|
543
|
+
del bucket_by_ts[ts]
|
|
544
|
+
# Track empty params buckets for cleanup
|
|
545
|
+
if not bucket_by_ts:
|
|
546
|
+
empty_params.append(p)
|
|
547
|
+
|
|
548
|
+
# Level 2: Remove empty params buckets
|
|
549
|
+
for p in empty_params:
|
|
550
|
+
bucket_by_parameter.pop(p, None)
|
|
551
|
+
|
|
552
|
+
# Level 3 & 4: Cascade cleanup of empty parent buckets
|
|
553
|
+
if not bucket_by_parameter:
|
|
554
|
+
bucket_by_method.pop(method, None)
|
|
555
|
+
if not bucket_by_method:
|
|
556
|
+
self._store.pop(rpc_type, None)
|