aiohomematic 2025.11.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aiohomematic might be problematic.

Files changed (77)
  1. aiohomematic/__init__.py +61 -0
  2. aiohomematic/async_support.py +212 -0
  3. aiohomematic/central/__init__.py +2309 -0
  4. aiohomematic/central/decorators.py +155 -0
  5. aiohomematic/central/rpc_server.py +295 -0
  6. aiohomematic/client/__init__.py +1848 -0
  7. aiohomematic/client/_rpc_errors.py +81 -0
  8. aiohomematic/client/json_rpc.py +1326 -0
  9. aiohomematic/client/rpc_proxy.py +311 -0
  10. aiohomematic/const.py +1127 -0
  11. aiohomematic/context.py +18 -0
  12. aiohomematic/converter.py +108 -0
  13. aiohomematic/decorators.py +302 -0
  14. aiohomematic/exceptions.py +164 -0
  15. aiohomematic/hmcli.py +186 -0
  16. aiohomematic/model/__init__.py +140 -0
  17. aiohomematic/model/calculated/__init__.py +84 -0
  18. aiohomematic/model/calculated/climate.py +290 -0
  19. aiohomematic/model/calculated/data_point.py +327 -0
  20. aiohomematic/model/calculated/operating_voltage_level.py +299 -0
  21. aiohomematic/model/calculated/support.py +234 -0
  22. aiohomematic/model/custom/__init__.py +177 -0
  23. aiohomematic/model/custom/climate.py +1532 -0
  24. aiohomematic/model/custom/cover.py +792 -0
  25. aiohomematic/model/custom/data_point.py +334 -0
  26. aiohomematic/model/custom/definition.py +871 -0
  27. aiohomematic/model/custom/light.py +1128 -0
  28. aiohomematic/model/custom/lock.py +394 -0
  29. aiohomematic/model/custom/siren.py +275 -0
  30. aiohomematic/model/custom/support.py +41 -0
  31. aiohomematic/model/custom/switch.py +175 -0
  32. aiohomematic/model/custom/valve.py +114 -0
  33. aiohomematic/model/data_point.py +1123 -0
  34. aiohomematic/model/device.py +1445 -0
  35. aiohomematic/model/event.py +208 -0
  36. aiohomematic/model/generic/__init__.py +217 -0
  37. aiohomematic/model/generic/action.py +34 -0
  38. aiohomematic/model/generic/binary_sensor.py +30 -0
  39. aiohomematic/model/generic/button.py +27 -0
  40. aiohomematic/model/generic/data_point.py +171 -0
  41. aiohomematic/model/generic/dummy.py +147 -0
  42. aiohomematic/model/generic/number.py +76 -0
  43. aiohomematic/model/generic/select.py +39 -0
  44. aiohomematic/model/generic/sensor.py +74 -0
  45. aiohomematic/model/generic/switch.py +54 -0
  46. aiohomematic/model/generic/text.py +29 -0
  47. aiohomematic/model/hub/__init__.py +333 -0
  48. aiohomematic/model/hub/binary_sensor.py +24 -0
  49. aiohomematic/model/hub/button.py +28 -0
  50. aiohomematic/model/hub/data_point.py +340 -0
  51. aiohomematic/model/hub/number.py +39 -0
  52. aiohomematic/model/hub/select.py +49 -0
  53. aiohomematic/model/hub/sensor.py +37 -0
  54. aiohomematic/model/hub/switch.py +44 -0
  55. aiohomematic/model/hub/text.py +30 -0
  56. aiohomematic/model/support.py +586 -0
  57. aiohomematic/model/update.py +143 -0
  58. aiohomematic/property_decorators.py +496 -0
  59. aiohomematic/py.typed +0 -0
  60. aiohomematic/rega_scripts/fetch_all_device_data.fn +92 -0
  61. aiohomematic/rega_scripts/get_program_descriptions.fn +30 -0
  62. aiohomematic/rega_scripts/get_serial.fn +44 -0
  63. aiohomematic/rega_scripts/get_system_variable_descriptions.fn +30 -0
  64. aiohomematic/rega_scripts/set_program_state.fn +12 -0
  65. aiohomematic/rega_scripts/set_system_variable.fn +15 -0
  66. aiohomematic/store/__init__.py +34 -0
  67. aiohomematic/store/dynamic.py +551 -0
  68. aiohomematic/store/persistent.py +988 -0
  69. aiohomematic/store/visibility.py +812 -0
  70. aiohomematic/support.py +664 -0
  71. aiohomematic/validator.py +112 -0
  72. aiohomematic-2025.11.3.dist-info/METADATA +144 -0
  73. aiohomematic-2025.11.3.dist-info/RECORD +77 -0
  74. aiohomematic-2025.11.3.dist-info/WHEEL +5 -0
  75. aiohomematic-2025.11.3.dist-info/entry_points.txt +2 -0
  76. aiohomematic-2025.11.3.dist-info/licenses/LICENSE +21 -0
  77. aiohomematic-2025.11.3.dist-info/top_level.txt +1 -0
@@ -0,0 +1,988 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2021-2025
+ """
+ Persistent stores used to keep Homematic metadata between runs.
+
+ This module provides on-disk stores that complement the short-lived, in-memory
+ stores from aiohomematic.store.dynamic. The goal is to minimize expensive data
+ retrieval from the backend by storing stable metadata such as device and
+ paramset descriptions in JSON files inside a dedicated cache directory.
+
+ Overview
+ - BasePersistentFile: Abstract base for file-backed content. It encapsulates
+   file path resolution, change detection via hashing, and thread-safe save/load
+   operations delegated to the CentralUnit looper.
+ - DeviceDescriptionCache: Persists device descriptions per interface, including
+   the mapping of device/channels and model metadata.
+ - ParamsetDescriptionCache: Persists paramset descriptions per interface and
+   channel, and offers helpers to query parameters, paramset keys and related
+   channel addresses.
+ - SessionRecorder: Persists recorded RPC sessions (request/response pairs) with
+   TTL-based expiry.
+
+ Key behaviors
+ - Saves only if caches are enabled (CentralConfig.use_caches) and the content has
+   changed (hash comparison), keeping I/O minimal and predictable.
+ - Uses orjson for fast binary writes and json for reads with a custom
+   object_hook to rebuild nested defaultdict structures.
+ - Save/load/clear operations are synchronized via a semaphore and executed via
+   the CentralUnit looper to avoid blocking the event loop.
+
+ Helper functions are provided to build content paths and file names and to
+ optionally clean up stale content directories.
+ """
+
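The change detection described above reduces to comparing a hash of the serialized content against the hash remembered from the last save. A minimal, self-contained sketch of the idea (using hashlib directly; the package's own hash_sha256 helper, imported below, may differ in detail):

    import hashlib

    import orjson

    def content_hash(content: dict) -> str:
        # Serialize deterministically, then hash; OPT_SORT_KEYS keeps the
        # output stable regardless of key insertion order.
        return hashlib.sha256(orjson.dumps(content, option=orjson.OPT_SORT_KEYS)).hexdigest()

    content = {"VCU0000001": {"TYPE": "HmIP-SWDO"}}
    last_hash_saved = content_hash(content)

    content["VCU0000001"]["TYPE"] = "HmIP-SWDO-I"
    if content_hash(content) != last_hash_saved:
        print("content changed -> worth writing to disk")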
+ from __future__ import annotations
+
+ from abc import ABC
+ import ast
+ import asyncio
+ from collections import defaultdict
+ from collections.abc import Mapping
+ from datetime import UTC, datetime
+ from functools import partial
+ import json
+ import logging
+ import os
+ from typing import Any, Final, Self
+ import zipfile
+
+ import orjson
+ from slugify import slugify
+
+ from aiohomematic import central as hmcu
+ from aiohomematic.async_support import loop_check
+ from aiohomematic.const import (
+     ADDRESS_SEPARATOR,
+     FILE_DEVICES,
+     FILE_NAME_TS_PATTERN,
+     FILE_PARAMSETS,
+     FILE_SESSION_RECORDER,
+     INIT_DATETIME,
+     SUB_DIRECTORY_CACHE,
+     SUB_DIRECTORY_SESSION,
+     UTF_8,
+     DataOperationResult,
+     DeviceDescription,
+     ParameterData,
+     ParamsetKey,
+     RPCType,
+ )
+ from aiohomematic.model.device import Device
+ from aiohomematic.support import (
+     check_or_create_directory,
+     create_random_device_addresses,
+     delete_file,
+     extract_exc_args,
+     get_device_address,
+     get_split_channel_address,
+     hash_sha256,
+     regular_to_default_dict_hook,
+ )
+
+ _LOGGER: Final = logging.getLogger(__name__)
+
+
+ class BasePersistentFile(ABC):
+     """Abstract base for file-backed persistent content."""
+
+     __slots__ = (
+         "_central",
+         "_directory",
+         "_file_postfix",
+         "_persistent_content",
+         "_save_load_semaphore",
+         "_sub_directory",
+         "_use_ts_in_file_names",
+         "last_hash_saved",
+         "last_save_triggered",
+     )
+
+     _file_postfix: str
+     _sub_directory: str
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         persistent_content: dict[str, Any],
+     ) -> None:
+         """Initialize the base class of the persistent content."""
+         self._save_load_semaphore: Final = asyncio.Semaphore()
+         self._central: Final = central
+         self._persistent_content: Final = persistent_content
+         self._directory: Final = _get_file_path(
+             storage_directory=central.config.storage_directory, sub_directory=self._sub_directory
+         )
+         self.last_save_triggered: datetime = INIT_DATETIME
+         self.last_hash_saved = hash_sha256(value=persistent_content)
+
+     @property
+     def content_hash(self) -> str:
+         """Return the hash of the content."""
+         return hash_sha256(value=self._persistent_content)
+
+     @property
+     def data_changed(self) -> bool:
+         """Return if the data has changed."""
+         return self.content_hash != self.last_hash_saved
+
+     def _get_file_name(
+         self,
+         *,
+         use_ts_in_file_name: bool = False,
+     ) -> str:
+         """Return the file name."""
+         return _get_file_name(
+             central_name=self._central.name,
+             file_name=self._file_postfix,
+             ts=datetime.now() if use_ts_in_file_name else None,
+         )
+
+     def _get_file_path(
+         self,
+         *,
+         use_ts_in_file_name: bool = False,
+     ) -> str:
+         """Return the full file path."""
+         return os.path.join(self._directory, self._get_file_name(use_ts_in_file_name=use_ts_in_file_name))
+
+     async def save(self, *, randomize_output: bool = False, use_ts_in_file_name: bool = False) -> DataOperationResult:
+         """Save current data to disk."""
+         if not self._should_save:
+             return DataOperationResult.NO_SAVE
+
+         if not check_or_create_directory(directory=self._directory):
+             return DataOperationResult.NO_SAVE
+
+         def _perform_save() -> DataOperationResult:
+             try:
+                 with open(
+                     file=self._get_file_path(use_ts_in_file_name=use_ts_in_file_name),
+                     mode="wb",
+                 ) as file_pointer:
+                     file_pointer.write(
+                         self._manipulate_content(
+                             content=orjson.dumps(
+                                 self._persistent_content,
+                                 option=orjson.OPT_NON_STR_KEYS,
+                             ),
+                             randomize_output=randomize_output,
+                         )
+                     )
+                 self.last_hash_saved = self.content_hash
+             except (OSError, orjson.JSONEncodeError):
+                 # Writing can fail on I/O errors; orjson raises JSONEncodeError
+                 # for unserializable content.
+                 return DataOperationResult.SAVE_FAIL
+             return DataOperationResult.SAVE_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_save, name=f"save-persistent-content-{self._get_file_name()}"
+             )
+
+     def _manipulate_content(self, *, content: bytes, randomize_output: bool = False) -> bytes:
+         """Manipulate the content of the file. Optionally randomize addresses."""
+         if not randomize_output:
+             return content
+
+         addresses = [device.address for device in self._central.devices]
+         text = content.decode(encoding=UTF_8)
+         for device_address, rnd_address in create_random_device_addresses(addresses=addresses).items():
+             text = text.replace(device_address, rnd_address)
+         return text.encode(encoding=UTF_8)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if the save operation should proceed."""
+         self.last_save_triggered = datetime.now()
+         return (
+             check_or_create_directory(directory=self._directory)
+             and self._central.config.use_caches
+             and self.content_hash != self.last_hash_saved
+         )
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """
+         Load data from disk into the dictionary.
+
+         Supports plain JSON files and ZIP archives containing a JSON file.
+         When a ZIP archive is provided, the first JSON member inside the archive
+         will be loaded.
+         """
+         if not file_path and not check_or_create_directory(directory=self._directory):
+             return DataOperationResult.NO_LOAD
+
+         if (file_path := file_path or self._get_file_path()) and not os.path.exists(file_path):
+             return DataOperationResult.NO_LOAD
+
+         def _perform_load() -> DataOperationResult:
+             try:
+                 if zipfile.is_zipfile(file_path):
+                     with zipfile.ZipFile(file_path, mode="r") as zf:
+                         # Prefer json files; pick the first .json entry if available
+                         if not (json_members := [n for n in zf.namelist() if n.lower().endswith(".json")]):
+                             return DataOperationResult.LOAD_FAIL
+                         raw = zf.read(json_members[0]).decode(UTF_8)
+                         data = json.loads(raw, object_hook=regular_to_default_dict_hook)
+                 else:
+                     with open(file=file_path, encoding=UTF_8) as file_pointer:
+                         data = json.loads(file_pointer.read(), object_hook=regular_to_default_dict_hook)
+
+                 if (converted_hash := hash_sha256(value=data)) == self.last_hash_saved:
+                     return DataOperationResult.NO_LOAD
+                 self._persistent_content.clear()
+                 self._persistent_content.update(data)
+                 self.last_hash_saved = converted_hash
+             except (json.JSONDecodeError, zipfile.BadZipFile, UnicodeDecodeError, OSError):
+                 return DataOperationResult.LOAD_FAIL
+             return DataOperationResult.LOAD_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_load, name=f"load-persistent-content-{self._get_file_name()}"
+             )
+
+     async def clear(self) -> None:
+         """Remove the stored file from disk."""
+
+         def _perform_clear() -> None:
+             delete_file(directory=self._directory, file_name=f"{self._central.name}*.json".lower())
+             self._persistent_content.clear()
+
+         async with self._save_load_semaphore:
+             await self._central.looper.async_add_executor_job(_perform_clear, name="clear-persistent-content")
+
+
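The base class is a template: concrete caches pin down _file_postfix and _sub_directory and hand their backing dict to the base initializer, while save/load/clear live in the base. A simplified, hypothetical stand-in (without the CentralUnit plumbing) to show the contract:

    from collections import defaultdict
    from typing import Any

    class TinyPersistentFile:
        # Subclasses declare where their file lives; the base owns save/load.
        _file_postfix: str
        _sub_directory: str

        def __init__(self, *, persistent_content: dict[str, Any]) -> None:
            self._persistent_content = persistent_content

    class TinyDeviceCache(TinyPersistentFile):
        _file_postfix = "device_descriptions"
        _sub_directory = "cache"

        def __init__(self) -> None:
            self._raw: dict[str, list[Any]] = defaultdict(list)
            # The subclass's own dict doubles as the persisted payload.
            super().__init__(persistent_content=self._raw)

    cache = TinyDeviceCache()
    print(cache._file_postfix, cache._sub_directory)  # device_descriptions cache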
+ class DeviceDescriptionCache(BasePersistentFile):
+     """Cache for device descriptions."""
+
+     __slots__ = (
+         "_addresses",
+         "_device_descriptions",
+         "_raw_device_descriptions",
+     )
+
+     _file_postfix = FILE_DEVICES
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Initialize the device description cache."""
+         # {interface_id, [device_descriptions]}
+         self._raw_device_descriptions: Final[dict[str, list[DeviceDescription]]] = defaultdict(list)
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_device_descriptions,
+         )
+         # {interface_id, {device_address, [channel_address]}}
+         self._addresses: Final[dict[str, dict[str, set[str]]]] = defaultdict(lambda: defaultdict(set))
+         # {interface_id, {address, device_descriptions}}
+         self._device_descriptions: Final[dict[str, dict[str, DeviceDescription]]] = defaultdict(dict)
+
+     def add_device(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Add a device to the cache."""
+         # Fast path: if the address is not yet known, skip costly removal operations.
+         if (address := device_description["ADDRESS"]) not in self._device_descriptions[interface_id]:
+             self._raw_device_descriptions[interface_id].append(device_description)
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+             return
+         # Address exists: remove old entries before adding the new description.
+         self._remove_device(
+             interface_id=interface_id,
+             addresses_to_remove=[address],
+         )
+         self._raw_device_descriptions[interface_id].append(device_description)
+         self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def get_raw_device_descriptions(self, *, interface_id: str) -> list[DeviceDescription]:
+         """Retrieve raw device descriptions from the cache."""
+         return self._raw_device_descriptions[interface_id]
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove a device from the cache."""
+         self._remove_device(
+             interface_id=device.interface_id,
+             addresses_to_remove=[device.address, *device.channels.keys()],
+         )
+
+     def _remove_device(self, *, interface_id: str, addresses_to_remove: list[str]) -> None:
+         """Remove a device from the cache."""
+         # Use a set for faster membership checks
+         addresses_set = set(addresses_to_remove)
+         self._raw_device_descriptions[interface_id] = [
+             device for device in self._raw_device_descriptions[interface_id] if device["ADDRESS"] not in addresses_set
+         ]
+         addr_map = self._addresses[interface_id]
+         desc_map = self._device_descriptions[interface_id]
+         for address in addresses_set:
+             # Pop with default to avoid KeyError and try/except overhead
+             if ADDRESS_SEPARATOR not in address:
+                 addr_map.pop(address, None)
+             desc_map.pop(address, None)
+
+     def get_addresses(self, *, interface_id: str | None = None) -> frozenset[str]:
+         """Return the addresses by interface as a frozen set."""
+         if interface_id:
+             return frozenset(self._addresses[interface_id])
+         return frozenset(addr for interface_id in self.get_interface_ids() for addr in self._addresses[interface_id])
+
+     def get_device_descriptions(self, *, interface_id: str) -> Mapping[str, DeviceDescription]:
+         """Return the device descriptions by interface."""
+         return self._device_descriptions[interface_id]
+
+     def get_interface_ids(self) -> tuple[str, ...]:
+         """Return the interface ids."""
+         return tuple(self._raw_device_descriptions.keys())
+
+     def has_device_descriptions(self, *, interface_id: str) -> bool:
+         """Return whether device descriptions exist for the interface."""
+         return interface_id in self._device_descriptions
+
+     def find_device_description(self, *, interface_id: str, device_address: str) -> DeviceDescription | None:
+         """Return the device description by interface and device_address, or None."""
+         return self._device_descriptions[interface_id].get(device_address)
+
+     def get_device_description(self, *, interface_id: str, address: str) -> DeviceDescription:
+         """Return the device description by interface and address."""
+         return self._device_descriptions[interface_id][address]
+
+     def get_device_with_channels(self, *, interface_id: str, device_address: str) -> Mapping[str, DeviceDescription]:
+         """Return the device dict by interface and device_address."""
+         device_descriptions: dict[str, DeviceDescription] = {
+             device_address: self.get_device_description(interface_id=interface_id, address=device_address)
+         }
+         children = device_descriptions[device_address]["CHILDREN"]
+         for channel_address in children:
+             device_descriptions[channel_address] = self.get_device_description(
+                 interface_id=interface_id, address=channel_address
+             )
+         return device_descriptions
+
+     def get_model(self, *, device_address: str) -> str | None:
+         """Return the device model (TYPE)."""
+         for data in self._device_descriptions.values():
+             if items := data.get(device_address):
+                 return items["TYPE"]
+         return None
+
+     def _convert_device_descriptions(self, *, interface_id: str, device_descriptions: list[DeviceDescription]) -> None:
+         """Convert the provided list of device descriptions."""
+         for device_description in device_descriptions:
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def _process_device_description(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Index a single device description by its address."""
+         address = device_description["ADDRESS"]
+         device_address = get_device_address(address=address)
+         self._device_descriptions[interface_id][address] = device_description
+
+         # Avoid redundant membership checks; set.add is idempotent and cheaper than check+add
+         addr_set = self._addresses[interface_id][device_address]
+         addr_set.add(device_address)
+         addr_set.add(address)
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """Load device data from disk into the device description cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching device descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
+             for (
+                 interface_id,
+                 device_descriptions,
+             ) in self._raw_device_descriptions.items():
+                 self._convert_device_descriptions(interface_id=interface_id, device_descriptions=device_descriptions)
+         return result
+
+
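Throughout the cache, device addresses are told apart from channel addresses by the separator: Homematic channel addresses follow the 'DEVICE:CHANNEL' convention. Simplified stand-ins for the imported get_device_address / get_split_channel_address helpers, to illustrate the indexing above (the real helpers may handle more edge cases):

    ADDRESS_SEPARATOR = ":"  # assumption: the conventional Homematic separator

    def get_device_address(address: str) -> str:
        # "VCU0000001:1" -> "VCU0000001"; plain device addresses pass through.
        return address.split(ADDRESS_SEPARATOR)[0]

    def get_split_channel_address(channel_address: str) -> tuple[str, int | None]:
        # "VCU0000001:1" -> ("VCU0000001", 1); "VCU0000001" -> ("VCU0000001", None)
        if ADDRESS_SEPARATOR in channel_address:
            device, channel_no = channel_address.split(ADDRESS_SEPARATOR, 1)
            return device, int(channel_no)
        return channel_address, None

    print(get_device_address("VCU0000001:1"))       # VCU0000001
    print(get_split_channel_address("VCU0000001"))  # ('VCU0000001', None)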
+ class ParamsetDescriptionCache(BasePersistentFile):
+     """Cache for paramset descriptions."""
+
+     __slots__ = (
+         "_address_parameter_cache",
+         "_raw_paramset_descriptions",
+     )
+
+     _file_postfix = FILE_PARAMSETS
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Init the paramset description cache."""
+         # {interface_id, {channel_address, paramsets}}
+         self._raw_paramset_descriptions: Final[dict[str, dict[str, dict[ParamsetKey, dict[str, ParameterData]]]]] = (
+             defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_paramset_descriptions,
+         )
+
+         # {(device_address, parameter), [channel_no]}
+         self._address_parameter_cache: Final[dict[tuple[str, str], set[int | None]]] = {}
+
+     @property
+     def raw_paramset_descriptions(
+         self,
+     ) -> Mapping[str, Mapping[str, Mapping[ParamsetKey, Mapping[str, ParameterData]]]]:
+         """Return the paramset descriptions."""
+         return self._raw_paramset_descriptions
+
+     def add(
+         self,
+         *,
+         interface_id: str,
+         channel_address: str,
+         paramset_key: ParamsetKey,
+         paramset_description: dict[str, ParameterData],
+     ) -> None:
+         """Add a paramset description to the cache."""
+         self._raw_paramset_descriptions[interface_id][channel_address][paramset_key] = paramset_description
+         self._add_address_parameter(channel_address=channel_address, paramsets=[paramset_description])
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove device paramset descriptions from the cache."""
+         if interface := self._raw_paramset_descriptions.get(device.interface_id):
+             for channel_address in device.channels:
+                 if channel_address in interface:
+                     del self._raw_paramset_descriptions[device.interface_id][channel_address]
+
+     def has_interface_id(self, *, interface_id: str) -> bool:
+         """Return if the interface is in the paramset descriptions cache."""
+         return interface_id in self._raw_paramset_descriptions
+
+     def get_paramset_keys(self, *, interface_id: str, channel_address: str) -> tuple[ParamsetKey, ...]:
+         """Get paramset keys from the paramset descriptions cache."""
+         return tuple(self._raw_paramset_descriptions[interface_id][channel_address])
+
+     def get_channel_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str
+     ) -> Mapping[ParamsetKey, Mapping[str, ParameterData]]:
+         """Get the paramset descriptions for a channel from the cache."""
+         return self._raw_paramset_descriptions[interface_id].get(channel_address, {})
+
+     def get_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey
+     ) -> Mapping[str, ParameterData]:
+         """Get paramset descriptions from the cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key]
+
+     def get_parameter_data(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey, parameter: str
+     ) -> ParameterData | None:
+         """Get parameter_data from the cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key].get(parameter)
+
+     def is_in_multiple_channels(self, *, channel_address: str, parameter: str) -> bool:
+         """Check if a parameter occurs on multiple channels of a device."""
+         if ADDRESS_SEPARATOR not in channel_address:
+             return False
+         if channels := self._address_parameter_cache.get((get_device_address(address=channel_address), parameter)):
+             return len(channels) > 1
+         return False
+
+     def get_channel_addresses_by_paramset_key(
+         self, *, interface_id: str, device_address: str
+     ) -> Mapping[ParamsetKey, list[str]]:
+         """Get the device channel addresses grouped by paramset key."""
+         channel_addresses: dict[ParamsetKey, list[str]] = {}
+         interface_paramset_descriptions = self._raw_paramset_descriptions[interface_id]
+         for (
+             channel_address,
+             paramset_descriptions,
+         ) in interface_paramset_descriptions.items():
+             if channel_address.startswith(device_address):
+                 for p_key in paramset_descriptions:
+                     if (paramset_key := ParamsetKey(p_key)) not in channel_addresses:
+                         channel_addresses[paramset_key] = []
+                     channel_addresses[paramset_key].append(channel_address)
+
+         return channel_addresses
+
+     def _init_address_parameter_list(self) -> None:
+         """
+         Initialize the device_address/parameter index.
+
+         Used to identify whether a parameter name occurs in multiple channels.
+         """
+         for channel_paramsets in self._raw_paramset_descriptions.values():
+             for channel_address, paramsets in channel_paramsets.items():
+                 self._add_address_parameter(channel_address=channel_address, paramsets=list(paramsets.values()))
+
+     def _add_address_parameter(self, *, channel_address: str, paramsets: list[dict[str, Any]]) -> None:
+         """Add address/parameter combinations to the cache."""
+         device_address, channel_no = get_split_channel_address(channel_address=channel_address)
+         cache = self._address_parameter_cache
+         for paramset in paramsets:
+             if not paramset:
+                 continue
+             for parameter in paramset:
+                 cache.setdefault((device_address, parameter), set()).add(channel_no)
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """Load paramset descriptions from disk into the paramset cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching paramset descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
+             self._init_address_parameter_list()
+         return result
+
+
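is_in_multiple_channels is answered entirely from the (device_address, parameter) -> {channel_no} index maintained above. The same bookkeeping in isolation:

    address_parameter_cache: dict[tuple[str, str], set[int | None]] = {}

    def add(channel_address: str, parameters: list[str]) -> None:
        device, _, channel_no = channel_address.partition(":")
        no = int(channel_no) if channel_no else None
        for parameter in parameters:
            address_parameter_cache.setdefault((device, parameter), set()).add(no)

    add("VCU0000001:1", ["STATE"])
    add("VCU0000001:2", ["STATE"])
    # STATE appears on two channels of the same device:
    print(len(address_parameter_cache[("VCU0000001", "STATE")]) > 1)  # True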
+ class SessionRecorder(BasePersistentFile):
+     """
+     Session recorder for the central unit.
+
+     Nested cache with TTL support.
+     Structure:
+         store[rpc_type][method][frozen_params][ts: int] = response: Any
+
+     - Expiration is lazy (checked on access/update); timestamps are int epoch seconds.
+     - Optional refresh_on_get extends the TTL when reading.
+     """
+
+     __slots__ = (
+         "_active",
+         "_ttl",
+         "_is_recording",
+         "_refresh_on_get",
+         "_store",
+     )
+
+     _file_postfix = FILE_SESSION_RECORDER
+     _sub_directory = SUB_DIRECTORY_SESSION
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         active: bool,
+         ttl_seconds: float,
+         refresh_on_get: bool = False,
+     ):
+         """Init the cache."""
+         self._active = active
+         if ttl_seconds < 0:
+             raise ValueError("ttl_seconds must be non-negative")
+         self._ttl: Final = float(ttl_seconds)
+         self._is_recording: bool = False
+         self._refresh_on_get: Final = refresh_on_get
+         # Nested defaultdicts: rpc_type -> method -> params -> ts(int) -> response.
+         # The defaultdict factories create the nesting levels on demand.
+         self._store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
+             lambda: defaultdict(lambda: defaultdict(dict))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._store,
+         )
+
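The defaultdict factories create the four nesting levels on demand, so set() never has to pre-create buckets. The shape of the store, with illustrative keys (timestamps are int epoch seconds):

    from collections import defaultdict
    from typing import Any

    store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
        lambda: defaultdict(lambda: defaultdict(dict))
    )

    # rpc_type -> method -> frozen params -> ts -> response
    store["xml_rpc"]["getVersion"]["()"][1735689600] = "2.0"
    print(store["xml_rpc"]["getVersion"]["()"])  # {1735689600: '2.0'}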
+     # ---------- internal helpers ----------
+
+     def _is_expired(self, *, ts: int, now: int | None = None) -> bool:
+         """Check whether an entry has expired, given epoch seconds."""
+         if self._ttl == 0:
+             return False
+         now = now if now is not None else _now()
+         return (now - ts) > self._ttl
+
+     def _purge_expired_at(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+     ) -> None:
+         """Remove expired entries for a given (rpc_type, method) bucket without creating new ones."""
+         if self._ttl == 0:
+             return
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return
+         now = _now()
+         empty_params: list[str] = []
+         for p, bucket_by_ts in bucket_by_parameter.items():
+             expired_ts = [ts for ts, _r in list(bucket_by_ts.items()) if self._is_expired(ts=ts, now=now)]
+             for ts in expired_ts:
+                 del bucket_by_ts[ts]
+             if not bucket_by_ts:
+                 empty_params.append(p)
+         for p in empty_params:
+             bucket_by_parameter.pop(p, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+
+     def _bucket(self, *, rpc_type: str, method: str) -> dict[str, dict[int, Any]]:
+         """Ensure and return the innermost bucket."""
+         return self._store[rpc_type][method]
+
+     # ---------- public API ----------
+
+     @property
+     def active(self) -> bool:
+         """Return if the session recorder is active."""
+         return self._active
+
+     async def _deactivate_after_delay(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> None:
+         """Change the state of the session recorder after a delay."""
+         self._is_recording = True
+         await asyncio.sleep(delay)
+         self._active = False
+         self._is_recording = False
+         if auto_save:
+             await self.save(randomize_output=randomize_output, use_ts_in_file_name=use_ts_in_file_name)
+         _LOGGER.debug("Deactivated session recorder after %s seconds", delay)
+
+     async def activate(
+         self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> bool:
+         """Activate the session recorder; deactivate after on_time (seconds) if given."""
+         if self._is_recording:
+             _LOGGER.info("ACTIVATE: A timed recording session is already running.")
+             return False
+         self._store.clear()
+         self._active = True
+         if on_time > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=on_time,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_file_name=use_ts_in_file_name,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+         return True
+
+     async def deactivate(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> bool:
+         """Deactivate the session recorder, optionally after a delay (seconds)."""
+         if self._is_recording:
+             _LOGGER.info("DEACTIVATE: A timed recording session is already running.")
+             return False
+         if delay > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=delay,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_file_name=use_ts_in_file_name,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+         else:
+             self._active = False
+             self._is_recording = False
+         return True
+
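The timed activate/deactivate flow is a plain fire-and-forget task around asyncio.sleep. The same pattern without the CentralUnit looper (names are illustrative):

    import asyncio

    class TimedFlag:
        def __init__(self) -> None:
            self.active = False
            self._task: asyncio.Task | None = None

        async def _deactivate_after(self, delay: float) -> None:
            await asyncio.sleep(delay)
            self.active = False

        def activate(self, *, on_time: float) -> None:
            self.active = True
            if on_time > 0:
                # Keep a reference so the task is not garbage-collected early.
                self._task = asyncio.get_running_loop().create_task(self._deactivate_after(on_time))

    async def main() -> None:
        flag = TimedFlag()
        flag.activate(on_time=0.1)
        print(flag.active)   # True
        await asyncio.sleep(0.2)
        print(flag.active)   # False

    asyncio.run(main())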
+     def add_json_rpc_session(
+         self,
+         *,
+         method: str,
+         params: dict[str, Any],
+         response: dict[str, Any] | None = None,
+         session_exc: Exception | None = None,
+     ) -> None:
+         """Add a JSON-RPC session to the content."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.JSON_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.JSON_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_JSON_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def add_xml_rpc_session(
+         self, *, method: str, params: tuple[Any, ...], response: Any | None = None, session_exc: Exception | None = None
+     ) -> None:
+         """Add an XML-RPC session to the content."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.XML_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.XML_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_XML_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def set(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         response: Any,
+         ts: int | datetime | None = None,
+     ) -> Self:
+         """Insert or update an entry."""
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         frozen_param = _freeze_params(params)
+         # Normalize timestamp to int epoch seconds
+         if isinstance(ts, datetime):
+             ts_int = int(ts.timestamp())
+         elif isinstance(ts, int):
+             ts_int = ts
+         else:
+             ts_int = _now()
+         self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = response
+         return self
+
+     def get(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         default: Any = None,
+     ) -> Any:
+         """
+         Return a cached response if still valid, else default.
+
+         This method must avoid creating buckets when the entry is missing.
+         It purges expired entries first, then returns the response at the
+         latest timestamp for the given params. If refresh_on_get is enabled,
+         it appends a new timestamp with the same response.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Access the store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return default
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return default
+         frozen_param = _freeze_params(params)
+         if not (bucket_by_ts := bucket_by_parameter.get(frozen_param)):
+             return default
+         try:
+             latest_ts = max(bucket_by_ts.keys())
+         except ValueError:
+             return default
+         resp = bucket_by_ts[latest_ts]
+         if self._refresh_on_get:
+             bucket_by_ts[_now()] = resp
+         return resp
+
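Reads resolve to the newest timestamp recorded for the frozen params; refresh_on_get then simply records the same response again under "now", which pushes the expiry window forward. Sketched on a bare ts -> response bucket:

    import time

    bucket_by_ts: dict[int, str] = {100: "old response", 200: "new response"}

    latest_ts = max(bucket_by_ts)        # 200
    response = bucket_by_ts[latest_ts]   # "new response"

    refresh_on_get = True
    if refresh_on_get:
        # Re-recording under the current time extends the entry's lifetime.
        bucket_by_ts[int(time.time())] = response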
+     def delete(self, *, rpc_type: str, method: str, params: Any) -> bool:
+         """
+         Delete an entry if it exists. Return True if removed.
+
+         Avoids creating buckets when the target does not exist and
+         cleans up empty parent buckets on successful deletion.
+         """
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return False
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return False
+         if (frozen_param := _freeze_params(params)) not in bucket_by_parameter:
+             return False
+         # Perform deletion
+         bucket_by_parameter.pop(frozen_param, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+         return True
+
+     def get_latest_response_by_method(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
+         """Return the latest non-expired (params, response) pairs for a given (rpc_type, method)."""
+         # Purge expired entries first without creating any new buckets.
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         result: list[tuple[Any, Any]] = []
+         # Access the store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return result
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return result
+         # For each parameter, choose the response at the latest timestamp.
+         for frozen_params, bucket_by_ts in bucket_by_parameter.items():
+             if not bucket_by_ts:
+                 continue
+             try:
+                 latest_ts = max(bucket_by_ts.keys())
+             except ValueError:
+                 continue
+             resp = bucket_by_ts[latest_ts]
+             params = _unfreeze_params(frozen_params=frozen_params)
+
+             result.append((params, resp))
+         return result
+
+     def get_latest_response_by_params(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+     ) -> Any:
+         """Return the latest non-expired response for a given (rpc_type, method, params)."""
+         # Purge expired entries first without creating any new buckets.
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+
+         # Access the store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return None
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return None
+         frozen_params = _freeze_params(params=params)
+
+         # Choose the response at the latest timestamp for the given params.
+         if (bucket_by_ts := bucket_by_parameter.get(frozen_params)) is None:
+             return None
+
+         try:
+             latest_ts = max(bucket_by_ts.keys())
+             return bucket_by_ts[latest_ts]
+         except ValueError:
+             return None
+
+     def cleanup(self) -> None:
+         """Purge all expired entries globally."""
+         for rpc_type in list(self._store.keys()):
+             for method in list(self._store[rpc_type].keys()):
+                 self._purge_expired_at(rpc_type=rpc_type, method=method)
+
+     def peek_ts(self, *, rpc_type: str, method: str, params: Any) -> datetime | None:
+         """
+         Return the most recent timestamp for a live entry, else None.
+
+         This method must not create buckets as a side effect. It purges expired
+         entries first and then returns the newest timestamp for the given
+         (rpc_type, method, params) if present.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Do NOT create buckets here; use .get chaining only.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return None
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return None
+         frozen_param = _freeze_params(params)
+         if (bucket_by_ts := bucket_by_parameter.get(frozen_param)) is None or not bucket_by_ts:
+             return None
+         # After the purge, remaining entries are alive; return the latest timestamp.
+         try:
+             latest_ts_int = max(bucket_by_ts.keys())
+         except ValueError:
+             # bucket was empty (shouldn't happen due to the check above); be safe
+             return None
+         return datetime.fromtimestamp(latest_ts_int, tz=UTC)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if the save operation should proceed."""
+         self.cleanup()
+         return len(self._store) > 0
+
+     def __repr__(self) -> str:
+         """Return the representation."""
+         self.cleanup()
+         return f"{self.__class__.__name__}({self._store})"
+
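No timer ever fires for expiry: every public method first purges the affected bucket, so stale entries disappear on the next access. The core check in isolation (a ttl of 0 means "never expire", as in _is_expired):

    import time

    def purge(bucket_by_ts: dict[int, object], *, ttl: float) -> None:
        if ttl == 0:  # ttl of 0 disables expiry in this scheme
            return
        now = int(time.time())
        for ts in [ts for ts in bucket_by_ts if (now - ts) > ttl]:
            del bucket_by_ts[ts]

    bucket = {int(time.time()) - 120: "stale", int(time.time()): "fresh"}
    purge(bucket, ttl=60.0)
    print(list(bucket.values()))  # ['fresh']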
+
+ def _freeze_params(params: Any) -> str:
+     """
+     Recursively freeze any structure so it can be used as a dictionary key.
+
+     - dict → dict with keys sorted and values frozen recursively.
+     - list/tuple → tuple of frozen elements.
+     - set/frozenset → tagged tuple ("__set__", tuple(frozen elements, sorted by repr)) to ensure JSON-serializable keys.
+     - datetime → tagged ISO 8601 string to ensure JSON-serializable keys.
+
+     The result is finally stringified, so leaf values lose their original types.
+     """
+     res: Any = ""
+     match params:
+         case datetime():
+             # orjson cannot serialize datetime objects as dict keys even with OPT_NON_STR_KEYS.
+             # Use a tagged ISO string to preserve the value and guarantee a stable, hashable key.
+             res = ("__datetime__", params.isoformat())
+         case dict():
+             res = {k: _freeze_params(v) for k, v in sorted(params.items())}
+         case list() | tuple():
+             res = tuple(_freeze_params(x) for x in params)
+         case set() | frozenset():
+             # Convert to a deterministically ordered, JSON-serializable representation.
+             frozen_elems = tuple(sorted((_freeze_params(x) for x in params), key=repr))
+             res = ("__set__", frozen_elems)
+         case _:
+             res = params
+
+     return str(res)
+
+
+ def _unfreeze_params(frozen_params: str) -> Any:
+     """
+     Reverse the _freeze_params transformation.
+
+     Tries to parse the frozen string with ast.literal_eval and then recursively
+     reconstructs the original structures:
+     - ("__set__", (<items>...)) -> set of items
+     - ("__datetime__", iso_string) -> datetime.fromisoformat(iso_string)
+     - dict values and tuple elements are processed recursively
+
+     If parsing fails, the original string is returned.
+     """
+     try:
+         obj = ast.literal_eval(frozen_params)
+     except Exception:
+         return frozen_params
+
+     def _walk(o: Any) -> Any:
+         if o and isinstance(o, tuple):
+             tag = o[0]
+             # Tagged set
+             if tag == "__set__" and len(o) == 2 and isinstance(o[1], tuple):
+                 return {_walk(x) for x in o[1]}
+             # Tagged datetime
+             if tag == "__datetime__" and len(o) == 2 and isinstance(o[1], str):
+                 try:
+                     return datetime.fromisoformat(o[1])
+                 except Exception:
+                     return o[1]
+             # Generic tuple
+             return tuple(_walk(x) for x in o)
+         if isinstance(o, dict):
+             return {k: _walk(v) for k, v in o.items()}
+         if isinstance(o, list):
+             return [_walk(x) for x in o]
+         if isinstance(o, tuple):
+             return tuple(_walk(x) for x in o)
+         # Guard with isinstance: non-string leaves (e.g. ints) have no startswith.
+         if isinstance(o, str) and o.startswith("{") and o.endswith("}"):
+             try:
+                 return ast.literal_eval(o)
+             except (ValueError, SyntaxError):
+                 return o
+         return o
+
+     return _walk(obj)
+
+
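A round trip through the two helpers. Note that freezing stringifies recursively, so non-string leaves inside containers come back in their string form; for the recorder this is acceptable, because frozen strings are only ever compared as keys:

    from datetime import UTC, datetime

    params = {"address": "VCU0000001:1", "paramset_key": "VALUES"}
    frozen = _freeze_params(params)
    print(frozen)                              # {'address': 'VCU0000001:1', 'paramset_key': 'VALUES'}
    print(_unfreeze_params(frozen) == params)  # True: string-valued dicts round-trip exactly

    ts = datetime(2025, 1, 1, tzinfo=UTC)
    print(_unfreeze_params(_freeze_params(ts)) == ts)  # True: the tagged ISO form is rebuilt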
+ def _get_file_path(*, storage_directory: str, sub_directory: str) -> str:
+     """Return the content path."""
+     return f"{storage_directory}/{sub_directory}"
+
+
+ def _get_file_name(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
+     """Return the content file_name."""
+     fn = f"{slugify(central_name)}_{file_name}"
+     if ts:
+         fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
+     return f"{fn}.json"
+
+
+ def _now() -> int:
+     """Return the current UTC time as epoch seconds (int)."""
+     return int(datetime.now(tz=UTC).timestamp())
+
+
+ @loop_check
+ def cleanup_files(*, central_name: str, storage_directory: str) -> None:
+     """Clean up the used files."""
+     loop = asyncio.get_running_loop()
+     file_name = f"{central_name}*.json".lower()
+     # delete_file is called with keyword arguments elsewhere in this module, so
+     # wrap the calls in functools.partial instead of passing positionals
+     # through run_in_executor.
+     cache_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_CACHE)
+     loop.run_in_executor(None, partial(delete_file, directory=cache_dir, file_name=file_name))
+     session_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_SESSION)
+     loop.run_in_executor(None, partial(delete_file, directory=session_dir, file_name=file_name))
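For reference, the resulting file names combine the slugified central name, the postfix, and an optional timestamp. A sketch, with FILE_NAME_TS_PATTERN assumed to be a strftime pattern along the lines of "%Y_%m_%d_%H_%M_%S" (illustrative; the real constant lives in aiohomematic.const):

    from datetime import datetime

    from slugify import slugify

    FILE_NAME_TS_PATTERN = "%Y_%m_%d_%H_%M_%S"  # assumption, for illustration only

    def file_name(central_name: str, postfix: str, ts: datetime | None = None) -> str:
        fn = f"{slugify(central_name)}_{postfix}"
        if ts:
            fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
        return f"{fn}.json"

    print(file_name("My CCU", "device_descriptions"))
    # my-ccu_device_descriptions.json
    print(file_name("My CCU", "device_descriptions", ts=datetime(2025, 1, 1)))
    # my-ccu_device_descriptions_2025_01_01_00_00_00.json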