aiohomematic 2025.10.8-py3-none-any.whl → 2025.10.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (73)
  1. aiohomematic/__init__.py +3 -3
  2. aiohomematic/async_support.py +1 -1
  3. aiohomematic/central/__init__.py +58 -30
  4. aiohomematic/central/decorators.py +1 -1
  5. aiohomematic/central/rpc_server.py +1 -1
  6. aiohomematic/client/__init__.py +18 -12
  7. aiohomematic/client/_rpc_errors.py +1 -1
  8. aiohomematic/client/json_rpc.py +29 -3
  9. aiohomematic/client/rpc_proxy.py +20 -2
  10. aiohomematic/const.py +23 -6
  11. aiohomematic/context.py +1 -1
  12. aiohomematic/converter.py +1 -1
  13. aiohomematic/decorators.py +1 -1
  14. aiohomematic/exceptions.py +1 -1
  15. aiohomematic/hmcli.py +1 -1
  16. aiohomematic/model/__init__.py +1 -1
  17. aiohomematic/model/calculated/__init__.py +1 -1
  18. aiohomematic/model/calculated/climate.py +1 -1
  19. aiohomematic/model/calculated/data_point.py +1 -1
  20. aiohomematic/model/calculated/operating_voltage_level.py +1 -1
  21. aiohomematic/model/calculated/support.py +1 -1
  22. aiohomematic/model/custom/__init__.py +1 -1
  23. aiohomematic/model/custom/climate.py +7 -4
  24. aiohomematic/model/custom/const.py +1 -1
  25. aiohomematic/model/custom/cover.py +1 -1
  26. aiohomematic/model/custom/data_point.py +1 -1
  27. aiohomematic/model/custom/definition.py +1 -1
  28. aiohomematic/model/custom/light.py +1 -1
  29. aiohomematic/model/custom/lock.py +1 -1
  30. aiohomematic/model/custom/siren.py +1 -1
  31. aiohomematic/model/custom/support.py +1 -1
  32. aiohomematic/model/custom/switch.py +1 -1
  33. aiohomematic/model/custom/valve.py +1 -1
  34. aiohomematic/model/data_point.py +3 -2
  35. aiohomematic/model/device.py +10 -10
  36. aiohomematic/model/event.py +1 -1
  37. aiohomematic/model/generic/__init__.py +1 -1
  38. aiohomematic/model/generic/action.py +1 -1
  39. aiohomematic/model/generic/binary_sensor.py +1 -1
  40. aiohomematic/model/generic/button.py +1 -1
  41. aiohomematic/model/generic/data_point.py +1 -1
  42. aiohomematic/model/generic/number.py +1 -1
  43. aiohomematic/model/generic/select.py +1 -1
  44. aiohomematic/model/generic/sensor.py +1 -1
  45. aiohomematic/model/generic/switch.py +1 -1
  46. aiohomematic/model/generic/text.py +1 -1
  47. aiohomematic/model/hub/__init__.py +1 -1
  48. aiohomematic/model/hub/binary_sensor.py +1 -1
  49. aiohomematic/model/hub/button.py +1 -1
  50. aiohomematic/model/hub/data_point.py +1 -1
  51. aiohomematic/model/hub/number.py +1 -1
  52. aiohomematic/model/hub/select.py +1 -1
  53. aiohomematic/model/hub/sensor.py +1 -1
  54. aiohomematic/model/hub/switch.py +1 -1
  55. aiohomematic/model/hub/text.py +1 -1
  56. aiohomematic/model/support.py +1 -1
  57. aiohomematic/model/update.py +1 -1
  58. aiohomematic/property_decorators.py +2 -2
  59. aiohomematic/store/__init__.py +34 -0
  60. aiohomematic/{caches → store}/dynamic.py +4 -4
  61. aiohomematic/store/persistent.py +933 -0
  62. aiohomematic/{caches → store}/visibility.py +4 -4
  63. aiohomematic/support.py +16 -12
  64. aiohomematic/validator.py +1 -1
  65. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.9.dist-info}/METADATA +1 -1
  66. aiohomematic-2025.10.9.dist-info/RECORD +78 -0
  67. aiohomematic_support/client_local.py +2 -2
  68. aiohomematic/caches/__init__.py +0 -12
  69. aiohomematic/caches/persistent.py +0 -478
  70. aiohomematic-2025.10.8.dist-info/RECORD +0 -78
  71. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.9.dist-info}/WHEEL +0 -0
  72. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.9.dist-info}/licenses/LICENSE +0 -0
  73. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.9.dist-info}/top_level.txt +0 -0
aiohomematic/store/persistent.py (new file)
@@ -0,0 +1,933 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2021-2025
+ """
+ Persistent stores used to keep Homematic metadata between runs.
+
+ This module provides on-disk stores that complement the short-lived, in-memory
+ stores from aiohomematic.store.dynamic. The goal is to minimize expensive data
+ retrieval from the backend by storing stable metadata such as device and
+ paramset descriptions in JSON files inside a dedicated cache directory.
+
+ Overview
+ - BasePersistentFile: Abstract base for file-backed content. It encapsulates
+   file path resolution, change detection via hashing, and thread-safe save/load
+   operations delegated to the CentralUnit looper.
+ - DeviceDescriptionCache: Persists device descriptions per interface, including
+   the mapping of devices/channels and model metadata.
+ - ParamsetDescriptionCache: Persists paramset descriptions per interface and
+   channel, and offers helpers to query parameters, paramset keys and related
+   channel addresses.
+ - SessionRecorder: Persists recorded RPC sessions (requests and responses).
+
+ Key behaviors
+ - Saves only if caches are enabled (CentralConfig.use_caches) and the content
+   has changed (hash comparison), keeping I/O minimal and predictable.
+ - Uses orjson for fast binary writes and json for reads with a custom
+   object_hook to rebuild nested defaultdict structures.
+ - Save/load/clear operations are synchronized via a semaphore and executed via
+   the CentralUnit looper to avoid blocking the event loop.
+
+ Helper functions are provided to build cache paths and filenames and to
+ optionally clean up stale cache directories.
+ """
+
+ from __future__ import annotations
+
+ from abc import ABC
+ import ast
+ import asyncio
+ from collections import defaultdict
+ from collections.abc import Mapping
+ from datetime import UTC, datetime
+ import json
+ import logging
+ import os
+ from typing import Any, Final, Self
+
+ import orjson
+ from slugify import slugify
+
+ from aiohomematic import central as hmcu
+ from aiohomematic.const import (
+     ADDRESS_SEPARATOR,
+     FILE_DEVICES,
+     FILE_NAME_TS_PATTERN,
+     FILE_PARAMSETS,
+     FILE_SESSION_RECORDER,
+     INIT_DATETIME,
+     SUB_DIRECTORY_CACHE,
+     SUB_DIRECTORY_SESSION,
+     UTF_8,
+     DataOperationResult,
+     DeviceDescription,
+     ParameterData,
+     ParamsetKey,
+     RPCType,
+ )
+ from aiohomematic.model.device import Device
+ from aiohomematic.support import (
+     check_or_create_directory,
+     create_random_device_addresses,
+     delete_file,
+     extract_exc_args,
+     get_device_address,
+     get_split_channel_address,
+     hash_sha256,
+     regular_to_default_dict_hook,
+ )
+
+ _LOGGER: Final = logging.getLogger(__name__)
+
+
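# A minimal sketch of the object_hook pattern referenced in the module docstring,
# assuming regular_to_default_dict_hook behaves roughly like this (illustrative
# only; the real helper lives in aiohomematic.support and may differ):
import json
from collections import defaultdict

def _to_default_dict_hook(obj: dict) -> defaultdict:
    # json.loads calls this for every decoded object, letting the loader
    # rebuild nested defaultdict structures instead of plain dicts.
    result: defaultdict = defaultdict(dict)
    result.update(obj)
    return result

data = json.loads('{"BidCos-RF": {"VCU0000001": {}}}', object_hook=_to_default_dict_hook)
assert isinstance(data["BidCos-RF"], defaultdict)  # missing keys now default to {}
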
+ class BasePersistentFile(ABC):
+     """Base class for file-backed persistent content."""
+
+     __slots__ = (
+         "_central",
+         "_directory",
+         "_file_postfix",
+         "_persistent_content",
+         "_save_load_semaphore",
+         "_sub_directory",
+         "_use_ts_in_filenames",
+         "last_hash_saved",
+         "last_save_triggered",
+     )
+
+     _file_postfix: str
+     _sub_directory: str
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         persistent_content: dict[str, Any],
+     ) -> None:
+         """Initialize the base class of the persistent content."""
+         self._save_load_semaphore: Final = asyncio.Semaphore()
+         self._central: Final = central
+         self._persistent_content: Final = persistent_content
+         self._directory: Final = _get_file_path(
+             storage_directory=central.config.storage_directory, sub_directory=self._sub_directory
+         )
+         self.last_save_triggered: datetime = INIT_DATETIME
+         self.last_hash_saved = hash_sha256(value=persistent_content)
+
+     @property
+     def content_hash(self) -> str:
+         """Return the hash of the content."""
+         return hash_sha256(value=self._persistent_content)
+
+     @property
+     def data_changed(self) -> bool:
+         """Return if the data has changed."""
+         return self.content_hash != self.last_hash_saved
+
+     def _get_filename(
+         self,
+         *,
+         use_ts_in_filename: bool = False,
+     ) -> str:
+         """Return the file name."""
+         return _get_filename(
+             central_name=self._central.name,
+             file_name=self._file_postfix,
+             ts=datetime.now() if use_ts_in_filename else None,
+         )
+
+     def _get_file_path(
+         self,
+         *,
+         use_ts_in_filename: bool = False,
+     ) -> str:
+         """Return the full file path."""
+         return os.path.join(self._directory, self._get_filename(use_ts_in_filename=use_ts_in_filename))
+
+     async def save(self, *, randomize_output: bool = False, use_ts_in_filename: bool = False) -> DataOperationResult:
+         """Save current data to disk."""
+         if not self._should_save:
+             return DataOperationResult.NO_SAVE
+
+         if not check_or_create_directory(directory=self._directory):
+             return DataOperationResult.NO_SAVE
+
+         def _perform_save() -> DataOperationResult:
+             try:
+                 with open(
+                     file=self._get_file_path(use_ts_in_filename=use_ts_in_filename),
+                     mode="wb",
+                 ) as file_pointer:
+                     file_pointer.write(
+                         self._manipulate_content(
+                             content=orjson.dumps(
+                                 self._persistent_content,
+                                 option=orjson.OPT_NON_STR_KEYS,
+                             ),
+                             randomize_output=randomize_output,
+                         )
+                     )
+                 self.last_hash_saved = self.content_hash
+             except (orjson.JSONEncodeError, OSError):
+                 return DataOperationResult.SAVE_FAIL
+             return DataOperationResult.SAVE_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_save, name=f"save-persistent-content-{self._get_filename()}"
+             )
+
+     def _manipulate_content(self, *, content: bytes, randomize_output: bool = False) -> bytes:
+         """Manipulate the content of the file. Optionally randomize addresses."""
+         if not randomize_output:
+             return content
+
+         addresses = [device.address for device in self._central.devices]
+         text = content.decode(encoding=UTF_8)
+         for device_address, rnd_address in create_random_device_addresses(addresses=addresses).items():
+             text = text.replace(device_address, rnd_address)
+         return text.encode(encoding=UTF_8)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if the save operation should proceed."""
+         self.last_save_triggered = datetime.now()
+         return (
+             check_or_create_directory(directory=self._directory)
+             and self._central.config.use_caches
+             and self.content_hash != self.last_hash_saved
+         )
+
+     async def load(self) -> DataOperationResult:
+         """Load data from disk into the dictionary."""
+         if not check_or_create_directory(directory=self._directory) or not os.path.exists(self._get_file_path()):
+             return DataOperationResult.NO_LOAD
+
+         def _perform_load() -> DataOperationResult:
+             with open(file=self._get_file_path(), encoding=UTF_8) as file_pointer:
+                 try:
+                     data = json.loads(file_pointer.read(), object_hook=regular_to_default_dict_hook)
+                     if (converted_hash := hash_sha256(value=data)) == self.last_hash_saved:
+                         return DataOperationResult.NO_LOAD
+                     self._persistent_content.clear()
+                     self._persistent_content.update(data)
+                     self.last_hash_saved = converted_hash
+                 except json.JSONDecodeError:
+                     return DataOperationResult.LOAD_FAIL
+             return DataOperationResult.LOAD_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_load, name=f"load-persistent-content-{self._get_filename()}"
+             )
+
+     async def clear(self) -> None:
+         """Remove the stored file from disk."""
+
+         def _perform_clear() -> None:
+             delete_file(directory=self._directory, file_name=f"{self._central.name}*.json".lower())
+             self._persistent_content.clear()
+
+         async with self._save_load_semaphore:
+             await self._central.looper.async_add_executor_job(_perform_clear, name="clear-persistent-content")
+
+
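# A minimal, self-contained sketch of the hash-gated save pattern used by
# BasePersistentFile: persist only when the content hash differs from the hash
# recorded at the last save. Names here are illustrative, not package API.
import hashlib
import json
from typing import Any

def _hash(value: Any) -> str:
    # Stable hash over a JSON rendering with sorted keys.
    return hashlib.sha256(json.dumps(value, sort_keys=True).encode()).hexdigest()

content: dict[str, Any] = {"VCU0000001": {"TYPE": "HmIP-SWDO"}}
last_hash_saved = _hash(content)

content["VCU0000001"]["TYPE"] = "HmIP-BROLL"
if _hash(content) != last_hash_saved:   # data_changed
    last_hash_saved = _hash(content)    # the write to disk would happen here
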
+ class DeviceDescriptionCache(BasePersistentFile):
+     """Cache for device descriptions per interface."""
+
+     __slots__ = (
+         "_addresses",
+         "_device_descriptions",
+         "_raw_device_descriptions",
+     )
+
+     _file_postfix = FILE_DEVICES
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Initialize the device description cache."""
+         # {interface_id, [device_descriptions]}
+         self._raw_device_descriptions: Final[dict[str, list[DeviceDescription]]] = defaultdict(list)
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_device_descriptions,
+         )
+         # {interface_id, {device_address, [channel_address]}}
+         self._addresses: Final[dict[str, dict[str, set[str]]]] = defaultdict(lambda: defaultdict(set))
+         # {interface_id, {address, device_descriptions}}
+         self._device_descriptions: Final[dict[str, dict[str, DeviceDescription]]] = defaultdict(dict)
+
+     def add_device(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Add a device to the cache."""
+         # Fast path: if the address is not yet known, skip costly removal operations.
+         if (address := device_description["ADDRESS"]) not in self._device_descriptions[interface_id]:
+             self._raw_device_descriptions[interface_id].append(device_description)
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+             return
+         # Address exists: remove old entries before adding the new description.
+         self._remove_device(
+             interface_id=interface_id,
+             addresses_to_remove=[address],
+         )
+         self._raw_device_descriptions[interface_id].append(device_description)
+         self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def get_raw_device_descriptions(self, *, interface_id: str) -> list[DeviceDescription]:
+         """Retrieve raw device descriptions from the cache."""
+         return self._raw_device_descriptions[interface_id]
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove a device from the cache."""
+         self._remove_device(
+             interface_id=device.interface_id,
+             addresses_to_remove=[device.address, *device.channels.keys()],
+         )
+
+     def _remove_device(self, *, interface_id: str, addresses_to_remove: list[str]) -> None:
+         """Remove a device from the cache."""
+         # Use a set for faster membership checks.
+         addresses_set = set(addresses_to_remove)
+         self._raw_device_descriptions[interface_id] = [
+             device for device in self._raw_device_descriptions[interface_id] if device["ADDRESS"] not in addresses_set
+         ]
+         addr_map = self._addresses[interface_id]
+         desc_map = self._device_descriptions[interface_id]
+         for address in addresses_set:
+             # Pop with a default to avoid KeyError and try/except overhead.
+             if ADDRESS_SEPARATOR not in address:
+                 addr_map.pop(address, None)
+             desc_map.pop(address, None)
+
+     def get_addresses(self, *, interface_id: str | None = None) -> frozenset[str]:
+         """Return the addresses, optionally filtered by interface, as a frozen set."""
+         if interface_id:
+             return frozenset(self._addresses[interface_id])
+         return frozenset(addr for interface_id in self.get_interface_ids() for addr in self._addresses[interface_id])
+
+     def get_device_descriptions(self, *, interface_id: str) -> Mapping[str, DeviceDescription]:
+         """Return the device descriptions by interface."""
+         return self._device_descriptions[interface_id]
+
+     def get_interface_ids(self) -> tuple[str, ...]:
+         """Return the interface ids."""
+         return tuple(self._raw_device_descriptions.keys())
+
+     def has_device_descriptions(self, *, interface_id: str) -> bool:
+         """Return whether device descriptions exist for the interface."""
+         return interface_id in self._device_descriptions
+
+     def find_device_description(self, *, interface_id: str, device_address: str) -> DeviceDescription | None:
+         """Return the device description by interface and device_address, or None."""
+         return self._device_descriptions[interface_id].get(device_address)
+
+     def get_device_description(self, *, interface_id: str, address: str) -> DeviceDescription:
+         """Return the device description by interface and address."""
+         return self._device_descriptions[interface_id][address]
+
+     def get_device_with_channels(self, *, interface_id: str, device_address: str) -> Mapping[str, DeviceDescription]:
+         """Return the device dict by interface and device_address."""
+         device_descriptions: dict[str, DeviceDescription] = {
+             device_address: self.get_device_description(interface_id=interface_id, address=device_address)
+         }
+         children = device_descriptions[device_address]["CHILDREN"]
+         for channel_address in children:
+             device_descriptions[channel_address] = self.get_device_description(
+                 interface_id=interface_id, address=channel_address
+             )
+         return device_descriptions
+
+     def get_model(self, *, device_address: str) -> str | None:
+         """Return the device model (TYPE)."""
+         for data in self._device_descriptions.values():
+             if items := data.get(device_address):
+                 return items["TYPE"]
+         return None
+
+     def _convert_device_descriptions(self, *, interface_id: str, device_descriptions: list[DeviceDescription]) -> None:
+         """Convert the provided list of device descriptions."""
+         for device_description in device_descriptions:
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def _process_device_description(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Index a single device description by address."""
+         address = device_description["ADDRESS"]
+         device_address = get_device_address(address=address)
+         self._device_descriptions[interface_id][address] = device_description
+
+         # Avoid redundant membership checks; set.add is idempotent and cheaper than check+add.
+         addr_set = self._addresses[interface_id][device_address]
+         addr_set.add(device_address)
+         addr_set.add(address)
+
+     async def load(self) -> DataOperationResult:
+         """Load device data from disk into the device description cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching device descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load()) == DataOperationResult.LOAD_SUCCESS:
+             for (
+                 interface_id,
+                 device_descriptions,
+             ) in self._raw_device_descriptions.items():
+                 self._convert_device_descriptions(interface_id=interface_id, device_descriptions=device_descriptions)
+         return result
+
+
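# A minimal sketch of the address bookkeeping that DeviceDescriptionCache keeps
# per interface: every channel address maps back to its device address. The
# addresses and helper below are made up for illustration.
from collections import defaultdict

ADDRESS_SEPARATOR = ":"  # channel addresses look like "VCU0000001:1"

def get_device_address(address: str) -> str:
    return address.split(ADDRESS_SEPARATOR)[0]

addresses: dict[str, dict[str, set[str]]] = defaultdict(lambda: defaultdict(set))
for addr in ("VCU0000001", "VCU0000001:1", "VCU0000001:2"):
    addresses["BidCos-RF"][get_device_address(addr)].add(addr)

assert addresses["BidCos-RF"]["VCU0000001"] == {"VCU0000001", "VCU0000001:1", "VCU0000001:2"}
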
+ class ParamsetDescriptionCache(BasePersistentFile):
+     """Cache for paramset descriptions."""
+
+     __slots__ = (
+         "_address_parameter_cache",
+         "_raw_paramset_descriptions",
+     )
+
+     _file_postfix = FILE_PARAMSETS
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Init the paramset description cache."""
+         # {interface_id, {channel_address, paramsets}}
+         self._raw_paramset_descriptions: Final[dict[str, dict[str, dict[ParamsetKey, dict[str, ParameterData]]]]] = (
+             defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_paramset_descriptions,
+         )
+
+         # {(device_address, parameter), [channel_no]}
+         self._address_parameter_cache: Final[dict[tuple[str, str], set[int | None]]] = {}
+
+     @property
+     def raw_paramset_descriptions(
+         self,
+     ) -> Mapping[str, Mapping[str, Mapping[ParamsetKey, Mapping[str, ParameterData]]]]:
+         """Return the paramset descriptions."""
+         return self._raw_paramset_descriptions
+
+     def add(
+         self,
+         *,
+         interface_id: str,
+         channel_address: str,
+         paramset_key: ParamsetKey,
+         paramset_description: dict[str, ParameterData],
+     ) -> None:
+         """Add a paramset description to the cache."""
+         self._raw_paramset_descriptions[interface_id][channel_address][paramset_key] = paramset_description
+         self._add_address_parameter(channel_address=channel_address, paramsets=[paramset_description])
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove device paramset descriptions from the cache."""
+         if interface := self._raw_paramset_descriptions.get(device.interface_id):
+             for channel_address in device.channels:
+                 if channel_address in interface:
+                     del self._raw_paramset_descriptions[device.interface_id][channel_address]
+
+     def has_interface_id(self, *, interface_id: str) -> bool:
+         """Return if the interface is in the paramset descriptions cache."""
+         return interface_id in self._raw_paramset_descriptions
+
+     def get_paramset_keys(self, *, interface_id: str, channel_address: str) -> tuple[ParamsetKey, ...]:
+         """Get paramset keys from the paramset descriptions cache."""
+         return tuple(self._raw_paramset_descriptions[interface_id][channel_address])
+
+     def get_channel_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str
+     ) -> Mapping[ParamsetKey, Mapping[str, ParameterData]]:
+         """Get the paramset descriptions for a channel from the cache."""
+         return self._raw_paramset_descriptions[interface_id].get(channel_address, {})
+
+     def get_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey
+     ) -> Mapping[str, ParameterData]:
+         """Get paramset descriptions from the cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key]
+
+     def get_parameter_data(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey, parameter: str
+     ) -> ParameterData | None:
+         """Get parameter_data from the cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key].get(parameter)
+
+     def is_in_multiple_channels(self, *, channel_address: str, parameter: str) -> bool:
+         """Check if a parameter occurs in multiple channels of a device."""
+         if ADDRESS_SEPARATOR not in channel_address:
+             return False
+         if channels := self._address_parameter_cache.get((get_device_address(address=channel_address), parameter)):
+             return len(channels) > 1
+         return False
+
+     def get_channel_addresses_by_paramset_key(
+         self, *, interface_id: str, device_address: str
+     ) -> Mapping[ParamsetKey, list[str]]:
+         """Get the device channel addresses grouped by paramset key."""
+         channel_addresses: dict[ParamsetKey, list[str]] = {}
+         interface_paramset_descriptions = self._raw_paramset_descriptions[interface_id]
+         for (
+             channel_address,
+             paramset_descriptions,
+         ) in interface_paramset_descriptions.items():
+             if channel_address.startswith(device_address):
+                 for p_key in paramset_descriptions:
+                     if (paramset_key := ParamsetKey(p_key)) not in channel_addresses:
+                         channel_addresses[paramset_key] = []
+                     channel_addresses[paramset_key].append(channel_address)
+
+         return channel_addresses
+
+     def _init_address_parameter_list(self) -> None:
+         """
+         Initialize the device_address/parameter list.
+
+         Used to identify whether a parameter name exists in multiple channels.
+         """
+         for channel_paramsets in self._raw_paramset_descriptions.values():
+             for channel_address, paramsets in channel_paramsets.items():
+                 self._add_address_parameter(channel_address=channel_address, paramsets=list(paramsets.values()))
+
+     def _add_address_parameter(self, *, channel_address: str, paramsets: list[dict[str, Any]]) -> None:
+         """Add an address/parameter mapping to the cache."""
+         device_address, channel_no = get_split_channel_address(channel_address=channel_address)
+         cache = self._address_parameter_cache
+         for paramset in paramsets:
+             if not paramset:
+                 continue
+             for parameter in paramset:
+                 cache.setdefault((device_address, parameter), set()).add(channel_no)
+
+     async def load(self) -> DataOperationResult:
+         """Load paramset descriptions from disk into the paramset cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching paramset descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load()) == DataOperationResult.LOAD_SUCCESS:
+             self._init_address_parameter_list()
+         return result
+
+
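# A minimal sketch of the multi-channel detection idea behind
# _address_parameter_cache: record which channel numbers carry a parameter,
# then a parameter is "in multiple channels" when more than one channel number
# was seen. All names below are illustrative.
address_parameter_cache: dict[tuple[str, str], set[int | None]] = {}

def note_parameter(device_address: str, parameter: str, channel_no: int | None) -> None:
    address_parameter_cache.setdefault((device_address, parameter), set()).add(channel_no)

note_parameter("VCU0000001", "STATE", 1)
note_parameter("VCU0000001", "STATE", 2)
note_parameter("VCU0000001", "LOWBAT", 0)

assert len(address_parameter_cache[("VCU0000001", "STATE")]) > 1    # multiple channels
assert len(address_parameter_cache[("VCU0000001", "LOWBAT")]) == 1  # single channel
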
+ class SessionRecorder(BasePersistentFile):
+     """
+     Session recorder for the central unit.
+
+     Nested cache with TTL support.
+     Structure:
+         store[rpc_type][method][params][ts: int] = (response: Any, ttl_s: float)
+
+     - Each entry expires after its TTL (global default or per-entry override).
+     - Expiration is lazy (checked on access/update).
+     - Optional refresh_on_get extends the TTL when reading.
+     """
+
+     __slots__ = (
+         "_active",
+         "_default_ttl",
+         "_is_delayed",
+         "_refresh_on_get",
+         "_store",
+     )
+
+     _file_postfix = FILE_SESSION_RECORDER
+     _sub_directory = SUB_DIRECTORY_SESSION
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         default_ttl_seconds: float,
+         active: bool,
+         refresh_on_get: bool = False,
+     ):
+         """Init the cache."""
+         self._active = active
+         if default_ttl_seconds <= 0:
+             raise ValueError("default_ttl_seconds must be positive")
+         self._default_ttl: Final = float(default_ttl_seconds)
+         self._is_delayed: bool = False
+         self._refresh_on_get: Final = refresh_on_get
+         # Use nested defaultdicts: rpc_type -> method -> params -> ts(int) -> (response, ttl_s).
+         # Annotate as defaultdict to match the actual type and satisfy mypy.
+         self._store: dict[str, dict[str, dict[str, dict[int, tuple[Any, float]]]]] = defaultdict(
+             lambda: defaultdict(lambda: defaultdict(dict))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._store,
+         )
+
+     # ---------- internal helpers ----------
+
+     def _purge_expired_at(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+     ) -> None:
+         """Remove expired entries for a given (rpc_type, method) bucket without creating new ones."""
+
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return
+         now = _now()
+         empty_params: list[str] = []
+         for p, bucket_by_ts in bucket_by_parameter.items():
+             expired_ts = [
+                 ts for ts, (_r, ttl_s) in list(bucket_by_ts.items()) if _is_expired(ts=ts, ttl_s=ttl_s, now=now)
+             ]
+             for ts in expired_ts:
+                 del bucket_by_ts[ts]
+             if not bucket_by_ts:
+                 empty_params.append(p)
+         for p in empty_params:
+             bucket_by_parameter.pop(p, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+
+     def _bucket(self, *, rpc_type: str, method: str) -> dict[str, dict[int, tuple[Any, float]]]:
+         """Ensure and return the innermost bucket."""
+         return self._store[rpc_type][method]
+
+     # ---------- public API ----------
+
+     @property
+     def active(self) -> bool:
+         """Return if the session recorder is active."""
+         return self._active
+
+     async def _deactivate_after_delay(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
+     ) -> None:
+         """Change the state of the session recorder after a delay."""
+         self._is_delayed = True
+         await asyncio.sleep(delay)
+         self._active = False
+         self._is_delayed = False
+         if auto_save:
+             await self.save(randomize_output=randomize_output, use_ts_in_filename=use_ts_in_filename)
+         _LOGGER.debug("Deactivated session recorder after %s minutes", delay / 60)
+
+     async def activate(
+         self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
+     ) -> None:
+         """Activate the session recorder. Deactivate after on_time (seconds) if given."""
+         self._store.clear()
+         self._active = True
+         if on_time > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=on_time,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_filename=use_ts_in_filename,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+
+     async def deactivate(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
+     ) -> None:
+         """Deactivate the session recorder, optionally after a delay (seconds)."""
+         if delay > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=delay,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_filename=use_ts_in_filename,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+         else:
+             self._active = False
+             self._is_delayed = False
+
+     def add_json_rpc_session(
+         self,
+         *,
+         method: str,
+         params: dict[str, Any],
+         response: dict[str, Any] | None = None,
+         session_exc: Exception | None = None,
+     ) -> None:
+         """Add a JSON-RPC session to the content."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.JSON_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.JSON_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_JSON_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def add_xml_rpc_session(
+         self, *, method: str, params: tuple[Any, ...], response: Any | None = None, session_exc: Exception | None = None
+     ) -> None:
+         """Add an XML-RPC session to the content."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.XML_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.XML_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_XML_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def set(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         response: Any,
+         ttl_seconds: float | None = None,
+         ts: int | datetime | None = None,
+     ) -> Self:
+         """Insert or update an entry."""
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         frozen_param = _freeze_params(params)
+         if (ttl_s := ttl_seconds if ttl_seconds is not None else self._default_ttl) <= 0:
+             raise ValueError("ttl_seconds must be positive")
+         # Normalize the timestamp to int epoch seconds.
+         if isinstance(ts, datetime):
+             ts_int = int(ts.timestamp())
+         elif isinstance(ts, int):
+             ts_int = ts
+         else:
+             ts_int = _now()
+         self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = (response, ttl_s)
+         return self
+
+     def get(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         default: Any = None,
+     ) -> Any:
+         """
+         Return a cached response if still valid, else default.
+
+         This method must avoid creating buckets when the entry is missing.
+         It purges expired entries first, then returns the response at the
+         latest timestamp for the given params. If refresh_on_get is enabled,
+         it appends a new timestamp with the same response/ttl.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Access the store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return default
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return default
+         frozen_param = _freeze_params(params)
+         if not (bucket_by_ts := bucket_by_parameter.get(frozen_param)):
+             return default
+         try:
+             latest_ts = max(bucket_by_ts.keys())
+         except ValueError:
+             return default
+         resp, ttl_s = bucket_by_ts[latest_ts]
+         if self._refresh_on_get:
+             bucket_by_ts[_now()] = (resp, ttl_s)
+         return resp
+
+     def delete(self, *, rpc_type: str, method: str, params: Any) -> bool:
+         """
+         Delete an entry if it exists. Return True if removed.
+
+         Avoid creating buckets when the target does not exist.
+         Clean up empty parent buckets on successful deletion.
+         """
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return False
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return False
+         if (frozen_param := _freeze_params(params)) not in bucket_by_parameter:
+             return False
+         # Perform the deletion.
+         bucket_by_parameter.pop(frozen_param, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+         return True
+
+     def get_latest_fresh(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
+         """Return the latest non-expired (params, response) pairs for a given (rpc_type, method)."""
+         # Purge expired entries first without creating any new buckets.
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         result: list[tuple[Any, Any]] = []
+         # Access the store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return result
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return result
+         # For each parameter, choose the response at the latest timestamp.
+         for frozen_params, bucket_by_ts in bucket_by_parameter.items():
+             if not bucket_by_ts:
+                 continue
+             try:
+                 latest_ts = max(bucket_by_ts.keys())
+             except ValueError:
+                 continue
+             resp, _ttl_s = bucket_by_ts[latest_ts]
+             params = _unfreeze_params(frozen_params=frozen_params)
+
+             result.append((params, resp))
+         return result
+
+     def cleanup(self) -> None:
+         """Purge all expired entries globally."""
+         for rpc_type in list(self._store.keys()):
+             for method in list(self._store[rpc_type].keys()):
+                 self._purge_expired_at(rpc_type=rpc_type, method=method)
+
+     def peek_ts(self, *, rpc_type: str, method: str, params: Any) -> datetime | None:
+         """
+         Return the most recent timestamp for a live entry, else None.
+
+         This method must not create buckets as a side effect. It purges expired
+         entries first and then returns the newest timestamp for the given
+         (rpc_type, method, params) if present.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Do NOT create buckets here; use .get chaining only.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return None
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return None
+         frozen_param = _freeze_params(params)
+         if (bucket_by_ts := bucket_by_parameter.get(frozen_param)) is None or not bucket_by_ts:
+             return None
+         # After the purge, remaining entries are alive; return the latest timestamp.
+         try:
+             latest_ts_int = max(bucket_by_ts.keys())
+         except ValueError:
+             # The bucket was empty (shouldn't happen due to the check above); be safe.
+             return None
+         return datetime.fromtimestamp(latest_ts_int, tz=UTC)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if the save operation should proceed."""
+         self.cleanup()
+         return len(self._store) > 0
+
+     def __repr__(self) -> str:
+         """Return the representation."""
+         self.cleanup()
+         return f"{self.__class__.__name__}({self._store})"
+
+
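# A minimal, self-contained sketch of the TTL bookkeeping used by
# SessionRecorder: entries keyed by timestamp expire lazily once their TTL has
# elapsed. Simplified and illustrative; the real class persists to disk.
import time
from typing import Any

store: dict[int, tuple[Any, float]] = {}

def put(response: Any, ttl_s: float) -> None:
    store[int(time.time())] = (response, ttl_s)

def get_latest() -> Any | None:
    now = int(time.time())
    for ts in [ts for ts, (_r, ttl_s) in store.items() if (now - ts) > ttl_s]:
        del store[ts]  # lazy purge of expired entries
    return store[max(store)][0] if store else None

put({"result": "ok"}, ttl_s=60.0)
assert get_latest() == {"result": "ok"}
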
+ def _freeze_params(params: Any) -> str:
+     """
+     Recursively freeze any structure so it can be used as a dictionary key.
+
+     - dict → dict with frozen values, iterated in sorted key order.
+     - list/tuple → tuple of frozen elements.
+     - set/frozenset → tagged tuple ("__set__", tuple(frozen elements sorted by repr)) to ensure JSON-serializable keys.
+     - datetime → tagged ISO 8601 string to ensure JSON-serializable keys.
+
+     The frozen structure is returned as its string representation.
+     """
+     res: Any = ""
+     match params:
+         case datetime():
+             # orjson cannot serialize datetime objects as dict keys even with OPT_NON_STR_KEYS.
+             # Use a tagged ISO string to preserve the value and guarantee a stable, hashable key.
+             res = ("__datetime__", params.isoformat())
+         case dict():
+             res = {k: _freeze_params(v) for k, v in sorted(params.items())}
+         case list() | tuple():
+             res = tuple(_freeze_params(x) for x in params)
+         case set() | frozenset():
+             # Convert to a deterministically ordered, JSON-serializable representation.
+             frozen_elems = tuple(sorted((_freeze_params(x) for x in params), key=repr))
+             res = ("__set__", frozen_elems)
+         case _:
+             res = params
+
+     return str(res)
+
+
+ def _unfreeze_params(frozen_params: str) -> Any:
+     """
+     Reverse the _freeze_params transformation.
+
+     Tries to parse the frozen string with ast.literal_eval and then recursively
+     reconstructs the original structures:
+     - ("__set__", (<items>...)) -> set of items
+     - ("__datetime__", iso_string) -> datetime.fromisoformat(iso_string)
+     - dict values and tuple elements are processed recursively
+
+     If parsing fails, return the original string.
+     """
+     try:
+         obj = ast.literal_eval(frozen_params)
+     except Exception:
+         return frozen_params
+
+     def _walk(o: Any) -> Any:
+         if o and isinstance(o, tuple):
+             tag = o[0]
+             # Tagged set
+             if tag == "__set__" and len(o) == 2 and isinstance(o[1], tuple):
+                 return {_walk(x) for x in o[1]}
+             # Tagged datetime
+             if tag == "__datetime__" and len(o) == 2 and isinstance(o[1], str):
+                 try:
+                     return datetime.fromisoformat(o[1])
+                 except Exception:
+                     return o[1]
+             # Generic tuple
+             return tuple(_walk(x) for x in o)
+         if isinstance(o, dict):
+             return {k: _walk(v) for k, v in o.items()}
+         if isinstance(o, list):
+             return [_walk(x) for x in o]
+         if isinstance(o, str) and o.startswith("{") and o.endswith("}"):
+             return ast.literal_eval(o)
+         return o
+
+     return _walk(obj)
+
+
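# A quick look at the freeze/unfreeze helpers above. Freezing stringifies each
# level, so round-trips are exact for flat structures and tagged datetimes;
# nested containers come back with stringified leaves. This follows the code
# above and is illustrative, not a public API.
from datetime import UTC, datetime

when = datetime(2025, 10, 9, tzinfo=UTC)
frozen = _freeze_params(when)  # "('__datetime__', '2025-10-09T00:00:00+00:00')"
assert _unfreeze_params(frozen_params=frozen) == when

frozen_dict = _freeze_params({"interface": "BidCos-RF"})  # "{'interface': 'BidCos-RF'}"
assert _unfreeze_params(frozen_params=frozen_dict) == {"interface": "BidCos-RF"}
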
+ def _get_file_path(*, storage_directory: str, sub_directory: str) -> str:
+     """Return the content path."""
+     return f"{storage_directory}/{sub_directory}"
+
+
+ def _get_filename(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
+     """Return the content filename."""
+     fn = f"{slugify(central_name)}_{file_name}"
+     if ts:
+         fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
+     return f"{fn}.json"
+
+
+ def _now() -> int:
+     """Return current UTC time as epoch seconds (int)."""
+     return int(datetime.now(tz=UTC).timestamp())
+
+
+ def _is_expired(*, ts: int, ttl_s: float, now: int | None = None) -> bool:
+     """Check whether an entry has expired given epoch seconds."""
+     now = now if now is not None else _now()
+     return (now - ts) > ttl_s
+
+
+ async def cleanup_files(*, central_name: str, storage_directory: str) -> None:
+     """Clean up the used files."""
+     loop = asyncio.get_running_loop()
+     cache_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_CACHE)
+     loop.call_soon_threadsafe(delete_file, cache_dir, f"{central_name}*.json".lower())
+     session_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_SESSION)
+     loop.call_soon_threadsafe(delete_file, session_dir, f"{central_name}*.json".lower())
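# A small check of the filename scheme implemented by _get_filename above:
# slugified central name, a fixed postfix, an optional timestamp, ".json".
# The pattern and postfix below stand in for FILE_NAME_TS_PATTERN and
# FILE_DEVICES, whose exact values live in aiohomematic.const (assumed here).
from datetime import datetime
from slugify import slugify

FILE_NAME_TS_PATTERN = "%Y%m%d_%H%M%S"  # assumed; see aiohomematic.const

def build_filename(central_name: str, file_name: str, ts: datetime | None = None) -> str:
    fn = f"{slugify(central_name)}_{file_name}"
    if ts:
        fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
    return f"{fn}.json"

print(build_filename("My CCU", "device_descriptions"))
# -> "my-ccu_device_descriptions.json"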