aiohomematic 2025.10.8__py3-none-any.whl → 2025.10.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (73)
  1. aiohomematic/__init__.py +3 -3
  2. aiohomematic/async_support.py +1 -1
  3. aiohomematic/central/__init__.py +69 -30
  4. aiohomematic/central/decorators.py +1 -1
  5. aiohomematic/central/rpc_server.py +1 -1
  6. aiohomematic/client/__init__.py +22 -14
  7. aiohomematic/client/_rpc_errors.py +1 -1
  8. aiohomematic/client/json_rpc.py +29 -3
  9. aiohomematic/client/rpc_proxy.py +20 -2
  10. aiohomematic/const.py +33 -7
  11. aiohomematic/context.py +1 -1
  12. aiohomematic/converter.py +1 -1
  13. aiohomematic/decorators.py +1 -1
  14. aiohomematic/exceptions.py +1 -1
  15. aiohomematic/hmcli.py +1 -1
  16. aiohomematic/model/__init__.py +1 -1
  17. aiohomematic/model/calculated/__init__.py +1 -1
  18. aiohomematic/model/calculated/climate.py +1 -1
  19. aiohomematic/model/calculated/data_point.py +1 -1
  20. aiohomematic/model/calculated/operating_voltage_level.py +1 -1
  21. aiohomematic/model/calculated/support.py +1 -1
  22. aiohomematic/model/custom/__init__.py +1 -1
  23. aiohomematic/model/custom/climate.py +7 -4
  24. aiohomematic/model/custom/const.py +1 -1
  25. aiohomematic/model/custom/cover.py +1 -1
  26. aiohomematic/model/custom/data_point.py +1 -1
  27. aiohomematic/model/custom/definition.py +1 -1
  28. aiohomematic/model/custom/light.py +1 -1
  29. aiohomematic/model/custom/lock.py +1 -1
  30. aiohomematic/model/custom/siren.py +1 -1
  31. aiohomematic/model/custom/support.py +1 -1
  32. aiohomematic/model/custom/switch.py +1 -1
  33. aiohomematic/model/custom/valve.py +1 -1
  34. aiohomematic/model/data_point.py +4 -4
  35. aiohomematic/model/device.py +13 -13
  36. aiohomematic/model/event.py +1 -1
  37. aiohomematic/model/generic/__init__.py +1 -1
  38. aiohomematic/model/generic/action.py +1 -1
  39. aiohomematic/model/generic/binary_sensor.py +1 -1
  40. aiohomematic/model/generic/button.py +1 -1
  41. aiohomematic/model/generic/data_point.py +1 -1
  42. aiohomematic/model/generic/number.py +1 -1
  43. aiohomematic/model/generic/select.py +1 -1
  44. aiohomematic/model/generic/sensor.py +1 -1
  45. aiohomematic/model/generic/switch.py +1 -1
  46. aiohomematic/model/generic/text.py +1 -1
  47. aiohomematic/model/hub/__init__.py +1 -1
  48. aiohomematic/model/hub/binary_sensor.py +1 -1
  49. aiohomematic/model/hub/button.py +1 -1
  50. aiohomematic/model/hub/data_point.py +1 -1
  51. aiohomematic/model/hub/number.py +1 -1
  52. aiohomematic/model/hub/select.py +1 -1
  53. aiohomematic/model/hub/sensor.py +1 -1
  54. aiohomematic/model/hub/switch.py +1 -1
  55. aiohomematic/model/hub/text.py +1 -1
  56. aiohomematic/model/support.py +1 -1
  57. aiohomematic/model/update.py +3 -3
  58. aiohomematic/property_decorators.py +2 -2
  59. aiohomematic/store/__init__.py +34 -0
  60. aiohomematic/{caches → store}/dynamic.py +4 -4
  61. aiohomematic/store/persistent.py +970 -0
  62. aiohomematic/{caches → store}/visibility.py +4 -4
  63. aiohomematic/support.py +16 -12
  64. aiohomematic/validator.py +1 -1
  65. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.10.dist-info}/METADATA +1 -1
  66. aiohomematic-2025.10.10.dist-info/RECORD +78 -0
  67. aiohomematic_support/client_local.py +8 -8
  68. aiohomematic/caches/__init__.py +0 -12
  69. aiohomematic/caches/persistent.py +0 -478
  70. aiohomematic-2025.10.8.dist-info/RECORD +0 -78
  71. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.10.dist-info}/WHEEL +0 -0
  72. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.10.dist-info}/licenses/LICENSE +0 -0
  73. {aiohomematic-2025.10.8.dist-info → aiohomematic-2025.10.10.dist-info}/top_level.txt +0 -0
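Items 59-62 and 68-69 show the aiohomematic/caches package being renamed to aiohomematic/store. A minimal sketch of the import change downstream code would need; the old module path and the carried-over class name are assumptions inferred from the renames above, not something this diff shows directly:

    # Hypothetical downstream migration for the caches -> store rename.
    # Before 2025.10.10 (assumed old path):
    #   from aiohomematic.caches.persistent import DeviceDescriptionCache
    # From 2025.10.10 on (the class is defined in the new file below):
    from aiohomematic.store.persistent import DeviceDescriptionCache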
aiohomematic/store/persistent.py (new file)
@@ -0,0 +1,970 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2021-2025
+ """
+ Persistent stores used to keep Homematic metadata between runs.
+
+ This module provides on-disk stores that complement the short-lived, in-memory
+ stores from aiohomematic.store.dynamic. The goal is to minimize expensive data
+ retrieval from the backend by storing stable metadata such as device and
+ paramset descriptions in JSON files inside a dedicated cache directory.
+
+ Overview
+ - BasePersistentFile: Abstract base for file-backed content. It encapsulates
+   file path resolution, change detection via hashing, and thread-safe save/load
+   operations delegated to the CentralUnit looper.
+ - DeviceDescriptionCache: Persists device descriptions per interface, including
+   the mapping of device/channels and model metadata.
+ - ParamsetDescriptionCache: Persists paramset descriptions per interface and
+   channel, and offers helpers to query parameters, paramset keys and related
+   channel addresses.
+ - SessionRecorder: Persists recorded RPC sessions.
+
+ Key behaviors
+ - Saves only if caching is enabled (CentralConfig.use_caches) and the content
+   has changed (hash comparison), keeping I/O minimal and predictable.
+ - Uses orjson for fast binary writes and json for reads with a custom
+   object_hook to rebuild nested defaultdict structures.
+ - Save/load/clear operations are synchronized via a semaphore and executed via
+   the CentralUnit looper to avoid blocking the event loop.
+
+ Helper functions are provided to build content paths and file names and to
+ optionally clean up stale content directories.
+ """
+
+ from __future__ import annotations
+
+ from abc import ABC
+ import ast
+ import asyncio
+ from collections import defaultdict
+ from collections.abc import Mapping
+ from datetime import UTC, datetime
+ import json
+ import logging
+ import os
+ from typing import Any, Final, Self
+
+ import orjson
+ from slugify import slugify
+
+ from aiohomematic import central as hmcu
+ from aiohomematic.const import (
+     ADDRESS_SEPARATOR,
+     FILE_DEVICES,
+     FILE_NAME_TS_PATTERN,
+     FILE_PARAMSETS,
+     FILE_SESSION_RECORDER,
+     INIT_DATETIME,
+     SUB_DIRECTORY_CACHE,
+     SUB_DIRECTORY_SESSION,
+     UTF_8,
+     DataOperationResult,
+     DeviceDescription,
+     ParameterData,
+     ParamsetKey,
+     RPCType,
+ )
+ from aiohomematic.model.device import Device
+ from aiohomematic.support import (
+     check_or_create_directory,
+     create_random_device_addresses,
+     delete_file,
+     extract_exc_args,
+     get_device_address,
+     get_split_channel_address,
+     hash_sha256,
+     regular_to_default_dict_hook,
+ )
+
+ _LOGGER: Final = logging.getLogger(__name__)
+
+
+ class BasePersistentFile(ABC):
+     """Base class for file-backed persistent content."""
+
+     __slots__ = (
+         "_central",
+         "_directory",
+         "_file_postfix",
+         "_persistent_content",
+         "_save_load_semaphore",
+         "_sub_directory",
+         "_use_ts_in_file_names",
+         "last_hash_saved",
+         "last_save_triggered",
+     )
+
+     _file_postfix: str
+     _sub_directory: str
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         persistent_content: dict[str, Any],
+     ) -> None:
+         """Initialize the base class of the persistent content."""
+         self._save_load_semaphore: Final = asyncio.Semaphore()
+         self._central: Final = central
+         self._persistent_content: Final = persistent_content
+         self._directory: Final = _get_file_path(
+             storage_directory=central.config.storage_directory, sub_directory=self._sub_directory
+         )
+         self.last_save_triggered: datetime = INIT_DATETIME
+         self.last_hash_saved = hash_sha256(value=persistent_content)
+
+     @property
+     def content_hash(self) -> str:
+         """Return the hash of the content."""
+         return hash_sha256(value=self._persistent_content)
+
+     @property
+     def data_changed(self) -> bool:
+         """Return if the data has changed."""
+         return self.content_hash != self.last_hash_saved
+
+     def _get_file_name(
+         self,
+         *,
+         use_ts_in_file_name: bool = False,
+     ) -> str:
+         """Return the file name."""
+         return _get_file_name(
+             central_name=self._central.name,
+             file_name=self._file_postfix,
+             ts=datetime.now() if use_ts_in_file_name else None,
+         )
+
+     def _get_file_path(
+         self,
+         *,
+         use_ts_in_file_name: bool = False,
+     ) -> str:
+         """Return the full file path."""
+         return os.path.join(self._directory, self._get_file_name(use_ts_in_file_name=use_ts_in_file_name))
+
+     async def save(self, *, randomize_output: bool = False, use_ts_in_file_name: bool = False) -> DataOperationResult:
+         """Save current data to disk."""
+         if not self._should_save:
+             return DataOperationResult.NO_SAVE
+
+         if not check_or_create_directory(directory=self._directory):
+             return DataOperationResult.NO_SAVE
+
+         def _perform_save() -> DataOperationResult:
+             try:
+                 with open(
+                     file=self._get_file_path(use_ts_in_file_name=use_ts_in_file_name),
+                     mode="wb",
+                 ) as file_pointer:
+                     file_pointer.write(
+                         self._manipulate_content(
+                             content=orjson.dumps(
+                                 self._persistent_content,
+                                 option=orjson.OPT_NON_STR_KEYS,
+                             ),
+                             randomize_output=randomize_output,
+                         )
+                     )
+                 self.last_hash_saved = self.content_hash
+             # orjson.dumps raises orjson.JSONEncodeError on encode failures, not json.JSONDecodeError.
+             except (orjson.JSONEncodeError, OSError):
+                 return DataOperationResult.SAVE_FAIL
+             return DataOperationResult.SAVE_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_save, name=f"save-persistent-content-{self._get_file_name()}"
+             )
+
+     def _manipulate_content(self, *, content: bytes, randomize_output: bool = False) -> bytes:
+         """Manipulate the content of the file. Optionally randomize addresses."""
+         if not randomize_output:
+             return content
+
+         addresses = [device.address for device in self._central.devices]
+         text = content.decode(encoding=UTF_8)
+         for device_address, rnd_address in create_random_device_addresses(addresses=addresses).items():
+             text = text.replace(device_address, rnd_address)
+         return text.encode(encoding=UTF_8)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if save operation should proceed."""
+         self.last_save_triggered = datetime.now()
+         return (
+             check_or_create_directory(directory=self._directory)
+             and self._central.config.use_caches
+             and self.content_hash != self.last_hash_saved
+         )
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """Load data from disk into the dictionary."""
+         if not file_path and not check_or_create_directory(directory=self._directory):
+             return DataOperationResult.NO_LOAD
+
+         if (file_path := file_path or self._get_file_path()) and not os.path.exists(file_path):
+             return DataOperationResult.NO_LOAD
+
+         def _perform_load() -> DataOperationResult:
+             with open(file=file_path, encoding=UTF_8) as file_pointer:
+                 try:
+                     data = json.loads(file_pointer.read(), object_hook=regular_to_default_dict_hook)
+                     if (converted_hash := hash_sha256(value=data)) == self.last_hash_saved:
+                         return DataOperationResult.NO_LOAD
+                     self._persistent_content.clear()
+                     self._persistent_content.update(data)
+                     self.last_hash_saved = converted_hash
+                 except json.JSONDecodeError:
+                     return DataOperationResult.LOAD_FAIL
+             return DataOperationResult.LOAD_SUCCESS
+
+         async with self._save_load_semaphore:
+             return await self._central.looper.async_add_executor_job(
+                 _perform_load, name=f"load-persistent-content-{self._get_file_name()}"
+             )
+
+     async def clear(self) -> None:
+         """Remove stored file from disk."""
+
+         def _perform_clear() -> None:
+             delete_file(directory=self._directory, file_name=f"{self._central.name}*.json".lower())
+             self._persistent_content.clear()
+
+         async with self._save_load_semaphore:
+             await self._central.looper.async_add_executor_job(_perform_clear, name="clear-persistent-content")
+
+
+ class DeviceDescriptionCache(BasePersistentFile):
+     """Cache for device descriptions and derived address mappings."""
+
+     __slots__ = (
+         "_addresses",
+         "_device_descriptions",
+         "_raw_device_descriptions",
+     )
+
+     _file_postfix = FILE_DEVICES
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Initialize the device description cache."""
+         # {interface_id, [device_descriptions]}
+         self._raw_device_descriptions: Final[dict[str, list[DeviceDescription]]] = defaultdict(list)
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_device_descriptions,
+         )
+         # {interface_id, {device_address, [channel_address]}}
+         self._addresses: Final[dict[str, dict[str, set[str]]]] = defaultdict(lambda: defaultdict(set))
+         # {interface_id, {address, device_descriptions}}
+         self._device_descriptions: Final[dict[str, dict[str, DeviceDescription]]] = defaultdict(dict)
+
+     def add_device(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Add a device to the cache."""
+         # Fast-path: If the address is not yet known, skip costly removal operations.
+         if (address := device_description["ADDRESS"]) not in self._device_descriptions[interface_id]:
+             self._raw_device_descriptions[interface_id].append(device_description)
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+             return
+         # Address exists: remove old entries before adding the new description.
+         self._remove_device(
+             interface_id=interface_id,
+             addresses_to_remove=[address],
+         )
+         self._raw_device_descriptions[interface_id].append(device_description)
+         self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def get_raw_device_descriptions(self, *, interface_id: str) -> list[DeviceDescription]:
+         """Retrieve raw device descriptions from the cache."""
+         return self._raw_device_descriptions[interface_id]
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove device from cache."""
+         self._remove_device(
+             interface_id=device.interface_id,
+             addresses_to_remove=[device.address, *device.channels.keys()],
+         )
+
+     def _remove_device(self, *, interface_id: str, addresses_to_remove: list[str]) -> None:
+         """Remove a device from the cache."""
+         # Use a set for faster membership checks
+         addresses_set = set(addresses_to_remove)
+         self._raw_device_descriptions[interface_id] = [
+             device for device in self._raw_device_descriptions[interface_id] if device["ADDRESS"] not in addresses_set
+         ]
+         addr_map = self._addresses[interface_id]
+         desc_map = self._device_descriptions[interface_id]
+         for address in addresses_set:
+             # Pop with default to avoid KeyError and try/except overhead
+             if ADDRESS_SEPARATOR not in address:
+                 addr_map.pop(address, None)
+             desc_map.pop(address, None)
+
+     def get_addresses(self, *, interface_id: str | None = None) -> frozenset[str]:
+         """Return the addresses by interface as a set."""
+         if interface_id:
+             return frozenset(self._addresses[interface_id])
+         return frozenset(addr for interface_id in self.get_interface_ids() for addr in self._addresses[interface_id])
+
+     def get_device_descriptions(self, *, interface_id: str) -> Mapping[str, DeviceDescription]:
+         """Return the devices by interface."""
+         return self._device_descriptions[interface_id]
+
+     def get_interface_ids(self) -> tuple[str, ...]:
+         """Return the interface ids."""
+         return tuple(self._raw_device_descriptions.keys())
+
+     def has_device_descriptions(self, *, interface_id: str) -> bool:
+         """Return whether device descriptions exist for the interface."""
+         return interface_id in self._device_descriptions
+
+     def find_device_description(self, *, interface_id: str, device_address: str) -> DeviceDescription | None:
+         """Return the device description by interface and device_address."""
+         return self._device_descriptions[interface_id].get(device_address)
+
+     def get_device_description(self, *, interface_id: str, address: str) -> DeviceDescription:
+         """Return the device description by interface and address."""
+         return self._device_descriptions[interface_id][address]
+
+     def get_device_with_channels(self, *, interface_id: str, device_address: str) -> Mapping[str, DeviceDescription]:
+         """Return the device dict by interface and device_address."""
+         device_descriptions: dict[str, DeviceDescription] = {
+             device_address: self.get_device_description(interface_id=interface_id, address=device_address)
+         }
+         children = device_descriptions[device_address]["CHILDREN"]
+         for channel_address in children:
+             device_descriptions[channel_address] = self.get_device_description(
+                 interface_id=interface_id, address=channel_address
+             )
+         return device_descriptions
+
+     def get_model(self, *, device_address: str) -> str | None:
+         """Return the device type."""
+         for data in self._device_descriptions.values():
+             if items := data.get(device_address):
+                 return items["TYPE"]
+         return None
+
+     def _convert_device_descriptions(self, *, interface_id: str, device_descriptions: list[DeviceDescription]) -> None:
+         """Convert provided list of device descriptions."""
+         for device_description in device_descriptions:
+             self._process_device_description(interface_id=interface_id, device_description=device_description)
+
+     def _process_device_description(self, *, interface_id: str, device_description: DeviceDescription) -> None:
+         """Index a single device description by its address."""
+         address = device_description["ADDRESS"]
+         device_address = get_device_address(address=address)
+         self._device_descriptions[interface_id][address] = device_description
+
+         # Avoid redundant membership checks; set.add is idempotent and cheaper than check+add
+         addr_set = self._addresses[interface_id][device_address]
+         addr_set.add(device_address)
+         addr_set.add(address)
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """Load device data from disk into _device_description_cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching device descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
+             for (
+                 interface_id,
+                 device_descriptions,
+             ) in self._raw_device_descriptions.items():
+                 self._convert_device_descriptions(interface_id=interface_id, device_descriptions=device_descriptions)
+         return result
+
+
+ class ParamsetDescriptionCache(BasePersistentFile):
+     """Cache for paramset descriptions."""
+
+     __slots__ = (
+         "_address_parameter_cache",
+         "_raw_paramset_descriptions",
+     )
+
+     _file_postfix = FILE_PARAMSETS
+     _sub_directory = SUB_DIRECTORY_CACHE
+
+     def __init__(self, *, central: hmcu.CentralUnit) -> None:
+         """Init the paramset description cache."""
+         # {interface_id, {channel_address, paramsets}}
+         self._raw_paramset_descriptions: Final[dict[str, dict[str, dict[ParamsetKey, dict[str, ParameterData]]]]] = (
+             defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._raw_paramset_descriptions,
+         )
+
+         # {(device_address, parameter), [channel_no]}
+         self._address_parameter_cache: Final[dict[tuple[str, str], set[int | None]]] = {}
+
+     @property
+     def raw_paramset_descriptions(
+         self,
+     ) -> Mapping[str, Mapping[str, Mapping[ParamsetKey, Mapping[str, ParameterData]]]]:
+         """Return the paramset descriptions."""
+         return self._raw_paramset_descriptions
+
+     def add(
+         self,
+         *,
+         interface_id: str,
+         channel_address: str,
+         paramset_key: ParamsetKey,
+         paramset_description: dict[str, ParameterData],
+     ) -> None:
+         """Add paramset description to cache."""
+         self._raw_paramset_descriptions[interface_id][channel_address][paramset_key] = paramset_description
+         self._add_address_parameter(channel_address=channel_address, paramsets=[paramset_description])
+
+     def remove_device(self, *, device: Device) -> None:
+         """Remove device paramset descriptions from cache."""
+         if interface := self._raw_paramset_descriptions.get(device.interface_id):
+             for channel_address in device.channels:
+                 if channel_address in interface:
+                     del self._raw_paramset_descriptions[device.interface_id][channel_address]
+
+     def has_interface_id(self, *, interface_id: str) -> bool:
+         """Return if interface is in paramset_descriptions cache."""
+         return interface_id in self._raw_paramset_descriptions
+
+     def get_paramset_keys(self, *, interface_id: str, channel_address: str) -> tuple[ParamsetKey, ...]:
+         """Get paramset_keys from paramset descriptions cache."""
+         return tuple(self._raw_paramset_descriptions[interface_id][channel_address])
+
+     def get_channel_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str
+     ) -> Mapping[ParamsetKey, Mapping[str, ParameterData]]:
+         """Get paramset descriptions for a channel from cache."""
+         return self._raw_paramset_descriptions[interface_id].get(channel_address, {})
+
+     def get_paramset_descriptions(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey
+     ) -> Mapping[str, ParameterData]:
+         """Get paramset descriptions from cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key]
+
+     def get_parameter_data(
+         self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey, parameter: str
+     ) -> ParameterData | None:
+         """Get parameter_data from cache."""
+         return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key].get(parameter)
+
+     def is_in_multiple_channels(self, *, channel_address: str, parameter: str) -> bool:
+         """Check if parameter is in multiple channels per device."""
+         if ADDRESS_SEPARATOR not in channel_address:
+             return False
+         if channels := self._address_parameter_cache.get((get_device_address(address=channel_address), parameter)):
+             return len(channels) > 1
+         return False
+
+     def get_channel_addresses_by_paramset_key(
+         self, *, interface_id: str, device_address: str
+     ) -> Mapping[ParamsetKey, list[str]]:
+         """Get device channel addresses."""
+         channel_addresses: dict[ParamsetKey, list[str]] = {}
+         interface_paramset_descriptions = self._raw_paramset_descriptions[interface_id]
+         for (
+             channel_address,
+             paramset_descriptions,
+         ) in interface_paramset_descriptions.items():
+             if channel_address.startswith(device_address):
+                 for p_key in paramset_descriptions:
+                     if (paramset_key := ParamsetKey(p_key)) not in channel_addresses:
+                         channel_addresses[paramset_key] = []
+                     channel_addresses[paramset_key].append(channel_address)
+
+         return channel_addresses
+
+     def _init_address_parameter_list(self) -> None:
+         """
+         Initialize the device_address/parameter mapping.
+
+         Used to identify whether a parameter name exists in multiple channels.
+         """
+         for channel_paramsets in self._raw_paramset_descriptions.values():
+             for channel_address, paramsets in channel_paramsets.items():
+                 self._add_address_parameter(channel_address=channel_address, paramsets=list(paramsets.values()))
+
+     def _add_address_parameter(self, *, channel_address: str, paramsets: list[dict[str, Any]]) -> None:
+         """Add address parameter to cache."""
+         device_address, channel_no = get_split_channel_address(channel_address=channel_address)
+         cache = self._address_parameter_cache
+         for paramset in paramsets:
+             if not paramset:
+                 continue
+             for parameter in paramset:
+                 cache.setdefault((device_address, parameter), set()).add(channel_no)
+
+     async def load(self, *, file_path: str | None = None) -> DataOperationResult:
+         """Load paramset descriptions from disk into paramset cache."""
+         if not self._central.config.use_caches:
+             _LOGGER.debug("load: not caching paramset descriptions for %s", self._central.name)
+             return DataOperationResult.NO_LOAD
+         if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
+             self._init_address_parameter_list()
+         return result
+
+
+ class SessionRecorder(BasePersistentFile):
+     """
+     Session recorder for central unit.
+
+     Nested cache with TTL support.
+     Structure:
+     store[rpc_type][method][params][ts: int (epoch seconds)] = response: Any
+
+     - Expiration is lazy (checked on access/update).
+     - Optional refresh_on_get extends TTL when reading.
+     """
+
+     __slots__ = (
+         "_active",
+         "_ttl",
+         "_is_recording",
+         "_refresh_on_get",
+         "_store",
+     )
+
+     _file_postfix = FILE_SESSION_RECORDER
+     _sub_directory = SUB_DIRECTORY_SESSION
+
+     def __init__(
+         self,
+         *,
+         central: hmcu.CentralUnit,
+         active: bool,
+         ttl_seconds: float,
+         refresh_on_get: bool = False,
+     ):
+         """Init the cache."""
+         self._active = active
+         if ttl_seconds < 0:
+             raise ValueError("ttl_seconds must be non-negative")
+         self._ttl: Final = float(ttl_seconds)
+         self._is_recording: bool = False
+         self._refresh_on_get: Final = refresh_on_get
+         # Use nested defaultdicts: rpc_type -> method -> params -> ts(int) -> response.
+         # Annotated as a plain dict; the defaultdict instance satisfies it for mypy.
+         self._store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
+             lambda: defaultdict(lambda: defaultdict(dict))
+         )
+         super().__init__(
+             central=central,
+             persistent_content=self._store,
+         )
+
+     # ---------- internal helpers ----------
+
+     def _is_expired(self, *, ts: int, now: int | None = None) -> bool:
+         """Check whether an entry has expired given epoch seconds."""
+         if self._ttl == 0:
+             return False
+         now = now if now is not None else _now()
+         return (now - ts) > self._ttl
+
+     def _purge_expired_at(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+     ) -> None:
+         """Remove expired entries for a given (rpc_type, method) bucket without creating new ones."""
+         if self._ttl == 0:
+             return
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return
+         now = _now()
+         empty_params: list[str] = []
+         for p, bucket_by_ts in bucket_by_parameter.items():
+             expired_ts = [ts for ts, _r in list(bucket_by_ts.items()) if self._is_expired(ts=ts, now=now)]
+             for ts in expired_ts:
+                 del bucket_by_ts[ts]
+             if not bucket_by_ts:
+                 empty_params.append(p)
+         for p in empty_params:
+             bucket_by_parameter.pop(p, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+
+     def _bucket(self, *, rpc_type: str, method: str) -> dict[str, dict[int, Any]]:
+         """Ensure and return the innermost bucket."""
+         return self._store[rpc_type][method]
+
+     # ---------- public API ----------
+
+     @property
+     def active(self) -> bool:
+         """Return if session recorder is active."""
+         return self._active
+
+     async def _deactivate_after_delay(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> None:
+         """Change the state of the session recorder after a delay."""
+         self._is_recording = True
+         await asyncio.sleep(delay)
+         self._active = False
+         self._is_recording = False
+         if auto_save:
+             await self.save(randomize_output=randomize_output, use_ts_in_file_name=use_ts_in_file_name)
+         _LOGGER.debug("Deactivated session recorder after %s seconds", delay)
+
+     async def activate(
+         self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> bool:
+         """Activate the session recorder. Deactivate after on_time (seconds)."""
+         if self._is_recording:
+             _LOGGER.info("ACTIVATE: Recording session is already running.")
+             return False
+         self._store.clear()
+         self._active = True
+         if on_time > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=on_time,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_file_name=use_ts_in_file_name,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+         return True
+
+     async def deactivate(
+         self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
+     ) -> bool:
+         """Deactivate the session recorder, optionally after a delay (seconds)."""
+         if self._is_recording:
+             _LOGGER.info("DEACTIVATE: Recording session is already running.")
+             return False
+         if delay > 0:
+             self._central.looper.create_task(
+                 target=self._deactivate_after_delay(
+                     delay=delay,
+                     auto_save=auto_save,
+                     randomize_output=randomize_output,
+                     use_ts_in_file_name=use_ts_in_file_name,
+                 ),
+                 name=f"session_recorder_{self._central.name}",
+             )
+         else:
+             self._active = False
+             self._is_recording = False
+         return True
+
+     def add_json_rpc_session(
+         self,
+         *,
+         method: str,
+         params: dict[str, Any],
+         response: dict[str, Any] | None = None,
+         session_exc: Exception | None = None,
+     ) -> None:
+         """Add a JSON-RPC session to the store."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.JSON_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.JSON_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_JSON_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def add_xml_rpc_session(
+         self, *, method: str, params: tuple[Any, ...], response: Any | None = None, session_exc: Exception | None = None
+     ) -> None:
+         """Add an XML-RPC session to the store."""
+         try:
+             if session_exc:
+                 self.set(
+                     rpc_type=str(RPCType.XML_RPC),
+                     method=method,
+                     params=params,
+                     response=extract_exc_args(exc=session_exc),
+                 )
+                 return
+             self.set(rpc_type=str(RPCType.XML_RPC), method=method, params=params, response=response)
+         except Exception as exc:
+             _LOGGER.debug("ADD_XML_RPC_SESSION: failed with %s", extract_exc_args(exc=exc))
+
+     def set(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         response: Any,
+         ts: int | datetime | None = None,
+     ) -> Self:
+         """Insert or update an entry."""
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         frozen_param = _freeze_params(params)
+         # Normalize timestamp to int epoch seconds
+         if isinstance(ts, datetime):
+             ts_int = int(ts.timestamp())
+         elif isinstance(ts, int):
+             ts_int = ts
+         else:
+             ts_int = _now()
+         self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = response
+         return self
+
+     def get(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+         default: Any = None,
+     ) -> Any:
+         """
+         Return a cached response if still valid, else default.
+
+         This method must avoid creating buckets when the entry is missing.
+         It purges expired entries first, then returns the response at the
+         latest timestamp for the given params. If refresh_on_get is enabled,
+         it appends a new timestamp with the same response/ttl.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Access store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return default
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return default
+         frozen_param = _freeze_params(params)
+         if not (bucket_by_ts := bucket_by_parameter.get(frozen_param)):
+             return default
+         try:
+             latest_ts = max(bucket_by_ts.keys())
+         except ValueError:
+             return default
+         resp = bucket_by_ts[latest_ts]
+         if self._refresh_on_get:
+             bucket_by_ts[_now()] = resp
+         return resp
+
+     def delete(self, *, rpc_type: str, method: str, params: Any) -> bool:
+         """
+         Delete an entry if it exists. Returns True if removed.
+
+         Avoid creating buckets when the target does not exist.
+         Clean up empty parent buckets on successful deletion.
+         """
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return False
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return False
+         if (frozen_param := _freeze_params(params)) not in bucket_by_parameter:
+             return False
+         # Perform deletion
+         bucket_by_parameter.pop(frozen_param, None)
+         if not bucket_by_parameter:
+             bucket_by_method.pop(method, None)
+         if not bucket_by_method:
+             self._store.pop(rpc_type, None)
+         return True
+
+     def get_latest_response_by_method(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
+         """Return latest non-expired responses for a given (rpc_type, method)."""
+         # Purge expired entries first without creating any new buckets.
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         result: list[tuple[Any, Any]] = []
+         # Access store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return result
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return result
+         # For each parameter, choose the response at the latest timestamp.
+         for frozen_params, bucket_by_ts in bucket_by_parameter.items():
+             if not bucket_by_ts:
+                 continue
+             try:
+                 latest_ts = max(bucket_by_ts.keys())
+             except ValueError:
+                 continue
+             resp = bucket_by_ts[latest_ts]
+             params = _unfreeze_params(frozen_params=frozen_params)
+
+             result.append((params, resp))
+         return result
+
+     def get_latest_response_by_params(
+         self,
+         *,
+         rpc_type: str,
+         method: str,
+         params: Any,
+     ) -> Any:
+         """Return the latest non-expired response for a given (rpc_type, method, params)."""
+         # Purge expired entries first without creating any new buckets.
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+
+         # Access store safely to avoid side effects from creating buckets.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return None
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return None
+         frozen_params = _freeze_params(params=params)
+
+         # Choose the response at the latest timestamp for the given params.
+         if (bucket_by_ts := bucket_by_parameter.get(frozen_params)) is None:
+             return None
+
+         try:
+             latest_ts = max(bucket_by_ts.keys())
+             return bucket_by_ts[latest_ts]
+         except ValueError:
+             return None
+
+     def cleanup(self) -> None:
+         """Purge all expired entries globally."""
+         for rpc_type in list(self._store.keys()):
+             for method in list(self._store[rpc_type].keys()):
+                 self._purge_expired_at(rpc_type=rpc_type, method=method)
+
+     def peek_ts(self, *, rpc_type: str, method: str, params: Any) -> datetime | None:
+         """
+         Return the most recent timestamp for a live entry, else None.
+
+         This method must not create buckets as a side effect. It purges expired
+         entries first and then returns the newest timestamp for the given
+         (rpc_type, method, params) if present.
+         """
+         self._purge_expired_at(rpc_type=rpc_type, method=method)
+         # Do NOT create buckets here; use .get chaining only.
+         if not (bucket_by_method := self._store.get(rpc_type)):
+             return None
+         if not (bucket_by_parameter := bucket_by_method.get(method)):
+             return None
+         frozen_param = _freeze_params(params)
+         if (bucket_by_ts := bucket_by_parameter.get(frozen_param)) is None or not bucket_by_ts:
+             return None
+         # After purge, remaining entries are alive; return the latest timestamp.
+         try:
+             latest_ts_int = max(bucket_by_ts.keys())
+         except ValueError:
+             # bucket was empty (shouldn't happen due to check), be safe
+             return None
+         return datetime.fromtimestamp(latest_ts_int, tz=UTC)
+
+     @property
+     def _should_save(self) -> bool:
+         """Determine if save operation should proceed."""
+         self.cleanup()
+         return len(self._store) > 0
+
+     def __repr__(self) -> str:
+         """Return the representation."""
+         self.cleanup()
+         return f"{self.__class__.__name__}({self._store})"
+
+
+ def _freeze_params(params: Any) -> str:
+     """
+     Recursively freeze any structure so it can be used as a dictionary key.
+
+     - dict → dict with keys sorted and values frozen recursively.
+     - list/tuple → tuple of frozen elements.
+     - set/frozenset → tagged tuple ("__set__", tuple(sorted(frozen elements by repr))) to ensure JSON-serializable keys.
+     - datetime → tagged ISO 8601 string to ensure JSON-serializable keys.
+
+     The result is rendered via str() so it is hashable and JSON-serializable.
+     """
+     res: Any = ""
+     match params:
+         case datetime():
+             # orjson cannot serialize datetime objects as dict keys even with OPT_NON_STR_KEYS.
+             # Use a tagged ISO string to preserve value and guarantee a stable, hashable key.
+             res = ("__datetime__", params.isoformat())
+         case dict():
+             res = {k: _freeze_params(v) for k, v in sorted(params.items())}
+         case list() | tuple():
+             res = tuple(_freeze_params(x) for x in params)
+         case set() | frozenset():
+             # Convert to a deterministically ordered, JSON-serializable representation.
+             frozen_elems = tuple(sorted((_freeze_params(x) for x in params), key=repr))
+             res = ("__set__", frozen_elems)
+         case _:
+             res = params
+
+     return str(res)
+
+
+ def _unfreeze_params(frozen_params: str) -> Any:
+     """
+     Reverse the _freeze_params transformation.
+
+     Tries to parse the frozen string with ast.literal_eval and then recursively
+     reconstructs original structures:
+     - ("__set__", (<items>...)) -> set of items
+     - ("__datetime__", iso_string) -> datetime.fromisoformat(iso_string)
+     - dict values and tuple elements are processed recursively
+
+     If parsing fails, return the original string.
+     """
+     try:
+         obj = ast.literal_eval(frozen_params)
+     except Exception:
+         return frozen_params
+
+     def _walk(o: Any) -> Any:
+         if o and isinstance(o, tuple):
+             tag = o[0]
+             # Tagged set
+             if tag == "__set__" and len(o) == 2 and isinstance(o[1], tuple):
+                 return {_walk(x) for x in o[1]}
+             # Tagged datetime
+             if tag == "__datetime__" and len(o) == 2 and isinstance(o[1], str):
+                 try:
+                     return datetime.fromisoformat(o[1])
+                 except Exception:
+                     return o[1]
+             # Generic tuple
+             return tuple(_walk(x) for x in o)
+         if isinstance(o, dict):
+             return {k: _walk(v) for k, v in o.items()}
+         if isinstance(o, list):
+             return [_walk(x) for x in o]
+         if isinstance(o, tuple):
+             return tuple(_walk(x) for x in o)
+         # Only strings may encode a nested frozen dict; guard before calling startswith,
+         # since leaf values can be non-string primitives such as int.
+         if isinstance(o, str) and o.startswith("{") and o.endswith("}"):
+             return ast.literal_eval(o)
+         return o
+
+     return _walk(obj)
+
+
+ def _get_file_path(*, storage_directory: str, sub_directory: str) -> str:
+     """Return the content path."""
+     return f"{storage_directory}/{sub_directory}"
+
+
+ def _get_file_name(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
+     """Return the content file_name."""
+     fn = f"{slugify(central_name)}_{file_name}"
+     if ts:
+         fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
+     return f"{fn}.json"
+
+
+ def _now() -> int:
+     """Return current UTC time as epoch seconds (int)."""
+     return int(datetime.now(tz=UTC).timestamp())
+
+
+ async def cleanup_files(*, central_name: str, storage_directory: str) -> None:
+     """Clean up the used files."""
+     loop = asyncio.get_running_loop()
+     cache_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_CACHE)
+     # delete_file is called with keyword arguments (as elsewhere in this module), so wrap it in a lambda.
+     loop.call_soon_threadsafe(lambda: delete_file(directory=cache_dir, file_name=f"{central_name}*.json".lower()))
+     session_dir = _get_file_path(storage_directory=storage_directory, sub_directory=SUB_DIRECTORY_SESSION)
+     loop.call_soon_threadsafe(lambda: delete_file(directory=session_dir, file_name=f"{central_name}*.json".lower()))
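To make the SessionRecorder's TTL bucket layout concrete, here is a minimal, self-contained sketch of the same idea. TinyRecorder and everything in it are illustrative stand-ins written for this note, not part of aiohomematic's API; it only mirrors the store[rpc_type][method][params][ts] = response shape and the lazy purge-on-read behavior described above.

    from collections import defaultdict
    from datetime import UTC, datetime
    from typing import Any


    def _now() -> int:
        # Current UTC time as epoch seconds, as in the module above.
        return int(datetime.now(tz=UTC).timestamp())


    class TinyRecorder:
        # Toy version of the nested TTL store: rpc_type -> method -> params -> ts -> response.

        def __init__(self, *, ttl_seconds: float) -> None:
            self._ttl = float(ttl_seconds)
            self._store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
                lambda: defaultdict(lambda: defaultdict(dict))
            )

        def set(self, *, rpc_type: str, method: str, params: Any, response: Any) -> None:
            # str(params) stands in for the full _freeze_params normalization.
            self._store[rpc_type][method][str(params)][_now()] = response

        def get(self, *, rpc_type: str, method: str, params: Any, default: Any = None) -> Any:
            # Lazy expiry: drop stale timestamps on read, then return the newest survivor.
            bucket = self._store.get(rpc_type, {}).get(method, {}).get(str(params))
            if not bucket:
                return default
            now = _now()
            for ts in [ts for ts in bucket if self._ttl and (now - ts) > self._ttl]:
                del bucket[ts]
            return bucket[max(bucket)] if bucket else default


    recorder = TinyRecorder(ttl_seconds=60)
    recorder.set(rpc_type="json_rpc", method="Device.listAllDetail", params={"id": 1}, response=["VCU0000001"])
    print(recorder.get(rpc_type="json_rpc", method="Device.listAllDetail", params={"id": 1}))  # ['VCU0000001']

A ttl_seconds of 0 disables expiry here, matching the _ttl == 0 short-circuit in _is_expired above.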