aiohomematic-2026.1.29-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiohomematic/__init__.py +110 -0
- aiohomematic/_log_context_protocol.py +29 -0
- aiohomematic/api.py +410 -0
- aiohomematic/async_support.py +250 -0
- aiohomematic/backend_detection.py +462 -0
- aiohomematic/central/__init__.py +103 -0
- aiohomematic/central/async_rpc_server.py +760 -0
- aiohomematic/central/central_unit.py +1152 -0
- aiohomematic/central/config.py +463 -0
- aiohomematic/central/config_builder.py +772 -0
- aiohomematic/central/connection_state.py +160 -0
- aiohomematic/central/coordinators/__init__.py +38 -0
- aiohomematic/central/coordinators/cache.py +414 -0
- aiohomematic/central/coordinators/client.py +480 -0
- aiohomematic/central/coordinators/connection_recovery.py +1141 -0
- aiohomematic/central/coordinators/device.py +1166 -0
- aiohomematic/central/coordinators/event.py +514 -0
- aiohomematic/central/coordinators/hub.py +532 -0
- aiohomematic/central/decorators.py +184 -0
- aiohomematic/central/device_registry.py +229 -0
- aiohomematic/central/events/__init__.py +104 -0
- aiohomematic/central/events/bus.py +1392 -0
- aiohomematic/central/events/integration.py +424 -0
- aiohomematic/central/events/types.py +194 -0
- aiohomematic/central/health.py +762 -0
- aiohomematic/central/rpc_server.py +353 -0
- aiohomematic/central/scheduler.py +794 -0
- aiohomematic/central/state_machine.py +391 -0
- aiohomematic/client/__init__.py +203 -0
- aiohomematic/client/_rpc_errors.py +187 -0
- aiohomematic/client/backends/__init__.py +48 -0
- aiohomematic/client/backends/base.py +335 -0
- aiohomematic/client/backends/capabilities.py +138 -0
- aiohomematic/client/backends/ccu.py +487 -0
- aiohomematic/client/backends/factory.py +116 -0
- aiohomematic/client/backends/homegear.py +294 -0
- aiohomematic/client/backends/json_ccu.py +252 -0
- aiohomematic/client/backends/protocol.py +316 -0
- aiohomematic/client/ccu.py +1857 -0
- aiohomematic/client/circuit_breaker.py +459 -0
- aiohomematic/client/config.py +64 -0
- aiohomematic/client/handlers/__init__.py +40 -0
- aiohomematic/client/handlers/backup.py +157 -0
- aiohomematic/client/handlers/base.py +79 -0
- aiohomematic/client/handlers/device_ops.py +1085 -0
- aiohomematic/client/handlers/firmware.py +144 -0
- aiohomematic/client/handlers/link_mgmt.py +199 -0
- aiohomematic/client/handlers/metadata.py +436 -0
- aiohomematic/client/handlers/programs.py +144 -0
- aiohomematic/client/handlers/sysvars.py +100 -0
- aiohomematic/client/interface_client.py +1304 -0
- aiohomematic/client/json_rpc.py +2068 -0
- aiohomematic/client/request_coalescer.py +282 -0
- aiohomematic/client/rpc_proxy.py +629 -0
- aiohomematic/client/state_machine.py +324 -0
- aiohomematic/const.py +2207 -0
- aiohomematic/context.py +275 -0
- aiohomematic/converter.py +270 -0
- aiohomematic/decorators.py +390 -0
- aiohomematic/exceptions.py +185 -0
- aiohomematic/hmcli.py +997 -0
- aiohomematic/i18n.py +193 -0
- aiohomematic/interfaces/__init__.py +407 -0
- aiohomematic/interfaces/central.py +1067 -0
- aiohomematic/interfaces/client.py +1096 -0
- aiohomematic/interfaces/coordinators.py +63 -0
- aiohomematic/interfaces/model.py +1921 -0
- aiohomematic/interfaces/operations.py +217 -0
- aiohomematic/logging_context.py +134 -0
- aiohomematic/metrics/__init__.py +125 -0
- aiohomematic/metrics/_protocols.py +140 -0
- aiohomematic/metrics/aggregator.py +534 -0
- aiohomematic/metrics/dataclasses.py +489 -0
- aiohomematic/metrics/emitter.py +292 -0
- aiohomematic/metrics/events.py +183 -0
- aiohomematic/metrics/keys.py +300 -0
- aiohomematic/metrics/observer.py +563 -0
- aiohomematic/metrics/stats.py +172 -0
- aiohomematic/model/__init__.py +189 -0
- aiohomematic/model/availability.py +65 -0
- aiohomematic/model/calculated/__init__.py +89 -0
- aiohomematic/model/calculated/climate.py +276 -0
- aiohomematic/model/calculated/data_point.py +315 -0
- aiohomematic/model/calculated/field.py +147 -0
- aiohomematic/model/calculated/operating_voltage_level.py +286 -0
- aiohomematic/model/calculated/support.py +232 -0
- aiohomematic/model/custom/__init__.py +214 -0
- aiohomematic/model/custom/capabilities/__init__.py +67 -0
- aiohomematic/model/custom/capabilities/climate.py +41 -0
- aiohomematic/model/custom/capabilities/light.py +87 -0
- aiohomematic/model/custom/capabilities/lock.py +44 -0
- aiohomematic/model/custom/capabilities/siren.py +63 -0
- aiohomematic/model/custom/climate.py +1130 -0
- aiohomematic/model/custom/cover.py +722 -0
- aiohomematic/model/custom/data_point.py +360 -0
- aiohomematic/model/custom/definition.py +300 -0
- aiohomematic/model/custom/field.py +89 -0
- aiohomematic/model/custom/light.py +1174 -0
- aiohomematic/model/custom/lock.py +322 -0
- aiohomematic/model/custom/mixins.py +445 -0
- aiohomematic/model/custom/profile.py +945 -0
- aiohomematic/model/custom/registry.py +251 -0
- aiohomematic/model/custom/siren.py +462 -0
- aiohomematic/model/custom/switch.py +195 -0
- aiohomematic/model/custom/text_display.py +289 -0
- aiohomematic/model/custom/valve.py +78 -0
- aiohomematic/model/data_point.py +1416 -0
- aiohomematic/model/device.py +1840 -0
- aiohomematic/model/event.py +216 -0
- aiohomematic/model/generic/__init__.py +327 -0
- aiohomematic/model/generic/action.py +40 -0
- aiohomematic/model/generic/action_select.py +62 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +31 -0
- aiohomematic/model/generic/data_point.py +177 -0
- aiohomematic/model/generic/dummy.py +150 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +56 -0
- aiohomematic/model/generic/sensor.py +76 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +33 -0
- aiohomematic/model/hub/__init__.py +100 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/connectivity.py +190 -0
- aiohomematic/model/hub/data_point.py +342 -0
- aiohomematic/model/hub/hub.py +864 -0
- aiohomematic/model/hub/inbox.py +135 -0
- aiohomematic/model/hub/install_mode.py +393 -0
- aiohomematic/model/hub/metrics.py +208 -0
- aiohomematic/model/hub/number.py +42 -0
- aiohomematic/model/hub/select.py +52 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +43 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/hub/update.py +221 -0
- aiohomematic/model/support.py +592 -0
- aiohomematic/model/update.py +140 -0
- aiohomematic/model/week_profile.py +1827 -0
- aiohomematic/property_decorators.py +719 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/accept_device_in_inbox.fn +51 -0
- aiohomematic/rega_scripts/create_backup_start.fn +28 -0
- aiohomematic/rega_scripts/create_backup_status.fn +89 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +97 -0
- aiohomematic/rega_scripts/get_backend_info.fn +25 -0
- aiohomematic/rega_scripts/get_inbox_devices.fn +61 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +31 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_service_messages.fn +83 -0
- aiohomematic/rega_scripts/get_system_update_info.fn +39 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +31 -0
- aiohomematic/rega_scripts/set_program_state.fn +17 -0
- aiohomematic/rega_scripts/set_system_variable.fn +19 -0
- aiohomematic/rega_scripts/trigger_firmware_update.fn +67 -0
- aiohomematic/schemas.py +256 -0
- aiohomematic/store/__init__.py +55 -0
- aiohomematic/store/dynamic/__init__.py +43 -0
- aiohomematic/store/dynamic/command.py +250 -0
- aiohomematic/store/dynamic/data.py +175 -0
- aiohomematic/store/dynamic/details.py +187 -0
- aiohomematic/store/dynamic/ping_pong.py +416 -0
- aiohomematic/store/persistent/__init__.py +71 -0
- aiohomematic/store/persistent/base.py +285 -0
- aiohomematic/store/persistent/device.py +233 -0
- aiohomematic/store/persistent/incident.py +380 -0
- aiohomematic/store/persistent/paramset.py +241 -0
- aiohomematic/store/persistent/session.py +556 -0
- aiohomematic/store/serialization.py +150 -0
- aiohomematic/store/storage.py +689 -0
- aiohomematic/store/types.py +526 -0
- aiohomematic/store/visibility/__init__.py +40 -0
- aiohomematic/store/visibility/parser.py +141 -0
- aiohomematic/store/visibility/registry.py +722 -0
- aiohomematic/store/visibility/rules.py +307 -0
- aiohomematic/strings.json +237 -0
- aiohomematic/support.py +706 -0
- aiohomematic/tracing.py +236 -0
- aiohomematic/translations/de.json +237 -0
- aiohomematic/translations/en.json +237 -0
- aiohomematic/type_aliases.py +51 -0
- aiohomematic/validator.py +128 -0
- aiohomematic-2026.1.29.dist-info/METADATA +296 -0
- aiohomematic-2026.1.29.dist-info/RECORD +188 -0
- aiohomematic-2026.1.29.dist-info/WHEEL +5 -0
- aiohomematic-2026.1.29.dist-info/entry_points.txt +2 -0
- aiohomematic-2026.1.29.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2026.1.29.dist-info/top_level.txt +1 -0
aiohomematic/store/persistent/base.py
@@ -0,0 +1,285 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2021-2026
"""
Base class for persistent caches using storage abstraction.

This module provides the foundation for all persistent caches. Instead of
handling file I/O directly, caches now delegate to StorageProtocol instances.

Key behaviors:
- Delegates file I/O to StorageProtocol
- Hash-based change detection for efficient saves
- Optional caching control via config
- Supports delayed saves for batching updates

Migration from old BasePersistentFile
-------------------------------------
The old implementation mixed cache logic with file operations. The new
BasePersistentCache separates concerns:

- Cache logic: Handled by BasePersistentCache and subclasses
- File operations: Delegated to StorageProtocol
- Factory creation: Via StorageFactoryProtocol
"""

from __future__ import annotations

from abc import ABC, abstractmethod
from datetime import datetime
import logging
from typing import TYPE_CHECKING, Any, Final

from slugify import slugify

from aiohomematic.const import FILE_NAME_TS_PATTERN, INIT_DATETIME, DataOperationResult
from aiohomematic.support import hash_sha256

if TYPE_CHECKING:
    from aiohomematic.interfaces import ConfigProviderProtocol
    from aiohomematic.store.storage import StorageProtocol

_LOGGER: Final = logging.getLogger(__name__)


class BasePersistentCache(ABC):
    """
    Base class for persistent caches.

    This abstract class provides common functionality for caches that need
    to persist their data. Subclasses define the cache structure and logic,
    while actual storage operations are delegated to a StorageProtocol.

    Key differences from old BasePersistentFile:
    - No direct file I/O - uses storage.save/load
    - No semaphore needed - storage handles synchronization
    - Hash-based change detection retained for efficiency
    - Simpler interface - only cache logic, no file path handling

    Subclasses must implement:
    - _create_empty_content(): Define initial data structure
    - _process_loaded_content(): Rebuild indexes after load

    Schema Versioning:
    - SCHEMA_VERSION: Subclasses override to define their schema version
    - _migrate_schema(): Subclasses override to implement migrations
    """

    # Subclasses override to define their schema version
    SCHEMA_VERSION: int = 1

    __slots__ = (
        "_config_provider",
        "_content",
        "_last_hash_saved",
        "_storage",
        "last_save_triggered",
    )

    def __init__(
        self,
        *,
        storage: StorageProtocol,
        config_provider: ConfigProviderProtocol,
    ) -> None:
        """
        Initialize the cache.

        Args:
            storage: Storage instance for persistence.
            config_provider: Provider for configuration access.

        """
        self._storage: Final = storage
        self._config_provider: Final = config_provider
        self._content: dict[str, Any] = self._create_empty_content()
        self._last_hash_saved: str = ""
        self.last_save_triggered: datetime = INIT_DATETIME

    @property
    def _should_save(self) -> bool:
        """Determine if save operation should proceed."""
        self.last_save_triggered = datetime.now()
        use_caches = self._config_provider.config.use_caches
        has_changes = self.has_unsaved_changes
        _LOGGER.debug(
            "CACHE_SHOULD_SAVE: %s - use_caches=%s, has_unsaved_changes=%s, result=%s",
            self.storage_key,
            use_caches,
            has_changes,
            use_caches and has_changes,
        )
        return use_caches and has_changes

    @property
    def content_hash(self) -> str:
        """Return hash of current content."""
        return hash_sha256(value=self._content)

    @property
    def has_unsaved_changes(self) -> bool:
        """Return True if content changed since last save."""
        return self.content_hash != self._last_hash_saved

    @property
    def storage_key(self) -> str:
        """Return the storage key."""
        return self._storage.key

    async def clear(self) -> None:
        """Remove storage and clear content."""
        await self._storage.remove()
        self._content.clear()
        self._content.update(self._create_empty_content())
        self._last_hash_saved = ""

    async def flush(self) -> None:
        """Flush any pending delayed saves immediately."""
        await self._storage.flush()

    async def load(self) -> DataOperationResult:
        """
        Load content from storage.

        After loading, calls _process_loaded_content to rebuild any
        derived structures or indexes. If the loaded schema version is
        older than the current SCHEMA_VERSION, _migrate_schema is called.

        Returns:
            DataOperationResult indicating success/skip/failure.

        """
        _LOGGER.debug("CACHE_LOAD: Starting load for %s", self.storage_key)
        try:
            data = await self._storage.load()
        except Exception:
            _LOGGER.exception("CACHE: Failed to load %s", self.storage_key)  # i18n-log: ignore
            return DataOperationResult.LOAD_FAIL

        if data is None:
            _LOGGER.debug("CACHE_LOAD: No data found for %s", self.storage_key)
            return DataOperationResult.NO_LOAD

        _LOGGER.debug("CACHE_LOAD: Loaded data for %s (keys: %s)", self.storage_key, list(data.keys()))

        # Check and migrate schema version
        if (loaded_version := data.pop("_schema_version", 1)) < self.SCHEMA_VERSION:
            data = self._migrate_schema(data=data, from_version=loaded_version)

        if (loaded_hash := hash_sha256(value=data)) == self._last_hash_saved:
            return DataOperationResult.NO_LOAD

        self._content.clear()
        self._content.update(data)
        self._process_loaded_content(data=data)
        self._last_hash_saved = loaded_hash
        return DataOperationResult.LOAD_SUCCESS

    async def save(self) -> DataOperationResult:
        """
        Save content to storage if changed.

        Only saves if caching is enabled and content has changed since last save.
        Adds _schema_version to saved data for migration support.

        Returns:
            DataOperationResult indicating success/skip/failure.

        """
        if not self._should_save:
            _LOGGER.debug("CACHE_SAVE: Skipping save for %s (no changes or caching disabled)", self.storage_key)
            return DataOperationResult.NO_SAVE

        # Add schema version before saving
        save_data = {"_schema_version": self.SCHEMA_VERSION, **self._content}

        try:
            _LOGGER.debug(
                "CACHE_SAVE: Saving %s to storage (content keys: %s, sizes: %s)",
                self.storage_key,
                list(self._content.keys()),
                {k: len(v) if isinstance(v, (list, dict)) else "?" for k, v in self._content.items()},
            )
            await self._storage.save(data=save_data)
            self._last_hash_saved = self.content_hash
            _LOGGER.debug("CACHE_SAVE: Successfully saved %s", self.storage_key)
        except Exception:
            _LOGGER.exception("CACHE: Failed to save %s", self.storage_key)  # i18n-log: ignore
            return DataOperationResult.SAVE_FAIL
        else:
            return DataOperationResult.SAVE_SUCCESS

    async def save_delayed(self, *, delay: float = 1.0) -> None:
        """
        Schedule a delayed save.

        Multiple calls within the delay period will reset the timer.
        Useful for batching rapid updates.

        Args:
            delay: Delay in seconds before saving (default: 1.0).

        """
        if not self._config_provider.config.use_caches:
            return

        await self._storage.delay_save(
            data_func=lambda: self._content,
            delay=delay,
        )

    @abstractmethod
    def _create_empty_content(self) -> dict[str, Any]:
        """
        Create empty content structure.

        Subclasses override to define their data structure.

        Returns:
            Empty dict structure for this cache type.

        """

    def _migrate_schema(self, *, data: dict[str, Any], from_version: int) -> dict[str, Any]:
        """
        Migrate data from older schema version.

        Subclasses override to implement version-specific migrations.
        Default implementation returns data unchanged.

        Args:
            data: Raw data loaded from storage.
            from_version: Schema version of loaded data.

        Returns:
            Migrated data dict.

        """
        return data

    @abstractmethod
    def _process_loaded_content(self, *, data: dict[str, Any]) -> None:
        """
        Process data after loading from storage.

        Subclasses implement to rebuild internal indexes or derived structures.

        Args:
            data: Raw data loaded from storage.

        """


# Helper functions for path/name generation


def get_file_path(*, storage_directory: str, sub_directory: str) -> str:
    """Return the content path."""
    return f"{storage_directory}/{sub_directory}"


def get_file_name(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
    """Return the content file name."""
    fn = f"{slugify(central_name)}_{file_name}"
    if ts:
        fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
    return f"{fn}.json"
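To make the subclass contract above concrete, here is a minimal sketch of a cache built on BasePersistentCache. Everything except the import is hypothetical and not part of the package: InMemoryStorage and _Config/_ConfigProvider are duck-typed stand-ins for the real StorageProtocol and ConfigProviderProtocol implementations (they stub only the members the base class touches: key, load, save, remove, flush, delay_save, and config.use_caches), and NoteCache is an invented example cache.

# Minimal sketch, assuming the package above is installed. InMemoryStorage,
# _Config, _ConfigProvider and NoteCache are hypothetical stand-ins, not
# part of aiohomematic; they stub only what BasePersistentCache uses.
import asyncio
from collections.abc import Callable
from typing import Any

from aiohomematic.store.persistent.base import BasePersistentCache


class InMemoryStorage:
    """Duck-typed stand-in for StorageProtocol."""

    def __init__(self, key: str) -> None:
        self.key = key
        self._data: dict[str, Any] | None = None

    async def load(self) -> dict[str, Any] | None:
        # Return a copy to mimic deserialization from disk.
        return dict(self._data) if self._data is not None else None

    async def save(self, *, data: dict[str, Any]) -> None:
        self._data = dict(data)

    async def remove(self) -> None:
        self._data = None

    async def flush(self) -> None:
        return None

    async def delay_save(self, *, data_func: Callable[[], dict[str, Any]], delay: float) -> None:
        # The real storage batches delayed saves; the stub saves immediately.
        await self.save(data=dict(data_func()))


class _Config:
    use_caches = True


class _ConfigProvider:
    config = _Config()


class NoteCache(BasePersistentCache):
    """Toy cache persisting a list of notes."""

    def _create_empty_content(self) -> dict[str, Any]:
        return {"notes": []}

    def _process_loaded_content(self, *, data: dict[str, Any]) -> None:
        return None  # nothing derived to rebuild

    def add_note(self, text: str) -> None:
        self._content["notes"].append(text)


async def main() -> None:
    cache = NoteCache(storage=InMemoryStorage(key="notes"), config_provider=_ConfigProvider())
    cache.add_note("hello")
    print(await cache.save())  # SAVE_SUCCESS: content hash differs from last save
    print(await cache.save())  # NO_SAVE: nothing changed since the previous save
    print(await cache.load())  # NO_LOAD: loaded hash equals the last saved hash


asyncio.run(main())

The printed results follow from the hash-based change detection above: the second save() is skipped because the content hash still matches the last saved hash, and load() returns NO_LOAD for the same reason.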
aiohomematic/store/persistent/device.py
@@ -0,0 +1,233 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2021-2026
"""
Device description registry for persisting device/channel metadata.

This module provides DeviceDescriptionRegistry which persists device descriptions
per interface, including the mapping of device/channels and model metadata.
"""

from __future__ import annotations

from collections import defaultdict
from collections.abc import Mapping
import logging
from typing import TYPE_CHECKING, Any, Final

from aiohomematic import i18n
from aiohomematic.const import ADDRESS_SEPARATOR, DeviceDescription
from aiohomematic.exceptions import DescriptionNotFoundException
from aiohomematic.interfaces import DeviceDescriptionProviderProtocol, DeviceDescriptionsAccessProtocol
from aiohomematic.interfaces.model import DeviceRemovalInfoProtocol
from aiohomematic.schemas import normalize_device_description
from aiohomematic.store.persistent.base import BasePersistentCache
from aiohomematic.support import get_device_address

if TYPE_CHECKING:
    from aiohomematic.interfaces import ConfigProviderProtocol
    from aiohomematic.store.storage import StorageProtocol

_LOGGER: Final = logging.getLogger(__name__)


class DeviceDescriptionRegistry(
    BasePersistentCache, DeviceDescriptionProviderProtocol, DeviceDescriptionsAccessProtocol
):
    """Registry for device/channel descriptions."""

    # Bump version when normalization logic changes
    SCHEMA_VERSION: int = 2

    __slots__ = (
        "_addresses",
        "_device_descriptions",
    )

    def __init__(
        self,
        *,
        storage: StorageProtocol,
        config_provider: ConfigProviderProtocol,
    ) -> None:
        """
        Initialize the device description cache.

        Args:
            storage: Storage instance for persistence.
            config_provider: Provider for configuration access.

        """
        # {interface_id, {device_address, [channel_address]}}
        self._addresses: Final[dict[str, dict[str, set[str]]]] = defaultdict(lambda: defaultdict(set))
        # {interface_id, {address, device_descriptions}}
        self._device_descriptions: Final[dict[str, dict[str, DeviceDescription]]] = defaultdict(dict)
        super().__init__(
            storage=storage,
            config_provider=config_provider,
        )

    @property
    def _raw_device_descriptions(self) -> dict[str, list[DeviceDescription]]:
        """Return the raw device descriptions (alias to _content)."""
        return self._content

    @property
    def size(self) -> int:
        """Return total number of device descriptions in cache."""
        return sum(len(descriptions) for descriptions in self._raw_device_descriptions.values())

    def add_device(self, *, interface_id: str, device_description: DeviceDescription) -> None:
        """Add a device to the cache (normalized)."""
        # Normalize at ingestion
        normalized = normalize_device_description(device_description=device_description)
        # Fast-path: If the address is not yet known, skip costly removal operations.
        if (address := normalized["ADDRESS"]) not in self._device_descriptions[interface_id]:
            self._raw_device_descriptions[interface_id].append(normalized)
            _LOGGER.debug(
                "DEVICE_REGISTRY_ADD: Added device %s to %s (total: %s)",
                address,
                interface_id,
                len(self._raw_device_descriptions[interface_id]),
            )
            self._process_device_description(interface_id=interface_id, device_description=normalized)
            return
        # Address exists: remove old entries before adding the new description.
        self._remove_device(
            interface_id=interface_id,
            addresses_to_remove=[address],
        )
        self._raw_device_descriptions[interface_id].append(normalized)
        _LOGGER.debug(
            "DEVICE_REGISTRY_UPDATE: Updated device %s in %s (total: %s)",
            address,
            interface_id,
            len(self._raw_device_descriptions[interface_id]),
        )
        self._process_device_description(interface_id=interface_id, device_description=normalized)

    def find_device_description(self, *, interface_id: str, device_address: str) -> DeviceDescription | None:
        """Return the device description by interface and device_address."""
        return self._device_descriptions[interface_id].get(device_address)

    def get_addresses(self, *, interface_id: str | None = None) -> frozenset[str]:
        """Return the addresses by interface as a set."""
        if interface_id:
            return frozenset(self._addresses[interface_id])
        return frozenset(addr for interface_id in self.get_interface_ids() for addr in self._addresses[interface_id])

    def get_device_description(self, *, interface_id: str, address: str) -> DeviceDescription:
        """Return the device description by interface and device_address."""
        try:
            return self._device_descriptions[interface_id][address]
        except KeyError as exc:
            raise DescriptionNotFoundException(
                i18n.tr(
                    key="exception.store.device_description.not_found",
                    address=address,
                    interface_id=interface_id,
                )
            ) from exc

    def get_device_descriptions(self, *, interface_id: str) -> Mapping[str, DeviceDescription]:
        """Return the devices by interface."""
        return self._device_descriptions[interface_id]

    def get_device_with_channels(self, *, interface_id: str, device_address: str) -> Mapping[str, DeviceDescription]:
        """Return the device dict by interface and device_address."""
        device_descriptions: dict[str, DeviceDescription] = {
            device_address: self.get_device_description(interface_id=interface_id, address=device_address)
        }
        children = device_descriptions[device_address].get("CHILDREN", [])
        for channel_address in children:
            device_descriptions[channel_address] = self.get_device_description(
                interface_id=interface_id, address=channel_address
            )
        return device_descriptions

    def get_interface_ids(self) -> tuple[str, ...]:
        """Return the interface ids."""
        return tuple(self._raw_device_descriptions.keys())

    def get_model(self, *, device_address: str) -> str | None:
        """Return the device type."""
        for data in self._device_descriptions.values():
            if items := data.get(device_address):
                return items["TYPE"]
        return None

    def get_raw_device_descriptions(self, *, interface_id: str) -> list[DeviceDescription]:
        """Retrieve raw device descriptions from the cache."""
        return self._raw_device_descriptions[interface_id]

    def has_device_descriptions(self, *, interface_id: str) -> bool:
        """Return the devices by interface."""
        return interface_id in self._device_descriptions

    def remove_device(self, *, device: DeviceRemovalInfoProtocol) -> None:
        """Remove device from cache."""
        self._remove_device(
            interface_id=device.interface_id,
            addresses_to_remove=[device.address, *device.channels.keys()],
        )

    def _convert_device_descriptions(self, *, interface_id: str, device_descriptions: list[DeviceDescription]) -> None:
        """Convert provided list of device descriptions (normalized)."""
        for device_description in device_descriptions:
            # Normalize each description when loading
            normalized = normalize_device_description(device_description=device_description)
            self._process_device_description(interface_id=interface_id, device_description=normalized)

    def _create_empty_content(self) -> dict[str, Any]:
        """Create empty content structure."""
        return defaultdict(list)

    def _migrate_schema(self, *, data: dict[str, Any], from_version: int) -> dict[str, Any]:
        """Migrate device descriptions from older schema."""
        if from_version < 2:
            # Migration from v1: normalize all CHILDREN fields
            for interface_id, descriptions in data.items():
                if interface_id.startswith("_"):
                    continue
                for desc in descriptions:
                    children = desc.get("CHILDREN")
                    if children is None or isinstance(children, str):
                        desc["CHILDREN"] = []
        return data

    def _process_device_description(self, *, interface_id: str, device_description: DeviceDescription) -> None:
        """Convert provided dict of device descriptions."""
        address = device_description["ADDRESS"]
        device_address = get_device_address(address=address)
        self._device_descriptions[interface_id][address] = device_description

        # Avoid redundant membership checks; set.add is idempotent and cheaper than check+add
        addr_set = self._addresses[interface_id][device_address]
        addr_set.add(device_address)
        addr_set.add(address)

    def _process_loaded_content(self, *, data: dict[str, Any]) -> None:
        """Rebuild indexes from loaded data."""
        self._addresses.clear()
        self._device_descriptions.clear()
        for interface_id, device_descriptions in data.items():
            if interface_id.startswith("_"):  # Skip metadata keys
                continue
            self._convert_device_descriptions(
                interface_id=interface_id,
                device_descriptions=device_descriptions,
            )

    def _remove_device(self, *, interface_id: str, addresses_to_remove: list[str]) -> None:
        """Remove a device from the cache."""
        # Use a set for faster membership checks
        addresses_set = set(addresses_to_remove)
        self._raw_device_descriptions[interface_id] = [
            device for device in self._raw_device_descriptions[interface_id] if device["ADDRESS"] not in addresses_set
        ]
        addr_map = self._addresses[interface_id]
        desc_map = self._device_descriptions[interface_id]
        for address in addresses_set:
            # Pop with default to avoid KeyError and try/except overhead
            if ADDRESS_SEPARATOR not in address:
                addr_map.pop(address, None)
            desc_map.pop(address, None)