aiohomematic 2026.1.29__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiohomematic/__init__.py +110 -0
- aiohomematic/_log_context_protocol.py +29 -0
- aiohomematic/api.py +410 -0
- aiohomematic/async_support.py +250 -0
- aiohomematic/backend_detection.py +462 -0
- aiohomematic/central/__init__.py +103 -0
- aiohomematic/central/async_rpc_server.py +760 -0
- aiohomematic/central/central_unit.py +1152 -0
- aiohomematic/central/config.py +463 -0
- aiohomematic/central/config_builder.py +772 -0
- aiohomematic/central/connection_state.py +160 -0
- aiohomematic/central/coordinators/__init__.py +38 -0
- aiohomematic/central/coordinators/cache.py +414 -0
- aiohomematic/central/coordinators/client.py +480 -0
- aiohomematic/central/coordinators/connection_recovery.py +1141 -0
- aiohomematic/central/coordinators/device.py +1166 -0
- aiohomematic/central/coordinators/event.py +514 -0
- aiohomematic/central/coordinators/hub.py +532 -0
- aiohomematic/central/decorators.py +184 -0
- aiohomematic/central/device_registry.py +229 -0
- aiohomematic/central/events/__init__.py +104 -0
- aiohomematic/central/events/bus.py +1392 -0
- aiohomematic/central/events/integration.py +424 -0
- aiohomematic/central/events/types.py +194 -0
- aiohomematic/central/health.py +762 -0
- aiohomematic/central/rpc_server.py +353 -0
- aiohomematic/central/scheduler.py +794 -0
- aiohomematic/central/state_machine.py +391 -0
- aiohomematic/client/__init__.py +203 -0
- aiohomematic/client/_rpc_errors.py +187 -0
- aiohomematic/client/backends/__init__.py +48 -0
- aiohomematic/client/backends/base.py +335 -0
- aiohomematic/client/backends/capabilities.py +138 -0
- aiohomematic/client/backends/ccu.py +487 -0
- aiohomematic/client/backends/factory.py +116 -0
- aiohomematic/client/backends/homegear.py +294 -0
- aiohomematic/client/backends/json_ccu.py +252 -0
- aiohomematic/client/backends/protocol.py +316 -0
- aiohomematic/client/ccu.py +1857 -0
- aiohomematic/client/circuit_breaker.py +459 -0
- aiohomematic/client/config.py +64 -0
- aiohomematic/client/handlers/__init__.py +40 -0
- aiohomematic/client/handlers/backup.py +157 -0
- aiohomematic/client/handlers/base.py +79 -0
- aiohomematic/client/handlers/device_ops.py +1085 -0
- aiohomematic/client/handlers/firmware.py +144 -0
- aiohomematic/client/handlers/link_mgmt.py +199 -0
- aiohomematic/client/handlers/metadata.py +436 -0
- aiohomematic/client/handlers/programs.py +144 -0
- aiohomematic/client/handlers/sysvars.py +100 -0
- aiohomematic/client/interface_client.py +1304 -0
- aiohomematic/client/json_rpc.py +2068 -0
- aiohomematic/client/request_coalescer.py +282 -0
- aiohomematic/client/rpc_proxy.py +629 -0
- aiohomematic/client/state_machine.py +324 -0
- aiohomematic/const.py +2207 -0
- aiohomematic/context.py +275 -0
- aiohomematic/converter.py +270 -0
- aiohomematic/decorators.py +390 -0
- aiohomematic/exceptions.py +185 -0
- aiohomematic/hmcli.py +997 -0
- aiohomematic/i18n.py +193 -0
- aiohomematic/interfaces/__init__.py +407 -0
- aiohomematic/interfaces/central.py +1067 -0
- aiohomematic/interfaces/client.py +1096 -0
- aiohomematic/interfaces/coordinators.py +63 -0
- aiohomematic/interfaces/model.py +1921 -0
- aiohomematic/interfaces/operations.py +217 -0
- aiohomematic/logging_context.py +134 -0
- aiohomematic/metrics/__init__.py +125 -0
- aiohomematic/metrics/_protocols.py +140 -0
- aiohomematic/metrics/aggregator.py +534 -0
- aiohomematic/metrics/dataclasses.py +489 -0
- aiohomematic/metrics/emitter.py +292 -0
- aiohomematic/metrics/events.py +183 -0
- aiohomematic/metrics/keys.py +300 -0
- aiohomematic/metrics/observer.py +563 -0
- aiohomematic/metrics/stats.py +172 -0
- aiohomematic/model/__init__.py +189 -0
- aiohomematic/model/availability.py +65 -0
- aiohomematic/model/calculated/__init__.py +89 -0
- aiohomematic/model/calculated/climate.py +276 -0
- aiohomematic/model/calculated/data_point.py +315 -0
- aiohomematic/model/calculated/field.py +147 -0
- aiohomematic/model/calculated/operating_voltage_level.py +286 -0
- aiohomematic/model/calculated/support.py +232 -0
- aiohomematic/model/custom/__init__.py +214 -0
- aiohomematic/model/custom/capabilities/__init__.py +67 -0
- aiohomematic/model/custom/capabilities/climate.py +41 -0
- aiohomematic/model/custom/capabilities/light.py +87 -0
- aiohomematic/model/custom/capabilities/lock.py +44 -0
- aiohomematic/model/custom/capabilities/siren.py +63 -0
- aiohomematic/model/custom/climate.py +1130 -0
- aiohomematic/model/custom/cover.py +722 -0
- aiohomematic/model/custom/data_point.py +360 -0
- aiohomematic/model/custom/definition.py +300 -0
- aiohomematic/model/custom/field.py +89 -0
- aiohomematic/model/custom/light.py +1174 -0
- aiohomematic/model/custom/lock.py +322 -0
- aiohomematic/model/custom/mixins.py +445 -0
- aiohomematic/model/custom/profile.py +945 -0
- aiohomematic/model/custom/registry.py +251 -0
- aiohomematic/model/custom/siren.py +462 -0
- aiohomematic/model/custom/switch.py +195 -0
- aiohomematic/model/custom/text_display.py +289 -0
- aiohomematic/model/custom/valve.py +78 -0
- aiohomematic/model/data_point.py +1416 -0
- aiohomematic/model/device.py +1840 -0
- aiohomematic/model/event.py +216 -0
- aiohomematic/model/generic/__init__.py +327 -0
- aiohomematic/model/generic/action.py +40 -0
- aiohomematic/model/generic/action_select.py +62 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +31 -0
- aiohomematic/model/generic/data_point.py +177 -0
- aiohomematic/model/generic/dummy.py +150 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +56 -0
- aiohomematic/model/generic/sensor.py +76 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +33 -0
- aiohomematic/model/hub/__init__.py +100 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/connectivity.py +190 -0
- aiohomematic/model/hub/data_point.py +342 -0
- aiohomematic/model/hub/hub.py +864 -0
- aiohomematic/model/hub/inbox.py +135 -0
- aiohomematic/model/hub/install_mode.py +393 -0
- aiohomematic/model/hub/metrics.py +208 -0
- aiohomematic/model/hub/number.py +42 -0
- aiohomematic/model/hub/select.py +52 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +43 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/hub/update.py +221 -0
- aiohomematic/model/support.py +592 -0
- aiohomematic/model/update.py +140 -0
- aiohomematic/model/week_profile.py +1827 -0
- aiohomematic/property_decorators.py +719 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/accept_device_in_inbox.fn +51 -0
- aiohomematic/rega_scripts/create_backup_start.fn +28 -0
- aiohomematic/rega_scripts/create_backup_status.fn +89 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +97 -0
- aiohomematic/rega_scripts/get_backend_info.fn +25 -0
- aiohomematic/rega_scripts/get_inbox_devices.fn +61 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +31 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_service_messages.fn +83 -0
- aiohomematic/rega_scripts/get_system_update_info.fn +39 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +31 -0
- aiohomematic/rega_scripts/set_program_state.fn +17 -0
- aiohomematic/rega_scripts/set_system_variable.fn +19 -0
- aiohomematic/rega_scripts/trigger_firmware_update.fn +67 -0
- aiohomematic/schemas.py +256 -0
- aiohomematic/store/__init__.py +55 -0
- aiohomematic/store/dynamic/__init__.py +43 -0
- aiohomematic/store/dynamic/command.py +250 -0
- aiohomematic/store/dynamic/data.py +175 -0
- aiohomematic/store/dynamic/details.py +187 -0
- aiohomematic/store/dynamic/ping_pong.py +416 -0
- aiohomematic/store/persistent/__init__.py +71 -0
- aiohomematic/store/persistent/base.py +285 -0
- aiohomematic/store/persistent/device.py +233 -0
- aiohomematic/store/persistent/incident.py +380 -0
- aiohomematic/store/persistent/paramset.py +241 -0
- aiohomematic/store/persistent/session.py +556 -0
- aiohomematic/store/serialization.py +150 -0
- aiohomematic/store/storage.py +689 -0
- aiohomematic/store/types.py +526 -0
- aiohomematic/store/visibility/__init__.py +40 -0
- aiohomematic/store/visibility/parser.py +141 -0
- aiohomematic/store/visibility/registry.py +722 -0
- aiohomematic/store/visibility/rules.py +307 -0
- aiohomematic/strings.json +237 -0
- aiohomematic/support.py +706 -0
- aiohomematic/tracing.py +236 -0
- aiohomematic/translations/de.json +237 -0
- aiohomematic/translations/en.json +237 -0
- aiohomematic/type_aliases.py +51 -0
- aiohomematic/validator.py +128 -0
- aiohomematic-2026.1.29.dist-info/METADATA +296 -0
- aiohomematic-2026.1.29.dist-info/RECORD +188 -0
- aiohomematic-2026.1.29.dist-info/WHEEL +5 -0
- aiohomematic-2026.1.29.dist-info/entry_points.txt +2 -0
- aiohomematic-2026.1.29.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2026.1.29.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,689 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2021-2026
|
|
3
|
+
"""
|
|
4
|
+
Storage abstraction for persistent data.
|
|
5
|
+
|
|
6
|
+
This module provides a storage protocol and local implementation that can be
|
|
7
|
+
substituted with Home Assistant's Store when running within HA.
|
|
8
|
+
|
|
9
|
+
Overview
|
|
10
|
+
--------
|
|
11
|
+
The Storage class provides a unified interface for persisting JSON-serializable
|
|
12
|
+
data. It supports:
|
|
13
|
+
|
|
14
|
+
- orjson serialization for performance
|
|
15
|
+
- ZIP archive loading for backup files
|
|
16
|
+
- Version migrations for schema evolution
|
|
17
|
+
- Delayed/debounced saves to reduce I/O
|
|
18
|
+
- Atomic writes (write to temp, then rename)
|
|
19
|
+
|
|
20
|
+
Public API
|
|
21
|
+
----------
|
|
22
|
+
- StorageProtocol: Interface for storage operations
|
|
23
|
+
- StorageFactoryProtocol: Interface for creating storage instances
|
|
24
|
+
- Storage: Local file-based storage implementation
|
|
25
|
+
- LocalStorageFactory: Default factory using local Storage
|
|
26
|
+
- StorageError: Exception for storage operation failures
|
|
27
|
+
|
|
28
|
+
Example:
|
|
29
|
+
-------
|
|
30
|
+
Using local storage::
|
|
31
|
+
|
|
32
|
+
factory = LocalStorageFactory(
|
|
33
|
+
base_directory="/path/to/storage",
|
|
34
|
+
central_name="my-ccu",
|
|
35
|
+
)
|
|
36
|
+
storage = factory.create_storage(key="my_cache", version=1)
|
|
37
|
+
|
|
38
|
+
# Save data
|
|
39
|
+
await storage.save({"devices": [...]})
|
|
40
|
+
|
|
41
|
+
# Load data
|
|
42
|
+
data = await storage.load()
|
|
43
|
+
|
|
44
|
+
# Remove storage
|
|
45
|
+
await storage.remove()
|
|
46
|
+
|
|
47
|
+
Using delayed save::
|
|
48
|
+
|
|
49
|
+
# Schedule save with debouncing
|
|
50
|
+
await storage.delay_save(
|
|
51
|
+
data_func=lambda: cache.get_content(),
|
|
52
|
+
delay=2.0,
|
|
53
|
+
)
|
|
54
|
+
|
|
55
|
+
# Flush on shutdown
|
|
56
|
+
await storage.flush()
|
|
57
|
+
|
|
58
|
+
"""
|
|
59
|
+
|
|
60
|
+
from __future__ import annotations
|
|
61
|
+
|
|
62
|
+
import asyncio
|
|
63
|
+
from collections.abc import Awaitable, Callable
|
|
64
|
+
from functools import partial
|
|
65
|
+
import glob
|
|
66
|
+
import logging
|
|
67
|
+
import os
|
|
68
|
+
from typing import TYPE_CHECKING, Any, Final, Protocol, cast, runtime_checkable
|
|
69
|
+
import zipfile
|
|
70
|
+
|
|
71
|
+
import orjson
|
|
72
|
+
from slugify import slugify
|
|
73
|
+
|
|
74
|
+
if TYPE_CHECKING:
|
|
75
|
+
from aiohomematic.interfaces import TaskSchedulerProtocol
|
|
76
|
+
|
|
77
|
+
# Module-level logger for storage diagnostics (debug messages during
# migration, exception reports from failed delayed saves).
_LOGGER: Final = logging.getLogger(__name__)

# Type alias for an async migration function: receives the previously stored
# data dict and returns the data migrated to the current schema version.
MigrateFunc = Callable[[dict[str, Any]], Awaitable[dict[str, Any]]]
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class StorageError(Exception):
    """Raised when a storage operation (load, save, or remove) fails."""
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@runtime_checkable
class StorageProtocol(Protocol):
    """
    Protocol for storage operations.

    This protocol defines the interface that both the local Storage and
    Home Assistant's Store must implement. It provides async methods
    for loading, saving, and removing persisted data.

    The data format is always a serializable dict. Implementations
    must handle serialization internally.

    Supports:
    - Basic CRUD operations (load, save, remove)
    - ZIP archive loading
    - Version migrations
    - Delayed/debounced saves
    """

    @property
    def key(self) -> str:
        """Return the storage key identifier."""

    @property
    def version(self) -> int:
        """Return the storage version for migration support."""

    async def delay_save(
        self,
        *,
        data_func: Callable[[], dict[str, Any]],
        delay: float = 1.0,
    ) -> None:
        """
        Schedule a delayed save operation.

        Multiple calls within the delay period will reset the timer.
        Only the last data_func will be used when the save executes.

        Args:
            data_func: Callable that returns the data to save.
            delay: Delay in seconds before saving (default: 1.0).

        """

    async def flush(self) -> None:
        """
        Flush any pending delayed save immediately.

        Call this during shutdown to ensure data is saved.
        """

    async def load(self) -> dict[str, Any] | None:
        """
        Load data from storage.

        Returns:
            The stored data as dict, or None if no data exists.

        """

    async def remove(self) -> None:
        """Remove storage data."""

    async def save(self, *, data: dict[str, Any]) -> None:
        """
        Save data to storage.

        Args:
            data: Serializable dict to persist.

        Raises:
            StorageError: If data is not serializable or write fails.

        NOTE(review): the concrete local Storage implementation also accepts
        a list here when constructed with raw_mode=True — confirm whether
        this protocol signature should be widened to match.

        """
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@runtime_checkable
class StorageFactoryProtocol(Protocol):
    """
    Protocol for creating storage instances.

    This protocol allows aiohomematic to receive either a local storage
    factory or a Home Assistant store factory, enabling transparent
    substitution of storage backends.

    HomematicIP Local implements this protocol with a factory that
    creates HA Store instances. aiohomematic provides LocalStorageFactory
    as the default implementation.
    """

    def create_storage(
        self,
        *,
        key: str,
        version: int = 1,
        sub_directory: str | None = None,
        migrate_func: MigrateFunc | None = None,
        raw_mode: bool = True,
        formatted: bool = False,
        as_zip: bool = False,
    ) -> StorageProtocol:
        """
        Create a storage instance.

        Args:
            key: Unique identifier for this storage (e.g., "device_cache").
            version: Schema version for migration support.
            sub_directory: Optional subdirectory within base storage.
            migrate_func: Optional async function to migrate old data.
            raw_mode: If True, save data without metadata wrapper (_key, _version).
                Useful for export files that don't need version tracking.
            formatted: If True, write indented JSON for readability.
                Default is False (compact output).
            as_zip: If True, save data as ZIP archive.
                Default is False (plain JSON file).

        Returns:
            A storage instance implementing StorageProtocol.

        """
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
class Storage:
    """
    Local file-based storage implementation.

    This class provides a local alternative to Home Assistant's Store,
    using orjson for fast serialization. It implements StorageProtocol
    and can be used standalone or substituted with HA Store via the
    factory pattern.

    Features:
    - orjson serialization for performance
    - ZIP archive loading for backup files
    - Automatic version migration
    - Delayed/debounced saves
    - Atomic writes (write to temp, then rename)
    - Serialization validation

    Thread Safety:
        All operations are protected by an asyncio.Lock to prevent
        concurrent read/write conflicts.
    """

    __slots__ = (
        "_as_zip",
        "_base_directory",
        "_delay_handle",
        "_file_path",
        "_formatted",
        "_key",
        "_lock",
        "_migrate_func",
        "_pending_data_func",
        "_raw_mode",
        "_task_scheduler",
        "_version",
    )

    def __init__(
        self,
        *,
        key: str,
        base_directory: str,
        version: int = 1,
        sub_directory: str | None = None,
        task_scheduler: TaskSchedulerProtocol,
        migrate_func: MigrateFunc | None = None,
        raw_mode: bool = True,
        formatted: bool = False,
        as_zip: bool = False,
    ) -> None:
        """
        Initialize storage.

        Args:
            key: Unique identifier for this storage.
            base_directory: Root directory for storage files.
            version: Schema version.
            sub_directory: Optional subdirectory.
            task_scheduler: Scheduler for executor jobs.
                NOTE(review): several methods guard with `if self._task_scheduler:`
                and fall back to asyncio.to_thread, suggesting a falsy value is
                tolerated at runtime despite the non-optional annotation — confirm.
            migrate_func: Optional async function to migrate old data.
            raw_mode: If True, save data without metadata wrapper (_key, _version).
                Useful for export files that don't need version tracking.
            formatted: If True, write indented JSON for readability.
                Default is False (compact output).
            as_zip: If True, save data as ZIP archive.
                Default is False (plain JSON file).

        """
        self._key: Final = key
        self._version: Final = version
        self._task_scheduler: Final = task_scheduler
        self._migrate_func: Final = migrate_func
        self._raw_mode: Final = raw_mode
        self._formatted: Final = formatted
        self._as_zip: Final = as_zip
        self._lock: Final = asyncio.Lock()

        # Delayed save state: at most one timer and one pending data
        # producer exist at a time; re-scheduling replaces both.
        self._delay_handle: asyncio.TimerHandle | None = None
        self._pending_data_func: Callable[[], dict[str, Any]] | None = None

        # Build file path: {base}[/{sub}]/{key}.json
        directory = base_directory
        if sub_directory:
            directory = os.path.join(base_directory, sub_directory)
        self._base_directory: Final = directory
        self._file_path: Final = os.path.join(directory, f"{key}.json")

    @property
    def file_path(self) -> str:
        """Return the full file path."""
        return self._file_path

    @property
    def key(self) -> str:
        """Return the storage key identifier."""
        return self._key

    @property
    def version(self) -> int:
        """Return the storage version."""
        return self._version

    async def delay_save(
        self,
        *,
        data_func: Callable[[], dict[str, Any]],
        delay: float = 1.0,
    ) -> None:
        """
        Schedule a delayed save operation.

        Multiple calls within the delay period will reset the timer.
        Only the last data_func will be used when the save executes.

        Args:
            data_func: Callable that returns the data to save.
            delay: Delay in seconds before saving (default: 1.0).

        """
        # Cancel existing timer if any, so repeated calls debounce.
        if self._delay_handle is not None:
            self._delay_handle.cancel()
            self._delay_handle = None

        self._pending_data_func = data_func

        # Schedule new save; the callback fires on the running event loop.
        loop = asyncio.get_running_loop()
        self._delay_handle = loop.call_later(
            delay,
            self._trigger_delayed_save,
        )

    async def flush(self) -> None:
        """
        Flush any pending delayed save immediately.

        Call this during shutdown to ensure data is saved.
        """
        if self._delay_handle is not None:
            self._delay_handle.cancel()
            self._delay_handle = None

        if self._pending_data_func is not None:
            await self._execute_delayed_save()

    async def load(self) -> dict[str, Any] | None:
        """
        Load data from storage asynchronously.

        Supports loading from:
        - Regular JSON files
        - ZIP archives containing JSON

        If a migration function was provided and the stored version
        is older than the current version, migration is performed
        automatically.

        Returns:
            The stored data as dict, or None if file doesn't exist.

        Raises:
            StorageError: If file exists but cannot be read/parsed.

        """
        async with self._lock:
            if (raw_data := await self._load_raw()) is None:
                return None

            # Check version and migrate if needed. Data without a metadata
            # wrapper is treated as version 1 (see _parse_and_unwrap).
            stored_version = cast(int, raw_data.get("_version", 1))
            data = cast(dict[str, Any], raw_data.get("data", raw_data))

            if stored_version < self._version and self._migrate_func:
                _LOGGER.debug(
                    "STORAGE: Migrating %s from version %s to %s",
                    self._key,
                    stored_version,
                    self._version,
                )
                data = await self._migrate_func(data)
                # Save migrated data (without re-acquiring the lock).
                # NOTE(review): in raw_mode the new version is not persisted,
                # so the migration would re-run on next load — confirm intended.
                await self._save_internal(data=data)

            return data

    async def remove(self) -> None:
        """Remove storage file asynchronously."""
        async with self._lock:
            if self._task_scheduler:
                await self._task_scheduler.async_add_executor_job(self._remove_sync, name="storage-remove")
            else:
                await asyncio.to_thread(self._remove_sync)

    async def save(self, *, data: dict[str, Any] | list[Any]) -> None:
        """
        Save data to storage asynchronously.

        Args:
            data: Data to persist. Must be JSON-serializable.
                In normal mode, must be a dict. In raw_mode, can be dict or list.

        Raises:
            StorageError: If data is not serializable or write fails.

        """
        # Validate before taking the lock so bad data fails fast.
        self._validate_serializable(data=data)

        async with self._lock:
            await self._save_internal(data=data)

    async def _execute_delayed_save(self) -> None:
        """Execute the pending delayed save, if one is still scheduled."""
        if self._pending_data_func is None:
            return

        # Materialize the data and clear pending state before awaiting,
        # so a concurrent flush() doesn't save twice.
        data = self._pending_data_func()
        self._pending_data_func = None
        self._delay_handle = None

        try:
            await self.save(data=data)
        except StorageError:
            _LOGGER.exception("STORAGE: Delayed save failed for %s", self._key)  # i18n-log: ignore

    def _load_from_zip(self, *, zip_path: str) -> dict[str, Any]:
        """Load data from the first JSON member of a ZIP archive."""
        try:
            with zipfile.ZipFile(zip_path, mode="r") as zf:
                if not (json_files := [n for n in zf.namelist() if n.lower().endswith(".json")]):
                    raise StorageError(f"No JSON file found in ZIP: {zip_path}")  # i18n-exc: ignore
                raw = zf.read(json_files[0])
                return self._parse_and_unwrap(raw_data=orjson.loads(raw))
        except (zipfile.BadZipFile, OSError) as exc:
            raise StorageError(f"Failed to load ZIP '{zip_path}': {exc}") from exc  # i18n-exc: ignore

    async def _load_raw(self) -> dict[str, Any] | None:
        """Load raw data without migration, off the event loop."""
        if self._task_scheduler:
            return await self._task_scheduler.async_add_executor_job(self._load_sync, name="storage-load")
        return await asyncio.to_thread(self._load_sync)

    def _load_sync(self) -> dict[str, Any] | None:
        """Load data synchronously with ZIP support."""
        # Check if file exists, try ZIP variant if not.
        if not os.path.exists(self._file_path):
            zip_path = f"{self._file_path}.zip"
            if os.path.exists(zip_path):
                return self._load_from_zip(zip_path=zip_path)
            return None

        # The .json path itself may contain a ZIP archive (backup import).
        if zipfile.is_zipfile(self._file_path):
            return self._load_from_zip(zip_path=self._file_path)

        # Regular JSON load.
        try:
            with open(self._file_path, "rb") as f:
                return self._parse_and_unwrap(raw_data=orjson.loads(f.read()))
        except (orjson.JSONDecodeError, OSError) as exc:
            raise StorageError(f"Failed to load storage '{self._key}': {exc}") from exc  # i18n-exc: ignore

    def _parse_and_unwrap(self, *, raw_data: Any) -> dict[str, Any]:
        """Normalize loaded JSON to the metadata-wrapped structure."""
        if isinstance(raw_data, dict) and "data" in raw_data and "_version" in raw_data:
            # Already wrapped: return full structure for version checking.
            return raw_data
        # Legacy format or unwrapped (raw_mode) data: assume version 1.
        return {"data": raw_data, "_version": 1}

    def _remove_sync(self) -> None:
        """Remove storage file synchronously (no-op if absent)."""
        if os.path.exists(self._file_path):
            os.remove(self._file_path)

    async def _save_internal(self, *, data: dict[str, Any] | list[Any]) -> None:
        """Save data internally without acquiring the lock (caller holds it)."""
        if self._task_scheduler:
            await self._task_scheduler.async_add_executor_job(partial(self._save_sync, data=data), name="storage-save")
        else:
            await asyncio.to_thread(self._save_sync, data=data)

    def _save_sync(self, *, data: dict[str, Any] | list[Any]) -> None:
        """Save data synchronously with atomic write (temp file + rename)."""
        # Ensure directory exists.
        os.makedirs(self._base_directory, exist_ok=True)

        # In raw mode, save data directly; otherwise wrap with version metadata.
        to_save = data if self._raw_mode else {"_version": self._version, "_key": self._key, "data": data}

        # Serialize (formatted with indentation or compact).
        opts = orjson.OPT_NON_STR_KEYS | (orjson.OPT_INDENT_2 if self._formatted else 0)
        try:
            serialized = orjson.dumps(to_save, option=opts)
        except TypeError as exc:
            raise StorageError(f"Data not serializable for '{self._key}': {exc}") from exc  # i18n-exc: ignore

        # Determine target path and temp path.
        target_path = f"{self._file_path}.zip" if self._as_zip else self._file_path
        temp_path = f"{target_path}.tmp"

        try:
            if self._as_zip:
                # Write as ZIP archive.
                with zipfile.ZipFile(temp_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
                    zf.writestr(f"{self._key}.json", serialized)
            else:
                # Write as plain JSON.
                with open(temp_path, "wb") as f:
                    f.write(serialized)
            # Atomic on POSIX: readers never observe a partial file.
            os.replace(temp_path, target_path)
        except OSError as exc:
            # Clean up temp file on failure.
            if os.path.exists(temp_path):
                os.remove(temp_path)
            raise StorageError(f"Failed to save storage '{self._key}': {exc}") from exc  # i18n-exc: ignore

    def _trigger_delayed_save(self) -> None:
        """
        Timer callback: schedule the delayed save coroutine as a task.

        Fix: the original called self._task_scheduler.create_task()
        unconditionally, although every other code path in this class guards
        with `if self._task_scheduler:` and provides an asyncio fallback —
        a delayed save would crash exactly in the configuration the rest of
        the class tolerates. Apply the same guard here; the fallback is safe
        because call_later callbacks always run inside the event loop.
        """
        coro = self._execute_delayed_save()
        if self._task_scheduler:
            self._task_scheduler.create_task(
                target=coro,
                name=f"storage-delayed-save-{self._key}",
            )
        else:
            asyncio.get_running_loop().create_task(
                coro,
                name=f"storage-delayed-save-{self._key}",
            )

    def _validate_serializable(self, *, data: dict[str, Any] | list[Any]) -> None:
        """
        Validate that data is serializable.

        Args:
            data: Data to validate.

        Raises:
            StorageError: If data is not serializable or (in normal mode) not a dict.

        """
        # In raw_mode, accept both dict and list; otherwise require dict
        # (the metadata wrapper assumes a dict payload).
        if not self._raw_mode and not isinstance(data, dict):
            raise StorageError(  # i18n-exc: ignore
                f"Storage '{self._key}' requires dict, got {type(data).__name__}"
            )

        try:
            orjson.dumps(data, option=orjson.OPT_NON_STR_KEYS)
        except TypeError as exc:
            raise StorageError(  # i18n-exc: ignore
                f"Data for storage '{self._key}' is not JSON-serializable: {exc}"
            ) from exc
|
|
558
|
+
|
|
559
|
+
|
|
560
|
+
class LocalStorageFactory:
    """
    Factory for creating local Storage instances.

    This is the default factory used by aiohomematic when no external
    factory (e.g., from Home Assistant) is provided.

    Example::

        factory = LocalStorageFactory(
            base_directory="/config/aiohomematic",
            central_name="my-ccu",
        )
        device_storage = factory.create_storage(
            key="device_cache",
            version=1,
            sub_directory="cache",
        )
    """

    __slots__ = ("_base_directory", "_central_name", "_task_scheduler")

    def __init__(
        self,
        *,
        base_directory: str,
        central_name: str,
        task_scheduler: TaskSchedulerProtocol,
    ) -> None:
        """
        Initialize the factory.

        Args:
            base_directory: Root directory for all storage files.
            central_name: Name of the central unit (used in file names).
            task_scheduler: Scheduler for async executor jobs.

        """
        self._base_directory: Final = base_directory
        self._central_name: Final = central_name
        self._task_scheduler: Final = task_scheduler

    async def cleanup_files(self, *, sub_directory: str | None = None) -> int:
        """
        Remove all storage files for this central unit.

        Deletes all storage files matching the central name pattern in the
        specified directory. Useful for clearing caches or resetting state.

        Args:
            sub_directory: Optional subdirectory to clean. If None, cleans
                the base directory.

        Returns:
            Number of files deleted.

        """
        if self._task_scheduler:
            return await self._task_scheduler.async_add_executor_job(
                partial(self._cleanup_files_sync, sub_directory=sub_directory), name="storage-cleanup"
            )
        return await asyncio.to_thread(self._cleanup_files_sync, sub_directory=sub_directory)

    def create_storage(
        self,
        *,
        key: str,
        version: int = 1,
        sub_directory: str | None = None,
        migrate_func: MigrateFunc | None = None,
        raw_mode: bool = True,
        formatted: bool = False,
        as_zip: bool = False,
    ) -> StorageProtocol:
        """
        Create a storage instance.

        The storage key is prefixed with the central name to allow
        multiple central units to coexist.

        Args:
            key: Base key for this storage.
            version: Schema version.
            sub_directory: Optional subdirectory.
            migrate_func: Optional async migration function.
            raw_mode: If True, save data without metadata wrapper (_key, _version).
                Useful for export files that don't need version tracking.
            formatted: If True, write indented JSON for readability.
                Default is False (compact output).
            as_zip: If True, save data as ZIP archive.
                Default is False (plain JSON file).

        Returns:
            Storage instance.

        """
        # Prefix key with central name (slugified) so cleanup_files can
        # later match all files of this central via the same prefix.
        full_key = f"{slugify(self._central_name)}_{key}"

        return Storage(
            key=full_key,
            base_directory=self._base_directory,
            version=version,
            sub_directory=sub_directory,
            task_scheduler=self._task_scheduler,
            migrate_func=migrate_func,
            raw_mode=raw_mode,
            formatted=formatted,
            as_zip=as_zip,
        )

    def _cleanup_files_sync(self, *, sub_directory: str | None) -> int:
        """
        Delete storage files synchronously.

        Fix: the original matched only "{central}*.json", so ZIP storage
        files ("{key}.json.zip", written by Storage when as_zip=True) and
        crash-leftover "*.tmp" files were never cleaned despite the
        "remove all storage files" contract. Match all written variants.
        """
        directory = self._base_directory
        if sub_directory:
            directory = os.path.join(self._base_directory, sub_directory)

        if not os.path.exists(directory):
            return 0

        # Patterns cover every file Storage may write for this central:
        # plain JSON, ZIP archives, and temp files from interrupted saves.
        prefix = os.path.join(directory, slugify(self._central_name))
        patterns = (
            f"{prefix}*.json",
            f"{prefix}*.json.zip",
            f"{prefix}*.json.tmp",
            f"{prefix}*.json.zip.tmp",
        )
        deleted_count = 0

        for pattern in patterns:
            for file_path in glob.glob(pattern):
                if os.path.isfile(file_path):
                    os.remove(file_path)
                    deleted_count += 1

        return deleted_count
|