aiohomematic 2026.1.29__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiohomematic/__init__.py +110 -0
- aiohomematic/_log_context_protocol.py +29 -0
- aiohomematic/api.py +410 -0
- aiohomematic/async_support.py +250 -0
- aiohomematic/backend_detection.py +462 -0
- aiohomematic/central/__init__.py +103 -0
- aiohomematic/central/async_rpc_server.py +760 -0
- aiohomematic/central/central_unit.py +1152 -0
- aiohomematic/central/config.py +463 -0
- aiohomematic/central/config_builder.py +772 -0
- aiohomematic/central/connection_state.py +160 -0
- aiohomematic/central/coordinators/__init__.py +38 -0
- aiohomematic/central/coordinators/cache.py +414 -0
- aiohomematic/central/coordinators/client.py +480 -0
- aiohomematic/central/coordinators/connection_recovery.py +1141 -0
- aiohomematic/central/coordinators/device.py +1166 -0
- aiohomematic/central/coordinators/event.py +514 -0
- aiohomematic/central/coordinators/hub.py +532 -0
- aiohomematic/central/decorators.py +184 -0
- aiohomematic/central/device_registry.py +229 -0
- aiohomematic/central/events/__init__.py +104 -0
- aiohomematic/central/events/bus.py +1392 -0
- aiohomematic/central/events/integration.py +424 -0
- aiohomematic/central/events/types.py +194 -0
- aiohomematic/central/health.py +762 -0
- aiohomematic/central/rpc_server.py +353 -0
- aiohomematic/central/scheduler.py +794 -0
- aiohomematic/central/state_machine.py +391 -0
- aiohomematic/client/__init__.py +203 -0
- aiohomematic/client/_rpc_errors.py +187 -0
- aiohomematic/client/backends/__init__.py +48 -0
- aiohomematic/client/backends/base.py +335 -0
- aiohomematic/client/backends/capabilities.py +138 -0
- aiohomematic/client/backends/ccu.py +487 -0
- aiohomematic/client/backends/factory.py +116 -0
- aiohomematic/client/backends/homegear.py +294 -0
- aiohomematic/client/backends/json_ccu.py +252 -0
- aiohomematic/client/backends/protocol.py +316 -0
- aiohomematic/client/ccu.py +1857 -0
- aiohomematic/client/circuit_breaker.py +459 -0
- aiohomematic/client/config.py +64 -0
- aiohomematic/client/handlers/__init__.py +40 -0
- aiohomematic/client/handlers/backup.py +157 -0
- aiohomematic/client/handlers/base.py +79 -0
- aiohomematic/client/handlers/device_ops.py +1085 -0
- aiohomematic/client/handlers/firmware.py +144 -0
- aiohomematic/client/handlers/link_mgmt.py +199 -0
- aiohomematic/client/handlers/metadata.py +436 -0
- aiohomematic/client/handlers/programs.py +144 -0
- aiohomematic/client/handlers/sysvars.py +100 -0
- aiohomematic/client/interface_client.py +1304 -0
- aiohomematic/client/json_rpc.py +2068 -0
- aiohomematic/client/request_coalescer.py +282 -0
- aiohomematic/client/rpc_proxy.py +629 -0
- aiohomematic/client/state_machine.py +324 -0
- aiohomematic/const.py +2207 -0
- aiohomematic/context.py +275 -0
- aiohomematic/converter.py +270 -0
- aiohomematic/decorators.py +390 -0
- aiohomematic/exceptions.py +185 -0
- aiohomematic/hmcli.py +997 -0
- aiohomematic/i18n.py +193 -0
- aiohomematic/interfaces/__init__.py +407 -0
- aiohomematic/interfaces/central.py +1067 -0
- aiohomematic/interfaces/client.py +1096 -0
- aiohomematic/interfaces/coordinators.py +63 -0
- aiohomematic/interfaces/model.py +1921 -0
- aiohomematic/interfaces/operations.py +217 -0
- aiohomematic/logging_context.py +134 -0
- aiohomematic/metrics/__init__.py +125 -0
- aiohomematic/metrics/_protocols.py +140 -0
- aiohomematic/metrics/aggregator.py +534 -0
- aiohomematic/metrics/dataclasses.py +489 -0
- aiohomematic/metrics/emitter.py +292 -0
- aiohomematic/metrics/events.py +183 -0
- aiohomematic/metrics/keys.py +300 -0
- aiohomematic/metrics/observer.py +563 -0
- aiohomematic/metrics/stats.py +172 -0
- aiohomematic/model/__init__.py +189 -0
- aiohomematic/model/availability.py +65 -0
- aiohomematic/model/calculated/__init__.py +89 -0
- aiohomematic/model/calculated/climate.py +276 -0
- aiohomematic/model/calculated/data_point.py +315 -0
- aiohomematic/model/calculated/field.py +147 -0
- aiohomematic/model/calculated/operating_voltage_level.py +286 -0
- aiohomematic/model/calculated/support.py +232 -0
- aiohomematic/model/custom/__init__.py +214 -0
- aiohomematic/model/custom/capabilities/__init__.py +67 -0
- aiohomematic/model/custom/capabilities/climate.py +41 -0
- aiohomematic/model/custom/capabilities/light.py +87 -0
- aiohomematic/model/custom/capabilities/lock.py +44 -0
- aiohomematic/model/custom/capabilities/siren.py +63 -0
- aiohomematic/model/custom/climate.py +1130 -0
- aiohomematic/model/custom/cover.py +722 -0
- aiohomematic/model/custom/data_point.py +360 -0
- aiohomematic/model/custom/definition.py +300 -0
- aiohomematic/model/custom/field.py +89 -0
- aiohomematic/model/custom/light.py +1174 -0
- aiohomematic/model/custom/lock.py +322 -0
- aiohomematic/model/custom/mixins.py +445 -0
- aiohomematic/model/custom/profile.py +945 -0
- aiohomematic/model/custom/registry.py +251 -0
- aiohomematic/model/custom/siren.py +462 -0
- aiohomematic/model/custom/switch.py +195 -0
- aiohomematic/model/custom/text_display.py +289 -0
- aiohomematic/model/custom/valve.py +78 -0
- aiohomematic/model/data_point.py +1416 -0
- aiohomematic/model/device.py +1840 -0
- aiohomematic/model/event.py +216 -0
- aiohomematic/model/generic/__init__.py +327 -0
- aiohomematic/model/generic/action.py +40 -0
- aiohomematic/model/generic/action_select.py +62 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +31 -0
- aiohomematic/model/generic/data_point.py +177 -0
- aiohomematic/model/generic/dummy.py +150 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +56 -0
- aiohomematic/model/generic/sensor.py +76 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +33 -0
- aiohomematic/model/hub/__init__.py +100 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/connectivity.py +190 -0
- aiohomematic/model/hub/data_point.py +342 -0
- aiohomematic/model/hub/hub.py +864 -0
- aiohomematic/model/hub/inbox.py +135 -0
- aiohomematic/model/hub/install_mode.py +393 -0
- aiohomematic/model/hub/metrics.py +208 -0
- aiohomematic/model/hub/number.py +42 -0
- aiohomematic/model/hub/select.py +52 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +43 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/hub/update.py +221 -0
- aiohomematic/model/support.py +592 -0
- aiohomematic/model/update.py +140 -0
- aiohomematic/model/week_profile.py +1827 -0
- aiohomematic/property_decorators.py +719 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/accept_device_in_inbox.fn +51 -0
- aiohomematic/rega_scripts/create_backup_start.fn +28 -0
- aiohomematic/rega_scripts/create_backup_status.fn +89 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +97 -0
- aiohomematic/rega_scripts/get_backend_info.fn +25 -0
- aiohomematic/rega_scripts/get_inbox_devices.fn +61 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +31 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_service_messages.fn +83 -0
- aiohomematic/rega_scripts/get_system_update_info.fn +39 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +31 -0
- aiohomematic/rega_scripts/set_program_state.fn +17 -0
- aiohomematic/rega_scripts/set_system_variable.fn +19 -0
- aiohomematic/rega_scripts/trigger_firmware_update.fn +67 -0
- aiohomematic/schemas.py +256 -0
- aiohomematic/store/__init__.py +55 -0
- aiohomematic/store/dynamic/__init__.py +43 -0
- aiohomematic/store/dynamic/command.py +250 -0
- aiohomematic/store/dynamic/data.py +175 -0
- aiohomematic/store/dynamic/details.py +187 -0
- aiohomematic/store/dynamic/ping_pong.py +416 -0
- aiohomematic/store/persistent/__init__.py +71 -0
- aiohomematic/store/persistent/base.py +285 -0
- aiohomematic/store/persistent/device.py +233 -0
- aiohomematic/store/persistent/incident.py +380 -0
- aiohomematic/store/persistent/paramset.py +241 -0
- aiohomematic/store/persistent/session.py +556 -0
- aiohomematic/store/serialization.py +150 -0
- aiohomematic/store/storage.py +689 -0
- aiohomematic/store/types.py +526 -0
- aiohomematic/store/visibility/__init__.py +40 -0
- aiohomematic/store/visibility/parser.py +141 -0
- aiohomematic/store/visibility/registry.py +722 -0
- aiohomematic/store/visibility/rules.py +307 -0
- aiohomematic/strings.json +237 -0
- aiohomematic/support.py +706 -0
- aiohomematic/tracing.py +236 -0
- aiohomematic/translations/de.json +237 -0
- aiohomematic/translations/en.json +237 -0
- aiohomematic/type_aliases.py +51 -0
- aiohomematic/validator.py +128 -0
- aiohomematic-2026.1.29.dist-info/METADATA +296 -0
- aiohomematic-2026.1.29.dist-info/RECORD +188 -0
- aiohomematic-2026.1.29.dist-info/WHEEL +5 -0
- aiohomematic-2026.1.29.dist-info/entry_points.txt +2 -0
- aiohomematic-2026.1.29.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2026.1.29.dist-info/top_level.txt +1 -0
aiohomematic/store/persistent/incident.py

@@ -0,0 +1,380 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2021-2026
"""
Incident store for persistent diagnostic snapshots.

This module provides IncidentStore which persists diagnostic incidents
for post-mortem analysis. Unlike Journal events which expire after TTL,
incidents are preserved indefinitely (up to max count/age) and survive restarts.

Overview
--------
The IncidentStore captures significant events like:
- PingPong mismatch threshold crossings
- Connection losses and restorations
- RPC errors and timeouts
- Device unavailability

Each incident includes:
- Timestamp and severity
- Interface context
- Journal excerpt at time of incident
- Additional debugging context

Persistence Strategy
--------------------
- Save-on-incident: Automatically saves after each recorded incident
- Load-on-demand: Only loads from disk when diagnostics are requested
- Time-based cleanup: Old incidents are removed on load (default: 7 days)
- Size-based limit: Maximum number of incidents (default: 50)

Public API
----------
- IncidentStore: Persistent incident storage with size/time limits
"""

from __future__ import annotations

from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, Any, Final
import uuid

from aiohomematic.const import INCIDENT_STORE_MAX_PER_TYPE
from aiohomematic.store.persistent.base import BasePersistentCache
from aiohomematic.store.types import IncidentSeverity, IncidentSnapshot, IncidentType

if TYPE_CHECKING:
    from aiohomematic.interfaces import ConfigProviderProtocol
    from aiohomematic.store.storage import StorageProtocol
    from aiohomematic.store.types import PingPongJournal

from aiohomematic.interfaces import IncidentRecorderProtocol

_LOGGER: Final = logging.getLogger(__name__)

# Default retention period for incidents
DEFAULT_MAX_AGE_DAYS: Final = 7


class IncidentStore(BasePersistentCache, IncidentRecorderProtocol):
    """
    Persistent store for diagnostic incidents.

    Stores incident snapshots that survive application restarts.
    Uses a "save-on-incident, load-on-demand" strategy:

    - When an incident is recorded, it's automatically persisted
    - Historical incidents are only loaded when diagnostics are requested
    - Old incidents (beyond max_age_days) are cleaned up on load

    Features:
    - Persistent storage via StorageProtocol
    - Automatic save after each incident (debounced)
    - Lazy loading on first diagnostics request
    - Time-based cleanup (default: 7 days)
    - Per-IncidentType size limiting (default: 20 per type)
    - Journal excerpt capture at incident time

    Storage Organization:
        Incidents are stored per-IncidentType to ensure each incident type
        maintains its own history without being crowded out by high-frequency
        incident types.

    """

    __slots__ = ("_incidents_by_type", "_loaded", "_max_age_days", "_max_per_type")

    def __init__(
        self,
        *,
        storage: StorageProtocol,
        config_provider: ConfigProviderProtocol,
        max_per_type: int = INCIDENT_STORE_MAX_PER_TYPE,
        max_age_days: int = DEFAULT_MAX_AGE_DAYS,
    ) -> None:
        """
        Initialize the incident store.

        Args:
            storage: Storage instance for persistence.
            config_provider: Provider for configuration access.
            max_per_type: Maximum number of incidents per IncidentType (default: 20).
            max_age_days: Maximum age of incidents in days (default: 7).

        """
        super().__init__(storage=storage, config_provider=config_provider)
        self._max_per_type: Final = max_per_type
        self._max_age_days: Final = max_age_days
        self._incidents_by_type: dict[IncidentType, list[IncidentSnapshot]] = {}
        self._loaded: bool = False

    @property
    def incident_count(self) -> int:
        """Return the total number of stored incidents (in-memory only)."""
        return sum(len(incidents) for incidents in self._incidents_by_type.values())

    @property
    def incidents(self) -> list[IncidentSnapshot]:
        """Return all incidents as a flat list sorted by timestamp (in-memory only)."""
        all_incidents: list[IncidentSnapshot] = []
        for incidents in self._incidents_by_type.values():
            all_incidents.extend(incidents)
        all_incidents.sort(key=lambda i: i.timestamp_iso)
        return all_incidents

    @property
    def is_loaded(self) -> bool:
        """Return True if historical incidents have been loaded from disk."""
        return self._loaded

    def clear_incidents(self) -> None:
        """Clear all incidents from memory (does not affect persistence)."""
        self._incidents_by_type.clear()
        self._content["incidents"] = []

    async def get_all_incidents(self) -> list[IncidentSnapshot]:
        """
        Return all incidents including historical ones from disk.

        Loads from disk on first call.
        """
        await self._ensure_loaded()
        return self.incidents

    async def get_diagnostics(self) -> dict[str, Any]:
        """
        Return diagnostics data for HA Diagnostics.

        Loads historical incidents from disk on first call.
        """
        await self._ensure_loaded()
        all_incidents = self.incidents
        return {
            "total_incidents": len(all_incidents),
            "max_per_type": self._max_per_type,
            "max_age_days": self._max_age_days,
            "incidents_by_type": self._count_by_type(),
            "incidents_by_severity": self._count_by_severity(),
            "recent_incidents": [i.to_dict() for i in all_incidents[-10:]],
        }

    async def get_incidents_by_interface(self, *, interface_id: str) -> list[IncidentSnapshot]:
        """
        Return incidents for a specific interface.

        Loads historical incidents from disk on first call.
        """
        await self._ensure_loaded()
        return [i for i in self.incidents if i.interface_id == interface_id]

    async def get_incidents_by_type(self, *, incident_type: IncidentType) -> list[IncidentSnapshot]:
        """
        Return incidents of a specific type.

        Loads historical incidents from disk on first call.
        """
        await self._ensure_loaded()
        return list(self._incidents_by_type.get(incident_type, []))

    async def get_recent_incidents(self, *, limit: int = 20) -> list[dict[str, Any]]:
        """
        Return recent incidents as list of dicts.

        Loads historical incidents from disk on first call.
        """
        await self._ensure_loaded()
        all_incidents = self.incidents
        return [i.to_dict() for i in all_incidents[-limit:]]

    async def record_incident(
        self,
        *,
        incident_type: IncidentType,
        severity: IncidentSeverity,
        message: str,
        interface_id: str | None = None,
        context: dict[str, Any] | None = None,
        journal: PingPongJournal | None = None,
    ) -> IncidentSnapshot:
        """
        Record a new incident and persist it.

        The incident is saved to disk automatically (debounced).
        Does NOT load historical incidents - only adds to current session.

        Args:
            incident_type: Type of incident.
            severity: Severity level.
            message: Human-readable description.
            interface_id: Interface where incident occurred (optional).
            context: Additional debugging context (optional).
            journal: Journal to extract excerpt from (optional).

        Returns:
            The created IncidentSnapshot.

        """
        # Generate unique incident ID
        incident_id = f"{incident_type.value}_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"

        # Extract journal excerpt if available
        journal_excerpt: list[dict[str, Any]] = []
        if journal is not None:
            journal_excerpt = journal.get_recent_events(limit=20)

        incident = IncidentSnapshot(
            incident_id=incident_id,
            timestamp_iso=datetime.now().isoformat(timespec="milliseconds"),
            incident_type=incident_type,
            severity=severity,
            interface_id=interface_id,
            message=message,
            context=context or {},
            journal_excerpt=journal_excerpt,
        )

        # Get or create the list for this incident type
        if incident_type not in self._incidents_by_type:
            self._incidents_by_type[incident_type] = []
        type_incidents = self._incidents_by_type[incident_type]

        type_incidents.append(incident)

        # Enforce per-type size limit
        while len(type_incidents) > self._max_per_type:
            evicted = type_incidents.pop(0)
            _LOGGER.debug(
                "INCIDENT STORE: Evicted oldest %s incident %s to maintain per-type limit %d",
                incident_type.value,
                evicted.incident_id,
                self._max_per_type,
            )

        # Update content for persistence (flat list for storage compatibility)
        self._content["incidents"] = [i.to_dict() for i in self.incidents]

        # Always log at DEBUG level - the incident store's purpose is to RECORD
        # incidents for later diagnosis, not to actively log them. Active logging
        # (ERROR/WARNING level) is already handled at the source (e.g., log_boundary_error
        # in rpc_proxy.py, @inspector decorator, etc.)
        _LOGGER.debug(  # i18n-log: ignore
            "INCIDENT STORE: Recorded %s incident: %s (interface: %s)",
            severity.value.upper(),
            message,
            interface_id or "N/A",
        )

        # Auto-save with debouncing (2 second delay to batch rapid incidents)
        await self.save_delayed(delay=2.0)

        return incident

    def _count_by_severity(self) -> dict[str, int]:
        """Count incidents by severity."""
        counts: dict[str, int] = {}
        for incidents in self._incidents_by_type.values():
            for incident in incidents:
                key = incident.severity.value
                counts[key] = counts.get(key, 0) + 1
        return counts

    def _count_by_type(self) -> dict[str, int]:
        """Count incidents by type."""
        return {incident_type.value: len(incidents) for incident_type, incidents in self._incidents_by_type.items()}

    def _create_empty_content(self) -> dict[str, Any]:
        """Create empty content structure."""
        return {"incidents": []}

    async def _ensure_loaded(self) -> None:
        """Load historical incidents from disk if not already loaded."""
        if self._loaded:
            return

        # Remember current in-memory incidents (from this session)
        current_session_by_type: dict[IncidentType, list[IncidentSnapshot]] = {}
        for incident_type, incidents in self._incidents_by_type.items():
            current_session_by_type[incident_type] = list(incidents)

        # Load from disk
        await self.load()

        # Merge: disk incidents first, then current session incidents
        # (avoiding duplicates by incident_id)
        for incident_type, session_incidents in current_session_by_type.items():
            if incident_type not in self._incidents_by_type:
                self._incidents_by_type[incident_type] = []
            type_incidents = self._incidents_by_type[incident_type]
            existing_ids = {i.incident_id for i in type_incidents}
            for incident in session_incidents:
                if incident.incident_id not in existing_ids:
                    type_incidents.append(incident)

        # Re-sort by timestamp and enforce per-type limits
        for incidents in self._incidents_by_type.values():
            incidents.sort(key=lambda i: i.timestamp_iso)
            while len(incidents) > self._max_per_type:
                incidents.pop(0)

        self._loaded = True

    def _process_loaded_content(self, *, data: dict[str, Any]) -> None:
        """
        Rebuild incidents by type from loaded data.

        Applies time-based cleanup: incidents older than max_age_days are removed.
        Enforces per-type size limits.
        """
        self._incidents_by_type.clear()
        incidents_data = data.get("incidents", [])

        # Calculate cutoff time for age-based cleanup
        cutoff_time = datetime.now() - timedelta(days=self._max_age_days)

        loaded_count = 0
        expired_count = 0

        for incident_dict in incidents_data:
            try:
                incident = IncidentSnapshot.from_dict(data=incident_dict)

                # Check age - skip old incidents
                try:
                    if (datetime.fromisoformat(incident.timestamp_iso)) < cutoff_time:
                        expired_count += 1
                        continue
                except ValueError:
                    pass  # Keep incidents with unparsable timestamps

                # Add to type-specific list
                if (incident_type := incident.incident_type) not in self._incidents_by_type:
                    self._incidents_by_type[incident_type] = []
                self._incidents_by_type[incident_type].append(incident)
                loaded_count += 1
            except (KeyError, ValueError) as err:
                _LOGGER.warning(  # i18n-log: ignore
                    "INCIDENT STORE: Failed to restore incident: %s",
                    err,
                )

        # Enforce per-type size limits after loading
        for incidents in self._incidents_by_type.values():
            # Sort by timestamp first
            incidents.sort(key=lambda i: i.timestamp_iso)
            # Remove oldest if over limit
            while len(incidents) > self._max_per_type:
                incidents.pop(0)

        if expired_count > 0:
            _LOGGER.debug(
                "INCIDENT STORE: Removed %d incidents older than %d days",
                expired_count,
                self._max_age_days,
            )

        if loaded_count > 0:
            _LOGGER.debug(
                "INCIDENT STORE: Loaded %d incidents from storage",
                loaded_count,
            )
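For orientation, the sketch below shows how a caller might use the IncidentStore API defined above: record an incident, then pull diagnostics. It is illustrative only and not part of the package diff; the storage and config_provider arguments stand in for objects implementing StorageProtocol and ConfigProviderProtocol, and the enum members IncidentType.CONNECTION_LOSS and IncidentSeverity.WARNING are assumed names, since the concrete members live elsewhere in aiohomematic.store.types and do not appear in this diff.

import asyncio

from aiohomematic.store.persistent.incident import IncidentStore
from aiohomematic.store.types import IncidentSeverity, IncidentType


async def record_and_inspect(storage, config_provider) -> None:
    # storage / config_provider: pre-built objects satisfying StorageProtocol
    # and ConfigProviderProtocol (their concrete classes are not in this diff).
    store = IncidentStore(storage=storage, config_provider=config_provider)

    # Appends to the per-type list, enforces the per-type limit, and schedules
    # a debounced save (save_delayed(delay=2.0)); nothing is loaded from disk here.
    await store.record_incident(
        incident_type=IncidentType.CONNECTION_LOSS,  # assumed enum member
        severity=IncidentSeverity.WARNING,           # assumed enum member
        message="Interface stopped responding",
        interface_id="BidCos-RF",
    )

    # The first diagnostics call lazily loads historical incidents from disk
    # and merges them with the current session.
    diagnostics = await store.get_diagnostics()
    print(diagnostics["total_incidents"], diagnostics["incidents_by_severity"])


# asyncio.run(record_and_inspect(my_storage, my_config_provider))

Note that record_incident() only appends to the current session; it is get_diagnostics() (or the other getters) that triggers the one-time load of historical incidents from disk.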
aiohomematic/store/persistent/paramset.py

@@ -0,0 +1,241 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2021-2026
"""
Paramset description registry for persisting parameter metadata.

This module provides ParamsetDescriptionRegistry which persists paramset descriptions
per interface and channel, and offers helpers to query parameters, paramset keys
and related channel addresses.
"""

from __future__ import annotations

from collections import defaultdict
from collections.abc import Mapping
import logging
from typing import TYPE_CHECKING, Any, Final

from aiohomematic.const import ADDRESS_SEPARATOR, ParameterData, ParamsetKey
from aiohomematic.interfaces import ParamsetDescriptionProviderProtocol, ParamsetDescriptionWriterProtocol
from aiohomematic.interfaces.model import DeviceRemovalInfoProtocol
from aiohomematic.property_decorators import DelegatedProperty
from aiohomematic.schemas import normalize_paramset_description
from aiohomematic.store.persistent.base import BasePersistentCache
from aiohomematic.store.types import InterfaceParamsetMap
from aiohomematic.support import get_split_channel_address

if TYPE_CHECKING:
    from aiohomematic.interfaces import ConfigProviderProtocol
    from aiohomematic.store.storage import StorageProtocol

_LOGGER: Final = logging.getLogger(__name__)


class ParamsetDescriptionRegistry(
    BasePersistentCache, ParamsetDescriptionProviderProtocol, ParamsetDescriptionWriterProtocol
):
    """Registry for paramset descriptions."""

    # Bump version when normalization logic changes
    SCHEMA_VERSION: int = 2

    __slots__ = ("_address_parameter_cache",)

    def __init__(
        self,
        *,
        storage: StorageProtocol,
        config_provider: ConfigProviderProtocol,
    ) -> None:
        """
        Initialize the paramset description cache.

        Args:
            storage: Storage instance for persistence.
            config_provider: Provider for configuration access.

        """
        # {(device_address, parameter), [channel_no]}
        self._address_parameter_cache: Final[dict[tuple[str, str], set[int | None]]] = {}
        super().__init__(
            storage=storage,
            config_provider=config_provider,
        )

    @staticmethod
    def _normalize_param_data_v1(*, param_data: dict[str, Any]) -> None:
        """Normalize parameter data from v1 schema (ensure OPERATIONS and FLAGS are integers)."""
        if "OPERATIONS" in param_data:
            try:
                param_data["OPERATIONS"] = int(param_data["OPERATIONS"] or 0)
            except (ValueError, TypeError):
                param_data["OPERATIONS"] = 0
        if "FLAGS" in param_data:
            try:
                param_data["FLAGS"] = int(param_data["FLAGS"] or 0)
            except (ValueError, TypeError):
                param_data["FLAGS"] = 0

    raw_paramset_descriptions: Final = DelegatedProperty[
        Mapping[str, Mapping[str, Mapping[ParamsetKey, Mapping[str, ParameterData]]]]
    ](path="_raw_paramset_descriptions")

    @property
    def _raw_paramset_descriptions(self) -> InterfaceParamsetMap:
        """Return the raw paramset descriptions (alias to _content)."""
        return self._content

    @property
    def size(self) -> int:
        """Return total number of paramset descriptions in cache."""
        return sum(
            len(channel_paramsets)
            for interface_paramsets in self._raw_paramset_descriptions.values()
            for channel_paramsets in interface_paramsets.values()
        )

    def add(
        self,
        *,
        interface_id: str,
        channel_address: str,
        paramset_key: ParamsetKey,
        paramset_description: dict[str, ParameterData],
    ) -> None:
        """Add paramset description to cache (normalized)."""
        # Normalize at ingestion
        normalized = normalize_paramset_description(paramset=paramset_description)
        self._raw_paramset_descriptions[interface_id][channel_address][paramset_key] = normalized
        self._add_address_parameter(channel_address=channel_address, paramsets=[normalized])

    def get_channel_addresses_by_paramset_key(
        self, *, interface_id: str, device_address: str
    ) -> Mapping[ParamsetKey, list[str]]:
        """Get device channel addresses."""
        channel_addresses: dict[ParamsetKey, list[str]] = {}
        interface_paramset_descriptions = self._raw_paramset_descriptions[interface_id]
        for (
            channel_address,
            paramset_descriptions,
        ) in interface_paramset_descriptions.items():
            if channel_address.startswith(device_address):
                for p_key in paramset_descriptions:
                    if (paramset_key := ParamsetKey(p_key)) not in channel_addresses:
                        channel_addresses[paramset_key] = []
                    channel_addresses[paramset_key].append(channel_address)

        return channel_addresses

    def get_channel_paramset_descriptions(
        self, *, interface_id: str, channel_address: str
    ) -> Mapping[ParamsetKey, Mapping[str, ParameterData]]:
        """Get paramset descriptions for a channel from cache."""
        return self._raw_paramset_descriptions[interface_id].get(channel_address, {})

    def get_parameter_data(
        self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey, parameter: str
    ) -> ParameterData | None:
        """Get parameter_data from cache."""
        return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key].get(parameter)

    def get_paramset_descriptions(
        self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey
    ) -> Mapping[str, ParameterData]:
        """Get paramset descriptions from cache."""
        return self._raw_paramset_descriptions[interface_id][channel_address][paramset_key]

    def get_paramset_keys(self, *, interface_id: str, channel_address: str) -> tuple[ParamsetKey, ...]:
        """Get paramset_keys from paramset descriptions cache."""
        return tuple(self._raw_paramset_descriptions[interface_id][channel_address])

    def has_interface_id(self, *, interface_id: str) -> bool:
        """Return if interface is in paramset_descriptions cache."""
        return interface_id in self._raw_paramset_descriptions

    def has_parameter(
        self, *, interface_id: str, channel_address: str, paramset_key: ParamsetKey, parameter: str
    ) -> bool:
        """Check if a parameter exists in the paramset description."""
        try:
            return parameter in self._raw_paramset_descriptions[interface_id][channel_address][paramset_key]
        except KeyError:
            return False

    def is_in_multiple_channels(self, *, channel_address: str, parameter: str) -> bool:
        """Check if parameter is in multiple channels per device."""
        if ADDRESS_SEPARATOR not in channel_address:
            return False
        if channels := self._address_parameter_cache.get(
            (get_split_channel_address(channel_address=channel_address)[0], parameter)
        ):
            return len(channels) > 1
        return False

    def remove_device(self, *, device: DeviceRemovalInfoProtocol) -> None:
        """Remove device paramset descriptions from cache."""
        if interface := self._raw_paramset_descriptions.get(device.interface_id):
            for channel_address in device.channels:
                if channel_address in interface:
                    del self._raw_paramset_descriptions[device.interface_id][channel_address]

    def _add_address_parameter(self, *, channel_address: str, paramsets: list[dict[str, Any]]) -> None:
        """Add address parameter to cache."""
        device_address, channel_no = get_split_channel_address(channel_address=channel_address)
        cache = self._address_parameter_cache
        for paramset in paramsets:
            if not paramset:
                continue
            for parameter in paramset:
                cache.setdefault((device_address, parameter), set()).add(channel_no)

    def _create_empty_content(self) -> dict[str, Any]:
        """Create empty content structure."""
        return defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))

    def _init_address_parameter_list(self) -> None:
        """
        Initialize a device_address/parameter list.

        Used to identify if a parameter name exists in multiple channels.
        """
        for channel_paramsets in self._raw_paramset_descriptions.values():
            for channel_address, paramsets in channel_paramsets.items():
                self._add_address_parameter(channel_address=channel_address, paramsets=list(paramsets.values()))

    def _migrate_schema(self, *, data: dict[str, Any], from_version: int) -> dict[str, Any]:
        """Migrate paramset descriptions from older schema."""
        if from_version < 2:
            # Migration from v1: normalize all parameter data
            self._migrate_v1_to_v2(data=data)
        return data

    def _migrate_v1_to_v2(self, *, data: dict[str, Any]) -> None:
        """Migrate paramset descriptions from v1 to v2 schema."""
        for interface_id, channels in data.items():
            if interface_id.startswith("_"):
                continue
            for paramsets in channels.values():
                for params in paramsets.values():
                    for param_data in params.values():
                        self._normalize_param_data_v1(param_data=param_data)

    def _process_loaded_content(self, *, data: dict[str, Any]) -> None:
        """Rebuild indexes from loaded data."""
        # Convert loaded regular dicts back to nested defaultdicts.
        # After JSON deserialization, _content is updated with regular dicts,
        # which breaks the defaultdict behavior for new keys.
        # We need to rebuild the proper defaultdict structure.
        self._content.clear()
        self._content.update(self._create_empty_content())
        for interface_id, channels in data.items():
            if interface_id.startswith("_"):  # Skip metadata keys
                continue
            for channel_address, paramsets in channels.items():
                for paramset_key_str, paramset_desc in paramsets.items():
                    paramset_key = ParamsetKey(paramset_key_str)
                    # Normalize each paramset description when loading
                    normalized = normalize_paramset_description(paramset=paramset_desc)
                    self._content[interface_id][channel_address][paramset_key] = normalized

        self._address_parameter_cache.clear()
        self._init_address_parameter_list()
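As a minimal sketch of the registry API above: the example adds one paramset description and then queries it back. It is not taken from the package; the storage and config_provider objects, the ParamsetKey.VALUES member, the channel address "VCU0000001:1", and the parameter data shown are placeholder assumptions for illustration.

from aiohomematic.const import ParamsetKey
from aiohomematic.store.persistent.paramset import ParamsetDescriptionRegistry


def index_and_query(storage, config_provider) -> None:
    # storage / config_provider: objects satisfying StorageProtocol and
    # ConfigProviderProtocol; concrete implementations are not in this diff.
    registry = ParamsetDescriptionRegistry(storage=storage, config_provider=config_provider)

    # add() normalizes the description and indexes (device_address, parameter),
    # which is what is_in_multiple_channels() later consults.
    registry.add(
        interface_id="BidCos-RF",
        channel_address="VCU0000001:1",
        paramset_key=ParamsetKey.VALUES,  # assumed member name
        paramset_description={"LEVEL": {"OPERATIONS": 7, "FLAGS": 1}},  # placeholder sample data
    )

    print(registry.get_paramset_keys(interface_id="BidCos-RF", channel_address="VCU0000001:1"))
    print(
        registry.has_parameter(
            interface_id="BidCos-RF",
            channel_address="VCU0000001:1",
            paramset_key=ParamsetKey.VALUES,
            parameter="LEVEL",
        )
    )
    print(registry.is_in_multiple_channels(channel_address="VCU0000001:1", parameter="LEVEL"))

Because _create_empty_content() builds nested defaultdicts, add() can index straight into interface and channel keys without pre-creating them; after a reload, _process_loaded_content() rebuilds that defaultdict structure and the (device_address, parameter) index.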