aiohomematic-2026.1.29-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiohomematic/__init__.py +110 -0
- aiohomematic/_log_context_protocol.py +29 -0
- aiohomematic/api.py +410 -0
- aiohomematic/async_support.py +250 -0
- aiohomematic/backend_detection.py +462 -0
- aiohomematic/central/__init__.py +103 -0
- aiohomematic/central/async_rpc_server.py +760 -0
- aiohomematic/central/central_unit.py +1152 -0
- aiohomematic/central/config.py +463 -0
- aiohomematic/central/config_builder.py +772 -0
- aiohomematic/central/connection_state.py +160 -0
- aiohomematic/central/coordinators/__init__.py +38 -0
- aiohomematic/central/coordinators/cache.py +414 -0
- aiohomematic/central/coordinators/client.py +480 -0
- aiohomematic/central/coordinators/connection_recovery.py +1141 -0
- aiohomematic/central/coordinators/device.py +1166 -0
- aiohomematic/central/coordinators/event.py +514 -0
- aiohomematic/central/coordinators/hub.py +532 -0
- aiohomematic/central/decorators.py +184 -0
- aiohomematic/central/device_registry.py +229 -0
- aiohomematic/central/events/__init__.py +104 -0
- aiohomematic/central/events/bus.py +1392 -0
- aiohomematic/central/events/integration.py +424 -0
- aiohomematic/central/events/types.py +194 -0
- aiohomematic/central/health.py +762 -0
- aiohomematic/central/rpc_server.py +353 -0
- aiohomematic/central/scheduler.py +794 -0
- aiohomematic/central/state_machine.py +391 -0
- aiohomematic/client/__init__.py +203 -0
- aiohomematic/client/_rpc_errors.py +187 -0
- aiohomematic/client/backends/__init__.py +48 -0
- aiohomematic/client/backends/base.py +335 -0
- aiohomematic/client/backends/capabilities.py +138 -0
- aiohomematic/client/backends/ccu.py +487 -0
- aiohomematic/client/backends/factory.py +116 -0
- aiohomematic/client/backends/homegear.py +294 -0
- aiohomematic/client/backends/json_ccu.py +252 -0
- aiohomematic/client/backends/protocol.py +316 -0
- aiohomematic/client/ccu.py +1857 -0
- aiohomematic/client/circuit_breaker.py +459 -0
- aiohomematic/client/config.py +64 -0
- aiohomematic/client/handlers/__init__.py +40 -0
- aiohomematic/client/handlers/backup.py +157 -0
- aiohomematic/client/handlers/base.py +79 -0
- aiohomematic/client/handlers/device_ops.py +1085 -0
- aiohomematic/client/handlers/firmware.py +144 -0
- aiohomematic/client/handlers/link_mgmt.py +199 -0
- aiohomematic/client/handlers/metadata.py +436 -0
- aiohomematic/client/handlers/programs.py +144 -0
- aiohomematic/client/handlers/sysvars.py +100 -0
- aiohomematic/client/interface_client.py +1304 -0
- aiohomematic/client/json_rpc.py +2068 -0
- aiohomematic/client/request_coalescer.py +282 -0
- aiohomematic/client/rpc_proxy.py +629 -0
- aiohomematic/client/state_machine.py +324 -0
- aiohomematic/const.py +2207 -0
- aiohomematic/context.py +275 -0
- aiohomematic/converter.py +270 -0
- aiohomematic/decorators.py +390 -0
- aiohomematic/exceptions.py +185 -0
- aiohomematic/hmcli.py +997 -0
- aiohomematic/i18n.py +193 -0
- aiohomematic/interfaces/__init__.py +407 -0
- aiohomematic/interfaces/central.py +1067 -0
- aiohomematic/interfaces/client.py +1096 -0
- aiohomematic/interfaces/coordinators.py +63 -0
- aiohomematic/interfaces/model.py +1921 -0
- aiohomematic/interfaces/operations.py +217 -0
- aiohomematic/logging_context.py +134 -0
- aiohomematic/metrics/__init__.py +125 -0
- aiohomematic/metrics/_protocols.py +140 -0
- aiohomematic/metrics/aggregator.py +534 -0
- aiohomematic/metrics/dataclasses.py +489 -0
- aiohomematic/metrics/emitter.py +292 -0
- aiohomematic/metrics/events.py +183 -0
- aiohomematic/metrics/keys.py +300 -0
- aiohomematic/metrics/observer.py +563 -0
- aiohomematic/metrics/stats.py +172 -0
- aiohomematic/model/__init__.py +189 -0
- aiohomematic/model/availability.py +65 -0
- aiohomematic/model/calculated/__init__.py +89 -0
- aiohomematic/model/calculated/climate.py +276 -0
- aiohomematic/model/calculated/data_point.py +315 -0
- aiohomematic/model/calculated/field.py +147 -0
- aiohomematic/model/calculated/operating_voltage_level.py +286 -0
- aiohomematic/model/calculated/support.py +232 -0
- aiohomematic/model/custom/__init__.py +214 -0
- aiohomematic/model/custom/capabilities/__init__.py +67 -0
- aiohomematic/model/custom/capabilities/climate.py +41 -0
- aiohomematic/model/custom/capabilities/light.py +87 -0
- aiohomematic/model/custom/capabilities/lock.py +44 -0
- aiohomematic/model/custom/capabilities/siren.py +63 -0
- aiohomematic/model/custom/climate.py +1130 -0
- aiohomematic/model/custom/cover.py +722 -0
- aiohomematic/model/custom/data_point.py +360 -0
- aiohomematic/model/custom/definition.py +300 -0
- aiohomematic/model/custom/field.py +89 -0
- aiohomematic/model/custom/light.py +1174 -0
- aiohomematic/model/custom/lock.py +322 -0
- aiohomematic/model/custom/mixins.py +445 -0
- aiohomematic/model/custom/profile.py +945 -0
- aiohomematic/model/custom/registry.py +251 -0
- aiohomematic/model/custom/siren.py +462 -0
- aiohomematic/model/custom/switch.py +195 -0
- aiohomematic/model/custom/text_display.py +289 -0
- aiohomematic/model/custom/valve.py +78 -0
- aiohomematic/model/data_point.py +1416 -0
- aiohomematic/model/device.py +1840 -0
- aiohomematic/model/event.py +216 -0
- aiohomematic/model/generic/__init__.py +327 -0
- aiohomematic/model/generic/action.py +40 -0
- aiohomematic/model/generic/action_select.py +62 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +31 -0
- aiohomematic/model/generic/data_point.py +177 -0
- aiohomematic/model/generic/dummy.py +150 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +56 -0
- aiohomematic/model/generic/sensor.py +76 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +33 -0
- aiohomematic/model/hub/__init__.py +100 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/connectivity.py +190 -0
- aiohomematic/model/hub/data_point.py +342 -0
- aiohomematic/model/hub/hub.py +864 -0
- aiohomematic/model/hub/inbox.py +135 -0
- aiohomematic/model/hub/install_mode.py +393 -0
- aiohomematic/model/hub/metrics.py +208 -0
- aiohomematic/model/hub/number.py +42 -0
- aiohomematic/model/hub/select.py +52 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +43 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/hub/update.py +221 -0
- aiohomematic/model/support.py +592 -0
- aiohomematic/model/update.py +140 -0
- aiohomematic/model/week_profile.py +1827 -0
- aiohomematic/property_decorators.py +719 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/accept_device_in_inbox.fn +51 -0
- aiohomematic/rega_scripts/create_backup_start.fn +28 -0
- aiohomematic/rega_scripts/create_backup_status.fn +89 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +97 -0
- aiohomematic/rega_scripts/get_backend_info.fn +25 -0
- aiohomematic/rega_scripts/get_inbox_devices.fn +61 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +31 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_service_messages.fn +83 -0
- aiohomematic/rega_scripts/get_system_update_info.fn +39 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +31 -0
- aiohomematic/rega_scripts/set_program_state.fn +17 -0
- aiohomematic/rega_scripts/set_system_variable.fn +19 -0
- aiohomematic/rega_scripts/trigger_firmware_update.fn +67 -0
- aiohomematic/schemas.py +256 -0
- aiohomematic/store/__init__.py +55 -0
- aiohomematic/store/dynamic/__init__.py +43 -0
- aiohomematic/store/dynamic/command.py +250 -0
- aiohomematic/store/dynamic/data.py +175 -0
- aiohomematic/store/dynamic/details.py +187 -0
- aiohomematic/store/dynamic/ping_pong.py +416 -0
- aiohomematic/store/persistent/__init__.py +71 -0
- aiohomematic/store/persistent/base.py +285 -0
- aiohomematic/store/persistent/device.py +233 -0
- aiohomematic/store/persistent/incident.py +380 -0
- aiohomematic/store/persistent/paramset.py +241 -0
- aiohomematic/store/persistent/session.py +556 -0
- aiohomematic/store/serialization.py +150 -0
- aiohomematic/store/storage.py +689 -0
- aiohomematic/store/types.py +526 -0
- aiohomematic/store/visibility/__init__.py +40 -0
- aiohomematic/store/visibility/parser.py +141 -0
- aiohomematic/store/visibility/registry.py +722 -0
- aiohomematic/store/visibility/rules.py +307 -0
- aiohomematic/strings.json +237 -0
- aiohomematic/support.py +706 -0
- aiohomematic/tracing.py +236 -0
- aiohomematic/translations/de.json +237 -0
- aiohomematic/translations/en.json +237 -0
- aiohomematic/type_aliases.py +51 -0
- aiohomematic/validator.py +128 -0
- aiohomematic-2026.1.29.dist-info/METADATA +296 -0
- aiohomematic-2026.1.29.dist-info/RECORD +188 -0
- aiohomematic-2026.1.29.dist-info/WHEEL +5 -0
- aiohomematic-2026.1.29.dist-info/entry_points.txt +2 -0
- aiohomematic-2026.1.29.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2026.1.29.dist-info/top_level.txt +1 -0
aiohomematic/model/data_point.py
@@ -0,0 +1,1416 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2021-2026
"""
Core data point model for AioHomematic.

This module defines the abstract base classes and concrete building blocks for
representing Homematic parameters as data points, handling their lifecycle,
I/O, and event propagation.

Highlights:
- CallbackDataPoint: Base for objects that expose subscriptions and timestamps
  (modified/refreshed) and manage subscription to update and removal events.
- BaseDataPoint/BaseParameterDataPoint: Concrete foundations for channel-bound
  data points, including type/flag handling, unit and multiplier normalization,
  value conversion, temporary write buffering, and path/name metadata.
- CallParameterCollector: Helper to batch multiple set/put operations and wait
  for events, optimizing command dispatch.
- bind_collector: Decorator to bind a collector to service methods conveniently.

The classes here are used by generic, custom, calculated, and hub data point
implementations to provide a uniform API for reading, writing, and observing
parameter values across all supported devices.
"""

from __future__ import annotations

from abc import ABC, abstractmethod
import asyncio
from collections.abc import Callable, Mapping
from contextvars import Token
from datetime import datetime, timedelta
from functools import wraps
from inspect import getfullargspec
import logging
from typing import Any, Final, TypeAlias, TypeVar, cast, overload

from aiohomematic import i18n, support as hms
from aiohomematic.async_support import loop_check
from aiohomematic.central.events import DataPointStateChangedEvent, DeviceRemovedEvent
from aiohomematic.const import (
    DEFAULT_MULTIPLIER,
    DP_KEY_VALUE,
    INIT_DATETIME,
    KEY_CHANNEL_OPERATION_MODE_VISIBILITY,
    NO_CACHE_ENTRY,
    WAIT_FOR_CALLBACK,
    CallSource,
    DataPointCategory,
    DataPointKey,
    DataPointUsage,
    EventData,
    Flag,
    InternalCustomID,
    Operations,
    Parameter,
    ParameterData,
    ParameterStatus,
    ParameterType,
    ParamsetKey,
    ProductGroup,
    ServiceScope,
    check_ignore_parameter_on_initial_load,
)
from aiohomematic.context import RequestContext, is_in_service, reset_request_context, set_request_context
from aiohomematic.decorators import get_service_calls, inspector
from aiohomematic.exceptions import AioHomematicException, BaseHomematicException
from aiohomematic.interfaces import (
    BaseDataPointProtocol,
    BaseParameterDataPointProtocol,
    CallbackDataPointProtocol,
    CentralInfoProtocol,
    ChannelProtocol,
    ClientProtocol,
    DeviceProtocol,
    EventBusProviderProtocol,
    EventPublisherProtocol,
    ParameterVisibilityProviderProtocol,
    ParamsetDescriptionProviderProtocol,
    TaskSchedulerProtocol,
)
from aiohomematic.interfaces.client import ValueAndParamsetOperationsProtocol
from aiohomematic.model.support import DataPointNameData, DataPointPathData, PathData, convert_value, generate_unique_id
from aiohomematic.property_decorators import (
    DelegatedProperty,
    Kind,
    _GenericProperty,
    config_property,
    hm_property,
    state_property,
)
from aiohomematic.support import LogContextMixin, PayloadMixin, log_boundary_error
from aiohomematic.type_aliases import (
    CallableAny,
    DataPointUpdatedHandler,
    DeviceRemovedHandler,
    ParamType,
    ServiceMethodMap,
    UnsubscribeCallback,
)

__all__ = [
    "BaseDataPoint",
    "BaseParameterDataPoint",
    "CallParameterCollector",
    "CallbackDataPoint",
    "bind_collector",
]


# Type variable used for decorator typing
CallableT = TypeVar("CallableT", bound=CallableAny)

_LOGGER: Final = logging.getLogger(__name__)

_CONFIGURABLE_CHANNEL: Final[tuple[str, ...]] = (
    "KEY_TRANSCEIVER",
    "MULTI_MODE_INPUT_TRANSMITTER",
)
_COLLECTOR_ARGUMENT_NAME: Final = "collector"
_FIX_UNIT_REPLACE: Final[Mapping[str, str]] = {
    '"': "",
    "100%": "%",
    "% rF": "%",
    "degree": "°C",
    "Lux": "lx",
    "m3": "m³",
}
_FIX_UNIT_BY_PARAM: Final[Mapping[str, str]] = {
    Parameter.ACTUAL_TEMPERATURE: "°C",
    Parameter.CURRENT_ILLUMINATION: "lx",
    Parameter.HUMIDITY: "%",
    Parameter.ILLUMINATION: "lx",
    Parameter.LEVEL: "%",
    Parameter.MASS_CONCENTRATION_PM_10_24H_AVERAGE: "µg/m³",
    Parameter.MASS_CONCENTRATION_PM_1_24H_AVERAGE: "µg/m³",
    Parameter.MASS_CONCENTRATION_PM_2_5_24H_AVERAGE: "µg/m³",
    Parameter.OPERATING_VOLTAGE: "V",
    Parameter.RSSI_DEVICE: "dBm",
    Parameter.RSSI_PEER: "dBm",
    Parameter.SUNSHINE_DURATION: "min",
    Parameter.WIND_DIRECTION: "°",
    Parameter.WIND_DIRECTION_RANGE: "°",
}
_MULTIPLIER_UNIT: Final[Mapping[str, float]] = {
    "100%": 100.0,
}

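# ---------------------------------------------------------------------------
# Illustrative sketch (not part of aiohomematic's data_point module): how
# unit-fix tables of the kind defined above are typically applied.  The helper
# below uses local copies of the tables and a hypothetical name; it only shows
# that a raw backend unit such as "100%" is mapped to a display unit via the
# replace table, while the multiplier table supplies the factor associated
# with that raw unit (the module's own _get_multiplier presumably consults
# _MULTIPLIER_UNIT in a similar way).
# ---------------------------------------------------------------------------
def _sketch_normalize_unit(raw_unit: str | None) -> tuple[str | None, float]:
    """Return (display_unit, multiplier) for a raw backend unit string."""
    replace = {"100%": "%", "% rF": "%", "Lux": "lx", "m3": "m³"}
    multipliers = {"100%": 100.0}
    if not raw_unit:
        return None, 1.0
    for needle, fixed in replace.items():
        if needle in raw_unit:
            return fixed, multipliers.get(raw_unit, 1.0)
    return raw_unit, multipliers.get(raw_unit, 1.0)


assert _sketch_normalize_unit("100%") == ("%", 100.0)
assert _sketch_normalize_unit("Lux") == ("lx", 1.0)
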
class CallbackDataPoint(ABC, CallbackDataPointProtocol, LogContextMixin):
    """
    Base class for data points supporting subscriptions.

    Provides event handling, subscription management, and timestamp tracking
    for data point updates and refreshes.
    """

    __slots__ = (
        "__weakref__",
        "_cached_enabled_default",
        "_cached_service_method_names",
        "_cached_service_methods",
        "_central_info",
        "_custom_id",
        "_published_event_at",
        "_event_bus_provider",
        "_event_publisher",
        "_modified_at",
        "_parameter_visibility_provider",
        "_paramset_description_provider",
        "_path_data",
        "_refreshed_at",
        "_registered_custom_ids",
        "_signature",
        "_subscription_counts",
        "_task_scheduler",
        "_temporary_modified_at",
        "_temporary_refreshed_at",
        "_unique_id",
    )

    _category = DataPointCategory.UNDEFINED

    def __init__(
        self,
        *,
        unique_id: str,
        central_info: CentralInfoProtocol,
        event_bus_provider: EventBusProviderProtocol,
        event_publisher: EventPublisherProtocol,
        task_scheduler: TaskSchedulerProtocol,
        paramset_description_provider: ParamsetDescriptionProviderProtocol,
        parameter_visibility_provider: ParameterVisibilityProviderProtocol,
    ) -> None:
        """Initialize the callback data point."""
        self._central_info: Final = central_info
        self._event_bus_provider: Final = event_bus_provider
        self._event_publisher: Final = event_publisher
        self._task_scheduler: Final = task_scheduler
        self._paramset_description_provider: Final = paramset_description_provider
        self._parameter_visibility_provider: Final = parameter_visibility_provider
        self._unique_id: Final = unique_id
        self._registered_custom_ids: set[str] = set()
        self._subscription_counts: dict[str, int] = {}
        self._custom_id: str | None = None
        self._path_data = self._get_path_data()
        self._published_event_at: datetime = INIT_DATETIME
        self._modified_at: datetime = INIT_DATETIME
        self._refreshed_at: datetime = INIT_DATETIME
        self._signature: Final = self._get_signature()
        self._temporary_modified_at: datetime = INIT_DATETIME
        self._temporary_refreshed_at: datetime = INIT_DATETIME

    def __str__(self) -> str:
        """Provide some useful information."""
        return f"path: {self.state_path}, name: {self.full_name}"

    @classmethod
    def default_category(cls) -> DataPointCategory:
        """Return, the default category of the data_point."""
        return cls._category

    custom_id: Final = DelegatedProperty[str | None](path="_custom_id")
    published_event_at: Final = DelegatedProperty[datetime](path="_published_event_at")
    set_path: Final = DelegatedProperty[str](path="_path_data.set_path")
    signature: Final = DelegatedProperty[str](path="_signature")
    state_path: Final = DelegatedProperty[str](path="_path_data.state_path")

    @property
    def _should_publish_data_point_updated_callback(self) -> bool:
        """Check if a data point has been updated or refreshed."""
        return True

    @property
    def category(self) -> DataPointCategory:
        """Return, the category of the data point."""
        return self._category

    @property
    @abstractmethod
    def full_name(self) -> str:
        """Return the full name of the data_point."""

    @property
    def is_refreshed(self) -> bool:
        """Return if the data_point has been refreshed (received a value)."""
        return self._refreshed_at > INIT_DATETIME

    @property
    def is_registered(self) -> bool:
        """Return if data_point is registered externally."""
        return self._custom_id is not None

    @property
    def is_status_valid(self) -> bool:
        """Return if the status indicates a valid value."""
        return True

    @property
    def is_valid(self) -> bool:
        """Return if the value is valid (refreshed and status is OK)."""
        return self.is_refreshed and self.is_status_valid

    @property
    def usage(self) -> DataPointUsage:
        """Return the data_point usage."""
        return DataPointUsage.DATA_POINT

    @config_property
    @abstractmethod
    def name(self) -> str:
        """Return the name of the data_point."""

    @config_property
    def unique_id(self) -> str:
        """Return the unique_id."""
        return self._unique_id

    @state_property
    def additional_information(self) -> dict[str, Any]:
        """Return additional information about the data point."""
        return {}

    @state_property
    @abstractmethod
    def available(self) -> bool:
        """Return the availability of the device."""

    @state_property
    def modified_at(self) -> datetime:
        """Return the last update datetime value."""
        if self._temporary_modified_at > self._modified_at:
            return self._temporary_modified_at
        return self._modified_at

    @state_property
    def modified_recently(self) -> bool:
        """Return the data point modified within 500 milliseconds."""
        if self._modified_at == INIT_DATETIME:
            return False
        return (datetime.now() - self._modified_at).total_seconds() < 0.5

    @state_property
    def published_event_recently(self) -> bool:
        """Return the data point published an event within 500 milliseconds."""
        if self._published_event_at == INIT_DATETIME:
            return False
        return (datetime.now() - self._published_event_at).total_seconds() < 0.5

    @state_property
    def refreshed_at(self) -> datetime:
        """Return the last refresh datetime value."""
        if self._temporary_refreshed_at > self._refreshed_at:
            return self._temporary_refreshed_at
        return self._refreshed_at

    @state_property
    def refreshed_recently(self) -> bool:
        """Return the data point refreshed within 500 milliseconds."""
        if self._refreshed_at == INIT_DATETIME:
            return False
        return (datetime.now() - self._refreshed_at).total_seconds() < 0.5

    @hm_property(cached=True)
    def enabled_default(self) -> bool:
        """Return, if data_point should be enabled based on usage attribute."""
        return self.usage in (
            DataPointUsage.CDP_PRIMARY,
            DataPointUsage.CDP_VISIBLE,
            DataPointUsage.DATA_POINT,
            DataPointUsage.EVENT,
        )

    @hm_property(cached=True)
    def service_method_names(self) -> tuple[str, ...]:
        """Return all service methods."""
        return tuple(self.service_methods.keys())

    @hm_property(cached=True)
    def service_methods(self) -> ServiceMethodMap:
        """Return all service methods."""
        return get_service_calls(obj=self)

    def cleanup_subscriptions(self) -> None:
        """
        Clean up all EventBus subscriptions for this data point.

        This should be called when the data point is being removed to prevent
        memory leaks from orphaned handlers. It clears all subscriptions
        registered with this data point's unique_id as the event_key.
        """
        self._event_bus_provider.event_bus.clear_subscriptions_by_key(event_key=self._unique_id)
        self._registered_custom_ids.clear()
        self._subscription_counts.clear()

    async def finalize_init(self) -> None:
        """Finalize the data point init action after model setup."""

    @loop_check
    def publish_data_point_updated_event(
        self,
        *,
        data_point: CallbackDataPointProtocol | None = None,
        custom_id: str | None = None,
        old_value: Any = None,
        new_value: Any = None,
    ) -> None:
        """Do what is needed when the value of the data_point has been updated/refreshed."""
        if not self._should_publish_data_point_updated_callback:
            return
        self._published_event_at = datetime.now()

        # Early exit if no subscribers - avoid creating unnecessary tasks
        if not self._registered_custom_ids:
            return

        # Capture current custom_ids as tuple to prevent issues if set is modified
        # during async iteration (e.g., if a handler unsubscribes during callback)
        custom_ids = tuple(self._registered_custom_ids)
        # Capture values for closure
        _old_value = old_value
        _new_value = new_value

        async def _publish_all_events() -> None:
            """
            Publish events to all registered custom_ids in a single task.

            Performance optimization: Instead of creating one task per subscriber,
            we create a single task that uses asyncio.gather() to publish all events
            concurrently. This reduces task creation overhead when there are many
            subscribers (common in Home Assistant with multiple data points).

            The return_exceptions=True ensures one failing handler doesn't prevent
            other handlers from receiving the event.
            """
            publish_tasks = [
                self._event_bus_provider.event_bus.publish(
                    event=DataPointStateChangedEvent(
                        timestamp=datetime.now(),
                        unique_id=self._unique_id,
                        custom_id=cid,
                        old_value=_old_value,
                        new_value=_new_value,
                    )
                )
                for cid in custom_ids
            ]
            await asyncio.gather(*publish_tasks, return_exceptions=True)

        # Single task for all events instead of one task per custom_id.
        # This batching approach significantly reduces scheduler overhead.
        self._task_scheduler.create_task(
            target=_publish_all_events,
            name=f"publish-dp-updated-events-{self._unique_id}",
        )

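    # -----------------------------------------------------------------------
    # Illustrative sketch (not part of aiohomematic's data_point module): the
    # fan-out pattern used by publish_data_point_updated_event above — a
    # single task that publishes to every subscriber via asyncio.gather with
    # return_exceptions=True, so one failing handler cannot suppress the rest.
    # The publish coroutine below is a stand-in, not an aiohomematic API.
    # -----------------------------------------------------------------------
    # async def _sketch_publish_to_all(subscriber_ids: tuple[str, ...]) -> list[object]:
    #     async def _publish(cid: str) -> str:
    #         if cid == "broken":
    #             raise RuntimeError("handler failed")
    #         return f"delivered to {cid}"
    #
    #     # One gather instead of one scheduled task per subscriber; exceptions
    #     # come back in the result list instead of cancelling other publishes.
    #     return await asyncio.gather(
    #         *(_publish(cid) for cid in subscriber_ids), return_exceptions=True
    #     )
    #
    # asyncio.run(_sketch_publish_to_all(("a", "broken", "b")))
    # -> ['delivered to a', RuntimeError('handler failed'), 'delivered to b']
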
    @loop_check
    def publish_device_removed_event(self) -> None:
        """Do what is needed when the data_point has been removed."""

        # Publish to EventBus asynchronously, then cleanup subscriptions
        async def _publish_device_removed_and_cleanup() -> None:
            await self._event_bus_provider.event_bus.publish(
                event=DeviceRemovedEvent(
                    timestamp=datetime.now(),
                    unique_id=self._unique_id,
                )
            )
            # Clean up subscriptions after event is published to prevent memory leaks
            self.cleanup_subscriptions()

        self._task_scheduler.create_task(
            target=_publish_device_removed_and_cleanup,
            name=f"publish-device-removed-{self._unique_id}",
        )

    def subscribe_to_data_point_updated(
        self, *, handler: DataPointUpdatedHandler, custom_id: str
    ) -> UnsubscribeCallback:
        """
        Subscribe to data_point updated event.

        Subscription pattern with reference counting:
        Multiple handlers can subscribe with the same custom_id (e.g., Home Assistant
        data point and its device tracker). We track subscription counts per custom_id
        so that the custom_id is only removed from _registered_custom_ids when ALL
        subscriptions for that custom_id have been unsubscribed.

        The wrapped_unsubscribe function handles the reference counting cleanup.
        """
        # Validate custom_id ownership - external custom_ids can only be registered once
        # Internal custom_ids (system use) bypass this check
        if custom_id not in InternalCustomID:
            if self._custom_id is not None and self._custom_id != custom_id:
                raise AioHomematicException(
                    i18n.tr(
                        key="exception.model.data_point.subscribe_handler.already_registered",
                        full_name=self.full_name,
                        custom_id=self._custom_id,
                    )
                )
            self._custom_id = custom_id

        # Track registration for publish method - this set drives event publishing
        self._registered_custom_ids.add(custom_id)

        # Create adapter that filters for this data point's events with matching custom_id.
        # The EventBus receives events for ALL data points, so we filter by unique_id
        # and custom_id to ensure only the correct handler receives each event.
        def event_handler(*, event: DataPointStateChangedEvent) -> None:
            if event.unique_id == self._unique_id and event.custom_id == custom_id:
                handler(data_point=self, custom_id=custom_id)

        unsubscribe = self._event_bus_provider.event_bus.subscribe(
            event_type=DataPointStateChangedEvent,
            event_key=self._unique_id,
            handler=event_handler,
        )

        # Reference counting: Track how many subscriptions exist for each custom_id.
        # This enables multiple handlers per custom_id while ensuring proper cleanup.
        current_count = self._subscription_counts.get(custom_id, 0)
        self._subscription_counts[custom_id] = current_count + 1

        def wrapped_unsubscribe() -> None:
            """
            Unsubscribe and manage reference count.

            Only removes custom_id from _registered_custom_ids when count reaches 0,
            ensuring publish_data_point_updated_event still notifies other handlers
            that share the same custom_id.
            """
            unsubscribe()
            # Decrement subscription count
            count = self._subscription_counts.get(custom_id, 1)
            count -= 1
            if count <= 0:
                # Last subscription for this custom_id - safe to remove from tracking
                self._registered_custom_ids.discard(custom_id)
                self._subscription_counts.pop(custom_id, None)
            else:
                self._subscription_counts[custom_id] = count

        return wrapped_unsubscribe

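    # -----------------------------------------------------------------------
    # Illustrative sketch (not part of aiohomematic's data_point module): the
    # per-custom_id reference counting that wrapped_unsubscribe above performs,
    # reduced to plain dict/set bookkeeping so the contract is easy to see.
    # All names here are stand-ins, not aiohomematic APIs.
    # -----------------------------------------------------------------------
    @staticmethod
    def _sketch_refcounted_subscriptions() -> None:
        registered: set[str] = set()
        counts: dict[str, int] = {}

        def subscribe(custom_id: str) -> Callable[[], None]:
            registered.add(custom_id)
            counts[custom_id] = counts.get(custom_id, 0) + 1

            def unsubscribe() -> None:
                remaining = counts.get(custom_id, 1) - 1
                if remaining <= 0:
                    registered.discard(custom_id)
                    counts.pop(custom_id, None)
                else:
                    counts[custom_id] = remaining

            return unsubscribe

        first = subscribe("hass")
        second = subscribe("hass")
        first()
        assert "hass" in registered      # one subscription still active
        second()
        assert "hass" not in registered  # last unsubscribe removes the custom_id
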
    def subscribe_to_device_removed(self, *, handler: DeviceRemovedHandler) -> UnsubscribeCallback:
        """Subscribe to the device removed event."""

        # Create adapter that filters for this data point's events
        def event_handler(*, event: DeviceRemovedEvent) -> None:
            if event.unique_id == self._unique_id:
                handler()

        return self._event_bus_provider.event_bus.subscribe(
            event_type=DeviceRemovedEvent,
            event_key=self._unique_id,
            handler=event_handler,
        )

    def subscribe_to_internal_data_point_updated(self, *, handler: DataPointUpdatedHandler) -> UnsubscribeCallback:
        """Subscribe to internal data_point updated event."""
        return self.subscribe_to_data_point_updated(handler=handler, custom_id=InternalCustomID.DEFAULT)

    @abstractmethod
    def _get_path_data(self) -> PathData:
        """Return the path data."""

    @abstractmethod
    def _get_signature(self) -> str:
        """Return the signature of the data_point."""

    def _reset_temporary_timestamps(self) -> None:
        """Reset the temporary timestamps."""
        self._set_temporary_modified_at(modified_at=INIT_DATETIME)
        self._set_temporary_refreshed_at(refreshed_at=INIT_DATETIME)

    def _set_modified_at(self, *, modified_at: datetime) -> None:
        """Set modified_at to current datetime."""
        self._modified_at = modified_at
        self._set_refreshed_at(refreshed_at=modified_at)

    def _set_refreshed_at(self, *, refreshed_at: datetime) -> None:
        """Set refreshed_at to current datetime."""
        self._refreshed_at = refreshed_at

    def _set_temporary_modified_at(self, *, modified_at: datetime) -> None:
        """Set temporary_modified_at to current datetime."""
        self._temporary_modified_at = modified_at
        self._set_temporary_refreshed_at(refreshed_at=modified_at)

    def _set_temporary_refreshed_at(self, *, refreshed_at: datetime) -> None:
        """Set temporary_refreshed_at to current datetime."""
        self._temporary_refreshed_at = refreshed_at

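# ---------------------------------------------------------------------------
# Illustrative sketch (not part of aiohomematic's data_point module): the
# timestamp precedence CallbackDataPoint implements above — a temporary
# (optimistic) timestamp overrides the confirmed one only while it is newer.
# The tiny class below is a stand-in, not an aiohomematic type.
# ---------------------------------------------------------------------------
from dataclasses import dataclass


@dataclass
class _SketchTimestamps:
    confirmed_modified_at: datetime = datetime.min
    temporary_modified_at: datetime = datetime.min

    @property
    def modified_at(self) -> datetime:
        # Same rule as CallbackDataPoint.modified_at: prefer the temporary
        # timestamp when it is newer than the confirmed one.
        if self.temporary_modified_at > self.confirmed_modified_at:
            return self.temporary_modified_at
        return self.confirmed_modified_at


_ts = _SketchTimestamps()
_ts.temporary_modified_at = datetime(2026, 1, 1, 12, 0, 0)
assert _ts.modified_at == datetime(2026, 1, 1, 12, 0, 0)
_ts.confirmed_modified_at = datetime(2026, 1, 1, 12, 0, 5)
assert _ts.modified_at == datetime(2026, 1, 1, 12, 0, 5)
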
class BaseDataPoint(CallbackDataPoint, BaseDataPointProtocol, PayloadMixin):
    """
    Base class for channel-bound data points.

    Extends CallbackDataPoint with channel/device associations and provides
    the foundation for generic, custom, and calculated data point implementations.
    """

    __slots__ = (
        "_cached_dpk",
        "_cached_name",
        "_cached_requires_polling",
        "_channel",
        "_client",
        "_data_point_name_data",
        "_device",
        "_forced_usage",
        "_is_in_multiple_channels",
        "_timer_on_time",
        "_timer_on_time_end",
    )

    _ignore_multiple_channels_for_name: bool = False

    def __init__(
        self,
        *,
        channel: ChannelProtocol,
        unique_id: str,
        is_in_multiple_channels: bool,
    ) -> None:
        """Initialize the data_point."""
        PayloadMixin.__init__(self)
        self._channel: Final[ChannelProtocol] = channel
        self._device: Final[DeviceProtocol] = channel.device
        super().__init__(
            unique_id=unique_id,
            central_info=channel.device.central_info,
            event_bus_provider=channel.device.event_bus_provider,
            event_publisher=channel.device.event_publisher,
            task_scheduler=channel.device.task_scheduler,
            paramset_description_provider=channel.device.paramset_description_provider,
            parameter_visibility_provider=channel.device.parameter_visibility_provider,
        )
        self._is_in_multiple_channels: Final = is_in_multiple_channels
        self._client: Final[ClientProtocol] = channel.device.client
        self._forced_usage: DataPointUsage | None = None
        self._data_point_name_data: Final = self._get_data_point_name()
        self._timer_on_time: float | None = None
        self._timer_on_time_end: datetime = INIT_DATETIME

    available: Final = DelegatedProperty[bool](path="_device.available", kind=Kind.STATE)
    channel: Final = DelegatedProperty[ChannelProtocol](path="_channel", log_context=True)
    device: Final = DelegatedProperty[DeviceProtocol](path="_device")
    full_name: Final = DelegatedProperty[str](path="_data_point_name_data.full_name")
    function: Final = DelegatedProperty[str | None](path="_channel.function")
    is_in_multiple_channels: Final = DelegatedProperty[bool](path="_is_in_multiple_channels")
    name: Final = DelegatedProperty[str](path="_data_point_name_data.name", kind=Kind.CONFIG, cached=True)
    name_data: Final = DelegatedProperty[DataPointNameData](path="_data_point_name_data")
    room: Final = DelegatedProperty[str | None](path="_channel.room")
    rooms: Final = DelegatedProperty[set[str]](path="_channel.rooms")
    timer_on_time = DelegatedProperty[float | None](path="_timer_on_time")

    @property
    def timer_on_time_running(self) -> bool:
        """Return if on_time is running."""
        return datetime.now() <= self._timer_on_time_end

    @property
    def usage(self) -> DataPointUsage:
        """Return the data_point usage."""
        return self._get_data_point_usage()

    def force_usage(self, *, forced_usage: DataPointUsage) -> None:
        """Set the data_point usage."""
        self._forced_usage = forced_usage

    def get_and_start_timer(self) -> float | None:
        """Return the on_time and set the end time."""
        if self.timer_on_time_running and self._timer_on_time is not None and self._timer_on_time <= 0:
            self.reset_timer_on_time()
            return -1
        if self._timer_on_time is None:
            self.reset_timer_on_time()
            return None
        on_time = self._timer_on_time
        self._timer_on_time = None
        self._timer_on_time_end = datetime.now() + timedelta(seconds=on_time)
        return on_time

    @abstractmethod
    @inspector(re_raise=False)
    async def load_data_point_value(self, *, call_source: CallSource, direct_call: bool = False) -> None:
        """Initialize the data_point data."""

    async def on_config_changed(self) -> None:
        """Do what is needed on device config change."""

    def reset_timer_on_time(self) -> None:
        """Reset the on_time."""
        self._timer_on_time = None
        self._timer_on_time_end = INIT_DATETIME

    def set_timer_on_time(self, *, on_time: float) -> None:
        """Set the on_time."""
        self._timer_on_time = on_time
        self._timer_on_time_end = INIT_DATETIME

    @abstractmethod
    def _get_data_point_name(self) -> DataPointNameData:
        """Generate the name for the data_point."""

    @abstractmethod
    def _get_data_point_usage(self) -> DataPointUsage:
        """Generate the usage for the data_point."""

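# ---------------------------------------------------------------------------
# Illustrative sketch (not part of aiohomematic's data_point module): the
# one-shot on_time contract of BaseDataPoint above.  set_timer_on_time stores
# a value that get_and_start_timer hands out exactly once, starting the
# running window; further calls return None until a new on_time is set.
# Simplified stand-in, not an aiohomematic type.
# ---------------------------------------------------------------------------
class _SketchOnTimeTimer:
    def __init__(self) -> None:
        self._on_time: float | None = None
        self._end: datetime = datetime.min

    def set_timer_on_time(self, on_time: float) -> None:
        self._on_time = on_time
        self._end = datetime.min

    def get_and_start_timer(self) -> float | None:
        if self._on_time is None:
            return None
        on_time, self._on_time = self._on_time, None
        self._end = datetime.now() + timedelta(seconds=on_time)
        return on_time


_timer = _SketchOnTimeTimer()
_timer.set_timer_on_time(30.0)
assert _timer.get_and_start_timer() == 30.0  # consumed once
assert _timer.get_and_start_timer() is None  # until set again
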
class BaseParameterDataPoint[
    ParameterT: ParamType,
    InputParameterT: ParamType,
](BaseDataPoint, BaseParameterDataPointProtocol[ParameterT | None]):
    """
    Base class for parameter-backed data points with typed values.

    Provides value handling, unit conversion, validation, and RPC communication
    for data points mapped to Homematic device parameters.
    """

    __slots__ = (
        "_cached__enabled_by_channel_operation_mode",
        "_current_value",
        "_default",
        "_enum_value_is_index",
        "_ignore_on_initial_load",
        "_is_forced_sensor",
        "_is_un_ignored",
        "_max",
        "_min",
        "_multiplier",
        "_operations",
        "_parameter",
        "_paramset_key",
        "_last_non_default_value",
        "_raw_unit",
        "_service",
        "_special",
        "_state_uncertain",
        "_status_dpk",
        "_status_parameter",
        "_status_value",
        "_status_value_list",
        "_temporary_value",
        "_type",
        "_unit",
        "_values",
        "_visible",
    )

    def __init__(
        self,
        *,
        channel: ChannelProtocol,
        paramset_key: ParamsetKey,
        parameter: str,
        parameter_data: ParameterData,
        unique_id_prefix: str = "",
    ) -> None:
        """Initialize the data_point."""
        self._paramset_key: Final = paramset_key
        # required for name in BaseDataPoint
        self._parameter: Final[str] = parameter
        self._ignore_on_initial_load: Final[bool] = check_ignore_parameter_on_initial_load(parameter=parameter)

        super().__init__(
            channel=channel,
            unique_id=generate_unique_id(
                config_provider=channel.device.config_provider,
                address=channel.address,
                parameter=parameter,
                prefix=unique_id_prefix,
            ),
            is_in_multiple_channels=channel.device.paramset_description_provider.is_in_multiple_channels(
                channel_address=channel.address, parameter=parameter
            ),
        )
        self._is_un_ignored: Final[bool] = self._parameter_visibility_provider.parameter_is_un_ignored(
            channel=channel,
            paramset_key=self._paramset_key,
            parameter=self._parameter,
            custom_only=True,
        )
        self._current_value: ParameterT | None = None
        self._last_non_default_value: ParameterT | None = None
        self._temporary_value: ParameterT | None = None

        self._state_uncertain: bool = True
        self._is_forced_sensor: bool = False
        self._assign_parameter_data(parameter_data=parameter_data)

        # Initialize STATUS parameter support
        self._status_parameter: str | None = self._detect_status_parameter()
        self._status_value: ParameterStatus | None = None
        self._status_dpk: DataPointKey | None = None
        self._status_value_list: tuple[str, ...] | None = None
        if self._status_parameter:
            self._status_dpk = DataPointKey(
                interface_id=self._device.interface_id,
                channel_address=self._channel.address,
                paramset_key=self._paramset_key,
                parameter=self._status_parameter,
            )
            # Cache the VALUE_LIST for the status parameter
            status_param_data = self._paramset_description_provider.get_parameter_data(
                interface_id=self._device.interface_id,
                channel_address=self._channel.address,
                paramset_key=self._paramset_key,
                parameter=self._status_parameter,
            )
            if status_param_data and (value_list := status_param_data.get("VALUE_LIST")):
                self._status_value_list = tuple(value_list)

    default: Final = DelegatedProperty[ParameterT](path="_default")
    hmtype: Final = DelegatedProperty[ParameterType](path="_type")
    ignore_on_initial_load: Final = DelegatedProperty[bool](path="_ignore_on_initial_load")
    is_forced_sensor: Final = DelegatedProperty[bool](path="_is_forced_sensor")
    is_un_ignored: Final = DelegatedProperty[bool](path="_is_un_ignored")
    last_non_default_value: Final = DelegatedProperty[ParameterT | None](path="_last_non_default_value")
    max: Final = DelegatedProperty[ParameterT](path="_max", kind=Kind.CONFIG)
    min: Final = DelegatedProperty[ParameterT](path="_min", kind=Kind.CONFIG)
    multiplier: Final = DelegatedProperty[float](path="_multiplier")
    parameter: Final = DelegatedProperty[str](path="_parameter", log_context=True)
    paramset_key: Final = DelegatedProperty[ParamsetKey](path="_paramset_key")
    raw_unit: Final = DelegatedProperty[str | None](path="_raw_unit")
    service: Final = DelegatedProperty[bool](path="_service")
    status: Final = DelegatedProperty[ParameterStatus | None](path="_status_value")
    status_dpk: Final = DelegatedProperty[DataPointKey | None](path="_status_dpk")
    status_parameter: Final = DelegatedProperty[str | None](path="_status_parameter")
    unit: Final = DelegatedProperty[str | None](path="_unit", kind=Kind.CONFIG)
    values: Final = DelegatedProperty[tuple[str, ...] | None](path="_values", kind=Kind.CONFIG)
    visible: Final = DelegatedProperty[bool](path="_visible")

    @property
    def _value(self) -> ParameterT | None:
        """Return the value of the data_point."""
        return self._temporary_value if self._temporary_refreshed_at > self._refreshed_at else self._current_value

    @property
    def category(self) -> DataPointCategory:
        """Return, the category of the data_point."""
        return DataPointCategory.SENSOR if self._is_forced_sensor else self._category

    @property
    def has_events(self) -> bool:
        """Return, if data_point supports events."""
        return bool(self._operations & Operations.EVENT)

    @property
    def has_status_parameter(self) -> bool:
        """Return if this parameter has a paired STATUS parameter."""
        return self._status_parameter is not None

    @property
    def is_readable(self) -> bool:
        """Return, if data_point is readable."""
        return bool(self._operations & Operations.READ)

    @property
    def is_status_valid(self) -> bool:
        """Return if the status indicates a valid value (NORMAL, UNKNOWN, or no STATUS parameter)."""
        if self._status_value is None:
            return True
        # UNKNOWN means "not yet known" (e.g., during startup) - treat as valid for is_valid check
        return self._status_value in (ParameterStatus.NORMAL, ParameterStatus.UNKNOWN)

    @property
    def is_unit_fixed(self) -> bool:
        """Return if the unit is fixed."""
        return self._raw_unit != self._unit

    @property
    def is_writable(self) -> bool:
        """Return, if data_point is writable."""
        return False if self._is_forced_sensor else bool(self._operations & Operations.WRITE)

    @property
    def state_uncertain(self) -> bool:
        """Return the state uncertain status."""
        return self._state_uncertain

    @property
    def unconfirmed_last_value_send(self) -> ParameterT:
        """Return the unconfirmed value send for the data_point."""
        return cast(
            ParameterT,
            self._client.last_value_send_tracker.get_last_value_send(dpk=self.dpk),
        )

    @config_property
    def unique_id(self) -> str:
        """Return the unique_id."""
        return f"{self._unique_id}_{DataPointCategory.SENSOR}" if self._is_forced_sensor else self._unique_id

    @hm_property(cached=True)
    def _enabled_by_channel_operation_mode(self) -> bool | None:
        """Return, if the data_point/event must be enabled."""
        if self._channel.type_name not in _CONFIGURABLE_CHANNEL:
            return None
        if self._parameter not in KEY_CHANNEL_OPERATION_MODE_VISIBILITY:
            return None
        if (cop := self._channel.operation_mode) is None:
            return None
        return cop in KEY_CHANNEL_OPERATION_MODE_VISIBILITY[self._parameter]

    @hm_property(cached=True)
    def dpk(self) -> DataPointKey:
        """Return data_point key value."""
        return DataPointKey(
            interface_id=self._device.interface_id,
            channel_address=self._channel.address,
            paramset_key=self._paramset_key,
            parameter=self._parameter,
        )

    @hm_property(cached=True)
    def requires_polling(self) -> bool:
        """Return whether the data_point requires polling."""
        return not self._channel.device.client.capabilities.push_updates or (
            self._channel.device.product_group in (ProductGroup.HM, ProductGroup.HMW)
            and self._paramset_key == ParamsetKey.MASTER
        )

    @abstractmethod
    async def event(self, *, value: Any, received_at: datetime) -> None:
        """Handle event for which this handler has subscribed."""

    def force_to_sensor(self) -> None:
        """Change the category of the data_point."""
        if self.category == DataPointCategory.SENSOR:
            _LOGGER.debug(
                "Category for %s is already %s. Doing nothing",
                self.full_name,
                DataPointCategory.SENSOR,
            )
            return
        if self.category not in (
            DataPointCategory.NUMBER,
            DataPointCategory.SELECT,
            DataPointCategory.TEXT,
        ):
            _LOGGER.debug(
                "Category %s for %s cannot be changed to %s",
                self.category,
                self.full_name,
                DataPointCategory.SENSOR,
            )
        _LOGGER.debug(
            "Changing the category of %s to %s (read-only)",
            self.full_name,
            DataPointCategory.SENSOR,
        )
        self._is_forced_sensor = True

    def get_event_data(self, *, value: Any = None) -> EventData:
        """Get the event_data."""
        return EventData(
            interface_id=self._device.interface_id,
            model=self._device.model,
            device_address=self._device.address,
            channel_no=self._channel.no,
            parameter=self._parameter,
            value=value,
        )

    @inspector(re_raise=False)
    async def load_data_point_value(self, *, call_source: CallSource, direct_call: bool = False) -> None:
        """Initialize the data_point data."""
        if (self._ignore_on_initial_load or self._channel.device.ignore_on_initial_load) and call_source in (
            CallSource.HM_INIT,
            CallSource.HA_INIT,
        ):
            # For ignored parameters, only try to load from cache (no RPC call).
            # This allows calculated data points to get their values on restart
            # without waking up battery-powered devices.
            if (
                self._paramset_key == ParamsetKey.VALUES
                and (
                    cached_value := self._device.data_cache_provider.get_data(
                        interface=self._device.interface,
                        channel_address=self._channel.address,
                        parameter=self._parameter,
                    )
                )
                != NO_CACHE_ENTRY
            ):
                self.write_value(value=cached_value, write_at=datetime.now())
            return

        if direct_call is False and hms.changed_within_seconds(last_change=self._refreshed_at):
            return

        # Check, if data_point is readable
        if not self.is_readable:
            return

        self.write_value(
            value=await self._device.value_cache.get_value(
                dpk=self.dpk,
                call_source=call_source,
                direct_call=direct_call,
            ),
            write_at=datetime.now(),
        )

    async def on_config_changed(self) -> None:
        """Do what is needed on device config change."""
        await super().on_config_changed()

        # update parameter_data
        self.update_parameter_data()
        # reload master data
        if self.is_readable and self._paramset_key == ParamsetKey.MASTER:
            await self.load_data_point_value(call_source=CallSource.MANUAL_OR_SCHEDULED, direct_call=True)

    def set_last_non_default_value(self, *, value: ParameterT | None) -> None:
        """Set the last non default value."""
        self._last_non_default_value = value

    def update_parameter_data(self) -> None:
        """Update parameter data."""
        if parameter_data := self._paramset_description_provider.get_parameter_data(
            interface_id=self._device.interface_id,
            channel_address=self._channel.address,
            paramset_key=self._paramset_key,
            parameter=self._parameter,
        ):
            self._assign_parameter_data(parameter_data=parameter_data)

    def update_status(self, *, status_value: int | str) -> None:
        """Update the status from a STATUS parameter event only if changed."""
        new_status: ParameterStatus | None = None
        # Backend may send integer indices - convert using cached VALUE_LIST
        if (
            isinstance(status_value, int)
            and self._status_value_list
            and 0 <= status_value < len(self._status_value_list)
        ):
            status_value = self._status_value_list[status_value]
        if isinstance(status_value, str) and status_value in ParameterStatus.__members__:
            new_status = ParameterStatus(status_value)

        if new_status is None:
            _LOGGER.warning(  # i18n-log: ignore
                "UPDATE_STATUS: Invalid status value %s for %s, ignoring",
                status_value,
                self.full_name,
            )
            return

        # Only update and notify if status actually changed
        if self._status_value == new_status:
            return

        self._status_value = new_status
        self.publish_data_point_updated_event()

    def write_temporary_value(self, *, value: Any, write_at: datetime) -> None:
        """Update the temporary value of the data_point."""
        self._reset_temporary_value()

        old_value = self._value
        temp_value = self._convert_value(value=value)
        if old_value == temp_value:
            self._set_temporary_refreshed_at(refreshed_at=write_at)
        else:
            self._set_temporary_modified_at(modified_at=write_at)
            self._temporary_value = temp_value
            self._state_uncertain = True
        self.publish_data_point_updated_event(old_value=old_value, new_value=temp_value)

    def write_value(self, *, value: Any, write_at: datetime) -> tuple[ParameterT | None, ParameterT | None]:
        """Update value of the data_point."""
        self._reset_temporary_value()

        old_value = self._current_value
        if value == NO_CACHE_ENTRY:
            if self.refreshed_at != INIT_DATETIME:
                self._state_uncertain = True
                self.publish_data_point_updated_event(old_value=old_value, new_value=None)
            return (old_value, None)

        new_value = self._convert_value(value=value)
        if old_value == new_value:
            self._set_refreshed_at(refreshed_at=write_at)
        else:
            self._set_modified_at(modified_at=write_at)
            self._current_value = new_value
            # Track last user value: store new value only if it differs from default
            # This is used for "restore last value" scenarios (e.g., dimmer brightness)
            if new_value != self._default:
                self._last_non_default_value = new_value
        self._state_uncertain = False
        self.publish_data_point_updated_event(old_value=old_value, new_value=new_value)
        return (old_value, new_value)

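    # -----------------------------------------------------------------------
    # Illustrative sketch (not part of aiohomematic's data_point module): the
    # write_value bookkeeping above in miniature — an unchanged value only
    # refreshes the timestamp, a changed value marks a modification and, when
    # it differs from the parameter default, is remembered as the last
    # non-default value ("restore previous brightness"-style behavior).
    # Names are stand-ins, not aiohomematic APIs.
    # -----------------------------------------------------------------------
    @staticmethod
    def _sketch_write_value(state: dict, value: float, default: float, now: datetime) -> None:
        if state.get("value") == value:
            state["refreshed_at"] = now              # same value: refresh only
        else:
            state["modified_at"] = now               # new value: modification
            state["refreshed_at"] = now
            state["value"] = value
            if value != default:
                state["last_non_default_value"] = value

    # _state: dict = {}
    # _sketch_write_value(_state, 0.8, default=0.0, now=datetime(2026, 1, 1))
    # _sketch_write_value(_state, 0.8, default=0.0, now=datetime(2026, 1, 2))
    # -> modified_at stays 2026-01-01, refreshed_at advances to 2026-01-02,
    #    last_non_default_value == 0.8
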
    def _assign_parameter_data(self, *, parameter_data: ParameterData) -> None:
        """Assign parameter data to instance variables."""
        self._type: ParameterType = ParameterType(parameter_data["TYPE"])
        self._values = tuple(parameter_data["VALUE_LIST"]) if parameter_data.get("VALUE_LIST") else None
        # Determine if ENUM values should be sent as index (int) or string.
        # HM devices use integer MIN/MAX/DEFAULT → send as index.
        # HmIP devices use string MIN/MAX/DEFAULT → send as string.
        raw_min = parameter_data["MIN"]
        self._enum_value_is_index: bool = (
            self._type == ParameterType.ENUM and self._values is not None and isinstance(raw_min, int)
        )
        self._max: ParameterT = self._convert_value(value=parameter_data["MAX"])
        self._min: ParameterT = self._convert_value(value=raw_min)
        self._default: ParameterT = self._convert_value(value=parameter_data.get("DEFAULT")) or self._min
        flags: int = parameter_data["FLAGS"]
        self._visible: bool = flags & Flag.VISIBLE == Flag.VISIBLE
        self._service: bool = flags & Flag.SERVICE == Flag.SERVICE
        self._operations: int = parameter_data["OPERATIONS"]
        self._special: Mapping[str, Any] | None = parameter_data.get("SPECIAL")
        self._raw_unit: str | None = parameter_data.get("UNIT")
        self._unit: str | None = self._cleanup_unit(raw_unit=self._raw_unit)
        self._multiplier: float = self._get_multiplier(raw_unit=self._raw_unit)

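    # Illustration of the ENUM handling in _assign_parameter_data above (hypothetical
    # paramset data, for illustration only):
    #   HM:   {"TYPE": "ENUM", "MIN": 0, "MAX": 2, "VALUE_LIST": ("CLOSED", "TILTED", "OPEN")}
    #         -> _enum_value_is_index is True (integer MIN), values are sent as indices.
    #   HmIP: {"TYPE": "ENUM", "MIN": "CLOSED", "MAX": "OPEN", "VALUE_LIST": (...)}
    #         -> _enum_value_is_index is False (string MIN), values are sent as strings.
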
    def _cleanup_unit(self, *, raw_unit: str | None) -> str | None:
        """Replace given unit."""
        if new_unit := _FIX_UNIT_BY_PARAM.get(self._parameter):
            return new_unit
        if not raw_unit:
            return None
        for check, fix in _FIX_UNIT_REPLACE.items():
            if check in raw_unit:
                return fix
        return raw_unit

    def _convert_value(self, *, value: Any) -> ParameterT:
        """Convert the given value to ParameterT."""
        if value is None:
            return None  # type: ignore[return-value]
        # Handle empty strings from CCU for numeric types (e.g., "" for LEVEL_2 when no slats)
        if value == "" and self._type in (ParameterType.FLOAT, ParameterType.INTEGER):
            return None  # type: ignore[return-value]
        try:
            if (
                self._type == ParameterType.BOOL
                and self._values is not None
                and value is not None
                and isinstance(value, str)
            ):
                return cast(
                    ParameterT,
                    convert_value(
                        value=self._values.index(value),
                        target_type=self._type,
                        value_list=self.values,
                    ),
                )
            return cast(ParameterT, convert_value(value=value, target_type=self._type, value_list=self.values))
        except (ValueError, TypeError):  # pragma: no cover
            _LOGGER.debug(
                "CONVERT_VALUE: conversion failed for %s, %s, %s, value: [%s]",
                self._device.interface_id,
                self._channel.address,
                self._parameter,
                value,
            )
            return None  # type: ignore[return-value]

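    # Illustration of the BOOL branch above (hypothetical values, for illustration
    # only): a BOOL parameter delivered as the string "ON" with
    # _values == ("OFF", "ON") is first mapped to its index (1) before being passed
    # to convert_value(); all other values go to convert_value() unchanged.
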
    def _detect_status_parameter(self) -> str | None:
        """
        Detect the paired STATUS parameter name if it exists.

        Return the STATUS parameter name (e.g., "LEVEL_STATUS" for "LEVEL")
        if it exists in the paramset description, None otherwise.
        """
        status_param = f"{self._parameter}_STATUS"
        try:
            if self._paramset_description_provider.has_parameter(
                interface_id=self._device.interface_id,
                channel_address=self._channel.address,
                paramset_key=self._paramset_key,
                parameter=status_param,
            ):
                return status_param
        except (AttributeError, KeyError):
            # has_parameter not available or lookup failed
            pass
        return None

    def _get_multiplier(self, *, raw_unit: str | None) -> float:
        """Return the multiplier for the given unit."""
        if not raw_unit:
            return DEFAULT_MULTIPLIER
        if multiplier := _MULTIPLIER_UNIT.get(raw_unit):
            return multiplier
        return DEFAULT_MULTIPLIER

    def _get_path_data(self) -> PathData:
        """Return the path data of the data_point."""
        return DataPointPathData(
            interface=self._device.client.interface,
            address=self._device.address,
            channel_no=self._channel.no,
            kind=self._parameter,
        )

    def _get_signature(self) -> str:
        """Return the signature of the data_point."""
        return f"{self._category}/{self._channel.device.model}/{self._parameter}"

    def _get_value(self) -> ParameterT | None:
        """
        Return the value for readings. Override in subclasses for custom value processing.

        Subclasses like DpSelect, DpSensor, DpBinarySensor override this to:
        - Convert integer indices to string values from VALUE_LIST
        - Apply value converters (e.g., RSSI negation)
        - Return defaults when value is None
        """
        return self._value

    def _reset_temporary_value(self) -> None:
        """Reset the temp storage."""
        self._temporary_value = None
        self._reset_temporary_timestamps()

    def _set_value(self, value: ParameterT) -> None:  # kwonly: disable
        """Set the local value."""
        self.write_value(value=value, write_at=datetime.now())

    def __get_value_proxy(self) -> ParameterT | None:
        """
        Proxy method for the value property getter.

        This indirection is necessary because _GenericProperty(fget=method) binds the
        method at class definition time, bypassing MRO. By calling self._get_value()
        here, we ensure subclass overrides of _get_value() are properly invoked.
        """
        return self._get_value()

    value: _GenericProperty[ParameterT | None, ParameterT] = _GenericProperty(
        fget=__get_value_proxy, fset=_set_value, kind=Kind.STATE
    )


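# Minimal standalone sketch (not part of the module) of the proxy-getter pattern
# described in __get_value_proxy above, using the built-in property as a stand-in
# for _GenericProperty. The _Sketch* names are illustrative only and never used.
class _SketchBase:
    def _get_value(self) -> str:
        return "base"

    # Bound directly: the property stores this exact function, so it keeps calling
    # _SketchBase._get_value even on subclasses that override _get_value().
    frozen = property(fget=_get_value)

    # Bound via a proxy: dispatch goes through self, so overrides are honored.
    proxied = property(fget=lambda self: self._get_value())


class _SketchSub(_SketchBase):
    def _get_value(self) -> str:
        return "override"


# _SketchSub().frozen evaluates to "base", while _SketchSub().proxied evaluates to "override".

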
BaseParameterDataPointAny: TypeAlias = BaseParameterDataPoint[Any, Any]


class CallParameterCollector:
    """Create a paramset based on given generic data points."""

    __slots__ = (
        "_client",
        "_paramsets",
    )

    def __init__(self, *, client: ValueAndParamsetOperationsProtocol) -> None:
        """Initialize the collector."""
        self._client: Final[ValueAndParamsetOperationsProtocol] = client
        # {"VALUES": {50: {"00021BE9957782:3": {"STATE3": True}}}}
        self._paramsets: Final[dict[ParamsetKey, dict[int, dict[str, dict[str, Any]]]]] = {}

    def add_data_point(
        self,
        *,
        data_point: BaseParameterDataPointAny,
        value: Any,
        collector_order: int,
    ) -> None:
        """Add a generic data_point."""
        if data_point.paramset_key not in self._paramsets:
            self._paramsets[data_point.paramset_key] = {}
        if collector_order not in self._paramsets[data_point.paramset_key]:
            self._paramsets[data_point.paramset_key][collector_order] = {}
        if data_point.channel.address not in self._paramsets[data_point.paramset_key][collector_order]:
            self._paramsets[data_point.paramset_key][collector_order][data_point.channel.address] = {}
        self._paramsets[data_point.paramset_key][collector_order][data_point.channel.address][data_point.parameter] = (
            value
        )

    async def send_data(self, *, wait_for_callback: int | None) -> set[DP_KEY_VALUE]:
        """Send data to the backend."""
        dpk_values: set[DP_KEY_VALUE] = set()
        for paramset_key, paramsets in self._paramsets.items():
            for _, paramset_no in sorted(paramsets.items()):
                for channel_address, paramset in paramset_no.items():
                    if len(paramset) == 1:
                        for parameter, value in paramset.items():
                            dpk_values.update(
                                await self._client.set_value(
                                    channel_address=channel_address,
                                    paramset_key=paramset_key,
                                    parameter=parameter,
                                    value=value,
                                    wait_for_callback=wait_for_callback,
                                )
                            )
                    else:
                        dpk_values.update(
                            await self._client.put_paramset(
                                channel_address=channel_address,
                                paramset_key_or_link_address=paramset_key,
                                values=paramset,
                                wait_for_callback=wait_for_callback,
                            )
                        )
        return dpk_values


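# Illustrative sketch (not part of the module): batching two writes for one channel
# so that send_data() issues a single put_paramset() instead of two set_value() calls,
# assuming both data points share the same paramset key, collector order and channel.
# `client`, `dp_level` and `dp_level_slats` are hypothetical objects supplied by the
# caller; this coroutine is only a usage outline and is never invoked.
async def _example_batched_write(
    client: ValueAndParamsetOperationsProtocol,
    dp_level: BaseParameterDataPointAny,
    dp_level_slats: BaseParameterDataPointAny,
) -> None:
    collector = CallParameterCollector(client=client)
    collector.add_data_point(data_point=dp_level, value=0.8, collector_order=50)
    collector.add_data_point(data_point=dp_level_slats, value=0.5, collector_order=50)
    # Two parameters collected for the same channel and order -> one put_paramset() call.
    await collector.send_data(wait_for_callback=WAIT_FOR_CALLBACK)

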
@overload
def bind_collector[CallableBC: CallableAny](  # kwonly: disable
    func: CallableBC,
    *,
    wait_for_callback: int | None = WAIT_FOR_CALLBACK,
    enabled: bool = True,
    log_level: int = logging.ERROR,
    scope: ServiceScope = ...,
) -> CallableBC: ...


@overload
def bind_collector[CallableBC: CallableAny](  # kwonly: disable
    *,
    wait_for_callback: int | None = WAIT_FOR_CALLBACK,
    enabled: bool = True,
    log_level: int = logging.ERROR,
    scope: ServiceScope = ...,
) -> Callable[[CallableBC], CallableBC]: ...


def bind_collector[CallableBC: CallableAny](  # kwonly: disable
    func: CallableBC | None = None,
    *,
    wait_for_callback: int | None = WAIT_FOR_CALLBACK,
    enabled: bool = True,
    log_level: int = logging.ERROR,
    scope: ServiceScope = ServiceScope.EXTERNAL,
) -> Callable[[CallableBC], CallableBC] | CallableBC:
    """
    Decorate function to automatically add collector if not set.

    Usage:
    - With parentheses: `@bind_collector()`
    - Without parentheses: `@bind_collector`

    Additionally, thrown exceptions are logged.

    Args:
        func: Function to decorate (when used without parameters).
        wait_for_callback: Time to wait for callback after sending data.
        enabled: Whether the collector binding is enabled.
        log_level: Logging level for exceptions.
        scope: The scope of this service method (see ServiceScope enum).
            EXTERNAL: Methods for external consumers (HA) - user-invokable commands.
                Appears in service_method_names.
            INTERNAL: Infrastructure methods for library operation.
                Does NOT appear in service_method_names.

    """

    def bind_decorator(func: CallableBC) -> CallableBC:
        """Decorate function to automatically add collector if not set."""
        # Inspect the function signature to find where 'collector' parameter is located.
        # It can be either a positional argument (in spec.args) or keyword-only.
        spec = getfullargspec(func)
        if _COLLECTOR_ARGUMENT_NAME in spec.args:
            argument_index: int | None = spec.args.index(_COLLECTOR_ARGUMENT_NAME)
        else:
            # collector is keyword-only or doesn't exist
            argument_index = None

        @wraps(func)
        async def bind_wrapper(*args: Any, **kwargs: Any) -> Any:
            """
            Wrap method to add collector.

            Context variable pattern for nested service calls:
            RequestContext tracks whether we're already inside a service call.
            This prevents nested calls from creating duplicate collectors and
            ensures errors are only logged at the outermost boundary.

            Algorithm:
            1. Set RequestContext if not already in service (track via token)
            2. Check if collector already exists in args or kwargs
            3. If no collector exists, create one and inject into kwargs
            4. Execute the wrapped function
            5. If we created the collector, send batched data
            6. Reset context variable on exit (success or exception)
            """
            # Context variable management: Track if this is the outermost service call.
            # The token allows us to reset exactly to the previous state on exit.
            token: Token[RequestContext | None] | None = None
            if not is_in_service():
                ctx = RequestContext(operation=f"service:{func.__name__}")
                token = set_request_context(ctx=ctx)
            try:
                # Short-circuit if collector binding is disabled
                if not enabled:
                    return_value = await func(*args, **kwargs)
                    if token:
                        reset_request_context(token=token)
                    return return_value

                # Detect if a collector was already provided by the caller.
                # Check both positional args (by index) and keyword args.
                try:
                    collector_exists = (
                        argument_index is not None and len(args) > argument_index and args[argument_index] is not None
                    ) or kwargs.get(_COLLECTOR_ARGUMENT_NAME) is not None
                except Exception:
                    # Fallback: only check kwargs if positional check fails
                    collector_exists = kwargs.get(_COLLECTOR_ARGUMENT_NAME) is not None

                if collector_exists:
                    # Collector provided by caller - they handle send_data()
                    return_value = await func(*args, **kwargs)
                    if token:
                        reset_request_context(token=token)
                    return return_value

                # No collector provided - create one automatically.
                # args[0] is 'self' (the data point), which has channel.device.client
                collector = CallParameterCollector(client=args[0].channel.device.client)
                kwargs[_COLLECTOR_ARGUMENT_NAME] = collector
                return_value = await func(*args, **kwargs)
                # Send batched commands after function completes successfully
                await collector.send_data(wait_for_callback=wait_for_callback)
            except BaseHomematicException as bhexc:
                if token:
                    reset_request_context(token=token)
                if not is_in_service() and log_level > logging.NOTSET:
                    context_obj = args[0]
                    logger = logging.getLogger(context_obj.__module__)
                    log_context = context_obj.log_context if isinstance(context_obj, LogContextMixin) else None
                    # Reuse centralized boundary logging to ensure consistent 'extra' structure
                    log_boundary_error(
                        logger=logger,
                        boundary="service",
                        action=func.__name__,
                        err=bhexc,
                        level=log_level,
                        log_context=log_context,
                    )
                # Re-raise domain-specific exceptions so callers and tests can handle them
                raise
            else:
                if token:
                    reset_request_context(token=token)
                return return_value

        if scope == ServiceScope.EXTERNAL:
            setattr(bind_wrapper, "lib_service", True)
        return cast(CallableBC, bind_wrapper)

    # If used without parentheses: @bind_collector
    if func is not None:
        return bind_decorator(func)
    # If used with parentheses: @bind_collector(...)
    return bind_decorator
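

# Illustrative usage sketch (not part of the module): a hypothetical data point
# service method decorated with bind_collector. The decorator injects a
# CallParameterCollector when the caller does not pass one and flushes it via
# send_data() once the wrapped coroutine has finished. `_SketchDataPoint`,
# `self.channel` and `self._dp_level` stand in for real model objects and are
# assumptions, not part of the aiohomematic API; the class is never instantiated.
class _SketchDataPoint:
    channel: Any  # must expose channel.device.client for the auto-created collector
    _dp_level: BaseParameterDataPointAny

    @bind_collector()
    async def set_level(self, *, level: float, collector: CallParameterCollector | None = None) -> None:
        """Queue the new level; the decorator sends it after this method returns."""
        assert collector is not None  # injected by bind_collector when not supplied
        collector.add_data_point(data_point=self._dp_level, value=level, collector_order=50)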