aiohomematic 2025.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiohomematic might be problematic. Click here for more details.
- aiohomematic/__init__.py +61 -0
- aiohomematic/async_support.py +212 -0
- aiohomematic/central/__init__.py +2309 -0
- aiohomematic/central/decorators.py +155 -0
- aiohomematic/central/rpc_server.py +295 -0
- aiohomematic/client/__init__.py +1848 -0
- aiohomematic/client/_rpc_errors.py +81 -0
- aiohomematic/client/json_rpc.py +1326 -0
- aiohomematic/client/rpc_proxy.py +311 -0
- aiohomematic/const.py +1127 -0
- aiohomematic/context.py +18 -0
- aiohomematic/converter.py +108 -0
- aiohomematic/decorators.py +302 -0
- aiohomematic/exceptions.py +164 -0
- aiohomematic/hmcli.py +186 -0
- aiohomematic/model/__init__.py +140 -0
- aiohomematic/model/calculated/__init__.py +84 -0
- aiohomematic/model/calculated/climate.py +290 -0
- aiohomematic/model/calculated/data_point.py +327 -0
- aiohomematic/model/calculated/operating_voltage_level.py +299 -0
- aiohomematic/model/calculated/support.py +234 -0
- aiohomematic/model/custom/__init__.py +177 -0
- aiohomematic/model/custom/climate.py +1532 -0
- aiohomematic/model/custom/cover.py +792 -0
- aiohomematic/model/custom/data_point.py +334 -0
- aiohomematic/model/custom/definition.py +871 -0
- aiohomematic/model/custom/light.py +1128 -0
- aiohomematic/model/custom/lock.py +394 -0
- aiohomematic/model/custom/siren.py +275 -0
- aiohomematic/model/custom/support.py +41 -0
- aiohomematic/model/custom/switch.py +175 -0
- aiohomematic/model/custom/valve.py +114 -0
- aiohomematic/model/data_point.py +1123 -0
- aiohomematic/model/device.py +1445 -0
- aiohomematic/model/event.py +208 -0
- aiohomematic/model/generic/__init__.py +217 -0
- aiohomematic/model/generic/action.py +34 -0
- aiohomematic/model/generic/binary_sensor.py +30 -0
- aiohomematic/model/generic/button.py +27 -0
- aiohomematic/model/generic/data_point.py +171 -0
- aiohomematic/model/generic/dummy.py +147 -0
- aiohomematic/model/generic/number.py +76 -0
- aiohomematic/model/generic/select.py +39 -0
- aiohomematic/model/generic/sensor.py +74 -0
- aiohomematic/model/generic/switch.py +54 -0
- aiohomematic/model/generic/text.py +29 -0
- aiohomematic/model/hub/__init__.py +333 -0
- aiohomematic/model/hub/binary_sensor.py +24 -0
- aiohomematic/model/hub/button.py +28 -0
- aiohomematic/model/hub/data_point.py +340 -0
- aiohomematic/model/hub/number.py +39 -0
- aiohomematic/model/hub/select.py +49 -0
- aiohomematic/model/hub/sensor.py +37 -0
- aiohomematic/model/hub/switch.py +44 -0
- aiohomematic/model/hub/text.py +30 -0
- aiohomematic/model/support.py +586 -0
- aiohomematic/model/update.py +143 -0
- aiohomematic/property_decorators.py +496 -0
- aiohomematic/py.typed +0 -0
- aiohomematic/rega_scripts/fetch_all_device_data.fn +92 -0
- aiohomematic/rega_scripts/get_program_descriptions.fn +30 -0
- aiohomematic/rega_scripts/get_serial.fn +44 -0
- aiohomematic/rega_scripts/get_system_variable_descriptions.fn +30 -0
- aiohomematic/rega_scripts/set_program_state.fn +12 -0
- aiohomematic/rega_scripts/set_system_variable.fn +15 -0
- aiohomematic/store/__init__.py +34 -0
- aiohomematic/store/dynamic.py +551 -0
- aiohomematic/store/persistent.py +988 -0
- aiohomematic/store/visibility.py +812 -0
- aiohomematic/support.py +664 -0
- aiohomematic/validator.py +112 -0
- aiohomematic-2025.11.3.dist-info/METADATA +144 -0
- aiohomematic-2025.11.3.dist-info/RECORD +77 -0
- aiohomematic-2025.11.3.dist-info/WHEEL +5 -0
- aiohomematic-2025.11.3.dist-info/entry_points.txt +2 -0
- aiohomematic-2025.11.3.dist-info/licenses/LICENSE +21 -0
- aiohomematic-2025.11.3.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,2309 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2021-2025
|
|
3
|
+
"""
|
|
4
|
+
Central unit and core orchestration for Homematic CCU and compatible backends.
|
|
5
|
+
|
|
6
|
+
Overview
|
|
7
|
+
--------
|
|
8
|
+
This package provides the central coordination layer for aiohomematic. It models a
|
|
9
|
+
Homematic CCU (or compatible backend such as Homegear) and orchestrates
|
|
10
|
+
interfaces, devices, channels, data points, events, and background jobs.
|
|
11
|
+
|
|
12
|
+
The central unit ties together the various submodules: store, client adapters
|
|
13
|
+
(JSON-RPC/XML-RPC), device and data point models, and visibility/description store.
|
|
14
|
+
It exposes high-level APIs to query and manipulate the backend state while
|
|
15
|
+
encapsulating transport and scheduling details.
|
|
16
|
+
|
|
17
|
+
Public API (selected)
|
|
18
|
+
---------------------
|
|
19
|
+
- CentralUnit: The main coordination class. Manages client creation/lifecycle,
|
|
20
|
+
connection state, device and channel discovery, data point and event handling,
|
|
21
|
+
sysvar/program access, cache loading/saving, and dispatching callbacks.
|
|
22
|
+
- CentralConfig: Configuration builder/holder for CentralUnit instances, including
|
|
23
|
+
connection parameters, feature toggles, and cache behavior.
|
|
24
|
+
- CentralConnectionState: Tracks connection issues per transport/client.
|
|
25
|
+
|
|
26
|
+
Internal helpers
|
|
27
|
+
----------------
|
|
28
|
+
- _Scheduler: Background thread that periodically checks connection health,
|
|
29
|
+
refreshes data, and fetches firmware status according to configured intervals.
|
|
30
|
+
|
|
31
|
+
Quick start
|
|
32
|
+
-----------
|
|
33
|
+
Typical usage is to create a CentralConfig, build a CentralUnit, then start it.
|
|
34
|
+
|
|
35
|
+
Example (simplified):
|
|
36
|
+
|
|
37
|
+
from aiohomematic.central import CentralConfig
|
|
38
|
+
from aiohomematic import client as hmcl
|
|
39
|
+
|
|
40
|
+
iface_cfgs = {
|
|
41
|
+
hmcl.InterfaceConfig(interface=hmcl.Interface.HMIP, port=2010, enabled=True),
|
|
42
|
+
hmcl.InterfaceConfig(interface=hmcl.Interface.BIDCOS_RF, port=2001, enabled=True),
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
cfg = CentralConfig(
|
|
46
|
+
central_id="ccu-main",
|
|
47
|
+
host="ccu.local",
|
|
48
|
+
interface_configs=iface_cfgs,
|
|
49
|
+
name="MyCCU",
|
|
50
|
+
password="secret",
|
|
51
|
+
username="admin",
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
central = cfg.create_central()
|
|
55
|
+
await central.start()  # start XML-RPC server, create/init clients, load store
# ... interact with devices / data points via central ...
await central.stop()
|
|
58
|
+
|
|
59
|
+
Notes
|
|
60
|
+
-----
|
|
61
|
+
- The central module is thread-aware and uses an internal Looper to schedule async tasks.
|
|
62
|
+
- For advanced scenarios, see the rpc_server and decorators modules in this package.
|
|
63
|
+
|
|
64
|
+
"""
|
|
65
|
+
|
|
66
|
+
from __future__ import annotations
|
|
67
|
+
|
|
68
|
+
import asyncio
|
|
69
|
+
from collections.abc import Callable, Coroutine, Mapping, Set as AbstractSet
|
|
70
|
+
from datetime import datetime, timedelta
|
|
71
|
+
from functools import partial
|
|
72
|
+
import logging
|
|
73
|
+
from logging import DEBUG
|
|
74
|
+
import threading
|
|
75
|
+
from typing import Any, Final, cast
|
|
76
|
+
|
|
77
|
+
from aiohttp import ClientSession
|
|
78
|
+
import voluptuous as vol
|
|
79
|
+
|
|
80
|
+
from aiohomematic import client as hmcl
|
|
81
|
+
from aiohomematic.async_support import Looper, loop_check
|
|
82
|
+
from aiohomematic.central import rpc_server as rpc
|
|
83
|
+
from aiohomematic.central.decorators import callback_backend_system, callback_event
|
|
84
|
+
from aiohomematic.client import AioJsonRpcAioHttpClient, BaseRpcProxy
|
|
85
|
+
from aiohomematic.const import (
|
|
86
|
+
CALLBACK_TYPE,
|
|
87
|
+
CATEGORIES,
|
|
88
|
+
CONNECTION_CHECKER_INTERVAL,
|
|
89
|
+
DATA_POINT_EVENTS,
|
|
90
|
+
DATETIME_FORMAT_MILLIS,
|
|
91
|
+
DEFAULT_DELAY_NEW_DEVICE_CREATION,
|
|
92
|
+
DEFAULT_ENABLE_DEVICE_FIRMWARE_CHECK,
|
|
93
|
+
DEFAULT_ENABLE_PROGRAM_SCAN,
|
|
94
|
+
DEFAULT_ENABLE_SYSVAR_SCAN,
|
|
95
|
+
DEFAULT_HM_MASTER_POLL_AFTER_SEND_INTERVALS,
|
|
96
|
+
DEFAULT_IGNORE_CUSTOM_DEVICE_DEFINITION_MODELS,
|
|
97
|
+
DEFAULT_INTERFACES_REQUIRING_PERIODIC_REFRESH,
|
|
98
|
+
DEFAULT_MAX_READ_WORKERS,
|
|
99
|
+
DEFAULT_OPTIONAL_SETTINGS,
|
|
100
|
+
DEFAULT_PERIODIC_REFRESH_INTERVAL,
|
|
101
|
+
DEFAULT_PROGRAM_MARKERS,
|
|
102
|
+
DEFAULT_SESSION_RECORDER_START_FOR_SECONDS,
|
|
103
|
+
DEFAULT_STORAGE_DIRECTORY,
|
|
104
|
+
DEFAULT_SYS_SCAN_INTERVAL,
|
|
105
|
+
DEFAULT_SYSVAR_MARKERS,
|
|
106
|
+
DEFAULT_TLS,
|
|
107
|
+
DEFAULT_UN_IGNORES,
|
|
108
|
+
DEFAULT_USE_GROUP_CHANNEL_FOR_COVER_STATE,
|
|
109
|
+
DEFAULT_VERIFY_TLS,
|
|
110
|
+
DEVICE_FIRMWARE_CHECK_INTERVAL,
|
|
111
|
+
DEVICE_FIRMWARE_DELIVERING_CHECK_INTERVAL,
|
|
112
|
+
DEVICE_FIRMWARE_UPDATING_CHECK_INTERVAL,
|
|
113
|
+
IDENTIFIER_SEPARATOR,
|
|
114
|
+
IGNORE_FOR_UN_IGNORE_PARAMETERS,
|
|
115
|
+
IP_ANY_V4,
|
|
116
|
+
LOCAL_HOST,
|
|
117
|
+
PORT_ANY,
|
|
118
|
+
PRIMARY_CLIENT_CANDIDATE_INTERFACES,
|
|
119
|
+
SCHEDULER_LOOP_SLEEP,
|
|
120
|
+
SCHEDULER_NOT_STARTED_SLEEP,
|
|
121
|
+
TIMEOUT,
|
|
122
|
+
UN_IGNORE_WILDCARD,
|
|
123
|
+
BackendSystemEvent,
|
|
124
|
+
CentralUnitState,
|
|
125
|
+
DataOperationResult,
|
|
126
|
+
DataPointCategory,
|
|
127
|
+
DataPointKey,
|
|
128
|
+
DescriptionMarker,
|
|
129
|
+
DeviceDescription,
|
|
130
|
+
DeviceFirmwareState,
|
|
131
|
+
EventKey,
|
|
132
|
+
EventType,
|
|
133
|
+
Interface,
|
|
134
|
+
InterfaceEventType,
|
|
135
|
+
Operations,
|
|
136
|
+
OptionalSettings,
|
|
137
|
+
Parameter,
|
|
138
|
+
ParamsetKey,
|
|
139
|
+
ProxyInitState,
|
|
140
|
+
RpcServerType,
|
|
141
|
+
SourceOfDeviceCreation,
|
|
142
|
+
SystemInformation,
|
|
143
|
+
)
|
|
144
|
+
from aiohomematic.decorators import inspector
|
|
145
|
+
from aiohomematic.exceptions import (
|
|
146
|
+
AioHomematicConfigException,
|
|
147
|
+
AioHomematicException,
|
|
148
|
+
BaseHomematicException,
|
|
149
|
+
NoClientsException,
|
|
150
|
+
NoConnectionException,
|
|
151
|
+
)
|
|
152
|
+
from aiohomematic.model import create_data_points_and_events
|
|
153
|
+
from aiohomematic.model.custom import CustomDataPoint, create_custom_data_points
|
|
154
|
+
from aiohomematic.model.data_point import BaseParameterDataPoint, CallbackDataPoint
|
|
155
|
+
from aiohomematic.model.device import Channel, Device
|
|
156
|
+
from aiohomematic.model.event import GenericEvent
|
|
157
|
+
from aiohomematic.model.generic import GenericDataPoint
|
|
158
|
+
from aiohomematic.model.hub import (
|
|
159
|
+
GenericHubDataPoint,
|
|
160
|
+
GenericProgramDataPoint,
|
|
161
|
+
GenericSysvarDataPoint,
|
|
162
|
+
Hub,
|
|
163
|
+
ProgramDpType,
|
|
164
|
+
)
|
|
165
|
+
from aiohomematic.property_decorators import info_property
|
|
166
|
+
from aiohomematic.store import (
|
|
167
|
+
CentralDataCache,
|
|
168
|
+
DeviceDescriptionCache,
|
|
169
|
+
DeviceDetailsCache,
|
|
170
|
+
ParameterVisibilityCache,
|
|
171
|
+
ParamsetDescriptionCache,
|
|
172
|
+
SessionRecorder,
|
|
173
|
+
)
|
|
174
|
+
from aiohomematic.support import (
|
|
175
|
+
LogContextMixin,
|
|
176
|
+
PayloadMixin,
|
|
177
|
+
check_or_create_directory,
|
|
178
|
+
check_password,
|
|
179
|
+
extract_device_addresses_from_device_descriptions,
|
|
180
|
+
extract_exc_args,
|
|
181
|
+
get_channel_no,
|
|
182
|
+
get_device_address,
|
|
183
|
+
get_ip_addr,
|
|
184
|
+
is_hostname,
|
|
185
|
+
is_ipv4_address,
|
|
186
|
+
is_port,
|
|
187
|
+
)
|
|
188
|
+
|
|
189
|
+
__all__ = ["CentralConfig", "CentralUnit", "INTERFACE_EVENT_SCHEMA"]
|
|
190
|
+
|
|
191
|
+
_LOGGER: Final = logging.getLogger(__name__)
|
|
192
|
+
_LOGGER_EVENT: Final = logging.getLogger(f"{__package__}.event")
|
|
193
|
+
|
|
194
|
+
# {central_name, central}
|
|
195
|
+
CENTRAL_INSTANCES: Final[dict[str, CentralUnit]] = {}
|
|
196
|
+
ConnectionProblemIssuer = AioJsonRpcAioHttpClient | BaseRpcProxy
|
|
197
|
+
|
|
198
|
+
INTERFACE_EVENT_SCHEMA = vol.Schema(
|
|
199
|
+
{
|
|
200
|
+
vol.Required(str(EventKey.INTERFACE_ID)): str,
|
|
201
|
+
vol.Required(str(EventKey.TYPE)): InterfaceEventType,
|
|
202
|
+
vol.Required(str(EventKey.DATA)): vol.Schema(
|
|
203
|
+
{vol.Required(vol.Any(EventKey)): vol.Schema(vol.Any(str, int, bool))}
|
|
204
|
+
),
|
|
205
|
+
}
|
|
206
|
+
)
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
class CentralUnit(LogContextMixin, PayloadMixin):
|
|
210
|
+
"""Central unit that collects everything to handle communication from/to the backend."""
|
|
211
|
+
|
|
212
|
+
    def __init__(self, *, central_config: CentralConfig) -> None:
        """Init the central unit.

        Wires up caches, client bookkeeping, callback registries and the
        scheduler, then registers this instance in CENTRAL_INSTANCES.
        """
        # Lifecycle state machine; transitions happen in start()/stop().
        self._state: CentralUnitState = CentralUnitState.NEW
        self._clients_started: bool = False
        # Serializes device-creation bursts (see _add_new_devices callers).
        self._device_add_semaphore: Final = asyncio.Semaphore()
        self._connection_state: Final = CentralConnectionState()
        self._tasks: Final[set[asyncio.Future[Any]]] = set()
        # Keep the config for the central
        self._config: Final = central_config
        self._url: Final = self._config.create_central_url()
        self._model: str | None = None
        self._looper = Looper()
        # Created lazily in start(); shared across central instances.
        self._xml_rpc_server: rpc.XmlRpcServer | None = None
        # Created lazily via the json_rpc_client property.
        self._json_rpc_client: AioJsonRpcAioHttpClient | None = None

        # Caches for the backend data
        self._data_cache: Final = CentralDataCache(central=self)
        self._device_details: Final = DeviceDetailsCache(central=self)
        self._device_descriptions: Final = DeviceDescriptionCache(central=self)
        self._paramset_descriptions: Final = ParamsetDescriptionCache(central=self)
        self._parameter_visibility: Final = ParameterVisibilityCache(central=self)
        self._recorder: Final = SessionRecorder(
            central=self, ttl_seconds=600, active=central_config.session_recorder_start
        )
        # Resolved lazily via the primary_client property.
        self._primary_client: hmcl.Client | None = None
        # {interface_id, client}
        self._clients: Final[dict[str, hmcl.Client]] = {}
        self._data_point_key_event_subscriptions: Final[
            dict[DataPointKey, list[Callable[..., Coroutine[Any, Any, None]]]]
        ] = {}
        self._data_point_path_event_subscriptions: Final[dict[str, DataPointKey]] = {}
        # {state_path, event callback} — maintained by add/remove_sysvar_data_point.
        self._sysvar_data_point_event_subscriptions: Final[dict[str, Callable]] = {}
        # {device_address, device}
        self._devices: Final[dict[str, Device]] = {}
        # {sysvar_name, sysvar_data_point}
        self._sysvar_data_points: Final[dict[str, GenericSysvarDataPoint]] = {}
        # {sysvar_name, program_button}
        self._program_data_points: Final[dict[str, ProgramDpType]] = {}
        # Signature: (system_event, new_data_points, new_channel_events, **kwargs)
        # e.g. DEVICES_CREATED, HUB_REFRESHED
        self._backend_system_callbacks: Final[set[Callable]] = set()
        # Signature: (interface_id, channel_address, parameter, value)
        # Re-emitted events from the backend for parameter updates
        self._backend_parameter_callbacks: Final[set[Callable]] = set()
        # Signature: (event_type, event_data)
        # Events like INTERFACE, KEYPRESS, ...
        self._homematic_callbacks: Final[set[Callable]] = set()

        # Register globally; removed again in stop().
        CENTRAL_INSTANCES[self.name] = self
        self._scheduler: Final = _Scheduler(central=self)
        self._hub: Hub = Hub(central=self)
        self._version: str | None = None
        # store last event received datetime by interface_id
        self._last_event_seen_for_interface: Final[dict[str, datetime]] = {}
        # Network endpoints; real values are resolved in start().
        self._rpc_callback_ip: str = IP_ANY_V4
        self._listen_ip_addr: str = IP_ANY_V4
        self._listen_port_xml_rpc: int = PORT_ANY
|
|
269
|
+
|
|
270
|
+
@property
|
|
271
|
+
def available(self) -> bool:
|
|
272
|
+
"""Return the availability of the central."""
|
|
273
|
+
return all(client.available for client in self._clients.values())
|
|
274
|
+
|
|
275
|
+
@property
|
|
276
|
+
def callback_ip_addr(self) -> str:
|
|
277
|
+
"""Return the xml rpc server callback ip address."""
|
|
278
|
+
return self._rpc_callback_ip
|
|
279
|
+
|
|
280
|
+
@info_property(log_context=True)
|
|
281
|
+
def url(self) -> str:
|
|
282
|
+
"""Return the central url."""
|
|
283
|
+
return self._url
|
|
284
|
+
|
|
285
|
+
@property
|
|
286
|
+
def clients(self) -> tuple[hmcl.Client, ...]:
|
|
287
|
+
"""Return all clients."""
|
|
288
|
+
return tuple(self._clients.values())
|
|
289
|
+
|
|
290
|
+
@property
|
|
291
|
+
def config(self) -> CentralConfig:
|
|
292
|
+
"""Return central config."""
|
|
293
|
+
return self._config
|
|
294
|
+
|
|
295
|
+
@property
|
|
296
|
+
def connection_state(self) -> CentralConnectionState:
|
|
297
|
+
"""Return the connection state."""
|
|
298
|
+
return self._connection_state
|
|
299
|
+
|
|
300
|
+
@property
|
|
301
|
+
def data_cache(self) -> CentralDataCache:
|
|
302
|
+
"""Return data_cache cache."""
|
|
303
|
+
return self._data_cache
|
|
304
|
+
|
|
305
|
+
@property
|
|
306
|
+
def device_details(self) -> DeviceDetailsCache:
|
|
307
|
+
"""Return device_details cache."""
|
|
308
|
+
return self._device_details
|
|
309
|
+
|
|
310
|
+
@property
|
|
311
|
+
def device_descriptions(self) -> DeviceDescriptionCache:
|
|
312
|
+
"""Return device_descriptions cache."""
|
|
313
|
+
return self._device_descriptions
|
|
314
|
+
|
|
315
|
+
@property
|
|
316
|
+
def devices(self) -> tuple[Device, ...]:
|
|
317
|
+
"""Return all devices."""
|
|
318
|
+
return tuple(self._devices.values())
|
|
319
|
+
|
|
320
|
+
@property
|
|
321
|
+
def _has_active_threads(self) -> bool:
|
|
322
|
+
"""Return if active sub threads are alive."""
|
|
323
|
+
if self._scheduler.is_alive():
|
|
324
|
+
return True
|
|
325
|
+
return bool(
|
|
326
|
+
self._xml_rpc_server and self._xml_rpc_server.no_central_assigned and self._xml_rpc_server.is_alive()
|
|
327
|
+
)
|
|
328
|
+
|
|
329
|
+
@property
|
|
330
|
+
def interface_ids(self) -> frozenset[str]:
|
|
331
|
+
"""Return all associated interface ids."""
|
|
332
|
+
return frozenset(self._clients)
|
|
333
|
+
|
|
334
|
+
@property
|
|
335
|
+
def interfaces(self) -> frozenset[Interface]:
|
|
336
|
+
"""Return all associated interfaces."""
|
|
337
|
+
return frozenset(client.interface for client in self._clients.values())
|
|
338
|
+
|
|
339
|
+
@property
|
|
340
|
+
def is_alive(self) -> bool:
|
|
341
|
+
"""Return if XmlRPC-Server is alive."""
|
|
342
|
+
return all(client.is_callback_alive() for client in self._clients.values())
|
|
343
|
+
|
|
344
|
+
@property
|
|
345
|
+
def json_rpc_client(self) -> AioJsonRpcAioHttpClient:
|
|
346
|
+
"""Return the json rpc client."""
|
|
347
|
+
if not self._json_rpc_client:
|
|
348
|
+
self._json_rpc_client = self._config.create_json_rpc_client(central=self)
|
|
349
|
+
return self._json_rpc_client
|
|
350
|
+
|
|
351
|
+
@property
|
|
352
|
+
def paramset_descriptions(self) -> ParamsetDescriptionCache:
|
|
353
|
+
"""Return paramset_descriptions cache."""
|
|
354
|
+
return self._paramset_descriptions
|
|
355
|
+
|
|
356
|
+
@property
|
|
357
|
+
def parameter_visibility(self) -> ParameterVisibilityCache:
|
|
358
|
+
"""Return parameter_visibility cache."""
|
|
359
|
+
return self._parameter_visibility
|
|
360
|
+
|
|
361
|
+
@property
|
|
362
|
+
def recorder(self) -> SessionRecorder:
|
|
363
|
+
"""Return the session recorder."""
|
|
364
|
+
return self._recorder
|
|
365
|
+
|
|
366
|
+
@property
|
|
367
|
+
def poll_clients(self) -> tuple[hmcl.Client, ...]:
|
|
368
|
+
"""Return clients that need to poll data."""
|
|
369
|
+
return tuple(client for client in self._clients.values() if not client.supports_push_updates)
|
|
370
|
+
|
|
371
|
+
@property
|
|
372
|
+
def primary_client(self) -> hmcl.Client | None:
|
|
373
|
+
"""Return the primary client of the backend."""
|
|
374
|
+
if self._primary_client is not None:
|
|
375
|
+
return self._primary_client
|
|
376
|
+
if client := self._get_primary_client():
|
|
377
|
+
self._primary_client = client
|
|
378
|
+
return self._primary_client
|
|
379
|
+
|
|
380
|
+
@property
|
|
381
|
+
def listen_ip_addr(self) -> str:
|
|
382
|
+
"""Return the xml rpc server listening ip address."""
|
|
383
|
+
return self._listen_ip_addr
|
|
384
|
+
|
|
385
|
+
@property
|
|
386
|
+
def listen_port_xml_rpc(self) -> int:
|
|
387
|
+
"""Return the xml rpc listening server port."""
|
|
388
|
+
return self._listen_port_xml_rpc
|
|
389
|
+
|
|
390
|
+
@property
|
|
391
|
+
def looper(self) -> Looper:
|
|
392
|
+
"""Return the loop support."""
|
|
393
|
+
return self._looper
|
|
394
|
+
|
|
395
|
+
@info_property(log_context=True)
|
|
396
|
+
def model(self) -> str | None:
|
|
397
|
+
"""Return the model of the backend."""
|
|
398
|
+
if not self._model and (client := self.primary_client):
|
|
399
|
+
self._model = client.model
|
|
400
|
+
return self._model
|
|
401
|
+
|
|
402
|
+
@info_property(log_context=True)
|
|
403
|
+
def name(self) -> str:
|
|
404
|
+
"""Return the name of the backend."""
|
|
405
|
+
return self._config.name
|
|
406
|
+
|
|
407
|
+
@property
|
|
408
|
+
def program_data_points(self) -> tuple[GenericProgramDataPoint, ...]:
|
|
409
|
+
"""Return the program data points."""
|
|
410
|
+
return tuple(
|
|
411
|
+
[x.button for x in self._program_data_points.values()]
|
|
412
|
+
+ [x.switch for x in self._program_data_points.values()]
|
|
413
|
+
)
|
|
414
|
+
|
|
415
|
+
@property
|
|
416
|
+
def state(self) -> CentralUnitState:
|
|
417
|
+
"""Return the central state."""
|
|
418
|
+
return self._state
|
|
419
|
+
|
|
420
|
+
@property
|
|
421
|
+
def supports_ping_pong(self) -> bool:
|
|
422
|
+
"""Return the backend supports ping pong."""
|
|
423
|
+
if primary_client := self.primary_client:
|
|
424
|
+
return primary_client.supports_ping_pong
|
|
425
|
+
return False
|
|
426
|
+
|
|
427
|
+
@property
|
|
428
|
+
def system_information(self) -> SystemInformation:
|
|
429
|
+
"""Return the system_information of the backend."""
|
|
430
|
+
if client := self.primary_client:
|
|
431
|
+
return client.system_information
|
|
432
|
+
return SystemInformation()
|
|
433
|
+
|
|
434
|
+
@property
|
|
435
|
+
def sysvar_data_points(self) -> tuple[GenericSysvarDataPoint, ...]:
|
|
436
|
+
"""Return the sysvar data points."""
|
|
437
|
+
return tuple(self._sysvar_data_points.values())
|
|
438
|
+
|
|
439
|
+
@info_property
|
|
440
|
+
def version(self) -> str | None:
|
|
441
|
+
"""Return the version of the backend."""
|
|
442
|
+
if self._version is None:
|
|
443
|
+
versions = [client.version for client in self._clients.values() if client.version]
|
|
444
|
+
self._version = max(versions) if versions else None
|
|
445
|
+
return self._version
|
|
446
|
+
|
|
447
|
+
def add_sysvar_data_point(self, *, sysvar_data_point: GenericSysvarDataPoint) -> None:
|
|
448
|
+
"""Add new program button."""
|
|
449
|
+
if (vid := sysvar_data_point.vid) is not None:
|
|
450
|
+
self._sysvar_data_points[vid] = sysvar_data_point
|
|
451
|
+
if sysvar_data_point.state_path not in self._sysvar_data_point_event_subscriptions:
|
|
452
|
+
self._sysvar_data_point_event_subscriptions[sysvar_data_point.state_path] = sysvar_data_point.event
|
|
453
|
+
|
|
454
|
+
def remove_sysvar_data_point(self, *, vid: str) -> None:
|
|
455
|
+
"""Remove a sysvar data_point."""
|
|
456
|
+
if (sysvar_dp := self.get_sysvar_data_point(vid=vid)) is not None:
|
|
457
|
+
sysvar_dp.emit_device_removed_event()
|
|
458
|
+
del self._sysvar_data_points[vid]
|
|
459
|
+
if sysvar_dp.state_path in self._sysvar_data_point_event_subscriptions:
|
|
460
|
+
del self._sysvar_data_point_event_subscriptions[sysvar_dp.state_path]
|
|
461
|
+
|
|
462
|
+
def add_program_data_point(self, *, program_dp: ProgramDpType) -> None:
|
|
463
|
+
"""Add new program button."""
|
|
464
|
+
self._program_data_points[program_dp.pid] = program_dp
|
|
465
|
+
|
|
466
|
+
def remove_program_button(self, *, pid: str) -> None:
|
|
467
|
+
"""Remove a program button."""
|
|
468
|
+
if (program_dp := self.get_program_data_point(pid=pid)) is not None:
|
|
469
|
+
program_dp.button.emit_device_removed_event()
|
|
470
|
+
program_dp.switch.emit_device_removed_event()
|
|
471
|
+
del self._program_data_points[pid]
|
|
472
|
+
|
|
473
|
+
def identify_channel(self, *, text: str) -> Channel | None:
|
|
474
|
+
"""Identify channel within a text."""
|
|
475
|
+
for device in self._devices.values():
|
|
476
|
+
if channel := device.identify_channel(text=text):
|
|
477
|
+
return channel
|
|
478
|
+
return None
|
|
479
|
+
|
|
480
|
+
async def save_files(
|
|
481
|
+
self,
|
|
482
|
+
*,
|
|
483
|
+
save_device_descriptions: bool = False,
|
|
484
|
+
save_paramset_descriptions: bool = False,
|
|
485
|
+
) -> None:
|
|
486
|
+
"""Save persistent files to disk."""
|
|
487
|
+
if save_device_descriptions:
|
|
488
|
+
await self._device_descriptions.save()
|
|
489
|
+
if save_paramset_descriptions:
|
|
490
|
+
await self._paramset_descriptions.save()
|
|
491
|
+
|
|
492
|
+
    async def start(self) -> None:
        """Start processing of the central unit.

        Idempotent: returns immediately if already INITIALIZING or RUNNING.
        Resolves the callback/listen ip, optionally starts the XML-RPC server,
        creates and starts clients, and finally transitions to RUNNING.
        Raises AioHomematicException when the XML-RPC server cannot bind.
        """

        _LOGGER.debug("START: Central %s is %s", self.name, self._state)
        # Guard against concurrent/repeated start calls.
        if self._state == CentralUnitState.INITIALIZING:
            _LOGGER.debug("START: Central %s already starting", self.name)
            return

        if self._state == CentralUnitState.RUNNING:
            _LOGGER.debug("START: Central %s already started", self.name)
            return

        if self._config.session_recorder_start:
            # NOTE(review): schedules recorder deactivation after the configured
            # delay, i.e. the recorder runs for that many seconds — confirm.
            await self._recorder.deactivate(
                delay=self._config.session_recorder_start_for_seconds,
                auto_save=True,
                randomize_output=self._config.session_recorder_randomize_output,
                use_ts_in_file_name=False,
            )
            _LOGGER.debug("START: Starting Recorder for %s seconds", self._config.session_recorder_start_for_seconds)

        self._state = CentralUnitState.INITIALIZING
        _LOGGER.debug("START: Initializing Central %s", self.name)
        # Determine the ip the backend should call back to; also fixes the
        # listen address unless one was configured explicitly.
        if self._config.enabled_interface_configs and (
            ip_addr := await self._identify_ip_addr(port=self._config.connection_check_port)
        ):
            self._rpc_callback_ip = ip_addr
            self._listen_ip_addr = self._config.listen_ip_addr if self._config.listen_ip_addr else ip_addr

        # Configured listen port wins; otherwise fall back to the callback or
        # default callback port.
        port_xml_rpc: int = (
            self._config.listen_port_xml_rpc
            if self._config.listen_port_xml_rpc
            else self._config.callback_port_xml_rpc or self._config.default_callback_port_xml_rpc
        )
        try:
            # Only create the XML-RPC server when enabled; the server may be
            # shared between several central instances.
            if (
                xml_rpc_server := rpc.create_xml_rpc_server(ip_addr=self._listen_ip_addr, port=port_xml_rpc)
                if self._config.enable_xml_rpc_server
                else None
            ):
                self._xml_rpc_server = xml_rpc_server
                self._listen_port_xml_rpc = xml_rpc_server.listen_port
                self._xml_rpc_server.add_central(central=self)
        except OSError as oserr:  # pragma: no cover - environment/OS-specific socket binding failures are not reliably reproducible in CI
            self._state = CentralUnitState.STOPPED_BY_ERROR
            raise AioHomematicException(
                f"START: Failed to start central unit {self.name}: {extract_exc_args(exc=oserr)}"
            ) from oserr

        if self._config.start_direct:
            # Direct mode: create clients and fetch device descriptions once,
            # without the full client start / scheduler machinery.
            if await self._create_clients():
                for client in self._clients.values():
                    await self._refresh_device_descriptions_and_create_missing_devices(
                        client=client, refresh_only_existing=False
                    )
        else:
            self._clients_started = await self._start_clients()
            # The scheduler relies on callbacks, so it is only started
            # together with the XML-RPC server.
            if self._config.enable_xml_rpc_server:
                self._start_scheduler()

        self._state = CentralUnitState.RUNNING
        _LOGGER.debug("START: Central %s is %s", self.name, self._state)
|
|
554
|
+
|
|
555
|
+
    async def stop(self) -> None:
        """Stop processing of the central unit.

        Idempotent: returns immediately unless currently RUNNING. Persists
        caches, stops scheduler and clients, logs out of JSON-RPC, releases
        the (possibly shared) XML-RPC server and unregisters this instance.
        """
        _LOGGER.debug("STOP: Central %s is %s", self.name, self._state)
        if self._state == CentralUnitState.STOPPING:
            _LOGGER.debug("STOP: Central %s is already stopping", self.name)
            return
        if self._state == CentralUnitState.STOPPED:
            _LOGGER.debug("STOP: Central %s is already stopped", self.name)
            return
        if self._state != CentralUnitState.RUNNING:
            _LOGGER.debug("STOP: Central %s not started", self.name)
            return
        self._state = CentralUnitState.STOPPING
        _LOGGER.debug("STOP: Stopping Central %s", self.name)

        # Persist description caches before tearing anything down.
        await self.save_files(save_device_descriptions=True, save_paramset_descriptions=True)
        self._stop_scheduler()
        await self._stop_clients()
        if self._json_rpc_client and self._json_rpc_client.is_activated:
            await self._json_rpc_client.logout()
            await self._json_rpc_client.stop()

        if self._xml_rpc_server:
            # un-register this instance from XmlRPC-Server
            self._xml_rpc_server.remove_central(central=self)
            # un-register and stop XmlRPC-Server, if possible
            if self._xml_rpc_server.no_central_assigned:
                self._xml_rpc_server.stop()
                _LOGGER.debug("STOP: XmlRPC-Server stopped")
            else:
                _LOGGER.debug("STOP: shared XmlRPC-Server NOT stopped. There is still another central instance registered")

        _LOGGER.debug("STOP: Removing instance")
        if self.name in CENTRAL_INSTANCES:
            del CENTRAL_INSTANCES[self.name]

        # cancel outstanding tasks to speed up teardown
        self.looper.cancel_tasks()
        # wait until tasks are finished (with wait_time safeguard)
        await self.looper.block_till_done(wait_time=5.0)

        # Wait briefly for any auxiliary threads to finish without blocking forever
        max_wait_seconds = 5.0
        interval = 0.05
        waited = 0.0
        while self._has_active_threads and waited < max_wait_seconds:
            await asyncio.sleep(interval)
            waited += interval
        self._state = CentralUnitState.STOPPED
        _LOGGER.debug("STOP: Central %s is %s", self.name, self._state)
|
|
605
|
+
|
|
606
|
+
async def restart_clients(self) -> None:
|
|
607
|
+
"""Restart clients."""
|
|
608
|
+
await self._stop_clients()
|
|
609
|
+
if await self._start_clients():
|
|
610
|
+
_LOGGER.info("RESTART_CLIENTS: Central %s restarted clients", self.name)
|
|
611
|
+
|
|
612
|
+
    @inspector(re_raise=False)
    async def refresh_firmware_data(self, *, device_address: str | None = None) -> None:
        """Refresh device firmware data.

        With a device_address that resolves to an updatable device, only that
        device's descriptions and firmware data are refreshed. Otherwise —
        including when an address is given but the device is unknown or not
        updatable — ALL interfaces are refreshed and every updatable device
        gets its firmware data refreshed (NOTE(review): the fall-through for an
        unknown/non-updatable address looks intentional but is worth confirming).
        """
        if device_address and (device := self.get_device(address=device_address)) is not None and device.is_updatable:
            await self._refresh_device_descriptions_and_create_missing_devices(
                client=device.client, refresh_only_existing=True, device_address=device_address
            )
            device.refresh_firmware_data()
        else:
            for client in self._clients.values():
                await self._refresh_device_descriptions_and_create_missing_devices(
                    client=client, refresh_only_existing=True
                )
            for device in self._devices.values():
                if device.is_updatable:
                    device.refresh_firmware_data()
|
|
628
|
+
|
|
629
|
+
@inspector(re_raise=False)
async def refresh_firmware_data_by_state(self, *, device_firmware_states: tuple[DeviceFirmwareState, ...]) -> None:
    """Refresh firmware data for every device whose update state matches one of the given states."""
    matching_devices = [
        dev for dev in self._devices.values() if dev.firmware_update_state in device_firmware_states
    ]
    for dev in matching_devices:
        await self.refresh_firmware_data(device_address=dev.address)
|
|
638
|
+
|
|
639
|
+
async def _refresh_device_descriptions_and_create_missing_devices(
    self, *, client: hmcl.Client, refresh_only_existing: bool, device_address: str | None = None
) -> None:
    """Refresh device descriptions and create missing devices.

    Fetches either the single description for device_address or the full
    device list of the client. When refresh_only_existing is True, the
    result is narrowed to addresses already known for this interface before
    handing it to _add_new_devices.
    """
    device_descriptions: tuple[DeviceDescription, ...] | None = None

    if (
        device_address
        and (device_description := await client.get_device_description(device_address=device_address)) is not None
    ):
        device_descriptions = (device_description,)
    else:
        # No (resolvable) single address requested -> fetch everything from the backend.
        device_descriptions = await client.list_devices()

    if (
        device_descriptions
        and refresh_only_existing
        and (
            existing_device_descriptions := tuple(
                dev_desc
                for dev_desc in list(device_descriptions)
                if dev_desc["ADDRESS"]
                in self.device_descriptions.get_device_descriptions(interface_id=client.interface_id)
            )
        )
    ):
        # Keep only descriptions whose address is already cached for this interface.
        device_descriptions = existing_device_descriptions

    if device_descriptions:
        await self._add_new_devices(
            interface_id=client.interface_id,
            device_descriptions=device_descriptions,
            source=SourceOfDeviceCreation.REFRESH,
        )
|
|
673
|
+
|
|
674
|
+
async def _start_clients(self) -> bool:
    """Start clients.

    Returns False when client creation fails. Otherwise loads the caches,
    creates devices already known from cache, initializes hub and clients,
    and — when no devices exist yet — proactively fetches descriptions.
    """
    if not await self._create_clients():
        return False
    await self._load_caches()
    if new_device_addresses := self._check_for_new_device_addresses():
        await self._create_devices(new_device_addresses=new_device_addresses, source=SourceOfDeviceCreation.CACHE)
    await self._init_hub()
    await self._init_clients()
    # Proactively fetch device descriptions if none were created yet to avoid slow startup
    if not self._devices:
        for client in self._clients.values():
            await self._refresh_device_descriptions_and_create_missing_devices(
                client=client, refresh_only_existing=False
            )
    return True
|
|
690
|
+
|
|
691
|
+
async def _stop_clients(self) -> None:
    """De-initialize and stop every client, then forget all of them."""
    await self._de_init_clients()
    for active_client in self._clients.values():
        _LOGGER.debug("STOP_CLIENTS: Stopping %s", active_client.interface_id)
        await active_client.stop()
    _LOGGER.debug("STOP_CLIENTS: Clearing existing clients.")
    self._clients.clear()
    self._clients_started = False
|
|
700
|
+
|
|
701
|
+
async def _create_clients(self) -> bool:
    """Create clients for the central unit. Start connection checker afterwards.

    Two-pass creation: primary-candidate interfaces first (so that
    self.primary_client is populated), then secondary interfaces, which
    are validated against the primary client's available interfaces.
    Returns True when all enabled interfaces got a client.
    """
    if len(self._clients) > 0:
        _LOGGER.warning(
            "CREATE_CLIENTS: Clients for %s are already created",
            self.name,
        )
        return False
    if len(self._config.enabled_interface_configs) == 0:
        _LOGGER.warning(
            "CREATE_CLIENTS failed: No Interfaces for %s defined",
            self.name,
        )
        return False

    # create primary clients
    for interface_config in self._config.enabled_interface_configs:
        if interface_config.interface in PRIMARY_CLIENT_CANDIDATE_INTERFACES:
            await self._create_client(interface_config=interface_config)

    # create secondary clients
    for interface_config in self._config.enabled_interface_configs:
        if interface_config.interface not in PRIMARY_CLIENT_CANDIDATE_INTERFACES:
            if (
                self.primary_client is not None
                and interface_config.interface not in self.primary_client.system_information.available_interfaces
            ):
                _LOGGER.warning(
                    "CREATE_CLIENTS failed: Interface: %s is not available for the backend %s",
                    interface_config.interface,
                    self.name,
                )
                # Disabling drops it from enabled_interface_configs, so the
                # count check below still matches — TODO confirm disable() semantics.
                interface_config.disable()
                continue
            await self._create_client(interface_config=interface_config)

    if not self.all_clients_active:
        _LOGGER.warning(
            "CREATE_CLIENTS failed: Created %i of %i clients",
            len(self._clients),
            len(self._config.enabled_interface_configs),
        )
        return False

    # NOTE(review): a missing primary client is only warned about; startup
    # still proceeds (returns True) in that case.
    if self.primary_client is None:
        _LOGGER.warning("CREATE_CLIENTS failed: No primary client identified for %s", self.name)
        return True

    _LOGGER.debug("CREATE_CLIENTS successful for %s", self.name)
    return True
|
|
751
|
+
|
|
752
|
+
async def _create_client(self, *, interface_config: hmcl.InterfaceConfig) -> bool:
    """Create a client.

    On success the client is registered under its interface_id and True is
    returned. On a Homematic exception an interface-unavailable event is
    emitted and False is returned. A falsy client result (no exception)
    also falls through to return False.
    """
    try:
        if client := await hmcl.create_client(
            central=self,
            interface_config=interface_config,
        ):
            _LOGGER.debug(
                "CREATE_CLIENT: Adding client %s to %s",
                client.interface_id,
                self.name,
            )
            self._clients[client.interface_id] = client
            return True
    except BaseHomematicException as bhexc:  # pragma: no cover - deterministic simulation of client creation failures would require the full client/proxy stack and network timing; keeping this defensive log-and-state branch untested to avoid brittle CI
        # Tell listeners the interface proxy is unavailable before logging.
        self.emit_interface_event(
            interface_id=interface_config.interface_id,
            interface_event_type=InterfaceEventType.PROXY,
            data={EventKey.AVAILABLE: False},
        )

        _LOGGER.warning(
            "CREATE_CLIENT failed: No connection to interface %s [%s]",
            interface_config.interface_id,
            extract_exc_args(exc=bhexc),
        )
    return False
|
|
779
|
+
|
|
780
|
+
async def _init_clients(self) -> None:
    """Init clients of control unit, and start connection checker.

    Iterates a copy of the client dict because clients whose interface the
    backend does not offer are deleted from self._clients during the loop.
    """
    for client in self._clients.copy().values():
        if client.interface not in self.system_information.available_interfaces:
            _LOGGER.debug(
                "INIT_CLIENTS failed: Interface: %s is not available for the backend %s",
                client.interface,
                self.name,
            )
            # Drop clients for interfaces the backend does not expose.
            del self._clients[client.interface_id]
            continue
        if await client.initialize_proxy() == ProxyInitState.INIT_SUCCESS:
            _LOGGER.debug("INIT_CLIENTS: client %s initialized for %s", client.interface_id, self.name)
|
|
793
|
+
|
|
794
|
+
async def _de_init_clients(self) -> None:
    """De-initialize the RPC proxy of every registered client."""
    for interface_name in list(self._clients):
        proxy_client = self._clients[interface_name]
        if await proxy_client.deinitialize_proxy():
            _LOGGER.debug("DE_INIT_CLIENTS: Proxy de-initialized: %s", interface_name)
|
|
799
|
+
|
|
800
|
+
async def _init_hub(self) -> None:
    """Fetch the initial program and system-variable data for the hub."""
    hub = self._hub
    await hub.fetch_program_data(scheduled=True)
    await hub.fetch_sysvar_data(scheduled=True)
|
|
804
|
+
|
|
805
|
+
@loop_check
def emit_interface_event(
    self,
    *,
    interface_id: str,
    interface_event_type: InterfaceEventType,
    data: dict[str, Any],
) -> None:
    """Emit an event about the interface status.

    Builds an INTERFACE event payload, validates it against
    INTERFACE_EVENT_SCHEMA and forwards it via emit_homematic_callback.
    """
    # Guard against a falsy data argument (normalizes to a fresh dict).
    data = data or {}
    event_data: dict[str, Any] = {
        EventKey.INTERFACE_ID: interface_id,
        EventKey.TYPE: interface_event_type,
        EventKey.DATA: data,
    }

    self.emit_homematic_callback(
        event_type=EventType.INTERFACE,
        event_data=cast(dict[EventKey, Any], INTERFACE_EVENT_SCHEMA(event_data)),
    )
|
|
825
|
+
|
|
826
|
+
async def _identify_ip_addr(self, *, port: int) -> str:
    """Determine the local IP address used to reach the backend host on *port*.

    Retries in a loop until an address is resolved; on an
    AioHomematicException the loop exits with LOCAL_HOST as fallback.
    """
    ip_addr: str | None = None
    while ip_addr is None:
        try:
            ip_addr = await self.looper.async_add_executor_job(
                get_ip_addr, self._config.host, port, name="get_ip_addr"
            )
        except AioHomematicException:
            # Fallback terminates the retry loop.
            ip_addr = LOCAL_HOST
        if ip_addr is None:
            # NOTE(review): the log mentions CONNECTION_CHECKER_INTERVAL but the
            # actual sleep is TIMEOUT / 10 — confirm which value is intended.
            _LOGGER.warning("GET_IP_ADDR: Waiting for %i s,", CONNECTION_CHECKER_INTERVAL)
            await asyncio.sleep(TIMEOUT / 10)
    return ip_addr
|
|
839
|
+
|
|
840
|
+
def _start_scheduler(self) -> None:
    """Start the background scheduler."""
    _LOGGER.debug("START_SCHEDULER: Starting scheduler for %s", self.name)
    self._scheduler.start()
|
|
847
|
+
|
|
848
|
+
def _stop_scheduler(self) -> None:
    """Stop the scheduler."""
    self._scheduler.stop()
    _LOGGER.debug(
        "STOP_SCHEDULER: Stopped scheduler for %s",
        self.name,
    )
|
|
855
|
+
|
|
856
|
+
async def validate_config_and_get_system_information(self) -> SystemInformation:
    """Validate the central configuration.

    Creates a client per enabled interface; the first primary-candidate
    client that yields a serial provides the returned SystemInformation.
    Raises NoClientsException when no interfaces are configured, and
    re-raises any client-creation failure after logging it.
    """
    if len(self._config.enabled_interface_configs) == 0:
        raise NoClientsException("validate_config: No clients defined.")

    system_information = SystemInformation()
    for interface_config in self._config.enabled_interface_configs:
        try:
            client = await hmcl.create_client(central=self, interface_config=interface_config)
        except BaseHomematicException as bhexc:
            _LOGGER.error(
                "VALIDATE_CONFIG_AND_GET_SYSTEM_INFORMATION failed for client %s: %s",
                interface_config.interface,
                extract_exc_args(exc=bhexc),
            )
            raise
        # Only take the system information from the first primary candidate
        # that reports a serial; later clients do not overwrite it.
        if client.interface in PRIMARY_CLIENT_CANDIDATE_INTERFACES and not system_information.serial:
            system_information = client.system_information
    return system_information
|
|
875
|
+
|
|
876
|
+
def get_client(self, *, interface_id: str) -> hmcl.Client:
    """Return the client registered under interface_id; raise when unknown."""
    if self.has_client(interface_id=interface_id):
        return self._clients[interface_id]
    raise AioHomematicException(f"get_client: interface_id {interface_id} does not exist on {self.name}")
|
|
881
|
+
|
|
882
|
+
def get_channel(self, *, channel_address: str) -> Channel | None:
    """Return the Homematic channel for channel_address, or None when its device is unknown."""
    owning_device = self.get_device(address=channel_address)
    if owning_device:
        return owning_device.get_channel(channel_address=channel_address)
    return None
|
|
887
|
+
|
|
888
|
+
def get_device(self, *, address: str) -> Device | None:
    """Return the Homematic device owning *address* (device or channel address), or None."""
    return self._devices.get(get_device_address(address=address))
|
|
892
|
+
|
|
893
|
+
def get_data_point_by_custom_id(self, *, custom_id: str) -> CallbackDataPoint | None:
    """Return the first registered data point carrying *custom_id*, or None."""
    return next(
        (dp for dp in self.get_data_points(registered=True) if dp.custom_id == custom_id),
        None,
    )
|
|
899
|
+
|
|
900
|
+
def get_data_points(
    self,
    *,
    category: DataPointCategory | None = None,
    interface: Interface | None = None,
    exclude_no_create: bool = True,
    registered: bool | None = None,
) -> tuple[CallbackDataPoint, ...]:
    """Collect the data points of every device, optionally limited to one interface."""
    collected: list[CallbackDataPoint] = []
    for dev in self._devices.values():
        if interface and interface != dev.interface:
            continue
        collected.extend(
            dev.get_data_points(category=category, exclude_no_create=exclude_no_create, registered=registered)
        )
    return tuple(collected)
|
|
917
|
+
|
|
918
|
+
def get_readable_generic_data_points(
    self, *, paramset_key: ParamsetKey | None = None, interface: Interface | None = None
) -> tuple[GenericDataPoint, ...]:
    """Return every readable GenericDataPoint, optionally limited to one paramset key."""
    readable: list[GenericDataPoint] = []
    for candidate in self.get_data_points(interface=interface):
        if not isinstance(candidate, GenericDataPoint) or not candidate.is_readable:
            continue
        if (paramset_key and candidate.paramset_key == paramset_key) or paramset_key is None:
            readable.append(candidate)
    return tuple(readable)
|
|
931
|
+
|
|
932
|
+
def _get_primary_client(self) -> hmcl.Client | None:
    """Return the client by interface_id or the first with a virtual remote.

    Returns the first available primary-candidate client. NOTE(review):
    when no candidate matches, the loop variable leaks and the *last
    iterated* client is returned as fallback (None only when no clients
    exist) — confirm this fallback is intended.
    """
    client: hmcl.Client | None = None
    for client in self._clients.values():
        if client.interface in PRIMARY_CLIENT_CANDIDATE_INTERFACES and client.available:
            return client
    return client
|
|
939
|
+
|
|
940
|
+
def get_hub_data_points(
    self, *, category: DataPointCategory | None = None, registered: bool | None = None
) -> tuple[GenericHubDataPoint, ...]:
    """Return program and sysvar hub data points, optionally filtered by category/registration."""
    candidates = self.program_data_points + self.sysvar_data_points
    return tuple(
        hub_dp
        for hub_dp in candidates
        if (category is None or hub_dp.category == category)
        and (registered is None or hub_dp.is_registered == registered)
    )
|
|
949
|
+
|
|
950
|
+
def get_events(
    self, *, event_type: EventType, registered: bool | None = None
) -> tuple[tuple[GenericEvent, ...], ...]:
    """Return all channel event data points.

    One inner tuple per channel. The *registered* filter inspects the first
    event of each channel group, as before.

    Fix: the original loop body ended in a redundant ``continue`` (a no-op
    as the last statement of the loop) — removed.
    """
    hm_channel_events: list[tuple[GenericEvent, ...]] = []
    for device in self.devices:
        for channel_events in device.get_events(event_type=event_type).values():
            if registered is None or (channel_events[0].is_registered == registered):
                hm_channel_events.append(channel_events)
    return tuple(hm_channel_events)
|
|
961
|
+
|
|
962
|
+
def get_virtual_remotes(self) -> tuple[Device, ...]:
    """Get the virtual remote of each client that has one.

    Fix: the original called ``cl.get_virtual_remote()`` twice per client
    (once in the filter, once for the value) and needed a type-ignore for
    the second call. The walrus binding calls it exactly once per client
    and preserves the "skip clients without a virtual remote" behavior.
    """
    return tuple(
        virtual_remote
        for cl in self._clients.values()
        if (virtual_remote := cl.get_virtual_remote()) is not None
    )
|
|
969
|
+
|
|
970
|
+
def has_client(self, *, interface_id: str) -> bool:
    """Return whether a client with *interface_id* is registered on this central."""
    is_known = interface_id in self._clients
    return is_known
|
|
973
|
+
|
|
974
|
+
@property
def all_clients_active(self) -> bool:
    """Return True when every enabled interface config has a created client."""
    created = len(self._clients)
    expected = len(self._config.enabled_interface_configs)
    return created > 0 and created == expected
|
|
979
|
+
|
|
980
|
+
@property
def has_clients(self) -> bool:
    """Return True when at least one client is registered on this central."""
    return bool(self._clients)
|
|
984
|
+
|
|
985
|
+
async def _load_caches(self) -> bool:
    """Load files to store.

    Loads device and paramset description stores first; when either fails,
    all cache files are cleared and False is returned. Device details and
    the data cache are only loaded after the descriptions succeeded.
    """
    if DataOperationResult.LOAD_FAIL in (
        await self._device_descriptions.load(),
        await self._paramset_descriptions.load(),
    ):
        _LOGGER.warning("LOAD_CACHES failed: Unable to load store for %s. Clearing files", self.name)
        await self.clear_files()
        return False
    await self._device_details.load()
    await self._data_cache.load()
    return True
|
|
997
|
+
|
|
998
|
+
async def _create_devices(
    self, *, new_device_addresses: Mapping[str, set[str]], source: SourceOfDeviceCreation
) -> None:
    """Trigger creation of the objects that expose the functionality.

    For each (interface_id, address) pair a Device is constructed, its data
    points/events are created, and its value cache loaded. Failures are
    isolated per device: a broken device is logged and skipped without
    aborting the rest. Emits DEVICES_CREATED when at least one device was
    added.
    """
    if not self._clients:
        raise AioHomematicException(
            f"CREATE_DEVICES failed: No clients initialized. Not starting central {self.name}."
        )
    _LOGGER.debug("CREATE_DEVICES: Starting to create devices for %s", self.name)

    new_devices = set[Device]()

    for interface_id, device_addresses in new_device_addresses.items():
        for device_address in device_addresses:
            # Do we check for duplicates here? For now, we do.
            if device_address in self._devices:
                continue
            device: Device | None = None
            try:
                device = Device(
                    central=self,
                    interface_id=interface_id,
                    device_address=device_address,
                )
            except Exception as exc:
                # Device construction failed; device stays None and the
                # second try-block below is skipped for it.
                _LOGGER.error(
                    "CREATE_DEVICES failed: %s [%s] Unable to create device: %s, %s",
                    type(exc).__name__,
                    extract_exc_args(exc=exc),
                    interface_id,
                    device_address,
                )
            try:
                if device:
                    create_data_points_and_events(device=device)
                    create_custom_data_points(device=device)
                    await device.load_value_cache()
                    new_devices.add(device)
                    self._devices[device_address] = device
            except Exception as exc:
                _LOGGER.error(
                    "CREATE_DEVICES failed: %s [%s] Unable to create data points: %s, %s",
                    type(exc).__name__,
                    extract_exc_args(exc=exc),
                    interface_id,
                    device_address,
                )
    _LOGGER.debug("CREATE_DEVICES: Finished creating devices for %s", self.name)

    if new_devices:
        # Notify listeners once with everything that was newly created.
        new_dps = _get_new_data_points(new_devices=new_devices)
        new_channel_events = _get_new_channel_events(new_devices=new_devices)
        self.emit_backend_system_callback(
            system_event=BackendSystemEvent.DEVICES_CREATED,
            new_data_points=new_dps,
            new_channel_events=new_channel_events,
            source=source,
        )
|
|
1056
|
+
|
|
1057
|
+
async def delete_device(self, *, interface_id: str, device_address: str) -> None:
    """Delete one device (and all of its channels) from the central."""
    _LOGGER.debug(
        "DELETE_DEVICE: interface_id = %s, device_address = %s",
        interface_id,
        device_address,
    )

    target = self._devices.get(device_address)
    if target is None:
        return

    addresses = [device_address, *list(target.channels.keys())]
    await self.delete_devices(interface_id=interface_id, addresses=addresses)
|
|
1069
|
+
|
|
1070
|
+
@callback_backend_system(system_event=BackendSystemEvent.DELETE_DEVICES)
async def delete_devices(self, *, interface_id: str, addresses: tuple[str, ...]) -> None:
    """Delete devices from central.

    Removes every known device among *addresses* and persists the updated
    description files once afterwards. NOTE(review): callers (e.g.
    delete_device) pass a list despite the tuple annotation — the body only
    iterates, so both work; confirm intended type.
    """
    _LOGGER.debug(
        "DELETE_DEVICES: interface_id = %s, addresses = %s",
        interface_id,
        str(addresses),
    )
    for address in addresses:
        if device := self._devices.get(address):
            self.remove_device(device=device)
    await self.save_files(save_device_descriptions=True, save_paramset_descriptions=True)
|
|
1082
|
+
|
|
1083
|
+
@callback_backend_system(system_event=BackendSystemEvent.NEW_DEVICES)
async def add_new_devices(self, *, interface_id: str, device_descriptions: tuple[DeviceDescription, ...]) -> None:
    """Add new devices to central unit, tagging whether this is initial population or a later addition."""
    if self._device_descriptions.has_device_descriptions(interface_id=interface_id):
        source = SourceOfDeviceCreation.NEW
    else:
        source = SourceOfDeviceCreation.INIT
    await self._add_new_devices(interface_id=interface_id, device_descriptions=device_descriptions, source=source)
|
|
1092
|
+
|
|
1093
|
+
async def add_new_device_manually(self, *, interface_id: str, address: str) -> None:
    """Add new devices manually triggered to central unit.

    Looks up the client for interface_id, fetches all descriptions for
    *address*, and forwards them with source MANUAL. Logs and returns on a
    missing client or empty description result.
    """
    if interface_id not in self._clients:
        _LOGGER.warning(
            "ADD_NEW_DEVICES_MANUALLY failed: Missing client for interface_id %s",
            interface_id,
        )
        return
    client = self._clients[interface_id]
    if not (device_descriptions := await client.get_all_device_descriptions(device_address=address)):
        _LOGGER.warning(
            "ADD_NEW_DEVICES_MANUALLY failed: No device description found for address %s on interface_id %s",
            address,
            interface_id,
        )
        return

    await self._add_new_devices(
        interface_id=interface_id,
        device_descriptions=device_descriptions,
        source=SourceOfDeviceCreation.MANUAL,
    )
|
|
1115
|
+
|
|
1116
|
+
@inspector(measure_performance=True)
async def _add_new_devices(
    self, *, interface_id: str, device_descriptions: tuple[DeviceDescription, ...], source: SourceOfDeviceCreation
) -> None:
    """Add new devices to central unit.

    Serialized via _device_add_semaphore. Filters out already-known
    descriptions, optionally defers creation (delay_new_device_creation for
    source NEW), caches descriptions and paramsets per device, persists the
    files, and finally creates Device objects for genuinely new addresses.
    """
    if not device_descriptions:
        _LOGGER.debug(
            "ADD_NEW_DEVICES: Nothing to add for interface_id %s",
            interface_id,
        )
        return

    _LOGGER.debug(
        "ADD_NEW_DEVICES: interface_id = %s, device_descriptions = %s",
        interface_id,
        len(device_descriptions),
    )

    if interface_id not in self._clients:
        _LOGGER.warning(
            "ADD_NEW_DEVICES failed: Missing client for interface_id %s",
            interface_id,
        )
        return

    # Serialize concurrent NEW_DEVICES callbacks so cache updates don't interleave.
    async with self._device_add_semaphore:
        if not (
            new_device_descriptions := self._identify_new_device_descriptions(
                device_descriptions=device_descriptions, interface_id=interface_id
            )
        ):
            _LOGGER.debug("ADD_NEW_DEVICES: Nothing to add for interface_id %s", interface_id)
            return

        # Here we block the automatic creation of new devices, if required
        if (
            self._config.delay_new_device_creation
            and source == SourceOfDeviceCreation.NEW
            and (
                new_addresses := extract_device_addresses_from_device_descriptions(
                    device_descriptions=new_device_descriptions
                )
            )
        ):
            # Only announce the delayed addresses; actual creation happens later.
            self.emit_backend_system_callback(
                system_event=BackendSystemEvent.DEVICES_DELAYED,
                new_addresses=new_addresses,
                interface_id=interface_id,
                source=source,
            )
            return

        client = self._clients[interface_id]
        save_descriptions = False
        for dev_desc in new_device_descriptions:
            try:
                self._device_descriptions.add_device(interface_id=interface_id, device_description=dev_desc)
                await client.fetch_paramset_descriptions(device_description=dev_desc)
                save_descriptions = True
            except Exception as exc:  # pragma: no cover
                # A single failure suppresses saving entirely (state may be partial).
                save_descriptions = False
                _LOGGER.error(
                    "UPDATE_CACHES_WITH_NEW_DEVICES failed: %s [%s]",
                    type(exc).__name__,
                    extract_exc_args(exc=exc),
                )

        await self.save_files(
            save_device_descriptions=save_descriptions,
            save_paramset_descriptions=save_descriptions,
        )

        if new_device_addresses := self._check_for_new_device_addresses(interface_id=interface_id):
            await self._device_details.load()
            await self._data_cache.load(interface=client.interface)
            await self._create_devices(new_device_addresses=new_device_addresses, source=source)
|
|
1192
|
+
|
|
1193
|
+
def _identify_new_device_descriptions(
    self, *, device_descriptions: tuple[DeviceDescription, ...], interface_id: str | None = None
) -> tuple[DeviceDescription, ...]:
    """Return the descriptions whose effective (parent) address is not yet known on any interface."""
    known_addresses = self._device_descriptions.get_addresses(interface_id=interface_id)
    fresh: list[DeviceDescription] = []
    for dev_desc in device_descriptions:
        # Channels carry a PARENT; compare against the owning device address.
        effective_address = dev_desc.get("PARENT") or dev_desc["ADDRESS"]
        if effective_address not in known_addresses:
            fresh.append(dev_desc)
    return tuple(fresh)
|
|
1204
|
+
|
|
1205
|
+
def _check_for_new_device_addresses(self, *, interface_id: str | None = None) -> Mapping[str, set[str]]:
    """Check if there are new devices that need to be created.

    Compares cached device-description addresses per interface against the
    devices already instantiated, skipping interfaces that have no paramset
    descriptions yet. Returns {interface_id: {new addresses}}.
    """
    new_device_addresses: dict[str, set[str]] = {}

    # Cache existing device addresses once to avoid repeated mapping lookups
    existing_addresses = set(self._devices.keys())

    def _check_for_new_device_addresses_helper(*, iid: str) -> None:
        """Check if there are new devices that need to be created."""
        if not self._paramset_descriptions.has_interface_id(interface_id=iid):
            # Without paramsets the devices can't be built yet; skip the interface.
            _LOGGER.debug(
                "CHECK_FOR_NEW_DEVICE_ADDRESSES: Skipping interface %s, missing paramsets",
                iid,
            )
            return
        # Build the set locally and assign only if non-empty to avoid add-then-delete pattern
        # Use set difference for speed on large collections
        addresses = set(self._device_descriptions.get_addresses(interface_id=iid))
        # get_addresses returns an iterable (likely tuple); convert to set once for efficient diff
        if new_set := addresses - existing_addresses:
            new_device_addresses[iid] = new_set

    if interface_id:
        _check_for_new_device_addresses_helper(iid=interface_id)
    else:
        for iid in self.interface_ids:
            _check_for_new_device_addresses_helper(iid=iid)

    if _LOGGER.isEnabledFor(level=DEBUG):
        count = sum(len(item) for item in new_device_addresses.values())
        _LOGGER.debug(
            "CHECK_FOR_NEW_DEVICE_ADDRESSES: %s: %i.",
            "Found new device addresses" if new_device_addresses else "Did not find any new device addresses",
            count,
        )

    return new_device_addresses
|
|
1242
|
+
|
|
1243
|
+
@callback_event
async def data_point_event(self, *, interface_id: str, channel_address: str, parameter: str, value: Any) -> None:
    """If a device emits some sort event, we will handle it here.

    Marks the interface as alive, short-circuits PONG responses into the
    ping-pong cache, and otherwise dispatches the value to every callback
    subscribed under the matching DataPointKey. RuntimeErrors from
    callbacks are only logged at debug level; other exceptions are warned.
    """
    _LOGGER_EVENT.debug(
        "EVENT: interface_id = %s, channel_address = %s, parameter = %s, value = %s",
        interface_id,
        channel_address,
        parameter,
        str(value),
    )
    if not self.has_client(interface_id=interface_id):
        return

    self.set_last_event_seen_for_interface(interface_id=interface_id)
    # No need to check the response of a XmlRPC-PING
    if parameter == Parameter.PONG:
        # PONG payload format: "<interface_id>#<timestamp>".
        if "#" in value:
            v_interface_id, v_timestamp = value.split("#")
            if (
                v_interface_id == interface_id
                and (client := self.get_client(interface_id=interface_id))
                and client.supports_ping_pong
            ):
                client.ping_pong_cache.handle_received_pong(
                    pong_ts=datetime.strptime(v_timestamp, DATETIME_FORMAT_MILLIS)
                )
        return

    dpk = DataPointKey(
        interface_id=interface_id,
        channel_address=channel_address,
        paramset_key=ParamsetKey.VALUES,
        parameter=parameter,
    )

    if dpk in self._data_point_key_event_subscriptions:
        try:
            # One timestamp shared by all callbacks of this event.
            received_at = datetime.now()
            for callback_handler in self._data_point_key_event_subscriptions[dpk]:
                if callable(callback_handler):
                    await callback_handler(value=value, received_at=received_at)
        except RuntimeError as rterr:
            _LOGGER_EVENT.debug(
                "EVENT: RuntimeError [%s]. Failed to call callback for: %s, %s, %s",
                extract_exc_args(exc=rterr),
                interface_id,
                channel_address,
                parameter,
            )
        except Exception as exc:
            _LOGGER_EVENT.warning(
                "EVENT failed: Unable to call callback for: %s, %s, %s, %s",
                interface_id,
                channel_address,
                parameter,
                extract_exc_args(exc=exc),
            )
|
|
1300
|
+
|
|
1301
|
+
def data_point_path_event(self, *, state_path: str, value: str) -> None:
    """Dispatch a path-based event to the matching data-point subscription as an async task."""
    _LOGGER_EVENT.debug(
        "DATA_POINT_PATH_EVENT: topic = %s, payload = %s",
        state_path,
        value,
    )

    dpk = self._data_point_path_event_subscriptions.get(state_path)
    if dpk is None:
        return
    self._looper.create_task(
        target=self.data_point_event(
            interface_id=dpk.interface_id,
            channel_address=dpk.channel_address,
            parameter=dpk.parameter,
            value=value,
        ),
        name=f"device-data-point-event-{dpk.interface_id}-{dpk.channel_address}-{dpk.parameter}",
    )
|
|
1319
|
+
|
|
1320
|
+
def sysvar_data_point_path_event(self, *, state_path: str, value: str) -> None:
    """If a device emits some sort event, we will handle it here.

    Looks up the sysvar callback registered for state_path and schedules
    it on the looper. NOTE(review): the task target is a lambda wrapping
    the handler — confirm the looper accepts a plain callable (vs. a
    coroutine) as target here.
    """
    _LOGGER_EVENT.debug(
        "SYSVAR_DATA_POINT_PATH_EVENT: topic = %s, payload = %s",
        state_path,
        value,
    )

    if state_path in self._sysvar_data_point_event_subscriptions:
        try:
            callback_handler = self._sysvar_data_point_event_subscriptions[state_path]
            if callable(callback_handler):
                # Capture the receive time before scheduling, not when the task runs.
                received_at = datetime.now()
                self._looper.create_task(
                    target=lambda: callback_handler(value=value, received_at=received_at),
                    name=f"sysvar-data-point-event-{state_path}",
                )
        except RuntimeError as rterr:
            _LOGGER_EVENT.debug(
                "EVENT: RuntimeError [%s]. Failed to call callback for: %s",
                extract_exc_args(exc=rterr),
                state_path,
            )
        except Exception as exc:  # pragma: no cover
            _LOGGER_EVENT.warning(
                "EVENT failed: Unable to call callback for: %s, %s",
                state_path,
                extract_exc_args(exc=exc),
            )
|
|
1349
|
+
|
|
1350
|
+
@callback_backend_system(system_event=BackendSystemEvent.LIST_DEVICES)
def list_devices(self, *, interface_id: str) -> list[DeviceDescription]:
    """Answer the backend's LIST_DEVICES request with the already known device descriptions."""
    descriptions = self._device_descriptions.get_raw_device_descriptions(interface_id=interface_id)
    _LOGGER.debug("LIST_DEVICES: interface_id = %s, channel_count = %i", interface_id, len(descriptions))
    return descriptions
|
|
1356
|
+
|
|
1357
|
+
def add_event_subscription(self, *, data_point: BaseParameterDataPoint) -> None:
    """Register a data point's event handler with the central event subscriptions."""
    # Guard clauses: only generic data points / events that can deliver values.
    if not isinstance(data_point, GenericDataPoint | GenericEvent):
        return
    if not (data_point.is_readable or data_point.supports_events):
        return
    # setdefault folds the "create list on first use" branch into one call.
    self._data_point_key_event_subscriptions.setdefault(data_point.dpk, []).append(data_point.event)
    # Clients without an RPC callback channel need path-based subscriptions instead.
    if (
        not data_point.channel.device.client.supports_rpc_callback
        and data_point.state_path not in self._data_point_path_event_subscriptions
    ):
        self._data_point_path_event_subscriptions[data_point.state_path] = data_point.dpk
|
|
1370
|
+
|
|
1371
|
+
@inspector
async def create_central_links(self) -> None:
    """Create central links on every device to support press events on channels with click events."""
    for dev in self.devices:
        await dev.create_central_links()
|
|
1376
|
+
|
|
1377
|
+
@inspector
async def remove_central_links(self) -> None:
    """Remove the central links from every device."""
    for dev in self.devices:
        await dev.remove_central_links()
|
|
1382
|
+
|
|
1383
|
+
def remove_device(self, *, device: Device) -> None:
    """Remove a device and its cached data from the central collections."""
    if device.address not in self._devices:
        _LOGGER.debug(
            "REMOVE_DEVICE: device %s not registered in central",
            device.address,
        )
        return
    device.remove()

    # Drop cached descriptions and details before forgetting the device itself.
    self._device_descriptions.remove_device(device=device)
    self._paramset_descriptions.remove_device(device=device)
    self._device_details.remove_device(device=device)
    self._devices.pop(device.address)
|
|
1397
|
+
|
|
1398
|
+
def remove_event_subscription(self, *, data_point: BaseParameterDataPoint) -> None:
    """Drop a data point's event subscriptions from the central collections."""
    if not (isinstance(data_point, GenericDataPoint | GenericEvent) and data_point.supports_events):
        return
    # pop(..., None) is a no-op when the key was never subscribed.
    self._data_point_key_event_subscriptions.pop(data_point.dpk, None)
    self._data_point_path_event_subscriptions.pop(data_point.state_path, None)
|
|
1405
|
+
|
|
1406
|
+
def get_last_event_seen_for_interface(self, *, interface_id: str) -> datetime | None:
    """Return the timestamp of the last event seen for the given interface, or None."""
    try:
        return self._last_event_seen_for_interface[interface_id]
    except KeyError:
        return None
|
|
1409
|
+
|
|
1410
|
+
def set_last_event_seen_for_interface(self, *, interface_id: str) -> None:
    """Record the current time as the moment the given interface last delivered an event."""
    self._last_event_seen_for_interface[interface_id] = datetime.now()
|
|
1413
|
+
|
|
1414
|
+
async def execute_program(self, *, pid: str) -> bool:
    """Execute a program on the backend; return False when no primary client is available."""
    client = self.primary_client
    if not client:
        return False
    return await client.execute_program(pid=pid)
|
|
1419
|
+
|
|
1420
|
+
async def set_program_state(self, *, pid: str, state: bool) -> bool:
    """
    Set the active state of a program on the backend.

    Returns True on success, False when no primary client is available.
    (Docstring fixed: it was copy-pasted from execute_program.)
    """
    if client := self.primary_client:
        return await client.set_program_state(pid=pid, state=state)
    return False
|
|
1425
|
+
|
|
1426
|
+
@inspector(re_raise=False)
async def fetch_sysvar_data(self, *, scheduled: bool) -> None:
    """Delegate fetching of system variable data to the hub."""
    await self._hub.fetch_sysvar_data(scheduled=scheduled)
|
|
1430
|
+
|
|
1431
|
+
@inspector(re_raise=False)
async def fetch_program_data(self, *, scheduled: bool) -> None:
    """Delegate fetching of program data to the hub."""
    await self._hub.fetch_program_data(scheduled=scheduled)
|
|
1435
|
+
|
|
1436
|
+
@inspector(measure_performance=True)
async def load_and_refresh_data_point_data(
    self,
    *,
    interface: Interface,
    paramset_key: ParamsetKey | None = None,
    direct_call: bool = False,
) -> None:
    """Load cached values for an interface and push refreshed values to its data points."""
    load_required = paramset_key != ParamsetKey.MASTER
    if load_required:
        # MASTER paramsets are not served by the central value cache, so no load is needed for them.
        await self._data_cache.load(interface=interface)
    await self._data_cache.refresh_data_point_data(
        paramset_key=paramset_key, interface=interface, direct_call=direct_call
    )
|
|
1450
|
+
|
|
1451
|
+
async def get_system_variable(self, *, legacy_name: str) -> Any | None:
    """Fetch a system variable value from the backend; None without a primary client."""
    client = self.primary_client
    if not client:
        return None
    return await client.get_system_variable(name=legacy_name)
|
|
1456
|
+
|
|
1457
|
+
async def set_system_variable(self, *, legacy_name: str, value: Any) -> None:
    """Write a value to the named system variable on the backend, logging when it is unknown."""
    dp = self.get_sysvar_data_point(legacy_name=legacy_name)
    if dp:
        await dp.send_variable(value=value)
        return
    _LOGGER.warning("Variable %s not found on %s", legacy_name, self.name)
|
|
1463
|
+
|
|
1464
|
+
def get_parameters(
    self,
    *,
    paramset_key: ParamsetKey,
    operations: tuple[Operations, ...],
    full_format: bool = False,
    un_ignore_candidates_only: bool = False,
    use_channel_wildcard: bool = False,
) -> tuple[str, ...]:
    """
    Return all parameters from VALUES paramset.

    Performance optimized to minimize repeated lookups and computations
    when iterating over all channels and parameters.

    Args:
        paramset_key: which paramset (VALUES/MASTER) to scan.
        operations: every operation flag in this tuple must be supported
            by a parameter for it to be included (bitmask AND check).
        full_format: emit "parameter:paramset_key@model:channel" strings
            instead of plain parameter names.
        un_ignore_candidates_only: skip parameters that are on the static
            ignore list or already enabled/un-ignored data points.
        use_channel_wildcard: in full format, replace the channel number
            with the wildcard marker.

    Returns:
        A tuple of unique parameter strings (order unspecified — built from a set).
    """
    parameters: set[str] = set()

    # Precompute operations mask to avoid repeated checks in the inner loop
    op_mask: int = 0
    for op in operations:
        op_mask |= int(op)

    raw_psd = self._paramset_descriptions.raw_paramset_descriptions
    ignore_set = IGNORE_FOR_UN_IGNORE_PARAMETERS

    # Prepare optional helpers only if needed
    get_model = self._device_descriptions.get_model if full_format else None
    model_cache: dict[str, str | None] = {}
    channel_no_cache: dict[str, int | None] = {}

    for channels in raw_psd.values():
        for channel_address, channel_paramsets in channels.items():
            # Resolve model lazily and cache per device address when full_format is requested
            model: str | None = None
            if get_model is not None:
                dev_addr = get_device_address(address=channel_address)
                # NOTE(review): a device whose model resolves to None is re-queried for
                # every one of its channels — the walrus check cannot distinguish
                # "missing" from "cached None". Confirm get_model is cheap or never None.
                if (model := model_cache.get(dev_addr)) is None:
                    model = get_model(device_address=dev_addr)
                    model_cache[dev_addr] = model

            if (paramset := channel_paramsets.get(paramset_key)) is None:
                continue

            for parameter, parameter_data in paramset.items():
                # Fast bitmask check: ensure all requested ops are present
                if (int(parameter_data["OPERATIONS"]) & op_mask) != op_mask:
                    continue

                if un_ignore_candidates_only:
                    # Cheap check first to avoid expensive dp lookup when possible
                    if parameter in ignore_set:
                        continue
                    dp = self.get_generic_data_point(
                        channel_address=channel_address,
                        parameter=parameter,
                        paramset_key=paramset_key,
                    )
                    # Already-enabled, not-un-ignored data points are not candidates.
                    if dp and dp.enabled_default and not dp.is_un_ignored:
                        continue

                if not full_format:
                    parameters.add(parameter)
                    continue

                # Channel representation: wildcard, cached number, or freshly resolved number.
                if use_channel_wildcard:
                    channel_repr: int | str | None = UN_IGNORE_WILDCARD
                elif channel_address in channel_no_cache:
                    channel_repr = channel_no_cache[channel_address]
                else:
                    channel_repr = get_channel_no(address=channel_address)
                    channel_no_cache[channel_address] = channel_repr

                # Build the full parameter string
                if channel_repr is None:
                    # Device-level parameter: address has no channel number, trailing colon kept.
                    parameters.add(f"{parameter}:{paramset_key}@{model}:")
                else:
                    parameters.add(f"{parameter}:{paramset_key}@{model}:{channel_repr}")

    return tuple(parameters)
|
|
1543
|
+
|
|
1544
|
+
def _get_virtual_remote(self, *, device_address: str) -> Device | None:
    """Return the client-provided virtual remote matching the given device address, if any."""
    for cl in self._clients.values():
        remote = cl.get_virtual_remote()
        if remote and remote.address == device_address:
            return remote
    return None
|
|
1551
|
+
|
|
1552
|
+
def get_generic_data_point(
    self, *, channel_address: str, parameter: str, paramset_key: ParamsetKey | None = None
) -> GenericDataPoint | None:
    """Look up a generic data point by channel address and parameter; None when unknown."""
    device = self.get_device(address=channel_address)
    if not device:
        return None
    return device.get_generic_data_point(
        channel_address=channel_address, parameter=parameter, paramset_key=paramset_key
    )
|
|
1561
|
+
|
|
1562
|
+
def get_event(self, *, channel_address: str, parameter: str) -> GenericEvent | None:
    """Return the generic event for the given channel address and parameter, if its device exists."""
    device = self.get_device(address=channel_address)
    if not device:
        return None
    return device.get_generic_event(channel_address=channel_address, parameter=parameter)
|
|
1567
|
+
|
|
1568
|
+
def get_custom_data_point(self, *, address: str, channel_no: int) -> CustomDataPoint | None:
    """Return the custom data point for a device address and channel number, if its device exists."""
    device = self.get_device(address=address)
    if not device:
        return None
    return device.get_custom_data_point(channel_no=channel_no)
|
|
1573
|
+
|
|
1574
|
+
def get_sysvar_data_point(
    self, *, vid: str | None = None, legacy_name: str | None = None
) -> GenericSysvarDataPoint | None:
    """Find a sysvar data point by id first, then by its legacy name."""
    if vid:
        by_id = self._sysvar_data_points.get(vid)
        if by_id:
            return by_id
    if legacy_name:
        # Linear scan over all sysvars; legacy names are not indexed.
        return next(
            (sv for sv in self._sysvar_data_points.values() if sv.legacy_name == legacy_name),
            None,
        )
    return None
|
|
1585
|
+
|
|
1586
|
+
def get_program_data_point(self, *, pid: str | None = None, legacy_name: str | None = None) -> ProgramDpType | None:
    """Find a program data point by id first, then by the legacy name of its button or switch."""
    if pid:
        by_id = self._program_data_points.get(pid)
        if by_id:
            return by_id
    if legacy_name:
        for candidate in self._program_data_points.values():
            if legacy_name in (candidate.button.legacy_name, candidate.switch.legacy_name):
                return candidate
    return None
|
|
1595
|
+
|
|
1596
|
+
def get_data_point_path(self) -> tuple[str, ...]:
    """Return every state path that currently has a data point path subscription."""
    return tuple(self._data_point_path_event_subscriptions.keys())
|
|
1599
|
+
|
|
1600
|
+
def get_sysvar_data_point_path(self) -> tuple[str, ...]:
    """Return every state path that currently has a sysvar event subscription."""
    return tuple(self._sysvar_data_point_event_subscriptions.keys())
|
|
1603
|
+
|
|
1604
|
+
def get_un_ignore_candidates(self, *, include_master: bool = False) -> list[str]:
    """Collect all parameters that are candidates for the un_ignore list."""
    read_event_ops = (Operations.READ, Operations.EVENT)
    # Merge three VALUES views into one sorted list:
    # simple names, wildcard full-format names, and per-channel full-format names.
    values_candidates = (
        self.get_parameters(
            paramset_key=ParamsetKey.VALUES,
            operations=read_event_ops,
            un_ignore_candidates_only=True,
        )
        + self.get_parameters(
            paramset_key=ParamsetKey.VALUES,
            operations=read_event_ops,
            full_format=True,
            un_ignore_candidates_only=True,
            use_channel_wildcard=True,
        )
        + self.get_parameters(
            paramset_key=ParamsetKey.VALUES,
            operations=read_event_ops,
            full_format=True,
            un_ignore_candidates_only=True,
        )
    )
    candidates = sorted(values_candidates)
    if include_master:
        # MASTER parameters are appended as their own sorted group.
        master_candidates = self.get_parameters(
            paramset_key=ParamsetKey.MASTER,
            operations=(Operations.READ,),
            full_format=True,
            un_ignore_candidates_only=True,
        )
        candidates.extend(sorted(master_candidates))
    return candidates
|
|
1640
|
+
|
|
1641
|
+
async def clear_files(self) -> None:
    """Remove all stored files and flush the in-memory caches."""
    # Persistent stores first ...
    await self._device_descriptions.clear()
    await self._paramset_descriptions.clear()
    await self._recorder.clear()
    # ... then the purely in-memory caches.
    self._device_details.clear()
    self._data_cache.clear()
|
|
1648
|
+
|
|
1649
|
+
def register_homematic_callback(self, *, cb: Callable) -> CALLBACK_TYPE:
    """Register an ha_event callback in central; return an unregister handle or None."""
    # Reject non-callables and duplicates up front.
    if not callable(cb) or cb in self._homematic_callbacks:
        return None
    self._homematic_callbacks.add(cb)
    return partial(self._unregister_homematic_callback, cb=cb)
|
|
1655
|
+
|
|
1656
|
+
def _unregister_homematic_callback(self, *, cb: Callable) -> None:
    """Unregister an ha_event callback from central."""
    # Fixed docstring typo ("RUn register"); discard() is a no-op when cb
    # was never registered, replacing the membership-check-then-remove pair.
    self._homematic_callbacks.discard(cb)
|
|
1660
|
+
|
|
1661
|
+
@loop_check
def emit_homematic_callback(self, *, event_type: EventType, event_data: dict[EventKey, str]) -> None:
    """
    Invoke every registered homematic callback with the given event.

    # Events like INTERFACE, KEYPRESS, ...
    """
    for handler in self._homematic_callbacks:
        try:
            handler(event_type=event_type, event_data=event_data)
        except Exception as exc:
            _LOGGER.error(
                "EMIT_HOMEMATIC_CALLBACK: Unable to call handler: %s",
                extract_exc_args(exc=exc),
            )
|
|
1676
|
+
|
|
1677
|
+
def register_backend_parameter_callback(self, *, cb: Callable) -> CALLBACK_TYPE:
    """Register a backend_parameter callback in central; return an unregister handle or None."""
    if not callable(cb) or cb in self._backend_parameter_callbacks:
        return None
    self._backend_parameter_callbacks.add(cb)
    return partial(self._unregister_backend_parameter_callback, cb=cb)
|
|
1683
|
+
|
|
1684
|
+
def _unregister_backend_parameter_callback(self, *, cb: Callable) -> None:
    """Unregister a backend_parameter callback from central."""
    # discard() silently ignores callbacks that were never registered.
    self._backend_parameter_callbacks.discard(cb)
|
|
1688
|
+
|
|
1689
|
+
@loop_check
def emit_backend_parameter_callback(
    self, *, interface_id: str, channel_address: str, parameter: str, value: Any
) -> None:
    """
    Re-emit a backend parameter update to all registered backend_parameter callbacks.
    """
    for handler in self._backend_parameter_callbacks:
        try:
            handler(
                interface_id=interface_id, channel_address=channel_address, parameter=parameter, value=value
            )
        except Exception as exc:
            _LOGGER.error(
                "EMIT_BACKEND_PARAMETER_CALLBACK: Unable to call handler: %s",
                extract_exc_args(exc=exc),
            )
|
|
1708
|
+
|
|
1709
|
+
def register_backend_system_callback(self, *, cb: Callable) -> CALLBACK_TYPE:
    """
    Register a system_event callback in central.

    Returns a handle that unregisters the callback, or None when cb is not
    callable or is already registered.
    """
    # BUGFIX: the duplicate check must consult the system-callback set;
    # the previous code checked self._backend_parameter_callbacks, which
    # allowed the same system callback to be registered repeatedly.
    if callable(cb) and cb not in self._backend_system_callbacks:
        self._backend_system_callbacks.add(cb)
        return partial(self._unregister_backend_system_callback, cb=cb)
    return None
|
|
1715
|
+
|
|
1716
|
+
def _unregister_backend_system_callback(self, *, cb: Callable) -> None:
    """Unregister a system_event callback from central."""
    # discard() silently ignores callbacks that were never registered.
    self._backend_system_callbacks.discard(cb)
|
|
1720
|
+
|
|
1721
|
+
@loop_check
def emit_backend_system_callback(self, *, system_event: BackendSystemEvent, **kwargs: Any) -> None:
    """
    Forward a backend system event to all registered system_event callbacks.

    e.g. DEVICES_CREATED, HUB_REFRESHED
    """
    for handler in self._backend_system_callbacks:
        try:
            handler(system_event=system_event, **kwargs)
        except Exception as exc:
            _LOGGER.error(
                "EMIT_BACKEND_SYSTEM_CALLBACK: Unable to call handler: %s",
                extract_exc_args(exc=exc),
            )
|
|
1736
|
+
|
|
1737
|
+
def __str__(self) -> str:
    """Return a short human-readable identification of this central."""
    return "central: " + self.name
|
|
1740
|
+
|
|
1741
|
+
|
|
1742
|
+
class _Scheduler(threading.Thread):
    """Periodically check connection to the backend, and load data when required."""

    def __init__(self, *, central: CentralUnit) -> None:
        """Init the connection checker."""
        threading.Thread.__init__(self, name=f"ConnectionChecker for {central.name}")
        self._central: Final = central
        # Subscribe to backend system events so we learn when device creation finishes.
        self._unregister_callback = self._central.register_backend_system_callback(cb=self._backend_system_callback)
        self._active = True
        self._devices_created = False
        # Each job pairs a coroutine with its run interval; intervals come from
        # constants or the central's config.
        self._scheduler_jobs = [
            _SchedulerJob(task=self._check_connection, run_interval=CONNECTION_CHECKER_INTERVAL),
            _SchedulerJob(
                task=self._refresh_client_data,
                run_interval=self._central.config.periodic_refresh_interval,
            ),
            _SchedulerJob(
                task=self._refresh_program_data,
                run_interval=self._central.config.sys_scan_interval,
            ),
            _SchedulerJob(task=self._refresh_sysvar_data, run_interval=self._central.config.sys_scan_interval),
            _SchedulerJob(
                task=self._fetch_device_firmware_update_data,
                run_interval=DEVICE_FIRMWARE_CHECK_INTERVAL,
            ),
            _SchedulerJob(
                task=self._fetch_device_firmware_update_data_in_delivery,
                run_interval=DEVICE_FIRMWARE_DELIVERING_CHECK_INTERVAL,
            ),
            _SchedulerJob(
                task=self._fetch_device_firmware_update_data_in_update,
                run_interval=DEVICE_FIRMWARE_UPDATING_CHECK_INTERVAL,
            ),
        ]

    def _backend_system_callback(self, *, system_event: BackendSystemEvent, **kwargs: Any) -> None:
        """Handle event of new device creation, to delay the start of the sysvar scan."""
        if system_event == BackendSystemEvent.DEVICES_CREATED:
            self._devices_created = True

    def run(self) -> None:
        """Run the scheduler thread."""
        # The thread body only hands the scheduling coroutine to the central's
        # event loop; the actual work runs on the loop, not on this thread.
        _LOGGER.debug(
            "run: scheduler for %s",
            self._central.name,
        )

        self._central.looper.create_task(
            target=self._run_scheduler_tasks(),
            name="run_scheduler_tasks",
        )

    def stop(self) -> None:
        """To stop the ConnectionChecker."""
        if self._unregister_callback is not None:
            self._unregister_callback()
        # The scheduling loop polls this flag and exits on the next iteration.
        self._active = False

    async def _run_scheduler_tasks(self) -> None:
        """Run all tasks."""
        while self._active:
            # Do nothing until the central is fully started.
            if self._central.state != CentralUnitState.RUNNING:
                _LOGGER.debug("SCHEDULER: Waiting till central %s is started", self._central.name)
                await asyncio.sleep(SCHEDULER_NOT_STARTED_SLEEP)
                continue

            any_executed = False
            for job in self._scheduler_jobs:
                if not self._active or not job.ready:
                    continue
                await job.run()
                job.schedule_next_execution()
                any_executed = True

            if not self._active:
                break  # type: ignore[unreachable]

            # If no job was executed this cycle, we can sleep until the next job is due
            if not any_executed:
                now = datetime.now()
                try:
                    # min() raises ValueError on an empty job list — handled below.
                    next_due = min(job.next_run for job in self._scheduler_jobs)
                    # Sleep until the next task should run, but cap to 1s to remain responsive
                    delay = max(0.0, (next_due - now).total_seconds())
                    await asyncio.sleep(min(1.0, delay))
                except ValueError:
                    # No jobs configured; fallback to default loop sleep
                    await asyncio.sleep(SCHEDULER_LOOP_SLEEP)
            else:
                # When work was done, yield briefly to the loop
                await asyncio.sleep(SCHEDULER_LOOP_SLEEP)

    async def _check_connection(self) -> None:
        """Check connection to backend."""
        _LOGGER.debug("CHECK_CONNECTION: Checking connection to server %s", self._central.name)
        try:
            if not self._central.all_clients_active:
                _LOGGER.warning(
                    "CHECK_CONNECTION failed: No clients exist. Trying to create clients for server %s",
                    self._central.name,
                )
                await self._central.restart_clients()
            else:
                reconnects: list[Any] = []
                reloads: list[Any] = []
                for interface_id in self._central.interface_ids:
                    # check:
                    # - client is available
                    # - client is connected
                    # - interface callback is alive
                    client = self._central.get_client(interface_id=interface_id)
                    if client.available is False or not await client.is_connected() or not client.is_callback_alive():
                        # Collect coroutines; they are awaited together below.
                        reconnects.append(client.reconnect())
                        reloads.append(self._central.load_and_refresh_data_point_data(interface=client.interface))
                if reconnects:
                    await asyncio.gather(*reconnects)
                    # Reload data only when the central came back after reconnecting.
                    if self._central.available:
                        await asyncio.gather(*reloads)
        except NoConnectionException as nex:
            _LOGGER.error("CHECK_CONNECTION failed: no connection: %s", extract_exc_args(exc=nex))
        except Exception as exc:
            _LOGGER.error(
                "CHECK_CONNECTION failed: %s [%s]",
                type(exc).__name__,
                extract_exc_args(exc=exc),
            )

    @inspector(re_raise=False)
    async def _refresh_client_data(self) -> None:
        """Refresh client data."""
        if not self._central.available:
            return

        # Only interfaces that require polling are refreshed here.
        if (poll_clients := self._central.poll_clients) is not None and len(poll_clients) > 0:
            _LOGGER.debug("REFRESH_CLIENT_DATA: Loading data for %s", self._central.name)
            for client in poll_clients:
                await self._central.load_and_refresh_data_point_data(interface=client.interface)
                self._central.set_last_event_seen_for_interface(interface_id=client.interface_id)

    @inspector(re_raise=False)
    async def _refresh_sysvar_data(self) -> None:
        """Refresh system variables."""
        # Wait for device creation to finish before scanning sysvars.
        if not self._central.config.enable_sysvar_scan or not self._central.available or not self._devices_created:
            return

        _LOGGER.debug("REFRESH_SYSVAR_DATA: For %s", self._central.name)
        await self._central.fetch_sysvar_data(scheduled=True)

    @inspector(re_raise=False)
    async def _refresh_program_data(self) -> None:
        """Refresh system program_data."""
        if not self._central.config.enable_program_scan or not self._central.available or not self._devices_created:
            return

        _LOGGER.debug("REFRESH_PROGRAM_DATA: For %s", self._central.name)
        await self._central.fetch_program_data(scheduled=True)

    @inspector(re_raise=False)
    async def _fetch_device_firmware_update_data(self) -> None:
        """Periodically fetch device firmware update data from backend."""
        if (
            not self._central.config.enable_device_firmware_check
            or not self._central.available
            or not self._devices_created
        ):
            return

        _LOGGER.debug(
            "FETCH_DEVICE_FIRMWARE_UPDATE_DATA: Scheduled fetching of device firmware update data for %s",
            self._central.name,
        )
        await self._central.refresh_firmware_data()

    @inspector(re_raise=False)
    async def _fetch_device_firmware_update_data_in_delivery(self) -> None:
        """Periodically fetch device firmware update data from backend."""
        if (
            not self._central.config.enable_device_firmware_check
            or not self._central.available
            or not self._devices_created
        ):
            return

        _LOGGER.debug(
            "FETCH_DEVICE_FIRMWARE_UPDATE_DATA_IN_DELIVERY: Scheduled fetching of device firmware update data for delivering devices for %s",
            self._central.name,
        )
        # Devices currently receiving a firmware image are polled more often.
        await self._central.refresh_firmware_data_by_state(
            device_firmware_states=(
                DeviceFirmwareState.DELIVER_FIRMWARE_IMAGE,
                DeviceFirmwareState.LIVE_DELIVER_FIRMWARE_IMAGE,
            )
        )

    @inspector(re_raise=False)
    async def _fetch_device_firmware_update_data_in_update(self) -> None:
        """Periodically fetch device firmware update data from backend."""
        if (
            not self._central.config.enable_device_firmware_check
            or not self._central.available
            or not self._devices_created
        ):
            return

        _LOGGER.debug(
            "FETCH_DEVICE_FIRMWARE_UPDATE_DATA_IN_UPDATE: Scheduled fetching of device firmware update data for updating devices for %s",
            self._central.name,
        )
        # Devices in an active update cycle are polled more often.
        await self._central.refresh_firmware_data_by_state(
            device_firmware_states=(
                DeviceFirmwareState.READY_FOR_UPDATE,
                DeviceFirmwareState.DO_UPDATE_PENDING,
                DeviceFirmwareState.PERFORMING_UPDATE,
            )
        )
|
|
1957
|
+
|
|
1958
|
+
|
|
1959
|
+
class _SchedulerJob:
    """A periodically scheduled task with a fixed run interval."""

    def __init__(
        self,
        *,
        task: Callable,
        run_interval: int,
        next_run: datetime | None = None,
    ):
        """Store the task, its interval, and optionally an explicit first run time."""
        self._task: Final = task
        # Default to "due now" so a fresh job fires on the first scheduler pass.
        self._next_run = datetime.now() if next_run is None else next_run
        self._run_interval: Final = run_interval

    @property
    def ready(self) -> bool:
        """True when the scheduled run time has passed."""
        return datetime.now() > self._next_run

    @property
    def next_run(self) -> datetime:
        """Timestamp of the next scheduled execution."""
        return self._next_run

    async def run(self) -> None:
        """Await the wrapped task."""
        await self._task()

    def schedule_next_execution(self) -> None:
        """Advance the schedule by exactly one interval."""
        self._next_run = self._next_run + timedelta(seconds=self._run_interval)
|
|
1991
|
+
|
|
1992
|
+
|
|
1993
|
+
class CentralConfig:
    """Configuration used to create and run a CentralUnit and its clients."""

    def __init__(
        self,
        *,
        central_id: str,
        host: str,
        interface_configs: AbstractSet[hmcl.InterfaceConfig],
        name: str,
        password: str,
        username: str,
        client_session: ClientSession | None = None,
        callback_host: str | None = None,
        callback_port_xml_rpc: int | None = None,
        default_callback_port_xml_rpc: int = PORT_ANY,
        delay_new_device_creation: bool = DEFAULT_DELAY_NEW_DEVICE_CREATION,
        enable_device_firmware_check: bool = DEFAULT_ENABLE_DEVICE_FIRMWARE_CHECK,
        enable_program_scan: bool = DEFAULT_ENABLE_PROGRAM_SCAN,
        enable_sysvar_scan: bool = DEFAULT_ENABLE_SYSVAR_SCAN,
        hm_master_poll_after_send_intervals: tuple[int, ...] = DEFAULT_HM_MASTER_POLL_AFTER_SEND_INTERVALS,
        ignore_custom_device_definition_models: frozenset[str] = DEFAULT_IGNORE_CUSTOM_DEVICE_DEFINITION_MODELS,
        interfaces_requiring_periodic_refresh: frozenset[Interface] = DEFAULT_INTERFACES_REQUIRING_PERIODIC_REFRESH,
        json_port: int | None = None,
        listen_ip_addr: str | None = None,
        listen_port_xml_rpc: int | None = None,
        max_read_workers: int = DEFAULT_MAX_READ_WORKERS,
        optional_settings: tuple[OptionalSettings | str, ...] = DEFAULT_OPTIONAL_SETTINGS,
        periodic_refresh_interval: int = DEFAULT_PERIODIC_REFRESH_INTERVAL,
        program_markers: tuple[DescriptionMarker | str, ...] = DEFAULT_PROGRAM_MARKERS,
        start_direct: bool = False,
        storage_directory: str = DEFAULT_STORAGE_DIRECTORY,
        sys_scan_interval: int = DEFAULT_SYS_SCAN_INTERVAL,
        sysvar_markers: tuple[DescriptionMarker | str, ...] = DEFAULT_SYSVAR_MARKERS,
        tls: bool = DEFAULT_TLS,
        un_ignore_list: frozenset[str] = DEFAULT_UN_IGNORES,
        use_group_channel_for_cover_state: bool = DEFAULT_USE_GROUP_CHANNEL_FOR_COVER_STATE,
        verify_tls: bool = DEFAULT_VERIFY_TLS,
    ) -> None:
        """Init the central config."""
        self._interface_configs: Final = interface_configs
        self._optional_settings: Final = frozenset(optional_settings or ())
        # An XML-RPC server is only required if at least one interface uses XML-RPC.
        # Test the condition directly instead of relying on the truthiness of the
        # filtered InterfaceConfig objects (previous form: `any(ic for ic in ... if ...)`).
        self.requires_xml_rpc_server: Final = any(
            ic.rpc_server == RpcServerType.XML_RPC for ic in interface_configs
        )
        self.callback_host: Final = callback_host
        self.callback_port_xml_rpc: Final = callback_port_xml_rpc
        self.central_id: Final = central_id
        self.client_session: Final = client_session
        self.default_callback_port_xml_rpc: Final = default_callback_port_xml_rpc
        self.delay_new_device_creation: Final = delay_new_device_creation
        self.enable_device_firmware_check: Final = enable_device_firmware_check
        self.enable_program_scan: Final = enable_program_scan
        self.enable_sysvar_scan: Final = enable_sysvar_scan
        self.hm_master_poll_after_send_intervals: Final = hm_master_poll_after_send_intervals
        self.host: Final = host
        self.ignore_custom_device_definition_models: Final = frozenset(ignore_custom_device_definition_models or ())
        self.interfaces_requiring_periodic_refresh: Final = frozenset(interfaces_requiring_periodic_refresh or ())
        self.json_port: Final = json_port
        self.listen_ip_addr: Final = listen_ip_addr
        self.listen_port_xml_rpc: Final = listen_port_xml_rpc
        self.max_read_workers = max_read_workers
        self.name: Final = name
        self.password: Final = password
        self.periodic_refresh_interval = periodic_refresh_interval
        self.program_markers: Final = program_markers
        self.start_direct: Final = start_direct
        # Session recorder output is randomized unless explicitly disabled.
        self.session_recorder_randomize_output = (
            OptionalSettings.SR_DISABLE_RANDOMIZE_OUTPUT not in self._optional_settings
        )
        # Recording of system init is time-limited; 0 means "do not start the recorder".
        self.session_recorder_start_for_seconds: Final = (
            DEFAULT_SESSION_RECORDER_START_FOR_SECONDS
            if OptionalSettings.SR_RECORD_SYSTEM_INIT in self._optional_settings
            else 0
        )
        self.session_recorder_start = self.session_recorder_start_for_seconds > 0
        self.storage_directory: Final = storage_directory
        self.sys_scan_interval: Final = sys_scan_interval
        self.sysvar_markers: Final = sysvar_markers
        self.tls: Final = tls
        self.un_ignore_list: Final = un_ignore_list
        self.use_group_channel_for_cover_state: Final = use_group_channel_for_cover_state
        self.username: Final = username
        self.verify_tls: Final = verify_tls

    @property
    def enable_xml_rpc_server(self) -> bool:
        """Return if server and connection checker should be started."""
        return self.requires_xml_rpc_server and self.start_direct is False

    @property
    def load_un_ignore(self) -> bool:
        """Return if un_ignore should be loaded."""
        return self.start_direct is False

    @property
    def connection_check_port(self) -> int:
        """Return the connection check port."""
        # Prefer a configured interface port, then the JSON port, then the scheme default.
        if used_ports := tuple(ic.port for ic in self._interface_configs if ic.port is not None):
            return used_ports[0]
        if self.json_port:
            return self.json_port
        return 443 if self.tls else 80

    @property
    def enabled_interface_configs(self) -> frozenset[hmcl.InterfaceConfig]:
        """Return the enabled interface configs."""
        return frozenset(ic for ic in self._interface_configs if ic.enabled is True)

    @property
    def optional_settings(self) -> frozenset[OptionalSettings | str]:
        """Return the optional settings."""
        return self._optional_settings

    @property
    def use_caches(self) -> bool:
        """Return if store should be used."""
        return self.start_direct is False

    def check_config(self) -> None:
        """Check config. Throws BaseHomematicException on failure."""
        # Delegates to the module-level check_config() helper.
        if config_failures := check_config(
            central_name=self.name,
            host=self.host,
            username=self.username,
            password=self.password,
            storage_directory=self.storage_directory,
            callback_host=self.callback_host,
            callback_port_xml_rpc=self.callback_port_xml_rpc,
            json_port=self.json_port,
            interface_configs=self._interface_configs,
        ):
            failures = ", ".join(config_failures)
            raise AioHomematicConfigException(failures)

    def create_central(self) -> CentralUnit:
        """Create the central. Throws BaseHomematicException on validation failure."""
        try:
            self.check_config()
            return CentralUnit(central_config=self)
        except BaseHomematicException as bhexc:  # pragma: no cover
            # Fixed: the message previously contained a doubled colon ("central: :").
            raise AioHomematicException(
                f"CREATE_CENTRAL: Not able to create a central: {extract_exc_args(exc=bhexc)}"
            ) from bhexc

    def create_central_url(self) -> str:
        """Return the base URL of the backend (scheme, host and optional JSON port)."""
        url = "https://" if self.tls else "http://"
        url = f"{url}{self.host}"
        if self.json_port:
            url = f"{url}:{self.json_port}"
        return url

    def create_json_rpc_client(self, *, central: CentralUnit) -> AioJsonRpcAioHttpClient:
        """Create a json rpc client."""
        return AioJsonRpcAioHttpClient(
            username=self.username,
            password=self.password,
            device_url=central.url,
            connection_state=central.connection_state,
            client_session=self.client_session,
            tls=self.tls,
            verify_tls=self.verify_tls,
            session_recorder=central.recorder,
        )
|
2160
|
+
class CentralConnectionState:
    """The central connection status."""

    def __init__(self) -> None:
        """Init the CentralConnectionStatus."""
        # Issue ids are tracked per issuer kind so JSON-RPC and RPC-proxy
        # problems can be added/removed independently.
        self._json_issues: Final[list[str]] = []
        self._rpc_proxy_issues: Final[list[str]] = []

    def add_issue(self, *, issuer: ConnectionProblemIssuer, iid: str) -> bool:
        """Add issue to collection. Return True if the issue was newly added."""
        if isinstance(issuer, AioJsonRpcAioHttpClient) and iid not in self._json_issues:
            self._json_issues.append(iid)
            _LOGGER.debug("add_issue: add issue [%s] for JsonRpcAioHttpClient", iid)
            return True
        if isinstance(issuer, BaseRpcProxy) and iid not in self._rpc_proxy_issues:
            self._rpc_proxy_issues.append(iid)
            _LOGGER.debug("add_issue: add issue [%s] for RpcProxy", iid)
            return True
        return False

    def remove_issue(self, *, issuer: ConnectionProblemIssuer, iid: str) -> bool:
        """Remove issue from collection. Return True if the issue was removed."""
        if isinstance(issuer, AioJsonRpcAioHttpClient) and iid in self._json_issues:
            self._json_issues.remove(iid)
            _LOGGER.debug("remove_issue: removing issue [%s] for JsonRpcAioHttpClient", iid)
            return True
        if isinstance(issuer, BaseRpcProxy) and iid in self._rpc_proxy_issues:
            self._rpc_proxy_issues.remove(iid)
            _LOGGER.debug("remove_issue: removing issue [%s] for RpcProxy", iid)
            return True
        return False

    def has_issue(self, *, issuer: ConnectionProblemIssuer, iid: str) -> bool:
        """Return whether an issue with this id is tracked for the issuer."""
        if isinstance(issuer, AioJsonRpcAioHttpClient):
            return iid in self._json_issues
        if isinstance(issuer, BaseRpcProxy):
            return iid in self._rpc_proxy_issues
        # Fixed: previously fell through and implicitly returned None,
        # violating the declared `-> bool` contract.
        return False

    def handle_exception_log(
        self,
        *,
        issuer: ConnectionProblemIssuer,
        iid: str,
        exception: Exception,
        logger: logging.Logger = _LOGGER,
        level: int = logging.ERROR,
        extra_msg: str = "",
        multiple_logs: bool = True,
    ) -> None:
        """Log an exception for an issuer, demoting repeats to debug level.

        If the issue is already known and multiple_logs is False, the repeat is
        logged at debug level only; otherwise the issue is registered and logged
        at the requested level.
        """
        exception_name = exception.name if hasattr(exception, "name") else exception.__class__.__name__
        # Both branches use the identical message layout; build it once.
        log_args = ("%s failed: %s [%s] %s", iid, exception_name, extract_exc_args(exc=exception), extra_msg)
        if self.has_issue(issuer=issuer, iid=iid) and multiple_logs is False:
            logger.debug(*log_args)
        else:
            self.add_issue(issuer=issuer, iid=iid)
            logger.log(level, *log_args)
|
2231
|
+
|
|
2232
|
+
def check_config(
    *,
    central_name: str,
    host: str,
    username: str,
    password: str,
    storage_directory: str,
    callback_host: str | None,
    callback_port_xml_rpc: int | None,
    json_port: int | None,
    interface_configs: AbstractSet[hmcl.InterfaceConfig] | None = None,
) -> list[str]:
    """Check config. Throws BaseHomematicException on failure."""
    failures: list[str] = []
    add_failure = failures.append

    # Instance name must be usable as an identifier component.
    if central_name and IDENTIFIER_SEPARATOR in central_name:
        add_failure(f"Instance name must not contain {IDENTIFIER_SEPARATOR}")

    # Host must be a resolvable hostname or a literal IPv4 address.
    host_is_valid = is_hostname(hostname=host) or is_ipv4_address(address=host)
    if not host_is_valid:
        add_failure("Invalid hostname or ipv4 address")
    if not username:
        add_failure("Username must not be empty")
    if not password:
        add_failure("Password is required")
    if not check_password(password=password):
        add_failure("Password is not valid")
    try:
        check_or_create_directory(directory=storage_directory)
    except BaseHomematicException as bhexc:
        add_failure(extract_exc_args(exc=bhexc)[0])
    # Optional values are only validated when set (falsy values are skipped).
    if callback_host and not (is_hostname(hostname=callback_host) or is_ipv4_address(address=callback_host)):
        add_failure("Invalid callback hostname or ipv4 address")
    if callback_port_xml_rpc and not is_port(port=callback_port_xml_rpc):
        add_failure("Invalid xml rpc callback port")
    if json_port and not is_port(port=json_port):
        add_failure("Invalid json port")
    if interface_configs and not _has_primary_client(interface_configs=interface_configs):
        add_failure(f"No primary interface ({', '.join(PRIMARY_CLIENT_CANDIDATE_INTERFACES)}) defined")

    return failures
+
|
|
2272
|
+
|
|
2273
|
+
def _has_primary_client(*, interface_configs: AbstractSet[hmcl.InterfaceConfig]) -> bool:
    """Return True if at least one interface can act as primary client.

    Fixed docstring: the previous text claimed this checked that "all configured
    clients exist in central", which did not match the implementation.
    """
    return any(
        interface_config.interface in PRIMARY_CLIENT_CANDIDATE_INTERFACES
        for interface_config in interface_configs
    )
|
+
|
|
2280
|
+
|
|
2281
|
+
def _get_new_data_points(
    *,
    new_devices: set[Device],
) -> Mapping[DataPointCategory, AbstractSet[CallbackDataPoint]]:
    """Return new data points by category."""
    result: dict[DataPointCategory, set[CallbackDataPoint]] = {}

    # Events are collected separately (see _get_new_channel_events), so the
    # EVENT category is excluded here.
    for category in CATEGORIES:
        if category == DataPointCategory.EVENT:
            continue
        collected: set[CallbackDataPoint] = set()
        for device in new_devices:
            collected.update(
                device.get_data_points(category=category, exclude_no_create=True, registered=False)
            )
        result[category] = collected

    return result
|
+
|
|
2297
|
+
|
|
2298
|
+
def _get_new_channel_events(*, new_devices: set[Device]) -> tuple[tuple[GenericEvent, ...], ...]:
    """Return new, unregistered channel events per device and event type."""
    channel_events: list[tuple[GenericEvent, ...]] = []

    for device in new_devices:
        for event_type in DATA_POINT_EVENTS:
            # A non-empty sequence is truthy, so the former extra
            # `len(...) > 0` check was redundant. Collecting into a tuple
            # also matches the annotated element type, which previously
            # required a `type: ignore`.
            if hm_channel_events := tuple(device.get_events(event_type=event_type, registered=False).values()):
                channel_events.append(hm_channel_events)

    return tuple(channel_events)