aiohomematic 2025.8.8__py3-none-any.whl → 2025.8.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiohomematic might be problematic.
- aiohomematic/__init__.py +15 -1
- aiohomematic/async_support.py +15 -2
- aiohomematic/caches/__init__.py +2 -0
- aiohomematic/caches/dynamic.py +2 -0
- aiohomematic/caches/persistent.py +29 -22
- aiohomematic/caches/visibility.py +277 -252
- aiohomematic/central/__init__.py +69 -49
- aiohomematic/central/decorators.py +60 -15
- aiohomematic/central/xml_rpc_server.py +15 -1
- aiohomematic/client/__init__.py +2 -0
- aiohomematic/client/_rpc_errors.py +81 -0
- aiohomematic/client/json_rpc.py +68 -19
- aiohomematic/client/xml_rpc.py +15 -8
- aiohomematic/const.py +145 -77
- aiohomematic/context.py +11 -1
- aiohomematic/converter.py +27 -1
- aiohomematic/decorators.py +88 -19
- aiohomematic/exceptions.py +19 -1
- aiohomematic/hmcli.py +13 -1
- aiohomematic/model/__init__.py +2 -0
- aiohomematic/model/calculated/__init__.py +2 -0
- aiohomematic/model/calculated/climate.py +2 -0
- aiohomematic/model/calculated/data_point.py +7 -1
- aiohomematic/model/calculated/operating_voltage_level.py +2 -0
- aiohomematic/model/calculated/support.py +2 -0
- aiohomematic/model/custom/__init__.py +2 -0
- aiohomematic/model/custom/climate.py +3 -1
- aiohomematic/model/custom/const.py +2 -0
- aiohomematic/model/custom/cover.py +30 -2
- aiohomematic/model/custom/data_point.py +6 -0
- aiohomematic/model/custom/definition.py +2 -0
- aiohomematic/model/custom/light.py +18 -10
- aiohomematic/model/custom/lock.py +2 -0
- aiohomematic/model/custom/siren.py +5 -2
- aiohomematic/model/custom/support.py +2 -0
- aiohomematic/model/custom/switch.py +2 -0
- aiohomematic/model/custom/valve.py +2 -0
- aiohomematic/model/data_point.py +30 -3
- aiohomematic/model/decorators.py +29 -8
- aiohomematic/model/device.py +9 -5
- aiohomematic/model/event.py +2 -0
- aiohomematic/model/generic/__init__.py +2 -0
- aiohomematic/model/generic/action.py +2 -0
- aiohomematic/model/generic/binary_sensor.py +2 -0
- aiohomematic/model/generic/button.py +2 -0
- aiohomematic/model/generic/data_point.py +4 -1
- aiohomematic/model/generic/number.py +4 -1
- aiohomematic/model/generic/select.py +4 -1
- aiohomematic/model/generic/sensor.py +2 -0
- aiohomematic/model/generic/switch.py +2 -0
- aiohomematic/model/generic/text.py +2 -0
- aiohomematic/model/hub/__init__.py +2 -0
- aiohomematic/model/hub/binary_sensor.py +2 -0
- aiohomematic/model/hub/button.py +2 -0
- aiohomematic/model/hub/data_point.py +6 -0
- aiohomematic/model/hub/number.py +2 -0
- aiohomematic/model/hub/select.py +2 -0
- aiohomematic/model/hub/sensor.py +2 -0
- aiohomematic/model/hub/switch.py +2 -0
- aiohomematic/model/hub/text.py +2 -0
- aiohomematic/model/support.py +26 -1
- aiohomematic/model/update.py +6 -0
- aiohomematic/support.py +175 -5
- aiohomematic/validator.py +49 -2
- aiohomematic-2025.8.10.dist-info/METADATA +124 -0
- aiohomematic-2025.8.10.dist-info/RECORD +78 -0
- {aiohomematic-2025.8.8.dist-info → aiohomematic-2025.8.10.dist-info}/licenses/LICENSE +1 -1
- aiohomematic-2025.8.8.dist-info/METADATA +0 -69
- aiohomematic-2025.8.8.dist-info/RECORD +0 -77
- {aiohomematic-2025.8.8.dist-info → aiohomematic-2025.8.10.dist-info}/WHEEL +0 -0
- {aiohomematic-2025.8.8.dist-info → aiohomematic-2025.8.10.dist-info}/top_level.txt +0 -0
aiohomematic/central/__init__.py
CHANGED
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: MIT
+# Copyright (c) 2021-2025 Daniel Perna, SukramJ
 """
 Central unit and core orchestration for HomeMatic CCU and compatible backends.
 
@@ -119,6 +121,7 @@ from aiohomematic.const import (
     TIMEOUT,
     UN_IGNORE_WILDCARD,
     BackendSystemEvent,
+    CentralUnitState,
     DataOperationResult,
     DataPointCategory,
     DataPointKey,
@@ -163,6 +166,7 @@ from aiohomematic.support import check_config, extract_exc_args, get_channel_no,
 __all__ = ["CentralConfig", "CentralUnit", "INTERFACE_EVENT_SCHEMA"]
 
 _LOGGER: Final = logging.getLogger(__name__)
+_LOGGER_EVENT: Final = logging.getLogger(f"{__name__}_event")
 
 # {central_name, central}
 CENTRAL_INSTANCES: Final[dict[str, CentralUnit]] = {}
@@ -184,7 +188,7 @@ class CentralUnit(PayloadMixin):
 
     def __init__(self, central_config: CentralConfig) -> None:
         """Init the central unit."""
-        self.
+        self._state: CentralUnitState = CentralUnitState.NEW
         self._clients_started: bool = False
         self._device_add_semaphore: Final = asyncio.Semaphore()
         self._connection_state: Final = CentralConnectionState()
@@ -298,14 +302,14 @@ class CentralUnit(PayloadMixin):
         )
 
     @property
-    def interface_ids(self) ->
+    def interface_ids(self) -> frozenset[str]:
         """Return all associated interface ids."""
-        return
+        return frozenset(self._clients)
 
     @property
-    def interfaces(self) ->
+    def interfaces(self) -> frozenset[Interface]:
         """Return all associated interfaces."""
-        return
+        return frozenset(client.interface for client in self._clients.values())
 
     @property
     def is_alive(self) -> bool:
@@ -379,9 +383,9 @@
         )
 
     @property
-    def
-        """Return
-        return self.
+    def state(self) -> CentralUnitState:
+        """Return the central state."""
+        return self._state
 
     @property
     def supports_ping_pong(self) -> bool:
@@ -455,9 +459,17 @@
     async def start(self) -> None:
         """Start processing of the central unit."""
 
-
+        _LOGGER.debug("START: Central %s is %s", self.name, self._state)
+        if self._state == CentralUnitState.INITIALIZING:
+            _LOGGER.debug("START: Central %s already starting", self.name)
+            return
+
+        if self._state == CentralUnitState.RUNNING:
             _LOGGER.debug("START: Central %s already started", self.name)
             return
+
+        self._state = CentralUnitState.INITIALIZING
+        _LOGGER.debug("START: Initializing Central %s", self.name)
         if self._config.enabled_interface_configs and (
             ip_addr := await self._identify_ip_addr(port=self._config.connection_check_port)
         ):
@@ -479,6 +491,7 @@
             self._listen_port = xml_rpc_server.listen_port
             self._xml_rpc_server.add_central(self)
         except OSError as oserr:
+            self._state = CentralUnitState.STOPPED_BY_ERROR
             raise AioHomematicException(
                 f"START: Failed to start central unit {self.name}: {extract_exc_args(exc=oserr)}"
             ) from oserr
@@ -492,13 +505,24 @@
         if self._config.enable_server:
             self._start_scheduler()
 
-        self.
+        self._state = CentralUnitState.RUNNING
+        _LOGGER.debug("START: Central %s is %s", self.name, self._state)
 
     async def stop(self) -> None:
         """Stop processing of the central unit."""
-
+        _LOGGER.debug("STOP: Central %s is %s", self.name, self._state)
+        if self._state == CentralUnitState.STOPPING:
+            _LOGGER.debug("STOP: Central %s is already stopping", self.name)
+            return
+        if self._state == CentralUnitState.STOPPED:
+            _LOGGER.debug("STOP: Central %s is already stopped", self.name)
+            return
+        if self._state != CentralUnitState.RUNNING:
            _LOGGER.debug("STOP: Central %s not started", self.name)
            return
+        self._state = CentralUnitState.STOPPING
+        _LOGGER.debug("STOP: Stopping Central %s", self.name)
+
         await self.save_caches(save_device_descriptions=True, save_paramset_descriptions=True)
         self._stop_scheduler()
         await self._stop_clients()
@@ -522,8 +546,8 @@
 
         # cancel outstanding tasks to speed up teardown
         self.looper.cancel_tasks()
-        # wait until tasks are finished
-        await self.looper.block_till_done()
+        # wait until tasks are finished (with wait_time safeguard)
+        await self.looper.block_till_done(wait_time=5.0)
 
         # Wait briefly for any auxiliary threads to finish without blocking forever
         max_wait_seconds = 5.0
@@ -532,7 +556,8 @@
         while self._has_active_threads and waited < max_wait_seconds:
            await asyncio.sleep(interval)
            waited += interval
-        self.
+        self._state = CentralUnitState.STOPPED
+        _LOGGER.debug("STOP: Central %s is %s", self.name, self._state)
 
     async def restart_clients(self) -> None:
         """Restart clients."""
@@ -1009,19 +1034,19 @@
             return
 
         async with self._device_add_semaphore:
-            #
-
-                dev_desc["ADDRESS"]
-                for dev_desc in self._device_descriptions.get_raw_device_descriptions(interface_id=interface_id)
-            )
+            # Use mapping membership to avoid rebuilding known addresses and allow O(1) checks.
+            existing_map = self._device_descriptions.get_device_descriptions(interface_id=interface_id)
             client = self._clients[interface_id]
             save_paramset_descriptions = False
             save_device_descriptions = False
             for dev_desc in device_descriptions:
                 try:
+                    address = dev_desc["ADDRESS"]
+                    # Check existence before mutating cache to ensure we detect truly new addresses.
+                    is_new_address = address not in existing_map
                     self._device_descriptions.add_device(interface_id=interface_id, device_description=dev_desc)
                     save_device_descriptions = True
-                    if
+                    if is_new_address:
                         await client.fetch_paramset_descriptions(device_description=dev_desc)
                         save_paramset_descriptions = True
                 except Exception as exc:  # pragma: no cover
@@ -1043,7 +1068,7 @@ class CentralUnit(PayloadMixin):
         await self._create_devices(new_device_addresses=new_device_addresses)
 
     def _check_for_new_device_addresses(self) -> Mapping[str, set[str]]:
-        """Check if there are new devices
+        """Check if there are new devices that need to be created."""
         new_device_addresses: dict[str, set[str]] = {}
         for interface_id in self.interface_ids:
             if not self._paramset_descriptions.has_interface_id(interface_id=interface_id):
@@ -1053,21 +1078,16 @@
                 )
                 continue
 
-            if
-
-
+            # Build the set locally and assign only if non-empty to avoid add-then-delete pattern
+            new_set: set[str] = set()
             for device_address in self._device_descriptions.get_addresses(interface_id=interface_id):
                 if device_address not in self._devices:
-
-
-
-                    del new_device_addresses[interface_id]
+                    new_set.add(device_address)
+            if new_set:
+                new_device_addresses[interface_id] = new_set
 
         if _LOGGER.isEnabledFor(level=DEBUG):
-            count
-            for item in new_device_addresses.values():
-                count += len(item)
-
+            count = sum(len(item) for item in new_device_addresses.values())
             _LOGGER.debug(
                 "CHECK_FOR_NEW_DEVICE_ADDRESSES: %s: %i.",
                 "Found new device addresses" if new_device_addresses else "Did not find any new device addresses",
@@ -1079,7 +1099,7 @@ class CentralUnit(PayloadMixin):
     @callback_event
     async def data_point_event(self, interface_id: str, channel_address: str, parameter: str, value: Any) -> None:
         """If a device emits some sort event, we will handle it here."""
-
+        _LOGGER_EVENT.debug(
             "EVENT: interface_id = %s, channel_address = %s, parameter = %s, value = %s",
             interface_id,
             channel_address,
@@ -1117,7 +1137,7 @@ class CentralUnit(PayloadMixin):
                 if callable(callback_handler):
                     await callback_handler(value)
         except RuntimeError as rterr:  # pragma: no cover
-
+            _LOGGER_EVENT.debug(
                 "EVENT: RuntimeError [%s]. Failed to call callback for: %s, %s, %s",
                 extract_exc_args(exc=rterr),
                 interface_id,
@@ -1125,7 +1145,7 @@ class CentralUnit(PayloadMixin):
                 parameter,
             )
         except Exception as exc:  # pragma: no cover
-
+            _LOGGER_EVENT.warning(
                 "EVENT failed: Unable to call callback for: %s, %s, %s, %s",
                 interface_id,
                 channel_address,
@@ -1135,7 +1155,7 @@ class CentralUnit(PayloadMixin):
 
     def data_point_path_event(self, state_path: str, value: str) -> None:
         """If a device emits some sort event, we will handle it here."""
-
+        _LOGGER_EVENT.debug(
             "DATA_POINT_PATH_EVENT: topic = %s, payload = %s",
             state_path,
             value,
@@ -1154,7 +1174,7 @@ class CentralUnit(PayloadMixin):
 
     def sysvar_data_point_path_event(self, state_path: str, value: str) -> None:
         """If a device emits some sort event, we will handle it here."""
-
+        _LOGGER_EVENT.debug(
            "SYSVAR_DATA_POINT_PATH_EVENT: topic = %s, payload = %s",
            state_path,
            value,
@@ -1166,13 +1186,13 @@ class CentralUnit(PayloadMixin):
             if callable(callback_handler):
                 self._looper.create_task(callback_handler(value), name=f"sysvar-data-point-event-{state_path}")
         except RuntimeError as rterr:  # pragma: no cover
-
+            _LOGGER_EVENT.debug(
                 "EVENT: RuntimeError [%s]. Failed to call callback for: %s",
                 extract_exc_args(exc=rterr),
                 state_path,
             )
         except Exception as exc:  # pragma: no cover
-
+            _LOGGER_EVENT.warning(
                 "EVENT failed: Unable to call callback for: %s, %s",
                 state_path,
                 extract_exc_args(exc=exc),
@@ -1298,7 +1318,7 @@ class CentralUnit(PayloadMixin):
         full_format: bool = False,
         un_ignore_candidates_only: bool = False,
         use_channel_wildcard: bool = False,
-    ) ->
+    ) -> tuple[str, ...]:
         """
         Return all parameters from VALUES paramset.
 
@@ -1368,7 +1388,7 @@ class CentralUnit(PayloadMixin):
             else:
                 parameters.add(f"{parameter}:{paramset_key}@{model}:{channel_repr}")
 
-        return
+        return tuple(parameters)
 
     def _get_virtual_remote(self, device_address: str) -> Device | None:
         """Get the virtual remote for the Client."""
@@ -1626,7 +1646,7 @@ class _Scheduler(threading.Thread):
     async def _run_scheduler_tasks(self) -> None:
         """Run all tasks."""
         while self._active:
-            if
+            if self._central.state != CentralUnitState.RUNNING:
                 _LOGGER.debug("SCHEDULER: Waiting till central %s is started", self._central.name)
                 await asyncio.sleep(SCHEDULER_NOT_STARTED_SLEEP)
                 continue
@@ -1811,8 +1831,8 @@ class CentralConfig:
         enable_program_scan: bool = DEFAULT_ENABLE_PROGRAM_SCAN,
         enable_sysvar_scan: bool = DEFAULT_ENABLE_SYSVAR_SCAN,
         hm_master_poll_after_send_intervals: tuple[int, ...] = DEFAULT_HM_MASTER_POLL_AFTER_SEND_INTERVALS,
-        ignore_custom_device_definition_models:
-        interfaces_requiring_periodic_refresh:
+        ignore_custom_device_definition_models: frozenset[str] = DEFAULT_IGNORE_CUSTOM_DEVICE_DEFINITION_MODELS,
+        interfaces_requiring_periodic_refresh: frozenset[Interface] = INTERFACES_REQUIRING_PERIODIC_REFRESH,
         json_port: int | None = None,
         listen_ip_addr: str | None = None,
         listen_port: int | None = None,
@@ -1823,7 +1843,7 @@ class CentralConfig:
         sys_scan_interval: int = DEFAULT_SYS_SCAN_INTERVAL,
         sysvar_markers: tuple[DescriptionMarker | str, ...] = DEFAULT_SYSVAR_MARKERS,
         tls: bool = DEFAULT_TLS,
-        un_ignore_list:
+        un_ignore_list: frozenset[str] = DEFAULT_UN_IGNORES,
         verify_tls: bool = DEFAULT_VERIFY_TLS,
     ) -> None:
         """Init the client config."""
@@ -1838,8 +1858,8 @@ class CentralConfig:
         self.enable_sysvar_scan: Final = enable_sysvar_scan
         self.hm_master_poll_after_send_intervals: Final = hm_master_poll_after_send_intervals
         self.host: Final = host
-        self.ignore_custom_device_definition_models: Final = ignore_custom_device_definition_models
-        self.interfaces_requiring_periodic_refresh: Final = interfaces_requiring_periodic_refresh
+        self.ignore_custom_device_definition_models: Final = frozenset(ignore_custom_device_definition_models or ())
+        self.interfaces_requiring_periodic_refresh: Final = frozenset(interfaces_requiring_periodic_refresh or ())
         self.json_port: Final = json_port
         self.listen_ip_addr: Final = listen_ip_addr
         self.listen_port: Final = listen_port
@@ -1877,9 +1897,9 @@ class CentralConfig:
         return 443 if self.tls else 80
 
     @property
-    def enabled_interface_configs(self) ->
+    def enabled_interface_configs(self) -> frozenset[hmcl.InterfaceConfig]:
         """Return the interface configs."""
-        return
+        return frozenset(ic for ic in self._interface_configs if ic.enabled is True)
 
     @property
     def use_caches(self) -> bool:
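The hunks above replace the old start/stop bookkeeping with an explicit CentralUnitState, so repeated or overlapping start() and stop() calls return early and a failed startup is recorded as STOPPED_BY_ERROR. Below is a minimal, self-contained sketch of that guard pattern; it assumes only the state names visible in this diff, and the Unit class with its methods is an illustrative stand-in, not aiohomematic's implementation.

import asyncio
from enum import Enum, auto


class State(Enum):
    NEW = auto()
    INITIALIZING = auto()
    RUNNING = auto()
    STOPPING = auto()
    STOPPED = auto()
    STOPPED_BY_ERROR = auto()


class Unit:
    def __init__(self) -> None:
        self._state = State.NEW

    async def start(self) -> None:
        # Re-entrant or repeated calls are no-ops instead of starting twice.
        if self._state in (State.INITIALIZING, State.RUNNING):
            return
        self._state = State.INITIALIZING
        try:
            await asyncio.sleep(0)  # stand-in for bringing up clients and servers
        except OSError:
            self._state = State.STOPPED_BY_ERROR
            raise
        self._state = State.RUNNING

    async def stop(self) -> None:
        # Only a RUNNING unit performs teardown; other states return early.
        if self._state is not State.RUNNING:
            return
        self._state = State.STOPPING
        await asyncio.sleep(0)  # stand-in for saving caches and stopping clients
        self._state = State.STOPPED


async def main() -> None:
    unit = Unit()
    await unit.start()
    await unit.start()  # ignored: already RUNNING
    await unit.stop()
    print(unit._state)  # State.STOPPED


asyncio.run(main())

The same state value is what the scheduler loop now polls (self._central.state != CentralUnitState.RUNNING) before running its periodic tasks.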
aiohomematic/central/decorators.py
CHANGED
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: MIT
+# Copyright (c) 2021-2025 Daniel Perna, SukramJ
 """Decorators for central used within aiohomematic."""
 
 from __future__ import annotations
@@ -17,6 +19,9 @@ from aiohomematic.support import extract_exc_args
 
 _LOGGER: Final = logging.getLogger(__name__)
 _INTERFACE_ID: Final = "interface_id"
+_CHANNEL_ADDRESS: Final = "channel_address"
+_PARAMETER: Final = "parameter"
+_VALUE: Final = "value"
 
 
 def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
@@ -83,28 +88,68 @@ def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
     return decorator_backend_system_callback
 
 
-def callback_event[**P, R](
-    func: Callable[P, R],
-) -> Callable:
+def callback_event[**P, R](func: Callable[P, R]) -> Callable:
     """Check if event_callback is set and call it AFTER original function."""
 
-    @wraps(func)
-    async def async_wrapper_event_callback(*args: P.args, **kwargs: P.kwargs) -> R:
-        """Wrap callback events."""
-        return_value = cast(R, await func(*args, **kwargs))  # type: ignore[misc]
-        _exec_event_callback(*args, **kwargs)
-        return return_value
-
    def _exec_event_callback(*args: Any, **kwargs: Any) -> None:
        """Execute the callback for a data_point event."""
        try:
-
-            interface_id: str
+            # Expected signature: (self, interface_id, channel_address, parameter, value)
+            interface_id: str
+            if len(args) > 1:
+                interface_id = cast(str, args[1])
+                channel_address = cast(str, args[2])
+                parameter = cast(str, args[3])
+                value = args[4] if len(args) > 4 else kwargs.get(_VALUE)
+            else:
+                interface_id = cast(str, kwargs[_INTERFACE_ID])
+                channel_address = cast(str, kwargs[_CHANNEL_ADDRESS])
+                parameter = cast(str, kwargs[_PARAMETER])
+                value = kwargs[_VALUE]
+
            if client := hmcl.get_client(interface_id=interface_id):
                client.modified_at = datetime.now()
-                client.central.fire_backend_parameter_callback(
+                client.central.fire_backend_parameter_callback(
+                    interface_id=interface_id, channel_address=channel_address, parameter=parameter, value=value
+                )
        except Exception as exc:  # pragma: no cover
-            _LOGGER.warning("EXEC_DATA_POINT_EVENT_CALLBACK failed: Unable to
+            _LOGGER.warning("EXEC_DATA_POINT_EVENT_CALLBACK failed: Unable to process args/kwargs for event_callback")
            raise AioHomematicException(f"args-exception event_callback [{extract_exc_args(exc=exc)}]") from exc
 
-
+    def _schedule_or_exec(*args: Any, **kwargs: Any) -> None:
+        """Schedule event callback on central looper when possible, else execute inline."""
+        try:
+            # Prefer scheduling on the CentralUnit looper when available to avoid blocking hot path
+            unit = args[0]
+            if isinstance(unit, hmcu.CentralUnit):
+                unit.looper.create_task(
+                    _async_wrap_sync(_exec_event_callback, *args, **kwargs),
+                    name="wrapper_event_callback",
+                )
+                return
+        except Exception:
+            # Fall through to inline execution on any error
+            pass
+        _exec_event_callback(*args, **kwargs)
+
+    @wraps(func)
+    async def async_wrapper_event_callback(*args: P.args, **kwargs: P.kwargs) -> R:
+        """Wrap async callback events."""
+        return_value = cast(R, await func(*args, **kwargs))  # type: ignore[misc]
+        _schedule_or_exec(*args, **kwargs)
+        return return_value
+
+    @wraps(func)
+    def wrapper_event_callback(*args: P.args, **kwargs: P.kwargs) -> R:
+        """Wrap sync callback events."""
+        return_value = func(*args, **kwargs)
+        _schedule_or_exec(*args, **kwargs)
+        return return_value
+
+    # Helper to create a trivial coroutine from a sync callable
+    async def _async_wrap_sync(cb: Callable[..., None], *a: Any, **kw: Any) -> None:
+        cb(*a, **kw)
+
+    if inspect.iscoroutinefunction(func):
+        return async_wrapper_event_callback
+    return wrapper_event_callback
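The rewritten callback_event decorator now handles both coroutine and plain callables and fires the event callback after the wrapped function returns. The sketch below shows that sync/async-aware decorator pattern in isolation; fire_after and handle are made-up names for illustration and are not part of aiohomematic's API.

import asyncio
import inspect
from collections.abc import Callable
from functools import wraps
from typing import Any


def fire_after(side_effect: Callable[..., None]) -> Callable:
    """Build a decorator that calls side_effect(*args, **kwargs) after the wrapped call."""

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
            result = await func(*args, **kwargs)
            side_effect(*args, **kwargs)
            return result

        @wraps(func)
        def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
            result = func(*args, **kwargs)
            side_effect(*args, **kwargs)
            return result

        # Dispatch on the wrapped callable, as callback_event does at its end.
        return async_wrapper if inspect.iscoroutinefunction(func) else sync_wrapper

    return decorator


@fire_after(lambda *args, **kwargs: print("event fired for:", args, kwargs))
async def handle(value: int) -> int:
    return value * 2


print(asyncio.run(handle(21)))  # prints the event line, then 42

Selecting the wrapper with inspect.iscoroutinefunction keeps the decorated function awaitable only when the original was, which mirrors the dispatch added at the end of callback_event above.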
aiohomematic/central/xml_rpc_server.py
CHANGED
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: MIT
+# Copyright (c) 2021-2025 Daniel Perna, SukramJ
 """
 XML-RPC server module.
 
@@ -16,7 +18,7 @@ from xmlrpc.server import SimpleXMLRPCRequestHandler, SimpleXMLRPCServer
 from aiohomematic import central as hmcu
 from aiohomematic.central.decorators import callback_backend_system
 from aiohomematic.const import IP_ANY_V4, PORT_ANY, BackendSystemEvent
-from aiohomematic.support import find_free_port
+from aiohomematic.support import find_free_port, log_boundary_error
 
 _LOGGER: Final = logging.getLogger(__name__)
 
@@ -45,6 +47,18 @@ class RPCFunctions:
     @callback_backend_system(system_event=BackendSystemEvent.ERROR)
     def error(self, interface_id: str, error_code: str, msg: str) -> None:
         """When some error occurs the CCU / Homegear will send its error message here."""
+        # Structured boundary log (warning level). XML-RPC server received error notification.
+        try:
+            raise RuntimeError(str(msg))
+        except RuntimeError as err:
+            log_boundary_error(
+                logger=_LOGGER,
+                boundary="xml-rpc-server",
+                action="error",
+                err=err,
+                level=logging.WARNING,
+                context={"interface_id": interface_id, "error_code": int(error_code)},
+            )
         _LOGGER.warning(
             "ERROR failed: interface_id = %s, error_code = %i, message = %s",
             interface_id,
aiohomematic/client/_rpc_errors.py
ADDED
@@ -0,0 +1,81 @@
+# SPDX-License-Identifier: MIT
+# Copyright (c) 2021-2025 Daniel Perna, SukramJ
+"""
+Error mapping helpers for RPC transports.
+
+This module centralizes small, transport-agnostic utilities to turn backend
+errors into domain-specific exceptions with useful context. It is used by both
+JSON-RPC and XML-RPC clients.
+
+Key types and functions
+- RpcContext: Lightweight context container that formats protocol/method/host
+  for readable error messages and logs.
+- map_jsonrpc_error: Maps a JSON-RPC error object to an appropriate exception
+  (AuthFailure, InternalBackendException, ClientException).
+- map_transport_error: Maps generic transport-level exceptions like OSError to
+  domain exceptions (NoConnectionException/ClientException).
+- map_xmlrpc_fault: Maps XML-RPC faults to domain exceptions with context.
+"""
+
+from __future__ import annotations
+
+from collections.abc import Mapping
+from dataclasses import dataclass
+from typing import Any
+
+from aiohomematic.exceptions import AuthFailure, ClientException, InternalBackendException, NoConnectionException
+
+
+@dataclass(slots=True)
+class RpcContext:
+    protocol: str
+    method: str
+    host: str | None = None
+    interface: str | None = None
+    params: Mapping[str, Any] | None = None
+
+    def fmt(self) -> str:
+        """Format context for error messages."""
+        parts: list[str] = [f"protocol={self.protocol}", f"method={self.method}"]
+        if self.interface:
+            parts.append(f"interface={self.interface}")
+        if self.host:
+            parts.append(f"host={self.host}")
+        return ", ".join(parts)
+
+
+def map_jsonrpc_error(error: Mapping[str, Any], ctx: RpcContext) -> Exception:
+    """Map JSON-RPC error to exception."""
+    # JSON-RPC 2.0 like error: {code, message, data?}
+    code = int(error.get("code", 0))
+    message = str(error.get("message", ""))
+    # Enrich message with context
+    base_msg = f"{message} ({ctx.fmt()})"
+
+    # Map common codes
+    if message.startswith("access denied") or code in (401, -32001):
+        return AuthFailure(base_msg)
+    if "internal error" in message.lower() or code in (-32603, 500):
+        return InternalBackendException(base_msg)
+    # Generic client exception for others
+    return ClientException(base_msg)
+
+
+def map_transport_error(exc: BaseException, ctx: RpcContext) -> Exception:
+    """Map transport error to exception."""
+    msg = f"{exc} ({ctx.fmt()})"
+    if isinstance(exc, OSError):
+        return NoConnectionException(msg)
+    return ClientException(msg)
+
+
+def map_xmlrpc_fault(code: int, fault_string: str, ctx: RpcContext) -> Exception:
+    """Map XML-RPC fault to exception."""
+    # Enrich message with context
+    fault_msg = f"XMLRPC Fault {code}: {fault_string} ({ctx.fmt()})"
+    # Simple mappings
+    if "unauthorized" in fault_string.lower():
+        return AuthFailure(fault_msg)
+    if "internal" in fault_string.lower():
+        return InternalBackendException(fault_msg)
+    return ClientException(fault_msg)
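Because this module is new in 2025.8.10, a short usage sketch may help. It wraps a plain xmlrpc.client call and converts faults and connection errors into domain exceptions via the helpers shown above; the method, host, and interface values are placeholders, and this is not how aiohomematic's own client layer is wired.

import xmlrpc.client
from typing import Any

from aiohomematic.client._rpc_errors import RpcContext, map_transport_error, map_xmlrpc_fault


def get_value(proxy: xmlrpc.client.ServerProxy, channel_address: str, parameter: str) -> Any:
    # The context only enriches the exception message (protocol/method/interface/host).
    ctx = RpcContext(protocol="xml-rpc", method="getValue", host="ccu.example", interface="BidCos-RF")
    try:
        return proxy.getValue(channel_address, parameter)
    except xmlrpc.client.Fault as fault:
        # Fault code/string are mapped to AuthFailure, InternalBackendException or ClientException.
        raise map_xmlrpc_fault(fault.faultCode, fault.faultString, ctx) from fault
    except OSError as exc:
        # Connection-level failures become NoConnectionException.
        raise map_transport_error(exc, ctx) from exc

A JSON-RPC error object can be handled the same way by passing its {code, message} mapping to map_jsonrpc_error together with an RpcContext describing the call.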