aiohomematic 2025.10.21__py3-none-any.whl → 2025.10.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiohomematic might be problematic. Click here for more details.
- aiohomematic/central/__init__.py +29 -29
- aiohomematic/central/decorators.py +4 -4
- aiohomematic/client/__init__.py +17 -15
- aiohomematic/client/json_rpc.py +11 -6
- aiohomematic/client/rpc_proxy.py +2 -2
- aiohomematic/const.py +2 -2
- aiohomematic/hmcli.py +24 -9
- aiohomematic/model/calculated/data_point.py +6 -10
- aiohomematic/model/custom/data_point.py +2 -2
- aiohomematic/model/custom/definition.py +4 -1
- aiohomematic/model/custom/light.py +1 -1
- aiohomematic/model/data_point.py +19 -19
- aiohomematic/model/device.py +7 -7
- aiohomematic/model/event.py +7 -7
- aiohomematic/model/generic/data_point.py +2 -2
- aiohomematic/model/hub/__init__.py +2 -2
- aiohomematic/model/hub/data_point.py +3 -3
- aiohomematic/property_decorators.py +4 -4
- aiohomematic/store/dynamic.py +82 -102
- aiohomematic/store/visibility.py +4 -5
- {aiohomematic-2025.10.21.dist-info → aiohomematic-2025.10.24.dist-info}/METADATA +8 -2
- {aiohomematic-2025.10.21.dist-info → aiohomematic-2025.10.24.dist-info}/RECORD +26 -25
- aiohomematic-2025.10.24.dist-info/entry_points.txt +2 -0
- {aiohomematic-2025.10.21.dist-info → aiohomematic-2025.10.24.dist-info}/WHEEL +0 -0
- {aiohomematic-2025.10.21.dist-info → aiohomematic-2025.10.24.dist-info}/licenses/LICENSE +0 -0
- {aiohomematic-2025.10.21.dist-info → aiohomematic-2025.10.24.dist-info}/top_level.txt +0 -0
aiohomematic/central/__init__.py
CHANGED
|
@@ -251,7 +251,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
251
251
|
# e.g. DEVICES_CREATED, HUB_REFRESHED
|
|
252
252
|
self._backend_system_callbacks: Final[set[Callable]] = set()
|
|
253
253
|
# Signature: (interface_id, channel_address, parameter, value)
|
|
254
|
-
# Re-
|
|
254
|
+
# Re-emitted events from the backend for parameter updates
|
|
255
255
|
self._backend_parameter_callbacks: Final[set[Callable]] = set()
|
|
256
256
|
# Signature: (event_type, event_data)
|
|
257
257
|
# Events like INTERFACE, KEYPRESS, ...
|
|
@@ -454,7 +454,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
454
454
|
def remove_sysvar_data_point(self, *, vid: str) -> None:
|
|
455
455
|
"""Remove a sysvar data_point."""
|
|
456
456
|
if (sysvar_dp := self.get_sysvar_data_point(vid=vid)) is not None:
|
|
457
|
-
sysvar_dp.
|
|
457
|
+
sysvar_dp.emit_device_removed_event()
|
|
458
458
|
del self._sysvar_data_points[vid]
|
|
459
459
|
if sysvar_dp.state_path in self._sysvar_data_point_event_subscriptions:
|
|
460
460
|
del self._sysvar_data_point_event_subscriptions[sysvar_dp.state_path]
|
|
@@ -466,8 +466,8 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
466
466
|
def remove_program_button(self, *, pid: str) -> None:
|
|
467
467
|
"""Remove a program button."""
|
|
468
468
|
if (program_dp := self.get_program_data_point(pid=pid)) is not None:
|
|
469
|
-
program_dp.button.
|
|
470
|
-
program_dp.switch.
|
|
469
|
+
program_dp.button.emit_device_removed_event()
|
|
470
|
+
program_dp.switch.emit_device_removed_event()
|
|
471
471
|
del self._program_data_points[pid]
|
|
472
472
|
|
|
473
473
|
def identify_channel(self, *, text: str) -> Channel | None:
|
|
@@ -532,7 +532,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
532
532
|
self._xml_rpc_server = xml_rpc_server
|
|
533
533
|
self._listen_port_xml_rpc = xml_rpc_server.listen_port
|
|
534
534
|
self._xml_rpc_server.add_central(central=self)
|
|
535
|
-
except OSError as oserr:
|
|
535
|
+
except OSError as oserr: # pragma: no cover - environment/OS-specific socket binding failures are not reliably reproducible in CI
|
|
536
536
|
self._state = CentralUnitState.STOPPED_BY_ERROR
|
|
537
537
|
raise AioHomematicException(
|
|
538
538
|
f"START: Failed to start central unit {self.name}: {extract_exc_args(exc=oserr)}"
|
|
@@ -763,8 +763,8 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
763
763
|
)
|
|
764
764
|
self._clients[client.interface_id] = client
|
|
765
765
|
return True
|
|
766
|
-
except BaseHomematicException as bhexc:
|
|
767
|
-
self.
|
|
766
|
+
except BaseHomematicException as bhexc: # pragma: no cover - deterministic simulation of client creation failures would require the full client/proxy stack and network timing; keeping this defensive log-and-state branch untested to avoid brittle CI
|
|
767
|
+
self.emit_interface_event(
|
|
768
768
|
interface_id=interface_config.interface_id,
|
|
769
769
|
interface_event_type=InterfaceEventType.PROXY,
|
|
770
770
|
data={EventKey.AVAILABLE: False},
|
|
@@ -803,14 +803,14 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
803
803
|
await self._hub.fetch_sysvar_data(scheduled=True)
|
|
804
804
|
|
|
805
805
|
@loop_check
|
|
806
|
-
def
|
|
806
|
+
def emit_interface_event(
|
|
807
807
|
self,
|
|
808
808
|
*,
|
|
809
809
|
interface_id: str,
|
|
810
810
|
interface_event_type: InterfaceEventType,
|
|
811
811
|
data: dict[str, Any],
|
|
812
812
|
) -> None:
|
|
813
|
-
"""
|
|
813
|
+
"""Emit an event about the interface status."""
|
|
814
814
|
data = data or {}
|
|
815
815
|
event_data: dict[str, Any] = {
|
|
816
816
|
EventKey.INTERFACE_ID: interface_id,
|
|
@@ -818,7 +818,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
818
818
|
EventKey.DATA: data,
|
|
819
819
|
}
|
|
820
820
|
|
|
821
|
-
self.
|
|
821
|
+
self.emit_homematic_callback(
|
|
822
822
|
event_type=EventType.INTERFACE,
|
|
823
823
|
event_data=cast(dict[EventKey, Any], INTERFACE_EVENT_SCHEMA(event_data)),
|
|
824
824
|
)
|
|
@@ -1019,7 +1019,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1019
1019
|
interface_id=interface_id,
|
|
1020
1020
|
device_address=device_address,
|
|
1021
1021
|
)
|
|
1022
|
-
except Exception as exc:
|
|
1022
|
+
except Exception as exc:
|
|
1023
1023
|
_LOGGER.error(
|
|
1024
1024
|
"CREATE_DEVICES failed: %s [%s] Unable to create device: %s, %s",
|
|
1025
1025
|
type(exc).__name__,
|
|
@@ -1034,7 +1034,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1034
1034
|
await device.load_value_cache()
|
|
1035
1035
|
new_devices.add(device)
|
|
1036
1036
|
self._devices[device_address] = device
|
|
1037
|
-
except Exception as exc:
|
|
1037
|
+
except Exception as exc:
|
|
1038
1038
|
_LOGGER.error(
|
|
1039
1039
|
"CREATE_DEVICES failed: %s [%s] Unable to create data points: %s, %s",
|
|
1040
1040
|
type(exc).__name__,
|
|
@@ -1047,7 +1047,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1047
1047
|
if new_devices:
|
|
1048
1048
|
new_dps = _get_new_data_points(new_devices=new_devices)
|
|
1049
1049
|
new_channel_events = _get_new_channel_events(new_devices=new_devices)
|
|
1050
|
-
self.
|
|
1050
|
+
self.emit_backend_system_callback(
|
|
1051
1051
|
system_event=BackendSystemEvent.DEVICES_CREATED,
|
|
1052
1052
|
new_data_points=new_dps,
|
|
1053
1053
|
new_channel_events=new_channel_events,
|
|
@@ -1157,7 +1157,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1157
1157
|
)
|
|
1158
1158
|
)
|
|
1159
1159
|
):
|
|
1160
|
-
self.
|
|
1160
|
+
self.emit_backend_system_callback(
|
|
1161
1161
|
system_event=BackendSystemEvent.DEVICES_DELAYED,
|
|
1162
1162
|
new_addresses=new_addresses,
|
|
1163
1163
|
interface_id=interface_id,
|
|
@@ -1281,7 +1281,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1281
1281
|
for callback_handler in self._data_point_key_event_subscriptions[dpk]:
|
|
1282
1282
|
if callable(callback_handler):
|
|
1283
1283
|
await callback_handler(value=value, received_at=received_at)
|
|
1284
|
-
except RuntimeError as rterr:
|
|
1284
|
+
except RuntimeError as rterr:
|
|
1285
1285
|
_LOGGER_EVENT.debug(
|
|
1286
1286
|
"EVENT: RuntimeError [%s]. Failed to call callback for: %s, %s, %s",
|
|
1287
1287
|
extract_exc_args(exc=rterr),
|
|
@@ -1289,7 +1289,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1289
1289
|
channel_address,
|
|
1290
1290
|
parameter,
|
|
1291
1291
|
)
|
|
1292
|
-
except Exception as exc:
|
|
1292
|
+
except Exception as exc:
|
|
1293
1293
|
_LOGGER_EVENT.warning(
|
|
1294
1294
|
"EVENT failed: Unable to call callback for: %s, %s, %s, %s",
|
|
1295
1295
|
interface_id,
|
|
@@ -1331,10 +1331,10 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1331
1331
|
if callable(callback_handler):
|
|
1332
1332
|
received_at = datetime.now()
|
|
1333
1333
|
self._looper.create_task(
|
|
1334
|
-
target=callback_handler(value=value, received_at=received_at),
|
|
1334
|
+
target=lambda: callback_handler(value=value, received_at=received_at),
|
|
1335
1335
|
name=f"sysvar-data-point-event-{state_path}",
|
|
1336
1336
|
)
|
|
1337
|
-
except RuntimeError as rterr:
|
|
1337
|
+
except RuntimeError as rterr:
|
|
1338
1338
|
_LOGGER_EVENT.debug(
|
|
1339
1339
|
"EVENT: RuntimeError [%s]. Failed to call callback for: %s",
|
|
1340
1340
|
extract_exc_args(exc=rterr),
|
|
@@ -1659,9 +1659,9 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1659
1659
|
self._homematic_callbacks.remove(cb)
|
|
1660
1660
|
|
|
1661
1661
|
@loop_check
|
|
1662
|
-
def
|
|
1662
|
+
def emit_homematic_callback(self, *, event_type: EventType, event_data: dict[EventKey, str]) -> None:
|
|
1663
1663
|
"""
|
|
1664
|
-
|
|
1664
|
+
Emit homematic_callback in central.
|
|
1665
1665
|
|
|
1666
1666
|
# Events like INTERFACE, KEYPRESS, ...
|
|
1667
1667
|
"""
|
|
@@ -1670,7 +1670,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1670
1670
|
callback_handler(event_type=event_type, event_data=event_data)
|
|
1671
1671
|
except Exception as exc:
|
|
1672
1672
|
_LOGGER.error(
|
|
1673
|
-
"
|
|
1673
|
+
"EMIT_HOMEMATIC_CALLBACK: Unable to call handler: %s",
|
|
1674
1674
|
extract_exc_args(exc=exc),
|
|
1675
1675
|
)
|
|
1676
1676
|
|
|
@@ -1687,13 +1687,13 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1687
1687
|
self._backend_parameter_callbacks.remove(cb)
|
|
1688
1688
|
|
|
1689
1689
|
@loop_check
|
|
1690
|
-
def
|
|
1690
|
+
def emit_backend_parameter_callback(
|
|
1691
1691
|
self, *, interface_id: str, channel_address: str, parameter: str, value: Any
|
|
1692
1692
|
) -> None:
|
|
1693
1693
|
"""
|
|
1694
|
-
|
|
1694
|
+
Emit backend_parameter callback in central.
|
|
1695
1695
|
|
|
1696
|
-
Re-
|
|
1696
|
+
Re-emitted events from the backend for parameter updates.
|
|
1697
1697
|
"""
|
|
1698
1698
|
for callback_handler in self._backend_parameter_callbacks:
|
|
1699
1699
|
try:
|
|
@@ -1702,7 +1702,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1702
1702
|
)
|
|
1703
1703
|
except Exception as exc:
|
|
1704
1704
|
_LOGGER.error(
|
|
1705
|
-
"
|
|
1705
|
+
"EMIT_BACKEND_PARAMETER_CALLBACK: Unable to call handler: %s",
|
|
1706
1706
|
extract_exc_args(exc=exc),
|
|
1707
1707
|
)
|
|
1708
1708
|
|
|
@@ -1719,9 +1719,9 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1719
1719
|
self._backend_system_callbacks.remove(cb)
|
|
1720
1720
|
|
|
1721
1721
|
@loop_check
|
|
1722
|
-
def
|
|
1722
|
+
def emit_backend_system_callback(self, *, system_event: BackendSystemEvent, **kwargs: Any) -> None:
|
|
1723
1723
|
"""
|
|
1724
|
-
|
|
1724
|
+
Emit system_event callback in central.
|
|
1725
1725
|
|
|
1726
1726
|
e.g. DEVICES_CREATED, HUB_REFRESHED
|
|
1727
1727
|
"""
|
|
@@ -1730,7 +1730,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
|
|
|
1730
1730
|
callback_handler(system_event=system_event, **kwargs)
|
|
1731
1731
|
except Exception as exc:
|
|
1732
1732
|
_LOGGER.error(
|
|
1733
|
-
"
|
|
1733
|
+
"EMIT_BACKEND_SYSTEM_CALLBACK: Unable to call handler: %s",
|
|
1734
1734
|
extract_exc_args(exc=exc),
|
|
1735
1735
|
)
|
|
1736
1736
|
|
|
@@ -2125,7 +2125,7 @@ class CentralConfig:
|
|
|
2125
2125
|
try:
|
|
2126
2126
|
self.check_config()
|
|
2127
2127
|
return CentralUnit(central_config=self)
|
|
2128
|
-
except BaseHomematicException as bhexc:
|
|
2128
|
+
except BaseHomematicException as bhexc: # pragma: no cover
|
|
2129
2129
|
raise AioHomematicException(
|
|
2130
2130
|
f"CREATE_CENTRAL: Not able to create a central: : {extract_exc_args(exc=bhexc)}"
|
|
2131
2131
|
) from bhexc
|
|
@@ -52,7 +52,7 @@ def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
|
|
|
52
52
|
central = unit.get_central(interface_id=str(args[1]))
|
|
53
53
|
if central:
|
|
54
54
|
central.looper.create_task(
|
|
55
|
-
target=_exec_backend_system_callback(*args, **kwargs),
|
|
55
|
+
target=lambda: _exec_backend_system_callback(*args, **kwargs),
|
|
56
56
|
name="wrapper_backend_system_callback",
|
|
57
57
|
)
|
|
58
58
|
except Exception as exc:
|
|
@@ -72,7 +72,7 @@ def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
|
|
|
72
72
|
interface_id: str = args[0] if len(args) > 0 else str(kwargs[_INTERFACE_ID])
|
|
73
73
|
if client := hmcl.get_client(interface_id=interface_id):
|
|
74
74
|
client.modified_at = datetime.now()
|
|
75
|
-
client.central.
|
|
75
|
+
client.central.emit_backend_system_callback(system_event=system_event, **kwargs)
|
|
76
76
|
except Exception as exc: # pragma: no cover
|
|
77
77
|
_LOGGER.warning(
|
|
78
78
|
"EXEC_BACKEND_SYSTEM_CALLBACK failed: Unable to reduce kwargs for backend_system_callback"
|
|
@@ -109,7 +109,7 @@ def callback_event[**P, R](func: Callable[P, R]) -> Callable:
|
|
|
109
109
|
|
|
110
110
|
if client := hmcl.get_client(interface_id=interface_id):
|
|
111
111
|
client.modified_at = datetime.now()
|
|
112
|
-
client.central.
|
|
112
|
+
client.central.emit_backend_parameter_callback(
|
|
113
113
|
interface_id=interface_id, channel_address=channel_address, parameter=parameter, value=value
|
|
114
114
|
)
|
|
115
115
|
except Exception as exc: # pragma: no cover
|
|
@@ -123,7 +123,7 @@ def callback_event[**P, R](func: Callable[P, R]) -> Callable:
|
|
|
123
123
|
unit = args[0]
|
|
124
124
|
if isinstance(unit, hmcu.CentralUnit):
|
|
125
125
|
unit.looper.create_task(
|
|
126
|
-
target=_async_wrap_sync(_exec_event_callback, *args, **kwargs),
|
|
126
|
+
target=lambda: _async_wrap_sync(_exec_event_callback, *args, **kwargs),
|
|
127
127
|
name="wrapper_event_callback",
|
|
128
128
|
)
|
|
129
129
|
return
|
aiohomematic/client/__init__.py
CHANGED
|
@@ -336,7 +336,7 @@ class Client(ABC, LogContextMixin):
|
|
|
336
336
|
"available" if available else "unavailable",
|
|
337
337
|
self.interface_id,
|
|
338
338
|
)
|
|
339
|
-
self.central.
|
|
339
|
+
self.central.emit_interface_event(
|
|
340
340
|
interface_id=self.interface_id,
|
|
341
341
|
interface_event_type=InterfaceEventType.PROXY,
|
|
342
342
|
data={EventKey.AVAILABLE: available},
|
|
@@ -406,7 +406,7 @@ class Client(ABC, LogContextMixin):
|
|
|
406
406
|
) is not None:
|
|
407
407
|
if (seconds_since_last_event := (datetime.now() - last_events_dt).total_seconds()) > CALLBACK_WARN_INTERVAL:
|
|
408
408
|
if self._is_callback_alive:
|
|
409
|
-
self.central.
|
|
409
|
+
self.central.emit_interface_event(
|
|
410
410
|
interface_id=self.interface_id,
|
|
411
411
|
interface_event_type=InterfaceEventType.CALLBACK,
|
|
412
412
|
data={
|
|
@@ -423,7 +423,7 @@ class Client(ABC, LogContextMixin):
|
|
|
423
423
|
return False
|
|
424
424
|
|
|
425
425
|
if not self._is_callback_alive:
|
|
426
|
-
self.central.
|
|
426
|
+
self.central.emit_interface_event(
|
|
427
427
|
interface_id=self.interface_id,
|
|
428
428
|
interface_event_type=InterfaceEventType.CALLBACK,
|
|
429
429
|
data={EventKey.AVAILABLE: True},
|
|
@@ -437,13 +437,13 @@ class Client(ABC, LogContextMixin):
|
|
|
437
437
|
"""Send ping to the backend to generate PONG event."""
|
|
438
438
|
|
|
439
439
|
@inspector
|
|
440
|
-
async def execute_program(self, *, pid: str) -> bool:
|
|
440
|
+
async def execute_program(self, *, pid: str) -> bool: # pragma: no cover
|
|
441
441
|
"""Execute a program on the backend."""
|
|
442
442
|
_LOGGER.debug("EXECUTE_PROGRAM: not usable for %s.", self.interface_id)
|
|
443
443
|
return True
|
|
444
444
|
|
|
445
445
|
@inspector
|
|
446
|
-
async def set_program_state(self, *, pid: str, state: bool) -> bool:
|
|
446
|
+
async def set_program_state(self, *, pid: str, state: bool) -> bool: # pragma: no cover
|
|
447
447
|
"""Set the program state on the backend."""
|
|
448
448
|
_LOGGER.debug("SET_PROGRAM_STATE: not usable for %s.", self.interface_id)
|
|
449
449
|
return True
|
|
@@ -471,19 +471,21 @@ class Client(ABC, LogContextMixin):
|
|
|
471
471
|
"""Get all system variables from the backend."""
|
|
472
472
|
|
|
473
473
|
@inspector(re_raise=False)
|
|
474
|
-
async def get_all_programs(
|
|
474
|
+
async def get_all_programs(
|
|
475
|
+
self, *, markers: tuple[DescriptionMarker | str, ...]
|
|
476
|
+
) -> tuple[ProgramData, ...] | None: # pragma: no cover
|
|
475
477
|
"""Get all programs, if available."""
|
|
476
478
|
_LOGGER.debug("GET_ALL_PROGRAMS: not usable for %s.", self.interface_id)
|
|
477
479
|
return None
|
|
478
480
|
|
|
479
481
|
@inspector(re_raise=False, no_raise_return={})
|
|
480
|
-
async def get_all_rooms(self) -> dict[str, set[str]]:
|
|
482
|
+
async def get_all_rooms(self) -> dict[str, set[str]]: # pragma: no cover
|
|
481
483
|
"""Get all rooms, if available."""
|
|
482
484
|
_LOGGER.debug("GET_ALL_ROOMS: not usable for %s.", self.interface_id)
|
|
483
485
|
return {}
|
|
484
486
|
|
|
485
487
|
@inspector(re_raise=False, no_raise_return={})
|
|
486
|
-
async def get_all_functions(self) -> dict[str, set[str]]:
|
|
488
|
+
async def get_all_functions(self) -> dict[str, set[str]]: # pragma: no cover
|
|
487
489
|
"""Get all functions, if available."""
|
|
488
490
|
_LOGGER.debug("GET_ALL_FUNCTIONS: not usable for %s.", self.interface_id)
|
|
489
491
|
return {}
|
|
@@ -767,7 +769,7 @@ class Client(ABC, LogContextMixin):
|
|
|
767
769
|
call_source,
|
|
768
770
|
)
|
|
769
771
|
return cast(dict[str, Any], await self._proxy_read.getParamset(address, paramset_key))
|
|
770
|
-
except BaseHomematicException as bhexc:
|
|
772
|
+
except BaseHomematicException as bhexc: # pragma: no cover
|
|
771
773
|
raise ClientException(
|
|
772
774
|
f"GET_PARAMSET failed with for {address}/{paramset_key}: {extract_exc_args(exc=bhexc)}"
|
|
773
775
|
) from bhexc
|
|
@@ -1033,7 +1035,7 @@ class Client(ABC, LogContextMixin):
|
|
|
1033
1035
|
"""List devices of the backend."""
|
|
1034
1036
|
try:
|
|
1035
1037
|
return tuple(await self._proxy_read.listDevices())
|
|
1036
|
-
except BaseHomematicException as bhexc:
|
|
1038
|
+
except BaseHomematicException as bhexc: # pragma: no cover
|
|
1037
1039
|
_LOGGER.debug(
|
|
1038
1040
|
"LIST_DEVICES failed: %s [%s]",
|
|
1039
1041
|
bhexc.name,
|
|
@@ -1145,7 +1147,7 @@ class ClientCCU(Client):
|
|
|
1145
1147
|
self.central.data_cache.add_data(interface=self.interface, all_device_data=all_device_data)
|
|
1146
1148
|
return
|
|
1147
1149
|
except ClientException:
|
|
1148
|
-
self.central.
|
|
1150
|
+
self.central.emit_interface_event(
|
|
1149
1151
|
interface_id=self.interface_id,
|
|
1150
1152
|
interface_event_type=InterfaceEventType.FETCH_DATA,
|
|
1151
1153
|
data={EventKey.AVAILABLE: False},
|
|
@@ -1545,7 +1547,7 @@ class ClientHomegear(ClientCCU):
|
|
|
1545
1547
|
address=address,
|
|
1546
1548
|
name=await self._proxy_read.getMetadata(address, _NAME),
|
|
1547
1549
|
)
|
|
1548
|
-
except BaseHomematicException as bhexc:
|
|
1550
|
+
except BaseHomematicException as bhexc: # pragma: no cover
|
|
1549
1551
|
_LOGGER.warning(
|
|
1550
1552
|
"%s [%s] Failed to fetch name for device %s",
|
|
1551
1553
|
bhexc.name,
|
|
@@ -1559,7 +1561,7 @@ class ClientHomegear(ClientCCU):
|
|
|
1559
1561
|
try:
|
|
1560
1562
|
await self._proxy.clientServerInitialized(self.interface_id)
|
|
1561
1563
|
self.modified_at = datetime.now()
|
|
1562
|
-
except BaseHomematicException as bhexc:
|
|
1564
|
+
except BaseHomematicException as bhexc: # pragma: no cover
|
|
1563
1565
|
_LOGGER.debug(
|
|
1564
1566
|
"CHECK_CONNECTION_AVAILABILITY failed: %s [%s]",
|
|
1565
1567
|
bhexc.name,
|
|
@@ -1661,7 +1663,7 @@ class ClientConfig:
|
|
|
1661
1663
|
raise NoConnectionException(f"No connection to {self.interface_id}")
|
|
1662
1664
|
except BaseHomematicException:
|
|
1663
1665
|
raise
|
|
1664
|
-
except Exception as exc:
|
|
1666
|
+
except Exception as exc: # pragma: no cover
|
|
1665
1667
|
raise NoConnectionException(f"Unable to connect {extract_exc_args(exc=exc)}.") from exc
|
|
1666
1668
|
|
|
1667
1669
|
async def _get_version(self) -> str:
|
|
@@ -1673,7 +1675,7 @@ class ClientConfig:
|
|
|
1673
1675
|
if (methods := check_proxy.supported_methods) and "getVersion" in methods:
|
|
1674
1676
|
# BidCos-Wired does not support getVersion()
|
|
1675
1677
|
return cast(str, await check_proxy.getVersion())
|
|
1676
|
-
except Exception as exc:
|
|
1678
|
+
except Exception as exc: # pragma: no cover
|
|
1677
1679
|
raise NoConnectionException(f"Unable to connect {extract_exc_args(exc=exc)}.") from exc
|
|
1678
1680
|
return "0"
|
|
1679
1681
|
|
aiohomematic/client/json_rpc.py
CHANGED
|
@@ -339,6 +339,11 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
|
|
|
339
339
|
keep_session: bool = True,
|
|
340
340
|
) -> dict[str, Any] | Any:
|
|
341
341
|
"""Reusable JSON-RPC POST_SCRIPT function."""
|
|
342
|
+
# Load and validate script first to avoid any network when script is missing
|
|
343
|
+
if (script := await self._get_script(script_name=script_name)) is None:
|
|
344
|
+
raise ClientException(f"Script file for {script_name} does not exist")
|
|
345
|
+
|
|
346
|
+
# Prepare session only after we know we have a script to run
|
|
342
347
|
if keep_session:
|
|
343
348
|
await self._login_or_renew()
|
|
344
349
|
session_id = self._session_id
|
|
@@ -351,9 +356,6 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
|
|
|
351
356
|
if self._supported_methods is None:
|
|
352
357
|
await self._check_supported_methods()
|
|
353
358
|
|
|
354
|
-
if (script := await self._get_script(script_name=script_name)) is None:
|
|
355
|
-
raise ClientException(f"Script file for {script_name} does not exist")
|
|
356
|
-
|
|
357
359
|
if extra_params:
|
|
358
360
|
for variable, value in extra_params.items():
|
|
359
361
|
script = script.replace(f"##{variable}##", value)
|
|
@@ -384,9 +386,12 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
|
|
|
384
386
|
def _load_script(script_name: str) -> str | None:
|
|
385
387
|
"""Load script from file system."""
|
|
386
388
|
script_file = os.path.join(Path(__file__).resolve().parent, REGA_SCRIPT_PATH, script_name)
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
389
|
+
try:
|
|
390
|
+
if script := Path(script_file).read_text(encoding=UTF_8):
|
|
391
|
+
self._script_cache[script_name] = script
|
|
392
|
+
return script
|
|
393
|
+
except FileNotFoundError:
|
|
394
|
+
return None
|
|
390
395
|
return None
|
|
391
396
|
|
|
392
397
|
return await self._looper.async_add_executor_job(_load_script, script_name, name=f"load_script-{script_name}")
|
aiohomematic/client/rpc_proxy.py
CHANGED
|
@@ -216,7 +216,7 @@ class AioXmlRpcProxy(BaseRpcProxy, xmlrpc.client.ServerProxy):
|
|
|
216
216
|
except BaseHomematicException as bhe:
|
|
217
217
|
self._record_session(method=args[0], params=args[1:], exc=bhe)
|
|
218
218
|
raise
|
|
219
|
-
except SSLError as sslerr:
|
|
219
|
+
except SSLError as sslerr: # pragma: no cover - SSL handshake/cert errors are OS/OpenSSL dependent and not reliably reproducible in CI
|
|
220
220
|
message = f"SSLError on {self._interface_id}: {extract_exc_args(exc=sslerr)}"
|
|
221
221
|
level = logging.ERROR
|
|
222
222
|
if sslerr.args[0] in _SSL_ERROR_CODES:
|
|
@@ -237,7 +237,7 @@ class AioXmlRpcProxy(BaseRpcProxy, xmlrpc.client.ServerProxy):
|
|
|
237
237
|
log_context=self.log_context,
|
|
238
238
|
)
|
|
239
239
|
raise NoConnectionException(message) from sslerr
|
|
240
|
-
except OSError as oserr:
|
|
240
|
+
except OSError as oserr: # pragma: no cover - Network/socket errno differences are platform/environment specific; simulating reliably in CI would be flaky
|
|
241
241
|
message = f"OSError on {self._interface_id}: {extract_exc_args(exc=oserr)}"
|
|
242
242
|
level = (
|
|
243
243
|
logging.ERROR
|
aiohomematic/const.py
CHANGED
|
@@ -19,7 +19,7 @@ import sys
|
|
|
19
19
|
from types import MappingProxyType
|
|
20
20
|
from typing import Any, Final, NamedTuple, Required, TypeAlias, TypedDict
|
|
21
21
|
|
|
22
|
-
VERSION: Final = "2025.10.21"
|
|
22
|
+
VERSION: Final = "2025.10.24"
|
|
23
23
|
|
|
24
24
|
# Detect test speedup mode via environment
|
|
25
25
|
_TEST_SPEEDUP: Final = (
|
|
@@ -303,7 +303,7 @@ class EventKey(StrEnum):
|
|
|
303
303
|
INTERFACE_ID = "interface_id"
|
|
304
304
|
MODEL = "model"
|
|
305
305
|
PARAMETER = "parameter"
|
|
306
|
-
|
|
306
|
+
PONG_MISMATCH_ACCEPTABLE = "pong_mismatch_allowed"
|
|
307
307
|
PONG_MISMATCH_COUNT = "pong_mismatch_count"
|
|
308
308
|
SECONDS_SINCE_LAST_EVENT = "seconds_since_last_event"
|
|
309
309
|
TYPE = "type"
|
aiohomematic/hmcli.py
CHANGED
|
@@ -13,6 +13,7 @@ exposes the 'main' entrypoint for invocation. All other names are internal.
|
|
|
13
13
|
from __future__ import annotations
|
|
14
14
|
|
|
15
15
|
import argparse
|
|
16
|
+
import json
|
|
16
17
|
import sys
|
|
17
18
|
from typing import Any
|
|
18
19
|
from xmlrpc.client import ServerProxy
|
|
@@ -124,11 +125,15 @@ def main() -> None:
|
|
|
124
125
|
|
|
125
126
|
try:
|
|
126
127
|
if args.paramset_key == ParamsetKey.VALUES and args.value is None:
|
|
127
|
-
proxy.getValue(args.address, args.parameter)
|
|
128
|
+
result = proxy.getValue(args.address, args.parameter)
|
|
128
129
|
if args.json:
|
|
129
|
-
|
|
130
|
+
print(
|
|
131
|
+
json.dumps(
|
|
132
|
+
{"address": args.address, "parameter": args.parameter, "value": result}, ensure_ascii=False
|
|
133
|
+
)
|
|
134
|
+
)
|
|
130
135
|
else:
|
|
131
|
-
|
|
136
|
+
print(result)
|
|
132
137
|
sys.exit(0)
|
|
133
138
|
elif args.paramset_key == ParamsetKey.VALUES and args.value:
|
|
134
139
|
value: Any
|
|
@@ -144,13 +149,22 @@ def main() -> None:
|
|
|
144
149
|
sys.exit(0)
|
|
145
150
|
elif args.paramset_key == ParamsetKey.MASTER and args.value is None:
|
|
146
151
|
paramset: dict[str, Any] | None
|
|
147
|
-
if (paramset := proxy.getParamset(args.address, args.paramset_key)) and paramset
|
|
148
|
-
args.parameter
|
|
149
|
-
):
|
|
152
|
+
if (paramset := proxy.getParamset(args.address, args.paramset_key)) and (args.parameter in paramset): # type: ignore[assignment]
|
|
153
|
+
result = paramset[args.parameter]
|
|
150
154
|
if args.json:
|
|
151
|
-
|
|
155
|
+
print(
|
|
156
|
+
json.dumps(
|
|
157
|
+
{
|
|
158
|
+
"address": args.address,
|
|
159
|
+
"paramset_key": args.paramset_key,
|
|
160
|
+
"parameter": args.parameter,
|
|
161
|
+
"value": result,
|
|
162
|
+
},
|
|
163
|
+
ensure_ascii=False,
|
|
164
|
+
)
|
|
165
|
+
)
|
|
152
166
|
else:
|
|
153
|
-
|
|
167
|
+
print(result)
|
|
154
168
|
sys.exit(0)
|
|
155
169
|
elif args.paramset_key == ParamsetKey.MASTER and args.value:
|
|
156
170
|
if args.type == "int":
|
|
@@ -163,7 +177,8 @@ def main() -> None:
|
|
|
163
177
|
value = args.value
|
|
164
178
|
proxy.putParamset(args.address, args.paramset_key, {args.parameter: value})
|
|
165
179
|
sys.exit(0)
|
|
166
|
-
except Exception:
|
|
180
|
+
except Exception as ex:
|
|
181
|
+
print(str(ex), file=sys.stderr)
|
|
167
182
|
sys.exit(1)
|
|
168
183
|
|
|
169
184
|
|
|
@@ -99,9 +99,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
|
|
|
99
99
|
if generic_data_point := self._channel.get_generic_data_point(parameter=parameter, paramset_key=paramset_key):
|
|
100
100
|
self._data_points.append(generic_data_point)
|
|
101
101
|
self._unregister_callbacks.append(
|
|
102
|
-
generic_data_point.register_internal_data_point_updated_callback(
|
|
103
|
-
cb=self.fire_data_point_updated_callback
|
|
104
|
-
)
|
|
102
|
+
generic_data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
|
|
105
103
|
)
|
|
106
104
|
return cast(data_point_type, generic_data_point) # type: ignore[valid-type]
|
|
107
105
|
return cast(
|
|
@@ -123,9 +121,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
|
|
|
123
121
|
):
|
|
124
122
|
self._data_points.append(generic_data_point)
|
|
125
123
|
self._unregister_callbacks.append(
|
|
126
|
-
generic_data_point.register_internal_data_point_updated_callback(
|
|
127
|
-
cb=self.fire_data_point_updated_callback
|
|
128
|
-
)
|
|
124
|
+
generic_data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
|
|
129
125
|
)
|
|
130
126
|
return cast(data_point_type, generic_data_point) # type: ignore[valid-type]
|
|
131
127
|
return cast(
|
|
@@ -296,7 +292,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
|
|
|
296
292
|
"""Init the data point values."""
|
|
297
293
|
for dp in self._readable_data_points:
|
|
298
294
|
await dp.load_data_point_value(call_source=call_source, direct_call=direct_call)
|
|
299
|
-
self.
|
|
295
|
+
self.emit_data_point_updated_event()
|
|
300
296
|
|
|
301
297
|
def is_state_change(self, **kwargs: Any) -> bool:
|
|
302
298
|
"""
|
|
@@ -310,9 +306,9 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
|
|
|
310
306
|
return False
|
|
311
307
|
|
|
312
308
|
@property
|
|
313
|
-
def
|
|
309
|
+
def _should_emit_data_point_updated_callback(self) -> bool:
|
|
314
310
|
"""Check if a data point has been updated or refreshed."""
|
|
315
|
-
if self.
|
|
311
|
+
if self.emitted_event_recently: # pylint: disable=using-constant-test
|
|
316
312
|
return False
|
|
317
313
|
|
|
318
314
|
if (relevant_values_data_point := self._relevant_values_data_points) is not None and len(
|
|
@@ -320,7 +316,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
|
|
|
320
316
|
) <= 1:
|
|
321
317
|
return True
|
|
322
318
|
|
|
323
|
-
return all(dp.
|
|
319
|
+
return all(dp.emitted_event_recently for dp in relevant_values_data_point)
|
|
324
320
|
|
|
325
321
|
def _unregister_data_point_updated_callback(self, *, cb: Callable, custom_id: str) -> None:
|
|
326
322
|
"""Unregister update callback."""
|
|
@@ -188,7 +188,7 @@ class CustomDataPoint(BaseDataPoint):
|
|
|
188
188
|
"""Init the data point values."""
|
|
189
189
|
for dp in self._readable_data_points:
|
|
190
190
|
await dp.load_data_point_value(call_source=call_source, direct_call=direct_call)
|
|
191
|
-
self.
|
|
191
|
+
self.emit_data_point_updated_event()
|
|
192
192
|
|
|
193
193
|
def is_state_change(self, **kwargs: Any) -> bool:
|
|
194
194
|
"""
|
|
@@ -268,7 +268,7 @@ class CustomDataPoint(BaseDataPoint):
|
|
|
268
268
|
data_point.force_usage(forced_usage=DataPointUsage.NO_CREATE)
|
|
269
269
|
|
|
270
270
|
self._unregister_callbacks.append(
|
|
271
|
-
data_point.register_internal_data_point_updated_callback(cb=self.
|
|
271
|
+
data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
|
|
272
272
|
)
|
|
273
273
|
self._data_points[field] = data_point
|
|
274
274
|
|
|
@@ -353,7 +353,7 @@ _CUSTOM_DATA_POINT_DEFINITION: Mapping[CDPD, Mapping[int | DeviceProfile, Any]]
|
|
|
353
353
|
Field.LEVEL: Parameter.LEVEL,
|
|
354
354
|
Field.CONCENTRATION: Parameter.CONCENTRATION,
|
|
355
355
|
},
|
|
356
|
-
8: {
|
|
356
|
+
8: { # BWTH
|
|
357
357
|
Field.STATE: Parameter.STATE,
|
|
358
358
|
},
|
|
359
359
|
},
|
|
@@ -361,6 +361,9 @@ _CUSTOM_DATA_POINT_DEFINITION: Mapping[CDPD, Mapping[int | DeviceProfile, Any]]
|
|
|
361
361
|
7: {
|
|
362
362
|
Field.HEATING_VALVE_TYPE: Parameter.HEATING_VALVE_TYPE,
|
|
363
363
|
},
|
|
364
|
+
-5: { # WGTC
|
|
365
|
+
Field.STATE: Parameter.STATE,
|
|
366
|
+
},
|
|
364
367
|
},
|
|
365
368
|
},
|
|
366
369
|
},
|