aiohomematic 2025.10.22__py3-none-any.whl → 2025.10.25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiohomematic might be problematic.
- aiohomematic/central/__init__.py +29 -29
- aiohomematic/central/decorators.py +4 -4
- aiohomematic/client/__init__.py +18 -16
- aiohomematic/client/json_rpc.py +11 -6
- aiohomematic/client/rpc_proxy.py +2 -2
- aiohomematic/const.py +1 -1
- aiohomematic/hmcli.py +24 -9
- aiohomematic/model/calculated/data_point.py +6 -10
- aiohomematic/model/custom/data_point.py +2 -2
- aiohomematic/model/custom/definition.py +4 -1
- aiohomematic/model/custom/light.py +1 -1
- aiohomematic/model/data_point.py +19 -19
- aiohomematic/model/device.py +7 -7
- aiohomematic/model/event.py +7 -7
- aiohomematic/model/generic/data_point.py +2 -2
- aiohomematic/model/hub/__init__.py +2 -2
- aiohomematic/model/hub/data_point.py +3 -3
- aiohomematic/property_decorators.py +4 -4
- aiohomematic/store/dynamic.py +20 -13
- aiohomematic/store/visibility.py +3 -4
- {aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/METADATA +8 -2
- {aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/RECORD +26 -25
- aiohomematic-2025.10.25.dist-info/entry_points.txt +2 -0
- {aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/WHEEL +0 -0
- {aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/licenses/LICENSE +0 -0
- {aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/top_level.txt +0 -0
aiohomematic/central/__init__.py
CHANGED

@@ -251,7 +251,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
# e.g. DEVICES_CREATED, HUB_REFRESHED
self._backend_system_callbacks: Final[set[Callable]] = set()
# Signature: (interface_id, channel_address, parameter, value)
- # Re-
+ # Re-emitted events from the backend for parameter updates
self._backend_parameter_callbacks: Final[set[Callable]] = set()
# Signature: (event_type, event_data)
# Events like INTERFACE, KEYPRESS, ...

@@ -454,7 +454,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
def remove_sysvar_data_point(self, *, vid: str) -> None:
"""Remove a sysvar data_point."""
if (sysvar_dp := self.get_sysvar_data_point(vid=vid)) is not None:
- sysvar_dp.
+ sysvar_dp.emit_device_removed_event()
del self._sysvar_data_points[vid]
if sysvar_dp.state_path in self._sysvar_data_point_event_subscriptions:
del self._sysvar_data_point_event_subscriptions[sysvar_dp.state_path]

@@ -466,8 +466,8 @@ class CentralUnit(LogContextMixin, PayloadMixin):
def remove_program_button(self, *, pid: str) -> None:
"""Remove a program button."""
if (program_dp := self.get_program_data_point(pid=pid)) is not None:
- program_dp.button.
- program_dp.switch.
+ program_dp.button.emit_device_removed_event()
+ program_dp.switch.emit_device_removed_event()
del self._program_data_points[pid]

def identify_channel(self, *, text: str) -> Channel | None:

@@ -532,7 +532,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
self._xml_rpc_server = xml_rpc_server
self._listen_port_xml_rpc = xml_rpc_server.listen_port
self._xml_rpc_server.add_central(central=self)
- except OSError as oserr:
+ except OSError as oserr: # pragma: no cover - environment/OS-specific socket binding failures are not reliably reproducible in CI
self._state = CentralUnitState.STOPPED_BY_ERROR
raise AioHomematicException(
f"START: Failed to start central unit {self.name}: {extract_exc_args(exc=oserr)}"

@@ -763,8 +763,8 @@ class CentralUnit(LogContextMixin, PayloadMixin):
)
self._clients[client.interface_id] = client
return True
- except BaseHomematicException as bhexc:
- self.
+ except BaseHomematicException as bhexc: # pragma: no cover - deterministic simulation of client creation failures would require the full client/proxy stack and network timing; keeping this defensive log-and-state branch untested to avoid brittle CI
+ self.emit_interface_event(
interface_id=interface_config.interface_id,
interface_event_type=InterfaceEventType.PROXY,
data={EventKey.AVAILABLE: False},

@@ -803,14 +803,14 @@ class CentralUnit(LogContextMixin, PayloadMixin):
await self._hub.fetch_sysvar_data(scheduled=True)

@loop_check
- def
+ def emit_interface_event(
self,
*,
interface_id: str,
interface_event_type: InterfaceEventType,
data: dict[str, Any],
) -> None:
- """
+ """Emit an event about the interface status."""
data = data or {}
event_data: dict[str, Any] = {
EventKey.INTERFACE_ID: interface_id,

@@ -818,7 +818,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
EventKey.DATA: data,
}

- self.
+ self.emit_homematic_callback(
event_type=EventType.INTERFACE,
event_data=cast(dict[EventKey, Any], INTERFACE_EVENT_SCHEMA(event_data)),
)

@@ -1019,7 +1019,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
interface_id=interface_id,
device_address=device_address,
)
- except Exception as exc:
+ except Exception as exc:
_LOGGER.error(
"CREATE_DEVICES failed: %s [%s] Unable to create device: %s, %s",
type(exc).__name__,

@@ -1034,7 +1034,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
await device.load_value_cache()
new_devices.add(device)
self._devices[device_address] = device
- except Exception as exc:
+ except Exception as exc:
_LOGGER.error(
"CREATE_DEVICES failed: %s [%s] Unable to create data points: %s, %s",
type(exc).__name__,

@@ -1047,7 +1047,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
if new_devices:
new_dps = _get_new_data_points(new_devices=new_devices)
new_channel_events = _get_new_channel_events(new_devices=new_devices)
- self.
+ self.emit_backend_system_callback(
system_event=BackendSystemEvent.DEVICES_CREATED,
new_data_points=new_dps,
new_channel_events=new_channel_events,

@@ -1157,7 +1157,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
)
)
):
- self.
+ self.emit_backend_system_callback(
system_event=BackendSystemEvent.DEVICES_DELAYED,
new_addresses=new_addresses,
interface_id=interface_id,

@@ -1281,7 +1281,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
for callback_handler in self._data_point_key_event_subscriptions[dpk]:
if callable(callback_handler):
await callback_handler(value=value, received_at=received_at)
- except RuntimeError as rterr:
+ except RuntimeError as rterr:
_LOGGER_EVENT.debug(
"EVENT: RuntimeError [%s]. Failed to call callback for: %s, %s, %s",
extract_exc_args(exc=rterr),

@@ -1289,7 +1289,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
channel_address,
parameter,
)
- except Exception as exc:
+ except Exception as exc:
_LOGGER_EVENT.warning(
"EVENT failed: Unable to call callback for: %s, %s, %s, %s",
interface_id,

@@ -1331,10 +1331,10 @@ class CentralUnit(LogContextMixin, PayloadMixin):
if callable(callback_handler):
received_at = datetime.now()
self._looper.create_task(
- target=callback_handler(value=value, received_at=received_at),
+ target=lambda: callback_handler(value=value, received_at=received_at),
name=f"sysvar-data-point-event-{state_path}",
)
- except RuntimeError as rterr:
+ except RuntimeError as rterr:
_LOGGER_EVENT.debug(
"EVENT: RuntimeError [%s]. Failed to call callback for: %s",
extract_exc_args(exc=rterr),

@@ -1659,9 +1659,9 @@ class CentralUnit(LogContextMixin, PayloadMixin):
self._homematic_callbacks.remove(cb)

@loop_check
- def
+ def emit_homematic_callback(self, *, event_type: EventType, event_data: dict[EventKey, str]) -> None:
"""
-
+ Emit homematic_callback in central.

# Events like INTERFACE, KEYPRESS, ...
"""

@@ -1670,7 +1670,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
callback_handler(event_type=event_type, event_data=event_data)
except Exception as exc:
_LOGGER.error(
- "
+ "EMIT_HOMEMATIC_CALLBACK: Unable to call handler: %s",
extract_exc_args(exc=exc),
)

@@ -1687,13 +1687,13 @@ class CentralUnit(LogContextMixin, PayloadMixin):
self._backend_parameter_callbacks.remove(cb)

@loop_check
- def
+ def emit_backend_parameter_callback(
self, *, interface_id: str, channel_address: str, parameter: str, value: Any
) -> None:
"""
-
+ Emit backend_parameter callback in central.

- Re-
+ Re-emitted events from the backend for parameter updates.
"""
for callback_handler in self._backend_parameter_callbacks:
try:

@@ -1702,7 +1702,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
)
except Exception as exc:
_LOGGER.error(
- "
+ "EMIT_BACKEND_PARAMETER_CALLBACK: Unable to call handler: %s",
extract_exc_args(exc=exc),
)

@@ -1719,9 +1719,9 @@ class CentralUnit(LogContextMixin, PayloadMixin):
self._backend_system_callbacks.remove(cb)

@loop_check
- def
+ def emit_backend_system_callback(self, *, system_event: BackendSystemEvent, **kwargs: Any) -> None:
"""
-
+ Emit system_event callback in central.

e.g. DEVICES_CREATED, HUB_REFRESHED
"""

@@ -1730,7 +1730,7 @@ class CentralUnit(LogContextMixin, PayloadMixin):
callback_handler(system_event=system_event, **kwargs)
except Exception as exc:
_LOGGER.error(
- "
+ "EMIT_BACKEND_SYSTEM_CALLBACK: Unable to call handler: %s",
extract_exc_args(exc=exc),
)

@@ -2125,7 +2125,7 @@ class CentralConfig:
try:
self.check_config()
return CentralUnit(central_config=self)
- except BaseHomematicException as bhexc:
+ except BaseHomematicException as bhexc: # pragma: no cover
raise AioHomematicException(
f"CREATE_CENTRAL: Not able to create a central: : {extract_exc_args(exc=bhexc)}"
) from bhexc

aiohomematic/central/decorators.py
CHANGED

@@ -52,7 +52,7 @@ def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
central = unit.get_central(interface_id=str(args[1]))
if central:
central.looper.create_task(
- target=_exec_backend_system_callback(*args, **kwargs),
+ target=lambda: _exec_backend_system_callback(*args, **kwargs),
name="wrapper_backend_system_callback",
)
except Exception as exc:

@@ -72,7 +72,7 @@ def callback_backend_system(system_event: BackendSystemEvent) -> Callable:
interface_id: str = args[0] if len(args) > 0 else str(kwargs[_INTERFACE_ID])
if client := hmcl.get_client(interface_id=interface_id):
client.modified_at = datetime.now()
- client.central.
+ client.central.emit_backend_system_callback(system_event=system_event, **kwargs)
except Exception as exc: # pragma: no cover
_LOGGER.warning(
"EXEC_BACKEND_SYSTEM_CALLBACK failed: Unable to reduce kwargs for backend_system_callback"

@@ -109,7 +109,7 @@ def callback_event[**P, R](func: Callable[P, R]) -> Callable:

if client := hmcl.get_client(interface_id=interface_id):
client.modified_at = datetime.now()
- client.central.
+ client.central.emit_backend_parameter_callback(
interface_id=interface_id, channel_address=channel_address, parameter=parameter, value=value
)
except Exception as exc: # pragma: no cover

@@ -123,7 +123,7 @@ def callback_event[**P, R](func: Callable[P, R]) -> Callable:
unit = args[0]
if isinstance(unit, hmcu.CentralUnit):
unit.looper.create_task(
- target=_async_wrap_sync(_exec_event_callback, *args, **kwargs),
+ target=lambda: _async_wrap_sync(_exec_event_callback, *args, **kwargs),
name="wrapper_event_callback",
)
return

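Note: the changes above in aiohomematic/central/__init__.py and aiohomematic/central/decorators.py switch create_task call sites from passing an already-created coroutine object to passing a zero-argument lambda. A minimal sketch of the pattern with plain asyncio and hypothetical names (the library's internal Looper API is not reproduced here): wrapping the call in a lambda defers coroutine creation until the task is actually scheduled, so no "coroutine was never awaited" warning can arise if scheduling is skipped.

import asyncio
from collections.abc import Awaitable, Callable


def create_task(target: Callable[[], Awaitable[None]], *, name: str) -> "asyncio.Task[None]":
    # Hypothetical helper: the callable is only invoked (creating the coroutine)
    # here, inside the running event loop, not at the call site.
    return asyncio.get_running_loop().create_task(target(), name=name)


async def main() -> None:
    async def on_update(value: int) -> None:
        print("updated:", value)

    # Mirrors the diff: target=lambda: callback(...) instead of target=callback(...).
    await create_task(lambda: on_update(42), name="wrapper_event_callback")


asyncio.run(main())
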
aiohomematic/client/__init__.py
CHANGED

@@ -336,7 +336,7 @@ class Client(ABC, LogContextMixin):
"available" if available else "unavailable",
self.interface_id,
)
- self.central.
+ self.central.emit_interface_event(
interface_id=self.interface_id,
interface_event_type=InterfaceEventType.PROXY,
data={EventKey.AVAILABLE: available},

@@ -406,7 +406,7 @@ class Client(ABC, LogContextMixin):
) is not None:
if (seconds_since_last_event := (datetime.now() - last_events_dt).total_seconds()) > CALLBACK_WARN_INTERVAL:
if self._is_callback_alive:
- self.central.
+ self.central.emit_interface_event(
interface_id=self.interface_id,
interface_event_type=InterfaceEventType.CALLBACK,
data={

@@ -423,7 +423,7 @@ class Client(ABC, LogContextMixin):
return False

if not self._is_callback_alive:
- self.central.
+ self.central.emit_interface_event(
interface_id=self.interface_id,
interface_event_type=InterfaceEventType.CALLBACK,
data={EventKey.AVAILABLE: True},

@@ -437,13 +437,13 @@ class Client(ABC, LogContextMixin):
"""Send ping to the backend to generate PONG event."""

@inspector
- async def execute_program(self, *, pid: str) -> bool:
+ async def execute_program(self, *, pid: str) -> bool: # pragma: no cover
"""Execute a program on the backend."""
_LOGGER.debug("EXECUTE_PROGRAM: not usable for %s.", self.interface_id)
return True

@inspector
- async def set_program_state(self, *, pid: str, state: bool) -> bool:
+ async def set_program_state(self, *, pid: str, state: bool) -> bool: # pragma: no cover
"""Set the program state on the backend."""
_LOGGER.debug("SET_PROGRAM_STATE: not usable for %s.", self.interface_id)
return True

@@ -471,19 +471,21 @@ class Client(ABC, LogContextMixin):
"""Get all system variables from the backend."""

@inspector(re_raise=False)
- async def get_all_programs(
+ async def get_all_programs(
+ self, *, markers: tuple[DescriptionMarker | str, ...]
+ ) -> tuple[ProgramData, ...] | None: # pragma: no cover
"""Get all programs, if available."""
_LOGGER.debug("GET_ALL_PROGRAMS: not usable for %s.", self.interface_id)
return None

@inspector(re_raise=False, no_raise_return={})
- async def get_all_rooms(self) -> dict[str, set[str]]:
+ async def get_all_rooms(self) -> dict[str, set[str]]: # pragma: no cover
"""Get all rooms, if available."""
_LOGGER.debug("GET_ALL_ROOMS: not usable for %s.", self.interface_id)
return {}

@inspector(re_raise=False, no_raise_return={})
- async def get_all_functions(self) -> dict[str, set[str]]:
+ async def get_all_functions(self) -> dict[str, set[str]]: # pragma: no cover
"""Get all functions, if available."""
_LOGGER.debug("GET_ALL_FUNCTIONS: not usable for %s.", self.interface_id)
return {}

@@ -767,7 +769,7 @@ class Client(ABC, LogContextMixin):
call_source,
)
return cast(dict[str, Any], await self._proxy_read.getParamset(address, paramset_key))
- except BaseHomematicException as bhexc:
+ except BaseHomematicException as bhexc: # pragma: no cover
raise ClientException(
f"GET_PARAMSET failed with for {address}/{paramset_key}: {extract_exc_args(exc=bhexc)}"
) from bhexc

@@ -1033,7 +1035,7 @@ class Client(ABC, LogContextMixin):
"""List devices of the backend."""
try:
return tuple(await self._proxy_read.listDevices())
- except BaseHomematicException as bhexc:
+ except BaseHomematicException as bhexc: # pragma: no cover
_LOGGER.debug(
"LIST_DEVICES failed: %s [%s]",
bhexc.name,

@@ -1145,7 +1147,7 @@ class ClientCCU(Client):
self.central.data_cache.add_data(interface=self.interface, all_device_data=all_device_data)
return
except ClientException:
- self.central.
+ self.central.emit_interface_event(
interface_id=self.interface_id,
interface_event_type=InterfaceEventType.FETCH_DATA,
data={EventKey.AVAILABLE: False},

@@ -1168,8 +1170,8 @@ class ClientCCU(Client):
if handle_ping_pong
else self.interface_id
)
- self._ping_pong_cache.handle_send_ping(ping_ts=dt_now)
await self._proxy.ping(callerId)
+ self._ping_pong_cache.handle_send_ping(ping_ts=dt_now)
elif not self._is_initialized:
await self._proxy.ping(self.interface_id)
self.modified_at = dt_now

@@ -1545,7 +1547,7 @@ class ClientHomegear(ClientCCU):
address=address,
name=await self._proxy_read.getMetadata(address, _NAME),
)
- except BaseHomematicException as bhexc:
+ except BaseHomematicException as bhexc: # pragma: no cover
_LOGGER.warning(
"%s [%s] Failed to fetch name for device %s",
bhexc.name,

@@ -1559,7 +1561,7 @@ class ClientHomegear(ClientCCU):
try:
await self._proxy.clientServerInitialized(self.interface_id)
self.modified_at = datetime.now()
- except BaseHomematicException as bhexc:
+ except BaseHomematicException as bhexc: # pragma: no cover
_LOGGER.debug(
"CHECK_CONNECTION_AVAILABILITY failed: %s [%s]",
bhexc.name,

@@ -1661,7 +1663,7 @@ class ClientConfig:
raise NoConnectionException(f"No connection to {self.interface_id}")
except BaseHomematicException:
raise
- except Exception as exc:
+ except Exception as exc: # pragma: no cover
raise NoConnectionException(f"Unable to connect {extract_exc_args(exc=exc)}.") from exc

async def _get_version(self) -> str:

@@ -1673,7 +1675,7 @@ class ClientConfig:
if (methods := check_proxy.supported_methods) and "getVersion" in methods:
# BidCos-Wired does not support getVersion()
return cast(str, await check_proxy.getVersion())
- except Exception as exc:
+ except Exception as exc: # pragma: no cover
raise NoConnectionException(f"Unable to connect {extract_exc_args(exc=exc)}.") from exc
return "0"

aiohomematic/client/json_rpc.py
CHANGED

@@ -339,6 +339,11 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
keep_session: bool = True,
) -> dict[str, Any] | Any:
"""Reusable JSON-RPC POST_SCRIPT function."""
+ # Load and validate script first to avoid any network when script is missing
+ if (script := await self._get_script(script_name=script_name)) is None:
+ raise ClientException(f"Script file for {script_name} does not exist")
+
+ # Prepare session only after we know we have a script to run
if keep_session:
await self._login_or_renew()
session_id = self._session_id

@@ -351,9 +356,6 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
if self._supported_methods is None:
await self._check_supported_methods()

- if (script := await self._get_script(script_name=script_name)) is None:
- raise ClientException(f"Script file for {script_name} does not exist")
-
if extra_params:
for variable, value in extra_params.items():
script = script.replace(f"##{variable}##", value)

@@ -384,9 +386,12 @@ class AioJsonRpcAioHttpClient(LogContextMixin):
def _load_script(script_name: str) -> str | None:
"""Load script from file system."""
script_file = os.path.join(Path(__file__).resolve().parent, REGA_SCRIPT_PATH, script_name)
-
-
-
+ try:
+ if script := Path(script_file).read_text(encoding=UTF_8):
+ self._script_cache[script_name] = script
+ return script
+ except FileNotFoundError:
+ return None
return None

return await self._looper.async_add_executor_job(_load_script, script_name, name=f"load_script-{script_name}")

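Note: the json_rpc.py change above moves the script lookup ahead of any session handling and makes the file read tolerant of a missing script. A minimal standalone sketch of that flow under assumed names (load_script and the cache dict are illustrative, not the class's real attributes):

from pathlib import Path


def load_script(script_dir: Path, script_name: str, cache: dict[str, str]) -> str | None:
    """Return the script text, caching it, or None if the file does not exist."""
    script_file = script_dir / script_name
    try:
        if script := script_file.read_text(encoding="utf-8"):
            cache[script_name] = script
            return script
    except FileNotFoundError:
        # Missing script: the caller raises a client-side error before opening a session.
        return None
    return None
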
aiohomematic/client/rpc_proxy.py
CHANGED

@@ -216,7 +216,7 @@ class AioXmlRpcProxy(BaseRpcProxy, xmlrpc.client.ServerProxy):
except BaseHomematicException as bhe:
self._record_session(method=args[0], params=args[1:], exc=bhe)
raise
- except SSLError as sslerr:
+ except SSLError as sslerr: # pragma: no cover - SSL handshake/cert errors are OS/OpenSSL dependent and not reliably reproducible in CI
message = f"SSLError on {self._interface_id}: {extract_exc_args(exc=sslerr)}"
level = logging.ERROR
if sslerr.args[0] in _SSL_ERROR_CODES:

@@ -237,7 +237,7 @@ class AioXmlRpcProxy(BaseRpcProxy, xmlrpc.client.ServerProxy):
log_context=self.log_context,
)
raise NoConnectionException(message) from sslerr
- except OSError as oserr:
+ except OSError as oserr: # pragma: no cover - Network/socket errno differences are platform/environment specific; simulating reliably in CI would be flaky
message = f"OSError on {self._interface_id}: {extract_exc_args(exc=oserr)}"
level = (
logging.ERROR

aiohomematic/const.py
CHANGED

@@ -19,7 +19,7 @@ import sys
from types import MappingProxyType
from typing import Any, Final, NamedTuple, Required, TypeAlias, TypedDict

- VERSION: Final = "2025.10.
+ VERSION: Final = "2025.10.25"

# Detect test speedup mode via environment
_TEST_SPEEDUP: Final = (

aiohomematic/hmcli.py
CHANGED

@@ -13,6 +13,7 @@ exposes the 'main' entrypoint for invocation. All other names are internal.
from __future__ import annotations

import argparse
+ import json
import sys
from typing import Any
from xmlrpc.client import ServerProxy

@@ -124,11 +125,15 @@ def main() -> None:

try:
if args.paramset_key == ParamsetKey.VALUES and args.value is None:
- proxy.getValue(args.address, args.parameter)
+ result = proxy.getValue(args.address, args.parameter)
if args.json:
-
+ print(
+ json.dumps(
+ {"address": args.address, "parameter": args.parameter, "value": result}, ensure_ascii=False
+ )
+ )
else:
-
+ print(result)
sys.exit(0)
elif args.paramset_key == ParamsetKey.VALUES and args.value:
value: Any

@@ -144,13 +149,22 @@ def main() -> None:
sys.exit(0)
elif args.paramset_key == ParamsetKey.MASTER and args.value is None:
paramset: dict[str, Any] | None
- if (paramset := proxy.getParamset(args.address, args.paramset_key)) and paramset
- args.parameter
- ):
+ if (paramset := proxy.getParamset(args.address, args.paramset_key)) and (args.parameter in paramset): # type: ignore[assignment]
+ result = paramset[args.parameter]
if args.json:
-
+ print(
+ json.dumps(
+ {
+ "address": args.address,
+ "paramset_key": args.paramset_key,
+ "parameter": args.parameter,
+ "value": result,
+ },
+ ensure_ascii=False,
+ )
+ )
else:
-
+ print(result)
sys.exit(0)
elif args.paramset_key == ParamsetKey.MASTER and args.value:
if args.type == "int":

@@ -163,7 +177,8 @@ def main() -> None:
value = args.value
proxy.putParamset(args.address, args.paramset_key, {args.parameter: value})
sys.exit(0)
- except Exception:
+ except Exception as ex:
+ print(str(ex), file=sys.stderr)
sys.exit(1)

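Note: the hmcli.py change above captures the XML-RPC result, prints it (optionally as JSON), and writes the exception text to stderr before exiting with status 1. Roughly the payload shape the new JSON branch prints for a VALUES read; the address and parameter below are made-up placeholders:

import json

# Placeholder values for illustration only; hmcli fills these from its CLI arguments.
payload = {"address": "VCU0000001:1", "parameter": "STATE", "value": True}
print(json.dumps(payload, ensure_ascii=False))

For a MASTER read the printed object additionally carries the paramset_key field, as shown in the diff.
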
aiohomematic/model/calculated/data_point.py
CHANGED

@@ -99,9 +99,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
if generic_data_point := self._channel.get_generic_data_point(parameter=parameter, paramset_key=paramset_key):
self._data_points.append(generic_data_point)
self._unregister_callbacks.append(
- generic_data_point.register_internal_data_point_updated_callback(
- cb=self.fire_data_point_updated_callback
- )
+ generic_data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
)
return cast(data_point_type, generic_data_point) # type: ignore[valid-type]
return cast(

@@ -123,9 +121,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
):
self._data_points.append(generic_data_point)
self._unregister_callbacks.append(
- generic_data_point.register_internal_data_point_updated_callback(
- cb=self.fire_data_point_updated_callback
- )
+ generic_data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
)
return cast(data_point_type, generic_data_point) # type: ignore[valid-type]
return cast(

@@ -296,7 +292,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
"""Init the data point values."""
for dp in self._readable_data_points:
await dp.load_data_point_value(call_source=call_source, direct_call=direct_call)
- self.
+ self.emit_data_point_updated_event()

def is_state_change(self, **kwargs: Any) -> bool:
"""

@@ -310,9 +306,9 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
return False

@property
- def
+ def _should_emit_data_point_updated_callback(self) -> bool:
"""Check if a data point has been updated or refreshed."""
- if self.
+ if self.emitted_event_recently: # pylint: disable=using-constant-test
return False

if (relevant_values_data_point := self._relevant_values_data_points) is not None and len(

@@ -320,7 +316,7 @@ class CalculatedDataPoint[ParameterT: GenericParameterType](BaseDataPoint):
) <= 1:
return True

- return all(dp.
+ return all(dp.emitted_event_recently for dp in relevant_values_data_point)

def _unregister_data_point_updated_callback(self, *, cb: Callable, custom_id: str) -> None:
"""Unregister update callback."""

aiohomematic/model/custom/data_point.py
CHANGED

@@ -188,7 +188,7 @@ class CustomDataPoint(BaseDataPoint):
"""Init the data point values."""
for dp in self._readable_data_points:
await dp.load_data_point_value(call_source=call_source, direct_call=direct_call)
- self.
+ self.emit_data_point_updated_event()

def is_state_change(self, **kwargs: Any) -> bool:
"""

@@ -268,7 +268,7 @@ class CustomDataPoint(BaseDataPoint):
data_point.force_usage(forced_usage=DataPointUsage.NO_CREATE)

self._unregister_callbacks.append(
- data_point.register_internal_data_point_updated_callback(cb=self.
+ data_point.register_internal_data_point_updated_callback(cb=self.emit_data_point_updated_event)
)
self._data_points[field] = data_point

aiohomematic/model/custom/definition.py
CHANGED

@@ -353,7 +353,7 @@ _CUSTOM_DATA_POINT_DEFINITION: Mapping[CDPD, Mapping[int | DeviceProfile, Any]]
Field.LEVEL: Parameter.LEVEL,
Field.CONCENTRATION: Parameter.CONCENTRATION,
},
- 8: {
+ 8: { # BWTH
Field.STATE: Parameter.STATE,
},
},

@@ -361,6 +361,9 @@ _CUSTOM_DATA_POINT_DEFINITION: Mapping[CDPD, Mapping[int | DeviceProfile, Any]]
7: {
Field.HEATING_VALVE_TYPE: Parameter.HEATING_VALVE_TYPE,
},
+ -5: { # WGTC
+ Field.STATE: Parameter.STATE,
+ },
},
},
},

aiohomematic/model/data_point.py
CHANGED

@@ -143,7 +143,7 @@ class CallbackDataPoint(ABC, LogContextMixin):
"_custom_id",
"_data_point_updated_callbacks",
"_device_removed_callbacks",
- "
+ "_emitted_event_at",
"_modified_at",
"_path_data",
"_refreshed_at",

@@ -163,7 +163,7 @@ class CallbackDataPoint(ABC, LogContextMixin):
self._device_removed_callbacks: list[Callable] = []
self._custom_id: str | None = None
self._path_data = self._get_path_data()
- self.
+ self._emitted_event_at: datetime = INIT_DATETIME
self._modified_at: datetime = INIT_DATETIME
self._refreshed_at: datetime = INIT_DATETIME
self._signature: Final = self._get_signature()

@@ -191,16 +191,16 @@ class CallbackDataPoint(ABC, LogContextMixin):
return self._custom_id

@property
- def
- """Return the data point updated
- return self.
+ def emitted_event_at(self) -> datetime:
+ """Return the data point updated emitted an event at."""
+ return self._emitted_event_at

@state_property
- def
- """Return the data point
- if self.
+ def emitted_event_recently(self) -> bool:
+ """Return the data point emitted an event within 500 milliseconds."""
+ if self._emitted_event_at == INIT_DATETIME:
return False
- return (datetime.now() - self.
+ return (datetime.now() - self._emitted_event_at).total_seconds() < 0.5

@classmethod
def default_category(cls) -> DataPointCategory:

@@ -357,11 +357,11 @@ class CallbackDataPoint(ABC, LogContextMixin):
self._device_removed_callbacks.remove(cb)

@loop_check
- def
+ def emit_data_point_updated_event(self, **kwargs: Any) -> None:
"""Do what is needed when the value of the data_point has been updated/refreshed."""
- if not self.
+ if not self._should_emit_data_point_updated_callback:
return
- self.
+ self._emitted_event_at = datetime.now()
for callback_handler, custom_id in self._data_point_updated_callbacks.items():
try:
# Add the data_point reference once to kwargs to avoid per-callback writes.

@@ -369,19 +369,19 @@ class CallbackDataPoint(ABC, LogContextMixin):
kwargs[KWARGS_ARG_CUSTOM_ID] = custom_id
callback_handler(**kwargs)
except Exception as exc:
- _LOGGER.warning("
+ _LOGGER.warning("EMIT_DATA_POINT_UPDATED_EVENT failed: %s", extract_exc_args(exc=exc))

@loop_check
- def
+ def emit_device_removed_event(self) -> None:
"""Do what is needed when the data_point has been removed."""
for callback_handler in self._device_removed_callbacks:
try:
callback_handler()
except Exception as exc:
- _LOGGER.warning("
+ _LOGGER.warning("EMIT_DEVICE_REMOVED_EVENT failed: %s", extract_exc_args(exc=exc))

@property
- def
+ def _should_emit_data_point_updated_callback(self) -> bool:
"""Check if a data point has been updated or refreshed."""
return True

@@ -895,7 +895,7 @@ class BaseParameterDataPoint[
if value == NO_CACHE_ENTRY:
if self.refreshed_at != INIT_DATETIME:
self._state_uncertain = True
- self.
+ self.emit_data_point_updated_event()
return (old_value, None) # type: ignore[return-value]

new_value = self._convert_value(value=value)

@@ -906,7 +906,7 @@ class BaseParameterDataPoint[
self._previous_value = old_value
self._current_value = new_value
self._state_uncertain = False
- self.
+ self.emit_data_point_updated_event()
return (old_value, new_value)

def write_temporary_value(self, *, value: Any, write_at: datetime) -> None:

@@ -920,7 +920,7 @@ class BaseParameterDataPoint[
self._set_temporary_modified_at(modified_at=write_at)
self._temporary_value = temp_value
self._state_uncertain = True
- self.
+ self.emit_data_point_updated_event()

def update_parameter_data(self) -> None:
"""Update parameter data."""

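Note: the new _emitted_event_at timestamp and emitted_event_recently property above give data points a cheap 500 ms debounce, which CalculatedDataPoint uses to skip redundant update events. A simplified, self-contained sketch of the idea (the real class keeps more state and uses the library's INIT_DATETIME sentinel; the epoch value here is only a stand-in):

from datetime import datetime

INIT_DATETIME = datetime(1970, 1, 1)  # stand-in sentinel meaning "never emitted"


class DebouncedEmitter:
    """Illustrative only: suppress a second emit within 500 ms of the previous one."""

    def __init__(self) -> None:
        self._emitted_event_at = INIT_DATETIME

    @property
    def emitted_event_recently(self) -> bool:
        if self._emitted_event_at == INIT_DATETIME:
            return False
        return (datetime.now() - self._emitted_event_at).total_seconds() < 0.5

    def emit(self) -> None:
        if self.emitted_event_recently:
            return  # drop duplicate emits inside the window
        self._emitted_event_at = datetime.now()
        print("data point updated event emitted")


emitter = DebouncedEmitter()
emitter.emit()  # emitted
emitter.emit()  # suppressed: still inside the 500 ms window
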
aiohomematic/model/device.py
CHANGED

@@ -598,7 +598,7 @@ class Device(LogContextMixin, PayloadMixin):
if self._forced_availability != forced_availability:
self._forced_availability = forced_availability
for dp in self.generic_data_points:
- dp.
+ dp.emit_data_point_updated_event()

@inspector
async def export_device_definition(self) -> None:

@@ -674,17 +674,17 @@ class Device(LogContextMixin, PayloadMixin):
await self._central.save_files(save_paramset_descriptions=True)
for dp in self.generic_data_points:
dp.update_parameter_data()
- self.
+ self.emit_device_updated_callback()

@loop_check
- def
+ def emit_device_updated_callback(self) -> None:
"""Do what is needed when the state of the device has been updated."""
self._set_modified_at()
for callback_handler in self._device_updated_callbacks:
try:
callback_handler()
except Exception as exc:
- _LOGGER.warning("
+ _LOGGER.warning("EMIT_DEVICE_UPDATED failed: %s", extract_exc_args(exc=exc))

def __str__(self) -> str:
"""Provide some useful information."""

@@ -963,7 +963,7 @@ class Channel(LogContextMixin, PayloadMixin):
self._calculated_data_points[data_point.dpk] = data_point
if isinstance(data_point, GenericDataPoint):
self._generic_data_points[data_point.dpk] = data_point
- self._device.register_device_updated_callback(cb=data_point.
+ self._device.register_device_updated_callback(cb=data_point.emit_data_point_updated_event)
if isinstance(data_point, hmce.CustomDataPoint):
self._custom_data_point = data_point
if isinstance(data_point, GenericEvent):

@@ -977,12 +977,12 @@ class Channel(LogContextMixin, PayloadMixin):
del self._calculated_data_points[data_point.dpk]
if isinstance(data_point, GenericDataPoint):
del self._generic_data_points[data_point.dpk]
- self._device.unregister_device_updated_callback(cb=data_point.
+ self._device.unregister_device_updated_callback(cb=data_point.emit_data_point_updated_event)
if isinstance(data_point, hmce.CustomDataPoint):
self._custom_data_point = None
if isinstance(data_point, GenericEvent):
del self._generic_events[data_point.dpk]
- data_point.
+ data_point.emit_device_removed_event()

def remove(self) -> None:
"""Remove data points from collections and central."""

aiohomematic/model/event.py
CHANGED

@@ -8,7 +8,7 @@ button presses, device errors, and impulse notifications to applications.

Included classes:
- GenericEvent: Base event that integrates with the common data point API
- (category, usage, names/paths, callbacks) and provides
+ (category, usage, names/paths, callbacks) and provides emit_event handling.
- ClickEvent: Represents key press events (EventType.KEYPRESS).
- DeviceErrorEvent: Represents device error signaling with special value change
semantics before emitting an event (EventType.DEVICE_ERROR).

@@ -102,14 +102,14 @@ class GenericEvent(BaseParameterDataPoint[Any, Any]):
async def event(self, *, value: Any, received_at: datetime) -> None:
"""Handle event for which this handler has subscribed."""
if self.event_type in DATA_POINT_EVENTS:
- self.
+ self.emit_data_point_updated_event()
self._set_modified_at(modified_at=received_at)
- self.
+ self.emit_event(value=value)

@loop_check
- def
- """Do what is needed to
- self._central.
+ def emit_event(self, *, value: Any) -> None:
+ """Do what is needed to emit an event."""
+ self._central.emit_homematic_callback(event_type=self.event_type, event_data=self.get_event_data(value=value))

def _get_data_point_name(self) -> DataPointNameData:
"""Create the name for the data_point."""

@@ -149,7 +149,7 @@ class DeviceErrorEvent(GenericEvent):
isinstance(new_value, int)
and ((old_value is None and new_value > 0) or (isinstance(old_value, int) and old_value != new_value))
):
- self.
+ self.emit_event(value=new_value)


class ImpulseEvent(GenericEvent):

aiohomematic/model/generic/data_point.py
CHANGED

@@ -83,8 +83,8 @@ class GenericDataPoint[ParameterT: GenericParameterType, InputParameterT: Generi
Parameter.UN_REACH,
Parameter.STICKY_UN_REACH,
):
- self._device.
- self._central.
+ self._device.emit_device_updated_callback()
+ self._central.emit_homematic_callback(
event_type=EventType.DEVICE_AVAILABILITY,
event_data=self.get_event_data(value=new_value),
)

aiohomematic/model/hub/__init__.py
CHANGED

@@ -204,7 +204,7 @@ class Hub:
new_programs.append(program_dp.switch)

if new_programs:
- self._central.
+ self._central.emit_backend_system_callback(
system_event=BackendSystemEvent.HUB_REFRESHED,
new_data_points=_get_new_hub_data_points(data_points=new_programs),
)

@@ -240,7 +240,7 @@ class Hub:
new_sysvars.append(self._create_system_variable(data=sysvar))

if new_sysvars:
- self._central.
+ self._central.emit_backend_system_callback(
system_event=BackendSystemEvent.HUB_REFRESHED,
new_data_points=_get_new_hub_data_points(data_points=new_sysvars),
)

aiohomematic/model/hub/data_point.py
CHANGED

@@ -230,7 +230,7 @@ class GenericSysvarDataPoint(GenericHubDataPoint):
self._previous_value = old_value
self._current_value = new_value
self._state_uncertain = False
- self.
+ self.emit_data_point_updated_event()

def _write_temporary_value(self, *, value: Any, write_at: datetime) -> None:
"""Update the temporary value of the data_point."""

@@ -243,7 +243,7 @@ class GenericSysvarDataPoint(GenericHubDataPoint):
self._set_temporary_modified_at(modified_at=write_at)
self._temporary_value = temp_value
self._state_uncertain = True
- self.
+ self.emit_data_point_updated_event()

def _convert_value(self, *, old_value: Any, new_value: Any) -> Any:
"""Convert to value to SYSVAR_TYPE."""

@@ -333,7 +333,7 @@ class GenericProgramDataPoint(GenericHubDataPoint):
self._last_execute_time = data.last_execute_time
do_update = True
if do_update:
- self.
+ self.emit_data_point_updated_event()

def _get_path_data(self) -> PathData:
"""Return the path data of the data_point."""

aiohomematic/property_decorators.py
CHANGED

@@ -117,7 +117,7 @@ class _GenericProperty[GETTER, SETTER](property):
kind=self.kind,
cached=self._cached,
log_context=self.log_context,
- )
+ )

def setter(self, fset: Callable[[Any, SETTER], None], /) -> _GenericProperty:
"""Return generic setter."""

@@ -155,7 +155,7 @@ class _GenericProperty[GETTER, SETTER](property):
return cast(GETTER, self)

if (fget := self.fget) is None:
- raise AttributeError("unreadable attribute")
+ raise AttributeError("unreadable attribute")

if not self._cached:
return fget(instance)

@@ -194,7 +194,7 @@ class _GenericProperty[GETTER, SETTER](property):
delattr(instance, self._cache_attr)

if self.fset is None:
- raise AttributeError("can't set attribute")
+ raise AttributeError("can't set attribute")
self.fset(instance, value)

def __delete__(self, instance: Any, /) -> None:

@@ -210,7 +210,7 @@ class _GenericProperty[GETTER, SETTER](property):
delattr(instance, self._cache_attr)

if self.fdel is None:
- raise AttributeError("can't delete attribute")
+ raise AttributeError("can't delete attribute")
self.fdel(instance)

aiohomematic/store/dynamic.py
CHANGED

@@ -416,7 +416,7 @@ class PingPongCache:
# but always emit when crossing the high threshold.
count = self._pending_pong_count
if (count > self._allowed_delta) or (count % 2 == 0):
- self.
+ self._check_and_emit_pong_event(event_type=InterfaceEventType.PENDING_PONG)
_LOGGER.debug(
"PING PONG CACHE: Increase pending PING count: %s - %i for ts: %s",
self._interface_id,

@@ -430,7 +430,7 @@ class PingPongCache:
self._pending_pongs.remove(pong_ts)
self._cleanup_pending_pongs()
count = self._pending_pong_count
- self.
+ self._check_and_emit_pong_event(event_type=InterfaceEventType.PENDING_PONG)
_LOGGER.debug(
"PING PONG CACHE: Reduce pending PING count: %s - %i for ts: %s",
self._interface_id,

@@ -441,7 +441,7 @@ class PingPongCache:
self._unknown_pongs.add(pong_ts)
self._cleanup_unknown_pongs()
count = self._unknown_pong_count
- self.
+ self._check_and_emit_pong_event(event_type=InterfaceEventType.UNKNOWN_PONG)
_LOGGER.debug(
"PING PONG CACHE: Increase unknown PONG count: %s - %i for ts: %s",
self._interface_id,

@@ -477,11 +477,12 @@ class PingPongCache:
up_pong_ts,
)

- def
- """
+ def _check_and_emit_pong_event(self, *, event_type: InterfaceEventType) -> None:
+ """Emit an event about the pong status."""

- def
-
+ def _emit_event(mismatch_count: int) -> None:
+ """Emit event."""
+ self._central.emit_homematic_callback(
event_type=EventType.INTERFACE,
event_data=cast(
dict[EventKey, Any],

@@ -498,13 +499,19 @@ class PingPongCache:
),
),
)
+ _LOGGER.debug(
+ "PING PONG CACHE: Emitting event %s for %s with mismatch_count: %i with %i acceptable",
+ event_type,
+ self._interface_id,
+ mismatch_count,
+ self._allowed_delta,
+ )

if event_type == InterfaceEventType.PENDING_PONG:
self._cleanup_pending_pongs()
- count
- if self._pending_pong_count > self._allowed_delta:
+ if (count := self._pending_pong_count) > self._allowed_delta:
# Emit interface event to inform subscribers about high pending pong count.
-
+ _emit_event(mismatch_count=count)
if self._pending_pong_logged is False:
_LOGGER.warning(
"Pending PONG mismatch: There is a mismatch between send ping events and received pong events for instance %s. "

@@ -519,16 +526,16 @@ class PingPongCache:
# - If we previously logged a high state, emit a reset event (mismatch=0) exactly once.
# - Otherwise, throttle emission to every second ping (even counts > 0) to avoid spamming.
elif self._pending_pong_logged:
-
+ _emit_event(mismatch_count=0)
self._pending_pong_logged = False
elif count > 0 and count % 2 == 0:
-
+ _emit_event(mismatch_count=count)
elif event_type == InterfaceEventType.UNKNOWN_PONG:
self._cleanup_unknown_pongs()
count = self._unknown_pong_count
if self._unknown_pong_count > self._allowed_delta:
# Emit interface event to inform subscribers about high unknown pong count.
-
+ _emit_event(mismatch_count=count)
if self._unknown_pong_logged is False:
_LOGGER.warning(
"Unknown PONG Mismatch: Your instance %s receives PONG events, that it hasn't send. "

aiohomematic/store/visibility.py
CHANGED

@@ -423,8 +423,7 @@ class ParameterVisibilityCache:
self, *, model_l: TModelName, mapping: Mapping[str, object], cache_dict: dict[TModelName, str | None]
) -> str | None:
"""Resolve and memoize the first key in mapping that prefixes model_l."""
- dt_short_key
- if dt_short_key is None and model_l not in cache_dict:
+ if (dt_short_key := cache_dict.get(model_l)) is None and model_l not in cache_dict:
dt_short_key = next((k for k in mapping if model_l.startswith(k)), None)
cache_dict[model_l] = dt_short_key
return dt_short_key

@@ -486,7 +485,7 @@ class ParameterVisibilityCache:
return False

if parameter in self._custom_un_ignore_complex[model_l][channel.no][ParamsetKey.MASTER]:
- return False
+ return False

dt_short_key = self._resolve_prefix_key(
model_l=model_l,

@@ -545,7 +544,7 @@ class ParameterVisibilityCache:
for ml, cno in search_matrix:
if parameter in self._custom_un_ignore_complex[ml][cno][paramset_key]:
self._param_un_ignored_cache[cache_key] = True
- return True
+ return True

# check if parameter is in _UN_IGNORE_PARAMETERS_BY_DEVICE
result = bool(

{aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: aiohomematic
- Version: 2025.10.
+ Version: 2025.10.25
Summary: Homematic interface for Home Assistant running on Python 3.
Home-page: https://github.com/sukramj/aiohomematic
Author-email: SukramJ <sukramj@icloud.com>, Daniel Perna <danielperna84@gmail.com>

@@ -15,10 +15,16 @@ Classifier: Intended Audience :: End Users/Desktop
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Classifier: Framework :: AsyncIO
+ Classifier: Typing :: Typed
Classifier: Topic :: Home Automation
- Requires-Python: >=3.13
+ Requires-Python: >=3.13
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: aiohttp>=3.12.0

{aiohomematic-2025.10.22.dist-info → aiohomematic-2025.10.25.dist-info}/RECORD
CHANGED

@@ -1,40 +1,40 @@
aiohomematic/__init__.py,sha256=Uo9CIoil0Arl3GwtgMZAwM8jhcgoBKcZEgj8cXYlswY,2258
aiohomematic/async_support.py,sha256=Fg6RLD7Irt1mTwXbLkfphJbfd7oU_Svhp23i3Bb4Q7k,8762
- aiohomematic/const.py,sha256=
+ aiohomematic/const.py,sha256=qS6qjW8szpJYt1hudHJG7zokHp_PjoO1-u65Z5-TsXU,27482
aiohomematic/context.py,sha256=hGE-iPcPt21dY-1MZar-Hyh9YaKL-VS42xjrulIVyRQ,429
aiohomematic/converter.py,sha256=FiHU71M5RZ7N5FXJYh2CN14s63-PM-SHdb0cJ_CLx54,3602
aiohomematic/decorators.py,sha256=cSW0aF3PzrW_qW6H0sjRNH9eqO8ysqhXZDgJ2OJTZM4,11038
aiohomematic/exceptions.py,sha256=RLldRD4XY8iYuNYVdspCbbphGcKsximB7R5OL7cYKw0,5006
- aiohomematic/hmcli.py,sha256=
- aiohomematic/property_decorators.py,sha256=
+ aiohomematic/hmcli.py,sha256=E44gJYSRiP0HGNHRD3LwWiYpIHpiBvJTsKcavVFIS8g,5635
+ aiohomematic/property_decorators.py,sha256=v8O_6hW5plpD1-Cmhbb-6t_RfAl8wD4TskWt777t8rY,17059
aiohomematic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiohomematic/support.py,sha256=F8jiuRgw3Dn7v2kPBvFTHFGI4nJzybd3eQIQEK4Y6XI,21056
aiohomematic/validator.py,sha256=qX5janicu4jLrAVzKoyWgXe1XU4EOjk5-QhNFL4awTQ,3541
- aiohomematic/central/__init__.py,sha256=
- aiohomematic/central/decorators.py,sha256=
+ aiohomematic/central/__init__.py,sha256=UmPIumAjMLCfkbaMic_jvdH2V1feP0uqcsdS9ji14jo,96600
+ aiohomematic/central/decorators.py,sha256=ja0d5MujL-Kfv399XIPRgiUxeyf67HwNWAuD7nmFJzg,6902
aiohomematic/central/rpc_server.py,sha256=EhvBy8oMjBTR8MvH5QXo3lvlsCNJrvu6B85_CAg6sG8,10742
- aiohomematic/client/__init__.py,sha256=
+ aiohomematic/client/__init__.py,sha256=TYeVOGNVsGSCoaL1v9z1F8Sfx-Aq7zzz-yComAnnh8E,74365
aiohomematic/client/_rpc_errors.py,sha256=IaYjX60mpBJ43gDCJjuUSVraamy5jXHTRjOnutK4azs,2962
- aiohomematic/client/json_rpc.py,sha256=
- aiohomematic/client/rpc_proxy.py,sha256=
+ aiohomematic/client/json_rpc.py,sha256=82uVldmvNEB_F9BA_2oA55UmThRJRW2wRTPbcJiBvQg,51580
+ aiohomematic/client/rpc_proxy.py,sha256=Am-MTLCVtQDPm2AOjVy1Bb4uulwdV2J6172SQRjdbso,11844
aiohomematic/model/__init__.py,sha256=gUYa8ROWSbXjZTWUTmINZ1bbYAxGkVpA-onxaJN2Iso,5436
- aiohomematic/model/data_point.py,sha256=
- aiohomematic/model/device.py,sha256=
- aiohomematic/model/event.py,sha256=
+ aiohomematic/model/data_point.py,sha256=PqxMmKDqpUpkPO48WxNC5xeC9dPso5u8lQcKxwjusnw,41578
+ aiohomematic/model/device.py,sha256=gK-4truXJMNJcU6vCIW7xV2rYHsrRCLtyM89xS2-g5I,52884
+ aiohomematic/model/event.py,sha256=iyRW5569nttjGUa9iBeLHACviwVims2HiXrQFEz259M,6849
aiohomematic/model/support.py,sha256=MQJVuE1C043VRs7t7Ld7kZ1quMmIDNXjzwNKW1LQtEs,19622
aiohomematic/model/update.py,sha256=R3uUA61m-UQNNGkRod3vES66AgkPKay_CPyyrd-nqVI,5140
aiohomematic/model/calculated/__init__.py,sha256=JNtxK4-XZeyR6MxfKVPdcF6ezQliQYTWEDoeOChumaE,2966
aiohomematic/model/calculated/climate.py,sha256=rm9b4rCrmsZAA5_dzP6YRtahdveI97581_EnC4utqpg,10499
- aiohomematic/model/calculated/data_point.py,sha256=
+ aiohomematic/model/calculated/data_point.py,sha256=1LluGPB4YHfyC7gAXEBYi2Hqg9SP8Z_WCz3HLGJxNe0,11506
aiohomematic/model/calculated/operating_voltage_level.py,sha256=99A8HvahVS4IxpgK1dsgQXHfeubU7JI2c0ObagbnSNQ,13505
aiohomematic/model/calculated/support.py,sha256=GBD35_OR3TEAWo5ADeH_gk2Ebw9pHOtOnOS7umCkXB0,7989
aiohomematic/model/custom/__init__.py,sha256=JxJXyr2CgKlj-jc1xQ14lbMT76vvswfLUecwj8RJCXA,6073
aiohomematic/model/custom/climate.py,sha256=OnkZKxJKInrP52Tqu_hPcDDZbyL0wTMaIjWuBJ3th_k,57292
aiohomematic/model/custom/const.py,sha256=s4iqhwvt8x41h4-CtMCyXwryGHuBNbhBrcJ5zGVRFJU,4939
aiohomematic/model/custom/cover.py,sha256=KQzLEoPkKgZ2oi2oblUrGReQnT_0WAuseWAxu_xH5_Y,29035
- aiohomematic/model/custom/data_point.py,sha256=
- aiohomematic/model/custom/definition.py,sha256=
- aiohomematic/model/custom/light.py,sha256=
+ aiohomematic/model/custom/data_point.py,sha256=c0R7r6uAAZjxW01eUmmeHEe9doKtZYwZfUJbspMLjEM,14102
+ aiohomematic/model/custom/definition.py,sha256=lZX7nSFxbXL0SHCZQFpLBSs6NbVx8fNdDV6tPCHsy00,35759
+ aiohomematic/model/custom/light.py,sha256=_WDRxjEG8pjFKEjs_8pFiIyCykPRdSfi7SULk7srccA,44426
aiohomematic/model/custom/lock.py,sha256=iJY8jiJA_HRynpbeACEQ_kzwiXOfAaT2IcF1FbXc0xY,11974
aiohomematic/model/custom/siren.py,sha256=iaCJuRWiQH-r_ckhPZGRfmzxI8PddBBA5z_Jo78f1qI,9763
aiohomematic/model/custom/support.py,sha256=FPhC3kp7blJu4jWHWAmXUXv4JWJu5nhQkrr8dyYxFas,1431

@@ -44,16 +44,16 @@ aiohomematic/model/generic/__init__.py,sha256=-4yben7G-QKBWMWpusrLkGo65a4IXIPYoi
aiohomematic/model/generic/action.py,sha256=VDFCAkX0TkXV4xh3ya0AUIKnjOKF_rEobI6qdW5mAOQ,975
aiohomematic/model/generic/binary_sensor.py,sha256=yABUafcBmPHLNAkdj_malYroBZc5UaunYLhmRtS2nGU,865
aiohomematic/model/generic/button.py,sha256=3ahmj1BJZfdh6h6YV-urC8UwcKRp94QPOQtsF0fBI3Y,718
- aiohomematic/model/generic/data_point.py,sha256=
+ aiohomematic/model/generic/data_point.py,sha256=eUOB8H-7PPKcjV8BKVPYlme519ppI1-odWslxQGcI-k,6062
aiohomematic/model/generic/number.py,sha256=WNu4EB_npDZ8f7RrCrab1g9KzRtG8dziUvNQApEKXYk,2656
aiohomematic/model/generic/select.py,sha256=-CXHdvzwWGp74t5UVdIWOFhlcW7bf0hcuGriBl7EWXk,1516
aiohomematic/model/generic/sensor.py,sha256=ueqXw9bkFFJeD_mer6idpMTGFg6rweDEUNOgqsF22NA,2240
aiohomematic/model/generic/switch.py,sha256=Y5Bml60QoHs14mfsMvhsk7S7I-2QSBoB7KWHMCJak3Q,1820
aiohomematic/model/generic/text.py,sha256=AOMZCHgPO2v4lza-OI0J6WNsQcUOvab9HA5sRN8NQZQ,832
- aiohomematic/model/hub/__init__.py,sha256=
+ aiohomematic/model/hub/__init__.py,sha256=ml9uhexfzQ9LiQfob1yMf34mVbmi12-rMJweWYvWZlc,13503
aiohomematic/model/hub/binary_sensor.py,sha256=yqBXEzVS7zMoY4N7agwQ70ypAgIRkfUeBNCInsQV_kA,730
aiohomematic/model/hub/button.py,sha256=fEtBIxKF32kyacaCgsDLohw1exDpFtE7SCwEYgBAlOY,868
- aiohomematic/model/hub/data_point.py,sha256=
+ aiohomematic/model/hub/data_point.py,sha256=FQrBi9BdwIMJH7ul1bSaNEspsBkC5ZouSQLd3Y33ROs,10622
aiohomematic/model/hub/number.py,sha256=zS5ft2wkUUJ0NkUlhkWvbXCeMcI9v-gH36NVCdrNtcU,1215
aiohomematic/model/hub/select.py,sha256=ENF1TMk5jrNbk7lspNTfWIROMifjJMrllnu6htM1C2E,1652
aiohomematic/model/hub/sensor.py,sha256=F--BHrgziizBOkXTupwV0ZZlzNnjsWrL9qnkHQv-lEE,1170

@@ -66,11 +66,12 @@ aiohomematic/rega_scripts/get_system_variable_descriptions.fn,sha256=UKXvC0_5lSA
aiohomematic/rega_scripts/set_program_state.fn,sha256=0bnv7lUj8FMjDZBz325tDVP61m04cHjVj4kIOnUUgpY,279
aiohomematic/rega_scripts/set_system_variable.fn,sha256=sTmr7vkPTPnPkor5cnLKlDvfsYRbGO1iq2z_2pMXq5E,383
aiohomematic/store/__init__.py,sha256=PHwF_tw_zL20ODwLywHgpOLWrghQo_BMZzeiQSXN1Fc,1081
- aiohomematic/store/dynamic.py,sha256=
+ aiohomematic/store/dynamic.py,sha256=3i8oajVhfTeckAuOhwTyIxrd-eb1fl5VxEdK3NPfisw,22323
aiohomematic/store/persistent.py,sha256=SBL8AhqUzpoPtJ50GkLYHwvRJS52fBWqNPjgvykxbY8,40233
- aiohomematic/store/visibility.py,sha256=
- aiohomematic-2025.10.
- aiohomematic-2025.10.
- aiohomematic-2025.10.
- aiohomematic-2025.10.
- aiohomematic-2025.10.
+ aiohomematic/store/visibility.py,sha256=ZaqxN_FIsvpYZvCZWu0YSlWLtrFmAfUYILBYIXxPJZs,31622
+ aiohomematic-2025.10.25.dist-info/licenses/LICENSE,sha256=q-B0xpREuZuvKsmk3_iyVZqvZ-vJcWmzMZpeAd0RqtQ,1083
+ aiohomematic-2025.10.25.dist-info/METADATA,sha256=I3qrHxmZ2K1HTKCLARxj444R9y0PrOuhhmJ4UavC8vM,7950
+ aiohomematic-2025.10.25.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ aiohomematic-2025.10.25.dist-info/entry_points.txt,sha256=tzk3wIE-hXNhLEiefCCDhIiRT7DYY9MePAwGw-kPmWI,57
+ aiohomematic-2025.10.25.dist-info/top_level.txt,sha256=iGUvt1N-E72vKRq7Anpp62HwkQngStrUK0JfL1zj1TE,13
+ aiohomematic-2025.10.25.dist-info/RECORD,,

File without changes
File without changes
File without changes