aiohomematic 2025.10.9-py3-none-any.whl → 2025.10.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aiohomematic might be problematic. Click here for more details.

@@ -27,7 +27,7 @@ Key behaviors
27
27
  - Save/load/clear operations are synchronized via a semaphore and executed via
28
28
  the CentralUnit looper to avoid blocking the event loop.
29
29
 
30
- Helper functions are provided to build content paths and filenames and to
30
+ Helper functions are provided to build content paths and file names and to
31
31
  optionally clean up stale content directories.
32
32
  """
33
33
 
@@ -43,6 +43,7 @@ import json
43
43
  import logging
44
44
  import os
45
45
  from typing import Any, Final, Self
46
+ import zipfile
46
47
 
47
48
  import orjson
48
49
  from slugify import slugify
@@ -89,7 +90,7 @@ class BasePersistentFile(ABC):
89
90
  "_persistent_content",
90
91
  "_save_load_semaphore",
91
92
  "_sub_directory",
92
- "_use_ts_in_filenames",
93
+ "_use_ts_in_file_names",
93
94
  "last_hash_saved",
94
95
  "last_save_triggered",
95
96
  )
@@ -123,27 +124,27 @@ class BasePersistentFile(ABC):
123
124
  """Return if the data has changed."""
124
125
  return self.content_hash != self.last_hash_saved
125
126
 
126
- def _get_filename(
127
+ def _get_file_name(
127
128
  self,
128
129
  *,
129
- use_ts_in_filename: bool = False,
130
+ use_ts_in_file_name: bool = False,
130
131
  ) -> str:
131
132
  """Return the file name."""
132
- return _get_filename(
133
+ return _get_file_name(
133
134
  central_name=self._central.name,
134
135
  file_name=self._file_postfix,
135
- ts=datetime.now() if use_ts_in_filename else None,
136
+ ts=datetime.now() if use_ts_in_file_name else None,
136
137
  )
137
138
 
138
139
  def _get_file_path(
139
140
  self,
140
141
  *,
141
- use_ts_in_filename: bool = False,
142
+ use_ts_in_file_name: bool = False,
142
143
  ) -> str:
143
144
  """Return the full file path."""
144
- return os.path.join(self._directory, self._get_filename(use_ts_in_filename=use_ts_in_filename))
145
+ return os.path.join(self._directory, self._get_file_name(use_ts_in_file_name=use_ts_in_file_name))
145
146
 
146
- async def save(self, *, randomize_output: bool = False, use_ts_in_filename: bool = False) -> DataOperationResult:
147
+ async def save(self, *, randomize_output: bool = False, use_ts_in_file_name: bool = False) -> DataOperationResult:
147
148
  """Save current data to disk."""
148
149
  if not self._should_save:
149
150
  return DataOperationResult.NO_SAVE
@@ -154,7 +155,7 @@ class BasePersistentFile(ABC):
154
155
  def _perform_save() -> DataOperationResult:
155
156
  try:
156
157
  with open(
157
- file=self._get_file_path(use_ts_in_filename=use_ts_in_filename),
158
+ file=self._get_file_path(use_ts_in_file_name=use_ts_in_file_name),
158
159
  mode="wb",
159
160
  ) as file_pointer:
160
161
  file_pointer.write(
@@ -173,7 +174,7 @@ class BasePersistentFile(ABC):
173
174
 
174
175
  async with self._save_load_semaphore:
175
176
  return await self._central.looper.async_add_executor_job(
176
- _perform_save, name=f"save-persistent-content-{self._get_filename()}"
177
+ _perform_save, name=f"save-persistent-content-{self._get_file_name()}"
177
178
  )
178
179
 
179
180
  def _manipulate_content(self, *, content: bytes, randomize_output: bool = False) -> bytes:
@@ -197,27 +198,45 @@ class BasePersistentFile(ABC):
197
198
  and self.content_hash != self.last_hash_saved
198
199
  )
199
200
 
200
- async def load(self) -> DataOperationResult:
201
- """Load data from disk into the dictionary."""
202
- if not check_or_create_directory(directory=self._directory) or not os.path.exists(self._get_file_path()):
201
+ async def load(self, *, file_path: str | None = None) -> DataOperationResult:
202
+ """
203
+ Load data from disk into the dictionary.
204
+
205
+ Supports plain JSON files and ZIP archives containing a JSON file.
206
+ When a ZIP archive is provided, the first JSON member inside the archive
207
+ will be loaded.
208
+ """
209
+ if not file_path and not check_or_create_directory(directory=self._directory):
210
+ return DataOperationResult.NO_LOAD
211
+
212
+ if (file_path := file_path or self._get_file_path()) and not os.path.exists(file_path):
203
213
  return DataOperationResult.NO_LOAD
204
214
 
205
215
  def _perform_load() -> DataOperationResult:
206
- with open(file=self._get_file_path(), encoding=UTF_8) as file_pointer:
207
- try:
208
- data = json.loads(file_pointer.read(), object_hook=regular_to_default_dict_hook)
209
- if (converted_hash := hash_sha256(value=data)) == self.last_hash_saved:
210
- return DataOperationResult.NO_LOAD
211
- self._persistent_content.clear()
212
- self._persistent_content.update(data)
213
- self.last_hash_saved = converted_hash
214
- except json.JSONDecodeError:
215
- return DataOperationResult.LOAD_FAIL
216
+ try:
217
+ if zipfile.is_zipfile(file_path):
218
+ with zipfile.ZipFile(file_path, mode="r") as zf:
219
+ # Prefer json files; pick the first .json entry if available
220
+ if not (json_members := [n for n in zf.namelist() if n.lower().endswith(".json")]):
221
+ return DataOperationResult.LOAD_FAIL
222
+ raw = zf.read(json_members[0]).decode(UTF_8)
223
+ data = json.loads(raw, object_hook=regular_to_default_dict_hook)
224
+ else:
225
+ with open(file=file_path, encoding=UTF_8) as file_pointer:
226
+ data = json.loads(file_pointer.read(), object_hook=regular_to_default_dict_hook)
227
+
228
+ if (converted_hash := hash_sha256(value=data)) == self.last_hash_saved:
229
+ return DataOperationResult.NO_LOAD
230
+ self._persistent_content.clear()
231
+ self._persistent_content.update(data)
232
+ self.last_hash_saved = converted_hash
233
+ except (json.JSONDecodeError, zipfile.BadZipFile, UnicodeDecodeError, OSError):
234
+ return DataOperationResult.LOAD_FAIL
216
235
  return DataOperationResult.LOAD_SUCCESS
217
236
 
218
237
  async with self._save_load_semaphore:
219
238
  return await self._central.looper.async_add_executor_job(
220
- _perform_load, name=f"load-persistent-content-{self._get_filename()}"
239
+ _perform_load, name=f"load-persistent-content-{self._get_file_name()}"
221
240
  )
222
241
 
223
242
  async def clear(self) -> None:
@@ -358,12 +377,12 @@ class DeviceDescriptionCache(BasePersistentFile):
358
377
  addr_set.add(device_address)
359
378
  addr_set.add(address)
360
379
 
361
- async def load(self) -> DataOperationResult:
380
+ async def load(self, *, file_path: str | None = None) -> DataOperationResult:
362
381
  """Load device data from disk into _device_description_cache."""
363
382
  if not self._central.config.use_caches:
364
383
  _LOGGER.debug("load: not caching paramset descriptions for %s", self._central.name)
365
384
  return DataOperationResult.NO_LOAD
366
- if (result := await super().load()) == DataOperationResult.LOAD_SUCCESS:
385
+ if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
367
386
  for (
368
387
  interface_id,
369
388
  device_descriptions,
@@ -495,12 +514,12 @@ class ParamsetDescriptionCache(BasePersistentFile):
495
514
  for parameter in paramset:
496
515
  cache.setdefault((device_address, parameter), set()).add(channel_no)
497
516
 
498
- async def load(self) -> DataOperationResult:
517
+ async def load(self, *, file_path: str | None = None) -> DataOperationResult:
499
518
  """Load paramset descriptions from disk into paramset cache."""
500
519
  if not self._central.config.use_caches:
501
520
  _LOGGER.debug("load: not caching device descriptions for %s", self._central.name)
502
521
  return DataOperationResult.NO_LOAD
503
- if (result := await super().load()) == DataOperationResult.LOAD_SUCCESS:
522
+ if (result := await super().load(file_path=file_path)) == DataOperationResult.LOAD_SUCCESS:
504
523
  self._init_address_parameter_list()
505
524
  return result
506
525
 
@@ -511,17 +530,16 @@ class SessionRecorder(BasePersistentFile):
511
530
 
512
531
  Nested cache with TTL support.
513
532
  Structure:
514
- store[rpc_type][method][params] = (ts: datetime, response: Any, ttl_s: float)
533
+ store[rpc_type][method][params][ts: datetime] = response: Any
515
534
 
516
- - Each entry expires after its TTL (global default or per-entry override).
517
535
  - Expiration is lazy (checked on access/update).
518
536
  - Optional refresh_on_get extends TTL when reading.
519
537
  """
520
538
 
521
539
  __slots__ = (
522
540
  "_active",
523
- "_default_ttl",
524
- "_is_delayed",
541
+ "_ttl",
542
+ "_is_recording",
525
543
  "_refresh_on_get",
526
544
  "_store",
527
545
  )
@@ -533,20 +551,20 @@ class SessionRecorder(BasePersistentFile):
533
551
  self,
534
552
  *,
535
553
  central: hmcu.CentralUnit,
536
- default_ttl_seconds: float,
537
554
  active: bool,
555
+ ttl_seconds: float,
538
556
  refresh_on_get: bool = False,
539
557
  ):
540
558
  """Init the cache."""
541
559
  self._active = active
542
- if default_ttl_seconds <= 0:
560
+ if ttl_seconds < 0:
543
561
  raise ValueError("default_ttl_seconds must be positive")
544
- self._default_ttl: Final = float(default_ttl_seconds)
545
- self._is_delayed: bool = False
562
+ self._ttl: Final = float(ttl_seconds)
563
+ self._is_recording: bool = False
546
564
  self._refresh_on_get: Final = refresh_on_get
547
- # Use nested defaultdicts: rpc_type -> method -> params -> ts(int) -> (response, ttl_s)
565
+ # Use nested defaultdicts: rpc_type -> method -> params -> ts(int) -> response
548
566
  # Annotate as defaultdict to match the actual type and satisfy mypy.
549
- self._store: dict[str, dict[str, dict[str, dict[int, tuple[Any, float]]]]] = defaultdict(
567
+ self._store: dict[str, dict[str, dict[str, dict[int, Any]]]] = defaultdict(
550
568
  lambda: defaultdict(lambda: defaultdict(dict))
551
569
  )
552
570
  super().__init__(
@@ -556,6 +574,13 @@ class SessionRecorder(BasePersistentFile):
556
574
 
557
575
  # ---------- internal helpers ----------
558
576
 
577
+ def _is_expired(self, *, ts: int, now: int | None = None) -> bool:
578
+ """Check whether an entry has expired given epoch seconds."""
579
+ if self._ttl == 0:
580
+ return False
581
+ now = now if now is not None else _now()
582
+ return (now - ts) > self._ttl
583
+
559
584
  def _purge_expired_at(
560
585
  self,
561
586
  *,
@@ -563,7 +588,8 @@ class SessionRecorder(BasePersistentFile):
563
588
  method: str,
564
589
  ) -> None:
565
590
  """Remove expired entries for a given (rpc_type, method) bucket without creating new ones."""
566
-
591
+ if self._ttl == 0:
592
+ return
567
593
  if not (bucket_by_method := self._store.get(rpc_type)):
568
594
  return
569
595
  if not (bucket_by_parameter := bucket_by_method.get(method)):
@@ -571,9 +597,7 @@ class SessionRecorder(BasePersistentFile):
571
597
  now = _now()
572
598
  empty_params: list[str] = []
573
599
  for p, bucket_by_ts in bucket_by_parameter.items():
574
- expired_ts = [
575
- ts for ts, (_r, ttl_s) in list(bucket_by_ts.items()) if _is_expired(ts=ts, ttl_s=ttl_s, now=now)
576
- ]
600
+ expired_ts = [ts for ts, _r in list(bucket_by_ts.items()) if self._is_expired(ts=ts, now=now)]
577
601
  for ts in expired_ts:
578
602
  del bucket_by_ts[ts]
579
603
  if not bucket_by_ts:
@@ -597,21 +621,24 @@ class SessionRecorder(BasePersistentFile):
597
621
  return self._active
598
622
 
599
623
  async def _deactivate_after_delay(
600
- self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
624
+ self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
601
625
  ) -> None:
602
626
  """Change the state of the session recorder after a delay."""
603
- self._is_delayed = True
627
+ self._is_recording = True
604
628
  await asyncio.sleep(delay)
605
629
  self._active = False
606
- self._is_delayed = False
630
+ self._is_recording = False
607
631
  if auto_save:
608
- await self.save(randomize_output=randomize_output, use_ts_in_filename=use_ts_in_filename)
609
- _LOGGER.debug("Deactivated session recorder after %s minutes", {delay / 60})
632
+ await self.save(randomize_output=randomize_output, use_ts_in_file_name=use_ts_in_file_name)
633
+ _LOGGER.debug("Deactivated session recorder after %s seconds", {delay})
610
634
 
611
635
  async def activate(
612
- self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
613
- ) -> None:
636
+ self, *, on_time: int = 0, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
637
+ ) -> bool:
614
638
  """Activate the session recorder. Disable after on_time(seconds)."""
639
+ if self._is_recording:
640
+ _LOGGER.info("ACTIVATE: Recording session is already running.")
641
+ return False
615
642
  self._store.clear()
616
643
  self._active = True
617
644
  if on_time > 0:
@@ -620,28 +647,33 @@ class SessionRecorder(BasePersistentFile):
620
647
  delay=on_time,
621
648
  auto_save=auto_save,
622
649
  randomize_output=randomize_output,
623
- use_ts_in_filename=use_ts_in_filename,
650
+ use_ts_in_file_name=use_ts_in_file_name,
624
651
  ),
625
652
  name=f"session_recorder_{self._central.name}",
626
653
  )
654
+ return True
627
655
 
628
656
  async def deactivate(
629
- self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_filename: bool
630
- ) -> None:
657
+ self, *, delay: int, auto_save: bool, randomize_output: bool, use_ts_in_file_name: bool
658
+ ) -> bool:
631
659
  """Deactivate the session recorder. Optionally after a delay(seconds)."""
660
+ if self._is_recording:
661
+ _LOGGER.info("DEACTIVATE: Recording session is already running.")
662
+ return False
632
663
  if delay > 0:
633
664
  self._central.looper.create_task(
634
665
  target=self._deactivate_after_delay(
635
666
  delay=delay,
636
667
  auto_save=auto_save,
637
668
  randomize_output=randomize_output,
638
- use_ts_in_filename=use_ts_in_filename,
669
+ use_ts_in_file_name=use_ts_in_file_name,
639
670
  ),
640
671
  name=f"session_recorder_{self._central.name}",
641
672
  )
642
673
  else:
643
674
  self._active = False
644
- self._is_delayed = False
675
+ self._is_recording = False
676
+ return True
645
677
 
646
678
  def add_json_rpc_session(
647
679
  self,
@@ -689,14 +721,11 @@ class SessionRecorder(BasePersistentFile):
689
721
  method: str,
690
722
  params: Any,
691
723
  response: Any,
692
- ttl_seconds: float | None = None,
693
724
  ts: int | datetime | None = None,
694
725
  ) -> Self:
695
726
  """Insert or update an entry."""
696
727
  self._purge_expired_at(rpc_type=rpc_type, method=method)
697
728
  frozen_param = _freeze_params(params)
698
- if (ttl_s := ttl_seconds if ttl_seconds is not None else self._default_ttl) <= 0:
699
- raise ValueError("ttl_seconds must be positive")
700
729
  # Normalize timestamp to int epoch seconds
701
730
  if isinstance(ts, datetime):
702
731
  ts_int = int(ts.timestamp())
@@ -704,7 +733,7 @@ class SessionRecorder(BasePersistentFile):
704
733
  ts_int = ts
705
734
  else:
706
735
  ts_int = _now()
707
- self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = (response, ttl_s)
736
+ self._bucket(rpc_type=rpc_type, method=method)[frozen_param][ts_int] = response
708
737
  return self
709
738
 
710
739
  def get(
@@ -736,9 +765,9 @@ class SessionRecorder(BasePersistentFile):
736
765
  latest_ts = max(bucket_by_ts.keys())
737
766
  except ValueError:
738
767
  return default
739
- resp, ttl_s = bucket_by_ts[latest_ts]
768
+ resp = bucket_by_ts[latest_ts]
740
769
  if self._refresh_on_get:
741
- bucket_by_ts[_now()] = (resp, ttl_s)
770
+ bucket_by_ts[_now()] = resp
742
771
  return resp
743
772
 
744
773
  def delete(self, *, rpc_type: str, method: str, params: Any) -> bool:
@@ -762,7 +791,7 @@ class SessionRecorder(BasePersistentFile):
762
791
  self._store.pop(rpc_type, None)
763
792
  return True
764
793
 
765
- def get_latest_fresh(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
794
+ def get_latest_response_by_method(self, *, rpc_type: str, method: str) -> list[tuple[Any, Any]]:
766
795
  """Return latest non-expired responses for a given (rpc_type, method)."""
767
796
  # Purge expired entries first without creating any new buckets.
768
797
  self._purge_expired_at(rpc_type=rpc_type, method=method)
@@ -780,12 +809,40 @@ class SessionRecorder(BasePersistentFile):
780
809
  latest_ts = max(bucket_by_ts.keys())
781
810
  except ValueError:
782
811
  continue
783
- resp, _ttl_s = bucket_by_ts[latest_ts]
812
+ resp = bucket_by_ts[latest_ts]
784
813
  params = _unfreeze_params(frozen_params=frozen_params)
785
814
 
786
815
  result.append((params, resp))
787
816
  return result
788
817
 
818
+ def get_latest_response_by_params(
819
+ self,
820
+ *,
821
+ rpc_type: str,
822
+ method: str,
823
+ params: Any,
824
+ ) -> Any:
825
+ """Return latest non-expired responses for a given (rpc_type, method, params)."""
826
+ # Purge expired entries first without creating any new buckets.
827
+ self._purge_expired_at(rpc_type=rpc_type, method=method)
828
+
829
+ # Access store safely to avoid side effects from creating buckets.
830
+ if not (bucket_by_method := self._store.get(rpc_type)):
831
+ return None
832
+ if not (bucket_by_parameter := bucket_by_method.get(method)):
833
+ return None
834
+ frozen_params = _freeze_params(params=params)
835
+
836
+ # For each parameter, choose the response at the latest timestamp.
837
+ if (bucket_by_ts := bucket_by_parameter.get(frozen_params)) is None:
838
+ return None
839
+
840
+ try:
841
+ latest_ts = max(bucket_by_ts.keys())
842
+ return bucket_by_ts[latest_ts]
843
+ except ValueError:
844
+ return None
845
+
789
846
  def cleanup(self) -> None:
790
847
  """Purge all expired entries globally."""
791
848
  for rpc_type in list(self._store.keys()):
@@ -893,6 +950,8 @@ def _unfreeze_params(frozen_params: str) -> Any:
893
950
  return {k: _walk(v) for k, v in o.items()}
894
951
  if isinstance(o, list):
895
952
  return [_walk(x) for x in o]
953
+ if isinstance(o, tuple):
954
+ return tuple(_walk(x) for x in o)
896
955
  if o.startswith("{") and o.endswith("}"):
897
956
  return ast.literal_eval(o)
898
957
  return o
@@ -905,8 +964,8 @@ def _get_file_path(*, storage_directory: str, sub_directory: str) -> str:
905
964
  return f"{storage_directory}/{sub_directory}"
906
965
 
907
966
 
908
- def _get_filename(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
909
- """Return the content filename."""
967
+ def _get_file_name(*, central_name: str, file_name: str, ts: datetime | None = None) -> str:
968
+ """Return the content file_name."""
910
969
  fn = f"{slugify(central_name)}_{file_name}"
911
970
  if ts:
912
971
  fn += f"_{ts.strftime(FILE_NAME_TS_PATTERN)}"
@@ -918,12 +977,6 @@ def _now() -> int:
918
977
  return int(datetime.now(tz=UTC).timestamp())
919
978
 
920
979
 
921
- def _is_expired(*, ts: int, ttl_s: float, now: int | None = None) -> bool:
922
- """Check whether an entry has expired given epoch seconds."""
923
- now = now if now is not None else _now()
924
- return (now - ts) > ttl_s
925
-
926
-
927
980
  async def cleanup_files(*, central_name: str, storage_directory: str) -> None:
928
981
  """Clean up the used files."""
929
982
  loop = asyncio.get_running_loop()
aiohomematic/support.py CHANGED
@@ -582,6 +582,15 @@ def create_random_device_addresses(*, addresses: list[str]) -> dict[str, str]:
582
582
  return {adr: f"VCU{int(random.randint(1000000, 9999999))}" for adr in addresses}
583
583
 
584
584
 
585
+ def shrink_json_file(file_name: str) -> None:
586
+ """Shrink a json file."""
587
+ with open(file_name, "rb") as f:
588
+ data = orjson.loads(f.read())
589
+
590
+ with open(file_name, "wb") as f:
591
+ f.write(orjson.dumps(data))
592
+
593
+
585
594
  # --- Structured error boundary logging helpers ---
586
595
 
587
596
  _BOUNDARY_MSG = "error_boundary"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: aiohomematic
3
- Version: 2025.10.9
3
+ Version: 2025.10.11
4
4
  Summary: Homematic interface for Home Assistant running on Python 3.
5
5
  Home-page: https://github.com/sukramj/aiohomematic
6
6
  Author-email: SukramJ <sukramj@icloud.com>, Daniel Perna <danielperna84@gmail.com>
@@ -1,6 +1,6 @@
1
1
  aiohomematic/__init__.py,sha256=Uo9CIoil0Arl3GwtgMZAwM8jhcgoBKcZEgj8cXYlswY,2258
2
2
  aiohomematic/async_support.py,sha256=01chvt-Ac_UIAWI39VeGpQV9AmxpSCbNyfPPAwX_Qqc,7865
3
- aiohomematic/const.py,sha256=8Reu1r01pcMN3U5VQLufSJxCcuzhcU1Utn-IKDIKTgE,27124
3
+ aiohomematic/const.py,sha256=FeWi0VLHTN0nP61c4xZjt1TRJfn2azFchkYPHiXCPrQ,27427
4
4
  aiohomematic/context.py,sha256=hGE-iPcPt21dY-1MZar-Hyh9YaKL-VS42xjrulIVyRQ,429
5
5
  aiohomematic/converter.py,sha256=FiHU71M5RZ7N5FXJYh2CN14s63-PM-SHdb0cJ_CLx54,3602
6
6
  aiohomematic/decorators.py,sha256=cSW0aF3PzrW_qW6H0sjRNH9eqO8ysqhXZDgJ2OJTZM4,11038
@@ -8,28 +8,28 @@ aiohomematic/exceptions.py,sha256=RLldRD4XY8iYuNYVdspCbbphGcKsximB7R5OL7cYKw0,50
8
8
  aiohomematic/hmcli.py,sha256=_QZFKcfr_KJrdiyBRbhz0f8LZ95glD7LgJBmQc8cwog,4911
9
9
  aiohomematic/property_decorators.py,sha256=3Id1_rWIYnwyN_oSMgbh7XNKz9HPkGTC1CeS5ei04ZQ,17139
10
10
  aiohomematic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
- aiohomematic/support.py,sha256=R275ZIKoufMZORcBa7Tiq0MZ7KtMZiqoUXUJX4LM1qA,23083
11
+ aiohomematic/support.py,sha256=eUbtdnrkq99o2DxJzyai5LHqrkpXC_gWQtrkI4zIgzg,23310
12
12
  aiohomematic/validator.py,sha256=qX5janicu4jLrAVzKoyWgXe1XU4EOjk5-QhNFL4awTQ,3541
13
- aiohomematic/central/__init__.py,sha256=9q1cl0qCVvPSd6eW7ipFdbd783SVLVz1fD_Hh8iDDv8,93519
13
+ aiohomematic/central/__init__.py,sha256=xqlWOAKbnLzYTteVHIdxFpLCBcrTzgub8ZztJd43HTw,94122
14
14
  aiohomematic/central/decorators.py,sha256=vrujdw2QMXva-7DGXMQyittujx0q7cPuGD-SCeQlD30,6886
15
- aiohomematic/central/rpc_server.py,sha256=PX6idUEZ5j9fx9y__Q--Zcc2cyFMhLBie-lNvkx1bsI,10949
16
- aiohomematic/client/__init__.py,sha256=X7lR-A9fAYKbVoR3aNRui_-pfeR20-sehNSlXvkrIoE,73905
15
+ aiohomematic/central/rpc_server.py,sha256=EhvBy8oMjBTR8MvH5QXo3lvlsCNJrvu6B85_CAg6sG8,10742
16
+ aiohomematic/client/__init__.py,sha256=14lx62VvPm9yQgm5nUVdzgAKkhS8GXeAvV8gmGbldl8,73941
17
17
  aiohomematic/client/_rpc_errors.py,sha256=IaYjX60mpBJ43gDCJjuUSVraamy5jXHTRjOnutK4azs,2962
18
- aiohomematic/client/json_rpc.py,sha256=u25nedb3AEK54GN9F4z3oOCfoE-YTYZpL4166OsRPAg,51274
18
+ aiohomematic/client/json_rpc.py,sha256=mrPvRR4hmc2MfMec8tjdQbF2RK1u0W1byOFUsiEP4fs,51319
19
19
  aiohomematic/client/rpc_proxy.py,sha256=T6tmfBAJJSFxzBLrhKJc6_KiHyTs5EVnStQsVJA5YkY,11604
20
20
  aiohomematic/model/__init__.py,sha256=gUYa8ROWSbXjZTWUTmINZ1bbYAxGkVpA-onxaJN2Iso,5436
21
- aiohomematic/model/data_point.py,sha256=7FqnFO9C0_UtUfwPIsaq61QDhjZqsmjkepzzc_9StUs,41595
22
- aiohomematic/model/device.py,sha256=CQ_zFRVnwmuEQzBEHlgz3DxFzDnW36FVCPlckmho2TI,52891
21
+ aiohomematic/model/data_point.py,sha256=VdwzjRrBDaYhWyIQL4JVC9wYTFMSwvwymYSEAPxjms8,41573
22
+ aiohomematic/model/device.py,sha256=WfnSXPahzyCptCsvoWRbrnQQEqlEbbgKb9qappMUrx4,52898
23
23
  aiohomematic/model/event.py,sha256=uO6Z2pCZEU_8MR8uRIIZjX8IL0rFh7sNhhTNT8yFoVU,6852
24
24
  aiohomematic/model/support.py,sha256=ITyxBIJ2Bv69fcwuUD2HyMKlburOnLo9NFs5VKg34ZY,19635
25
- aiohomematic/model/update.py,sha256=p_zyC73fERQxXF2d1O89zi9EJovzNlrYIONJfPOz5_g,5134
25
+ aiohomematic/model/update.py,sha256=R3uUA61m-UQNNGkRod3vES66AgkPKay_CPyyrd-nqVI,5140
26
26
  aiohomematic/model/calculated/__init__.py,sha256=JNtxK4-XZeyR6MxfKVPdcF6ezQliQYTWEDoeOChumaE,2966
27
27
  aiohomematic/model/calculated/climate.py,sha256=rm9b4rCrmsZAA5_dzP6YRtahdveI97581_EnC4utqpg,10499
28
28
  aiohomematic/model/calculated/data_point.py,sha256=ESUqwolPViXPuH9hvL0q5FMvsGXjcqxeyySpNTUAMU8,11587
29
29
  aiohomematic/model/calculated/operating_voltage_level.py,sha256=99A8HvahVS4IxpgK1dsgQXHfeubU7JI2c0ObagbnSNQ,13505
30
30
  aiohomematic/model/calculated/support.py,sha256=GBD35_OR3TEAWo5ADeH_gk2Ebw9pHOtOnOS7umCkXB0,7989
31
31
  aiohomematic/model/custom/__init__.py,sha256=JxJXyr2CgKlj-jc1xQ14lbMT76vvswfLUecwj8RJCXA,6073
32
- aiohomematic/model/custom/climate.py,sha256=5r0zjBOhnp4QcL2p6x2UYhIMQqa0SG1Sw1cnnyY9POs,57274
32
+ aiohomematic/model/custom/climate.py,sha256=OnkZKxJKInrP52Tqu_hPcDDZbyL0wTMaIjWuBJ3th_k,57292
33
33
  aiohomematic/model/custom/const.py,sha256=s4iqhwvt8x41h4-CtMCyXwryGHuBNbhBrcJ5zGVRFJU,4939
34
34
  aiohomematic/model/custom/cover.py,sha256=KQzLEoPkKgZ2oi2oblUrGReQnT_0WAuseWAxu_xH5_Y,29035
35
35
  aiohomematic/model/custom/data_point.py,sha256=WLKygP3SQwtG35BpOH3HNt_o-z9dfSUmuSyPDIEWF8A,14133
@@ -67,12 +67,10 @@ aiohomematic/rega_scripts/set_program_state.fn,sha256=0bnv7lUj8FMjDZBz325tDVP61m
67
67
  aiohomematic/rega_scripts/set_system_variable.fn,sha256=sTmr7vkPTPnPkor5cnLKlDvfsYRbGO1iq2z_2pMXq5E,383
68
68
  aiohomematic/store/__init__.py,sha256=PHwF_tw_zL20ODwLywHgpOLWrghQo_BMZzeiQSXN1Fc,1081
69
69
  aiohomematic/store/dynamic.py,sha256=kgZs5gJ4i8bHZKkJ883xuLecSKdjj6UwlLRJAvQcNGI,22528
70
- aiohomematic/store/persistent.py,sha256=yRrGgCLGQtuUAKg37UeLDnp0HcnZjl9DordbjJukLCA,38066
70
+ aiohomematic/store/persistent.py,sha256=SBL8AhqUzpoPtJ50GkLYHwvRJS52fBWqNPjgvykxbY8,40233
71
71
  aiohomematic/store/visibility.py,sha256=0y93kPTugqQsrh6kKamfgwBkbIdBPEZpQVv_1NaLz3A,31662
72
- aiohomematic-2025.10.9.dist-info/licenses/LICENSE,sha256=q-B0xpREuZuvKsmk3_iyVZqvZ-vJcWmzMZpeAd0RqtQ,1083
73
- aiohomematic_support/__init__.py,sha256=_0YtF4lTdC_k6-zrM2IefI0u0LMr_WA61gXAyeGLgbY,66
74
- aiohomematic_support/client_local.py,sha256=GAIg3DN4lD4H9eA3gs_CILtWR3tphXrf_Hai9rWFpM8,12817
75
- aiohomematic-2025.10.9.dist-info/METADATA,sha256=ezEHtzT_d_BJhX-cCb920mQK5ED0UmvdNsqAoGJrELw,7603
76
- aiohomematic-2025.10.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
77
- aiohomematic-2025.10.9.dist-info/top_level.txt,sha256=5TDRlUWQPThIUwQjOj--aUo4UA-ow4m0sNhnoCBi5n8,34
78
- aiohomematic-2025.10.9.dist-info/RECORD,,
72
+ aiohomematic-2025.10.11.dist-info/licenses/LICENSE,sha256=q-B0xpREuZuvKsmk3_iyVZqvZ-vJcWmzMZpeAd0RqtQ,1083
73
+ aiohomematic-2025.10.11.dist-info/METADATA,sha256=r7XE6UJklJaiC8x0NM-lKx3BSErN8oj2ajLnb2Y9mVY,7604
74
+ aiohomematic-2025.10.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
75
+ aiohomematic-2025.10.11.dist-info/top_level.txt,sha256=iGUvt1N-E72vKRq7Anpp62HwkQngStrUK0JfL1zj1TE,13
76
+ aiohomematic-2025.10.11.dist-info/RECORD,,
@@ -0,0 +1 @@
1
+ aiohomematic
@@ -1,2 +0,0 @@
1
- aiohomematic
2
- aiohomematic_support
@@ -1 +0,0 @@
1
- """Module to support aiohomematic testing with a local client."""