ramses-rf 0.53.4__py3-none-any.whl → 0.53.6__py3-none-any.whl

This diff shows the content of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
ramses_rf/database.py CHANGED
@@ -35,7 +35,7 @@ from typing import TYPE_CHECKING, Any, NewType
 
 from ramses_tx import CODES_SCHEMA, RQ, Code, Message, Packet
 
-from .storage import StorageWorker
+from .storage import PacketLogEntry, StorageWorker
 
 if TYPE_CHECKING:
     DtmStrT = NewType("DtmStrT", str)
@@ -241,28 +241,30 @@ class MessageIndex:
        :param dt_now: current timestamp
        :param _cutoff: the oldest timestamp to retain, default is 24 hours ago
        """
-       msgs = None
        dtm = dt_now - _cutoff
 
-       self._cu.execute("SELECT dtm FROM messages WHERE dtm >= ?", (dtm,))
-       rows = self._cu.fetchall()  # fetch dtm of current messages to retain
+       # Submit prune request to worker (Non-blocking I/O)
+       self._worker.submit_prune(dtm)
+
+       # Prune in-memory cache synchronously (Fast CPU-bound op)
+       dtm_iso = dtm.isoformat(timespec="microseconds")
 
        try:  # make this operation atomic, i.e. update self._msgs only on success
            await self._lock.acquire()
-           self._cu.execute("DELETE FROM messages WHERE dtm < ?", (dtm,))
-           msgs = OrderedDict({row[0]: self._msgs[row[0]] for row in rows})
-           self._cx.commit()
+           # Rebuild dict keeping only newer items
+           self._msgs = OrderedDict(
+               (k, v) for k, v in self._msgs.items() if k >= dtm_iso
+           )
 
-       except sqlite3.Error:  # need to tighten?
-           self._cx.rollback()
+       except Exception as err:
+           _LOGGER.warning("MessageIndex housekeeping error: %s", err)
 
        else:
-           self._msgs = msgs
+           _LOGGER.debug(
+               "MessageIndex housekeeping completed, retained messages >= %s",
+               dtm_iso,
+           )
 
        finally:
            self._lock.release()
-           if msgs:
-               _LOGGER.debug(
-                   "MessageIndex size was: %d, now: %d", len(rows), len(msgs)
-               )
 
        while True:
            self._last_housekeeping = dt.now()
@@ -345,15 +347,15 @@ class MessageIndex:
        # Avoid blocking read; worker handles REPLACE on unique constraint collision
 
        # Prepare data tuple for worker
-       data = (
-           _now,
-           verb,
-           src,
-           src,
-           code,
-           None,
-           hdr,
-           "|",
+       data = PacketLogEntry(
+           dtm=_now,
+           verb=verb,
+           src=src,
+           dst=src,
+           code=code,
+           ctx=None,
+           hdr=hdr,
+           plk="|",
        )
 
        self._worker.submit_packet(data)
@@ -390,15 +392,15 @@ class MessageIndex:
        # _old_msgs = self._delete_from(hdr=msg._pkt._hdr)
        # Refactor: Worker uses INSERT OR REPLACE to handle collision
 
-       data = (
-           msg.dtm,
-           str(msg.verb),
-           msg.src.id,
-           msg.dst.id,
-           str(msg.code),
-           msg_pkt_ctx,
-           msg._pkt._hdr,
-           payload_keys(msg.payload),
+       data = PacketLogEntry(
+           dtm=msg.dtm,
+           verb=str(msg.verb),
+           src=msg.src.id,
+           dst=msg.dst.id,
+           code=str(msg.code),
+           ctx=msg_pkt_ctx,
+           hdr=msg._pkt._hdr,
+           plk=payload_keys(msg.payload),
        )
 
        self._worker.submit_packet(data)
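The in-memory prune above relies on the cache keys being fixed-width ISO-8601 strings, so a plain string comparison against `dtm_iso` orders them chronologically. A minimal sketch of that property (the cache contents and cutoff here are made up for illustration):

```
from collections import OrderedDict
from datetime import datetime as dt, timedelta as td

# Hypothetical cache keyed by ISO-8601 strings, mirroring MessageIndex._msgs above.
msgs = OrderedDict(
    (ts.isoformat(timespec="microseconds"), f"msg @ {ts}")
    for ts in (dt(2025, 1, 1, 12, 0) + td(minutes=i) for i in range(5))
)

cutoff = dt(2025, 1, 1, 12, 2).isoformat(timespec="microseconds")

# Fixed-width ISO-8601 strings sort lexicographically in chronological order,
# so a plain string comparison is enough to drop the stale entries.
msgs = OrderedDict((k, v) for k, v in msgs.items() if k >= cutoff)
assert list(msgs) == [
    dt(2025, 1, 1, 12, m).isoformat(timespec="microseconds") for m in (2, 3, 4)
]
```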
ramses_rf/dispatcher.py CHANGED
@@ -270,7 +270,11 @@ def process_msg(gwy: Gateway, msg: Message) -> None:
        )
 
    except (AttributeError, LookupError, TypeError, ValueError) as err:
-       _LOGGER.exception("%s < %s(%s)", msg._pkt, err.__class__.__name__, err)
+       if getattr(gwy.config, "enforce_strict_handling", False):
+           raise
+       _LOGGER.warning(
+           "%s < %s(%s)", msg._pkt, err.__class__.__name__, err, exc_info=True
+       )
 
    else:
        logger_xxxx(msg)
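Parser errors are now logged at WARNING (and only re-raised when `enforce_strict_handling` is set), but `exc_info=True` keeps the full traceback in the record. A small stand-alone demonstration of that logging behaviour:

```
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("demo")

try:
    {}["missing"]  # deliberately raise a LookupError (KeyError)
except LookupError as err:
    # exc_info=True attaches the traceback to the record, so downgrading from
    # logger.exception() to logger.warning() loses no diagnostic detail.
    log.warning("%s(%s)", err.__class__.__name__, err, exc_info=True)
```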
ramses_rf/entity_base.py CHANGED
@@ -913,7 +913,9 @@ class _Discovery(_MessageDB):
                sql, (self.id[:_ID_SLICE], self.id[:_ID_SLICE])
            ):
                _LOGGER.debug("Fetched OT ctx from index: %s", rec[0])
-               res.append(rec[0])
+               # SQLite can return int, expected str (hex)
+               val = f"{rec[0]:02X}" if isinstance(rec[0], int) else rec[0]
+               res.append(val)
        else:  # TODO(eb): remove next Q1 2026
            res_dict: dict[bool | str | None, Message] | list[Any] = self._msgz[
                Code._3220
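The normalisation above covers the case where SQLite hands back an INTEGER for a context that callers expect as a two-character hex string. A minimal illustration (the 0x73 value is just an example message ID, not taken from the diff):

```
# Whether the index returns the OpenTherm ctx as an int or as a hex string,
# the caller always ends up with the two-character upper-case hex form.
for rec in ((0x73,), ("73",)):
    val = f"{rec[0]:02X}" if isinstance(rec[0], int) else rec[0]
    assert val == "73"
```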
ramses_rf/gateway.py CHANGED
@@ -12,6 +12,7 @@ from __future__ import annotations
 import asyncio
 import logging
 from collections.abc import Awaitable, Callable
+from logging.handlers import QueueListener
 from types import SimpleNamespace
 from typing import TYPE_CHECKING, Any
 
@@ -101,6 +102,7 @@ class Gateway(Engine):
        known_list: DeviceListT | None = None,
        loop: asyncio.AbstractEventLoop | None = None,
        transport_constructor: Callable[..., Awaitable[RamsesTransportT]] | None = None,
+       hgi_id: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the Gateway instance.
@@ -121,6 +123,8 @@ class Gateway(Engine):
        :type loop: asyncio.AbstractEventLoop | None, optional
        :param transport_constructor: A factory for creating the transport layer, defaults to None.
        :type transport_constructor: Callable[..., Awaitable[RamsesTransportT]] | None, optional
+       :param hgi_id: The Device ID to use for the HGI (gateway), overriding defaults.
+       :type hgi_id: str | None, optional
        :param kwargs: Additional configuration parameters passed to the engine and schema.
        :type kwargs: Any
        """
@@ -138,6 +142,7 @@ class Gateway(Engine):
            block_list=block_list,
            known_list=known_list,
            loop=loop,
+           hgi_id=hgi_id,
            transport_constructor=transport_constructor,
            **SCH_ENGINE_CONFIG(config),
        )
@@ -159,6 +164,7 @@ class Gateway(Engine):
        self.device_by_id: dict[DeviceIdT, Device] = {}
 
        self.msg_db: MessageIndex | None = None
+       self._pkt_log_listener: QueueListener | None = None
 
    def __repr__(self) -> str:
        """Return a string representation of the Gateway.
@@ -218,10 +224,12 @@ class Gateway(Engine):
            if system.dhw:
                system.dhw._start_discovery_poller()
 
-       await set_pkt_logging_config(  # type: ignore[arg-type]
+       _, self._pkt_log_listener = await set_pkt_logging_config(  # type: ignore[arg-type]
            cc_console=self.config.reduce_processing >= DONT_CREATE_MESSAGES,
            **self._packet_log,
        )
+       if self._pkt_log_listener:
+           self._pkt_log_listener.start()
 
        # initialize SQLite index, set in _tx/Engine
        if self._sqlite_index:  # TODO(eb): default to True in Q1 2026
@@ -271,6 +279,13 @@ class Gateway(Engine):
        # to the DB while we are closing it.
        await super().stop()
 
+       if self._pkt_log_listener:
+           self._pkt_log_listener.stop()
+           # Close handlers to ensure files are flushed/closed
+           for handler in self._pkt_log_listener.handlers:
+               handler.close()
+           self._pkt_log_listener = None
+
        if self.msg_db:
            self.msg_db.stop()
 
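A minimal usage sketch of the two additions above. It assumes the usual `Gateway(port_name, ...)` calling convention with async `start()`/`stop()`; the serial port, device ID and run time are placeholder values:

```
import asyncio

from ramses_rf import Gateway

async def main() -> None:
    # hgi_id pins the gateway's device ID up front instead of learning it from
    # traffic; "18:123456" is a made-up example ID.
    gwy = Gateway("/dev/ttyUSB0", hgi_id="18:123456")
    await gwy.start()
    try:
        await asyncio.sleep(30)  # ...run for a while...
    finally:
        await gwy.stop()  # also stops and flushes the packet-log QueueListener

asyncio.run(main())
```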
ramses_rf/schemas.py CHANGED
@@ -245,6 +245,7 @@ SCH_GLOBAL_SCHEMAS = vol.Schema(SCH_GLOBAL_SCHEMAS_DICT, extra=vol.PREVENT_EXTRA
 # 4/7: Gateway (parser/state) configuration
 SZ_DISABLE_DISCOVERY: Final = "disable_discovery"
 SZ_ENABLE_EAVESDROP: Final = "enable_eavesdrop"
+SZ_ENFORCE_STRICT_HANDLING: Final = "enforce_strict_handling"
 SZ_MAX_ZONES: Final = "max_zones"  # TODO: move to TCS-attr from GWY-layer
 SZ_REDUCE_PROCESSING: Final = "reduce_processing"
 SZ_USE_ALIASES: Final = "use_aliases"  # use friendly device names from known_list
@@ -253,6 +254,7 @@ SZ_USE_NATIVE_OT: Final = "use_native_ot"  # favour OT (3220s) over RAMSES
 SCH_GATEWAY_DICT = {
     vol.Optional(SZ_DISABLE_DISCOVERY, default=False): bool,
     vol.Optional(SZ_ENABLE_EAVESDROP, default=False): bool,
+    vol.Optional(SZ_ENFORCE_STRICT_HANDLING, default=False): bool,
     vol.Optional(SZ_MAX_ZONES, default=DEFAULT_MAX_ZONES): vol.All(
         int, vol.Range(min=1, max=16)
     ),  # NOTE: no default
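A cut-down sketch of how the new key behaves under voluptuous validation; only two of the gateway keys are reproduced here, so this is illustrative rather than the full schema:

```
import voluptuous as vol

SCH_GATEWAY = vol.Schema(
    {
        vol.Optional("enable_eavesdrop", default=False): bool,
        vol.Optional("enforce_strict_handling", default=False): bool,
    },
    extra=vol.PREVENT_EXTRA,
)

assert SCH_GATEWAY({}) == {
    "enable_eavesdrop": False,
    "enforce_strict_handling": False,  # default keeps the previous (lenient) behaviour
}
assert SCH_GATEWAY({"enforce_strict_handling": True})["enforce_strict_handling"] is True
```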
ramses_rf/storage.py CHANGED
@@ -7,24 +7,46 @@ import logging
 import queue
 import sqlite3
 import threading
-from typing import Any
+from typing import Any, NamedTuple
 
 _LOGGER = logging.getLogger(__name__)
 
 
+class PacketLogEntry(NamedTuple):
+    """Represents a packet to be written to the database."""
+
+    dtm: Any
+    verb: str
+    src: str
+    dst: str
+    code: str
+    ctx: str | None
+    hdr: str
+    plk: str
+
+
+class PruneRequest(NamedTuple):
+    """Represents a request to prune old records."""
+
+    dtm_limit: Any
+
+
+QueueItem = PacketLogEntry | PruneRequest | tuple[str, Any] | None
+
+
 class StorageWorker:
     """A background worker thread to handle blocking storage I/O asynchronously."""
 
-    def __init__(self, db_path: str = ":memory:"):
+    def __init__(self, db_path: str = ":memory:") -> None:
        """Initialize the storage worker thread."""
        self._db_path = db_path
-       self._queue: queue.SimpleQueue[tuple[str, Any] | None] = queue.SimpleQueue()
+       self._queue: queue.SimpleQueue[QueueItem] = queue.SimpleQueue()
        self._ready_event = threading.Event()
 
        self._thread = threading.Thread(
            target=self._run,
            name="RamsesStorage",
-           daemon=True,  # FIX: Set to True so the process can exit even if stop() is missed
+           daemon=True,  # Allows process exit even if stop() is missed
        )
        self._thread.start()
 
@@ -32,16 +54,16 @@ class StorageWorker:
        """Wait until the database is initialized and ready."""
        return self._ready_event.wait(timeout)
 
-   def submit_packet(self, packet_data: tuple[Any, ...]) -> None:
+   def submit_packet(self, packet: PacketLogEntry) -> None:
        """Submit a packet tuple for SQL insertion (Non-blocking)."""
-       self._queue.put(("SQL", packet_data))
+       self._queue.put(packet)
+
+   def submit_prune(self, dtm_limit: Any) -> None:
+       """Submit a prune request for SQL deletion (Non-blocking)."""
+       self._queue.put(PruneRequest(dtm_limit))
 
    def flush(self, timeout: float = 10.0) -> None:
        """Block until all currently pending tasks are processed."""
-       # REMOVED: if self._queue.empty(): return
-       # This check caused a race condition where flush() returned before
-       # the worker finished committing the last item it just popped.
-
        # We inject a special marker into the queue
        sentinel = threading.Event()
        self._queue.put(("MARKER", sentinel))
@@ -89,7 +111,7 @@ class StorageWorker:
            detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
            check_same_thread=False,
            uri=True,
-           timeout=10.0,  # Increased timeout for locking
+           timeout=10.0,
        )
 
        # Enable Write-Ahead Logging for concurrency
@@ -116,16 +138,9 @@ class StorageWorker:
                if item is None:  # Shutdown signal
                    break
 
-               task_type, data = item
-
-               if task_type == "MARKER":
-                   # Flush requested
-                   data.set()
-                   continue
-
-               if task_type == "SQL":
+               if isinstance(item, PacketLogEntry):
                    # Optimization: Batch processing
-                   batch = [data]
+                   batch = [item]
                    # Drain queue of pending SQL tasks to bulk insert
                    while not self._queue.empty():
                        try:
@@ -135,22 +150,19 @@ class StorageWorker:
                                self._queue.put(None)  # Re-queue poison pill
                                break
 
-                           next_type, next_data = next_item
-                           if next_type == "SQL":
-                               batch.append(next_data)
-                           elif next_type == "MARKER":
-                               # Handle marker after this batch
-                               self._queue.put(next_item)  # Re-queue marker
-                               break
+                           if isinstance(next_item, PacketLogEntry):
+                               batch.append(next_item)
                            else:
-                               pass
+                               # Handle other types after this batch
+                               self._queue.put(next_item)  # Re-queue
+                               break
                        except queue.Empty:
                            break
 
                    try:
                        conn.executemany(
                            """
-                           INSERT OR REPLACE INTO messages
+                           INSERT OR REPLACE INTO messages
                            (dtm, verb, src, dst, code, ctx, hdr, plk)
                            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                            """,
@@ -160,6 +172,20 @@ class StorageWorker:
                    except sqlite3.Error as err:
                        _LOGGER.error("SQL Write Failed: %s", err)
 
+               elif isinstance(item, PruneRequest):
+                   try:
+                       conn.execute(
+                           "DELETE FROM messages WHERE dtm < ?", (item.dtm_limit,)
+                       )
+                       conn.commit()
+                       _LOGGER.debug("Pruned records older than %s", item.dtm_limit)
+                   except sqlite3.Error as err:
+                       _LOGGER.error("SQL Prune Failed: %s", err)
+
+               elif isinstance(item, tuple) and item[0] == "MARKER":
+                   # Flush requested
+                   item[1].set()
+
            except Exception as err:
                _LOGGER.exception("StorageWorker encountered an error: %s", err)
 
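A minimal usage sketch of the reworked worker API (an in-memory database; the device IDs and header string are illustrative, not taken from the diff):

```
from datetime import datetime as dt, timedelta as td

from ramses_rf.storage import PacketLogEntry, StorageWorker

worker = StorageWorker()  # defaults to an in-memory database

entry = PacketLogEntry(
    dtm=dt.now(),
    verb="RQ",
    src="18:000730",             # example device IDs
    dst="01:123456",
    code="3220",
    hdr="3220|RQ|01:123456|00",  # illustrative header string
    ctx=None,
    plk="|",
)

worker.submit_packet(entry)                    # batched INSERT OR REPLACE, worker thread
worker.submit_prune(dt.now() - td(hours=24))   # DELETE ... WHERE dtm < ?, worker thread
worker.flush()                                 # blocks until both items are processed
```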
ramses_rf/system/heat.py CHANGED
@@ -137,7 +137,7 @@ class SystemBase(Parent, Entity):  # 3B00 (multi-relay)
        self._child_id = FF  # NOTE: domain_id
 
        self._app_cntrl: BdrSwitch | OtbGateway | None = None
-       self._heat_demand = None
+       self._heat_demand: dict[str, Any] | None = None
 
    def __repr__(self) -> str:
        return f"{self.ctl.id} ({self._SLUG})"
@@ -217,17 +217,33 @@ class SystemBase(Parent, Entity):  # 3B00 (multi-relay)
        super()._handle_msg(msg)
 
        if msg.code == Code._000C:
-           if msg.payload[SZ_ZONE_TYPE] == DEV_ROLE_MAP.APP and msg.payload.get(
-               SZ_DEVICES
-           ):
-               self._gwy.get_device(
-                   msg.payload[SZ_DEVICES][0], parent=self, child_id=FC
-               )  # sets self._app_cntrl
+           if isinstance(msg.payload, dict):
+               if msg.payload[SZ_ZONE_TYPE] == DEV_ROLE_MAP.APP and msg.payload.get(
+                   SZ_DEVICES
+               ):
+                   self._gwy.get_device(
+                       msg.payload[SZ_DEVICES][0], parent=self, child_id=FC
+                   )  # sets self._app_cntrl
+           else:
+               _LOGGER.warning(
+                   f"{msg!r} < Unexpected payload type for {msg.code}: {type(msg.payload)} (expected dict)"
+               )
            return
 
-       if msg.code == Code._3150:
-           if msg.payload.get(SZ_DOMAIN_ID) == FC and msg.verb in (I_, RP):
-               self._heat_demand = msg.payload
+       if msg.code == Code._3150 and msg.verb in (I_, RP):
+           # 3150 payload can be a dict (old) or list (new, multi-zone)
+           if isinstance(msg.payload, list):
+               if payload := next(
+                   (d for d in msg.payload if d.get(SZ_DOMAIN_ID) == FC), None
+               ):
+                   self._heat_demand = payload
+           elif isinstance(msg.payload, dict):
+               if msg.payload.get(SZ_DOMAIN_ID) == FC:
+                   self._heat_demand = msg.payload
+           else:
+               _LOGGER.warning(
+                   f"{msg!r} < Unexpected payload type for {msg.code}: {type(msg.payload)} (expected list/dict)"
+               )
 
        if self._gwy.config.enable_eavesdrop and not self.appliance_control:
            eavesdrop_appliance_control(msg)
@@ -588,7 +604,12 @@ class ScheduleSync(SystemBase):  # 0006 (+/- 0404?)
        super()._handle_msg(msg)
 
        if msg.code == Code._0006:
-           self._msg_0006 = msg
+           if isinstance(msg.payload, dict):
+               self._msg_0006 = msg
+           else:
+               _LOGGER.warning(
+                   f"{msg!r} < Unexpected payload type for {msg.code}: {type(msg.payload)} (expected dict)"
+               )
 
    async def _schedule_version(self, *, force_io: bool = False) -> tuple[int, bool]:
        """Return the global schedule version number, and an indication if I/O was done.
@@ -706,7 +727,12 @@ class Logbook(SystemBase):  # 0418
        super()._handle_msg(msg)
 
        if msg.code == Code._0418:  # and msg.verb in (I_, RP):
-           self._faultlog.handle_msg(msg)
+           if isinstance(msg.payload, dict):
+               self._faultlog.handle_msg(msg)
+           else:
+               _LOGGER.warning(
+                   f"{msg!r} < Unexpected payload type for {msg.code}: {type(msg.payload)} (expected dict)"
+               )
 
    async def get_faultlog(
        self,
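The 3150 handler above now accepts both payload shapes. A stand-alone illustration of the selection logic, assuming `SZ_DOMAIN_ID` resolves to the string `"domain_id"` and using made-up payload values:

```
FC = "FC"  # heating-demand domain, as in the handler above

def pick_heat_demand(payload):
    # New multi-zone form: a list of dicts, one per domain/zone
    if isinstance(payload, list):
        return next((d for d in payload if d.get("domain_id") == FC), None)
    # Legacy single-domain form: a bare dict
    if isinstance(payload, dict):
        return payload if payload.get("domain_id") == FC else None
    return None  # anything else would be logged as unexpected

assert pick_heat_demand({"domain_id": "FC", "heat_demand": 0.5})["heat_demand"] == 0.5
assert pick_heat_demand(
    [{"domain_id": "07", "heat_demand": 0.0}, {"domain_id": "FC", "heat_demand": 0.2}]
)["heat_demand"] == 0.2
assert pick_heat_demand("not-a-payload") is None
```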
ramses_rf/version.py CHANGED
@@ -1,4 +1,4 @@
 """RAMSES RF - a RAMSES-II protocol decoder & analyser (application layer)."""
 
-__version__ = "0.53.4"
+__version__ = "0.53.6"
 VERSION = __version__
ramses_rf-0.53.4.dist-info/METADATA → ramses_rf-0.53.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ramses_rf
-Version: 0.53.4
+Version: 0.53.6
 Summary: A stateful RAMSES-II protocol decoder & analyser.
 Project-URL: Homepage, https://github.com/ramses-rf/ramses_rf
 Project-URL: Bug Tracker, https://github.com/ramses-rf/ramses_rf/issues
@@ -62,11 +62,11 @@ To install the `ramses_rf` command line client:
 ```
 git clone https://github.com/ramses-rf/ramses_rf
 cd ramses_rf
-pip install -r requirements.txt
+pip install -r requirements/requirements.txt
 pip install -e .
 ```
 
-The CLI is called ``client.py`` and is included in the code root.
+The CLI is called `client.py` and is included in the code root.
 It has options to monitor and parse Ramses-II traffic to screen or a log file, and to parse a file containing Ramses-II messages to the screen.
 See the [client.py CLI wiki page](https://github.com/ramses-rf/ramses_rf/wiki/2.-The-client.py-command-line) for instructions.
 
ramses_rf-0.53.4.dist-info/RECORD → ramses_rf-0.53.6.dist-info/RECORD CHANGED
@@ -8,50 +8,50 @@ ramses_cli/utils/convert.py,sha256=N3LxGe3_0pclijtmYW-ChqCuPTzbkoJA4XNAnoSnBk0,1
 ramses_rf/__init__.py,sha256=AXsCK1Eh9FWeAI9D_zY_2KB0dqrTb9a5TNY1NvyQaDM,1271
 ramses_rf/binding_fsm.py,sha256=fuqvcc9YW-wr8SPH8zadpPqrHAvzl_eeWF-IBtlLppY,26632
 ramses_rf/const.py,sha256=L3z31CZ-xqno6oZp_h-67CB_5tDDqTwSWXsqRtsjMcs,5460
-ramses_rf/database.py,sha256=Fv3Xv6S_7qOf6-biHHKZvntkB8ps_SJvjPlKX0pzGfg,23919
-ramses_rf/dispatcher.py,sha256=YjEU-QrBLo9IfoEhJo2ikg_FxOaMYoWvzelr9Vi-JZ8,11398
-ramses_rf/entity_base.py,sha256=L47P_6CRz3tLDzOzII9AgmueKDb-Bp7Ot3vVsr8jo10,59121
+ramses_rf/database.py,sha256=eARZ8F5lcITK6d_MfvozmMxSGNkiy1kbtAh0NOIHMoc,24066
+ramses_rf/dispatcher.py,sha256=pHNrXOLeAp6i9TUZEPNOB1AevlKkQyUfX2dkr-EzrDw,11517
+ramses_rf/entity_base.py,sha256=Lv4N3dyIRfsz_5Ztgcu4bc49UE-N4c1VuN732_HQp-g,59255
 ramses_rf/exceptions.py,sha256=mt_T7irqHSDKir6KLaf6oDglUIdrw0S40JbOrWJk5jc,3657
-ramses_rf/gateway.py,sha256=3rnKm-OunN-O_T0eS9ZayEvg49WMVe7GF4pJhyGx3Io,30409
+ramses_rf/gateway.py,sha256=BsS3gyFcSOCLuzQ_OxgyqHTcn2wAVsEJaV6B5PbYre0,31087
 ramses_rf/helpers.py,sha256=TNk_QkpIOB3alOp1sqnA9LOzi4fuDCeapNlW3zTzNas,4250
 ramses_rf/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ramses_rf/schemas.py,sha256=X1GAK3kttuLMiSCUDY2s-85fgBxPeU8xiDa6gJ1I5mY,13543
-ramses_rf/storage.py,sha256=lGKUgQzXBUwksmEeHMLoVoKPCMPAWWuzwCXM8G2CKmg,6452
-ramses_rf/version.py,sha256=4QvpG_-UTxyow-YNsbbOguS-J5cqoyPKHelB0WfKMkA,125
+ramses_rf/schemas.py,sha256=k0IN2OvIgNGZ9lGkFMuM81jcjUqWvdGNfaccDBwfh_8,13672
+ramses_rf/storage.py,sha256=ZFUhvgsWRCVS1_r6LL032XFEJwZVp1RAK8Nfba8nf7o,7052
+ramses_rf/version.py,sha256=Qfhbwe6tvYg3scyTH0mknbjnE0tb-XQ19FcHCCdu8CM,125
 ramses_rf/device/__init__.py,sha256=sUbH5dhbYFXSoM_TPFRutpRutBRpup7_cQ9smPtDTy8,4858
 ramses_rf/device/base.py,sha256=Tu5I8Lj7KplfRsIBQAYjilS6YPgTyjpU8qgKugMR2Jk,18281
 ramses_rf/device/heat.py,sha256=CU6GlIgjuYD21braJ_RJlS56zP47TGXNxXnZeavfEMY,54654
 ramses_rf/device/hvac.py,sha256=vdgiPiLtCAGr7CVsGhQl6XuAFkyYdQSE_2AEdCmRl2I,48502
 ramses_rf/system/__init__.py,sha256=uZLKio3gLlBzePa2aDQ1nxkcp1YXOGrn6iHTG8LiNIw,711
 ramses_rf/system/faultlog.py,sha256=GdGmVGT3137KsTlV_nhccgIFEmYu6DFsLTn4S-8JSok,12799
-ramses_rf/system/heat.py,sha256=qQmzgmyHy2x87gHAstn0ee7ZVVOq-GJIfDxCrC-6gFU,39254
+ramses_rf/system/heat.py,sha256=31vCAgazc3x27XdMbz6UWH_nt2y-W483Ud0kA7-qpEI,40522
 ramses_rf/system/schedule.py,sha256=Ts6tdZPTQLV5NkgwA73tPa5QUsnZNIIuYoKC-8VsXDk,18808
 ramses_rf/system/zones.py,sha256=6VbPsOuNbGwBUuiRu8w9D1Q18SHKkuZa2YtKTE5nqlo,37110
-ramses_tx/__init__.py,sha256=sqnjM7pUGJDmec6igTtKViSB8FLX49B5gwhAmcY9ERY,3596
+ramses_tx/__init__.py,sha256=2Ouc5CQJ3O0W4P8BAm5ThST6NbErhrTCp_jxVn816AM,3714
 ramses_tx/address.py,sha256=IuwUwZxykn3fP1UCRcv4D-zbTICBe2FJjDAFX5X6VoI,9108
 ramses_tx/command.py,sha256=drxmpdM4YgyPg4h0QIr1ouxK9QjfeLVgnFpDRox0CCY,125652
-ramses_tx/const.py,sha256=jiE2UaGBJ5agr68EMrcEHWtVz2KMidU7c7rRYCIiaoM,33010
+ramses_tx/const.py,sha256=bkIP8NNGKY2dH37LRFYBdOKM23UZ35kgcsqmS28Kbf0,33158
 ramses_tx/exceptions.py,sha256=FJSU9YkvpKjs3yeTqUJX1o3TPFSe_B01gRGIh9b3PNc,2632
 ramses_tx/fingerprints.py,sha256=nfftA1E62HQnb-eLt2EqjEi_la0DAoT0wt-PtTMie0s,11974
 ramses_tx/frame.py,sha256=GzNsXr15YLeidJYGtk_xPqsZQh4ehDDlUCtT6rTDhT8,22046
-ramses_tx/gateway.py,sha256=kJv3jRI66Ii-kxZO6ItbPODW2oJf-e46ou2JdQt0yTY,11498
+ramses_tx/gateway.py,sha256=tJVyuKjftkcAVVCJTxQ4vZZKiI0KwYwIXA1t8QYclh4,11725
 ramses_tx/helpers.py,sha256=96OvSOWYuMcr89_c-3dRnqHZaMOctCO94uo1hETh3bc,33613
-ramses_tx/logger.py,sha256=1iKRHKUaqHqGd76CkE_6mCVR0sYODtxshRRwfY61fTk,10426
+ramses_tx/logger.py,sha256=EizcFiuDPMf0eVbkfyo_ka2DHz1MsrbzdrYSZoQY5KU,10981
 ramses_tx/message.py,sha256=zsyDQztSUYeqj3-P598LSmy9ODQY2BUCzWxSoZds6bM,13953
 ramses_tx/opentherm.py,sha256=58PXz9l5x8Ou6Fm3y-R_UnGHCYahoi2RKIDdYStUMzk,42378
 ramses_tx/packet.py,sha256=_nzuInS_WhdOI26SYvgsdDqIaDvVNguc2YDwdPOVCbU,7661
-ramses_tx/parsers.py,sha256=CJKdLF1F5KR7MwYxwkwSvrusTONh5FGecj7_eeBWu7A,148609
-ramses_tx/protocol.py,sha256=nBPKCD1tcGp_FiX0qhsY0XoGO_h87w5cYywBjSpum4w,33048
-ramses_tx/protocol_fsm.py,sha256=o9vLvlXor3LkPgsY1zii5P1R01GzYLf_PECDdoxtC24,27520
+ramses_tx/parsers.py,sha256=ALUoi21ewd_GZHvxq4051AcVwETOTgVr5feWaY7zdls,148659
+ramses_tx/protocol.py,sha256=E62vWb8qY7_SB5tb_NcywAED4d9NJJJ-1NgMaK3HG5s,33198
+ramses_tx/protocol_fsm.py,sha256=uT0jLuTsz_6zUJSvdMG200xbYQIFgqfzJ0t3l5bTTTc,27741
 ramses_tx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ramses_tx/ramses.py,sha256=V4LqD6IaohU7TTZp-_f1K2SOCJwzRY0v8_-INESh2cU,53986
-ramses_tx/schemas.py,sha256=Hrmf_q9bAZtkKJzGu6GtUO0QV_-K9i4L99EzGWR13eE,13408
-ramses_tx/transport.py,sha256=RIrcNrJwiKB_xmJLgG4Z--V2d83PLsJnLXZK-WFFgsA,76568
+ramses_tx/ramses.py,sha256=89EFL91zwnArefVcEVw3KoyqF92d3r3aBoJapMNAT0I,54389
+ramses_tx/schemas.py,sha256=Bh877L2lmsrtq86ygQEnlbalcyRSEvfsjRnub-9P6X4,13495
+ramses_tx/transport.py,sha256=W2y6em3PQytTYnREZsm2b2-FQeZQCciY2UprBa8-PM8,77057
 ramses_tx/typed_dicts.py,sha256=w-0V5t2Q3GiNUOrRAWiW9GtSwbta_7luME6GfIb1zhI,10869
 ramses_tx/typing.py,sha256=eF2SlPWhNhEFQj6WX2AhTXiyRQVXYnFutiepllYl2rI,5042
-ramses_tx/version.py,sha256=hutrhdcMJOwR4LO2siGwJHfwUmCNdXAWpBV8-SaqUVo,123
-ramses_rf-0.53.4.dist-info/METADATA,sha256=GNBabuxJgwv1H80fzFeCO31BSn0yI76V0cTeMKp2HjE,4179
-ramses_rf-0.53.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-ramses_rf-0.53.4.dist-info/entry_points.txt,sha256=NnyK29baOCNg8DinPYiZ368h7MTH7bgTW26z2A1NeIE,50
-ramses_rf-0.53.4.dist-info/licenses/LICENSE,sha256=ptVutrtSMr7X-ek6LduiD8Cce4JsNn_8sR8MYlm-fvo,1086
-ramses_rf-0.53.4.dist-info/RECORD,,
+ramses_tx/version.py,sha256=wG4Me_pyFvCD82_aAFqAut5OXMh7AJllw7_gB6uBe5M,123
+ramses_rf-0.53.6.dist-info/METADATA,sha256=RDKvYo4RMR13qU0XyXjxv9HumiHUaZeXfaWQc4SlLNs,4190
+ramses_rf-0.53.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ramses_rf-0.53.6.dist-info/entry_points.txt,sha256=NnyK29baOCNg8DinPYiZ368h7MTH7bgTW26z2A1NeIE,50
+ramses_rf-0.53.6.dist-info/licenses/LICENSE,sha256=ptVutrtSMr7X-ek6LduiD8Cce4JsNn_8sR8MYlm-fvo,1086
+ramses_rf-0.53.6.dist-info/RECORD,,
ramses_tx/__init__.py CHANGED
@@ -7,6 +7,7 @@ from __future__ import annotations
 
 import asyncio
 from functools import partial
+from logging.handlers import QueueListener
 from typing import TYPE_CHECKING, Any
 
 from .address import (
@@ -156,17 +157,19 @@ if TYPE_CHECKING:
     from logging import Logger
 
 
-async def set_pkt_logging_config(**config: Any) -> Logger:
+async def set_pkt_logging_config(**config: Any) -> tuple[Logger, QueueListener | None]:
    """
    Set up ramses packet logging to a file or port.
-   Must run async in executor to prevent HA blocking call opening packet log file (issue #200)
+   Must run async in executor to prevent HA blocking call opening packet log file.
 
    :param config: if file_name is included, opens packet_log file
-   :return: a logging.Logger
+   :return: a tuple (logging.Logger, QueueListener)
    """
    loop = asyncio.get_running_loop()
-   await loop.run_in_executor(None, partial(set_pkt_logging, PKT_LOGGER, **config))
-   return PKT_LOGGER
+   listener = await loop.run_in_executor(
+       None, partial(set_pkt_logging, PKT_LOGGER, **config)
+   )
+   return PKT_LOGGER, listener
 
 
 def extract_known_hgi_id(
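Callers now own the listener's lifecycle, as in the Gateway changes above. A minimal sketch of the new calling convention (the log file name is a placeholder):

```
import asyncio

from ramses_tx import set_pkt_logging_config

async def main() -> None:
    _pkt_logger, listener = await set_pkt_logging_config(
        cc_console=False, file_name="packet.log"  # placeholder path
    )
    if listener:
        listener.start()   # begin draining log records in the listener thread
    try:
        ...                # run the engine / transport here
    finally:
        if listener:
            listener.stop()  # flush pending records before shutdown

asyncio.run(main())
```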
ramses_tx/const.py CHANGED
@@ -20,10 +20,12 @@ DEFAULT_DISABLE_QOS: Final[bool | None] = None
 DEFAULT_WAIT_FOR_REPLY: Final[bool | None] = None
 
 #: Waiting for echo pkt after cmd sent (seconds)
-DEFAULT_ECHO_TIMEOUT: Final[float] = 0.50
+# NOTE: Increased to 3.0s to support high-latency transports (e.g., MQTT)
+DEFAULT_ECHO_TIMEOUT: Final[float] = 3.00
 
 #: Waiting for reply pkt after echo pkt rcvd (seconds)
-DEFAULT_RPLY_TIMEOUT: Final[float] = 0.50
+# NOTE: Increased to 3.0s to support high-latency transports (e.g., MQTT)
+DEFAULT_RPLY_TIMEOUT: Final[float] = 3.00
 DEFAULT_BUFFER_SIZE: Final[int] = 32
 
 #: Total waiting for successful send (seconds)
ramses_tx/gateway.py CHANGED
@@ -79,6 +79,7 @@ class Engine:
        packet_log: PktLogConfigT | None = None,
        block_list: DeviceListT | None = None,
        known_list: DeviceListT | None = None,
+       hgi_id: str | None = None,
        loop: asyncio.AbstractEventLoop | None = None,
        **kwargs: Any,
    ) -> None:
@@ -119,6 +120,10 @@ class Engine:
        self._log_all_mqtt = kwargs.pop(SZ_LOG_ALL_MQTT, False)
        self._kwargs: dict[str, Any] = kwargs  # HACK
 
+       self._hgi_id = hgi_id
+       if self._hgi_id:
+           self._kwargs[SZ_ACTIVE_HGI] = self._hgi_id
+
        self._engine_lock = asyncio.Lock()
        self._engine_state: (
            tuple[_MsgHandlerT | None, bool | None, *tuple[Any, ...]] | None
@@ -135,6 +140,9 @@ class Engine:
        self._set_msg_handler(self._msg_handler)  # sets self._protocol
 
    def __str__(self) -> str:
+       if self._hgi_id:
+           return f"{self._hgi_id} ({self.ser_name})"
+
        if not self._transport:
            return f"{HGI_DEV_ADDR.id} ({self.ser_name})"
 
ramses_tx/logger.py CHANGED
@@ -12,7 +12,12 @@ import shutil
 import sys
 from collections.abc import Callable, Mapping
 from datetime import datetime as dt
-from logging.handlers import TimedRotatingFileHandler as _TimedRotatingFileHandler
+from logging.handlers import (
+    QueueHandler,
+    QueueListener,
+    TimedRotatingFileHandler as _TimedRotatingFileHandler,
+)
+from queue import Queue
 from typing import Any
 
 from .version import VERSION
@@ -239,7 +244,7 @@ def set_pkt_logging(
    file_name: str | None = None,
    rotate_backups: int = 0,
    rotate_bytes: int | None = None,
-) -> None:
+) -> QueueListener | None:
    """Create/configure handlers, formatters, etc.
 
    Parameters:
@@ -255,6 +260,8 @@ def set_pkt_logging(
    for handler in logger.handlers:  # dont use logger.hasHandlers() as not propagating
        logger.removeHandler(handler)
 
+   handlers: list[logging.Handler] = []
+
    if file_name:  # note: this opens the packet_log file IO and may block
        if rotate_bytes:
            rotate_backups = rotate_backups or 2
@@ -273,14 +280,15 @@ def set_pkt_logging(
        handler.setFormatter(logfile_fmt)
        handler.setLevel(logging.INFO)  # .INFO (usually), or .DEBUG
        handler.addFilter(PktLogFilter())  # record.levelno in (.INFO, .WARNING)
-       logger.addHandler(handler)
+       handlers.append(handler)
 
    elif cc_console:
-       logger.addHandler(logging.NullHandler())
+       # logger.addHandler(logging.NullHandler())  # Not needed with QueueHandler
+       pass
 
-   else:
+   elif not cc_console:
        logger.setLevel(logging.CRITICAL)
-       return
+       return None
 
    if cc_console:  # CC: output to stdout/stderr
        console_fmt: ColoredFormatter | Formatter
@@ -297,13 +305,22 @@ def set_pkt_logging(
        handler.setFormatter(console_fmt)
        handler.setLevel(logging.WARNING)  # musr be .WARNING or less
        handler.addFilter(StdErrFilter())  # record.levelno >= .WARNING
-       logger.addHandler(handler)
+       handlers.append(handler)
 
        handler = logging.StreamHandler(stream=sys.stdout)
        handler.setFormatter(console_fmt)
        handler.setLevel(logging.DEBUG)  # must be .INFO or less
        handler.addFilter(StdOutFilter())  # record.levelno < .WARNING
-       logger.addHandler(handler)
+       handlers.append(handler)
+
+   # Use QueueHandler to decouple logging I/O from the main loop (see Issue #397)
+   if handlers:
+       log_queue: Queue[Any] = Queue(-1)
+       listener = QueueListener(log_queue, *handlers, respect_handler_level=True)
+       queue_handler = QueueHandler(log_queue)
+       logger.addHandler(queue_handler)
+   else:
+       return None
 
    extras = {
        "_frame": "",
@@ -311,3 +328,5 @@ def set_pkt_logging(
        "comment": f"ramses_tx {VERSION}",
    }
    logger.warning("", extra=extras)  # initial log line
+
+   return listener
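A stand-alone sketch of the pattern adopted above, using only the standard library: the emitting logger does a fast, non-blocking queue put, while the listener thread performs the slow I/O. The file name is a hypothetical target:

```
import logging
import sys
from logging.handlers import QueueHandler, QueueListener
from queue import Queue

log_queue: Queue = Queue(-1)

file_handler = logging.FileHandler("packet.log")   # hypothetical target file
console_handler = logging.StreamHandler(sys.stdout)

listener = QueueListener(log_queue, file_handler, console_handler, respect_handler_level=True)
listener.start()

logger = logging.getLogger("pkt_demo")
logger.setLevel(logging.INFO)
logger.addHandler(QueueHandler(log_queue))

logger.info("this record is written by the listener thread, not the event loop")

listener.stop()  # drains the queue; the caller then closes/flushes the handlers
```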
ramses_tx/parsers.py CHANGED
@@ -1898,6 +1898,7 @@ def parser_2210(payload: str, msg: Message) -> dict[str, Any]:
    assert payload[80:82] in (
        "01",
        "08",
+       "0C",  # seen on Orcon HCR-400 EcoMax
    ), f"expected byte 40 (01|08), not {payload[80:82]}"
    assert payload[82:] in (
        "00",
ramses_tx/protocol.py CHANGED
@@ -215,6 +215,41 @@ class _BaseProtocol(asyncio.Protocol):
        """Allow the Protocol to send an impersonation alert (stub)."""
        return
 
+   def _patch_cmd_if_needed(self, cmd: Command) -> Command:
+       """Patch the command with the actual HGI ID if it uses the default placeholder.
+
+       Legacy HGI80s (TI 3410) require the default ID (18:000730), or they will
+       silent-fail. However, evofw3 devices prefer the real ID.
+       """
+       # NOTE: accessing private member cmd._addrs to safely patch the source address
+
+       if (
+           self.hgi_id
+           and self._is_evofw3  # Only patch if using evofw3 (not HGI80)
+           and cmd._addrs[0].id == HGI_DEV_ADDR.id
+           and self.hgi_id != HGI_DEV_ADDR.id
+       ):
+           _LOGGER.debug(
+               f"Patching command with active HGI ID: swapped {HGI_DEV_ADDR.id} "
+               f"-> {self.hgi_id} for {cmd._hdr}"
+           )
+
+           # Get current addresses as strings
+           new_addrs = [a.id for a in cmd._addrs]
+
+           # ONLY patch the Source Address (Index 0).
+           # Leave Dest (Index 1/2) alone to avoid breaking tests that expect 18:000730.
+           new_addrs[0] = self.hgi_id
+
+           # Reconstruct the command string with the correct address
+           new_frame = (
+               f"{cmd.verb} {cmd.seqn} {new_addrs[0]} {new_addrs[1]} {new_addrs[2]} "
+               f"{cmd.code} {int(cmd.len_):03d} {cmd.payload}"
+           )
+           return Command(new_frame)
+
+       return cmd
+
    async def send_cmd(
        self,
        cmd: Command,
@@ -249,35 +284,8 @@ class _BaseProtocol(asyncio.Protocol):
        assert gap_duration == DEFAULT_GAP_DURATION
        assert 0 <= num_repeats <= 3  # if QoS, only Tx x1, with no repeats
 
-       # FIX: Patch command with actual HGI ID if it uses the default placeholder
-       # NOTE: HGI80s (TI 3410) require the default ID (18:000730), or they will silent-fail
-
-       if (
-           self.hgi_id
-           and self._is_evofw3  # Only patch if using evofw3 (not HGI80)
-           and cmd._addrs[0].id == HGI_DEV_ADDR.id
-           and self.hgi_id != HGI_DEV_ADDR.id
-       ):
-           # The command uses the default 18:000730, but we know the real ID.
-           # Reconstruct the command string with the correct address.
-
-           _LOGGER.debug(
-               f"Patching command with active HGI ID: swapped {HGI_DEV_ADDR.id} -> {self.hgi_id} for {cmd._hdr}"
-           )
-
-           # Get current addresses as strings
-           # The command uses the default 18:000730, but we know the real ID.
-           # Reconstruct the command string with the correct address.
-
-           # Get current addresses as strings
-           new_addrs = [a.id for a in cmd._addrs]
-
-           # ONLY patch the Source Address (Index 0).
-           # Leave Dest (Index 1/2) alone to avoid breaking tests that expect 18:000730 there.
-           new_addrs[0] = self.hgi_id
-
-           new_frame = f"{cmd.verb} {cmd.seqn} {new_addrs[0]} {new_addrs[1]} {new_addrs[2]} {cmd.code} {int(cmd.len_):03d} {cmd.payload}"
-           cmd = Command(new_frame)
+       # Patch command with actual HGI ID if it uses the default placeholder
+       cmd = self._patch_cmd_if_needed(cmd)
 
        if qos and not self._context:
            _LOGGER.warning(f"{cmd} < QoS is currently disabled by this Protocol")
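The extracted helper rebuilds the raw frame as `verb seqn src dst addr2 code len payload`, patching only the source address. A small illustration with made-up values (the addresses, code and payload are placeholders, not taken from the diff):

```
verb, seqn = " W", "---"
addrs = ["18:000730", "01:123456", "--:------"]  # src, dst, third address (unused)
code, payload = "2349", "0101F40003E8"

addrs[0] = "18:262143"  # swap in the gateway's real ID: source address only

frame = (
    f"{verb} {seqn} {addrs[0]} {addrs[1]} {addrs[2]} "
    f"{code} {len(payload) // 2:03d} {payload}"
)
print(frame)  # ' W --- 18:262143 01:123456 --:------ 2349 006 0101F40003E8'
```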
ramses_tx/protocol_fsm.py CHANGED
@@ -145,13 +145,20 @@ class ProtocolContext:
        # nope, was not successful, so multiplier should be incremented...
        self._multiplier = min(3, old_val + 1)
 
+       if self._cmd_tx_count < 3:
+           level = logging.DEBUG
+       elif self._cmd_tx_count == 3:
+           level = logging.INFO
+       else:
+           level = logging.WARNING
+
        if isinstance(self._state, WantEcho):
-           _LOGGER.warning(
-               f"Timeout expired waiting for echo: {self} (delay={delay})"
+           _LOGGER.log(
+               level, f"Timeout expired waiting for echo: {self} (delay={delay})"
            )
        else:  # isinstance(self._state, WantRply):
-           _LOGGER.warning(
-               f"Timeout expired waiting for reply: {self} (delay={delay})"
+           _LOGGER.log(
+               level, f"Timeout expired waiting for reply: {self} (delay={delay})"
            )
 
        assert isinstance(self.is_sending, bool), (
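Retry noise is now graded by transmit count rather than always logged at WARNING. The same escalation, isolated as a tiny helper (the function name is made up for this sketch):

```
import logging

def retry_log_level(tx_count: int) -> int:
    # Quiet for the first couple of (re)transmissions, then INFO, then WARNING.
    if tx_count < 3:
        return logging.DEBUG
    if tx_count == 3:
        return logging.INFO
    return logging.WARNING

assert [retry_log_level(n) for n in (1, 2, 3, 4)] == [
    logging.DEBUG, logging.DEBUG, logging.INFO, logging.WARNING
]
```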
ramses_tx/ramses.py CHANGED
@@ -1,5 +1,14 @@
 #!/usr/bin/env python3
-"""RAMSES RF - a RAMSES-II protocol decoder & analyser."""
+"""RAMSES RF - a RAMSES-II protocol decoder & analyser.
+
+Contains e.g.:
+:term:`CODES_SCHEMA` the master list of all known Ramses-II Code signatures
+    for both HEAT and HVAC.
+:term:`_DEV_KLASSES_HEAT` defining Codes expected for each HEAT device class (SLUG).
+:term:`_DEV_KLASSES_HVAC` defining Codes expected for each HVAC device class (SLUG).
+:term:`_22F1_MODE_XXX` dicts defining valid fan commands.
+:term:`_2411_PARAMS_SCHEMA` defining HVAC fan parameters.
+"""
 
 # TODO: code a lifespan for most packets
 
ramses_tx/schemas.py CHANGED
@@ -407,6 +407,7 @@ def select_device_filter_mode(
 # 5/5: Gateway (engine) configuration
 
 SZ_DISABLE_SENDING: Final = "disable_sending"
+SZ_AUTOSTART: Final = "autostart"
 SZ_DISABLE_QOS: Final = "disable_qos"
 SZ_ENFORCE_KNOWN_LIST: Final[str] = f"enforce_{SZ_KNOWN_LIST}"
 SZ_EVOFW_FLAG: Final = "evofw_flag"
@@ -418,6 +419,7 @@ SZ_USE_REGEX: Final = "use_regex"
 
 SCH_ENGINE_DICT = {
     vol.Optional(SZ_DISABLE_SENDING, default=False): bool,
+    vol.Optional(SZ_AUTOSTART, default=False): bool,
     vol.Optional(SZ_DISABLE_QOS, default=None): vol.Any(
         None,  # None is selective QoS (e.g. QoS only for bindings, schedule, etc.)
         bool,
ramses_tx/transport.py CHANGED
@@ -1278,8 +1278,9 @@ class PortTransport(_RegHackMixin, _FullTransport, _PortTransportAbstractor):  #
 
        super()._close(exc)
 
-       if self._init_task:
-           self._init_task.cancel()
+       # Use getattr because _init_task may not be set if initialization failed
+       if init_task := getattr(self, "_init_task", None):
+           init_task.cancel()
 
        if self._leaker_task:
            self._leaker_task.cancel()
@@ -1810,6 +1811,7 @@ class CallbackTransport(_FullTransport, _CallbackTransportAbstractor):
        protocol: RamsesProtocolT,
        io_writer: Callable[[str], Awaitable[None]],
        disable_sending: bool = False,
+       autostart: bool = False,
        **kwargs: Any,
    ) -> None:
        """Initialize the callback transport.
@@ -1820,6 +1822,8 @@ class CallbackTransport(_FullTransport, _CallbackTransportAbstractor):
        :type io_writer: Callable[[str], Awaitable[None]]
        :param disable_sending: Whether to disable sending, defaults to False.
        :type disable_sending: bool, optional
+       :param autostart: Whether to start reading immediately, defaults to False.
+       :type autostart: bool, optional
        """
        # Pass kwargs up the chain. _ReadTransport will extract 'loop' if present.
        # _BaseTransport will pass 'loop' to _CallbackTransportAbstractor, which consumes it.
@@ -1834,9 +1838,11 @@ class CallbackTransport(_FullTransport, _CallbackTransportAbstractor):
        # Section 6.1: Object Lifecycle Logging
        _LOGGER.info(f"CallbackTransport created with io_writer={io_writer}")
 
-       # NOTE: connection_made is NOT called here. It must be triggered
-       # externally (e.g. by the Bridge) via the protocol methods once
-       # the external connection is ready.
+       # Handshake: Notify protocol immediately (Safe: idempotent)
+       self._protocol.connection_made(self, ramses=True)
+
+       if autostart:
+           self.resume_reading()
 
    async def write_frame(self, frame: str, disable_tx_limits: bool = False) -> None:
        """Process a frame for transmission by passing it to the external writer.
@@ -1879,19 +1885,19 @@ class CallbackTransport(_FullTransport, _CallbackTransportAbstractor):
        :type dtm: str | None, optional
        """
        _LOGGER.debug(
-           f"Received frame from external source: frame='{frame}', timestamp={dtm}"
+           f"Received frame from external source: frame={repr(frame)}, timestamp={dtm}"
        )
 
-       # Section 4.2: Circuit Breaker implementation (Packet gating)
+       # Circuit Breaker implementation (Packet gating)
        if not self._reading:
            _LOGGER.debug(f"Dropping received frame (transport paused): {repr(frame)}")
            return
 
        dtm = dtm or dt_now().isoformat()
 
-       # Section 6.1: Boundary Logging (Incoming)
+       # Boundary Logging (Incoming)
        _LOGGER.debug(
-           f"Ingesting frame into transport: frame='{frame}', timestamp={dtm}"
+           f"Ingesting frame into transport: frame={repr(frame)}, timestamp={dtm}"
        )
 
        # Pass to the standard processing pipeline
@@ -1949,7 +1955,7 @@ async def transport_factory(
    extra: dict[str, Any] | None = None,
    loop: asyncio.AbstractEventLoop | None = None,
    log_all: bool = False,
-   **kwargs: Any,  # HACK: odd/misc params
+   **kwargs: Any,  # HACK: odd/misc params, inc. autostart
 ) -> RamsesTransportT:
    """Create and return a Ramses-specific async packet Transport.
 
@@ -1980,11 +1986,18 @@ async def transport_factory(
    :raises exc.TransportSourceInvalid: If the packet source is invalid or multiple sources are specified.
    """
 
+   # Extract autostart (default to False if missing), used in transport_constructor only
+   autostart = kwargs.pop("autostart", False)
+
    # If a constructor is provided, delegate entirely to it.
    if transport_constructor:
        _LOGGER.debug("transport_factory: Delegating to external transport_constructor")
        return await transport_constructor(
-           protocol, disable_sending=disable_sending, extra=extra, **kwargs
+           protocol,
+           disable_sending=disable_sending,
+           extra=extra,
+           autostart=autostart,  # <--- Pass it explicitly
+           **kwargs,
        )
 
    # kwargs are specific to a transport. The above transports have:
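The factory change above, in miniature: an optional keyword is popped with a default and then always forwarded explicitly, so a delegated constructor such as `CallbackTransport` receives `autostart` whether or not the caller supplied it. A trivial self-contained sketch of that pattern (the function name is made up):

```
from typing import Any

def forward_with_default(**kwargs: Any) -> dict[str, Any]:
    # Pop the optional flag (defaulting to False), then pass it on explicitly.
    autostart = kwargs.pop("autostart", False)
    return {"autostart": autostart, **kwargs}

assert forward_with_default() == {"autostart": False}
assert forward_with_default(autostart=True, extra=None) == {"autostart": True, "extra": None}
```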
ramses_tx/version.py CHANGED
@@ -1,4 +1,4 @@
 """RAMSES RF - a RAMSES-II protocol decoder & analyser (transport layer)."""
 
-__version__ = "0.53.4"
+__version__ = "0.53.6"
 VERSION = __version__