ramses-rf 0.51.7__py3-none-any.whl → 0.51.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ramses_rf/__init__.py +5 -0
- ramses_rf/database.py +247 -69
- ramses_rf/device/hvac.py +561 -32
- ramses_rf/dispatcher.py +7 -5
- ramses_rf/entity_base.py +1 -1
- ramses_rf/exceptions.py +37 -3
- ramses_rf/gateway.py +1 -1
- ramses_rf/schemas.py +5 -2
- ramses_rf/version.py +1 -1
- {ramses_rf-0.51.7.dist-info → ramses_rf-0.51.9.dist-info}/METADATA +6 -6
- ramses_rf-0.51.9.dist-info/RECORD +55 -0
- ramses_tx/__init__.py +25 -4
- ramses_tx/address.py +1 -1
- ramses_tx/command.py +1449 -138
- ramses_tx/const.py +1 -1
- ramses_tx/frame.py +4 -4
- ramses_tx/gateway.py +1 -1
- ramses_tx/helpers.py +2 -2
- ramses_tx/message.py +20 -14
- ramses_tx/packet.py +1 -1
- ramses_tx/parsers.py +57 -35
- ramses_tx/protocol.py +2 -2
- ramses_tx/protocol_fsm.py +1 -1
- ramses_tx/ramses.py +46 -6
- ramses_tx/schemas.py +3 -0
- ramses_tx/transport.py +9 -7
- ramses_tx/version.py +1 -1
- ramses_rf-0.51.7.dist-info/RECORD +0 -55
- {ramses_rf-0.51.7.dist-info → ramses_rf-0.51.9.dist-info}/WHEEL +0 -0
- {ramses_rf-0.51.7.dist-info → ramses_rf-0.51.9.dist-info}/entry_points.txt +0 -0
- {ramses_rf-0.51.7.dist-info → ramses_rf-0.51.9.dist-info}/licenses/LICENSE +0 -0
ramses_rf/__init__.py
CHANGED
@@ -16,7 +16,9 @@ from typing import TYPE_CHECKING
 
 from ramses_tx import Address, Command, Message, Packet  # noqa: F401
 
+from . import exceptions  # noqa: F401
 from .device import Device  # noqa: F401
+from .exceptions import CommandInvalid  # noqa: F401
 from .gateway import Gateway  # noqa: F401
 from .version import VERSION  # noqa: F401
 
@@ -38,6 +40,7 @@ __all__ = [
     #
     "Address",
     "Command",
+    "CommandInvalid",
    "Device",
    "Message",
    "Packet",
@@ -50,6 +53,8 @@ __all__ = [
    "Code",
    "IndexT",
    "VerbT",
+    #
+    "exceptions",
 ]
 
 _LOGGER = logging.getLogger(__name__)
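The practical effect of this hunk is that the `exceptions` module and `CommandInvalid` are now re-exported from the package root. A minimal sketch of the two import spellings this enables (the assert is illustrative, not taken from the package's tests):

# The top-level re-export is new in 0.51.9; both names resolve to the same class.
from ramses_rf import CommandInvalid, exceptions
from ramses_rf.exceptions import CommandInvalid as _CommandInvalid

assert CommandInvalid is exceptions.CommandInvalid is _CommandInvalid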
ramses_rf/database.py
CHANGED
@@ -8,7 +8,7 @@ import logging
 import sqlite3
 from collections import OrderedDict
 from datetime import datetime as dt, timedelta as td
-from typing import NewType, TypedDict
+from typing import Any, NewType, TypedDict
 
 from ramses_tx import Message
 
@@ -24,6 +24,7 @@ class Params(TypedDict):
    code: str | None
    ctx: str | None
    hdr: str | None
+    plk: str | None
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -33,50 +34,87 @@ def _setup_db_adapters() -> None:
     """Set up the database adapters and converters."""
 
     def adapt_datetime_iso(val: dt) -> str:
-        """Adapt datetime.datetime to timezone-naive ISO 8601 datetime."""
+        """Adapt datetime.datetime to timezone-naive ISO 8601 datetime to match _msgs_ dtm keys."""
         return val.isoformat(timespec="microseconds")
 
     sqlite3.register_adapter(dt, adapt_datetime_iso)
 
     def convert_datetime(val: bytes) -> dt:
-        """Convert ISO 8601 datetime to datetime.datetime object."""
+        """Convert ISO 8601 datetime to datetime.datetime object to import dtm in msg_db."""
         return dt.fromisoformat(val.decode())
 
-    sqlite3.register_converter("
+    sqlite3.register_converter("DTM", convert_datetime)
+
+
+def payload_keys(parsed_payload: list[dict] | dict) -> str:  # type: ignore[type-arg]
+    """
+    Copy payload keys for fast query check.
+
+    :param parsed_payload: pre-parsed message payload dict
+    :return: string of payload keys, separated by the | char
+    """
+    _keys: str = "|"
+
+    def append_keys(ppl: dict) -> str:  # type: ignore[type-arg]
+        _ks: str = ""
+        for k, v in ppl.items():
+            if (
+                k not in _ks and k not in _keys and v is not None
+            ):  # ignore keys with None value
+                _ks += k + "|"
+        return _ks
+
+    if isinstance(parsed_payload, list):
+        for d in parsed_payload:
+            _keys += append_keys(d)
+    elif isinstance(parsed_payload, dict):
+        _keys += append_keys(parsed_payload)
+    return _keys
 
 
 class MessageIndex:
-    """A simple in-memory SQLite3 database for indexing messages.
+    """A simple in-memory SQLite3 database for indexing RF messages.
+    Index holds the latest message to & from all devices by header
+    (example of a hdr: 000C|RP|01:223036|0208)."""
 
-    def __init__(self) -> None:
+    def __init__(self, maintain: bool = True) -> None:
         """Instantiate a message database/index."""
 
-        self.
+        self.maintain = maintain
+        self._msgs: MsgDdT = (
+            OrderedDict()
+        )  # stores all messages for retrieval. Filled in housekeeping loop.
 
-
+        # Connect to a SQLite DB in memory
+        self._cx = sqlite3.connect(
+            ":memory:", detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
+        )
+        # detect_types should retain dt type on store/retrieve
         self._cu = self._cx.cursor()  # Create a cursor
 
-        _setup_db_adapters()  #
+        _setup_db_adapters()  # DTM adapter/converter
         self._setup_db_schema()
 
-        self.
-
-
+        if self.maintain:
+            self._lock = asyncio.Lock()
+            self._last_housekeeping: dt = None  # type: ignore[assignment]
+            self._housekeeping_task: asyncio.Task[None] = None  # type: ignore[assignment]
 
         self.start()
 
     def __repr__(self) -> str:
-        return f"MessageIndex({len(self._msgs)} messages)"
+        return f"MessageIndex({len(self._msgs)} messages)"  # or msg_db.count()
 
     def start(self) -> None:
         """Start the housekeeper loop."""
 
-        if self.
-
+        if self.maintain:
+            if self._housekeeping_task and not self._housekeeping_task.done():
+                return
 
-
-
-
+            self._housekeeping_task = asyncio.create_task(
+                self._housekeeping_loop(), name=f"{self.__class__.__name__}.housekeeper"
+            )
 
     def stop(self) -> None:
         """Stop the housekeeper loop."""
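The new `payload_keys()` helper above is what populates the `plk` column added later in this diff. A standalone copy of the function (so the snippet runs on its own), applied to invented sample payloads, shows the strings it produces:

# Standalone copy of payload_keys() from this diff, minus type hints, for illustration.
def payload_keys(parsed_payload):
    _keys = "|"

    def append_keys(ppl):
        _ks = ""
        for k, v in ppl.items():
            if k not in _ks and k not in _keys and v is not None:  # skip duplicates and None values
                _ks += k + "|"
        return _ks

    if isinstance(parsed_payload, list):
        for d in parsed_payload:
            _keys += append_keys(d)
    elif isinstance(parsed_payload, dict):
        _keys += append_keys(parsed_payload)
    return _keys

print(payload_keys({"domain_id": "FC", "heat_demand": 0.74}))
# |domain_id|heat_demand|
print(payload_keys([{"zone_idx": "01", "temperature": 21.5}, {"zone_idx": "02", "temperature": None}]))
# |zone_idx|temperature|   (repeated keys and None values are not added again)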
@@ -95,27 +133,29 @@ class MessageIndex:
     def _setup_db_schema(self) -> None:
         """Set up the message database schema.
 
-        Fields:
+        messages TABLE Fields:
 
         - dtm message timestamp
-        - verb
+        - verb " I", "RQ" etc.
        - src message origin address
        - dst message destination address
        - code packet code aka command class e.g. _0005, _31DA
        - ctx message context, created from payload as index + extra markers (Heat)
        - hdr packet header e.g. 000C|RP|01:223036|0208 (see: src/ramses_tx/frame.py)
+        - plk the keys stored in the parsed payload, separated by the | char
         """
 
        self._cu.execute(
            """
            CREATE TABLE messages (
-                dtm
+                dtm DTM NOT NULL PRIMARY KEY,
                verb TEXT(2) NOT NULL,
                src TEXT(9) NOT NULL,
                dst TEXT(9) NOT NULL,
                code TEXT(4) NOT NULL,
-                ctx TEXT
-                hdr TEXT NOT NULL UNIQUE
+                ctx TEXT,
+                hdr TEXT NOT NULL UNIQUE,
+                plk TEXT NOT NULL
            )
            """
        )
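The `dtm DTM NOT NULL PRIMARY KEY` column relies on the adapter/converter pair registered in `_setup_db_adapters()` together with the `detect_types` flags passed to `sqlite3.connect()`. A minimal standalone sketch of that round-trip, with an invented table name and timestamp:

import sqlite3
from datetime import datetime as dt

# Same wiring as _setup_db_adapters(): store dt as ISO 8601 text, and convert
# any column declared as DTM back into a datetime object on SELECT.
sqlite3.register_adapter(dt, lambda val: val.isoformat(timespec="microseconds"))
sqlite3.register_converter("DTM", lambda val: dt.fromisoformat(val.decode()))

cx = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
cx.execute("CREATE TABLE t (dtm DTM NOT NULL PRIMARY KEY)")

now = dt(2022, 9, 8, 13, 43, 31, 536862)
cx.execute("INSERT INTO t VALUES (?)", (now,))
(restored,) = cx.execute("SELECT dtm FROM t").fetchone()
assert restored == now and isinstance(restored, dt)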
@@ -130,10 +170,16 @@ class MessageIndex:
         self._cx.commit()
 
     async def _housekeeping_loop(self) -> None:
-        """Periodically remove stale messages from the index
+        """Periodically remove stale messages from the index,
+        unless self.maintain is False."""
 
         async def housekeeping(dt_now: dt, _cutoff: td = td(days=1)) -> None:
-
+            """
+            Delete all messages from the using the MessageIndex older than a given delta.
+            :param dt_now: current timestamp
+            :param _cutoff: the oldest timestamp to retain, default is 24 hours ago
+            """
+            dtm = dt_now - _cutoff  # .isoformat(timespec="microseconds") < needed?
 
             self._cu.execute("SELECT dtm FROM messages WHERE dtm => ?", (dtm,))
             rows = self._cu.fetchall()
@@ -154,30 +200,34 @@ class MessageIndex:
         while True:
             self._last_housekeeping = dt.now()
             await asyncio.sleep(3600)
+            _LOGGER.info("Starting next MessageIndex housekeeping")
             await housekeeping(self._last_housekeeping)
 
     def add(self, msg: Message) -> Message | None:
-        """
-
-
-
-
-
+        """
+        Add a single message to the MessageIndex.
+        Logs a warning if there is a duplicate dtm.
+        :returns: any message that was removed because it had the same header
+        """
+        # TODO: eventually, may be better to use SqlAlchemy
 
         dup: tuple[Message, ...] = tuple()  # avoid UnboundLocalError
         old: Message | None = None  # avoid UnboundLocalError
 
-        try:  # TODO: remove, or
+        try:  # TODO: remove this, or apply only when source is a real packet log?
             # await self._lock.acquire()
             dup = self._delete_from(  # HACK: because of contrived pkt logs
-                dtm=msg.dtm
+                dtm=msg.dtm  # stored as such with DTM formatter
             )
-            old = self._insert_into(msg)  # will delete old msg by hdr
+            old = self._insert_into(msg)  # will delete old msg by hdr (not dtm!)
 
-        except
+        except (
+            sqlite3.Error
+        ):  # UNIQUE constraint failed: ? messages.dtm or .hdr (so: HACK)
             self._cx.rollback()
 
         else:
+            # _msgs dict requires a timestamp reformat
             dtm: DtmStrT = msg.dtm.isoformat(timespec="microseconds")  # type: ignore[assignment]
             self._msgs[dtm] = msg
 
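As the comments in this hunk note, `add()` returns whatever message it displaced because the header already had an entry, while a duplicate dtm only triggers a warning. A toy model of that latest-per-header contract, using plain tuples instead of `Message` objects (the header is taken from the class docstring, the timestamps and payloads are invented):

from datetime import datetime as dt

index: dict[str, tuple[dt, str]] = {}  # hdr -> latest (dtm, payload)

def add(hdr: str, dtm: dt, payload: str) -> tuple[dt, str] | None:
    old = index.get(hdr)     # an existing entry with the same hdr is superseded
    index[hdr] = (dtm, payload)
    return old               # mirrors MessageIndex.add() handing back `old`

add("000C|RP|01:223036|0208", dt(2022, 9, 8, 13, 40, 52), "first reply")
old = add("000C|RP|01:223036|0208", dt(2022, 9, 8, 13, 43, 31), "second reply")
print(old)  # (datetime.datetime(2022, 9, 8, 13, 40, 52), 'first reply')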
@@ -186,48 +236,106 @@ class MessageIndex:
 
         if dup:
             _LOGGER.warning(
-                "Overwrote dtm for %s: %s (contrived log?)",
+                "Overwrote dtm (%s) for %s: %s (contrived log?)",
+                msg.dtm,
+                msg._pkt._hdr,
+                dup[0]._pkt,
             )
+        if old is not None:
+            _LOGGER.info("Old msg replaced: %s", old)
 
         return old
 
+    def add_record(self, src: str, code: str = "", verb: str = "") -> None:
+        """
+        Add a single record to the MessageIndex with timestamp now() and no Message contents.
+        """
+        # Used by OtbGateway init, via entity_base.py
+        dtm: DtmStrT = DtmStrT(dt.strftime(dt.now(), "%Y-%m-%dT%H:%M:%S"))
+        hdr = f"{code}|{verb}|{src}|00"  # dummy record has no contents
+
+        dup = self._delete_from(hdr=hdr)
+
+        sql = """
+            INSERT INTO messages (dtm, verb, src, dst, code, ctx, hdr, plk)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+        """
+        try:
+            self._cu.execute(
+                sql,
+                (
+                    dtm,
+                    verb,
+                    src,
+                    src,
+                    code,
+                    None,
+                    hdr,
+                    "|",
+                ),
+            )
+        except sqlite3.Error:
+            self._cx.rollback()
+
+        if dup:  # expected when more than one heat system in schema
+            _LOGGER.debug("Replaced record with same hdr: %s", hdr)
+
     def _insert_into(self, msg: Message) -> Message | None:
-        """
+        """
+        Insert a message into the index.
+        :returns: any message replaced (by same hdr)
+        """
+        assert msg._pkt._hdr is not None, "Skipping: Packet has no hdr: {msg._pkt}"
+
+        if msg._pkt._ctx is True:
+            msg_pkt_ctx = "True"
+        elif msg._pkt._ctx is False:
+            msg_pkt_ctx = "False"
+        else:
+            msg_pkt_ctx = msg._pkt._ctx  # can be None
 
-
+        _old_msgs = self._delete_from(hdr=msg._pkt._hdr)
 
         sql = """
-            INSERT INTO messages (dtm, verb, src, dst, code, ctx, hdr)
-            VALUES (?, ?, ?, ?, ?, ?, ?)
+            INSERT INTO messages (dtm, verb, src, dst, code, ctx, hdr, plk)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
         """
 
         self._cu.execute(
             sql,
             (
                 msg.dtm,
-                msg.verb,
+                str(msg.verb),
                 msg.src.id,
                 msg.dst.id,
-                msg.code,
-
+                str(msg.code),
+                msg_pkt_ctx,
                 msg._pkt._hdr,
+                payload_keys(msg.payload),
             ),
         )
+        _LOGGER.info(f"Added {msg} to gwy.msg_db")
 
-        return
+        return _old_msgs[0] if _old_msgs else None
 
     def rem(
-        self, msg: Message | None = None, **kwargs: str
+        self, msg: Message | None = None, **kwargs: str | dt
     ) -> tuple[Message, ...] | None:
         """Remove a set of message(s) from the index.
 
-
+        :returns: any messages that were removed.
         """
-
-
+        # _LOGGER.debug(f"SQL REM msg={msg} bool{bool(msg)} kwargs={kwargs} bool(kwargs)")
+        # SQL REM
+        # msg=|| 02:044328 |  |  I | heat_demand | FC || {'domain_id': 'FC', 'heat_demand': 0.74}
+        # boolTrue
+        # kwargs={}
+        # bool(kwargs)
+
+        if not bool(msg) ^ bool(kwargs):
             raise ValueError("Either a Message or kwargs should be provided, not both")
         if msg:
-            kwargs["dtm"] = msg.dtm.isoformat(timespec="microseconds")
+            kwargs["dtm"] = msg.dtm  # .isoformat(timespec="microseconds")
 
         msgs = None
         try:  # make this operation atomic, i.e. update self._msgs only on success
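For reference, this is the shape of the placeholder row that the new `add_record()` writes: a second-resolution timestamp, a synthetic header, `dst` mirroring `src`, no `ctx`, and a bare "|" for `plk`. The device id, code and verb below are invented values, not taken from the diff:

from datetime import datetime as dt

src, code, verb = "10:123456", "3220", "RQ"       # hypothetical values
dtm = dt.strftime(dt.now(), "%Y-%m-%dT%H:%M:%S")  # note: no microseconds
hdr = f"{code}|{verb}|{src}|00"                   # dummy record has no contents

row = (dtm, verb, src, src, code, None, hdr, "|")
print(hdr)  # 3220|RQ|10:123456|00
print(row)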
@@ -247,8 +355,9 @@ class MessageIndex:
 
         return msgs
 
-    def _delete_from(self, **kwargs: str) -> tuple[Message, ...]:
-        """Remove message(s) from the index
+    def _delete_from(self, **kwargs: bool | dt | str) -> tuple[Message, ...]:
+        """Remove message(s) from the index.
+        :returns: any messages that were removed"""
 
         msgs = self._select_from(**kwargs)
 
@@ -259,48 +368,117 @@ class MessageIndex:
 
         return msgs
 
-    def get(
-
+    def get(
+        self, msg: Message | None = None, **kwargs: bool | dt | str
+    ) -> tuple[Message, ...]:
+        """Get a set of message(s) from the index."""
 
         if not (bool(msg) ^ bool(kwargs)):
             raise ValueError("Either a Message or kwargs should be provided, not both")
+
         if msg:
-            kwargs["dtm"] = msg.dtm.isoformat(timespec="microseconds")
+            kwargs["dtm"] = msg.dtm  # .isoformat(timespec="microseconds")
 
         return self._select_from(**kwargs)
 
-    def
-
+    def qry_dtms(self, **kwargs: bool | dt | str) -> list[Any]:
+        # tweak kwargs as stored in SQLite, inverse from _insert_into():
+        kw = {key: value for key, value in kwargs.items() if key != "ctx"}
+        if "ctx" in kwargs:
+            if isinstance(kwargs["ctx"], str):
+                kw["ctx"] = kwargs["ctx"]
+            elif kwargs["ctx"]:
+                kw["ctx"] = "True"
+            else:
+                kw["ctx"] = "False"
 
         sql = "SELECT dtm FROM messages WHERE "
-        sql += " AND ".join(f"{k} = ?" for k in
+        sql += " AND ".join(f"{k} = ?" for k in kw)
 
-        self._cu.execute(sql, tuple(
+        self._cu.execute(sql, tuple(kw.values()))
+        return self._cu.fetchall()
+
+    def contains(self, **kwargs: bool | dt | str) -> bool:
+        """
+        Check if the MessageIndex contains at least 1 record that matches the provided fields.
+        :param kwargs: (exact) SQLite table field_name: required_value pairs
+        :return: True if at least one message fitting the given conditions is present, False when qry returned empty
+        """
+        # adapted from _select_from()
+
+        return len(self.qry_dtms(**kwargs)) > 0
 
-
+    def _select_from(self, **kwargs: bool | dt | str) -> tuple[Message, ...]:
+        """Select message(s) using the MessageIndex.
+        :param kwargs: (exact) SQLite table field_name: required_value pairs
+        :returns: a tuple of qualifying messages"""
+
+        return tuple(
+            self._msgs[row[0].isoformat(timespec="microseconds")]
+            for row in self.qry_dtms(**kwargs)
+        )
 
     def qry(self, sql: str, parameters: tuple[str, ...]) -> tuple[Message, ...]:
-        """
+        """Get a tuple of messages from the index, given sql and parameters."""
 
         if "SELECT" not in sql:
             raise ValueError(f"{self}: Only SELECT queries are allowed")
 
         self._cu.execute(sql, parameters)
 
-
+        lst: list[Message] = []
+        # stamp = list(self._msgs)[0] if len(self._msgs) > 0 else "N/A"  # for debug
+        for row in self._cu.fetchall():
+            ts: DtmStrT = row[0].isoformat(timespec="microseconds")
+            # _LOGGER.debug(
+            #     f"QRY Msg key raw: {row[0]} Reformatted: {ts} _msgs stamp format: {stamp}"
+            # )
+            # QRY Msg key raw: 2022-09-08 13:43:31.536862 Reformatted: 2022-09-08T13:43:31.536862
+            # _msgs stamp format: 2022-09-08T13:40:52.447364
+            if ts in self._msgs:
+                lst.append(self._msgs[ts])
+            else:  # happens in tests with artificial msg from heat
+                _LOGGER.warning("MessageIndex ts %s not in device messages", ts)
+        return tuple(lst)
+
+    def qry_field(
+        self, sql: str, parameters: tuple[str, ...]
+    ) -> list[tuple[dt | str, str]]:
+        """
+        Get a list of message field values from the index, given sql and parameters.
+        """
 
-
-
+        if "SELECT" not in sql:
+            raise ValueError(f"{self}: Only SELECT queries are allowed")
 
-
-        # return tuple(self._msgs[row[0]] for row in self._cu.fetchall())
+        self._cu.execute(sql, parameters)
 
-        return
-
-
+        return self._cu.fetchall()
+
+    def all(self, include_expired: bool = False) -> tuple[Message, ...]:
+        """Get all messages from the index."""
+
+        self._cu.execute("SELECT * FROM messages")
+
+        lst: list[Message] = []
+        # stamp = list(self._msgs)[0] if len(self._msgs) > 0 else "N/A"
+        for row in self._cu.fetchall():
+            ts: DtmStrT = row[0].isoformat(timespec="microseconds")
+            # _LOGGER.debug(
+            #     f"ALL Msg key raw: {row[0]} Reformatted: {ts} _msgs stamp format: {stamp}"
+            # )
+            # ALL Msg key raw: 2022-05-02 10:02:02.744905
+            # Reformatted: 2022-05-02T10:02:02.744905
+            # _msgs stamp format: 2022-05-02T10:02:02.744905
+            if ts in self._msgs:
+                # if include_expired or not self._msgs[ts].HAS_EXPIRED:  # not working
+                lst.append(self._msgs[ts])
+            else:  # happens in tests with dummy msg from heat init
+                _LOGGER.warning("MessageIndex ts %s not in device messages", ts)
+        return tuple(lst)
 
     def clr(self) -> None:
-        """Clear the message index (remove all messages)."""
+        """Clear the message index (remove indexes of all messages)."""
 
         self._cu.execute("DELETE FROM messages")
         self._cx.commit()