enode-host 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- enode_host/__init__.py +1 -0
- enode_host/async_socket.py +165 -0
- enode_host/backup/c-wing3.py +226 -0
- enode_host/backup/c.py +226 -0
- enode_host/backup/esp_mesh.py +136 -0
- enode_host/backup/gui_comps.py +113 -0
- enode_host/backup/gui_wx_bk.py +270 -0
- enode_host/backup/load_file.py +27 -0
- enode_host/backup/mesh.py +0 -0
- enode_host/backup/mesh_bk.py +0 -0
- enode_host/backup/s.py +151 -0
- enode_host/backup/sandbox.py +93 -0
- enode_host/backup/shm.py +262 -0
- enode_host/backup/shmtools.py +70 -0
- enode_host/backup/smarts.py +243 -0
- enode_host/backup/test_wxpython_choice.py +49 -0
- enode_host/backup/view-wing3.py +494 -0
- enode_host/backup/wx_example.py +55 -0
- enode_host/backup/wx_test01.py +43 -0
- enode_host/cli.py +192 -0
- enode_host/config.py +8 -0
- enode_host/constants.py +25 -0
- enode_host/framed_mesh.py +237 -0
- enode_host/gui_framed.py +207 -0
- enode_host/model.py +1415 -0
- enode_host/protocol.py +311 -0
- enode_host/psd_recursive.py +139 -0
- enode_host/queues.py +11 -0
- enode_host/resampling.py +206 -0
- enode_host/shm_sigproc.py +47 -0
- enode_host/storage.py +93 -0
- enode_host/timestamping.py +79 -0
- enode_host/types.py +38 -0
- enode_host/view.py +1233 -0
- enode_host-0.1.0.dist-info/METADATA +81 -0
- enode_host-0.1.0.dist-info/RECORD +39 -0
- enode_host-0.1.0.dist-info/WHEEL +5 -0
- enode_host-0.1.0.dist-info/entry_points.txt +2 -0
- enode_host-0.1.0.dist-info/top_level.txt +1 -0
enode_host/model.py
ADDED
@@ -0,0 +1,1415 @@
#
#
# 2025-02-28 understand and make sure it works
# node_ID vs Node_ID vs nodeID
#

import logging
from collections import deque
import queue as pyqueue
# import mesh
import sys

import threading
try:
    from . import queues
except ImportError:
    import queues
from numpy import array, append, where, logical_and, delete, floor, empty
from scipy.interpolate import interp1d

try:
    from . import shm_sigproc
except ImportError:
    import shm_sigproc
import glob
import os

import re
import numpy as np
import configparser
import pandas as pd
from pandas import DataFrame, to_datetime
import struct
import datetime
try:
    from . import timestamping
except ImportError:
    import timestamping
import importlib
importlib.reload(timestamping)
try:
    from . import resampling
except ImportError:
    import resampling
import time
try:
    from . import psd_recursive
except ImportError:
    import psd_recursive
try:
    import scipy.io as sio
except Exception:
    sio = None
try:
    import hdf5storage as h5s
except Exception:
    h5s = None
try:
    from . import storage
except ImportError:
    import storage
import os
import pandas as pd
from enum import IntEnum, auto

try:
    from .constants import (
        PACKET_SIZE,
        SPEED_COL,
        RSSI_COL,
        PPS_COL,
        LEVEL_COL,
        RT_STREAM_HD5_DIR,
        RT_STREAM_MERGED_DIR,
        SD_STREAM_DIR,
        SD_STREAM_BIN_DIR,
        SD_STREAM_HD5_DIR,
    )
except ImportError:
    from constants import (
        PACKET_SIZE,
        SPEED_COL,
        RSSI_COL,
        PPS_COL,
        LEVEL_COL,
        RT_STREAM_HD5_DIR,
        RT_STREAM_MERGED_DIR,
        SD_STREAM_DIR,
        SD_STREAM_BIN_DIR,
        SD_STREAM_HD5_DIR,
    )
try:
    from .types import NodeStatus
except Exception:
    try:
        from enode_host.types import NodeStatus
    except Exception:
        NodeStatus = None

df = []
psd = []

logger = logging.getLogger("main." + __name__)
logger.setLevel(logging.DEBUG)

CONFIG_FILE = 'config.ini'

class NodeType(IntEnum):
    ACC = 0
    DISP = 1
    STR = 2
    TILT = 3
    TMP = 4
    VEH = 5

class ChannelType(IntEnum):
    CONN_RPT = 0
    CMD_RESP = 1
    PPS_CC_EPOCH = 2
    ACC_CC_DAQ = 3

class CmdType(IntEnum):
    SET_MODE = 0
    SD_STORAGE_CLEAR = 1
    NOTIFY_PARENT = 2
    NOTIFY_CHILDREN = 3
    SET_STREAM = auto()
    STREAM_START = auto()
    STREAM_STOP = auto()
    GNSS_ON = auto()
    GNSS_OFF = auto()
    CLEAR_SD = auto()
    CHANGE_FS = auto()

class NodeResp(IntEnum):
    OK = 0
    FAIL = 1

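# The IntEnum values above double as on-the-wire command codes: auto() continues
# from the last explicit value, so SET_STREAM = 4, STREAM_START = 5, STREAM_STOP = 6,
# GNSS_ON = 7, GNSS_OFF = 8, CLEAR_SD = 9 and CHANGE_FS = 10. A minimal sketch of
# how a command frame is built (this mirrors notifying_node_identified_parent()
# further below; the meaning of the two trailing zero bytes is an assumption):
#
#     >>> import struct
#     >>> struct.pack('=BHBB', CmdType.NOTIFY_PARENT, 5, 0, 0)
#     b'\x02\x05\x00\x00\x00'
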
class Model():

    def __init__(self, parent):

        self.controller = parent
        self.ui_log = None
        self.options = []  # config.ini
        self.node_nums = []  # [1:3 7:8]
        self.node_nums_map = {}
        self.NodeIDs = []  # ['ACC1', 'ACC2', ...]
        self.node_status = {}
        # the two lines below may be removed?
        # self.status_table_columns = ['Node ID', 'Connection', 'L', 'RSSI', 'PPS', 'Data']
        # self.node_status = {}
        self.mesh_status_data = pd.DataFrame(columns=['Node ID',
                                                      'Connection',
                                                      'Sensor',
                                                      'DAQ Mode',
                                                      'DAQ',
                                                      'Stream',
                                                      SPEED_COL,
                                                      LEVEL_COL,
                                                      'Parent',
                                                      RSSI_COL,
                                                      'Children',
                                                      PPS_COL,
                                                      'CMD',
                                                      # 'Children_nodeIDs',  # invisible in table
                                                      'PPS-time',        # invisible in table
                                                      'PPS-flash-time',  # invisible in table
                                                      'DAQ-time',        # invisible in table
                                                      'Parent MAC',      # invisible in table
                                                      'Self MAC',        # invisible in table
                                                      'nodeID',          # invisible in table
                                                      'ConnRptTime',     # invisible in table
                                                      'node_number',
                                                      'node_type',
                                                      ])
        self.plot_mutex = {}    # dict of mutexes guarding the plot x/y data dicts
        self.timestamper = {}   # dict of timestampers {1: timestamping.Timestamping(), ...}
        self.resampler = {}     # dict of resamplers {1: resampling.Resampling(62.5), ...}
        self.timehistory_xdata = {}  # {1: [datetime], ...}
        self.timehistory_ydata = {}  # {1: ndarray, ...}
        self.timehistory_xlim = []
        self.full_range_xlim = []
        self.merge_full_range = False
        self.psder = {}  # {1: psd_recursive.PSD_Recursive(1024, 62.5), ...}
        self.psd_xdata = []  # frequency = 1d ndarray
        self.psd_ydata = {}  # psd amplitude {1: ndarray}
        self.merged_timehistory_xdata = {}
        self.merged_timehistory_ydata = {}
        self.merged_timehistory_xlim = []
        self.merged_psd_xdata = []
        self.merged_psd_ydata = {}
        self.merged_node_ids = []
        self.merged_fs = None
        self.gui_dirty = False
        self.plot_dirty = False
        self.plot_dirty_version = 0
        self.conn_report_queue = pyqueue.Queue(maxsize=1000)
        self.data_queue = pyqueue.Queue(maxsize=5000)
        self.speed_bytes = {}
        self.node_fs = {}
        self.speed_last_calc = datetime.datetime.now(datetime.timezone.utc)
        self.timespan_length = 30
        self._pps_count = 0
        self._ts_count = 0
        self._last_rate_log = time.monotonic()
        self._plot_dirty_set = 0
        self._last_plot_debug = time.monotonic()
        self.enable_rate_logging = False
        self.enable_plot_logging = False
        self.pps_flash_sec = 0.3
        self.pps_flash_until = {}
        self.pps_flash_lock = threading.Lock()
        self._rate_timer = threading.Thread(target=self._rate_logger, daemon=True)
        self._rate_timer.start()

        self.Storage = storage.Storage(600)  # arg = file_length_in_sec
        self.sd_stream_dir = SD_STREAM_BIN_DIR
        os.makedirs(self.sd_stream_dir, exist_ok=True)
        self.sd_stream_hd5_dir = SD_STREAM_HD5_DIR
        os.makedirs(self.sd_stream_hd5_dir, exist_ok=True)
        self.sd_stream_merged_dir = os.path.join(SD_STREAM_DIR, "merged")
        os.makedirs(self.sd_stream_merged_dir, exist_ok=True)
        os.makedirs(RT_STREAM_HD5_DIR, exist_ok=True)
        os.makedirs(RT_STREAM_MERGED_DIR, exist_ok=True)
        self._sd_stream_handles = {}
        self._sd_stream_lock = threading.Lock()
        self._sd_stream_active = set()
        self.gnss_positions = {}

        self.load_config()
        self.parse_node_nums_txt()
        self.init_mesh_status_data()
        self.init_other_data()

        # Thread for framed connection reports
        self.conn_report_thread = threading.Thread(target=self._conn_report_worker, daemon=True)
        self.conn_report_thread.start()

        self.data_thread = threading.Thread(target=self._data_worker, daemon=True)
        self.data_thread.start()

        # Thread for detecting disconnected nodes without connection reporting
        self.timer = threading.Timer(10, self.on_timer)
        self.timer.start()

        self.acc_model_labels = {
            0: "",
            1: "ADXL355",
            2: "M352",
        }
        self.daq_mode_labels = {
            "ADXL355": [
                "0: 7.813/31.25",
                "1: 15.625/62.5",
                "2: 31.25/125",
            ],
            "M352": [
                "0: 09/50",
                "1: 16/100",
                "2: 16/200",
                "3: 60/200",
            ],
        }

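    # Threading model, as wired up above: four daemon workers run alongside the GUI.
    # _conn_report_worker drains conn_report_queue, _data_worker drains data_queue,
    # _rate_logger prints per-second counters, and on_timer (kicked off once via
    # threading.Timer) polls for stale connection reports. Producers therefore only
    # call the enqueue_* methods; the DataFrame mutation happens on the worker side.
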
    @staticmethod
    def _node_type_label(node_type) -> str:
        try:
            node_type_int = int(node_type)
        except (TypeError, ValueError):
            return "UNK"
        return {
            0: "ACC",
            1: "DISP",
            2: "STR",
            3: "TILT",
            4: "TMP",
            5: "VEH",
        }.get(node_type_int, f"N{node_type_int}")

    @classmethod
    def make_node_key(cls, node_type, node_number) -> str:
        return f"{cls._node_type_label(node_type)}{int(node_number)}"

    @staticmethod
    def _parse_node_number(node_key: str) -> int:
        match = re.search(r"(\d+)$", str(node_key))
        if match:
            return int(match.group(1))
        return -1

    def _node_number_from_key(self, node_key: str) -> int:
        condition = self.mesh_status_data['nodeID'] == node_key
        if condition.any():
            value = self.mesh_status_data.loc[condition, 'node_number'].iloc[0]
            try:
                return int(value)
            except (TypeError, ValueError):
                pass
        return self._parse_node_number(node_key)

    def _node_type_from_key(self, node_key: str) -> str:
        condition = self.mesh_status_data['nodeID'] == node_key
        if condition.any():
            value = self.mesh_status_data.loc[condition, 'node_type'].iloc[0]
            if isinstance(value, str) and value:
                return value
        match = re.match(r"([A-Za-z]+)", str(node_key))
        return match.group(1) if match else "UNK"

    def _acc_model_label(self, acc_model: int) -> str:
        return self.acc_model_labels.get(int(acc_model), "")

    def _format_daq_mode(self, acc_model_label: str, daq_mode: int) -> str:
        try:
            mode = int(daq_mode)
        except (TypeError, ValueError):
            return ""
        choices = self.daq_mode_labels.get(acc_model_label, [])
        if 0 <= mode < len(choices):
            return choices[mode]
        return str(mode)

    @staticmethod
    def _extract_fs_from_daq_label(daq_mode_label: str):
        if not daq_mode_label:
            return None
        match = re.search(r"/\s*([0-9.]+)", str(daq_mode_label))
        if not match:
            return None
        try:
            return float(match.group(1))
        except ValueError:
            return None

    def _update_sampling_rate(self, node_id: str, fs: float):
        if fs is None or fs <= 0:
            return
        prev = self.node_fs.get(node_id)
        if prev is not None and abs(prev - fs) < 1e-6:
            return
        self.node_fs[node_id] = fs
        if node_id in self.resampler:
            self.resampler[node_id].set_fs(fs)
        if node_id in self.psder:
            self.psder[node_id].set_fs(fs)

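    # Worked example of the label round trip above: a node reporting acc_model=1,
    # daq_mode=1 yields _acc_model_label(1) == "ADXL355" and
    # _format_daq_mode("ADXL355", 1) == "1: 15.625/62.5"; the regex in
    # _extract_fs_from_daq_label() then pulls the number after the slash, so the
    # output rate handed to _update_sampling_rate() is 62.5 Hz.
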
    def load_config(self):
        config = configparser.ConfigParser()

        if not os.path.exists(CONFIG_FILE):
            # Return default options if config file doesn't exist
            self.options = {
                'acc_nums_txt': '[1:3]',
                'tmp_nums_txt': '[]',
                'str_nums_txt': '[]',
                'veh_nums_txt': '[]',
                'option2': 'default_value2'
            }
            return

        config.read(CONFIG_FILE)
        acc_fallback = config.get('Settings', 'node_nums_txt', fallback='[1:3]')
        self.options = {
            'acc_nums_txt': config.get('Settings', 'acc_nums_txt', fallback=acc_fallback),
            'tmp_nums_txt': config.get('Settings', 'tmp_nums_txt', fallback='[]'),
            'str_nums_txt': config.get('Settings', 'str_nums_txt', fallback='[]'),
            'veh_nums_txt': config.get('Settings', 'veh_nums_txt', fallback='[]'),
            'option2': config.get('Settings', 'option2', fallback='default_value2')
        }

    def save_config(self):
        config = configparser.ConfigParser()

        config['Settings'] = {
            'node_nums_txt': self.options['acc_nums_txt'],
            'acc_nums_txt': self.options['acc_nums_txt'],
            'tmp_nums_txt': self.options['tmp_nums_txt'],
            'str_nums_txt': self.options['str_nums_txt'],
            'veh_nums_txt': self.options['veh_nums_txt'],
            'option2': self.options['option2']
        }

        with open(CONFIG_FILE, 'w') as configfile:
            config.write(configfile)

    def parse_node_nums_txt(self):
        """
        Parse a MATLAB-style expression into a Python list.
        Supports ranges (e.g., 1:4) and concatenation (e.g., [1:4 7]).
        """
        def parse_expr(expr):
            expr = (expr or "").strip()
            if not expr:
                return []
            expr = expr.strip('[]')
            if not expr:
                return []
            parts = expr.split()
            result = []
            for part in parts:
                if ':' in part:
                    start, end = map(int, part.split(':'))
                    result.extend(range(start, end + 1))
                else:
                    result.append(int(part))
            return result

        node_map = {
            "ACC": parse_expr(self.options.get("acc_nums_txt", "")),
            "TMP": parse_expr(self.options.get("tmp_nums_txt", "")),
            "STR": parse_expr(self.options.get("str_nums_txt", "")),
            "VEH": parse_expr(self.options.get("veh_nums_txt", "")),
        }
        self.node_nums_map = node_map
        all_nums = sorted({n for nums in node_map.values() for n in nums})
        if all_nums:
            self.node_nums = np.array(all_nums)
        logger.info('node_nums=%s', self.node_nums)

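    # Examples for the parser above, following its docstring:
    #   parse_expr("[1:4 7]")  -> [1, 2, 3, 4, 7]
    #   parse_expr("[1:3]")    -> [1, 2, 3]
    #   parse_expr("[]")       -> []
    # Note that MATLAB's stepped ranges (e.g. 1:2:9) are not handled here: a part
    # with two colons makes part.split(':') yield three items and raises ValueError.
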
    def init_mesh_status_data(self):

        self.NodeIDs = []
        self.node_status = {}
        self.mesh_status_data = self.mesh_status_data[0:0]  # Empty all the rows
        type_order = ("ACC", "TMP", "STR", "VEH")
        for node_type in type_order:
            for node_num in sorted(set(self.node_nums_map.get(node_type, []))):
                node_key = f"{node_type}{node_num}"
                nodeID = node_key
                NodeID = node_key
                self.NodeIDs.append(NodeID)
                if NodeStatus is not None:
                    self.node_status[nodeID] = NodeStatus(
                        node_id=nodeID,
                        node_label=NodeID,
                        connection="waiting",
                    )

                new_row = pd.DataFrame({'Node ID': [NodeID],
                                        'Connection': ['waiting'],
                                        'Sensor': [''],
                                        'DAQ Mode': [''],
                                        'DAQ': [''],
                                        'Stream': [''],
                                        SPEED_COL: [''],
                                        LEVEL_COL: [''],
                                        'Parent': [''],
                                        RSSI_COL: [''],
                                        'Children': [''],
                                        PPS_COL: [''],
                                        'CMD': [''],
                                        # 'Children_nodeIDs': [''],
                                        'PPS-time': [''],
                                        'PPS-flash-time': [''],
                                        'DAQ-time': [''],
                                        'Parent MAC': [''],
                                        'Self MAC': [''],
                                        'nodeID': nodeID,
                                        'ConnRptTime': [None],
                                        'node_number': [node_num],
                                        'node_type': [node_type],
                                        })
                self.mesh_status_data = pd.concat([self.mesh_status_data, new_row], ignore_index=True)

        logger.info(self.NodeIDs)

    def init_other_data(self):

        self.plot_mutex.clear()
        self.timestamper.clear()
        self.resampler.clear()
        self.timehistory_xdata.clear()
        self.timehistory_ydata.clear()
        self.timehistory_xlim = []
        self.full_range_xlim = []
        self.merge_full_range = False
        self.psder.clear()
        self.psd_xdata = []
        self.psd_ydata.clear()
        self.speed_bytes = {}
        self.speed_last_value = {}
        self.speed_last_rx = {}
        self.speed_hist = {}
        self.speed_window_sec = 5.0
        self.speed_last_calc = datetime.datetime.now(datetime.timezone.utc)

        for index, row in self.mesh_status_data.iterrows():
            nodeID = row['nodeID']
            self.timehistory_xdata[nodeID] = []  # empty((0), dtype=float)
            self.timehistory_ydata[nodeID] = empty((0, 3), dtype=float)
            self.plot_mutex[nodeID] = threading.Lock()
            self.timestamper[nodeID] = timestamping.Timestamping()
            self.resampler[nodeID] = resampling.Resampling(50)
            self.psder[nodeID] = psd_recursive.PSD_Recursive(1024, 50)
            self.node_fs[nodeID] = 50
            self.speed_bytes[nodeID] = 0
            self.speed_last_value[nodeID] = None
            self.speed_last_rx[nodeID] = None
            self.speed_hist[nodeID] = deque()
            self.pps_flash_until[nodeID] = None

    def enqueue_conn_report(self, nodeID, level, parent_mac, self_mac, rssi, acc_model: int = 0, daq_mode: int = 0, daq_on: int = 0, stream_status: int = 0, notify: bool = False):
        try:
            self.conn_report_queue.put_nowait(
                {
                    "nodeID": nodeID,
                    "level": level,
                    "parent_mac": parent_mac,
                    "self_mac": self_mac,
                    "rssi": rssi,
                    "acc_model": acc_model,
                    "daq_mode": daq_mode,
                    "daq_on": daq_on,
                    "stream_status": stream_status,
                    "notify": notify,
                }
            )
        except pyqueue.Full:
            logger.warning("conn_report_queue full; dropping report for node %s", nodeID)

    def _conn_report_worker(self):
        while True:
            item = self.conn_report_queue.get()
            self.handle_conn_report(
                nodeID=item["nodeID"],
                level=item["level"],
                parent_mac=item["parent_mac"],
                self_mac=item["self_mac"],
                rssi=item["rssi"],
                acc_model=item.get("acc_model", 0),
                daq_mode=item.get("daq_mode", 0),
                daq_on=item.get("daq_on", 0),
                stream_status=item.get("stream_status", 0),
                notify=item["notify"],
            )

    def enqueue_acc_batch(self, nodeID, samples):
        try:
            self.data_queue.put_nowait(("acc_batch", nodeID, samples))
        except pyqueue.Full:
            logger.warning("data_queue full; dropping acc batch for node %s", nodeID)

    def record_rx_bytes(self, nodeID, nbytes, kind: str = "data"):
        if kind not in ("data", "sd"):
            return
        if nodeID in self.speed_bytes:
            self.speed_bytes[nodeID] += nbytes
            self.speed_last_rx[nodeID] = datetime.datetime.now(datetime.timezone.utc)
            hist = self.speed_hist.get(nodeID)
            if hist is not None:
                hist.append((self.speed_last_rx[nodeID], nbytes))

    def enqueue_pps(self, nodeID, cc, epoch):
        try:
            self.data_queue.put_nowait(("pps", nodeID, cc, epoch))
        except pyqueue.Full:
            logger.warning("data_queue full; dropping pps for node %s", nodeID)

    def _data_worker(self):
        while True:
            item = self.data_queue.get()
            kind = item[0]
            if kind == "acc_batch":
                _, nodeID, samples = item
                for sample in samples:
                    self.handle_acc_sample(nodeID, sample.cc, sample.ax, sample.ay, sample.az)
            elif kind == "pps":
                _, nodeID, cc, epoch = item
                self.handle_pps(nodeID, cc, epoch)

    def update_speed(self, now_utc):
        dt = (now_utc - self.speed_last_calc).total_seconds()
        if dt <= 0:
            return
        for nodeID in list(self.speed_hist.keys()):
            condition = self._node_condition(nodeID)
            connected = False
            if condition is not None:
                try:
                    connected = self.mesh_status_data.loc[condition, "Connection"].iloc[0] == "connected"
                except Exception:
                    connected = False
            if not connected:
                self._set_node_fields(nodeID, mark_dirty=False, **{SPEED_COL: ""})
            else:
                hist = self.speed_hist.get(nodeID)
                if hist is None:
                    self._set_node_fields(nodeID, mark_dirty=False, **{SPEED_COL: "0.0"})
                else:
                    window_start = now_utc - datetime.timedelta(seconds=self.speed_window_sec)
                    while hist and hist[0][0] < window_start:
                        hist.popleft()
                    total = sum(n for _, n in hist)
                    if total == 0:
                        self._set_node_fields(nodeID, mark_dirty=False, **{SPEED_COL: "0.0"})
                    else:
                        # sliding-window throughput in kB/s
                        kbps = (total / 1024.0) / self.speed_window_sec
                        self._set_node_fields(nodeID, mark_dirty=False, **{SPEED_COL: f"{kbps:.1f}"})
        self.speed_last_calc = now_utc
        self.gui_dirty = True

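    # Example of the window arithmetic above: with speed_window_sec = 5.0, a node
    # whose (time, nbytes) entries inside the last 5 s total 51,200 bytes shows
    # (51200 / 1024) / 5 = 10.0 in the speed column; entries older than the window
    # are popped from the left of the deque before the sum is taken.
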
    def _time_delay_correction(self, ts, node_number):
        if node_number < 17:
            dt = datetime.timedelta(microseconds=(47.59) * 1000)
        else:
            dt = datetime.timedelta(microseconds=(63.875 + 17) * 1000)
        return [t_ - dt for t_ in ts]

    def get_time_window(self, t1):
        tspan = self.timespan_length
        return [
            datetime.datetime.fromtimestamp(
                floor(t1.timestamp() / tspan) * tspan, tz=datetime.timezone.utc
            ),
            datetime.datetime.fromtimestamp(
                floor(t1.timestamp() / tspan + 1) * tspan, tz=datetime.timezone.utc
            ),
        ]

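    # get_time_window() snaps the plot window to a fixed grid: with
    # timespan_length = 30, a sample at 12:34:56 UTC falls in [12:34:30, 12:35:00),
    # i.e. floor(t/30)*30 and floor(t/30 + 1)*30. The fixed delays subtracted in
    # _time_delay_correction() (47.59 ms vs 80.875 ms) appear to be per-hardware
    # latencies, keyed on the same node-number split at 17 that drives the axis
    # remap in handle_acc_sample() below.
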
    def handle_pps(self, nodeID, cc, epoch_time):
        try:
            epoch_time_datetime = datetime.datetime.fromtimestamp(epoch_time, tz=datetime.timezone.utc)
        except (OSError, OverflowError, ValueError):
            return
        node_number = self._node_number_from_key(nodeID)

        self._set_node_fields(
            nodeID,
            **{
                "PPS-time": epoch_time_datetime,
                "PPS-flash-time": epoch_time_datetime,
            },
        )
        with self.pps_flash_lock:
            self.pps_flash_until[nodeID] = epoch_time_datetime + datetime.timedelta(seconds=self.pps_flash_sec)

        try:
            self._pps_count += 1
            ts_, ys = self.timestamper[nodeID].push_cc_pps(cc, epoch_time)
            ts = self._time_delay_correction(ts_, node_number)
            if len(ts) > 0:
                self._ts_count += len(ts)
                self.plot_dirty = True
                self.plot_dirty_version += 1
                self._plot_dirty_set += 1
                with self.plot_mutex[nodeID]:
                    self.timehistory_xdata[nodeID] += ts
                    self.timehistory_ydata[nodeID] = append(self.timehistory_ydata[nodeID], ys, axis=0)

                    if self.merge_full_range:
                        window = self.full_range_xlim if self.full_range_xlim else None
                    else:
                        window = self.get_time_window(ts[-1])
                    self.timehistory_xlim = window
                    if window:
                        index = [i for i, t in enumerate(self.timehistory_xdata[nodeID]) if t > window[0]]
                        self.timehistory_xdata[nodeID] = [self.timehistory_xdata[nodeID][i] for i in index]
                        self.timehistory_ydata[nodeID] = self.timehistory_ydata[nodeID][index, :]

                [trs, yrs] = self.resampler[nodeID].push(ts, ys)
                self.Storage.push(nodeID, trs, yrs)

                self.psder[nodeID].push(array(yrs), array(trs))
                # print(trs[-1])

                if self.psder[nodeID].isUpdated:
                    f, asd = self.psder[nodeID].get_asd()
                    self.psd_xdata = f
                    self.psd_ydata[nodeID] = asd
        except (OSError, OverflowError, ValueError):
            # re-raise so timestamping failures surface in the data worker
            raise

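    # Note on the cc/PPS pipeline: handle_acc_sample() only pushes raw (cc, ax, ay, az)
    # tuples into the per-node Timestamping object, while push_cc_pps(cc, epoch) returns
    # the (ts, ys) it can now convert to UTC. That is why plotting, resampling, Storage
    # and PSD updates all happen here in handle_pps() rather than per acceleration sample.
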
    def _ensure_node_row(self, node_id: str) -> None:
        if (self.mesh_status_data['nodeID'] == node_id).any():
            return
        node_num = self._parse_node_number(node_id)
        node_type = self._node_type_from_key(node_id)
        if NodeStatus is not None:
            self.node_status[node_id] = NodeStatus(
                node_id=node_id,
                node_label=node_id,
                connection="waiting",
            )
        new_row = pd.DataFrame({
            'Node ID': [node_id],
            'Connection': ['waiting'],
            'Sensor': [''],
            'DAQ Mode': [''],
            'DAQ': [''],
            'Stream': [''],
            SPEED_COL: [''],
            LEVEL_COL: [''],
            'Parent': [''],
            RSSI_COL: [''],
            'Children': [''],
            PPS_COL: [''],
            'CMD': [''],
            'PPS-time': [''],
            'PPS-flash-time': [''],
            'DAQ-time': [''],
            'Parent MAC': [''],
            'Self MAC': [''],
            'nodeID': node_id,
            'ConnRptTime': [None],
            'node_number': [node_num],
            'node_type': [node_type],
        })
        self.mesh_status_data = pd.concat([self.mesh_status_data, new_row], ignore_index=True)

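    # SD file record layout, as read back by merge_sd_files() below (rec_size = 21 bytes):
    #   byte 0      record type: 0 = PPS, 1 = ACC sample
    #   bytes 1-8   uint64 cycle counter (little-endian, "<Q")
    #   bytes 9-16  int64 epoch time        (type 0, "<q")
    #   bytes 9-20  3 x float32 ax, ay, az  (type 1, "<fff")
    # Filenames follow "<node>-YYYY_MM_DD_HH_MM.bin", matching _sd_stream_path().
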
    def merge_sd_files(self) -> None:
        sd_dir = self.sd_stream_dir
        if not os.path.isdir(sd_dir):
            self._ui_log(f"[merge] sd dir missing: {sd_dir}")
            return

        pattern = re.compile(r"^(?P<node>[^-]+)-(?P<ts>\d{4}_\d{2}_\d{2}_\d{2}_\d{2})\.bin$")
        files = []
        for name in os.listdir(sd_dir):
            match = pattern.match(name)
            if not match:
                continue
            node_id = match.group("node")
            try:
                ts = datetime.datetime.strptime(match.group("ts"), "%Y_%m_%d_%H_%M").replace(
                    tzinfo=datetime.timezone.utc
                )
            except ValueError:
                continue
            files.append((node_id, ts, os.path.join(sd_dir, name)))

        if not files:
            self._ui_log("[merge] no sd files found")
            return

        node_ids = sorted({node_id for node_id, _, _ in files})
        for node_id in node_ids:
            self._ensure_node_row(node_id)

        self.init_other_data()
        self._ui_log(f"[merge] merging {len(files)} files for {len(node_ids)} nodes")

        files.sort(key=lambda item: (item[0], item[1]))
        merged_resampled = {node_id: {"t": [], "y": []} for node_id in node_ids}
        merged_ranges = {node_id: {"min": None, "max": None} for node_id in node_ids}
        rec_size = 21
        min_ts = None
        max_ts = None

        for node_id, ts_label, path in files:
            self._ui_log(f"[merge] {node_id} {os.path.basename(path)}")
            node_number = self._node_number_from_key(node_id)
            try:
                with open(path, "rb") as handle:
                    buf = b""
                    while True:
                        chunk = handle.read(rec_size * 1024)
                        if not chunk:
                            break
                        buf += chunk
                        nrec = len(buf) // rec_size
                        if nrec == 0:
                            continue
                        for idx in range(nrec):
                            rec = buf[idx * rec_size : (idx + 1) * rec_size]
                            rec_type = rec[0]
                            cc = struct.unpack_from("<Q", rec, 1)[0]
                            if rec_type == 0:
                                epoch = struct.unpack_from("<q", rec, 9)[0]
                                ts_list, ys = self.timestamper[node_id].push_cc_pps(cc, epoch)
                                ts_list = self._time_delay_correction(ts_list, node_number)
                                if ts_list:
                                    with self.plot_mutex[node_id]:
                                        self.timehistory_xdata[node_id] += ts_list
                                        self.timehistory_ydata[node_id] = append(
                                            self.timehistory_ydata[node_id], ys, axis=0
                                        )
                                    trs, yrs = self.resampler[node_id].push(ts_list, ys)
                                    if trs:
                                        merged_resampled[node_id]["t"].extend(trs)
                                        merged_resampled[node_id]["y"].extend(yrs)
                                        node_range = merged_ranges[node_id]
                                        if node_range["min"] is None or trs[0] < node_range["min"]:
                                            node_range["min"] = trs[0]
                                        if node_range["max"] is None or trs[-1] > node_range["max"]:
                                            node_range["max"] = trs[-1]
                                        self.psder[node_id].push(array(yrs), array(trs))
                                        if self.psder[node_id].isUpdated:
                                            f, asd = self.psder[node_id].get_asd()
                                            self.psd_xdata = f
                                            self.psd_ydata[node_id] = asd
                                    if min_ts is None or ts_list[0] < min_ts:
                                        min_ts = ts_list[0]
                                    if max_ts is None or ts_list[-1] > max_ts:
                                        max_ts = ts_list[-1]
                            elif rec_type == 1:
                                ax, ay, az = struct.unpack_from("<fff", rec, 9)
                                self.timestamper[node_id].push_cc_acc([cc, ax, ay, az])
                        buf = buf[nrec * rec_size:]
                    if buf:
                        self._ui_log(f"[merge] {node_id} leftover bytes: {len(buf)}")
            except OSError as exc:
                self._ui_log(f"[merge] failed to read {path}: {exc}")

        chunk_sec = 600
        for node_id, data in merged_resampled.items():
            if not data["t"]:
                continue
            buckets = {}
            for t_, y_ in zip(data["t"], data["y"]):
                ts = int(t_.timestamp())
                grid = (ts // chunk_sec) * chunk_sec
                bucket = buckets.setdefault(grid, {"t": [], "y": []})
                bucket["t"].append(t_)
                bucket["y"].append(y_)
            for grid in sorted(buckets.keys()):
                bucket = buckets[grid]
                if not bucket["t"]:
                    continue
                start_ts = datetime.datetime.fromtimestamp(grid, tz=datetime.timezone.utc)
                name = f"{node_id}-{start_ts.strftime('%Y_%m%d_%H%M')}.hd5"
                path = os.path.join(self.sd_stream_hd5_dir, name)
                df = DataFrame(bucket["y"], columns=["X", "Y", "Z"])
                df.index = to_datetime(bucket["t"])
                df.to_hdf(path, key="df", mode="w")
                self._ui_log(f"[merge] hd5 saved: {path}")

        # merged file across nodes
        merged_frames = []
        for node_id, data in merged_resampled.items():
            if not data["t"]:
                continue
            df_node = DataFrame(data["y"], columns=["X", "Y", "Z"])
            df_node.index = to_datetime(data["t"])
            df_node = df_node.rename(columns={
                "X": f"{node_id}-X",
                "Y": f"{node_id}-Y",
                "Z": f"{node_id}-Z",
            })
            merged_frames.append(df_node)

        if merged_frames:
            merged_df = pd.concat(merged_frames, axis=1).dropna().sort_index()
            if not merged_df.empty:
                os.makedirs(self.sd_stream_merged_dir, exist_ok=True)
                base_name = os.path.join(
                    self.sd_stream_merged_dir,
                    "accm-{}".format(merged_df.index[0].strftime("%Y_%m%d_%H%M")),
                )
                hd5_path = f"{base_name}.hd5"
                merged_df.to_hdf(hd5_path, key="df")
                self._ui_log(f"[merge] merged hd5 saved: {hd5_path}")
                # export MAT alongside merged hd5
                self._build_merged_plot_data(merged_df)
                self._export_merged_mat(merged_df, base_name)
        if min_ts and max_ts:
            self.full_range_xlim = [min_ts, max_ts]
            self.timehistory_xlim = [min_ts, max_ts]
            self.merge_full_range = True
        self.plot_dirty = True
        self.plot_dirty_version += 1
        self._plot_dirty_set += 1
        self.gui_dirty = True
        self._ui_log("[merge] complete")

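    # The 600 s bucketing above aligns per-node output files to a wall-clock grid:
    # a resampled sample at 12:07:31 lands in the bucket starting at 12:00:00, so
    # every node's "<node>-YYYY_MMDD_HHMM.hd5" chunk covers the same ten-minute
    # window, and the cross-node merge (concat on axis=1 followed by dropna) keeps
    # only the instants where all nodes have data.
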
    def _rate_logger(self):
        while True:
            time.sleep(1.0)
            if self.enable_rate_logging:
                self._ui_log(
                    f"[rate] pps/s={self._pps_count} "
                    f"timestamped_samples/s={self._ts_count} "
                    f"plot_dirty={self.plot_dirty} "
                    f"plot_dirty_set/s={self._plot_dirty_set} "
                    f"plot_version={self.plot_dirty_version}"
                )
            self._pps_count = 0
            self._ts_count = 0
            self._plot_dirty_set = 0
            if self.enable_plot_logging:
                self._log_plot_state()

    def _log_plot_state(self):
        now = time.monotonic()
        if now - self._last_plot_debug < 1.0:
            return
        self._last_plot_debug = now
        max_node = None
        max_len = 0
        max_last = None
        for node_id, xs in self.timehistory_xdata.items():
            try:
                n = len(xs)
                if n > max_len:
                    max_len = n
                    max_node = node_id
                    max_last = xs[-1] if n else None
            except Exception:
                continue
        if max_node is not None:
            print(f"[plot] node={max_node} points={max_len:,} last={max_last}")
        else:
            print("[plot] no timehistory_xdata")

    def handle_acc_sample(self, nodeID, cc, ax, ay, az):
        self._set_node_fields(nodeID, **{"DAQ-time": datetime.datetime.now()})

        node_number = self._node_number_from_key(nodeID)
        if node_number < 17:
            self.timestamper[nodeID].push_cc_acc([cc, ax, ay, az])
        else:
            # nodes numbered >= 17: remap axes (x, y, z) -> (-y, x, z)
            self.timestamper[nodeID].push_cc_acc([cc, -ay, ax, az])

    def _sd_stream_path(self, node_id, file_time):
        ts = datetime.datetime.fromtimestamp(file_time, tz=datetime.timezone.utc)
        name = ts.strftime("%Y_%m_%d_%H_%M")
        return os.path.join(self.sd_stream_dir, f"{node_id}-{name}.bin")

    def handle_sd_chunk(self, node_id, file_time, offset, data: bytes):
        key = (node_id, file_time)
        path = self._sd_stream_path(node_id, file_time)
        with self._sd_stream_lock:
            fh = self._sd_stream_handles.get(key)
            if fh is None:
                mode = "r+b" if os.path.exists(path) else "wb"
                fh = open(path, mode)
                self._sd_stream_handles[key] = fh
            fh.seek(offset)
            fh.write(data)
        if node_id not in self._sd_stream_active:
            self._sd_stream_active.add(node_id)
            self._set_node_fields(node_id, **{"CMD": "SD streaming"})

    def handle_sd_done(self, node_id, file_time, status: int):
        with self._sd_stream_lock:
            if status == 0:
                key = (node_id, file_time)
                fh = self._sd_stream_handles.pop(key, None)
                if fh:
                    fh.close()
            elif status == 1:
                keys = [k for k in self._sd_stream_handles.keys() if k[0] == node_id]
                for key in keys:
                    fh = self._sd_stream_handles.pop(key, None)
                    if fh:
                        fh.close()
        if status == 1:
            self._sd_stream_active.discard(node_id)
            self._set_node_fields(node_id, **{"CMD": "SD stream done"})

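    # Chunk writes above are offset-addressed (seek + write), so a retransmitted or
    # out-of-order chunk simply lands at its own offset in the file. In
    # handle_sd_done(), status 0 closes the single (node, file) handle while
    # status 1 closes every open handle for the node and marks the whole SD
    # transfer finished.
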
    def update_gnss_position(self, node_id, lat, lon, fix_mode, valid):
        try:
            lat_f = float(lat)
            lon_f = float(lon)
        except (TypeError, ValueError):
            return
        self.gnss_positions[node_id] = {
            "lat": lat_f,
            "lon": lon_f,
            "fix_mode": int(fix_mode),
            "valid": int(valid),
            "ts": datetime.datetime.now(datetime.timezone.utc),
        }

    def notifying_node_identified_parent(self, nodeID, parent_node_number):

        logger.info('notifying parent = %s', parent_node_number)
        msg = struct.pack('=BHBB', CmdType.NOTIFY_PARENT, parent_node_number, 0, 0)
        self.controller.mesh.sockets[nodeID].send(msg)
        return

    def notifying_node_identified_children(self, nodeID, child_nodeIDs):

        # NOTIFYING THE NODE OF CHILDREN
        logger.info('notifying children')
        children_bits = 0
        for child_nodeID in child_nodeIDs:
            children_bits |= (1 << (child_nodeID - 1))
        children_bits &= 0xFFFFFFFF
        msg = struct.pack('=BI', CmdType.NOTIFY_CHILDREN, children_bits)
        self.controller.mesh.sockets[nodeID].send(msg)
        return

    def isParent(self, self_MAC, parent_MAC):

        self_mac = [int(x, 16) for x in self_MAC.split(":")]
        parent_mac = [int(x, 16) for x in parent_MAC.split(":")]
        self_mac[5] += 1
        return self_mac == parent_mac

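    # isParent() appears to rely on the ESP32 convention that a device's softAP MAC
    # is its station MAC plus one in the last octet: a child whose reported parent
    # MAC is "aa:bb:cc:dd:ee:02" is attached to the node whose self (station) MAC is
    # "aa:bb:cc:dd:ee:01". The children bitmask packs node numbers as bits, e.g.
    # child nodes [1, 3] -> 0b101 = 5 in the '=BI' payload. Note the +1 is not
    # carried across octets, so a station MAC ending in ff never matches.
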
    def handle_conn_report(self, nodeID, level, parent_mac, self_mac, rssi, acc_model: int = 0, daq_mode: int = 0, daq_on: int = 0, stream_status: int = 0, notify: bool = True):
        logger.info('Conn Rpt: L={}, par_MAC={}, self_MAC={}, RSSI={}'.format(level, parent_mac, self_mac, rssi))

        condition = self._node_condition(nodeID)
        if condition is None:
            return

        acc_model_label = self._acc_model_label(acc_model)
        daq_mode_label = self._format_daq_mode(acc_model_label, daq_mode) if acc_model_label else ""
        daq_status = "On" if int(daq_on) == 1 else "Off"
        stream_label = {0: "Off", 1: "RT", 2: "SD"}.get(int(stream_status), "")
        prev_connection = self.mesh_status_data.loc[condition, "Connection"].iloc[0]
        initial_connect = prev_connection != "connected"
        fs = self._extract_fs_from_daq_label(daq_mode_label)
        if fs:
            self._update_sampling_rate(nodeID, fs)
        fields = {
            "Connection": "connected",
            "ConnRptTime": datetime.datetime.now(),
            LEVEL_COL: level,
            "Parent MAC": parent_mac,
            "Self MAC": self_mac,
            RSSI_COL: rssi,
            "Sensor": acc_model_label,
            "DAQ Mode": daq_mode_label,
            "DAQ": daq_status,
            "Stream": stream_label,
        }
        if initial_connect:
            fields[SPEED_COL] = "0.0"
        self._set_node_fields(
            nodeID,
            **fields,
        )
        if initial_connect:
            self.speed_last_value[nodeID] = 0.0
            self.speed_last_rx[nodeID] = None

        parent_node_key = None
        parent_node_number = None
        parent_label = None
        if level == 1:
            parent_label = 'AP'
        else:
            for index, row in self.mesh_status_data.iterrows():
                if len(row['Self MAC']) > 0 and self.isParent(row['Self MAC'], parent_mac):
                    parent_node_key = row['nodeID']
                    parent_node_number = row.get('node_number')
                    parent_label = row['Node ID']
                    break
        if parent_label is not None:
            self.mesh_status_data.loc[condition, 'Parent'] = parent_label
        if notify:
            if parent_node_number is not None:
                try:
                    parent_node_number = int(parent_node_number)
                except (TypeError, ValueError):
                    parent_node_number = None
            if parent_node_number is not None:
                self.notifying_node_identified_parent(nodeID, parent_node_number)

        child_nodeIDs = []
        for index, row in self.mesh_status_data.iterrows():
            if len(row['Parent MAC']) > 0 and self.isParent(self_mac, row['Parent MAC']):
                try:
                    child_nodeIDs.append(int(row['node_number']))
                except (TypeError, ValueError):
                    pass
        if len(child_nodeIDs) > 0:
            child_NodeIDs = ' '.join(str(x) for x in child_nodeIDs)
            self.mesh_status_data.loc[condition, 'Children'] = child_NodeIDs
            if notify:
                self.notifying_node_identified_children(nodeID, child_nodeIDs)

    def on_timer(self):
        while True:
            time.sleep(5)
            # logger.debug('on_timer() executed.')
            t_now = datetime.datetime.now()
            for index, row in self.mesh_status_data.iterrows():
                # logger.debug(row['ConnRptTime'] is not None)
                if row['ConnRptTime'] is not None:
                    dT = t_now - row['ConnRptTime']
                    # logger.debug(dT.total_seconds())
                    if dT.total_seconds() > 10:
                        self.mesh_status_data.loc[index, 'Connection'] = 'disconnected'
                        for col in [
                            "Sensor",
                            "DAQ Mode",
                            "DAQ",
                            "Stream",
                            SPEED_COL,
                            LEVEL_COL,
                            "Parent",
                            RSSI_COL,
                            "Children",
                            "CMD",
                        ]:
                            if col in self.mesh_status_data.columns:
                                self.mesh_status_data.loc[index, col] = ''
                        if PPS_COL in self.mesh_status_data.columns:
                            self.mesh_status_data.loc[index, PPS_COL] = ''
                        self.mesh_status_data.loc[index, 'PPS-time'] = ''
                        self.mesh_status_data.loc[index, 'PPS-flash-time'] = ''
                        self.mesh_status_data.loc[index, 'Parent MAC'] = ''
                        self.mesh_status_data.loc[index, 'Self MAC'] = ''
                        self.mesh_status_data.loc[index, 'ConnRptTime'] = None
                        self.mesh_status_data.loc[index, 'DAQ-time'] = ''
                        hist = self.speed_hist.get(row['nodeID'])
                        if hist is not None:
                            hist.clear()
                        # self.mesh_status_data.loc[index, 'Children_nodeIDs'] = []

    # def getTimeWindow(self, t1):
    #     """Time window with 30 seconds span presenting the latest measurement
    #
    #     Args:
    #         t1 (datetime): the latest timestamp
    #
    #     Returns:
    #         [ts, te]: datetime ts, te containing t1 in the middle. te-t1 = 30 secs
    #     """
    #     r = [datetime.datetime.fromtimestamp(floor(t1.timestamp() / 30) * 30),
    #          datetime.datetime.fromtimestamp(floor(t1.timestamp() / 30 + 1) * 30)]
    #     return r

    def _build_merged_plot_data(self, df: pd.DataFrame) -> None:
        self.merged_timehistory_xdata = {}
        self.merged_timehistory_ydata = {}
        self.merged_timehistory_xlim = []
        self.merged_psd_xdata = []
        self.merged_psd_ydata = {}
        self.merged_node_ids = []
        self.merged_fs = None

        if df is None or df.empty:
            return

        try:
            times = pd.to_datetime(df.index).to_pydatetime().tolist()
        except Exception:
            return

        if times:
            self.merged_timehistory_xlim = [times[0], times[-1]]

        node_ids = sorted({col.split('-')[0] for col in df.columns if '-' in col})
        self.merged_node_ids = node_ids

        fs = 50.0
        try:
            dt_series = pd.to_datetime(df.index).to_series().diff().dt.total_seconds().dropna()
            if not dt_series.empty:
                median_dt = float(dt_series.median())
                if median_dt > 0:
                    fs = 1.0 / median_dt
        except Exception:
            fs = 50.0
        self.merged_fs = fs

        for node_id in node_ids:
            cols = [f"{node_id}-X", f"{node_id}-Y", f"{node_id}-Z"]
            if not all(col in df.columns for col in cols):
                continue
            ydata = df[cols].to_numpy()
            self.merged_timehistory_xdata[node_id] = times
            self.merged_timehistory_ydata[node_id] = ydata

            try:
                psder = psd_recursive.PSD_Recursive(1024, fs)
                psder.push(ydata, ts=np.array(times))
                f, asd = psder.get_asd()
                self.merged_psd_xdata = f
                self.merged_psd_ydata[node_id] = asd
            except Exception:
                continue

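    # The sampling rate of a merged frame is recovered from its own index:
    # fs = 1 / median(diff(index)), so a frame resampled on a 20 ms grid estimates
    # 1 / 0.02 = 50 Hz, and taking the median keeps the estimate robust to the
    # occasional gap where dropna() removed rows.
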
    def merge_rt_streamed_files(self):

        global df
        raw_data_dir = RT_STREAM_HD5_DIR
        NodeIDs = []

        if not os.path.isdir(raw_data_dir):
            self._ui_log(f"rt-streamed folder missing: {raw_data_dir}")
            return

        entries = os.listdir(raw_data_dir)
        files = [f for f in entries if f.endswith('.hd5')]

        # IDENTIFY NODE IDs
        for file in files:
            NodeIDs.append(file.split('-')[1].split('.')[0])
        NodeIDs = sorted(set(NodeIDs))

        # READ AND MERGE ALL RAW DATA FILES
        dfs = []
        msg = "reading files ... "
        self._ui_log(msg)

        for NodeID in NodeIDs:
            dfs_ = []
            files_ = [f for f in entries if f.endswith('{}.hd5'.format(NodeID))]
            for file_ in files_:
                msg = f"reading file {file_} ... "
                self._ui_log(msg)
                dfs_.append(pd.read_hdf(os.path.join(raw_data_dir, file_)))
            df_ = pd.concat(dfs_, axis=0)
            df_ = df_.rename(columns={
                'X': '{}-X'.format(NodeID),
                'Y': '{}-Y'.format(NodeID),
                'Z': '{}-Z'.format(NodeID)
            })
            dfs.append(df_)

        df = pd.concat(dfs, axis=1)
        df = df.dropna()
        df = df.sort_index()
        self._build_merged_plot_data(df)

        # df.plot(figsize=(24, 6))
        # show()

        os.makedirs(RT_STREAM_MERGED_DIR, exist_ok=True)
        base_name = os.path.join(RT_STREAM_MERGED_DIR, "accm-{}".format(df.index[0].strftime("%Y_%m%d_%H%M")))
        hd5_path = f"{base_name}.hd5"
        df.to_hdf(hd5_path, key="df")
        msg = f"merged file written to {hd5_path} "
        self._ui_log(msg)
        self._export_merged_mat(df, base_name)

    def clear_rt_streamed_files(self) -> None:
        self._clear_stream_dir(RT_STREAM_HD5_DIR, "[rt-clear]")

    def clear_sd_streamed_files(self) -> None:
        self._clear_stream_dir(SD_STREAM_BIN_DIR, "[sd-clear]")
        self._clear_stream_dir(SD_STREAM_HD5_DIR, "[sd-clear]")

    def _clear_stream_dir(self, directory: str, tag: str) -> None:
        if not os.path.isdir(directory):
            self._ui_log(f"{tag} folder missing: {directory}")
            return
        removed = 0
        errors = 0
        for name in os.listdir(directory):
            path = os.path.join(directory, name)
            if not os.path.isfile(path):
                continue
            try:
                os.remove(path)
                removed += 1
            except Exception:
                errors += 1
        self._ui_log(f"{tag} removed {removed} files from {directory} (errors={errors})")

    def _export_merged_mat(self, df: pd.DataFrame, base_path: str) -> None:
        if df is None or df.empty:
            self._ui_log("[merge] no data; skipping MAT export")
            return
        mat_path = base_path if base_path.endswith(".mat") else f"{base_path}.mat"
        try:
            idx = pd.to_datetime(df.index)
            tz = "UTC"
            if getattr(idx, "tz", None) is not None:
                idx = idx.tz_convert("UTC")
            else:
                tz = "UTC (naive)"
            time_iso = [t.isoformat() for t in idx.to_pydatetime()]
            time_posix = (idx.view("int64") / 1e9).astype(float)
            fs = self.merged_fs if self.merged_fs is not None else 0.0
            columns = df.columns.tolist()
            sanitized = []
            name_map = {}
            for col in columns:
                base = re.sub(r"[^0-9A-Za-z_]", "_", str(col))
                if base == "":
                    base = "var"
                if base[0].isdigit():
                    base = f"v_{base}"
                name = base
                suffix = 1
                while name in name_map.values():
                    name = f"{base}_{suffix}"
                    suffix += 1
                name_map[col] = name
                sanitized.append(name)

            table_struct = {"Time": time_posix}
            data = df.to_numpy()
            for idx_col, col in enumerate(columns):
                table_struct[name_map[col]] = data[:, idx_col]

            meta_struct = {
                "columns_original": np.array(columns, dtype=object),
                "columns_sanitized": np.array(sanitized, dtype=object),
                "time_iso": np.array(time_iso, dtype=object),
                "tz": tz,
                "fs_hz": float(fs),
                "row_time_key": "Time",
            }

            mat_dict = {
                "data": table_struct,
                "meta": meta_struct,
            }
            if h5s is not None:
                h5s.savemat(
                    mat_path,
                    mat_dict,
                    format="7.3",
                    matlab_compatible=True,
                )
                self._ui_log(f"[merge] mat (v7.3) written: {mat_path}")
            elif sio is not None:
                sio.savemat(mat_path, mat_dict, do_compression=True)
                self._ui_log(f"[merge] mat (v5) written: {mat_path}")
            else:
                self._ui_log("[merge] no MAT writer available; install hdf5storage")
        except Exception as exc:
            self._ui_log(f"[merge] failed mat export: {exc}")

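    # MATLAB-side sketch for the file written above (the filename and node id ACC1
    # are hypothetical examples; column "ACC1-X" sanitizes to the field name ACC1_X):
    #
    #     s = load('accm-2025_0228_1200.mat');
    #     t = datetime(s.data.Time, 'ConvertFrom', 'posixtime', 'TimeZone', 'UTC');
    #     plot(t, s.data.ACC1_X);
    #     disp(s.meta.fs_hz);
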
    def load_latest_merged_rt(self) -> None:
        self._load_latest_merged(RT_STREAM_MERGED_DIR, "[rt-plot]")

    def load_latest_merged_sd(self) -> None:
        self._load_latest_merged(self.sd_stream_merged_dir, "[sd-plot]")

    def _load_latest_merged(self, directory: str, tag: str) -> None:
        if not os.path.isdir(directory):
            self._ui_log(f"{tag} folder missing: {directory}")
            return
        files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith(".hd5")]
        if not files:
            self._ui_log(f"{tag} no merged files found")
            return
        latest = max(files, key=lambda p: os.path.getmtime(p))
        try:
            df = pd.read_hdf(latest)
        except Exception as exc:
            self._ui_log(f"{tag} failed to read {latest}: {exc}")
            return
        if df is None or df.empty:
            self._ui_log(f"{tag} empty file: {latest}")
            return
        df = df.dropna().sort_index()
        self._build_merged_plot_data(df)
        self._ui_log(f"{tag} loaded {os.path.basename(latest)}")

    def update_mesh_status_data(self, nodeID, column, msg):
        self._set_node_fields(nodeID, **{column: msg})

    def _ui_log(self, msg: str) -> None:
        if self.ui_log is not None:
            self.ui_log(msg)
        else:
            logger.info(msg)

    def _node_condition(self, nodeID):
        condition = self.mesh_status_data['nodeID'] == nodeID
        if not condition.any():
            return None
        return condition

    def _set_node_fields(self, nodeID, mark_dirty: bool = True, **fields):
        condition = self._node_condition(nodeID)
        if condition is None:
            return False
        for column, value in fields.items():
            self.mesh_status_data.loc[condition, column] = value
        status = self.node_status.get(nodeID)
        if status is not None:
            column_map = {
                "Node ID": "node_label",
                "Connection": "connection",
                SPEED_COL: "speed_kb_s",
                LEVEL_COL: "level",
                "Parent": "parent",
                RSSI_COL: "rssi_db",
                "Children": "children",
                PPS_COL: "pps_age",
                "CMD": "cmd",
                "PPS-time": "pps_time",
                "PPS-flash-time": "pps_flash_time",
                "DAQ-time": "daq_time",
                "Parent MAC": "parent_mac",
                "Self MAC": "self_mac",
                "ConnRptTime": "conn_rpt_time",
            }
            for column, value in fields.items():
                attr = column_map.get(column)
                if not attr:
                    continue
                if attr == "speed_kb_s":
                    if value == "" or value is None:
                        setattr(status, attr, None)
                    else:
                        try:
                            setattr(status, attr, float(value))
                        except (TypeError, ValueError):
                            setattr(status, attr, None)
                elif attr in ("level", "rssi_db"):
                    if value == "" or value is None:
                        setattr(status, attr, None)
                    else:
                        try:
                            setattr(status, attr, int(value))
                        except (TypeError, ValueError):
                            setattr(status, attr, None)
                else:
                    setattr(status, attr, value)
        if mark_dirty:
            self.gui_dirty = True
        return True

    # def clear():
    #     # DELETE TEMPORARY FILES
    #     for file in glob.glob("data2/*.hd5"):
    #         try:
    #             os.remove(file)
    #         except Exception as e:
    #             logger.error(f"Error deleting file {file}: {e}")