ezmsg-tools 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ezmsg/tools/__init__.py +1 -0
- ezmsg/tools/__version__.py +34 -0
- ezmsg/tools/dag.py +146 -0
- ezmsg/tools/perfmon/__init__.py +0 -0
- ezmsg/tools/perfmon/main.py +283 -0
- ezmsg/tools/proc.py +87 -0
- ezmsg/tools/shmem/__init__.py +0 -0
- ezmsg/tools/shmem/shmem.py +422 -0
- ezmsg/tools/shmem/shmem_mirror.py +244 -0
- ezmsg/tools/sigmon/__init__.py +0 -0
- ezmsg/tools/sigmon/main.py +95 -0
- ezmsg/tools/sigmon/ui/__init__.py +0 -0
- ezmsg/tools/sigmon/ui/base.py +96 -0
- ezmsg/tools/sigmon/ui/dag.py +101 -0
- ezmsg/tools/sigmon/ui/timeseries.py +263 -0
- ezmsg_tools-0.1.0.dist-info/METADATA +115 -0
- ezmsg_tools-0.1.0.dist-info/RECORD +20 -0
- ezmsg_tools-0.1.0.dist-info/WHEEL +4 -0
- ezmsg_tools-0.1.0.dist-info/entry_points.txt +3 -0
- ezmsg_tools-0.1.0.dist-info/licenses/LICENSE +21 -0
ezmsg/tools/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .__version__ import __version__ as __version__
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# file generated by setuptools-scm
# don't change, don't track in version control

# Public names exported by this generated version module.
__all__ = [
    "__version__",
    "__version_tuple__",
    "version",
    "version_tuple",
    "__commit_id__",
    "commit_id",
]

# Cheap stand-in for typing.TYPE_CHECKING: the branch below is only followed
# by static type checkers; at runtime the aliases fall through to `object`.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple
    from typing import Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
    COMMIT_ID = Union[str, None]
else:
    # Runtime placeholders; the annotations below are never evaluated.
    VERSION_TUPLE = object
    COMMIT_ID = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
commit_id: COMMIT_ID
__commit_id__: COMMIT_ID

# Dunder and plain aliases are kept in sync for consumer convenience.
__version__ = version = '0.1.0'
__version_tuple__ = version_tuple = (0, 1, 0)

__commit_id__ = commit_id = None
|
ezmsg/tools/dag.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import typing
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
from typing import TYPE_CHECKING
|
|
5
|
+
from uuid import uuid4
|
|
6
|
+
|
|
7
|
+
import ezmsg.core as ez
|
|
8
|
+
import pandas as pd
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
import pygraphviz
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def get_graph(graph_address: typing.Tuple[str, int]) -> "pygraphviz.AGraph":
    """Build a pygraphviz AGraph mirroring the ezmsg pipeline at ``graph_address``.

    Connects to the ezmsg GraphService, removes any stale VISBUFF monitor
    connections left over from a previous visualization session, then renders
    each component as a nested cluster and each stream connection as a
    directed edge.

    Args:
        graph_address: (host, port) of the ezmsg graph server.

    Returns:
        A directed AGraph whose nodes carry the full stream path in their
        ``name`` attribute.
    """
    import pygraphviz as pgv

    # Create a graphviz object with our graph components as nodes and our connections as edges.
    G = pgv.AGraph(name="ezmsg-graphviz", strict=False, directed=True)
    G.graph_attr["label"] = "ezmsg-graphviz"
    G.graph_attr["rankdir"] = "TB"
    G.node_attr["fontsize"] = "8"
    G.node_attr["fontcolor"] = "#000000"
    G.node_attr["style"] = "filled"
    G.edge_attr["color"] = "#0000FF"
    G.edge_attr["style"] = "setlinewidth(2)"

    # Get the dag from the GraphService. Reuse one service object, and close
    # the private event loop when done (the previous version leaked it).
    graph_service = ez.graphserver.GraphService(address=graph_address)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        dag = loop.run_until_complete(graph_service.dag())

        # dag.graph is a dict with format
        # {
        #     'apath/unit/port': {'some/other_unit/port', 'yet/another/unit/port'},
        # }
        # where 'port' might be a pub (out) stream or a sub (input) stream.
        graph_connections = dag.graph.copy()

        # Drop stale connections into our own visualization buffer, then re-fetch.
        b_refresh_dag = False
        for k, v in graph_connections.items():
            if "VISBUFF/INPUT_SIGNAL" in v:
                b_refresh_dag = True
                loop.run_until_complete(graph_service.disconnect(k, "VISBUFF/INPUT_SIGNAL"))
        if b_refresh_dag:
            dag = loop.run_until_complete(graph_service.dag())
            graph_connections = dag.graph.copy()
    finally:
        loop.close()

    # Generate UUID node names (quoted so graphviz treats them as opaque ids).
    node_map = {name: f'"{str(uuid4())}"' for name in set(graph_connections.keys())}

    for node, conns in graph_connections.items():
        for sub in conns:
            # NOTE(review): assumes every subscriber path also appears as a key
            # of dag.graph (otherwise KeyError) -- confirm against ezmsg.
            G.add_edge(node_map[node], node_map[sub])

    # Make a new dict `graph` with format {component_name: {sub_component: {stream: stream_full_path}}, ...}
    def tree():
        return defaultdict(tree)

    graph: defaultdict = tree()
    for node, conns in graph_connections.items():
        subgraph = graph
        path = node.split("/")
        route = path[:-1]
        stream = path[-1]
        for seg in route:
            subgraph = subgraph[seg]
        subgraph[stream] = node

    # Build out the AGraph recursively: components become clusters, streams become nodes.
    def build_graph(g: defaultdict, agraph: pgv.AGraph):
        for k, v in g.items():
            if type(v) is defaultdict:
                clust = agraph.add_subgraph(name=f"cluster_{k.lower()}", label=k, cluster=True)
                build_graph(v, clust)
            else:
                agraph.add_node(node_map[v], name=v, label=k)

    build_graph(graph, G)

    return G
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def pgv2pd(g: "pygraphviz.AGraph") -> pd.DataFrame:
    """Tabulate a laid-out AGraph: one row per node.

    Each row holds the node's full stream path (``name``), its graphviz layout
    coordinates (``x``, ``y``), and the ``name`` of the furthest-upstream
    publisher reachable by walking incoming edges (``upstream``).
    """
    edge_df = pd.DataFrame(g.edges(), columns=["pub", "sub"])

    def _root_publisher(node_id):
        # Follow the (first) incoming edge repeatedly until a source node
        # with no publishers is reached.
        current = node_id
        while True:
            incoming = edge_df[edge_df["sub"] == current]["pub"]
            if not len(incoming):
                return current
            current = incoming.iloc[0]

    rows = []
    for graph_node in g.nodes():
        x_str, y_str = graph_node.attr["pos"].split(",")[:2]
        rows.append(
            {
                "name": graph_node.attr["name"],
                "x": float(x_str),
                "y": float(y_str),
                "upstream": g.get_node(_root_publisher(graph_node.name)).attr["name"],
            }
        )
    return pd.DataFrame(rows)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
async def crawl_coro(graph_address: tuple):
    """Query the graph server and return the full paths of all unit-level topics.

    Builds a nested mapping of the pipeline hierarchy from the DAG, then
    collects the stream paths found at the leaves.
    """
    graph_service = ez.graphserver.GraphService(address=graph_address)
    dag: ez.dag.DAG = await graph_service.dag()
    connections = dag.graph.copy()

    # Nested-defaultdict factory: arbitrary-depth component hierarchy.
    def _nested():
        return defaultdict(_nested)

    topo: defaultdict = _nested()

    for full_path in connections:
        *component_route, stream_name = full_path.split("/")
        cursor = topo
        for component in component_route:
            cursor = cursor[component]
        cursor[stream_name] = full_path

    def _leaf_topics(node: defaultdict) -> list:
        # Descend into child components; a node with no child components is a
        # unit, and its values are the stream full-paths we want.
        children = [child for child in node.values() if isinstance(child, defaultdict)]
        if not children:
            return list(node.values())
        collected = []
        for child in children:
            collected += _leaf_topics(child)
        return collected

    return _leaf_topics(topo)
|
|
File without changes
|
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This is a plotly.dash application that monitors and visualizes the performance of an ezmsg system.
|
|
3
|
+
|
|
4
|
+
Upon page load or changing the logger path, the application reads the CSV file at the given path
|
|
5
|
+
and displays the data in a table.
|
|
6
|
+
Additionally, at each polling interval (10 seconds by default), the application updates the table with the latest data from the CSV file.
|
|
7
|
+
|
|
8
|
+
Whenever the table is updated, the application also updates a histogram graph that shows the average
|
|
9
|
+
elapsed time for each topic.
|
|
10
|
+
|
|
11
|
+
Only the most recent window of data (10 seconds by default) is used in the table and graphs.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import asyncio
|
|
15
|
+
import datetime
|
|
16
|
+
import io
|
|
17
|
+
import typing
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
|
|
20
|
+
import dash
|
|
21
|
+
import dash_bootstrap_components as dbc
|
|
22
|
+
import pandas as pd
|
|
23
|
+
import plotly.express as px
|
|
24
|
+
import pygtail
|
|
25
|
+
from dash_extensions import Mermaid, enrich
|
|
26
|
+
from ezmsg.baseproc.util.profile import get_logger_path
|
|
27
|
+
from ezmsg.core.graphserver import GraphService
|
|
28
|
+
|
|
29
|
+
PAGE_SIZE = 20  # rows per page in the performance DataTable

app = dash.Dash("ezmsg Performance Monitor", external_stylesheets=[dbc.themes.CYBORG])

# Non-visual/stateful components: the polling interval, two client-side stores
# (the buffered log records and the newest row's timestamp), plus a dummy div
# used to fire callbacks on page load.
state = dbc.Col(
    [
        dash.dcc.Interval(id="interval", interval=10_000, n_intervals=0),
        dash.dcc.Store(id="df-store"),
        dash.dcc.Store(id="last-dt-store"),
        dash.html.Div(id="onload"),  # this div is used to trigger any functions that need to run on page load
    ]
)

# Header row: the log-file path input and a live-update toggle.
header_ui = dbc.Col(
    [
        dbc.Row(
            [
                dbc.Col(
                    dbc.Input(
                        id="logger-path",
                        type="text",
                        placeholder="logpath",
                        debounce=True,
                        valid=False,
                    )
                ),
                dbc.Col(dbc.Switch(id="live-toggle", label="Live", value=False)),
                # dbc.Col("History (s):", width=1),
                # dbc.Col(dbc.Input(id="history-input", type="number", debounce=True, min=1, value=10)),
            ]
        ),
    ]
)

# Mermaid rendering of the ezmsg pipeline DAG (filled by the update_dag callback).
dag = dbc.Col(
    [
        Mermaid(id="dag", config={"theme": "neutral"}),
    ],
    style={"width": "100%", "backgroundColor": "rgb(200, 200, 200)"},
)

# Per-topic mean-elapsed bar chart, total processing time, and the raw log table.
table_summary = dbc.Col(
    [
        dbc.Row(dash.dcc.Graph(id="hist-graph", style={"width": "100%"})),
        dbc.Row(dash.html.Div("Sum:", id="proc-sum", style={"width": "100%"})),
        dbc.Row(
            dash.dash_table.DataTable(
                id="table",
                data=[],
                page_current=-1,  # negative sentinel -> callbacks jump to the last page
                page_size=PAGE_SIZE,
                page_action="custom",
                style_header={"backgroundColor": "rgb(30, 30, 30)", "color": "white"},
                style_data={"backgroundColor": "rgb(50, 50, 50)", "color": "white"},
            )
        ),
    ]
)

app.layout = dash.html.Div(
    children=[state, header_ui, dag, table_summary],
    id="container",
    className="dash-bootstrap",
)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
@dash.callback(
    dash.Output("logger-path", "value"),
    enrich.Trigger("onload", "children"),
    prevent_initial_call=False,
)
def on_load(_):
    """Seed the logger-path input with the default profiler log location on page load."""
    default_path = get_logger_path()
    return str(default_path)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
@dash.callback(
    dash.Output("logger-path", "valid"),
    dash.Input("logger-path", "value"),
    prevent_initial_call=True,
)
def on_logger_path(logger_path: str) -> bool:
    """Validate the entered log path and reset pygtail's offset bookkeeping.

    A path is valid when it exists and is non-empty. Any stale ``.offset``
    sidecar file is removed so tailing restarts from a known state.
    """
    path = Path(logger_path)
    if not path.exists():
        return False
    # Start tailing from scratch: drop the pygtail offset sidecar if present.
    (path.parent / (path.name + ".offset")).unlink(missing_ok=True)
    return path.stat().st_size > 0
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _trim_df(df: pd.DataFrame, history_sec: float = 10.0) -> pd.DataFrame:
|
|
120
|
+
last_dt = df["Time"].iloc[-1]
|
|
121
|
+
hist_lim = last_dt - datetime.timedelta(seconds=history_sec)
|
|
122
|
+
return df[df["Time"] >= hist_lim]
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@dash.callback(
    dash.Output("df-store", "data"),
    dash.Output("last-dt-store", "data"),
    dash.Input("logger-path", "value"),
    # dash.Input("history-input", "value"),
    prevent_initial_call=True,
)
def load_once(
    logger_path: str,
    # history_sec
) -> tuple[list[dict[str, typing.Any]], datetime.datetime]:
    """Load the full CSV log once whenever the logger path changes.

    Reads the whole file, keeps only the trailing history window, and seeds
    the two dcc.Store components consumed by the other callbacks.
    """
    if logger_path is None or not Path(logger_path).exists():
        raise dash.exceptions.PreventUpdate
    try:
        df = pd.read_csv(logger_path, header=0, parse_dates=["Time"])
    except pd.errors.EmptyDataError:
        raise dash.exceptions.PreventUpdate
    # Rewrite logger-path.offset with the current offset, so that the periodic
    # interval_callback only tails lines written after this point.
    tail = pygtail.Pygtail(logger_path)
    tail.read_from_end = True
    tail.update_offset_file()
    # Trim any rows with headers (the logger may re-emit its header row mid-file).
    b_bad = df["Time"].astype(str) == "Time"
    if b_bad.any():
        df = df[~b_bad]
    # Reinterpret the columns:
    # Time (datetime64), Source (obj), Topic (obj), SampleTime (float64), PerfCounter (float64), Elapsed (float64)
    df["Time"] = pd.to_datetime(df["Time"])
    for col in ["SampleTime", "PerfCounter", "Elapsed"]:
        df[col] = pd.to_numeric(df[col])
    # Trim dataframe to only include the last history_sec of data.
    df = _trim_df(df, history_sec=10.0)  # TODO: Get history_sec from widget
    # NOTE(review): raises IndexError if every row was trimmed or dropped -- confirm intended.
    last_dt = df["Time"].iloc[-1]
    data = df.to_dict("records")
    return data, last_dt
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
@dash.callback(
    dash.Output("df-store", "data", allow_duplicate=True),
    dash.Output("last-dt-store", "data", allow_duplicate=True),
    [
        dash.Input("interval", "n_intervals"),
        dash.Input("live-toggle", "value"),
        dash.State("logger-path", "value"),
        dash.State("df-store", "data"),
        dash.State("last-dt-store", "data"),
    ],
    prevent_initial_call=True,
)
def interval_callback(_, toggle_state, logger_path, data, last_dt):
    """Tail newly written CSV lines on each interval tick while Live is on.

    Appends fresh rows (pygtail resumes from the offset file written by
    load_once or by an earlier tick) to the stored frame, then re-trims it
    to the history window.
    """
    if not toggle_state:
        raise dash.exceptions.PreventUpdate

    tail = pygtail.Pygtail(logger_path)
    new_lines = tail.read()

    if not new_lines:
        raise dash.exceptions.PreventUpdate

    if data is not None:
        # Extend the existing buffer; tailed lines carry no header row, so
        # reuse the buffered frame's column names.
        df = pd.DataFrame.from_dict(data)
        df["Time"] = pd.to_datetime(df["Time"])
        new_df = pd.read_csv(io.StringIO(new_lines), names=df.columns, parse_dates=["Time"])
        df = pd.concat([df, new_df], ignore_index=True)
    else:
        # No buffer yet.
        # NOTE(review): header=0 treats the first tailed line as a header row,
        # which silently drops one data row if the tail has no header -- confirm.
        df = pd.read_csv(io.StringIO(new_lines), header=0, parse_dates=["Time"])
    df = _trim_df(df, history_sec=10.0)  # TODO: Get history_sec from widget
    last_dt = df["Time"].iloc[-1]
    return df.to_dict("records"), last_dt
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
@dash.callback(
    dash.Output("dag", "chart"),
    [
        dash.Input("df-store", "data"),
        dash.State("logger-path", "value"),
    ],
    prevent_initial_call=True,
    memoize=True,
)
def update_dag(data, logger_path):
    """Render the pipeline DAG as mermaid text, coloring nodes by mean elapsed time.

    Tries the live graph server first; falls back to a pre-rendered
    ``<logname>.mermaid`` file next to the log file.
    """
    async def _get_formatted_graph():
        # NOTE(review): graph server address is hard-coded here -- confirm it
        # matches the running ezmsg deployment rather than deriving from config.
        graph_service = GraphService(("127.0.0.1", 25978))
        graph_out = await graph_service.get_formatted_graph(fmt="mermaid", direction="LR")
        return graph_out

    # Fresh private event loop per invocation (dash callbacks are synchronous).
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    graph_str = loop.run_until_complete(_get_formatted_graph())
    if not graph_str:
        # Fallback: look for a saved mermaid file alongside the log.
        logger_path = Path(logger_path)
        graph_path = logger_path.parent / (logger_path.stem + ".mermaid")
        if not graph_path.exists():
            return ""
        with graph_path.open() as f:
            graph_str = f.read()

    df = pd.DataFrame.from_dict(data)
    df["Time"] = pd.to_datetime(df["Time"])

    # Color each node from blue (fast) to red (slow) by its mean Elapsed time,
    # normalized to the slowest topic.
    topic_means = df.groupby("Topic")["Elapsed"].mean()
    max_elapsed = topic_means.max()
    for topic, mean in topic_means.items():
        # Mermaid node ids are assumed to be the lower-cased last path segment
        # of the topic -- TODO confirm against the graph formatter's output.
        topic_str = topic.split("/")[-1].lower()
        # https://mermaid.js.org/syntax/flowchart.html#styling-a-node
        color = px.colors.find_intermediate_color((0, 0.0, 1.0), (1.0, 0.0, 0.0), mean / max_elapsed)
        fill_str = "".join([f"{int(c * 255):02x}" for c in color])
        # style id2 fill:#bbf,stroke:#f66,stroke-width:2px,color:#fff,stroke-dasharray: 5 5
        graph_str += f" style {topic_str} fill:#{fill_str}80\n"
    return graph_str
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
@dash.callback(
    dash.Output("table", "data"),
    dash.Output("table", "page_current"),
    dash.Input("df-store", "data"),
    dash.Input("table", "page_current"),
    dash.Input("table", "page_size"),
    prevent_initial_call=True,
    memoize=True,
)
def update_table(data, page_current, page_size):
    """Render one page of the buffered performance log.

    A negative ``page_current`` (the DataTable's initial value) means
    "jump to the last page".

    Fixes: the previous last-page computation ``len(df) // PAGE_SIZE - 1``
    returned -1 (an empty table) for fewer than one page of rows, and the
    second-to-last page when the row count was not a multiple of the page
    size; it also ignored the ``page_size`` argument in favor of the module
    constant.
    """
    df = pd.DataFrame.from_dict(data)
    df["Time"] = pd.to_datetime(df["Time"])
    if page_current < 0:
        # Index of the last (possibly partial) page; 0 when the frame is empty.
        page_current = max(0, (len(df) - 1) // page_size)
    out_data = df.iloc[page_current * page_size : (page_current + 1) * page_size].to_dict("records")
    return out_data, page_current
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
@dash.callback(
    dash.Output("hist-graph", "figure"),
    dash.Output("proc-sum", "children"),
    dash.Input("df-store", "data"),
    prevent_initial_call=True,
    memoize=True,
)
def update_hist(data):
    """Redraw the per-topic mean-elapsed bar chart and the total processing time label."""
    frame = pd.DataFrame.from_dict(data)
    frame["Time"] = pd.to_datetime(frame["Time"])
    # One bar per topic: mean Elapsed over the buffered window.
    means = frame.groupby("Topic")[["PerfCounter", "Elapsed"]].mean()
    fig = px.bar(
        means,
        y="Elapsed",
        hover_data=["Elapsed"],
        color="Elapsed",
        labels={"Elapsed": "Processing time per chunk (ms)"},
        height=400,
        color_continuous_scale="Bluered",
    )
    fig.update_layout(height=400, showlegend=False, template="plotly_dark")
    fig.layout.coloraxis.colorbar.title = None
    total_ms = means["Elapsed"].sum()
    return fig, f"Sum: {total_ms:.2f} ms"
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
if __name__ == "__main__":
    # Launch the Dash development server (debug=True enables hot reload).
    app.run(debug=True)
|
ezmsg/tools/proc.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import multiprocessing
|
|
3
|
+
import multiprocessing.connection
|
|
4
|
+
import typing
|
|
5
|
+
|
|
6
|
+
import ezmsg.core as ez
|
|
7
|
+
|
|
8
|
+
from .shmem.shmem import ShMemCircBuff, ShMemCircBuffSettings
|
|
9
|
+
|
|
10
|
+
BUF_DUR = 3.0
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class EzMonitorProcess(multiprocessing.Process):
    """Child process hosting a minimal ezmsg pipeline.

    Runs a single ShMemCircBuff unit whose INPUT_SIGNAL is connected to
    ``topic`` on the graph server at ``address``.
    """

    def __init__(
        self,
        settings: ShMemCircBuffSettings,
        topic: str,
        address: typing.Optional[typing.Tuple[str, int]] = None,
    ) -> None:
        super().__init__()
        self._settings = settings
        self._topic = topic
        self._graph_address = address

    def run(self) -> None:
        # Executed in the child process: build the unit and block in ez.run.
        buff_unit = ShMemCircBuff(self._settings)
        ez.run(
            components={"SHMEM": buff_unit},
            connections=((self._topic, buff_unit.INPUT_SIGNAL),),
            graph_address=self._graph_address,
        )
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class EZProcManager:
    """
    Manages the subprocess that runs an ezmsg pipeline comprising a single ShMemCircBuff unit connected to a pipeline.
    The unit must be parameterized with the correct shared memory name.
    We do not actually interact with the shared memory in this class. See .mirror.EzmsgShmMirror.
    """

    def __init__(self, graph_ip: str, graph_port: int, buf_dur: float = BUF_DUR) -> None:
        # Address of the ezmsg graph server the monitor attaches to.
        self._graph_addr: typing.Tuple[str, int] = (graph_ip, graph_port)
        # Seconds of signal to hold in the shared-memory circular buffer.
        self._buf_dur = buf_dur
        # The EzMonitorProcess child, or None when no monitor is running.
        self._proc = None
        self._node_path: typing.Optional[str] = None
        # Pipe pair: _remote_conn goes to the child unit, _conn stays local.
        self._remote_conn, self._conn = multiprocessing.Pipe()

    @property
    def node_path(self) -> typing.Optional[str]:
        # Full stream path currently being monitored (None before first reset).
        return self._node_path

    @property
    def conn(self) -> typing.Optional[multiprocessing.connection.Connection]:
        # Local end of the pipe shared with the child's ShMemCircBuff unit.
        return self._conn

    def reset(self, node_path: typing.Optional[str]) -> None:
        # Tear down any running monitor and start a new one on `node_path`.
        # NOTE(review): passing node_path=None reaches _init_subprocess, where
        # "buff_" + None raises TypeError -- confirm callers always pass a str.
        self._cleanup_subprocess()
        self._node_path = node_path
        self._init_subprocess()

    def cleanup(self):
        # Public teardown entry point.
        self._cleanup_subprocess()

    def _cleanup_subprocess(self) -> None:
        # Ask the child pipeline to quit, wait for it, then scrub any leftover
        # VISBUFF connection on the graph server.
        if self._proc is not None:
            self._conn.send("quit")
            # Close process
            self._proc.join()
            self._proc = None

            # TODO: Somehow closing the proc doesn't always clear the VISBUFF connections.
            # NOTE(review): a fresh event loop is created per call and never
            # closed -- consider loop.close() after run_until_complete.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            loop.run_until_complete(
                ez.graphserver.GraphService(address=self._graph_addr).disconnect(
                    self._node_path, "VISBUFF/INPUT_SIGNAL"
                )
            )

    def _init_subprocess(self, axis: str = "time"):
        # Configure the child's buffer unit; shmem name is derived from the
        # monitored node path so the mirror side can find it.
        unit_settings = ShMemCircBuffSettings(
            shmem_name="buff_" + self._node_path,
            buf_dur=self._buf_dur,
            conn=self._remote_conn,
            axis=axis,
        )
        self._proc = EzMonitorProcess(unit_settings, self._node_path, address=self._graph_addr)
        self._proc.start()

        # if self._rend_conn.poll(): msg = self._rend_conn.recv()
|
|
File without changes
|