unaiverse-0.1.11-cp311-cp311-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of unaiverse might be problematic.
- unaiverse/__init__.py +19 -0
- unaiverse/agent.py +2090 -0
- unaiverse/agent_basics.py +1948 -0
- unaiverse/clock.py +221 -0
- unaiverse/dataprops.py +1236 -0
- unaiverse/hsm.py +1892 -0
- unaiverse/modules/__init__.py +18 -0
- unaiverse/modules/cnu/__init__.py +17 -0
- unaiverse/modules/cnu/cnus.py +536 -0
- unaiverse/modules/cnu/layers.py +261 -0
- unaiverse/modules/cnu/psi.py +60 -0
- unaiverse/modules/hl/__init__.py +15 -0
- unaiverse/modules/hl/hl_utils.py +411 -0
- unaiverse/modules/networks.py +1509 -0
- unaiverse/modules/utils.py +710 -0
- unaiverse/networking/__init__.py +16 -0
- unaiverse/networking/node/__init__.py +18 -0
- unaiverse/networking/node/connpool.py +1308 -0
- unaiverse/networking/node/node.py +2499 -0
- unaiverse/networking/node/profile.py +446 -0
- unaiverse/networking/node/tokens.py +79 -0
- unaiverse/networking/p2p/__init__.py +187 -0
- unaiverse/networking/p2p/go.mod +127 -0
- unaiverse/networking/p2p/go.sum +548 -0
- unaiverse/networking/p2p/golibp2p.py +18 -0
- unaiverse/networking/p2p/golibp2p.pyi +135 -0
- unaiverse/networking/p2p/lib.go +2662 -0
- unaiverse/networking/p2p/lib.go.sha256 +1 -0
- unaiverse/networking/p2p/lib_types.py +312 -0
- unaiverse/networking/p2p/message_pb2.py +50 -0
- unaiverse/networking/p2p/messages.py +362 -0
- unaiverse/networking/p2p/mylogger.py +77 -0
- unaiverse/networking/p2p/p2p.py +871 -0
- unaiverse/networking/p2p/proto-go/message.pb.go +846 -0
- unaiverse/networking/p2p/unailib.cpython-311-darwin.so +0 -0
- unaiverse/stats.py +1481 -0
- unaiverse/streamlib/__init__.py +15 -0
- unaiverse/streamlib/streamlib.py +210 -0
- unaiverse/streams.py +776 -0
- unaiverse/utils/__init__.py +16 -0
- unaiverse/utils/lone_wolf.json +24 -0
- unaiverse/utils/misc.py +310 -0
- unaiverse/utils/sandbox.py +293 -0
- unaiverse/utils/server.py +435 -0
- unaiverse/world.py +335 -0
- unaiverse-0.1.11.dist-info/METADATA +367 -0
- unaiverse-0.1.11.dist-info/RECORD +50 -0
- unaiverse-0.1.11.dist-info/WHEEL +6 -0
- unaiverse-0.1.11.dist-info/licenses/LICENSE +43 -0
- unaiverse-0.1.11.dist-info/top_level.txt +1 -0
unaiverse/stats.py
ADDED
@@ -0,0 +1,1481 @@
"""
█████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
A Collectionless AI Project (https://collectionless.ai)
Registration/Login: https://unaiverse.io
Code Repositories: https://github.com/collectionlessai/
Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
"""
import os
import json
import math
import zlib
import sqlite3
from datetime import timedelta
from sortedcontainers import SortedDict
from typing import Any, Set, List, Dict, Tuple, Optional, Union


# A fixed palette for consistent coloring
THEME = {
    # Main structural colors (Dark Mode optimized)
    'bg_paper': 'rgba(0,0,0,0)',  # Transparent to blend with container
    'bg_plot': 'rgba(0,0,0,0)',  # Transparent plot area
    'text_main': '#e0e0e0',  # Primary text color
    'text_light': '#a0a0a0',  # Secondary/Axis text color

    # UI Element specific
    'grid': '#333333',  # Grid lines
    'edge': '#666666',  # Graph edges
    'node_border': '#ffffff',  # Node borders

    # Main Accents
    'main': '#636EFA',  # Primary accent (Blue)
    'main_light': '#aab1ff',  # Lighter shade of primary

    # Table Styling
    'table': {
        'header_bg': '#2c2c2c',
        'header_txt': '#ffffff',
        'cell_bg': '#1a1a1a',
        'cell_txt': '#dddddd',
        'line': '#444444'
    },

    # Data Categorical Palette (Plotly default set)
    'peers': [
        '#636EFA', '#EF553B', '#00CC96', '#AB63FA', '#FFA15A',
        '#19D3F3', '#FF6692', '#B6E880', '#FF97FF', '#FECB52'
    ]
}


class UIPlot:
    """
    A Python abstraction for a UI Panel (specifically a Plotly chart).
    Allows users to build plots using Python methods instead of dicts/JSON.
    """
    def __init__(self, title: str = '', height: int = 400):
        self._data: List[Dict[str, Any]] = []
        self._layout: Dict[str, Any] = {
            'title': title,
            'height': height,
            'xaxis': {
                'title': 'Time',
                'gridcolor': THEME['grid'],
                'color': THEME['text_light']
            },
            'yaxis': {
                'title': 'Value',
                'gridcolor': THEME['grid'],
                'color': THEME['text_light']
            },
            'margin': {'l': 50, 'r': 50, 'b': 50, 't': 50},
            # Default dark theme friendly styling
            'paper_bgcolor': THEME['bg_paper'],
            'plot_bgcolor': THEME['bg_plot'],
            'font': {'color': THEME['text_main']}
        }

    def add_line(self, x: List[Any], y: List[Any], name: str, color: str = THEME['main'],
                 legend_group: str = None, show_legend: bool = True):
        """Adds a standard time-series line."""
        trace = {
            'x': x, 'y': y,
            'name': name,
            'type': 'scatter',
            'mode': 'lines+markers',
            'line': {'color': color},
            'legendgroup': legend_group,
            'showlegend': show_legend
        }
        self._data.append(trace)

    def add_area(self, x: List[Any], y: List[Any], name: str, color: str = THEME['main']):
        """Adds a filled area chart."""
        trace = {
            'x': x, 'y': y, 'name': name,
            'type': 'scatter', 'fill': 'tozeroy',
            'line': {'color': color}
        }
        self._data.append(trace)

    def add_indicator(self, value: Any, title: str):
        """Adds a big number indicator."""
        self._data.append({
            'type': 'indicator',
            'mode': 'number',
            'value': value,
            'title': {'text': title}
        })
        self._layout['height'] = 300  # Indicators usually need less height

    def add_table(self, headers: List[str] | None, columns: List[List[Any]]):
        """Adds a data table."""
        num_columns = len(columns) if columns else 0
        if headers:
            header_cfg = {
                'values': headers,
                'fill': {'color': THEME['table']['header_bg']},
                'font': {'color': THEME['table']['header_txt']},
                'line': {'color': THEME['table']['line']}
            }
        else:
            header_cfg = {
                'values': [''] * num_columns,
                'height': 0,  # Hide it
                'fill': {'color': 'rgba(0,0,0,0)'},  # Transparent just in case
                'line': {'width': 0}  # No border
            }

        trace = {
            'type': 'table',
            'header': header_cfg,
            'cells': {
                'values': columns,
                'fill': {'color': THEME['table']['cell_bg']},
                'font': {'color': THEME['table']['cell_txt']},
                'line': {'color': THEME['table']['line']}
            }
        }
        self._data.append(trace)

    def add_bar(self, xs: List[Any], ys: List[Any], names: List[str], colors: Union[str, List[str]] = THEME['main']):
        """Adds a bar chart trace."""
        trace = {
            'type': 'bar',
            'x': xs,
            'y': ys,
            'marker': {'color': colors},
            'showlegend': False,
            'text': names,
            'textposition': 'auto'
        }
        self._data.append(trace)
        self._layout['yaxis'].update({'title': 'Value'})

    def add_trace(self, trace: Dict[str, Any]):
        """Generic method to add any raw Plotly trace."""
        self._data.append(trace)

    def set_y_range(self, min_val: float, max_val: float):
        """Force Y-axis limits."""
        self._layout.setdefault('yaxis', {})['range'] = [min_val, max_val]

    def set_layout_opt(self, key: str, value: Any):
        """Generic setter for advanced layout options."""
        if isinstance(value, dict) and key in self._layout:
            self._layout[key].update(value)
        else:
            self._layout[key] = value

    def set_legend(self, orientation: str = 'v', x: float = 1.0, y: float = 1.0, xanchor: str = 'left', yanchor: str = 'top'):
        """
        Configures the legend position and orientation.
        orientation: 'v' (vertical) or 'h' (horizontal)
        """
        self._layout['showlegend'] = True
        self._layout['legend'] = {
            'orientation': orientation,
            'x': x,
            'y': y,
            'xanchor': xanchor,
            'yanchor': yanchor,
            'bgcolor': 'rgba(0,0,0,0)',
            'bordercolor': THEME['edge'],
            'borderwidth': 1
        }

    def to_json(self) -> str:
        """Serializes the panel to the format the Frontend expects."""
        return json.dumps({'data': self._data, 'layout': self._layout})

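# Usage sketch for UIPlot (illustrative only: the series below are made-up
# values, and the frontend is assumed to feed to_json() into a Plotly
# renderer):
#
#   plot = UIPlot(title='Agents over time', height=320)
#   plot.add_line(x=[1000, 2000, 3000], y=[1, 3, 2], name='world_agents')
#   plot.add_area(x=[1000, 2000, 3000], y=[0, 1, 1], name='human_agents',
#                 color=THEME['peers'][2])
#   plot.set_y_range(0, 5)
#   plot.set_legend(orientation='h', x=0.0, y=1.1)
#   payload = plot.to_json()  # '{"data": [...], "layout": {...}}'
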
class DefaultBaseDash:
    """
    A generic 2x2 Grid Dashboard for the base Stats class.
    Forces #111111 background to match the WStats styling.
    """
    def __init__(self, title="Network Overview"):
        self.traces = []
        self.layout = {
            "title": title,
            "height": 800,
            "template": "plotly_dark",
            "paper_bgcolor": "#111111",
            "grid": {"rows": 2, "columns": 2, "pattern": "independent"},

            # --- ROW 1 ---
            # Top Left (Graph)
            "xaxis1": {"domain": [0, 0.48], "visible": False},
            "yaxis1": {"domain": [0.55, 1], "visible": False},
            # Top Right (Timeseries)
            "xaxis2": {"domain": [0.52, 1]},
            "yaxis2": {"domain": [0.55, 1]},

            # --- ROW 2 ---
            # Bot Left (Bar)
            "xaxis3": {"domain": [0, 0.48]},
            "yaxis3": {"domain": [0, 0.45]},
            # Bot Right (Bar)
            "xaxis4": {"domain": [0.52, 1]},
            "yaxis4": {"domain": [0, 0.45]},

            "showlegend": True,
            "legend": {
                "orientation": "h",
                "y": 0.55,
                "x": 0.55,
                "xanchor": "left",
                "yanchor": "top",
                "bgcolor": "rgba(0,0,0,0)",
                "font": {"color": "#e0e0e0"}
            },
            "margin": {"l": 50, "r": 50, "t": 80, "b": 50}
        }
        self._map = {
            "top_left": ("xaxis1", "yaxis1"),
            "top_right": ("xaxis2", "yaxis2"),
            "bot_left": ("xaxis3", "yaxis3"),
            "bot_right": ("xaxis4", "yaxis4")
        }

    def add_panel(self, ui_plot: UIPlot, position: str):
        if position not in self._map:
            return

        xa, ya = self._map[position]
        x_dom = self.layout[xa]["domain"]
        y_dom = self.layout[ya]["domain"]

        # Merge Traces
        for t in ui_plot._data:
            nt = t.copy()
            if nt.get("type") == "table":
                nt["domain"] = {"x": x_dom, "y": y_dom}
            else:
                # Cartesian plots use axis references
                nt["xaxis"] = xa.replace("xaxis", "x")
                nt["yaxis"] = ya.replace("yaxis", "y")
            self.traces.append(nt)

        # Merge Layout
        src_l = ui_plot._layout
        dest_x = self.layout.setdefault(xa, {})
        dest_y = self.layout.setdefault(ya, {})
        if "xaxis" in src_l: dest_x.update({k: v for k, v in src_l["xaxis"].items() if k != "domain"})
        if "yaxis" in src_l: dest_y.update({k: v for k, v in src_l["yaxis"].items() if k != "domain"})

        # Add Title via Annotation
        if src_l.get("title"):
            self.layout.setdefault("annotations", []).append({
                "text": src_l["title"],
                "x": (x_dom[0] + x_dom[1]) / 2,
                "y": y_dom[1] + 0.02,
                "xref": "paper", "yref": "paper",
                "showarrow": False, "xanchor": "center",
                "font": {"size": 14, "color": "#aaaaaa"}
            })

    def to_json(self):
        return json.dumps({"data": self.traces, "layout": self.layout})

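# Composition sketch for DefaultBaseDash (illustrative only): each UIPlot is
# merged into one of the four fixed grid slots; its traces are re-pointed at
# that slot's axes and its title becomes a paper annotation.
#
#   dash = DefaultBaseDash('Demo Overview')
#   counts = UIPlot(title='Agent Counts')
#   counts.add_bar(xs=['peer-a', 'peer-b'], ys=[3, 5], names=['a', 'b'])
#   dash.add_panel(counts, 'bot_left')
#   fig_json = dash.to_json()  # one Plotly figure with per-slot domains
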
class Stats:
    """
    Encapsulates all logic for managing, storing, and persisting agent/world
    statistics. This class provides a clean API to the rest of the application
    and hides the implementation details of data structures and persistence.

    Design Principles:
    1. Typed Schema: Class-level definitions (e.g., CORE_..._SCHEMA) are
       dicts mapping stat names to (type, default) tuples:
       {"stat_name": (type, default), ...}
    2. Unified API: All stat updates go through store_stat(stat_name, value,
       peer_id, timestamp), which dispatches to:
       - _store_static(stat_name, value, peer_id, timestamp)
       - _store_dynamic(stat_name, value, peer_id, timestamp)
    3. Smart Branching: The store_... methods internally branch
       (if self._is_world: ...) to handle their specific roles:
       - Agent: Buffers for network, de-duplicates statics.
       - World: Updates hot cache, buffers for DB.
    4. Persistence (SQLite):
       - A single SQLite DB file ('world_stats.db') stores all data.
       - Static Stats: Saved in a 'static_stats' table (key-value).
       - Dynamic Stats: Saved in a 'dynamic_stats' table (time-series).
    5. Hot Cache (_stats):
       - Static Stats: Stored as their latest value.
       - Dynamic Stats: Stored in a sortedcontainers.SortedDict
         keyed by timestamp.
    """
    DEBUG = True  # Turns on/off extra logging

    # These are all the keys in the local _stats dictionary collected by the world
    CORE_WORLD_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
        'graph': (dict, {'nodes': {}, 'edges': {}})
    }
    CORE_WORLD_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
        'world_masters': (int, 0),
        'world_agents': (int, 0),
        'human_agents': (int, 0),
        'artificial_agents': (int, 0)
    }

    # These are all the keys in the local _stats dictionary collected by the agent
    CORE_AGENT_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
        'connected_peers': (list, []),
        'state': (str, None),
        'action': (str, None),
        'last_action': (str, None)
    }
    CORE_AGENT_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}

    # Then we have the stats collected on behalf of other peers (by the agent or the world)
    CORE_OUTER_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CORE_OUTER_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}

    # We also add class variables to extend these schemas
    CUSTOM_WORLD_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CUSTOM_WORLD_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CUSTOM_AGENT_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CUSTOM_AGENT_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CUSTOM_OUTER_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
    CUSTOM_OUTER_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}

    # Key for grouping stats in the _stats dictionary (both world and agent)
    GROUP_KEY = 'peer_stats'  # _BY_PEER stats are grouped under this key

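    # Customization sketch (illustrative): a subclass extends the core schemas
    # by overriding the CUSTOM_* class attributes with {name: (type, default)}
    # entries, which __init__ below merges as CORE_* | CUSTOM_*, e.g.
    #
    #   class GameStats(Stats):
    #       CUSTOM_WORLD_STATS_STATIC_SCHEMA = {'arena': (str, 'none')}
    #       CUSTOM_OUTER_STATS_DYNAMIC_SCHEMA = {'score': (float, 0.0)}
    #
    # so 'arena' becomes a world-level static value and 'score' a per-peer
    # time series.
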
    def __init__(self, is_world: bool,
                 db_path: str | None = None,  # only needed by the world
                 cache_window_hours: float = 2.0):  # only needed by the world

        self._is_world: bool = is_world
        self._max_seen_timestamp: int = 0

        # --- Integrate custom statistics ---
        self.WORLD_STATS_STATIC_SCHEMA = self.CORE_WORLD_STATS_STATIC_SCHEMA | self.CUSTOM_WORLD_STATS_STATIC_SCHEMA
        self.WORLD_STATS_DYNAMIC_SCHEMA = self.CORE_WORLD_STATS_DYNAMIC_SCHEMA | self.CUSTOM_WORLD_STATS_DYNAMIC_SCHEMA
        self.AGENT_STATS_STATIC_SCHEMA = self.CORE_AGENT_STATS_STATIC_SCHEMA | self.CUSTOM_AGENT_STATS_STATIC_SCHEMA
        self.AGENT_STATS_DYNAMIC_SCHEMA = self.CORE_AGENT_STATS_DYNAMIC_SCHEMA | self.CUSTOM_AGENT_STATS_DYNAMIC_SCHEMA
        self.OUTER_STATS_STATIC_SCHEMA = self.CORE_OUTER_STATS_STATIC_SCHEMA | self.CUSTOM_OUTER_STATS_STATIC_SCHEMA
        self.OUTER_STATS_DYNAMIC_SCHEMA = self.CORE_OUTER_STATS_DYNAMIC_SCHEMA | self.CUSTOM_OUTER_STATS_DYNAMIC_SCHEMA

        # --- Master key sets for easier lookup ---
        self._all_static_keys: Set[str] = set()
        self._all_dynamic_keys: Set[str] = set()
        self._all_keys: Set[str] = set()
        self._world_grouped_keys: Set[str] = set()
        self._world_ungrouped_keys: Set[str] = set()
        self._agent_grouped_keys: Set[str] = set()
        self._agent_ungrouped_keys: Set[str] = set()
        self._stat_types: Dict[str, type] = {}
        self._initialize_key_sets()

        if self._is_world:
            # --- World Configuration ---
            self._stats: Dict[str, Any] = {self.GROUP_KEY: {}}
            self._min_window_duration = timedelta(hours=cache_window_hours)
            self._db_path = db_path
            self._db_conn: Optional[sqlite3.Connection] = None
            # Buffer rows: (peer_id, stat_name, val_json, timestamp)
            self._static_db_buffer: List[Tuple[str, str, str, int]] = []
            # Buffer rows: (timestamp, peer_id, stat_name, val_num, val_str, val_json)
            self._dynamic_db_buffer: List[Tuple[float, str, str, Optional[float], Optional[str], str]] = []

            # --- World Initialization ---
            self._init_db()  # Connect and create tables
            self._initialize_cache_structure()  # Ensures all keys exist
            self._load_existing_stats()  # Hydrates _stats from disk
        else:
            # --- Agent Initialization (Simple Buffer) ---
            self._world_view: Dict[str, Any] = {}
            self._min_window_duration = timedelta(hours=3.0)  # cache for the _world_view
            self._update_batch: List[Dict[str, Any]] = []

    def _out(self, msg: str):
        """Prints a message (stand-in for the node's out function)."""
        print(msg)

    def _err(self, msg: str):
        """Prints an error message."""
        self._out('<ERROR> [Stats] ' + msg)

    def _deb(self, msg: str):
        """Prints a debug message if enabled."""
        if self.DEBUG:
            prefix = '[DEBUG ' + ('WORLD' if self._is_world else 'AGENT') + ']'
            self._out(f'{prefix} [Stats] {msg}')

    def _initialize_key_sets(self):
        """Populates the master key sets and the type map for later use."""
        # This map holds e.g.: {'graph': <class 'dict'>, 'state': <class 'str'>}
        self._stat_types: Dict[str, type] = {}
        self._stat_defaults: Dict[str, Any] = {}

        # 1. Combine all schema definitions
        all_static_schemas = {
            **self.WORLD_STATS_STATIC_SCHEMA,
            **self.AGENT_STATS_STATIC_SCHEMA,
            **self.OUTER_STATS_STATIC_SCHEMA
        }

        all_dynamic_schemas = {
            **self.WORLD_STATS_DYNAMIC_SCHEMA,
            **self.AGENT_STATS_DYNAMIC_SCHEMA,
            **self.OUTER_STATS_DYNAMIC_SCHEMA
        }

        # 2. Build the key sets AND the type map
        self._all_static_keys = set()
        for name, (type_obj, default) in all_static_schemas.items():
            self._all_static_keys.add(name)
            self._stat_types[name] = type_obj
            self._stat_defaults[name] = default

        self._all_dynamic_keys = set()
        for name, (type_obj, default) in all_dynamic_schemas.items():
            self._all_dynamic_keys.add(name)
            self._stat_types[name] = type_obj
            self._stat_defaults[name] = default

        self._all_keys = self._all_static_keys | self._all_dynamic_keys
        # World perspective
        self._world_ungrouped_keys = {name for name in self.WORLD_STATS_STATIC_SCHEMA | self.WORLD_STATS_DYNAMIC_SCHEMA}
        self._world_grouped_keys = {name for name in (self.AGENT_STATS_STATIC_SCHEMA | self.AGENT_STATS_DYNAMIC_SCHEMA |
                                                      self.OUTER_STATS_STATIC_SCHEMA | self.OUTER_STATS_DYNAMIC_SCHEMA)}
        # Agent perspective
        self._agent_ungrouped_keys = {name for name in self.AGENT_STATS_STATIC_SCHEMA | self.AGENT_STATS_DYNAMIC_SCHEMA}
        self._agent_grouped_keys = {name for name in self.OUTER_STATS_STATIC_SCHEMA | self.OUTER_STATS_DYNAMIC_SCHEMA}

    def _init_db(self):
        """(World-only) Connects to SQLite and creates tables if they don't exist."""
        if not self._is_world:
            return

        try:
            db_dir = os.path.dirname(self._db_path)
            if db_dir:
                os.makedirs(db_dir, exist_ok=True)

            self._db_conn = sqlite3.connect(self._db_path)
            self._db_conn.execute('PRAGMA journal_mode=WAL;')
            self._db_conn.execute('PRAGMA synchronous=NORMAL;')

            self._db_conn.executescript("""
                CREATE TABLE IF NOT EXISTS dynamic_stats (
                    timestamp INTEGER,
                    peer_id TEXT,
                    stat_name TEXT,
                    val_num REAL,
                    val_str TEXT,
                    val_json TEXT,
                    PRIMARY KEY (peer_id, stat_name, timestamp)
                );
                CREATE INDEX IF NOT EXISTS idx_stats_num ON dynamic_stats (stat_name, val_num);
                CREATE INDEX IF NOT EXISTS idx_stats_str ON dynamic_stats (stat_name, val_str);
                CREATE INDEX IF NOT EXISTS idx_stats_time ON dynamic_stats (timestamp);

                CREATE TABLE IF NOT EXISTS static_stats (
                    peer_id TEXT,
                    stat_name TEXT,
                    val_json TEXT,
                    timestamp INTEGER,
                    PRIMARY KEY (peer_id, stat_name)
                );
            """)
            self._db_conn.commit()
            self._deb(f'SQLite DB initialized at {self._db_path}')
        except Exception as e:
            self._err(f'CRITICAL: Failed to initialize SQLite DB: {e}')
            self._db_conn = None

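    # DB layout sketch (illustrative): after _init_db() the file can be
    # inspected with plain sqlite3, e.g.
    #
    #   conn = sqlite3.connect('world_stats.db')
    #   rows = conn.execute(
    #       "SELECT timestamp, peer_id, val_num FROM dynamic_stats "
    #       "WHERE stat_name = ? ORDER BY timestamp", ('world_agents',))
    #
    # val_num and val_str are typed fast paths used by value filters;
    # val_json always carries the serialized fallback.
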
    def _initialize_cache_structure(self):
        """(World-only) Ensures the _stats dict has the correct structure (SortedDicts/dicts)."""
        if not self._is_world:
            return

        self._stats.setdefault(self.GROUP_KEY, {})
        for key in self._world_ungrouped_keys:
            if key in self._all_dynamic_keys:
                self._stats.setdefault(key, SortedDict())
            else:
                self._stats.setdefault(key, self._stat_defaults[key])  # e.g., 'graph'

        # Grouped keys are initialized on-demand by _get_peer_stat_cache,
        # but we must ensure existing loaded peers have their structures
        for _, peer_data in self._stats[self.GROUP_KEY].items():
            for key in self._world_grouped_keys:
                if key in self._all_dynamic_keys:
                    # If loaded from DB, it's not a SortedDict yet.
                    # It will be populated by _hydrate_dynamic_caches_from_db
                    peer_data.setdefault(key, SortedDict())

    def _get_peer_stat_cache(self, peer_id: str, stat_name: str) -> Union[SortedDict, dict, None]:
        """(World-only) Helper to get or create the cache structure for a peer stat on demand."""
        if not self._is_world:
            return None

        peer_cache = self._stats[self.GROUP_KEY].setdefault(peer_id, {})
        if stat_name not in peer_cache:
            if stat_name in self._all_dynamic_keys:
                peer_cache[stat_name] = SortedDict()
            elif stat_name in self._all_static_keys:
                peer_cache[stat_name] = self._stat_defaults[stat_name]

        return peer_cache.get(stat_name)

    # --- SHARED API ---
    def store_stat(self, stat_name: str, value: Any, peer_id: str, timestamp: int):
        """Unified API to store a stat. It dispatches to private methods that
        differentiate between static and dynamic stats.
        """
        if stat_name not in self._all_keys:
            self._err(f'Stat "{stat_name}" is not defined.')
            return

        # Disambiguate between static and dynamic stats
        if stat_name in self._all_static_keys:
            self._store_static(stat_name, value, peer_id, timestamp)
        else:
            self._store_dynamic(stat_name, value, peer_id, timestamp)

    def _validate_type(self, stat_name, value):
        if stat_name not in self._stat_types:
            raise KeyError(f'Statistic "{stat_name}" is not defined in the _stat_types schema.')

        schema_type = self._stat_types.get(stat_name)  # no str default: that would fail silently
        if isinstance(value, schema_type):
            return value
        else:
            try:
                # Try to safely cast it
                return schema_type(value)
            except (ValueError, TypeError, AttributeError):
                self._err(f'Type mismatch for {stat_name}: '
                          f'Expected {schema_type} but got {type(value)}. '
                          f'Value: "{value}". Storing as string.')
                return str(value)  # Fallback

    def _make_json_serializable(self, value: Any) -> Any:
        """Recursively converts non-serializable types (like sets) to lists."""
        if isinstance(value, set):
            return list(value)
        if isinstance(value, dict):
            # Recurse on values
            return {k: self._make_json_serializable(v) for k, v in value.items()}
        if isinstance(value, (list, tuple)):
            # Recurse on items
            return [self._make_json_serializable(item) for item in value]

        # Add other types here if needed (e.g., numpy arrays -> lists)

        # Base case: value is fine as-is
        return value

    def _store_static(self, stat_name: str, value: Any, peer_id: str, timestamp: int):
        """
        Stores a static (single-value) stat.
        - On Agent: Adds to the network send buffer.
        - On World: Updates the hot cache and adds to the DB buffer.
        """
        value = self._validate_type(stat_name, value)
        if self._is_world:
            # --- WORLD LOGIC ---
            if timestamp > self._max_seen_timestamp:
                self._max_seen_timestamp = timestamp
            # 1. Update hot cache
            if stat_name in self._world_ungrouped_keys:
                self._stats[stat_name] = value
            else:
                peer_cache = self._stats[self.GROUP_KEY].setdefault(peer_id, {})
                peer_cache[stat_name] = value

            # 2. Add to DB buffer (peer_id, stat_name, val_json, timestamp)
            serializable_value = self._make_json_serializable(value)
            self._static_db_buffer.append((peer_id, stat_name, json.dumps(serializable_value), timestamp))
        else:
            # --- AGENT LOGIC ---
            # 1. De-duplicate: remove any previous value for this peer/stat
            self._update_batch = [u for u in self._update_batch
                                  if not (u['peer_id'] == peer_id and u['stat_name'] == stat_name)]

            # 2. Add to batch
            self._update_batch.append({
                'peer_id': peer_id,
                'stat_name': stat_name,
                'timestamp': timestamp,
                'value': value
            })

    def _store_dynamic(self, stat_name: str, value: Any, peer_id: str, timestamp: float):
        """
        Stores a dynamic (time-series) stat.
        - On Agent: Adds to the network send buffer.
        - On World: Uses the provided timestamp, updates the hot cache, adds to the DB buffer.
        """
        value = self._validate_type(stat_name, value)
        if self._is_world:
            # --- WORLD LOGIC ---
            if timestamp > self._max_seen_timestamp:
                self._max_seen_timestamp = timestamp
            cache = None
            # 1. Update hot cache
            if stat_name in self._world_ungrouped_keys:
                cache = self._stats.get(stat_name)
            else:
                cache = self._get_peer_stat_cache(peer_id, stat_name)

            # Verify we have a valid SortedDict to work with
            if isinstance(cache, SortedDict):
                # Insert the new value and prune outdated ones
                cache[timestamp] = value
                cutoff = timestamp - int(self._min_window_duration.total_seconds() * 1000)
                while cache and cache.peekitem(0)[0] < cutoff:
                    cache.popitem(0)

            # 2. Add to DB buffer depending on the type (value was already cast to the schema type)
            val_num = value if isinstance(value, (int, float)) and not isinstance(value, bool) else None
            val_str = value if isinstance(value, str) else None
            # Always create the json-serialized form as a fallback
            serializable_value = self._make_json_serializable(value)
            val_json = json.dumps(serializable_value)
            self._dynamic_db_buffer.append((timestamp, peer_id, stat_name, val_num, val_str, val_json))
        else:
            # --- AGENT LOGIC ---
            self._update_batch.append({
                'peer_id': peer_id,
                'stat_name': stat_name,
                'timestamp': timestamp,
                'value': value
            })

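    # Storage-path sketch (illustrative ms timestamps and peer ids): the same
    # call takes different routes depending on the role chosen at construction.
    #
    #   world = Stats(is_world=True, db_path='world_stats.db')
    #   world.store_stat('world_agents', 4, peer_id='', timestamp=1_700_000_000_000)
    #   # -> SortedDict hot cache (window-pruned) + _dynamic_db_buffer
    #
    #   agent = Stats(is_world=False)
    #   agent.store_stat('state', 'idle', peer_id='peer-a', timestamp=1_700_000_000_000)
    #   # -> de-duplicated entry in _update_batch, later drained by
    #   #    get_payload_for_world()
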
    # --- AGENT API ---
    def update_view(self, view_data: Dict[str, Any], overwrite: bool = False):
        """
        (Agent-side) Updates the local view with data received from the World.
        This is 'dumb' storage: we don't parse it, we just store it for plotting.

        The view has this structure:
        {
            "world": { "stat_name": value_or_timeseries },
            "peers": { "peer_id": { "stat_name": value_or_timeseries } }
        }
        Dynamic stats arrive as a list of lists: [[timestamp, value], ...] for efficient JSON/Plotly usage.

        Args:
            view_data: The snapshot received from the world.
            overwrite: If True, replaces the entire current view instead of merging.
        """
        if self._is_world:
            return

        # Initialize empty structure if needed
        if not self._world_view or overwrite:
            self._world_view = {'world': {}, 'peers': {}}

        def _update_max_ts(ts):
            """Helper to update the max seen timestamp from a time-series."""
            # Dynamic stats come as [[ts, val], [ts, val]...]
            if isinstance(ts, list) and len(ts) > 0 and isinstance(ts[0], list):
                # The last item is the newest in a sorted time-series
                last_ts = ts[-1][0]
                if last_ts > self._max_seen_timestamp:
                    self._max_seen_timestamp = int(last_ts)

        def _merge_dict(target: Dict, source: Dict):
            """
            Helper to merge source into target with special handling for dynamic stats.
            Copies a source dict { "stat_name": value_or_timeseries } into target.
            """
            for stat_name, val_or_ts in source.items():
                if stat_name in self._all_dynamic_keys:
                    _update_max_ts(val_or_ts)
                    if stat_name not in target:
                        target[stat_name] = []
                    target[stat_name].extend(val_or_ts)
                else:
                    target[stat_name] = val_or_ts

        # 1. Merge World (Ungrouped) Stats
        if 'world' in view_data:
            _merge_dict(self._world_view.setdefault('world', {}), view_data['world'])

        # 2. Merge Peer (Grouped) Stats
        if 'peers' in view_data:
            target_peers = self._world_view.setdefault('peers', {})
            for peer_id, peer_data in view_data['peers'].items():
                target_peer = target_peers.setdefault(peer_id, {})
                _merge_dict(target_peer, peer_data)

    def _get_last_val_from_view(self, view: Dict, name: str) -> str:
        """Helper to extract a scalar value safely from the view snapshot.
        View structure:
        {
            "world": { "stat_name": value_or_timeseries },
            "peers": { "peer_id": { "stat_name": value_or_timeseries } }
        }
        For dynamic stats the value is a list of lists: [[timestamp, value], ...]"""
        val = None
        # Try World (Ungrouped)
        if name in view.get('world', {}):
            data = view['world'][name]
            # If dynamic (list of lists), get the last value. If static, get the value.
            if isinstance(data, list) and len(data) > 0 and isinstance(data[0], list):
                val = data[-1][1]
            else:
                val = data

        if isinstance(val, float):
            return f"{val:.3f}"
        return str(val) if val is not None else "-"

    def get_payload_for_world(self) -> List[Dict[str, Any]]:
        """(Agent-only) Gathers, returns, and clears all stats to be sent to the world."""
        if self._is_world:
            return []

        # self._update_agent_static()  # Ensure static stats are fresh in the batch
        payload = self._update_batch
        self._update_batch = []  # Clear after getting
        return payload

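    # Sync-loop sketch (illustrative; the transport between agent and world is
    # outside this class):
    #
    #   batch = agent.get_payload_for_world()          # agent side: drain buffer
    #   for u in batch:                                # world side: ingest
    #       world.store_stat(u['stat_name'], u['value'],
    #                        u['peer_id'], u['timestamp'])
    #   agent.update_view(world.get_view(), overwrite=True)   # snapshot back
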
    # --- WORLD API ---
    def get_view(self, since_timestamp: int = 0) -> Dict[str, Any]:
        """
        (World-side) Returns a clean, JSON-serializable dictionary of the CURRENT in-memory cache.
        Used for initial handshake or lightweight polling.

        Structure returned:
        {
            "world": { "stat_name": value_or_timeseries },
            "peers": { "peer_id": { "stat_name": value_or_timeseries } }
        }
        For Dynamic stats, returns a list of lists: [[timestamp, value], ...] for efficient JSON/Plotly usage.
        """
        if not self._is_world:
            return {}
        snapshot = {'world': {}, 'peers': {}}

        # 1. Process World (Ungrouped) Stats
        for stat_name in self._world_ungrouped_keys:
            val = self._stats.get(stat_name)
            if val is not None:
                snapshot['world'][stat_name] = self._serialize_value(val, since_timestamp)

        # 2. Process Peer (Grouped) Stats
        peer_groups = self._stats.get(self.GROUP_KEY, {})

        for pid in peer_groups.keys():
            peer_data = {}
            for stat_name, val in peer_groups[pid].items():
                serialized = self._serialize_value(val, since_timestamp)
                # Optimize: Don't send empty lists if polling
                if isinstance(serialized, list) and len(serialized) == 0:
                    continue
                peer_data[stat_name] = serialized

            if peer_data:
                snapshot['peers'][pid] = peer_data

        return snapshot

    def _serialize_value(self, value: Any, since_timestamp: int) -> Any:
        """Helper to convert SortedDicts to [[t, v], ...] and clean other types."""
        if isinstance(value, SortedDict):
            idx = value.bisect_left(since_timestamp)
            sliced_items = value.items()[idx:]
            # Convert to list of [timestamp, value] for Plotly readiness
            return [[k, self._make_json_serializable(v)] for k, v in sliced_items]
        else:
            # Static value: return as is (assuming it's serializable)
            return self._make_json_serializable(value)

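    # Polling sketch (illustrative): clients can keep a cursor and request only
    # newer samples; dynamic stats come back as [[timestamp, value], ...] and
    # bisect_left keeps every sample with key >= since_timestamp.
    #
    #   cursor = 0
    #   view = world.get_view(since_timestamp=cursor)
    #   series = view['world'].get('world_agents', [])
    #   if series:
    #       cursor = series[-1][0] + 1   # next poll skips what we have
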
    def get_last_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
        """Public API to get the most recent value of any stat, whether static or dynamic.
        - If peer_id is None, it searches for an ungrouped (world-level) stat.
        - If peer_id is provided, it searches for a grouped stat for that peer.
        Returns the last value, or None if not found.
        """
        if stat_name in self._all_static_keys:
            return self._get_last_static_value(stat_name, peer_id)
        elif stat_name in self._all_dynamic_keys:
            return self._get_last_dynamic_value(stat_name, peer_id)
        else:
            self._err(f'get_last_value: Unknown stat_name "{stat_name}"')
            return None

    def _get_last_dynamic_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
        """
        Returns the most recent value of a dynamic stat from the hot cache.
        - If peer_id is None, it searches for an ungrouped (world-level) stat.
        - If peer_id is provided, it searches for a grouped stat for that peer.
        Returns None if the stat is not found or has no entries.
        """
        if not self._is_world:
            return None  # Agents don't have this cache

        cache: Optional[SortedDict] = None

        if peer_id is None:
            # --- This is an ungrouped (world) stat ---
            if stat_name in self._world_ungrouped_keys:
                cache = self._stats.get(stat_name)
        else:
            # --- This is a grouped (peer) stat ---
            if stat_name in self._world_grouped_keys:
                peer_cache = self._stats.get(self.GROUP_KEY, {}).get(peer_id)
                if peer_cache:
                    cache = peer_cache.get(stat_name)

        # Check if we found a valid, non-empty SortedDict cache
        if isinstance(cache, SortedDict) and cache:
            return cache.peekitem(-1)[1]  # Return the last value

        return None  # Stat not found or no values

    def _get_last_static_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
        """
        Returns the current value of a static stat from the hot cache.
        - If peer_id is None, it searches for an ungrouped (world-level) stat.
        - If peer_id is provided, it searches for a grouped stat for that peer.
        Returns None if the stat is not found.
        """
        if not self._is_world:
            return None  # Agents don't have this cache

        value: Any | None = None
        if peer_id is None:
            # --- This is an ungrouped (world) stat ---
            if stat_name in self._world_ungrouped_keys:
                value = self._stats.get(stat_name)
        else:
            # --- This is a grouped (peer) stat ---
            if stat_name in self._world_grouped_keys:
                peer_cache = self._stats.get(self.GROUP_KEY, {}).get(peer_id)
                if peer_cache:
                    value = peer_cache.get(stat_name)
        return value

    # --- WORLD API (PERSISTENCE) ---
    def save_to_disk(self):
        """(World-only) Saves the static snapshot and dynamic buffer to SQLite."""
        if not self._is_world or not self._db_conn:
            return
        self._deb('Saving world stats to DB...')
        try:
            self._save_static_to_db()
            self._save_dynamic_to_db()
            self._prune_cache()
            self._prune_db()

            self._db_conn.commit()
            self._deb('Save complete.')
        except Exception as e:
            self._err(f'CRITICAL: save_to_disk failed: {e}')
            if self._db_conn:
                self._db_conn.rollback()

    def _save_static_to_db(self):
        """(World-only) Writes the buffered static stats to the DB (upsert)."""
        if not self._static_db_buffer or not self._db_conn:
            return

        self._db_conn.executemany("""
            INSERT INTO static_stats (peer_id, stat_name, val_json, timestamp)
            VALUES (?, ?, ?, ?) ON CONFLICT(peer_id, stat_name) DO UPDATE
            SET val_json = excluded.val_json, timestamp = excluded.timestamp
        """, self._static_db_buffer)

        self._static_db_buffer = []  # Clear buffer

    def _save_dynamic_to_db(self):
        """(World-only) Writes the in-memory dynamic buffer to SQLite."""
        if not self._dynamic_db_buffer or not self._db_conn:
            return

        self._db_conn.executemany("""
            INSERT OR IGNORE INTO dynamic_stats
            (timestamp, peer_id, stat_name, val_num, val_str, val_json)
            VALUES (?, ?, ?, ?, ?, ?)
        """, self._dynamic_db_buffer)

        self._deb(f'Wrote {len(self._dynamic_db_buffer)} dynamic stats to SQLite.')
        self._dynamic_db_buffer = []  # Clear buffer

    def _prune_db(self):
        """(World-only) Add here the logic to prune the DB (e.g., when a peer leaves the world)."""
        if not self._db_conn:
            return
        pass

    def _prune_cache(self):
        """
        (World-only) Periodic maintenance to clean up 'stale' peers.

        The 'prune-on-write' logic in _store_dynamic handles active peers efficiently.
        This method handles peers that have disconnected or stopped sending data,
        preventing their old data from haunting the RAM forever.
        """
        if not self._is_world:
            return

        # Calculate cutoff based on latest timestamp
        window_ms = int(self._min_window_duration.total_seconds() * 1000)
        cutoff = self._max_seen_timestamp - window_ms

        # 1. Prune Ungrouped Stats (World Stats)
        for key in self._world_ungrouped_keys:
            cache = self._stats.get(key)
            if isinstance(cache, SortedDict):  # only true for dynamic stats
                # Remove items older than cutoff
                while cache and cache.peekitem(0)[0] < cutoff:
                    cache.popitem(0)

        # 2. Prune Grouped Stats (Peer Stats)
        peer_groups = self._stats.get(self.GROUP_KEY, {})

        # We might need to remove empty peers entirely, so we collect keys to delete
        peers_to_remove = []

        for peer_id, peer_cache in peer_groups.items():
            all_stats_were_empty = True
            for _, stat_data in peer_cache.items():
                if isinstance(stat_data, SortedDict):  # only true for dynamic stats
                    # Prune the time series
                    while stat_data and stat_data.peekitem(0)[0] < cutoff:
                        stat_data.popitem(0)
                    # After pruning, check if the stat dict is empty
                    all_stats_were_empty &= len(stat_data) == 0

            if all_stats_were_empty:
                peers_to_remove.append(peer_id)

        # Remove completely dead peers from memory
        for peer_id in peers_to_remove:
            del peer_groups[peer_id]
            self._deb(f'Pruned stale peer {peer_id} from cache.')

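    # Persistence-cycle sketch (illustrative cadence; now_ms stands for a
    # caller-supplied epoch-milliseconds clock):
    #
    #   world.store_stat('world_agents', 4, peer_id='', timestamp=now_ms)
    #   ...
    #   world.save_to_disk()   # batched executemany + commit, then cache/DB pruning
    #
    # The WAL mode set in _init_db() keeps readers unblocked while these
    # batched writes happen.
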
    # --- WORLD API (LOADING) ---
    def _load_existing_stats(self):
        """(World-only) Loads existing stats from disk to hydrate the cache."""
        if not self._is_world or not self._db_conn:
            return
        self._deb('Loading existing stats from disk...')
        self._load_static_from_db()
        self._hydrate_dynamic_caches_from_db()
        self._deb('Finished loading stats.')

    def _load_static_from_db(self):
        """(World-only) Loads the static_stats table into the _stats hot cache."""
        if not self._db_conn:
            return
        try:
            cursor = self._db_conn.execute("SELECT peer_id, stat_name, val_json, timestamp FROM static_stats")
            for peer_id, stat_name, value_json, timestamp in cursor:
                value = json.loads(value_json)
                if stat_name == 'graph':
                    # Handle both legacy format (just edges) and new format (nodes+edges) safely
                    if isinstance(value, dict) and 'edges' in value:
                        # Convert the edge lists back to sets
                        value['edges'] = {k: set(v) for k, v in value['edges'].items()}
                        # Ensure nodes dict exists
                        if 'nodes' not in value:
                            value['nodes'] = {}
                    else:
                        # Convert entire dict to sets (as it was before)
                        edges_set = {k: set(v) for k, v in value.items()}
                        # Migrate to new structure on the fly
                        value = {'nodes': {}, 'edges': edges_set}
                self._store_static(stat_name, value, peer_id, timestamp)

            # Clear the buffer generated by loading
            self._static_db_buffer = []
            self._deb("Loaded static stats snapshot from DB.")
        except Exception as e:
            self._err(f"Failed to load static stats from DB: {e}")

    def _hydrate_dynamic_caches_from_db(self):
        """(World-only) Queries SQLite for 'hot' data to fill dynamic caches."""
        if not self._db_conn:
            return
        try:
            max_ts_cursor = self._db_conn.execute('SELECT MAX(timestamp) FROM dynamic_stats')
            max_ts_result = max_ts_cursor.fetchone()

            if max_ts_result is None or max_ts_result[0] is None:
                self._deb('No dynamic stats found in DB. Hydration skipped.')
                return  # No data in DB, nothing to load
            self._max_seen_timestamp = int(max_ts_result[0])
            cutoff_t_ms = self._max_seen_timestamp - int(self._min_window_duration.total_seconds() * 1000)

            cursor = self._db_conn.execute("""
                SELECT timestamp, peer_id, stat_name, val_num, val_str, val_json
                FROM dynamic_stats
                WHERE timestamp > ?
                ORDER BY timestamp ASC
            """, (cutoff_t_ms,))

            count = 0
            for ts, peer_id, stat_name, _, _, val_json in cursor:
                ts = int(ts)
                # We just need val_json, which will be cast to the exact type by _validate_type
                value = json.loads(val_json)
                self._store_dynamic(stat_name, value, peer_id, ts)
                count += 1

            # Clear the buffer generated by hydrating
            self._dynamic_db_buffer = []

            if count > 0:
                self._deb(f'Hydrated cache with {count} recent dynamic stats.')
            else:
                self._deb('No recent dynamic stats found in DB.')

        except Exception as e:
            self._err(f'Failed to hydrate dynamic caches from DB: {e}')

    # --- WORLD API (QUERYING) ---
    def query_history(self,
                      stat_names: List[str] = [],
                      peer_ids: List[str] = [],
                      time_range: Union[Tuple[int, int], int, None] = None,
                      value_range: Tuple[float, float] | None = None,
                      limit: int | None = None) -> Dict[str, Any]:
        """
        (World-only) Queries the SQLite DB for specific stats, optionally filtering by VALUE.
        Returns the same structure as get_view(), allowing the agent to ingest it seamlessly.
        Automatically flushes the current memory buffer to the DB before querying
        to ensure "read-your-writes" consistency.

        Args:
            value_range: (min, max) - Only returns rows where val_num is within range.
        """
        if not self._is_world or not self._db_conn:
            return {}

        # Flush the cached updates to the DB before querying
        self._save_static_to_db()
        self._save_dynamic_to_db()
        self._db_conn.commit()

        snapshot = {'world': {}, 'peers': {}}

        # A. Query the static stats
        query_static = ['SELECT peer_id, stat_name, val_json FROM static_stats WHERE 1=1']
        params_static = []

        if stat_names:
            query_static.append(f'AND stat_name IN ({",".join(["?"] * len(stat_names))})')
            params_static.extend(stat_names)
        if peer_ids:
            query_static.append(f'AND peer_id IN ({",".join(["?"] * len(peer_ids))})')
            params_static.extend(peer_ids)

        try:
            cursor = self._db_conn.execute(' '.join(query_static), params_static)
            for pid, sname, vjson in cursor:
                val = self._validate_type(sname, json.loads(vjson))
                # Handle special Graph reconstruction if needed (legacy format support)
                if sname == 'graph':
                    # Handle both legacy format (just edges) and new format (nodes+edges) safely
                    if isinstance(val, dict) and 'edges' in val:
                        # Convert the edge lists back to sets
                        val['edges'] = {k: set(v) for k, v in val['edges'].items()}
                        # Ensure nodes dict exists
                        if 'nodes' not in val:
                            val['nodes'] = {}
                    else:
                        # Convert entire dict to sets (as it was before)
                        edges_set = {k: set(v) for k, v in val.items()}
                        # Migrate to new structure on the fly
                        val = {'nodes': {}, 'edges': edges_set}

                # Static stats format: value (direct)
                if pid in (None, 'None', ''):
                    snapshot['world'][sname] = val
                else:
                    snapshot['peers'].setdefault(pid, {})[sname] = val
        except Exception as e:
            self._err(f'Query history (static) failed: {e}')

        # B. Query the dynamic stats
        query_dyn = ['SELECT timestamp, peer_id, stat_name, val_num, val_str, val_json FROM dynamic_stats WHERE 1=1']
        params_dyn = []

        # 1. Stat Names
        if stat_names:
            query_dyn.append(f'AND stat_name IN ({",".join(["?"] * len(stat_names))})')
            params_dyn.extend(stat_names)

        # 2. Peer IDs
        if peer_ids:
            query_dyn.append(f'AND peer_id IN ({",".join(["?"] * len(peer_ids))})')
            params_dyn.extend(peer_ids)

        # 3. Time Range
        if time_range is not None:
            if isinstance(time_range, int):
                # Treated as "Since X"
                query_dyn.append('AND timestamp >= ?')
                params_dyn.append(time_range)
            elif isinstance(time_range, (tuple, list)) and len(time_range) == 2:
                # Treated as "Between X and Y"
                query_dyn.append('AND timestamp >= ? AND timestamp <= ?')
                params_dyn.extend([time_range[0], time_range[1]])

        # 4. Value Range
        if value_range:
            query_dyn.append('AND val_num IS NOT NULL AND val_num >= ? AND val_num <= ?')
            params_dyn.extend([value_range[0], value_range[1]])

        query_dyn.append('ORDER BY timestamp ASC')

        # Add the limit
        query_dyn.append('LIMIT 5000' if limit is None else f'LIMIT {limit}')

        try:
            cursor = self._db_conn.execute(' '.join(query_dyn), params_dyn)
            for ts, pid, sname, vnum, vstr, vjson in cursor:
                ts = int(ts)
                val = vnum if vnum is not None else (vstr if vstr is not None else json.loads(vjson))
                val = self._validate_type(sname, val)

                # Structure construction
                if pid in (None, 'None', ''):  # Handling world stats
                    target_ts = snapshot['world'].setdefault(sname, [])
                else:  # Handling peer stats
                    target_ts = snapshot['peers'].setdefault(pid, {}).setdefault(sname, [])
                target_ts.append([ts, val])

        except Exception as e:
            self._err(f'Query history (dynamic) failed: {e}')

        return snapshot

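    # Query sketch (illustrative range values; start_ms is an arbitrary epoch-ms
    # bound): fetch an hour of a numeric series filtered by value. The result
    # has the same shape as get_view(), so update_view() can ingest it directly.
    #
    #   hist = world.query_history(stat_names=['world_agents'],
    #                              time_range=(start_ms, start_ms + 3_600_000),
    #                              value_range=(0.0, 100.0),
    #                              limit=1000)
    #   series = hist['world'].get('world_agents', [])  # [[ts, val], ...]
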
    def _aggregate_time_indexed_stats_over_peers(self, stats: dict) -> tuple[dict, dict]:
        """(World-only) Aggregates time-indexed peer stats (mean/std) from CACHE."""
        mean_dict = {}
        std_dict = {}
        peer_stats = stats.get(self.GROUP_KEY, {})

        number_stats = {name for name, type_obj in self._stat_types.items()
                        if type_obj in (int, float)}

        for stat_name in number_stats:
            peer_series = []
            for peer_data in peer_stats.values():
                if stat_name in peer_data:
                    tv_dict: SortedDict = peer_data[stat_name]
                    if tv_dict:
                        peer_series.append(tv_dict)

            if not peer_series:
                continue

            all_times = sorted({t for series in peer_series for t in series.keys()})
            if not all_times:
                continue

            aligned_values = []
            for series in peer_series:
                if not series:
                    continue
                filled = []
                series_times = series.keys()
                series_vals = series.values()

                last_val = series_vals[0]
                series_idx = 0

                for t in all_times:
                    while series_idx < len(series_times) and series_times[series_idx] <= t:
                        last_val = series_vals[series_idx]
                        series_idx += 1
                    filled.append(last_val)
                aligned_values.append(filled)

            if not aligned_values:
                continue

            mean_dict[stat_name] = {}
            std_dict[stat_name] = {}
            for i, t in enumerate(all_times):
                vals = [peer_vals[i] for peer_vals in aligned_values if peer_vals[i] is not None]
                if vals:
                    mean_val = sum(vals) / float(len(vals))
                    var = sum((x - mean_val) ** 2 for x in vals) / len(vals)
                    std_val = math.sqrt(var)
                else:
                    mean_val = None
                    std_val = None

                mean_dict[stat_name][t] = mean_val
                std_dict[stat_name][t] = std_val

        return mean_dict, std_dict
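    # Worked example (illustrative): the forward-fill alignment above projects each
    # peer's SortedDict series onto the union of all timestamps, carrying the last
    # observed value forward (and the first value backward before a series starts):
    #   peer A: {0: 1.0, 10: 3.0} -> aligned over [0, 5, 10]: [1.0, 1.0, 3.0]
    #   peer B: {5: 2.0}          -> aligned over [0, 5, 10]: [2.0, 2.0, 2.0]
    # At t=10: mean = (3.0 + 2.0) / 2 = 2.5 and
    # std = sqrt(((3.0 - 2.5)**2 + (2.0 - 2.5)**2) / 2) = 0.5.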
    def shutdown(self):
        """Call this explicitly when your application is closing."""
        if self._is_world and self._db_conn:
            self._deb('Shutdown: Saving final stats...')
            try:
                self.save_to_disk()
            except Exception as e:
                self._err(f'Shutdown save failed: {e}')
            self._db_conn.close()
            self._db_conn = None
            self._deb('SQLite connection closed.')

    def __del__(self):
        if self._is_world and self._db_conn:
            try:
                # Final save on exit, if any buffer
                self.save_to_disk()
            except Exception:
                pass  # Don't raise in destructor
            self._db_conn.close()
            self._db_conn = None
            self._deb('SQLite connection closed.')
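    # Usage note (illustrative): destructor order at interpreter exit is undefined,
    # so relying on __del__ alone is fragile. Calling shutdown() explicitly, e.g.
    # via an atexit hook, is the safer path ('stats' is a hypothetical instance):
    #   import atexit
    #   atexit.register(stats.shutdown)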
    # --- PLOTTING INTERFACE ---
    def plot(self, since_timestamp: int = 0) -> str | None:
        """
        Default dashboard implementation.
        Visualizes Core Stats: Topology, Agent Counts, States, and Actions.
        """
        # 1. Get data view
        view = self.get_view(since_timestamp) if self._is_world else self._world_view
        if not view:
            return None

        dash = DefaultBaseDash("World Overview")

        # --- Panel 1: Network Topology (Top Left) ---
        p1 = UIPlot(title="World Topology")
        self._populate_graph(p1, view, "graph")
        p1.set_layout_opt('xaxis', {'visible': False})
        p1.set_layout_opt('yaxis', {'visible': False})
        dash.add_panel(p1, "top_left")

        # --- Panel 2: Agent Counts Over Time (Top Right) ---
        p2 = UIPlot(title="World Agents History")
        metrics = [
            ("world_masters", "World Masters", THEME['peers'][0]),
            ("world_agents", "World Agents", THEME['peers'][1]),
            ("human_agents", "Human Agents", THEME['peers'][2]),
            ("artificial_agents", "Artificial Agents", THEME['peers'][3]),
        ]
        for stat_key, label, color in metrics:
            self._populate_time_series(
                panel=p2,
                view=view,
                stat_name=stat_key,
                color_override=color,
                title_override=label
            )
        p2.set_layout_opt('xaxis', {'title': None, 'visible': False})
        p2.set_layout_opt('yaxis', {'title': None})
        dash.add_panel(p2, "top_right")

        # --- Panel 3: State Distribution (Bar, Bottom Left) ---
        p3 = UIPlot(title="State Distribution")
        self._populate_distribution(p3, view, "state")
        p3.set_layout_opt("xaxis", {"title": None})
        dash.add_panel(p3, "bot_left")

        # --- Panel 4: Action Distribution (Bar, Bottom Right) ---
        p4 = UIPlot(title="Last Action Distribution")
        self._populate_distribution(p4, view, "last_action")
        p4.set_layout_opt("xaxis", {"title": None})
        dash.add_panel(p4, "bot_right")

        return dash.to_json()
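    # Example (illustrative, assuming a world-side instance named 'stats' and that
    # DefaultBaseDash.to_json() returns a JSON string):
    #   dashboard_json = stats.plot(since_timestamp=0)
    #   if dashboard_json is not None:
    #       payload = json.loads(dashboard_json)  # dict ready for the front end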
    def _populate_time_series(self, panel: UIPlot, view: Dict, stat_name: str,
                              peer_ids: List[str] | None = None, color_override: str | None = None,
                              show_legend: bool = True, title_override: str | None = None):
        """Extracts [[t, v], ...] lists and adds lines to the panel. Supports custom titles and colors."""
        def get_xy(raw):
            if isinstance(raw, list) and raw and isinstance(raw[0], list):
                return [r[0] for r in raw], [r[1] for r in raw]
            return [], []

        # World
        w_data = view.get('world', {}).get(stat_name)
        if w_data:
            x, y = get_xy(w_data)
            if x:
                label = title_override if title_override else "World"
                color = color_override if color_override else THEME['main']
                panel.add_line(x, y, name=label, color=color,
                               legend_group=label, show_legend=show_legend)

        # Peers
        peers_dict = view.get('peers', {})
        targets = peer_ids if peer_ids else peers_dict.keys()
        for pid in targets:
            p_data = peers_dict.get(pid, {}).get(stat_name)
            if p_data:
                x, y = get_xy(p_data)
                if x:
                    c = color_override or self._get_consistent_color(pid)
                    panel.add_line(x, y, name=f'{pid[-6:]}', color=c,
                                   legend_group=pid, show_legend=show_legend)
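    # Data shape handled above: each stat is a list of [timestamp, value] pairs,
    # e.g. view['world']['world_agents'] == [[1700000000, 3], [1700000060, 4]],
    # which get_xy() splits into x = [1700000000, 1700000060] and y = [3, 4].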
    def _populate_indicator(self, panel: UIPlot, view: Dict, stat_name: str, peer_ids: List[str] | None = None):
        """Extracts a scalar value and adds an indicator."""
        val = None
        if 'world' in view and stat_name in view['world']:
            val = view['world'][stat_name]
        elif 'peers' in view:
            # Just grab the first available peer's value if none is specified
            targets = peer_ids if peer_ids else list(view['peers'].keys())
            if targets:
                val = view['peers'][targets[0]].get(stat_name)

        panel.add_indicator(val, title=stat_name)
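    # Note: indicators expect a scalar. If peer_ids is omitted, the first peer
    # found in view['peers'] supplies the value, so the insertion order of that
    # dict determines which peer is displayed.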
    def _populate_table(self, panel: UIPlot, view: Dict, stat_name: str, peer_ids: List[str] | None = None):
        """Extracts data for a table."""
        headers = ['Entity', 'Value']
        col_ent = []
        col_val = []

        # World
        if 'world' in view and stat_name in view['world']:
            col_ent.append('World')
            col_val.append(str(view['world'][stat_name]))

        # Peers
        peers_dict = view.get('peers', {})
        targets = peer_ids if peer_ids else peers_dict.keys()
        for pid in targets:
            val = peers_dict.get(pid, {}).get(stat_name)
            if val is not None:
                col_ent.append(pid[-6:])
                col_val.append(str(val))  # Simple stringification

        panel.add_table(headers, [col_ent, col_val])
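    # Example (illustrative, with hypothetical peer ids): a world value plus two
    # peers produces column-oriented data for add_table:
    #   headers = ['Entity', 'Value']
    #   columns = [['World', 'abc123', 'def456'], ['42', 'IDLE', 'RUNNING']]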
    def _populate_graph(self, panel: UIPlot, view: Dict, stat_name: str):
        """Calculates a layout and adds graph traces to the panel."""

        # 1. Fetch data
        raw_graph = view.get('world', {}).get(stat_name, {})
        if not raw_graph:
            return

        # Handle both the legacy format (just edges) and the new format (nodes + edges) safely
        if 'edges' in raw_graph and 'nodes' in raw_graph:
            edges_data = raw_graph['edges']
            nodes_data = raw_graph['nodes']
        else:
            # Fallback for simple graphs without node details
            edges_data = raw_graph
            nodes_data = {}

        # 2. Calculate layout (circular)
        # Edge keys drive positioning, but nodes_data may contain nodes with no
        # edges yet, so the two sets are unioned.
        all_pids = set(edges_data.keys()).union(*edges_data.values()) | set(nodes_data.keys())
        pids = list(all_pids)
        pos = {}
        if pids:
            radius = 10
            angle_step = (2 * math.pi) / len(pids)
            for i, pid in enumerate(pids):
                pos[pid] = (
                    radius * math.cos(i * angle_step),
                    radius * math.sin(i * angle_step)
                )

        # 3. Create the edge trace
        edge_x, edge_y = [], []
        for source, targets in edges_data.items():
            if source not in pos:
                continue
            x0, y0 = pos[source]
            # targets might be a list (from JSON) or a set (from the local cache)
            target_iter = targets if isinstance(targets, (list, set)) else []
            for target in target_iter:
                if target in pos:
                    x1, y1 = pos[target]
                    edge_x.extend([x0, x1, None])
                    edge_y.extend([y0, y1, None])

        panel.add_trace({
            'type': 'scatter', 'mode': 'lines',
            'x': edge_x, 'y': edge_y,
            'line': {'width': 0.5, 'color': THEME['edge']},
            'hoverinfo': 'none', 'showlegend': False
        })

        # 4. Create the node trace
        node_x, node_y, node_text, node_color, node_labels = [], [], [], [], []
        for pid in pids:
            if pid not in pos:
                continue
            x, y = pos[pid]
            node_x.append(x)
            node_y.append(y)

            # Node labels (last 6 characters of the peer id)
            node_labels.append(pid[-6:])
            # Build hover text
            if nodes_data:
                hover_text = ''
                for key, val in nodes_data.get(pid, {}).items():
                    hover_text += f'{key}: {val}<br>'
            else:
                hover_text = f'Peer ID: {pid}'
            node_text.append(hover_text)

            # Determine the color; the mapping can be customized based on NodeProfile types
            node_color.append(self._get_consistent_color(pid))

        panel.add_trace({
            'type': 'scatter',
            'mode': 'markers+text',
            'x': node_x, 'y': node_y,
            'text': node_labels,
            'hovertext': node_text,
            'hoverinfo': 'text',
            'textposition': 'top center',
            'showlegend': False,
            'marker': {
                'color': node_color,
                'size': 12,
                'line': {'width': 2, 'color': THEME['node_border']}
            }
        })

        # 5. Layout overrides
        panel.set_layout_opt('xaxis', {'visible': False})
        panel.set_layout_opt('yaxis', {'visible': False})
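    # Input shapes handled above (illustrative values):
    #   new format:    {'nodes': {'peerA': {'role': 'agent'}}, 'edges': {'peerA': ['peerB']}}
    #   legacy format: {'peerA': ['peerB'], 'peerB': []}
    # Nodes are placed on a circle of radius 10; edge segments are separated by
    # None sentinels, the usual Plotly idiom for drawing disjoint line segments
    # in a single trace.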
    def _populate_distribution(self, panel: UIPlot, view: Dict, stat_name: str):
        """
        Aggregates peer values into a frequency count (bar chart),
        e.g., {"IDLE": 3, "RUNNING": 5}.
        """
        peers_dict = view.get('peers', {})
        counts = {}

        # 1. Aggregate (missing or None values are counted as 'Unknown')
        for data in peers_dict.values():
            raw_val = data.get(stat_name)
            val_str = str(raw_val) if raw_val is not None else 'Unknown'
            counts[val_str] = counts.get(val_str, 0) + 1

        # 2. Sort for consistency (alphabetically by state name)
        sorted_keys = sorted(counts.keys())
        sorted_vals = [counts[k] for k in sorted_keys]
        colors = [self._get_consistent_color(k) for k in sorted_keys]

        # 3. Plot
        panel.add_bar(xs=sorted_keys, ys=sorted_vals, names=sorted_vals, colors=colors)
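    # Example (illustrative): three peers whose 'state' values are 'IDLE',
    # 'RUNNING', 'RUNNING' yield counts == {'IDLE': 1, 'RUNNING': 2}, plotted as
    # alphabetically sorted bars: xs = ['IDLE', 'RUNNING'], ys = [1, 2].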
    def _get_consistent_color(self, unique_str: str) -> str:
        """Deterministic color generation based on a string hash."""
        if not unique_str:
            return '#ffffff'
        idx = zlib.adler32(str(unique_str).encode()) % len(THEME['peers'])
        return THEME['peers'][idx]
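    # Example (illustrative): zlib.adler32 is stable across runs and platforms,
    # so a given id always lands in the same palette slot. With a hypothetical
    # 4-color THEME['peers']:
    #   zlib.adler32(b'peer-id') % 4  # deterministic index in range(4)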