unaiverse 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. unaiverse/__init__.py +19 -0
  2. unaiverse/agent.py +2226 -0
  3. unaiverse/agent_basics.py +2389 -0
  4. unaiverse/clock.py +234 -0
  5. unaiverse/dataprops.py +1282 -0
  6. unaiverse/hsm.py +2471 -0
  7. unaiverse/modules/__init__.py +18 -0
  8. unaiverse/modules/cnu/__init__.py +17 -0
  9. unaiverse/modules/cnu/cnus.py +536 -0
  10. unaiverse/modules/cnu/layers.py +261 -0
  11. unaiverse/modules/cnu/psi.py +60 -0
  12. unaiverse/modules/hl/__init__.py +15 -0
  13. unaiverse/modules/hl/hl_utils.py +411 -0
  14. unaiverse/modules/networks.py +1509 -0
  15. unaiverse/modules/utils.py +748 -0
  16. unaiverse/networking/__init__.py +16 -0
  17. unaiverse/networking/node/__init__.py +18 -0
  18. unaiverse/networking/node/connpool.py +1332 -0
  19. unaiverse/networking/node/node.py +2752 -0
  20. unaiverse/networking/node/profile.py +446 -0
  21. unaiverse/networking/node/tokens.py +79 -0
  22. unaiverse/networking/p2p/__init__.py +188 -0
  23. unaiverse/networking/p2p/go.mod +127 -0
  24. unaiverse/networking/p2p/go.sum +548 -0
  25. unaiverse/networking/p2p/golibp2p.py +18 -0
  26. unaiverse/networking/p2p/golibp2p.pyi +136 -0
  27. unaiverse/networking/p2p/lib.go +2765 -0
  28. unaiverse/networking/p2p/lib_types.py +311 -0
  29. unaiverse/networking/p2p/message_pb2.py +50 -0
  30. unaiverse/networking/p2p/messages.py +360 -0
  31. unaiverse/networking/p2p/mylogger.py +78 -0
  32. unaiverse/networking/p2p/p2p.py +900 -0
  33. unaiverse/networking/p2p/proto-go/message.pb.go +846 -0
  34. unaiverse/stats.py +1506 -0
  35. unaiverse/streamlib/__init__.py +15 -0
  36. unaiverse/streamlib/streamlib.py +210 -0
  37. unaiverse/streams.py +804 -0
  38. unaiverse/utils/__init__.py +16 -0
  39. unaiverse/utils/lone_wolf.json +28 -0
  40. unaiverse/utils/misc.py +441 -0
  41. unaiverse/utils/sandbox.py +292 -0
  42. unaiverse/world.py +384 -0
  43. unaiverse-0.1.12.dist-info/METADATA +366 -0
  44. unaiverse-0.1.12.dist-info/RECORD +47 -0
  45. unaiverse-0.1.12.dist-info/WHEEL +5 -0
  46. unaiverse-0.1.12.dist-info/licenses/LICENSE +177 -0
  47. unaiverse-0.1.12.dist-info/top_level.txt +1 -0
unaiverse/stats.py ADDED
@@ -0,0 +1,1506 @@
1
+ """
2
+ █████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
3
+ ░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
4
+ ░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
5
+ ░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
6
+ ░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
7
+ ░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
8
+ ░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
9
+ ░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
10
+ A Collectionless AI Project (https://collectionless.ai)
11
+ Registration/Login: https://unaiverse.io
12
+ Code Repositories: https://github.com/collectionlessai/
13
+ Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
14
+ """
15
+ import os
16
+ import json
17
+ import math
18
+ import zlib
19
+ import sqlite3
20
+ from datetime import timedelta
21
+ from sortedcontainers import SortedDict
22
+ from typing import Any, Set, List, Dict, Tuple, Optional, Union
23
+
24
+
25
+ # A fixed palette for consistent coloring
26
+ THEME = {
27
+ # Main structural colors (Dark Mode optimized)
28
+ 'bg_paper': 'rgba(0,0,0,0)', # Transparent to blend with container
29
+ 'bg_plot': 'rgba(0,0,0,0)', # Transparent plot area
30
+ 'text_main': '#7e7e7e', # Primary text color
31
+ 'text_light': '#7e7e7e', # Secondary/Axis text color
32
+
33
+ # UI Element specific
34
+ 'grid': '#333333', # Grid lines
35
+ 'edge': '#666666', # Graph edges
36
+ 'node_border': '#ffffff', # Node borders
37
+
38
+ # Main Accents
39
+ 'main': '#636EFA', # Primary accent (Blue)
40
+ 'main_light': '#aab1ff', # Lighter shade of primary
41
+
42
+ # Table Styling
43
+ 'table': {
44
+ 'header_bg': '#2c2c2c',
45
+ 'header_txt': '#ffffff',
46
+ 'cell_bg': '#1a1a1a',
47
+ 'cell_txt': '#dddddd',
48
+ 'line': '#444444'
49
+ },
50
+
51
+ # Data Categorical Palette (Plotly default set)
52
+ 'peers': [
53
+ '#636EFA', '#EF553B', '#00CC96', '#AB63FA', '#FFA15A',
54
+ '#19D3F3', '#FF6692', '#B6E880', '#FF97FF', '#FECB52'
55
+ ]
56
+ }
57
+
58
+
59
+ class UIPlot:
60
+ """
61
+ A Python abstraction for a UI Panel (specifically a Plotly chart).
62
+ Allows users to build plots using Python methods instead of dicts/JSON.
63
+ """
64
+ def __init__(self, title: str = '', height: int = 400):
65
+ self._data: List[Dict[str, Any]] = []
66
+
67
+ # Define the standard axis style for a "boxed" look
68
+ axis_style = {
69
+ 'gridcolor': THEME['grid'],
70
+ 'gridwidth': 1,
71
+ 'griddash': 'dot',
72
+ 'color': THEME['text_light'],
73
+ 'showline': True, # Draw the axis line
74
+ 'mirror': True, # Mirror it on top/right (creates the box)
75
+ 'linewidth': 2, # Width of the box border
76
+ 'linecolor': THEME['grid'], # Color of the box border
77
+ 'zeroline': False, # Prevents double-thick borderlines at 0
78
+ 'layer': 'below traces' # Key fix: puts grid BEHIND the box border
79
+ }
80
+
81
+ self._layout: Dict[str, Any] = {
82
+ 'title': title,
83
+ 'height': height,
84
+ 'xaxis': {**axis_style, 'title': 'Time'},
85
+ 'yaxis': {**axis_style, 'title': 'Value'},
86
+ 'margin': {'l': 50, 'r': 50, 'b': 50, 't': 50},
87
+ # Default dark theme friendly styling
88
+ 'paper_bgcolor': THEME['bg_paper'],
89
+ 'plot_bgcolor': THEME['bg_plot'],
90
+ 'font': {'color': THEME['text_main']}
91
+ }
92
+
93
+ def add_line(self, x: List[Any], y: List[Any], name: str, color: str = THEME['main'],
94
+ legend_group: str = None, show_legend: bool = True):
95
+ """Adds a standard time-series line."""
96
+ trace = {
97
+ 'x': x, 'y': y,
98
+ 'name': name,
99
+ 'type': 'scatter',
100
+ 'mode': 'lines+markers',
101
+ 'line': {'color': color},
102
+ "legendgroup": legend_group,
103
+ "showlegend": show_legend
104
+ }
105
+ self._data.append(trace)
106
+
107
+ def add_area(self, x: List[Any], y: List[Any], name: str, color: str = THEME['main']):
108
+ """Adds a filled area chart."""
109
+ trace = {
110
+ 'x': x, 'y': y, 'name': name,
111
+ 'type': 'scatter', 'fill': 'tozeroy',
112
+ 'line': {'color': color}
113
+ }
114
+ self._data.append(trace)
115
+
116
+ def add_indicator(self, value: Any, title: str):
117
+ """Adds a big number indicator."""
118
+ self._data.append({
119
+ 'type': 'indicator',
120
+ 'mode': 'number',
121
+ 'value': value,
122
+ 'title': {'text': title}
123
+ })
124
+ self._layout['height'] = 300 # Indicators usually need less height
125
+
126
+ def add_table(self, headers: List[str] | None, columns: List[List[Any]]):
127
+ """Adds a data table."""
128
+ num_columns = len(columns) if columns else 0
129
+ if headers:
130
+ header_cfg = {
131
+ 'values': headers,
132
+ 'fill': {'color': THEME['table']['header_bg']},
133
+ 'font': {'color': THEME['table']['header_txt']},
134
+ 'line': {'color': THEME['table']['line']}
135
+ }
136
+ else:
137
+ header_cfg = {
138
+ 'values': [''] * num_columns,
139
+ 'height': 0, # Hide it
140
+ 'fill': {'color': 'rgba(0,0,0,0)'}, # Transparent just in case
141
+ 'line': {'width': 0} # No border
142
+ }
143
+
144
+ trace = {
145
+ 'type': 'table',
146
+ 'header': header_cfg,
147
+ 'cells': {
148
+ 'values': columns,
149
+ 'fill': {'color': THEME['table']['cell_bg']},
150
+ 'font': {'color': THEME['table']['cell_txt']},
151
+ 'line': {'color': THEME['table']['line']}
152
+ }
153
+ }
154
+ self._data.append(trace)
155
+
156
+ def add_bar(self, xs: List[Any], ys: List[Any], names: List[str], colors: Union[str, List[str]] = THEME['main']):
157
+ """Adds a bar chart trace."""
158
+ trace = {
159
+ 'type': 'bar',
160
+ 'x': xs,
161
+ 'y': ys,
162
+ 'marker': {'color': colors},
163
+ 'showlegend': False,
164
+ 'text': names,
165
+ 'textposition': 'auto'
166
+ }
167
+ self._data.append(trace)
168
+ self._layout['yaxis'].update({'title': 'Value'})
169
+
170
+ def add_trace(self, trace: Dict[str, Any]):
171
+ """Generic method to add any raw Plotly trace."""
172
+ self._data.append(trace)
173
+
174
+ def set_y_range(self, min_val: float, max_val: float):
175
+ """Force Y-axis limits."""
176
+ self._layout.setdefault('yaxis', {})['range'] = [min_val, max_val]
177
+
178
+ def set_layout_opt(self, key: str, value: Any):
179
+ """Generic setter for advanced layout options."""
180
+ if isinstance(value, dict) and key in self._layout:
181
+ self._layout[key].update(value)
182
+ else:
183
+ self._layout[key] = value
184
+
185
+ def set_legend(self, orientation: str = 'v', x: float = 1.0, y: float = 1.0,
186
+ xanchor: str = 'left', yanchor: str = 'top'):
187
+ """
188
+ Configures the legend position and orientation.
189
+ orientation: 'v' (vertical) or 'h' (horizontal)
190
+ """
191
+ self._layout['showlegend'] = True
192
+ self._layout['legend'] = {
193
+ 'orientation': orientation,
194
+ 'x': x,
195
+ 'y': y,
196
+ 'xanchor': xanchor,
197
+ 'yanchor': yanchor,
198
+ 'bgcolor': THEME['bg_paper'],
199
+ 'bordercolor': THEME['edge'],
200
+ 'borderwidth': 1
201
+ }
202
+
203
+ def to_json(self) -> str:
204
+ """Serializes the panel to the format the Frontend expects."""
205
+ return json.dumps({'data': self._data, 'layout': self._layout})
206
+
207
+
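A minimal sketch of how UIPlot can be driven, assuming the module is importable as unaiverse.stats (the sample data and names are illustrative):

    from unaiverse.stats import UIPlot, THEME

    # Build a small time-series panel and serialize it for the frontend.
    panel = UIPlot(title='Loss over time', height=300)
    panel.add_line(x=[0, 1, 2, 3], y=[0.9, 0.5, 0.3, 0.2],
                   name='peer-a1b2c3', color=THEME['peers'][0])
    panel.set_y_range(0.0, 1.0)
    panel.set_legend(orientation='h', x=0.0, y=1.1)
    payload = panel.to_json()  # '{"data": [...], "layout": {...}}' ready for Plotly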
208
+ class DefaultBaseDash:
209
+ """
210
+ A generic 2x2 Grid Dashboard for the base Stats class.
211
+ Uses a transparent paper background (THEME['bg_paper']) so it blends with the container, matching the WStats styling.
212
+ """
213
+ def __init__(self, title="Network Overview"):
214
+ self.traces = []
215
+ self.layout = {
216
+ "title": title,
217
+ "height": 800,
218
+ "template": "plotly_dark",
219
+ "paper_bgcolor": THEME['bg_paper'],
220
+ "grid": {"rows": 2, "columns": 2, "pattern": "independent"},
221
+
222
+ # --- ROW 1 ---
223
+ # Top Left (Graph)
224
+ "xaxis1": {"domain": [0, 0.48]},
225
+ "yaxis1": {"domain": [0.56, 1]},
226
+ # "xaxis1": {"domain": [0, 0.48], "visible": False},
227
+ # "yaxis1": {"domain": [0.58, 1], "visible": False},
228
+ # Top Right (Timeseries)
229
+ "xaxis2": {"domain": [0.52, 1]},
230
+ "yaxis2": {"domain": [0.56, 1]},
231
+
232
+ # --- ROW 2 ---
233
+ # Bot Left (Bar)
234
+ "xaxis3": {"domain": [0, 0.48]},
235
+ "yaxis3": {"domain": [0, 0.44]},
236
+ # Bot Right (Bar)
237
+ "xaxis4": {"domain": [0.52, 1]},
238
+ "yaxis4": {"domain": [0, 0.44]},
239
+
240
+ "showlegend": True,
241
+ "legend": {
242
+ "orientation": "h",
243
+ "y": 0.55,
244
+ "x": 0.55,
245
+ "xanchor": "left",
246
+ "yanchor": "top",
247
+ "bgcolor": "rgba(0,0,0,0)",
248
+ "font": {"color": THEME['text_main']}
249
+ },
250
+ "margin": {"l": 50, "r": 50, "t": 80, "b": 50}
251
+ }
252
+ self._map = {
253
+ "top_left": ("xaxis1", "yaxis1"),
254
+ "top_right": ("xaxis2", "yaxis2"),
255
+ "bot_left": ("xaxis3", "yaxis3"),
256
+ "bot_right": ("xaxis4", "yaxis4")
257
+ }
258
+
259
+ def add_panel(self, ui_plot: UIPlot, position: str):
260
+ if position not in self._map:
261
+ return
262
+
263
+ xa, ya = self._map[position]
264
+ x_dom = self.layout[xa]["domain"]
265
+ y_dom = self.layout[ya]["domain"]
266
+
267
+ # Merge Traces
268
+ for t in ui_plot._data:
269
+ nt = t.copy()
270
+ if nt.get("type") == "table":
271
+ nt["domain"] = {"x": x_dom, "y": y_dom}
272
+ else:
273
+ # Cartesian plots use axis references
274
+ nt["xaxis"] = xa.replace("xaxis", "x")
275
+ nt["yaxis"] = ya.replace("yaxis", "y")
276
+ self.traces.append(nt)
277
+
278
+ # Merge Layout
279
+ src_l = ui_plot._layout
280
+ dest_x = self.layout.setdefault(xa, {})
281
+ dest_y = self.layout.setdefault(ya, {})
282
+ if "xaxis" in src_l:
283
+ dest_x.update({k: v for k, v in src_l["xaxis"].items() if k != "domain"})
284
+ if "yaxis" in src_l:
285
+ dest_y.update({k: v for k, v in src_l["yaxis"].items() if k != "domain"})
286
+
287
+ # Add Title via Annotation
288
+ if src_l.get("title"):
289
+ self.layout.setdefault("annotations", []).append({
290
+ "text": f"<b>{src_l['title']}</b>",
291
+ "x": (x_dom[0] + x_dom[1]) / 2,
292
+ "y": y_dom[1] + 0.02,
293
+ "xref": "paper", "yref": "paper",
294
+ "showarrow": False, "xanchor": "center", "yanchor": "bottom",
295
+ "font": {"size": 14, "color": THEME['text_main']}
296
+ })
297
+
298
+ def to_json(self):
299
+ return json.dumps({"data": self.traces, "layout": self.layout})
300
+
301
+
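A short sketch of composing several UIPlot panels into the 2x2 grid above (data and titles are illustrative):

    from unaiverse.stats import UIPlot, DefaultBaseDash

    dash = DefaultBaseDash(title='Network Overview')
    p = UIPlot(title='World Agents')
    p.add_line(x=[1000, 2000, 3000], y=[2, 3, 5], name='World')
    dash.add_panel(p, 'top_right')  # valid positions: top_left, top_right, bot_left, bot_right
    fig_json = dash.to_json()       # one Plotly figure with merged traces, axes and annotations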
302
+ class Stats:
303
+ """
304
+ Encapsulates all logic for managing, storing, and persisting agent/world
305
+ statistics. This class provides a clean API to the rest of the application
306
+ and hides the implementation details of data structures and persistence.
307
+
308
+ Design Principles:
309
+ 1. Typed Schema: Class-level definitions (e.g., CORE_..._SCHEMA) are
310
+ dicts mapping "stat_name" to a (type, default_value) tuple.
311
+ 2. Unified API: All stat updates are handled by a single public method:
312
+ - store_stat(stat_name, value, peer_id, timestamp)
313
+ which dispatches internally to _store_static(...) or _store_dynamic(...).
314
+ 3. Smart Branching: The store_... methods internally branch
315
+ (if self.is_world: ...) to handle their specific roles:
316
+ - Agent: Buffers for network, de-duplicates statics.
317
+ - World: Updates hot cache, buffers for DB.
318
+ 4. Persistence (SQLite):
319
+ - A single SQLite DB file ('world_stats.db') stores all data.
320
+ - Static Stats: Saved in a 'static_stats' table (key-value).
321
+ - Dynamic Stats: Saved in a 'dynamic_stats' table (time-series).
322
+ 5. Hot Cache (_stats):
323
+ - Static Stats: Stored as their latest value.
324
+ - Dynamic Stats: Stored in a sortedcontainers.SortedDict
325
+ keyed by timestamp.
326
+ """
327
+ DEBUG = True # Turns on/off extra logging
328
+
329
+ # These are all the keys in the local _stats dictionary collected by the world
330
+ CORE_WORLD_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
331
+ 'graph': (dict, {'nodes': {}, 'edges': {}})
332
+ }
333
+ CORE_WORLD_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
334
+ 'world_masters': (int, 0),
335
+ 'world_agents': (int, 0),
336
+ 'human_agents': (int, 0),
337
+ 'artificial_agents': (int, 0)
338
+ }
339
+
340
+ # These are all the keys in the local _stats dictionary collected by the agent
341
+ CORE_AGENT_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {
342
+ 'connected_peers': (list, []),
343
+ 'state': (str, None),
344
+ 'action': (str, None),
345
+ 'last_action': (str, None)
346
+ }
347
+ CORE_AGENT_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
348
+
349
+ # Then we have the stats collected on behalf of other peers (by the agent or the world)
350
+ CORE_OUTER_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
351
+ CORE_OUTER_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
352
+
353
+ # We also add class variables to extend these sets
354
+ CUSTOM_WORLD_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
355
+ CUSTOM_WORLD_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
356
+ CUSTOM_AGENT_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
357
+ CUSTOM_AGENT_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
358
+ CUSTOM_OUTER_STATS_STATIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
359
+ CUSTOM_OUTER_STATS_DYNAMIC_SCHEMA: Dict[str, Tuple[type, Any]] = {}
360
+
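A minimal sketch of how the CUSTOM_* class variables above can be extended in a subclass (the stat names are illustrative):

    from unaiverse.stats import Stats

    class MyWorldStats(Stats):
        # Each schema entry maps stat_name -> (type, default_value)
        CUSTOM_WORLD_STATS_DYNAMIC_SCHEMA = {'avg_reward': (float, 0.0)}
        CUSTOM_AGENT_STATS_STATIC_SCHEMA = {'model_name': (str, None)}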
361
+ # Key for grouping stats in the _stats dictionary (both world and agent)
362
+ GROUP_KEY = 'peer_stats' # _BY_PEER stats are grouped under this key
363
+
364
+ def __init__(self, is_world: bool,
365
+ db_path: str | None = None, # only needed by the world
366
+ cache_window_hours: float = 2.0): # only needed by the world
367
+
368
+ self.is_world: bool = is_world
369
+ self.max_seen_timestamp: int = 0
370
+
371
+ # --- Integrate custom statistics ---
372
+ self.WORLD_STATS_STATIC_SCHEMA = self.CORE_WORLD_STATS_STATIC_SCHEMA | self.CUSTOM_WORLD_STATS_STATIC_SCHEMA
373
+ self.WORLD_STATS_DYNAMIC_SCHEMA = self.CORE_WORLD_STATS_DYNAMIC_SCHEMA | self.CUSTOM_WORLD_STATS_DYNAMIC_SCHEMA
374
+ self.AGENT_STATS_STATIC_SCHEMA = self.CORE_AGENT_STATS_STATIC_SCHEMA | self.CUSTOM_AGENT_STATS_STATIC_SCHEMA
375
+ self.AGENT_STATS_DYNAMIC_SCHEMA = self.CORE_AGENT_STATS_DYNAMIC_SCHEMA | self.CUSTOM_AGENT_STATS_DYNAMIC_SCHEMA
376
+ self.OUTER_STATS_STATIC_SCHEMA = self.CORE_OUTER_STATS_STATIC_SCHEMA | self.CUSTOM_OUTER_STATS_STATIC_SCHEMA
377
+ self.OUTER_STATS_DYNAMIC_SCHEMA = self.CORE_OUTER_STATS_DYNAMIC_SCHEMA | self.CUSTOM_OUTER_STATS_DYNAMIC_SCHEMA
378
+
379
+ # --- Master key sets for easier lookup ---
380
+ self.all_static_keys: Set[str] = set()
381
+ self.all_dynamic_keys: Set[str] = set()
382
+ self.all_keys: Set[str] = set()
383
+ self.world_grouped_keys: Set[str] = set()
384
+ self.world_ungrouped_keys: Set[str] = set()
385
+ self.agent_grouped_keys: Set[str] = set()
386
+ self.agent_ungrouped_keys: Set[str] = set()
387
+ self.stat_types: Dict[str, type] = {}
388
+ self._stat_defaults: Dict[str, Any] = {}
389
+ self._initialize_key_sets()
390
+
391
+ if self.is_world:
392
+ # --- World Configuration ---
393
+ self._stats: Dict[str, Any] = {self.GROUP_KEY: {}}
394
+ self.min_window_duration = timedelta(hours=cache_window_hours)
395
+ self.db_path = db_path
396
+ self._db_conn: Optional[sqlite3.Connection] = None
397
+ self._static_db_buffer: List[Tuple[str, str, str, int]] = []  # (peer_id, stat_name, val_json, timestamp)
398
+ self._dynamic_db_buffer: List[Tuple[int, str, str, Optional[float], Optional[str], str]] = []  # (timestamp, peer_id, stat_name, val_num, val_str, val_json)
399
+
400
+ # --- World Initialization ---
401
+ self._init_db() # Connect and create tables
402
+ self._initialize_cache_structure() # Ensures all keys exist
403
+ self._load_existing_stats() # Hydrates _stats from disk
404
+ else:
405
+ # --- Agent Initialization (Simple Buffer) ---
406
+ self._world_view: Dict[str, Any] = {}
407
+ self.min_window_duration = timedelta(hours=3.0) # cache for the _world_view
408
+ self._update_batch: List[Dict[str, Any]] = []
409
+
410
+ def _out(self, msg: str):
411
+ """Prints a message using the node's out function."""
412
+ print(msg)
413
+
414
+ def _err(self, msg: str):
415
+ """Prints an error message."""
416
+ self._out('<ERROR> [Stats] ' + msg)
417
+
418
+ def _deb(self, msg: str):
419
+ """Prints a debug message if enabled."""
420
+ if self.DEBUG:
421
+ prefix = '[DEBUG ' + ('WORLD' if self.is_world else 'AGENT') + ']'
422
+ self._out(f'{prefix} [Stats] {msg}')
423
+
424
+ def _initialize_key_sets(self):
425
+ """Populates the master key sets and the type for later use."""
426
+ # Combine all schema definitions
427
+ all_static_schemas = {
428
+ **self.WORLD_STATS_STATIC_SCHEMA,
429
+ **self.AGENT_STATS_STATIC_SCHEMA,
430
+ **self.OUTER_STATS_STATIC_SCHEMA
431
+ }
432
+
433
+ all_dynamic_schemas = {
434
+ **self.WORLD_STATS_DYNAMIC_SCHEMA,
435
+ **self.AGENT_STATS_DYNAMIC_SCHEMA,
436
+ **self.OUTER_STATS_DYNAMIC_SCHEMA
437
+ }
438
+
439
+ # Build the key sets AND the type map
440
+ self.all_static_keys = set()
441
+ for name, (type_obj, default) in all_static_schemas.items():
442
+ self.all_static_keys.add(name)
443
+ self.stat_types[name] = type_obj
444
+ self._stat_defaults[name] = default
445
+
446
+ self.all_dynamic_keys = set()
447
+ for name, (type_obj, default) in all_dynamic_schemas.items():
448
+ self.all_dynamic_keys.add(name)
449
+ self.stat_types[name] = type_obj
450
+ self._stat_defaults[name] = default
451
+
452
+ self.all_keys = self.all_static_keys | self.all_dynamic_keys
453
+ # World perspective
454
+ self.world_ungrouped_keys = {name for name in self.WORLD_STATS_STATIC_SCHEMA | self.WORLD_STATS_DYNAMIC_SCHEMA}
455
+ self.world_grouped_keys = {name for name in (self.AGENT_STATS_STATIC_SCHEMA | self.AGENT_STATS_DYNAMIC_SCHEMA |
456
+ self.OUTER_STATS_STATIC_SCHEMA | self.OUTER_STATS_DYNAMIC_SCHEMA)}
457
+ self.agent_ungrouped_keys = {name for name in self.AGENT_STATS_STATIC_SCHEMA | self.AGENT_STATS_DYNAMIC_SCHEMA}
458
+ self.agent_grouped_keys = {name for name in self.OUTER_STATS_STATIC_SCHEMA | self.OUTER_STATS_DYNAMIC_SCHEMA}
459
+
460
+ def _init_db(self):
461
+ """(World-only) Connects to SQLite and creates tables if they don't exist."""
462
+ if not self.is_world:
463
+ return
464
+
465
+ try:
466
+ db_dir = os.path.dirname(self.db_path)
467
+ if db_dir:
468
+ os.makedirs(db_dir, exist_ok=True)
469
+
470
+ self._db_conn = sqlite3.connect(self.db_path)
471
+ self._db_conn.execute('PRAGMA journal_mode=WAL;')
472
+ self._db_conn.execute('PRAGMA synchronous=NORMAL;')
473
+
474
+ self._db_conn.executescript("""
475
+ CREATE TABLE IF NOT EXISTS dynamic_stats (
476
+ timestamp INTEGER,
477
+ peer_id TEXT,
478
+ stat_name TEXT,
479
+ val_num REAL,
480
+ val_str TEXT,
481
+ val_json TEXT,
482
+ PRIMARY KEY (peer_id, stat_name, timestamp)
483
+ );
484
+ CREATE INDEX IF NOT EXISTS idx_stats_num ON dynamic_stats (stat_name, val_num);
485
+ CREATE INDEX IF NOT EXISTS idx_stats_str ON dynamic_stats (stat_name, val_str);
486
+ CREATE INDEX IF NOT EXISTS idx_stats_time ON dynamic_stats (timestamp);
487
+
488
+ CREATE TABLE IF NOT EXISTS static_stats (
489
+ peer_id TEXT,
490
+ stat_name TEXT,
491
+ val_json TEXT,
492
+ timestamp INTEGER,
493
+ PRIMARY KEY (peer_id, stat_name)
494
+ );
495
+ """)
496
+ self._db_conn.commit()
497
+ self._deb(f'SQLite DB initialized at {self.db_path}')
498
+ except Exception as e:
499
+ self._err(f'CRITICAL: Failed to initialize SQLite DB: {e}')
500
+ self._db_conn = None
501
+
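For reference, a hedged sketch of reading the resulting tables directly with sqlite3 (the path 'world_stats.db' and the stat name are illustrative):

    import sqlite3

    conn = sqlite3.connect('world_stats.db')
    # dynamic_stats columns: timestamp, peer_id, stat_name, val_num, val_str, val_json
    rows = conn.execute(
        "SELECT timestamp, val_num FROM dynamic_stats "
        "WHERE stat_name = ? ORDER BY timestamp ASC",
        ('world_agents',)
    ).fetchall()
    conn.close()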
502
+ def _initialize_cache_structure(self):
503
+ """(World-only) Ensures the _stats dict has the correct structure (SortedDicts/dicts)."""
504
+ if not self.is_world:
505
+ return
506
+
507
+ self._stats.setdefault(self.GROUP_KEY, {})
508
+ for key in self.world_ungrouped_keys:
509
+ if key in self.all_dynamic_keys:
510
+ self._stats.setdefault(key, SortedDict())
511
+ else:
512
+ self._stats.setdefault(key, self._stat_defaults[key]) # e.g., 'graph'
513
+
514
+ # Grouped keys are initialized on-demand by _get_peer_stat_cache
515
+ # But we must ensure existing loaded peers have their structures
516
+ for _, peer_data in self._stats[self.GROUP_KEY].items():
517
+ for key in self.world_grouped_keys:
518
+ if key in self.all_dynamic_keys:
519
+ # If loaded from DB, it's not a SortedDict yet.
520
+ # It will be populated by _hydrate_dynamic_caches_from_db
521
+ peer_data.setdefault(key, SortedDict())
522
+
523
+ def _get_peer_stat_cache(self, peer_id: str, stat_name: str) -> Union[SortedDict, dict, None]:
524
+ """(World-only) Helper to get or create the cache structure for a peer stat on demand."""
525
+ if not self.is_world:
526
+ return
527
+
528
+ peer_cache = self._stats[self.GROUP_KEY].setdefault(peer_id, {})
529
+ if stat_name not in peer_cache:
530
+ if stat_name in self.all_dynamic_keys:
531
+ peer_cache[stat_name] = SortedDict()
532
+ elif stat_name in self.all_static_keys:
533
+ peer_cache[stat_name] = self._stat_defaults[stat_name]
534
+
535
+ return peer_cache.get(stat_name)
536
+
537
+ # --- SHARED API ---
538
+ def store_stat(self, stat_name: str, value: Any, peer_id: str, timestamp: int):
539
+ """Unified API to store a stat. It then calls private methods to
540
+ differentiate between static and dynamic stats.
541
+ """
542
+ if stat_name not in self.all_keys:
543
+ self._err(f'Stat "{stat_name}" is not defined.')
544
+ return
545
+ # disambiguate between static and dynamic stats
546
+ if stat_name in self.all_static_keys:
547
+ self._store_static(stat_name, value, peer_id, timestamp)
548
+ else:
549
+ self._store_dynamic(stat_name, value, peer_id, timestamp)
550
+
551
+ def _validate_type(self, stat_name, value):
552
+ if stat_name not in self.stat_types:
553
+ raise KeyError(f'Statistic "{stat_name}" is not defined in the stat_types schema.')
554
+
555
+ schema_type = self.stat_types.get(stat_name)  # no str fallback here: that would hide schema errors silently
556
+ if isinstance(value, schema_type):
557
+ return value
558
+ else:
559
+ try:
560
+ # Try to safely cast it
561
+ return schema_type(value)
562
+ except (ValueError, TypeError, AttributeError):
563
+ self._err(f'Type mismatch for {stat_name}: '
564
+ f'Expected {schema_type} but got {type(value)}. '
565
+ f'Value: "{value}". Storing as string.')
566
+ return str(value) # Fallback
567
+
568
+ def _make_json_serializable(self, value: Any) -> Any:
569
+ """Recursively converts non-serializable types (like sets) to lists."""
570
+ if isinstance(value, set):
571
+ return list(value)
572
+ if isinstance(value, dict):
573
+ # Recurse on values
574
+ return {k: self._make_json_serializable(v) for k, v in value.items()}
575
+ if isinstance(value, (list, tuple)):
576
+ # Recurse on items
577
+ return [self._make_json_serializable(item) for item in value]
578
+
579
+ # Add other types here if needed (e.g., numpy arrays -> lists)
580
+
581
+ # Base case: value is fine as-is
582
+ return value
583
+
584
+ def _store_static(self, stat_name: str, value: Any, peer_id: str, timestamp: int):
585
+ """
586
+ Unified API to store a static (single-value) stat.
587
+ - On Agent: Adds to the network send buffer.
588
+ - On World: Updates the hot cache and adds to the DB buffer.
589
+ """
590
+ value = self._validate_type(stat_name, value)
591
+ if self.is_world:
592
+ # --- WORLD LOGIC ---
593
+ if timestamp > self.max_seen_timestamp:
594
+ self.max_seen_timestamp = timestamp
595
+ # 1. Update hot cache
596
+ if stat_name in self.world_ungrouped_keys:
597
+ self._stats[stat_name] = value
598
+ else:
599
+ peer_cache = self._stats[self.GROUP_KEY].setdefault(peer_id, {})
600
+ peer_cache[stat_name] = value
601
+
602
+ # 2. Add to DB buffer (peer_id, stat_name, val_json, timestamp)
603
+ serializable_value = self._make_json_serializable(value)
604
+ self._static_db_buffer.append((peer_id, stat_name, json.dumps(serializable_value), timestamp))
605
+ else:
606
+ # --- AGENT LOGIC ---
607
+ # De-duplicate logic: remove previous static value for this peer/stat
608
+ self._update_batch = [u for u in self._update_batch
609
+ if not (u['peer_id'] == peer_id and u['stat_name'] == stat_name)]
610
+
611
+ # 2. Add to batch
612
+ self._update_batch.append({
613
+ 'peer_id': peer_id,
614
+ 'stat_name': stat_name,
615
+ 'timestamp': timestamp,
616
+ 'value': value
617
+ })
618
+
619
+ def _store_dynamic(self, stat_name: str, value: Any, peer_id: str, timestamp: float):
620
+ """
621
+ Unified API to store a dynamic (time-series) stat.
622
+ - On Agent: Appends the timestamped value to the network send buffer.
623
+ - On World: Uses provided timestamp, updates hot cache, adds to DB buffer.
624
+ """
625
+ value = self._validate_type(stat_name, value)
626
+ if self.is_world:
627
+ # --- WORLD LOGIC ---
628
+ if timestamp > self.max_seen_timestamp:
629
+ self.max_seen_timestamp = timestamp
630
+
631
+ # 1. Update hot cache
632
+ if stat_name in self.world_ungrouped_keys:
633
+ cache = self._stats.get(stat_name)
634
+ else:
635
+ cache = self._get_peer_stat_cache(peer_id, stat_name)
636
+
637
+ # Verify we have a valid SortedDict to work with
638
+ if isinstance(cache, SortedDict):
639
+ # Insert new value and prune outdated ones
640
+ cache[timestamp] = value
641
+ cutoff = timestamp - int(self.min_window_duration.total_seconds() * 1000)
642
+ while cache and cache.peekitem(0)[0] < cutoff:
643
+ cache.popitem(0)
644
+
645
+ # 2. Add to DB buffer depending on the type (value was already cast to the type defined in the schema)
646
+ val_num = value if isinstance(value, (int, float)) and not isinstance(value, bool) else None
647
+ val_str = value if isinstance(value, str) else None
648
+ # always create the json-serialized as fallback
649
+ serializable_value = self._make_json_serializable(value)
650
+ val_json = json.dumps(serializable_value)
651
+ self._dynamic_db_buffer.append((timestamp, peer_id, stat_name, val_num, val_str, val_json))
652
+ else:
653
+ # --- AGENT LOGIC ---
654
+ self._update_batch.append({
655
+ 'peer_id': peer_id,
656
+ 'stat_name': stat_name,
657
+ 'timestamp': timestamp,
658
+ 'value': value
659
+ })
660
+
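A hedged sketch of the world-side write path, assuming timestamps in milliseconds (matching the pruning cutoff above); values and paths are illustrative:

    import time
    from unaiverse.stats import Stats

    world_stats = Stats(is_world=True, db_path='world_stats.db', cache_window_hours=2.0)
    now_ms = int(time.time() * 1000)
    world_stats.store_stat('world_agents', 5, peer_id=None, timestamp=now_ms)
    # Cache entries older than now_ms minus the 2-hour window are pruned on this write;
    # the new row is also buffered until the next save_to_disk().
    world_stats.save_to_disk()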
661
+ # --- AGENT API ---
662
+ def update_view(self, view_data: Dict[str, Any] = None, overwrite: bool = False):
663
+ """
664
+ (Agent-side) Merges data received from the World into the local view (or replaces it when overwrite=True).
665
+ This is 'dumb' storage: we don't parse it, we just store it for plotting.
666
+
667
+ The view has this structure:
668
+ {
669
+ "world": { "stat_name": value_or_timeseries },
670
+ "peers": { "peer_id": { "stat_name": value_or_timeseries } }
671
+ }
672
+ Dynamic stats are stored as lists of [timestamp, value] pairs for efficient JSON/Plotly usage.
673
+
674
+ Args:
675
+ view_data: The snapshot received from the world.
676
+ overwrite: If True, replaces the entire current view instead of merging.
677
+ """
678
+ if self.is_world:
679
+ return
680
+
681
+ # Initialize empty structure if needed
682
+ if not self._world_view or overwrite:
683
+ self._world_view = {'world': {}, 'peers': {}}
684
+
685
+ def _update_max_ts(ts):
686
+ """Helper to update the max seen timestamp from a time-series."""
687
+ # Dynamic stats come as [[ts, val], [ts, val]...]
688
+ if isinstance(ts, list) and len(ts) > 0 and isinstance(ts[0], list):
689
+ # The last item is usually the newest in sorted time-series
690
+ last_ts = ts[-1][0]
691
+ if last_ts > self.max_seen_timestamp:
692
+ self.max_seen_timestamp = int(last_ts)
693
+
694
+ def _merge_dict(target: Dict, source: Dict):
695
+ """
696
+ Helper to merge source into target with special handling for dynamic stats.
697
+ Copies a source dict { "stat_name": value_or_timeseries } into target.
698
+ """
699
+ for stat_name, val_or_ts in source.items():
700
+ if stat_name in self.all_dynamic_keys:
701
+ _update_max_ts(val_or_ts)
702
+ if stat_name not in target:
703
+ target[stat_name] = []
704
+ target[stat_name].extend(val_or_ts)
705
+ else:
706
+ target[stat_name] = val_or_ts
707
+
708
+ # 1. Merge World (Ungrouped) Stats
709
+ if 'world' in view_data:
710
+ _merge_dict(self._world_view.setdefault('world', {}), view_data['world'])
711
+
712
+ # 2. Merge Peer (Grouped) Stats
713
+ if 'peers' in view_data:
714
+ target_peers = self._world_view.setdefault('peers', {})
715
+ for peer_id, peer_data in view_data['peers'].items():
716
+ target_peer = target_peers.setdefault(peer_id, {})
717
+ _merge_dict(target_peer, peer_data)
718
+
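For clarity, a sketch of the view_data shape that update_view() expects (peer id and values are illustrative):

    from unaiverse.stats import Stats

    view_data = {
        'world': {
            'world_agents': [[1700000000000, 3], [1700000060000, 4]],  # dynamic: [[ts_ms, value], ...]
            'graph': {'nodes': {}, 'edges': {}}                        # static: stored as-is
        },
        'peers': {
            'peer-123': {'state': 'IDLE', 'last_action': 'noop'}
        }
    }
    agent_stats = Stats(is_world=False)
    agent_stats.update_view(view_data)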
719
+ def _get_last_val_from_view(self, view: Dict, name: str) -> str:
720
+ """Helper to extract a scalar value safely from the view snapshot.
721
+ View structure:
722
+ {
723
+ "world": { "stat_name": value_or_timeseries },
724
+ "peers": { "peer_id": { "stat_name": value_or_timeseries } }
725
+ }
726
+ For Dynamic stats we have a list of lists: [[timestamp, value], ...]"""
727
+ val = None
728
+ # Try World (Ungrouped)
729
+ if name in view.get('world', {}):
730
+ data = view['world'][name]
731
+ # If dynamic (list of lists), get last value. If static, get value.
732
+ if isinstance(data, list) and len(data) > 0 and isinstance(data[0], list):
733
+ val = data[-1][1]
734
+ else:
735
+ val = data
736
+
737
+ if isinstance(val, float):
738
+ return f"{val:.3f}"
739
+ return str(val) if val is not None else "-"
740
+
741
+ def get_stats(self):
742
+ return self._stats
743
+
744
+ def get_payload_for_world(self) -> List[Dict[str, Any]]:
745
+ """(Agent-only) Gathers, returns, and clears all stats to be sent to the world."""
746
+ if self.is_world:
747
+ return []
748
+
749
+ # self._update_agent_static() # Ensure static stats are fresh in the batch
750
+ payload = self._update_batch
751
+ self._update_batch = [] # Clear after getting
752
+ return payload
753
+
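A sketch of the agent-side buffering round trip (peer id and values are illustrative):

    import time
    from unaiverse.stats import Stats

    agent_stats = Stats(is_world=False)
    now_ms = int(time.time() * 1000)
    agent_stats.store_stat('state', 'RUNNING', peer_id='peer-123', timestamp=now_ms)
    agent_stats.store_stat('state', 'IDLE', peer_id='peer-123', timestamp=now_ms + 10)  # replaces the buffered 'RUNNING'
    batch = agent_stats.get_payload_for_world()  # list of {'peer_id', 'stat_name', 'timestamp', 'value'}; buffer is cleared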
754
+ # --- WORLD API ---
755
+ def get_view(self, since_timestamp: int = 0) -> Dict[str, Any]:
756
+ """
757
+ (World-side) Returns a clean, JSON-serializable dictionary of the CURRENT in-memory cache.
758
+ Used for initial handshake or lightweight polling.
759
+
760
+ Structure returned:
761
+ {
762
+ "world": { "stat_name": value_or_timeseries },
763
+ "peers": { "peer_id": { "stat_name": value_or_timeseries } }
764
+ }
765
+ For Dynamic stats, returns a list of lists: [[timestamp, value], ...] for efficient JSON/Plotly usage.
766
+ """
767
+ if not self.is_world:
768
+ return {}
769
+ snapshot = {'world': {}, 'peers': {}}
770
+
771
+ # 1. Process World (Ungrouped) Stats
772
+ for stat_name in self.world_ungrouped_keys:
773
+ val = self._stats.get(stat_name)
774
+ if val is not None:
775
+ snapshot['world'][stat_name] = self._serialize_value(val, since_timestamp)
776
+
777
+ # 2. Process Peer (Grouped) Stats
778
+ peer_groups = self._stats.get(self.GROUP_KEY, {})
779
+
780
+ for pid in peer_groups.keys():
781
+ peer_data = {}
782
+ for stat_name, val in peer_groups[pid].items():
783
+ serialized = self._serialize_value(val, since_timestamp)
784
+ # Optimize: Don't send empty lists if polling
785
+ if isinstance(serialized, list) and len(serialized) == 0:
786
+ continue
787
+ peer_data[stat_name] = serialized
788
+
789
+ if peer_data:
790
+ snapshot['peers'][pid] = peer_data
791
+
792
+ return snapshot
793
+
794
+ def _serialize_value(self, value: Any, since_timestamp: int) -> Any:
795
+ """Helper to convert SortedDicts to [[t, v], ...] and clean other types."""
796
+ if isinstance(value, SortedDict):
797
+ idx = value.bisect_left(since_timestamp)
798
+ sliced_items = value.items()[idx:]
799
+ # Convert to list of [timestamp, value] for Plotly readiness
800
+ return [[k, self._make_json_serializable(v)] for k, v in sliced_items]
801
+ else:
802
+ # Static value: return as is (assuming it's serializable)
803
+ return self._make_json_serializable(value)
804
+
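A sketch of the incremental-polling pattern that since_timestamp enables (world_stats as in the earlier write-path sketch):

    snapshot = world_stats.get_view()           # full current in-memory view
    last_ts = world_stats.max_seen_timestamp
    # ... later, fetch only what arrived after the previous poll:
    delta = world_stats.get_view(since_timestamp=last_ts + 1)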
805
+ def get_last_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
806
+ """Public API to get the most recent value of any stat, whether static or dynamic.
807
+ - If peer_id is None, it searches for an ungrouped (world-level) stat.
808
+ - If peer_id is provided, it searches for a grouped stat for that peer.
809
+ Returns the last value, or None if not found.
810
+ """
811
+ if stat_name in self.all_static_keys:
812
+ return self._get_last_static_value(stat_name, peer_id)
813
+ elif stat_name in self.all_dynamic_keys:
814
+ return self._get_last_dynamic_value(stat_name, peer_id)
815
+ else:
816
+ self._err(f'get_last_value: Unknown stat_name "{stat_name}"')
817
+ return None
818
+
819
+ def _get_last_dynamic_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
820
+ """
821
+ Returns the most recent value of a dynamic stat from the hot cache.
822
+ - If peer_id is None, it searches for an ungrouped (world-level) stat.
823
+ - If peer_id is provided, it searches for a grouped stat for that peer.
824
+ Returns None if the stat is not found or has no entries.
825
+ """
826
+ if not self.is_world:
827
+ return None # Agents don't have this cache
828
+
829
+ cache: Optional[SortedDict] = None
830
+
831
+ if peer_id is None:
832
+ # --- This is an ungrouped (world) stat ---
833
+ if stat_name in self.world_ungrouped_keys:
834
+ cache = self._stats.get(stat_name)
835
+ else:
836
+ # --- This is a grouped (peer) stat ---
837
+ if stat_name in self.world_grouped_keys:
838
+ peer_cache = self._stats.get(self.GROUP_KEY, {}).get(peer_id)
839
+ if peer_cache:
840
+ cache = peer_cache.get(stat_name)
841
+
842
+ # Check if we found a valid SortedDict cache and it's not empty
843
+ if isinstance(cache, SortedDict) and cache:
844
+ return cache.peekitem(-1)[1] # Return the last value
845
+
846
+ return None # Stat not found or no values
847
+
848
+ def _get_last_static_value(self, stat_name: str, peer_id: str | None = None) -> Any | None:
849
+ """
850
+ Returns the current value of a static stat from the hot cache.
851
+ - If peer_id is None, it searches for an ungrouped (world-level) stat.
852
+ - If peer_id is provided, it searches for a grouped stat for that peer.
853
+ Returns None if the stat is not found.
854
+ """
855
+ if not self.is_world:
856
+ return None # Agents don't have this cache
857
+
858
+ value: Any | None = None
859
+ if peer_id is None:
860
+ # --- This is an ungrouped (world) stat ---
861
+ if stat_name in self.world_ungrouped_keys:
862
+ value = self._stats.get(stat_name)
863
+ else:
864
+ # --- This is a grouped (peer) stat ---
865
+ if stat_name in self.world_grouped_keys:
866
+ peer_cache = self._stats.get(self.GROUP_KEY, {}).get(peer_id)
867
+ if peer_cache:
868
+ value = peer_cache.get(stat_name)
869
+ return value
870
+
871
+ # --- WORLD API (PERSISTENCE) ---
872
+ def save_to_disk(self):
873
+ """(World-only) Saves the static snapshot and dynamic buffer to SQLite."""
874
+ if not self.is_world or not self._db_conn:
875
+ return
876
+ self._deb(f'Saving world stats to DB...')
877
+ try:
878
+ self._save_static_to_db()
879
+ self._save_dynamic_to_db()
880
+ self._prune_cache()
881
+ self._prune_db()
882
+
883
+ self._db_conn.commit()
884
+ self._deb(f'Save complete.')
885
+ except Exception as e:
886
+ self._err(f'CRITICAL: Save_to_disk failed: {e}')
887
+ if self._db_conn:
888
+ self._db_conn.rollback()
889
+
890
+ def _save_static_to_db(self):
891
+ """(World-only) Dumps all static stats from hot cache to DB."""
892
+ if not self._static_db_buffer or not self._db_conn:
893
+ return
894
+
895
+ self._db_conn.executemany("""
896
+ INSERT INTO static_stats (peer_id, stat_name, val_json, timestamp)
897
+ VALUES (?, ?, ?, ?) ON CONFLICT(peer_id, stat_name) DO UPDATE
898
+ SET val_json = excluded.val_json, timestamp = excluded.timestamp
899
+ """, self._static_db_buffer)
900
+
901
+ self._static_db_buffer = [] # Clear buffer
902
+
903
+ def _save_dynamic_to_db(self):
904
+ """(World-only) Writes the in-memory dynamic buffer to SQLite."""
905
+ if not self._dynamic_db_buffer or not self._db_conn:
906
+ return
907
+
908
+ self._db_conn.executemany("""
909
+ INSERT OR IGNORE INTO dynamic_stats
910
+ (timestamp, peer_id, stat_name, val_num, val_str, val_json)
911
+ VALUES (?, ?, ?, ?, ?, ?)
912
+ """, self._dynamic_db_buffer)
913
+
914
+ self._deb(f'Wrote {len(self._dynamic_db_buffer)} dynamic stats to SQLite.')
915
+ self._dynamic_db_buffer = [] # Clear buffer
916
+
917
+ def _prune_db(self):
918
+ """(World-only) Add here the logic to prune the db (e.g., when a peer leaves the world)."""
919
+ if not self._db_conn:
920
+ return
921
+ pass
922
+
923
+ def _prune_cache(self):
924
+ """
925
+ (World-only) Periodic maintenance to clean up 'stale' peers.
926
+
927
+ The 'prune-on-write' logic in _store_dynamic handles active peers efficiently.
928
+ This method handles peers that have disconnected or stopped sending data,
929
+ preventing their old data from haunting the RAM forever.
930
+ """
931
+ if not self.is_world:
932
+ return
933
+
934
+ # Calculate cutoff based on latest timestamp
935
+ window_ms = int(self.min_window_duration.total_seconds() * 1000)
936
+ cutoff = self.max_seen_timestamp - window_ms
937
+
938
+ # 1. Prune Ungrouped Stats (World Stats)
939
+ for key in self.world_ungrouped_keys:
940
+ cache = self._stats.get(key)
941
+ if isinstance(cache, SortedDict): # only true for dynamic stats
942
+ # Remove items older than cutoff
943
+ while cache and cache.peekitem(0)[0] < cutoff:
944
+ cache.popitem(0)
945
+
946
+ # 2. Prune Grouped Stats (Peer Stats)
947
+ peer_groups = self._stats.get(self.GROUP_KEY, {})
948
+
949
+ # We might need to remove empty peers entirely, so we collect keys to delete
950
+ peers_to_remove = []
951
+
952
+ for peer_id, peer_cache in peer_groups.items():
953
+ all_stats_were_empty = True
954
+ for _, stat_data in peer_cache.items():
955
+ if isinstance(stat_data, SortedDict): # only true for dynamic stats
956
+ # Prune the time series
957
+ while stat_data and stat_data.peekitem(0)[0] < cutoff:
958
+ stat_data.popitem(0)
959
+ # after pruning, check if the stat dict is empty
960
+ all_stats_were_empty &= len(stat_data) == 0
961
+
962
+ if all_stats_were_empty:
963
+ peers_to_remove.append(peer_id)
964
+
965
+ # Remove completely dead peers from memory
966
+ for peer_id in peers_to_remove:
967
+ del peer_groups[peer_id]
968
+ self._deb(f'Pruned stale peer {peer_id} from cache.')
969
+
970
+ # --- WORLD API (LOADING) ---
971
+ def _load_existing_stats(self):
972
+ """(World-only) Loads existing stats from disk to hydrate the cache."""
973
+ if not self.is_world or not self._db_conn:
974
+ return
975
+ self._deb('Loading existing stats from disk...')
976
+ self._load_static_from_db()
977
+ self._hydrate_dynamic_caches_from_db()
978
+ self._deb('Finished loading stats.')
979
+
980
+ def _load_static_from_db(self):
981
+ """(World-only) Loads the static_stats table into the _stats hot cache."""
982
+ # There are no default static stats that are meaningful to load at startup (graph, state...)
983
+ pass
984
+
985
+ def _hydrate_dynamic_caches_from_db(self):
986
+ """(World-only) Queries SQLite for 'hot' data to fill dynamic caches."""
987
+ if not self._db_conn:
988
+ return
989
+ try:
990
+ max_ts_cursor = self._db_conn.execute('SELECT MAX(timestamp) FROM dynamic_stats')
991
+ max_ts_result = max_ts_cursor.fetchone()
992
+
993
+ if max_ts_result is None or max_ts_result[0] is None:
994
+ self._deb('No dynamic stats found in DB. Hydration skipped.')
995
+ return # No data in DB, nothing to load
996
+ self.max_seen_timestamp = int(max_ts_result[0])
997
+ cutoff_t_ms = self.max_seen_timestamp - int(self.min_window_duration.total_seconds() * 1000)
998
+
999
+ cursor = self._db_conn.execute("""
1000
+ SELECT timestamp, peer_id, stat_name, val_num, val_str, val_json
1001
+ FROM dynamic_stats
1002
+ WHERE timestamp > ?
1003
+ ORDER BY timestamp ASC
1004
+ """, (cutoff_t_ms,))
1005
+
1006
+ count = 0
1007
+ for ts, peer_id, stat_name, _, _, val_json in cursor:
1008
+ ts = int(ts)
1009
+ # we just need the val_json that will be cast to the exact type by _validate_type
1010
+ value = json.loads(val_json)
1011
+ self._store_dynamic(stat_name, value, peer_id, ts)
1012
+ count += 1
1013
+
1014
+ # Clear the buffer generated by hydrating
1015
+ self._dynamic_db_buffer = []
1016
+
1017
+ if count > 0:
1018
+ self._deb(f'Hydrated cache with {count} recent dynamic stats.')
1019
+ else:
1020
+ self._deb('No recent dynamic stats found in DB.')
1021
+
1022
+ except Exception as e:
1023
+ self._err(f'Failed to hydrate dynamic caches from DB: {e}')
1024
+
1025
+ # --- WORLD API (QUERYING) ---
1026
+ def query_history(self,
1027
+ stat_names: List[str] = [],
1028
+ peer_ids: List[str] = [],
1029
+ time_range: Union[Tuple[int, int], int, None] = None,
1030
+ value_range: Tuple[float, float] | None = None,
1031
+ limit: int | None = None) -> Dict[str, Any]:
1032
+ """
1033
+ (World-only) Queries the SQLite DB for specific stats, potentially filtering by VALUE.
1034
+ Returns the same structure as get_view(), allowing the agent to ingest it seamlessly.
1035
+ Automatically flushes the current memory buffer to DB before querying
1036
+ to ensure "read-your-writes" consistency.
1037
+
1038
+ Args:
1039
+ value_range: (min, max) - Only returns rows where val_num is within range.
1040
+ """
1041
+ if not self.is_world or not self._db_conn:
1042
+ return {}
1043
+
1044
+ # Flush the cached updates to the DB before querying
1045
+ self._save_static_to_db()
1046
+ self._save_dynamic_to_db()
1047
+ self._db_conn.commit()
1048
+
1049
+ snapshot = {'world': {}, 'peers': {}}
1050
+
1051
+ # A. Query the static stats
1052
+ query_static = ['SELECT peer_id, stat_name, val_json FROM static_stats']
1053
+ params_static = []
1054
+
1055
+ where_added = False
1056
+ if stat_names:
1057
+ query_static.append("WHERE")
1058
+ where_added = True
1059
+ query_static.append(f"stat_name IN ({','.join(['?']*len(stat_names))})")
1060
+ params_static.extend(stat_names)
1061
+ if peer_ids:
1062
+ if not where_added:
1063
+ query_static.append("WHERE")
1064
+ else:
1065
+ query_static.append("AND")
1066
+ query_static.append(f"peer_id IN ({','.join(['?']*len(peer_ids))})")
1067
+ params_static.extend(peer_ids)
1068
+
1069
+ try:
1070
+ cursor = self._db_conn.execute(' '.join(query_static), params_static)
1071
+ for pid, sname, vjson in cursor:
1072
+ val = self._validate_type(sname, json.loads(vjson))
1073
+ # Handle special Graph reconstruction if needed (legacy format support)
1074
+ if sname == 'graph':
1075
+ # Handle both legacy format (just edges) and new format (nodes+edges) safely
1076
+ if isinstance(val, dict) and 'edges' in val:
1077
+ # Convert the edge lists back to sets
1078
+ val['edges'] = {k: set(v) for k, v in val['edges'].items()}
1079
+ # Ensure nodes dict exists
1080
+ if 'nodes' not in val:
1081
+ val['nodes'] = {}
1082
+ else:
1083
+ # Convert entire dict to sets (as it was before)
1084
+ edges_set = {k: set(v) for k, v in val.items()}
1085
+ # Migrate to new structure on the fly
1086
+ val = {'nodes': {}, 'edges': edges_set}
1087
+
1088
+ # Static stats format: value (direct)
1089
+ if pid in (None, 'None', ''):
1090
+ snapshot['world'][sname] = val
1091
+ else:
1092
+ snapshot['peers'].setdefault(pid, {})[sname] = val
1093
+ except Exception as e:
1094
+ self._err(f'Query history (static) failed: {e}')
1095
+
1096
+ # B. Query the dynamic stats
1097
+ query_dyn = ['SELECT timestamp, peer_id, stat_name, val_num, val_str, val_json FROM dynamic_stats']
1098
+ params_dyn = []
1099
+
1100
+ # 1. Stat Names
1101
+ where_added = False
1102
+ if stat_names:
1103
+ query_dyn.append("WHERE")
1104
+ where_added = True
1105
+ query_dyn.append(f"stat_name IN ({','.join(['?']*len(stat_names))})")
1106
+ params_dyn.extend(stat_names)
1107
+
1108
+ # 2. Peer IDs
1109
+ if peer_ids:
1110
+ query_dyn.append("WHERE" if not where_added else "AND")
1111
+ where_added = True
1114
+ query_dyn.append(f"peer_id IN ({','.join(['?']*len(peer_ids))})")
1115
+ params_dyn.extend(peer_ids)
1116
+
1117
+ if time_range is not None:
1118
+ if isinstance(time_range, int):
1119
+ # Treated as "Since X"
1120
+ query_dyn.append("WHERE" if not where_added else "AND")
1121
+ where_added = True
1124
+ query_dyn.append("timestamp >= ?")
1125
+ params_dyn.append(time_range)
1126
+ elif isinstance(time_range, (tuple, list)) and len(time_range) == 2:
1127
+ # Treated as "Between X and Y"
1128
+ query_dyn.append("WHERE" if not where_added else "AND")
1129
+ where_added = True
1132
+ query_dyn.append("timestamp >= ? AND timestamp <= ?")
1133
+ params_dyn.extend([time_range[0], time_range[1]])
1134
+
1135
+ # 4. Value Range (filter rows by numeric value)
1136
+ if value_range:
1137
+ query_dyn.append("WHERE" if not where_added else "AND")
1138
+ where_added = True
1141
+ query_dyn.append("val_num IS NOT NULL AND val_num >= ? AND val_num <= ?")
1142
+ params_dyn.extend([value_range[0], value_range[1]])
1143
+
1144
+ query_dyn.append("ORDER BY timestamp ASC")
1145
+
1146
+ # add the limit
1147
+ query_dyn.append("LIMIT 5000" if limit is None else f"LIMIT {limit}")
1148
+
1149
+ try:
1150
+ cursor = self._db_conn.execute(' '.join(query_dyn), params_dyn)
1151
+ for ts, pid, sname, vnum, vstr, vjson in cursor:
1152
+ ts = int(ts)
1153
+ val = vnum if vnum is not None else (vstr if vstr is not None else json.loads(vjson))
1154
+ val = self._validate_type(sname, val)
1155
+
1156
+ # Structure construction
1157
+ if pid in (None, 'None', ''): # Handling world stats
1158
+ target_ts = snapshot['world'].setdefault(sname, [])
1159
+ else: # Handling peer stats
1160
+ target_ts = snapshot['peers'].setdefault(pid, {}).setdefault(sname, [])
1161
+ target_ts.append([ts, val])
1162
+
1163
+ except Exception as e:
1164
+ self._err(f'Query history failed: {e}')
1165
+
1166
+ return snapshot
1167
+
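A hedged example of calling query_history() with the filters above (using the world_stats instance from the earlier sketch; stat names, bounds and limit are illustrative):

    start_ms, end_ms = 1700000000000, 1700003600000  # millisecond bounds
    history = world_stats.query_history(
        stat_names=['world_agents'],
        time_range=(start_ms, end_ms),   # or a single int, meaning "since this timestamp"
        value_range=(1.0, 10.0),         # only rows whose val_num falls in this range
        limit=1000
    )
    series = history['world'].get('world_agents', [])  # [[timestamp, value], ...]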
1168
+ def _aggregate_time_indexed_stats_over_peers(self, stats: dict) -> tuple[dict, dict]:
1169
+ """(World-only) Aggregates time-indexed peer stats (mean/std) from CACHE."""
1170
+ mean_dict = {}
1171
+ std_dict = {}
1172
+ peer_stats = stats.get(self.GROUP_KEY, {})
1173
+
1174
+ number_stats = {name for name, type_obj in self.stat_types.items()
1175
+ if type_obj in (int, float)}
1176
+
1177
+ for stat_name in number_stats:
1178
+ peer_series = []
1179
+ for _, peer_data in peer_stats.items():
1180
+ if stat_name in peer_data:
1181
+ tv_dict: SortedDict = peer_data[stat_name]
1182
+ if tv_dict:
1183
+ peer_series.append(tv_dict)
1184
+
1185
+ if not peer_series:
1186
+ continue
1187
+
1188
+ all_times = sorted({t for series in peer_series for t in series.keys()})
1189
+ if not all_times:
1190
+ continue
1191
+
1192
+ aligned_values = []
1193
+ for series in peer_series:
1194
+ if not series:
1195
+ continue
1196
+ filled = []
1197
+ series_times = series.keys()
1198
+ series_vals = series.values()
1199
+
1200
+ last_val = series_vals[0]
1201
+ series_idx = 0
1202
+
1203
+ for t in all_times:
1204
+ while series_idx < len(series_times) and series_times[series_idx] <= t:
1205
+ last_val = series_vals[series_idx]
1206
+ series_idx += 1
1207
+ filled.append(last_val)
1208
+ aligned_values.append(filled)
1209
+
1210
+ if not aligned_values:
1211
+ continue
1212
+
1213
+ mean_dict[stat_name] = {}
1214
+ std_dict[stat_name] = {}
1215
+ for i, t in enumerate(all_times):
1216
+ vals = [peer_vals[i] for peer_vals in aligned_values if peer_vals[i] is not None]
1217
+ if vals:
1218
+ mean_val = sum(vals) / float(len(vals))
1219
+ var = sum((x - mean_val) ** 2 for x in vals) / len(vals)
1220
+ std_val = math.sqrt(var)
1221
+ else:
1222
+ mean_val = None
1223
+ std_val = None
1224
+
1225
+ mean_dict[stat_name][t] = mean_val
1226
+ std_dict[stat_name][t] = std_val
1227
+
1228
+ return mean_dict, std_dict
1229
+
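To make the forward-fill alignment above concrete, a small worked example with illustrative numbers: peer A reports 1 at t=0 and 3 at t=20, peer B reports 2 at t=10. The union of timestamps is [0, 10, 20]; after alignment, A becomes [1, 1, 3] and B becomes [2, 2, 2] (B's earliest value also fills t=0, since last_val starts at the first sample), so the mean series is {0: 1.5, 10: 1.5, 20: 2.5} and the standard deviation at t=0 is 0.5.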
1230
+ def shutdown(self):
1231
+ """Call this explicitly when your application is closing."""
1232
+ if self.is_world and self._db_conn:
1233
+ self._deb('Shutdown: Saving final stats...')
1234
+ try:
1235
+ self.save_to_disk()
1236
+ except Exception as e:
1237
+ self._err(f'Shutdown save failed: {e}')
1238
+ self._db_conn.close()
1239
+ self._db_conn = None
1240
+ self._deb('SQLite connection closed.')
1241
+
1242
+ def __del__(self):
1243
+ if self.is_world and self._db_conn:
1244
+ try:
1245
+ # Final save on exit, if any buffer
1246
+ self.save_to_disk()
1247
+ except Exception:
1248
+ pass # Don't raise in destructor
1249
+ self._db_conn.close()
1250
+ self._deb('SQLite connection closed.')
1251
+
1252
+ # --- PLOTTING INTERFACE ---
1253
+ def plot(self, since_timestamp: int = 0) -> str | None:
1254
+ """
1255
+ Default dashboard implementation.
1256
+ Visualizes Core Stats: Topology, Agent Counts, States, and Actions.
1257
+ """
1258
+ # 1. Get Data view
1259
+ view = self.get_view(since_timestamp) if self.is_world else self._world_view
1260
+ if not view:
1261
+ return None
1262
+
1263
+ dash = DefaultBaseDash("World Overview")
1264
+
1265
+ # --- Panel 1: Network Topology (Top Left) ---
1266
+ p1 = UIPlot(title="World Topology")
1267
+ self._populate_graph(p1, view, "graph")
1268
+ # p1.set_layout_opt('xaxis', {'visible': False})
1269
+ # p1.set_layout_opt('yaxis', {'visible': False})
1270
+ clean_axis = {'showgrid': False, 'showticklabels': False, 'zeroline': False}
1271
+ p1.set_layout_opt('xaxis', clean_axis)
1272
+ p1.set_layout_opt('yaxis', clean_axis)
1273
+ dash.add_panel(p1, "top_left")
1274
+
1275
+ # --- Panel 2: World Agents History (Top Right, time series) ---
1276
+ p2 = UIPlot(title="World Agents History")
1277
+ metrics = [
1278
+ ("world_masters", "World Masters", THEME['peers'][0]),
1279
+ ("world_agents", "World Agents", THEME['peers'][1]),
1280
+ ("human_agents", "Human Agents", THEME['peers'][2]),
1281
+ ("artificial_agents", "Artificial Agents", THEME['peers'][3]),
1282
+ ]
1283
+ for stat_key, label, color in metrics:
1284
+ self._populate_time_series(
1285
+ panel=p2,
1286
+ view=view,
1287
+ stat_name=stat_key,
1288
+ color_override=color,
1289
+ title_override=label
1290
+ )
1291
+ # p2.set_layout_opt('xaxis', {'title': None, 'visible': False})
1292
+ p2.set_layout_opt('xaxis', {'title': None, 'showticklabels': False})
1293
+ p2.set_layout_opt('yaxis', {'title': None})
1294
+ dash.add_panel(p2, "top_right")
1295
+
1296
+ # --- Panel 3: State Distribution (Bar) ---
1297
+ p3 = UIPlot(title="State Distribution")
1298
+ # self._populate_distribution(p3, view, "state")
1299
+ self._populate_graph(p3, view, "graph")
1300
+ # p1.set_layout_opt('xaxis', {'visible': False})
1301
+ # p1.set_layout_opt('yaxis', {'visible': False})
1302
+ clean_axis = {'showgrid': False, 'showticklabels': False, 'zeroline': False}
1303
+ p3.set_layout_opt('xaxis', clean_axis)
1304
+ p3.set_layout_opt('yaxis', clean_axis)
1305
+ p3.set_layout_opt("xaxis", {"title": None})
1306
+ dash.add_panel(p3, "bot_left")
1307
+
1308
+ # --- Panel 4: Action Distribution (Bar) ---
1309
+ p4 = UIPlot(title="Last Action Distribution")
1310
+ self._populate_distribution(p4, view, "last_action")
1311
+ p4.set_layout_opt("xaxis", {"title": None})
1312
+ dash.add_panel(p4, "bot_right")
1313
+
1314
+ return dash.to_json()
1315
+
1316
+ def _populate_time_series(self, panel: UIPlot, view: Dict, stat_name: str,
1317
+ peer_ids: List[str] | None = None, color_override: str = None,
1318
+ show_legend: bool = True, title_override: str = None):
1319
+ """Extracts [[t,v],...] lists and adds lines to panel. Supports custom titles and colors."""
1320
+ def get_xy(raw):
1321
+ if isinstance(raw, list) and raw and isinstance(raw[0], list):
1322
+ return [r[0] for r in raw], [r[1] for r in raw]
1323
+ return [], []
1324
+
1325
+ # World
1326
+ w_data = view.get('world', {}).get(stat_name)
1327
+ if w_data:
1328
+ x, y = get_xy(w_data)
1329
+ if x:
1330
+ label = title_override if title_override else "World"
1331
+ color = color_override if color_override else THEME['main']
1332
+ panel.add_line(x, y, name=label, color=color,
1333
+ legend_group=label, show_legend=show_legend)
1334
+
1335
+ # Peers
1336
+ peers_dict = view.get('peers', {})
1337
+ targets = peer_ids if peer_ids else peers_dict.keys()
1338
+ for pid in targets:
1339
+ p_data = peers_dict.get(pid, {}).get(stat_name)
1340
+ if p_data:
1341
+ x, y = get_xy(p_data)
1342
+ if x:
1343
+ c = color_override or self._get_consistent_color(pid)
1344
+ panel.add_line(x, y, name=f'{pid[-6:]}', color=c,
1345
+ legend_group=pid, show_legend=show_legend)
1346
+
1347
+ def _populate_indicator(self, panel: UIPlot, view: Dict, stat_name: str, peer_ids: List[str] | None = None):
1348
+ """Extracts a scalar value and adds indicator."""
1349
+ val = None
1350
+ if 'world' in view and stat_name in view['world']:
1351
+ val = view['world'][stat_name]
1352
+ elif 'peers' in view:
1353
+ # Just grab the first available peer's value if not specified
1354
+ targets = peer_ids if peer_ids else list(view['peers'].keys())
1355
+ if targets:
1356
+ val = view['peers'][targets[0]].get(stat_name)
1357
+
1358
+ panel.add_indicator(val, title=stat_name)
1359
+
1360
+ def _populate_table(self, panel: UIPlot, view: Dict, stat_name: str, peer_ids: List[str] | None = None):
1361
+ """Extracts data for a table."""
1362
+ headers = ['Entity', 'Value']
1363
+ col_ent = []
1364
+ col_val = []
1365
+
1366
+ # World
1367
+ if 'world' in view and stat_name in view['world']:
1368
+ col_ent.append('World')
1369
+ col_val.append(str(view['world'][stat_name]))
1370
+
1371
+ # Peers
1372
+ peers_dict = view.get('peers', {})
1373
+ targets = peer_ids if peer_ids else peers_dict.keys()
1374
+ for pid in targets:
1375
+ val = peers_dict.get(pid, {}).get(stat_name)
1376
+ if val is not None:
1377
+ col_ent.append(pid[-6:])
1378
+ col_val.append(str(val)) # Simple stringification
1379
+
1380
+ panel.add_table(headers, [col_ent, col_val])
1381
+
1382
+ def _populate_graph(self, panel: UIPlot, view: Dict, stat_name: str):
1383
+ """Calculates layout and adds graph traces to the panel."""
1384
+
1385
+ # 1. Fetch Data
1386
+ raw_graph = view.get('world', {}).get(stat_name, {})
1387
+ if not raw_graph:
1388
+ return
1389
+
1390
+ # Handle both legacy format (just edges) and new format (nodes+edges) safely
1391
+ if 'edges' in raw_graph and 'nodes' in raw_graph:
1392
+ edges_data = raw_graph['edges']
1393
+ nodes_data = raw_graph['nodes']
1394
+ else:
1395
+ # Fallback for simple graphs without node details
1396
+ edges_data = raw_graph
1397
+ nodes_data = {}
1398
+
1399
+ # 2. Calculate Layout (Circular)
1400
+ # We use edges_data keys for positioning, but we might have nodes in nodes_data
1401
+ # that have no edges yet, so we merge them.
1402
+ all_pids = set(edges_data.keys()).union(*edges_data.values()) | set(nodes_data.keys())
1403
+ pids = list(all_pids)
1404
+ pos = {}
1405
+ if pids:
1406
+ radius = 10
1407
+ angle_step = (2 * math.pi) / len(pids)
1408
+ for i, pid in enumerate(pids):
1409
+ pos[pid] = (
1410
+ radius * math.cos(i * angle_step),
1411
+ radius * math.sin(i * angle_step)
1412
+ )
1413
+
1414
+ # 3. Create Edge Trace
1415
+ edge_x, edge_y = [], []
1416
+ for source, targets in edges_data.items():
1417
+ if source not in pos:
1418
+ continue
1419
+ x0, y0 = pos[source]
1420
+ # targets might be a list (from JSON) or set (from local cache)
1421
+ target_iter = targets if isinstance(targets, (list, set)) else []
1422
+ for target in target_iter:
1423
+ if target in pos:
1424
+ x1, y1 = pos[target]
1425
+ edge_x.extend([x0, x1, None])
1426
+ edge_y.extend([y0, y1, None])
1427
+
1428
+ panel.add_trace({
1429
+ 'type': 'scatter', 'mode': 'lines',
1430
+ 'x': edge_x, 'y': edge_y,
1431
+ 'line': {'width': 0.5, 'color': THEME['edge']},
1432
+ 'hoverinfo': 'none', 'showlegend': False
1433
+ })
1434
+
1435
+ # 4. Create Node Trace
1436
+ node_x, node_y, node_text, node_color, node_labels = [], [], [], [], []
1437
+ for pid in pids:
1438
+ if pid not in pos:
1439
+ continue
1440
+ x, y = pos[pid]
1441
+ node_x.append(x)
1442
+ node_y.append(y)
1443
+
1444
+ # Node labels
1445
+ node_labels.append(pid[-6:])
1446
+ # Build hover text
1447
+ if nodes_data:
1448
+ hover_text = ''
1449
+ for key, val in nodes_data.get(pid, {}).items():
1450
+ hover_text += f'{key}: {val}<br>'
1451
+ else:
1452
+ hover_text = f'Peer ID: {pid}'
1453
+ node_text.append(hover_text)
1454
+
1455
+ # Determine Color
1456
+ # You can customize this mapping based on your NodeProfile types
1457
+ node_color.append(self._get_consistent_color(pid))
1458
+
1459
+ panel.add_trace({
1460
+ 'type': 'scatter',
1461
+ 'mode': 'markers+text',
1462
+ 'x': node_x, 'y': node_y,
1463
+ 'text': node_labels,
1464
+ 'hovertext': node_text,
1465
+ 'hoverinfo': 'text',
1466
+ 'textposition': 'top center',
1467
+ 'showlegend': False,
1468
+ 'marker': {
1469
+ 'color': node_color,
1470
+ 'size': 12,
1471
+ 'line': {'width': 2, 'color': THEME['edge']}
1472
+ }
1473
+ })
1474
+
1475
+ # 5. Layout overrides
1476
+ # panel.set_layout_opt('xaxis', {'visible': False})
1477
+ # panel.set_layout_opt('yaxis', {'visible': False})
1478
+
1479
+ def _populate_distribution(self, panel: UIPlot, view: Dict, stat_name: str):
1480
+ """
1481
+ Aggregates peer values into a frequency count (Bar Chart).
1482
+ e.g., {"IDLE": 3, "RUNNING": 5}
1483
+ """
1484
+ peers_dict = view.get('peers', {})
1485
+ counts = {}
1486
+
1487
+ # 1. Aggregate
1488
+ for data in peers_dict.values():
1489
+ # Handle uninitialized or None values
1490
+ val_str = str(data.get(stat_name, 'Unknown'))
1491
+ counts[val_str] = counts.get(val_str, 0) + 1
1492
+
1493
+ # 2. Sort for consistency (e.g., alphabetically by state name)
1494
+ sorted_keys = sorted(counts.keys())
1495
+ sorted_vals = [counts[k] for k in sorted_keys]
1496
+ colors = [self._get_consistent_color(k) for k in sorted_keys]
1497
+
1498
+ # 3. Plot
1499
+ panel.add_bar(xs=sorted_keys, ys=sorted_vals, names=sorted_vals, colors=colors)
1500
+
1501
+ def _get_consistent_color(self, unique_str: str) -> str:
1502
+ """Deterministic color generation based on string hash."""
1503
+ if not unique_str:
1504
+ return '#ffffff'
1505
+ idx = zlib.adler32(str(unique_str).encode()) % len(THEME['peers'])
1506
+ return THEME['peers'][idx]
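Finally, an end-to-end sketch that ties the pieces together, assuming the classes above are importable from unaiverse.stats (paths, peer id and values are illustrative):

    import time
    from unaiverse.stats import Stats

    world_stats = Stats(is_world=True, db_path='world_stats.db')
    now_ms = int(time.time() * 1000)
    world_stats.store_stat('world_agents', 2, peer_id=None, timestamp=now_ms)
    world_stats.store_stat('state', 'IDLE', peer_id='peer-123', timestamp=now_ms)
    fig_json = world_stats.plot()  # Plotly JSON for the 2x2 DefaultBaseDash, or None if there is no data
    world_stats.shutdown()         # flush buffers and close the SQLite connection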