unaiverse 0.1.6__cp314-cp314-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of unaiverse has been flagged as potentially problematic; see the package registry's advisory page for more details.
- unaiverse/__init__.py +19 -0
- unaiverse/agent.py +2008 -0
- unaiverse/agent_basics.py +1846 -0
- unaiverse/clock.py +191 -0
- unaiverse/dataprops.py +1209 -0
- unaiverse/hsm.py +1880 -0
- unaiverse/modules/__init__.py +18 -0
- unaiverse/modules/cnu/__init__.py +17 -0
- unaiverse/modules/cnu/cnus.py +536 -0
- unaiverse/modules/cnu/layers.py +261 -0
- unaiverse/modules/cnu/psi.py +60 -0
- unaiverse/modules/hl/__init__.py +15 -0
- unaiverse/modules/hl/hl_utils.py +411 -0
- unaiverse/modules/networks.py +1509 -0
- unaiverse/modules/utils.py +680 -0
- unaiverse/networking/__init__.py +16 -0
- unaiverse/networking/node/__init__.py +18 -0
- unaiverse/networking/node/connpool.py +1261 -0
- unaiverse/networking/node/node.py +2223 -0
- unaiverse/networking/node/profile.py +446 -0
- unaiverse/networking/node/tokens.py +79 -0
- unaiverse/networking/p2p/__init__.py +198 -0
- unaiverse/networking/p2p/go.mod +127 -0
- unaiverse/networking/p2p/go.sum +548 -0
- unaiverse/networking/p2p/golibp2p.py +18 -0
- unaiverse/networking/p2p/golibp2p.pyi +135 -0
- unaiverse/networking/p2p/lib.go +2714 -0
- unaiverse/networking/p2p/lib.go.sha256 +1 -0
- unaiverse/networking/p2p/lib_types.py +312 -0
- unaiverse/networking/p2p/message_pb2.py +63 -0
- unaiverse/networking/p2p/messages.py +265 -0
- unaiverse/networking/p2p/mylogger.py +77 -0
- unaiverse/networking/p2p/p2p.py +929 -0
- unaiverse/networking/p2p/proto-go/message.pb.go +616 -0
- unaiverse/networking/p2p/unailib.cp314-win_amd64.pyd +0 -0
- unaiverse/streamlib/__init__.py +15 -0
- unaiverse/streamlib/streamlib.py +210 -0
- unaiverse/streams.py +770 -0
- unaiverse/utils/__init__.py +16 -0
- unaiverse/utils/ask_lone_wolf.json +27 -0
- unaiverse/utils/lone_wolf.json +19 -0
- unaiverse/utils/misc.py +305 -0
- unaiverse/utils/sandbox.py +293 -0
- unaiverse/utils/server.py +435 -0
- unaiverse/world.py +175 -0
- unaiverse-0.1.6.dist-info/METADATA +365 -0
- unaiverse-0.1.6.dist-info/RECORD +50 -0
- unaiverse-0.1.6.dist-info/WHEEL +5 -0
- unaiverse-0.1.6.dist-info/licenses/LICENSE +43 -0
- unaiverse-0.1.6.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,446 @@
|
|
|
1
|
+
"""
|
|
2
|
+
█████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
|
|
3
|
+
░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
|
|
4
|
+
░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
|
|
5
|
+
░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
|
|
6
|
+
░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
|
|
7
|
+
░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
|
|
8
|
+
░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
|
|
9
|
+
░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
|
|
10
|
+
A Collectionless AI Project (https://collectionless.ai)
|
|
11
|
+
Registration/Login: https://unaiverse.io
|
|
12
|
+
Code Repositories: https://github.com/collectionlessai/
|
|
13
|
+
Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
|
|
14
|
+
"""
|
|
15
|
+
import json
|
|
16
|
+
import psutil
|
|
17
|
+
import hashlib
|
|
18
|
+
import platform
|
|
19
|
+
import datetime
|
|
20
|
+
import requests
|
|
21
|
+
import ipaddress
|
|
22
|
+
from datetime import timezone
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class NodeProfile:
|
|
26
|
+
"""
|
|
27
|
+
Profile information for a node.
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
    def __init__(self,
                 static: dict,
                 dynamic: dict,
                 cv: list):
        """Build a node profile from static info, dynamic info and a CV.

        Args:
            static: Required static profile fields (node_id, node_type, ...).
                Must be non-empty; keys beyond the expected template are kept.
            dynamic: Dynamic (machine/runtime) info. Only keys present in the
                expected template are taken, plus any key prefixed 'tmp_'.
            cv: List of CV entry dicts. The keys of each entry are re-ordered
                alphabetically so that hashing is consistent with the server.

        Raises:
            ValueError: If `static` is empty or a required static key is missing.
        """

        # Checking provided data
        if not static:
            raise ValueError("Missing static profile data")

        # Forcing key order (important! otherwise the hash operation will not be consistent with the one on the server)
        cv = [{k: _cv[k] for k in sorted(_cv)} for _cv in cv]

        # Template with every expected key; None marks "not provided yet"
        self._profile_data = \
            {
                'static': {
                    'node_id': None,
                    'node_type': None,
                    'node_name': None,
                    'node_description': None,
                    'created_utc': None,
                    'name': None,
                    'surname': None,
                    'title': None,
                    'organization': None,
                    'email': None,
                    'max_nr_connections': None,
                    'allowed_node_ids': None,
                    'world_masters_node_ids': None,
                    'certified': None,
                    'inspector_node_id': None
                },
                'dynamic': {
                    'os': None,
                    'cpu_cores': None,
                    'logical_cpus': None,
                    'memory_gb': None,
                    'memory_avail': None,
                    'memory_used': None,
                    'timestamp': None,
                    'public_ip_address': None,
                    'guessed_location': None,
                    'peer_id': None,
                    'peer_addresses': None,
                    'private_peer_id': None,
                    'private_peer_addresses': None,
                    'proc_inputs': None,
                    'proc_outputs': None,
                    'streams': None,
                    'connections': {
                        'public_agents': None,  # List of dict
                        'world_agents': None,  # List of dict
                        'world_masters': None,  # List of dict
                        'world_peer_id': None,  # Str
                        'role': None  # Str
                    },
                    'world_summary': {
                        "world_title": None,
                        "world_agents": None,
                        "world_masters": None,
                        "world_agents_count": None,
                        "world_masters_count": None,
                        "total_agents": None,
                        "agent_badges_count": None,
                        "agent_badges": None,
                        "streams_count": None
                    },
                    "world_roles_fsm": None,  # Dict of FSMs for world roles
                    "hidden": None
                },
                'cv': cv
            }

        # Checking the presence of basic static profile info
        # (the four exempted keys below are optional for older callers)
        for k in self._profile_data['static'].keys():
            if (k not in static and k != "certified" and
                    k != "allowed_node_ids" and k != "world_masters_node_ids" and k != "inspector_node_id"):  # Patch
                raise ValueError("Missing required static profile info: " + str(k))

        # Filling static profile info (there might be more information than the one shown above)
        for k, v in static.items():
            self._profile_data['static'][k] = v

        # Including the provided dynamic info, only considering the expected keys
        # (the provided "dynamic" argument will contain all or just a sub-portion of the expected keys)
        # Nested 'connections' / 'world_summary' are merged key-by-key; a value
        # already set (non-None) in the template is never overwritten.
        for k, v in dynamic.items():
            if k == 'connections' and v is not None and isinstance(v, dict):
                for kk, vv in v.items():
                    if (kk in self._profile_data['dynamic']['connections'] and
                            self._profile_data['dynamic']['connections'][kk] is None):
                        self._profile_data['dynamic']['connections'][kk] = vv
            elif k == 'world_summary' and v is not None and isinstance(v, dict):
                for kk, vv in v.items():
                    if (kk in self._profile_data['dynamic']['world_summary'] and
                            self._profile_data['dynamic']['world_summary'][kk] is None):
                        self._profile_data['dynamic']['world_summary'][kk] = vv
            elif k in self._profile_data['dynamic'] and self._profile_data['dynamic'][k] is None:
                self._profile_data['dynamic'][k] = v
            elif k.startswith('tmp_'):
                # Temporary ('tmp_'-prefixed) keys are always accepted verbatim
                self._profile_data['dynamic'][k] = v

        # Internally required attributes
        self._profile_last_updated = None  # Will be set by calling _fill_missing_specs or check_and_update_specs
        self._geolocation_cache = {}  # Will be needed to avoid too many IP-related lookups

        # Filling the missing information (machine-level information, specs) that can be automatically extracted
        self._fill_missing_specs()

        # Flag: set when the connection lists changed since the last publish
        self._connections_updated = False
|
|
139
|
+
|
|
140
|
+
def update_cv(self, new_cv):
|
|
141
|
+
self._profile_data['cv'] = new_cv
|
|
142
|
+
|
|
143
|
+
@classmethod
|
|
144
|
+
def from_dict(cls, combined_data: dict) -> 'NodeProfile':
|
|
145
|
+
"""Factory method to create a NodeProfile instance from a dictionary
|
|
146
|
+
containing combined profile data (static, specs, and CV list of dicts).
|
|
147
|
+
|
|
148
|
+
Args:
|
|
149
|
+
combined_data (dict): A dictionary representing the node profile,
|
|
150
|
+
typically loaded from JSON or received over the network.
|
|
151
|
+
Expected to contain 'node_id', 'cv' (list of dicts),
|
|
152
|
+
'node_specification' (dict), 'peer_id', 'peer_addresses'
|
|
153
|
+
and other profile keys.
|
|
154
|
+
|
|
155
|
+
Returns:
|
|
156
|
+
NodeProfile: A new instance of NodeProfile populated from the dictionary.
|
|
157
|
+
|
|
158
|
+
Raises:
|
|
159
|
+
ValueError: If 'node_id' is missing in the input dictionary.
|
|
160
|
+
TypeError: If the 'cv' data is present but not a list.
|
|
161
|
+
"""
|
|
162
|
+
|
|
163
|
+
# Ensure essential 'node_id' is present
|
|
164
|
+
node_id = combined_data.get('static').get('node_id')
|
|
165
|
+
if not node_id:
|
|
166
|
+
raise ValueError("Input dictionary must contain a 'node_id'.")
|
|
167
|
+
|
|
168
|
+
profile_instance = cls(
|
|
169
|
+
static=combined_data['static'],
|
|
170
|
+
dynamic=combined_data['dynamic'],
|
|
171
|
+
cv=combined_data['cv']
|
|
172
|
+
)
|
|
173
|
+
|
|
174
|
+
return profile_instance
|
|
175
|
+
|
|
176
|
+
# Get operating system information
|
|
177
|
+
@staticmethod
|
|
178
|
+
def _get_os_spec():
|
|
179
|
+
"""Extracts operating system information."""
|
|
180
|
+
return platform.platform()
|
|
181
|
+
|
|
182
|
+
# Get cpu information
|
|
183
|
+
@staticmethod
|
|
184
|
+
def _get_cpu_info():
|
|
185
|
+
"""Extracts CPU core information."""
|
|
186
|
+
try:
|
|
187
|
+
return {
|
|
188
|
+
'physical_cores': psutil.cpu_count(logical=False),
|
|
189
|
+
'logical_cores': psutil.cpu_count(logical=True)
|
|
190
|
+
}
|
|
191
|
+
except Exception as e:
|
|
192
|
+
print(f"Error getting CPU info: {e}")
|
|
193
|
+
return {'physical_cores': None, 'logical_cores': None}
|
|
194
|
+
|
|
195
|
+
# Get memory information
|
|
196
|
+
@staticmethod
|
|
197
|
+
def _get_memory_info():
|
|
198
|
+
"""Extracts memory information in GB."""
|
|
199
|
+
try:
|
|
200
|
+
mem = psutil.virtual_memory()
|
|
201
|
+
total_gb = mem.total / (1024 ** 3)
|
|
202
|
+
available_gb = mem.available / (1024 ** 3)
|
|
203
|
+
used_gb = mem.used / (1024 ** 3)
|
|
204
|
+
return {
|
|
205
|
+
'total': float(total_gb),
|
|
206
|
+
'available': float(available_gb),
|
|
207
|
+
'used': float(used_gb)
|
|
208
|
+
}
|
|
209
|
+
except Exception as e:
|
|
210
|
+
print(f"Error getting memory info: {e}")
|
|
211
|
+
return {'total': 0.0, 'available': 0.0, 'used': 0.0}
|
|
212
|
+
|
|
213
|
+
# Get public ip address
|
|
214
|
+
@staticmethod
|
|
215
|
+
def _get_public_ip_address() -> str | None:
|
|
216
|
+
"""Attempts to retrieve the public IP address using an external web service.
|
|
217
|
+
Uses multiple services as fallbacks.
|
|
218
|
+
Returns the public IP address string or None if retrieval fails.
|
|
219
|
+
"""
|
|
220
|
+
|
|
221
|
+
# List of reliable services that return the public IP as plain text
|
|
222
|
+
services = [
|
|
223
|
+
"https://api.ipify.org",
|
|
224
|
+
"https://icanhazip.com",
|
|
225
|
+
"https://ident.me",
|
|
226
|
+
"https://checkip.amazonaws.com",
|
|
227
|
+
]
|
|
228
|
+
|
|
229
|
+
# Print("Attempting to retrieve public IP address...")
|
|
230
|
+
for url in services:
|
|
231
|
+
try:
|
|
232
|
+
|
|
233
|
+
# Make a GET request to the service URL with a timeout
|
|
234
|
+
response = requests.get(url, timeout=5)
|
|
235
|
+
|
|
236
|
+
# Raise an HTTPError for bad responses (4xx or 5xx status codes)
|
|
237
|
+
response.raise_for_status()
|
|
238
|
+
|
|
239
|
+
# Get the response text, which should be the IP address, and strip any whitespace
|
|
240
|
+
public_ip = response.text.strip()
|
|
241
|
+
|
|
242
|
+
# Basic validation - check if the result looks like a valid IP address
|
|
243
|
+
try:
|
|
244
|
+
ipaddress.ip_address(public_ip) # This checks if it's a valid IPv4 or IPv6 address
|
|
245
|
+
|
|
246
|
+
return public_ip # Return the first valid IP found
|
|
247
|
+
|
|
248
|
+
except ValueError:
|
|
249
|
+
|
|
250
|
+
# If ipaddress.ip_address raises ValueError, it's not a valid format
|
|
251
|
+
continue # Try the next service if validation fails
|
|
252
|
+
|
|
253
|
+
except requests.exceptions.RequestException:
|
|
254
|
+
|
|
255
|
+
# Catch any request-related errors (e.g., network issues, timeout, bad status)
|
|
256
|
+
continue # Try the next service on error
|
|
257
|
+
|
|
258
|
+
except Exception:
|
|
259
|
+
|
|
260
|
+
# Catch any other unexpected errors
|
|
261
|
+
continue # Try the next service on error
|
|
262
|
+
|
|
263
|
+
return 'Public IP not available.' # Return None if all services fail
|
|
264
|
+
|
|
265
|
+
# Get guessed location based on IP address
|
|
266
|
+
def _get_geolocation_from_ip(self, ip_address):
|
|
267
|
+
"""Retrieves geolocation data (same as before)."""
|
|
268
|
+
|
|
269
|
+
# Added a check for local/private IPs to avoid unnecessary API calls
|
|
270
|
+
try:
|
|
271
|
+
ip_obj = ipaddress.ip_address(ip_address)
|
|
272
|
+
if ip_obj.is_private or ip_obj.is_loopback or ip_obj.is_unspecified:
|
|
273
|
+
return {"message": "Private, loopback, or unspecified IP address. Geolocation not applicable."}
|
|
274
|
+
except ValueError:
|
|
275
|
+
return {"error": f"Invalid IP address format: {ip_address}"}
|
|
276
|
+
|
|
277
|
+
# Added a simple cache to avoid repeated API calls for the same IP
|
|
278
|
+
if hasattr(self, '_geolocation_cache') and ip_address in self._geolocation_cache:
|
|
279
|
+
|
|
280
|
+
# Print(f"Using cached geolocation for {ip_address}") # Optional: for debugging
|
|
281
|
+
return self._geolocation_cache[ip_address]
|
|
282
|
+
|
|
283
|
+
try:
|
|
284
|
+
url = f"http://ip-api.com/json/{ip_address}"
|
|
285
|
+
response = requests.get(url)
|
|
286
|
+
response.raise_for_status()
|
|
287
|
+
data = response.json()
|
|
288
|
+
if data.get("status") == "success":
|
|
289
|
+
geo_data = {
|
|
290
|
+
"country": data.get("country"),
|
|
291
|
+
"countryCode": data.get("countryCode"),
|
|
292
|
+
"region": data.get("region"),
|
|
293
|
+
"regionName": data.get("regionName"),
|
|
294
|
+
"city": data.get("city"),
|
|
295
|
+
"zip": data.get("zip"),
|
|
296
|
+
"latitude": data.get("lat"),
|
|
297
|
+
"longitude": data.get("lon"),
|
|
298
|
+
"timezone": data.get("timezone"),
|
|
299
|
+
"isp": data.get("isp")
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
# Cache the result
|
|
303
|
+
if not hasattr(self, '_geolocation_cache'):
|
|
304
|
+
self._geolocation_cache = {}
|
|
305
|
+
self._geolocation_cache[ip_address] = geo_data
|
|
306
|
+
return geo_data
|
|
307
|
+
else:
|
|
308
|
+
error_data = {"error": data.get("message", "Geolocation lookup failed.")}
|
|
309
|
+
|
|
310
|
+
# Cache the error result too
|
|
311
|
+
if not hasattr(self, '_geolocation_cache'):
|
|
312
|
+
self._geolocation_cache = {}
|
|
313
|
+
self._geolocation_cache[ip_address] = error_data
|
|
314
|
+
return error_data
|
|
315
|
+
|
|
316
|
+
except requests.exceptions.RequestException as e:
|
|
317
|
+
error_data = {"error": f"Request failed: {e}"}
|
|
318
|
+
if not hasattr(self, '_geolocation_cache'):
|
|
319
|
+
self._geolocation_cache = {}
|
|
320
|
+
self._geolocation_cache[ip_address] = error_data
|
|
321
|
+
return error_data
|
|
322
|
+
|
|
323
|
+
except json.JSONDecodeError:
|
|
324
|
+
error_data = {"error": "Failed to decode JSON response from geolocation API"}
|
|
325
|
+
if not hasattr(self, '_geolocation_cache'):
|
|
326
|
+
self._geolocation_cache = {}
|
|
327
|
+
self._geolocation_cache[ip_address] = error_data
|
|
328
|
+
return error_data
|
|
329
|
+
|
|
330
|
+
except Exception as e:
|
|
331
|
+
error_data = {"error": f"An unexpected error occurred during geolocation lookup: {e}"}
|
|
332
|
+
if not hasattr(self, '_geolocation_cache'):
|
|
333
|
+
self._geolocation_cache = {}
|
|
334
|
+
self._geolocation_cache[ip_address] = error_data
|
|
335
|
+
return error_data
|
|
336
|
+
|
|
337
|
+
# This is the function that collects all the information for the 'node_specification'
|
|
338
|
+
def _get_current_specs(self) -> dict:
|
|
339
|
+
"""Gathers current system specifications.
|
|
340
|
+
"""
|
|
341
|
+
cpu_info = self._get_cpu_info()
|
|
342
|
+
memory_info = self._get_memory_info()
|
|
343
|
+
|
|
344
|
+
return {
|
|
345
|
+
'timestamp': datetime.datetime.now(timezone.utc).isoformat(),
|
|
346
|
+
'os': self._get_os_spec(),
|
|
347
|
+
'cpu_cores': cpu_info.get('physical_cores'),
|
|
348
|
+
'logical_cpus': cpu_info.get('logical_cores'),
|
|
349
|
+
'memory_gb': memory_info.get('total'),
|
|
350
|
+
'memory_avail': memory_info.get('available'),
|
|
351
|
+
'memory_used': memory_info.get('used'),
|
|
352
|
+
'public_ip_address': self._get_public_ip_address(),
|
|
353
|
+
'guessed_location': self._get_geolocation_from_ip(self._get_public_ip_address()),
|
|
354
|
+
}
|
|
355
|
+
|
|
356
|
+
def _fill_missing_specs(self):
|
|
357
|
+
dynamic_profile = self.get_dynamic_profile()
|
|
358
|
+
current_specs = None
|
|
359
|
+
for k in dynamic_profile.keys():
|
|
360
|
+
if dynamic_profile[k] is None:
|
|
361
|
+
if current_specs is None:
|
|
362
|
+
current_specs = self._get_current_specs()
|
|
363
|
+
if k in current_specs:
|
|
364
|
+
dynamic_profile[k] = current_specs[k]
|
|
365
|
+
|
|
366
|
+
self._profile_last_updated = datetime.datetime.now(timezone.utc) # Mark profile as checked/updated
|
|
367
|
+
|
|
368
|
+
    def check_and_update_specs(self, update_only: bool = True) -> bool:
        """Checks current specs against saved specs. Updates profile data.

        Args:
            update_only: When True, merge the freshly gathered specs into the
                dynamic profile unconditionally and report no change. When
                False, compare field by field (skipping 'timestamp') and merge
                only if something differs.

        Returns:
            bool: True only when update_only is False and a difference was
                detected; False otherwise.
        """

        current_specs = self._get_current_specs()
        specs_changed = False

        if update_only:
            # Unconditional merge; '|=' keeps the other dynamic keys untouched
            self._profile_data['dynamic'] |= current_specs
        else:
            saved_specs = self._profile_data['dynamic'].copy()
            change_details = []

            if saved_specs is None:
                # NOTE(review): dead branch — dict.copy() never returns None;
                # kept as defensive code, consider removing
                # No previous specification exists, capture the current one
                self._profile_data['dynamic'] |= current_specs
                specs_changed = True
                change_details.append("Initial specification captured")

            else:
                # Compare current specs with saved specs (ignore timestamp for comparison)
                keys_to_compare = current_specs.keys()

                for key in keys_to_compare:
                    if key == 'timestamp':
                        continue

                    saved_value = saved_specs.get(key)
                    current_value = current_specs.get(key)

                    # Handle float comparison with tolerance
                    if isinstance(saved_value, float) and isinstance(current_value, float):
                        if abs(current_value - saved_value) > 1e-6:  # Tolerance for float changes
                            change_details.append(f"{key}: from {saved_value:.2f} to {current_value:.2f}")
                            specs_changed = True

                    elif saved_value != current_value:
                        change_details.append(f"{key}: from {saved_value} to {current_value}")
                        specs_changed = True

            # Comparing total resources (OS, CPU, total RAM/Disk) is more typical for 'specification' changes.
            if specs_changed:
                # Update the specification in the profile data with the new current specs
                self._profile_data['dynamic'] |= current_specs
                change_summary = ", ".join(change_details)
                print(f"Specs changed for '{self._profile_data['static']['node_id']}': {change_summary}")

        self._profile_last_updated = datetime.datetime.now(timezone.utc)  # Mark profile as checked/updated

        return specs_changed
|
|
420
|
+
|
|
421
|
+
# Get profile data as dict: cv, dynamic_profile, static_profile
|
|
422
|
+
def get_static_profile(self) -> dict:
|
|
423
|
+
return self._profile_data['static']
|
|
424
|
+
|
|
425
|
+
def get_dynamic_profile(self) -> dict:
|
|
426
|
+
return self._profile_data['dynamic']
|
|
427
|
+
|
|
428
|
+
def get_cv(self):
|
|
429
|
+
return self._profile_data['cv']
|
|
430
|
+
|
|
431
|
+
def get_all_profile(self):
|
|
432
|
+
return self._profile_data
|
|
433
|
+
|
|
434
|
+
def mark_change_in_connections(self):
|
|
435
|
+
self._connections_updated = True
|
|
436
|
+
|
|
437
|
+
def unmark_change_in_connections(self):
|
|
438
|
+
self._connections_updated = False
|
|
439
|
+
|
|
440
|
+
def connections_changed(self):
|
|
441
|
+
return self._connections_updated
|
|
442
|
+
|
|
443
|
+
def verify_cv_hash(self, cv_hash: str):
|
|
444
|
+
computed_hash = hashlib.blake2b(json.dumps(self._profile_data['cv']).encode("utf-8"),
|
|
445
|
+
digest_size=16).hexdigest()
|
|
446
|
+
return cv_hash == computed_hash, (cv_hash, computed_hash)
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""
|
|
2
|
+
█████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
|
|
3
|
+
░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
|
|
4
|
+
░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
|
|
5
|
+
░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
|
|
6
|
+
░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
|
|
7
|
+
░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
|
|
8
|
+
░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
|
|
9
|
+
░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
|
|
10
|
+
A Collectionless AI Project (https://collectionless.ai)
|
|
11
|
+
Registration/Login: https://unaiverse.io
|
|
12
|
+
Code Repositories: https://github.com/collectionlessai/
|
|
13
|
+
Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
|
|
14
|
+
"""
|
|
15
|
+
import jwt
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class TokenVerifier:
|
|
19
|
+
def __init__(self, public_key: str | bytes):
|
|
20
|
+
"""Initializes the `TokenVerifier` with a public key.
|
|
21
|
+
|
|
22
|
+
This key is essential for securely decoding and verifying JSON Web Tokens (JWTs) issued by a corresponding
|
|
23
|
+
private key. The public key can be provided as either a string or a bytes object.
|
|
24
|
+
|
|
25
|
+
Args:
|
|
26
|
+
public_key: The public key used for decoding and verification.
|
|
27
|
+
"""
|
|
28
|
+
self.public_key = public_key
|
|
29
|
+
|
|
30
|
+
def verify_token(self, token: str | bytes,
|
|
31
|
+
node_id: str | None = None, ip: str | None = None,
|
|
32
|
+
hostname: str | None = None,
|
|
33
|
+
port: int | None = None,
|
|
34
|
+
p2p_peer: str | None = None):
|
|
35
|
+
"""Verifies a JSON Web Token (JWT) against a set of criteria.
|
|
36
|
+
|
|
37
|
+
The method first attempts to decode the token using the provided public key and the RS256 algorithm,
|
|
38
|
+
handling `DecodeError` and `ExpiredSignatureError`. It then performs optional checks to ensure that
|
|
39
|
+
the token's payload matches specific network identifiers, such as `node_id`, `ip`, `hostname`, and `port`.
|
|
40
|
+
It can also verify if a specific peer is present in the token's list of `p2p_peers`.
|
|
41
|
+
|
|
42
|
+
Args:
|
|
43
|
+
token: The JWT to verify, as a string or bytes object.
|
|
44
|
+
node_id: Optional `node_id` to check against the token's payload.
|
|
45
|
+
ip: Optional IP address to check.
|
|
46
|
+
hostname: Optional hostname to check.
|
|
47
|
+
port: Optional port number to check.
|
|
48
|
+
p2p_peer: Optional peer identifier to check within the `p2p_peers` list.
|
|
49
|
+
|
|
50
|
+
Returns:
|
|
51
|
+
A tuple containing the `node_id` and `cv_hash` from the token's payload if all checks pass. Otherwise,
|
|
52
|
+
it returns a tuple of `(None, None)`.
|
|
53
|
+
"""
|
|
54
|
+
|
|
55
|
+
# Decoding token using the public key
|
|
56
|
+
try:
|
|
57
|
+
payload = jwt.decode(token, self.public_key, algorithms=["RS256"])
|
|
58
|
+
except jwt.DecodeError as e:
|
|
59
|
+
return None, None
|
|
60
|
+
except jwt.ExpiredSignatureError as e: # This checks expiration time (required)
|
|
61
|
+
return None, None
|
|
62
|
+
|
|
63
|
+
# Checking optional information
|
|
64
|
+
if node_id is not None and payload["node_id"] != node_id:
|
|
65
|
+
return None, None
|
|
66
|
+
if ip is not None and payload["ip"] != ip:
|
|
67
|
+
return None, None
|
|
68
|
+
if hostname is not None and payload["hostname"] != hostname:
|
|
69
|
+
return None, None
|
|
70
|
+
if port is not None and payload["port"] != port:
|
|
71
|
+
return None, None
|
|
72
|
+
if p2p_peer is not None and p2p_peer not in payload["p2p_peers"]:
|
|
73
|
+
return None, None
|
|
74
|
+
|
|
75
|
+
# All ok
|
|
76
|
+
return payload["node_id"], payload["cv_hash"]
|
|
77
|
+
|
|
78
|
+
def __str__(self):
|
|
79
|
+
return f"[{self.__class__.__name__}] public_key: {self.public_key[0:50] + b'...'}"
|