unaiverse 0.1.11__cp311-cp311-macosx_11_0_arm64.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in the respective public registries.

Potentially problematic release: this version of unaiverse might be problematic.

Files changed (50)
  1. unaiverse/__init__.py +19 -0
  2. unaiverse/agent.py +2090 -0
  3. unaiverse/agent_basics.py +1948 -0
  4. unaiverse/clock.py +221 -0
  5. unaiverse/dataprops.py +1236 -0
  6. unaiverse/hsm.py +1892 -0
  7. unaiverse/modules/__init__.py +18 -0
  8. unaiverse/modules/cnu/__init__.py +17 -0
  9. unaiverse/modules/cnu/cnus.py +536 -0
  10. unaiverse/modules/cnu/layers.py +261 -0
  11. unaiverse/modules/cnu/psi.py +60 -0
  12. unaiverse/modules/hl/__init__.py +15 -0
  13. unaiverse/modules/hl/hl_utils.py +411 -0
  14. unaiverse/modules/networks.py +1509 -0
  15. unaiverse/modules/utils.py +710 -0
  16. unaiverse/networking/__init__.py +16 -0
  17. unaiverse/networking/node/__init__.py +18 -0
  18. unaiverse/networking/node/connpool.py +1308 -0
  19. unaiverse/networking/node/node.py +2499 -0
  20. unaiverse/networking/node/profile.py +446 -0
  21. unaiverse/networking/node/tokens.py +79 -0
  22. unaiverse/networking/p2p/__init__.py +187 -0
  23. unaiverse/networking/p2p/go.mod +127 -0
  24. unaiverse/networking/p2p/go.sum +548 -0
  25. unaiverse/networking/p2p/golibp2p.py +18 -0
  26. unaiverse/networking/p2p/golibp2p.pyi +135 -0
  27. unaiverse/networking/p2p/lib.go +2662 -0
  28. unaiverse/networking/p2p/lib.go.sha256 +1 -0
  29. unaiverse/networking/p2p/lib_types.py +312 -0
  30. unaiverse/networking/p2p/message_pb2.py +50 -0
  31. unaiverse/networking/p2p/messages.py +362 -0
  32. unaiverse/networking/p2p/mylogger.py +77 -0
  33. unaiverse/networking/p2p/p2p.py +871 -0
  34. unaiverse/networking/p2p/proto-go/message.pb.go +846 -0
  35. unaiverse/networking/p2p/unailib.cpython-311-darwin.so +0 -0
  36. unaiverse/stats.py +1481 -0
  37. unaiverse/streamlib/__init__.py +15 -0
  38. unaiverse/streamlib/streamlib.py +210 -0
  39. unaiverse/streams.py +776 -0
  40. unaiverse/utils/__init__.py +16 -0
  41. unaiverse/utils/lone_wolf.json +24 -0
  42. unaiverse/utils/misc.py +310 -0
  43. unaiverse/utils/sandbox.py +293 -0
  44. unaiverse/utils/server.py +435 -0
  45. unaiverse/world.py +335 -0
  46. unaiverse-0.1.11.dist-info/METADATA +367 -0
  47. unaiverse-0.1.11.dist-info/RECORD +50 -0
  48. unaiverse-0.1.11.dist-info/WHEEL +6 -0
  49. unaiverse-0.1.11.dist-info/licenses/LICENSE +43 -0
  50. unaiverse-0.1.11.dist-info/top_level.txt +1 -0
--- /dev/null
+++ b/unaiverse/utils/__init__.py
@@ -0,0 +1,16 @@
+ """
+ █████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
+ ░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
+ ░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
+ ░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
+ ░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
+ ░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
+ ░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
+ ░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
+ A Collectionless AI Project (https://collectionless.ai)
+ Registration/Login: https://unaiverse.io
+ Code Repositories: https://github.com/collectionlessai/
+ Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
+ """
+ from . import misc
+ from . import sandbox
--- /dev/null
+++ b/unaiverse/utils/lone_wolf.json
@@ -0,0 +1,24 @@
+ {
+     "initial_state": "ready",
+     "state": "ready",
+     "prev_state": null,
+     "limbo_state": null,
+     "state_actions": {
+         "ready": [null, null, 0, false, 0.0],
+         "i_asked_a_wolf": [null, null, 1, false, 0.0],
+         "i_did_generate": [null, null, 2, false, 0.0]
+     },
+     "transitions": {
+         "ready": {
+             "i_asked_a_wolf": [["ask_gen", {"u_hashes": ["<agent>:processor"], "samples": 1}, false, 0]],
+             "i_did_generate": [["do_gen", {"timeout": 90.0}, false, 1]]
+         },
+         "i_asked_a_wolf": {
+             "ready": [["nop", {}, true, 2]]
+         },
+         "i_did_generate": {
+             "ready": [["nop", {}, true, 3]]
+         }
+     },
+     "cur_action": null
+ }
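
The JSON above (unaiverse/utils/lone_wolf.json) encodes a small three-state machine: ready, i_asked_a_wolf and i_did_generate, each with a per-state action entry and a transition table whose rules name actions such as ask_gen and do_gen. The loader that consumes this file is not part of this hunk (the state-machine logic presumably lives in unaiverse/hsm.py), so the following is only a sketch of how the raw structure could be inspected with the standard library; the meaning of the last two fields of each rule is not documented in this diff and is left unnamed.

    # Illustrative inspection of the state-machine JSON shown above (standard library only).
    # This is NOT the package's own loader; field meanings beyond the obvious ones are not
    # documented in this diff.
    import json

    with open("lone_wolf.json") as f:           # path is a placeholder
        sm = json.load(f)

    print("Initial state:", sm["initial_state"])
    for target, rules in sm["transitions"][sm["state"]].items():
        for name, args, flag, idx in rules:     # last two fields: undocumented here
            print(f"  {sm['state']} -> {target}: action={name}, args={args}")
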
--- /dev/null
+++ b/unaiverse/utils/misc.py
@@ -0,0 +1,310 @@
+ """
+ █████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
+ ░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
+ ░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
+ ░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
+ ░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
+ ░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
+ ░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
+ ░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
+ A Collectionless AI Project (https://collectionless.ai)
+ Registration/Login: https://unaiverse.io
+ Code Repositories: https://github.com/collectionlessai/
+ Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
+ """
+ import os
+ import ast
+ import sys
+ import time
+ import json
+ import math
+ import shutil
+ import threading
+ from tqdm import tqdm
+ from pathlib import Path
+ from datetime import datetime
+
+
+ class GenException(Exception):
+     """Base exception for this application (a simple wrapper around a generic Exception)."""
+     pass
+
+
+ def save_node_addresses_to_file(node, dir_path: str, public: bool,
+                                 filename: str = "addresses.txt", append: bool = False):
+     address_file = os.path.join(dir_path, filename)
+     with open(address_file, "w" if not append else "a") as file:
+         file.write(node.hosted.get_name() + ";" +
+                    str(node.get_public_addresses() if public else node.get_world_addresses()) + "\n")
+         file.flush()
+
+
+ def get_node_addresses_from_file(dir_path: str, filename: str = "addresses.txt") -> dict[str, list[str]]:
+     ret = {}
+     with open(os.path.join(dir_path, filename)) as file:
+         lines = file.readlines()
+
+     # Old file format
+     if lines[0].strip() == "/":
+         addresses = []
+         for line in lines:
+             _line = line.strip()
+             if len(_line) > 0:
+                 addresses.append(_line)
+         ret["unk"] = addresses
+         return ret
+
+     # New file format
+     for line in lines:
+         if line.strip().startswith("***"):  # Header marker
+             continue
+         comma_separated_values = [v.strip() for v in line.split(';')]
+         node_name, addresses_str = comma_separated_values
+         ret[node_name] = ast.literal_eval(addresses_str)  # Name appearing multiple times? the last entry is kept
+
+     return ret
+
+
+ class Silent:
+     def __init__(self, ignore: bool = False):
+         self.ignore = ignore
+
+     def __enter__(self):
+         if not self.ignore:
+             self._original_stdout = sys.stdout
+             sys.stdout = open(os.devnull, "w")
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if not self.ignore:
+             sys.stdout.close()
+             sys.stdout = self._original_stdout
+
+
+ # The countdown function
+ def countdown_start(seconds: int, msg: str):
+     class TqdmPrintRedirector:
+         def __init__(self, tqdm_instance):
+             self.tqdm_instance = tqdm_instance
+             self.original_stdout = sys.__stdout__
+
+         def write(self, s):
+             if s.strip():  # Ignore empty lines (needed for the way tqdm works)
+                 self.tqdm_instance.write(s, file=self.original_stdout)
+
+         def flush(self):
+             pass  # Tqdm handles flushing
+
+     def drawing(secs: int, message: str):
+         with tqdm(total=secs, desc=message, file=sys.__stdout__) as t:
+             sys.stdout = TqdmPrintRedirector(t)  # Redirect prints to tqdm.write
+             for i in range(secs):
+                 time.sleep(1)
+                 t.update(1.)
+             sys.stdout = sys.__stdout__  # Restore original stdout
+
+     sys.stdout.flush()
+     handle = threading.Thread(target=drawing, args=(seconds, msg))
+     handle.start()
+     return handle
+
+
+ def countdown_wait(handle):
+     handle.join()
+
+
+ def check_json_start(file: str, msg: str, delete_existing: bool = False):
+     from rich.json import JSON
+     from rich.console import Console
+     cons = Console(file=sys.__stdout__)
+
+     if delete_existing:
+         if os.path.exists(file):
+             os.remove(file)
+
+     def checking(file_path: str, console: Console):
+         print(msg)
+         prev_dict = {}
+         while True:
+             if os.path.exists(file_path):
+                 try:
+                     with open(file_path, "r") as f:
+                         json_dict = json.load(f)
+                     if json_dict != prev_dict:
+                         now = datetime.now()
+                         console.print("─" * 80)
+                         console.print("Printing updated file "
+                                       "(print time: " + now.strftime("%Y-%m-%d %H:%M:%S") + ")")
+                         console.print("─" * 80)
+                         console.print(JSON.from_data(json_dict))
+                         prev_dict = json_dict
+                 except KeyboardInterrupt:
+                     break
+                 except Exception:
+                     pass
+             time.sleep(1)
+
+     handle = threading.Thread(target=checking, args=(file, cons), daemon=True)
+     handle.start()
+     return handle
+
+
+ def check_json_start_wait(handle):
+     handle.join()
+
+
+ def show_images_grid(image_paths, max_cols=3):
+     import matplotlib.pyplot as plt
+     import matplotlib.image as mpimg
+
+     n = len(image_paths)
+     cols = min(max_cols, n)
+     rows = math.ceil(n / cols)
+
+     # Load images
+     images = [mpimg.imread(p) for p in image_paths]
+
+     # Determine figure size based on image sizes
+     widths, heights = zip(*[(img.shape[1], img.shape[0]) for img in images])
+
+     # Use average width/height for scaling
+     avg_width = sum(widths) / len(widths)
+     avg_height = sum(heights) / len(heights)
+
+     fig_width = cols * avg_width / 100
+     fig_height = rows * avg_height / 100
+
+     fig, axes = plt.subplots(rows, cols, figsize=(fig_width, fig_height))
+     axes = axes.flatten() if n > 1 else [axes]
+
+     fig.canvas.manager.set_window_title("Image Grid")
+
+     # Hide unused axes
+     for ax in axes[n:]:
+         ax.axis('off')
+
+     for idx, (ax, img) in enumerate(zip(axes, images)):
+         ax.imshow(img)
+         ax.axis('off')
+         ax.set_title(str(idx), fontsize=12, fontweight='bold')
+
+     # Display images
+     for ax, img in zip(axes, images):
+         ax.imshow(img)
+         ax.axis('off')
+
+     plt.subplots_adjust(wspace=0, hspace=0)
+
+     # Turn on interactive mode
+     plt.ion()
+     plt.show()
+
+     fig.canvas.draw()
+     plt.pause(0.1)
+
+
+ class FileTracker:
+     def __init__(self, folder, ext=".json", prefix=None, skip=None):
+         self.folder = Path(folder)
+         self.ext = ext.lower()
+         self.skip = skip
+         self.prefix = prefix
+         self.last_state = self.__scan_files()
+
+     def __scan_files(self):
+         state = {}
+         for file in self.folder.iterdir():
+             if ((file.is_file() and file.suffix.lower() == self.ext and
+                     (self.skip is None or file.name != self.skip)) and
+                     (self.prefix is None or file.name.startswith(self.prefix))):
+                 state[file.name] = os.path.getmtime(file)
+         return state
+
+     def something_changed(self):
+         new_state = self.__scan_files()
+         created = [f for f in new_state if f not in self.last_state]
+         modified = [f for f in new_state
+                     if f in self.last_state and new_state[f] != self.last_state[f]]
+         self.last_state = new_state
+         return created or modified
+
+
+ def prepare_key_dir(app_name):
+     app_name = app_name.lower()
+     if os.name == "nt":  # Windows
+         if os.getenv("APPDATA") is not None:
+             key_dir = os.path.join(os.getenv("APPDATA"), "Local", app_name)  # Expected
+         else:
+             key_dir = os.path.join(str(Path.home()), f".{app_name}")  # Fallback
+     else:  # Linux/macOS
+         key_dir = os.path.join(str(Path.home()), f".{app_name}")
+     os.makedirs(key_dir, exist_ok=True)
+     return key_dir
+
+
+ def get_key_considering_multiple_sources(key_variable: str | None) -> str:
+
+     # Creating folder (if needed) to store the key
+     try:
+         key_dir = prepare_key_dir(app_name="UNaIVERSE")
+     except Exception:
+         raise GenException("Cannot create folder to store the key file")
+     key_file = os.path.join(key_dir, "key")
+
+     # Getting from an existing file
+     key_from_file = None
+     if os.path.exists(key_file):
+         with open(key_file, "r") as f:
+             key_from_file = f.read().strip()
+
+     # Getting from env variable
+     key_from_env = os.getenv("NODE_KEY", None)
+
+     # Getting from code-specified option
+     if key_variable is not None and len(key_variable.strip()) > 0:
+         key_from_var = key_variable.strip()
+         if key_from_var.startswith("<") and key_from_var.endswith(">"):  # Something like <UNAIVERSE_KEY_GOES_HERE>
+             key_from_var = None
+     else:
+         key_from_var = None
+
+     # Finding valid sources and checking if multiple keys were provided
+     _keys = [key_from_var, key_from_env, key_from_file]
+     _source_names = ["your code", "env variable 'NODE_KEY'", f"cache file {key_file}"]
+     source_names = []
+     mismatching = False
+     multiple_source = False
+     first_key = None
+     first_source = None
+     _prev_key = None
+     for i, (_key, _source_name) in enumerate(zip(_keys, _source_names)):
+         if _key is not None:
+             source_names.append(_source_name)
+             if _prev_key is not None:
+                 if _key != _prev_key:
+                     mismatching = True
+                 multiple_source = True
+             else:
+                 _prev_key = _key
+                 first_key = _key
+                 first_source = _source_name
+
+     if len(source_names) > 0:
+         msg = ""
+         if multiple_source and not mismatching:
+             msg = "UNaIVERSE key (the exact same key) present in multiple locations: " + ", ".join(source_names)
+         if multiple_source and mismatching:
+             msg = "UNaIVERSE keys (different keys) present in multiple locations: " + ", ".join(source_names)
+         msg += "\nLoaded the one stored in " + first_source
+         if not multiple_source:
+             msg = f"UNaIVERSE key loaded from {first_source}"
+         print(msg)
+         return first_key
+     else:
+
+         # If no key present, ask user and save to file
+         print("UNaIVERSE key not present in " + ", ".join(_source_names))
+         print("If you did not already do it, go to https://unaiverse.io, login, and generate a key")
+         key = input("Enter your UNaIVERSE key, that will be saved to the cache file: ").strip()
+         with open(key_file, "w") as f:
+             f.write(key)
+         return key
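
In misc.py above, get_key_considering_multiple_sources() resolves the UNaIVERSE key from up to three sources (a value passed in code, the NODE_KEY environment variable, and a cache file under the directory returned by prepare_key_dir), warns when several sources are present or disagree, and falls back to an interactive prompt that writes the cache file. A minimal usage sketch, assuming the wheel is installed; the call pattern is taken from the signatures in this hunk, not from any documented API:

    # Sketch only: exercises the helpers defined in unaiverse/utils/misc.py above.
    import os
    from unaiverse.utils.misc import (get_key_considering_multiple_sources,
                                      countdown_start, countdown_wait)

    os.environ["NODE_KEY"] = "demo-key"                 # one of the three accepted sources
    key = get_key_considering_multiple_sources(None)    # no in-code key: falls back to env/cache
    print("Resolved key of length", len(key))

    handle = countdown_start(5, "warming up")           # non-blocking tqdm countdown thread
    countdown_wait(handle)                              # join it
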
--- /dev/null
+++ b/unaiverse/utils/sandbox.py
@@ -0,0 +1,293 @@
+ """
+ █████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
+ ░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
+ ░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
+ ░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
+ ░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
+ ░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
+ ░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
+ ░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
+ A Collectionless AI Project (https://collectionless.ai)
+ Registration/Login: https://unaiverse.io
+ Code Repositories: https://github.com/collectionlessai/
+ Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
+ """
+ import os
+ import sys
+ import uuid
+ import argparse
+ import subprocess
+ from pathlib import Path
+ from unaiverse.networking.p2p import P2P
+
+ # Configuration
+ DOCKER_IMAGE_NAME = "unaiverse-sandbox"
+ CONTAINER_NAME_BASE = "unaiverse-sandbox-container"
+ CONTAINER_NAME = f"{CONTAINER_NAME_BASE}-{uuid.uuid4().hex[:8]}"  # Append a short unique ID
+ DOCKERFILE_CONTENT = """
+
+ # Debian image, automatically guessed architecture
+ FROM python:3.12-slim-bookworm
+
+ # Installing Go compiler
+ RUN apt-get update && apt-get install -y --no-install-recommends build-essential curl git
+ RUN rm -rf /var/lib/apt/lists/*
+ RUN ARCH=$(dpkg --print-architecture) && curl -LO https://go.dev/dl/go1.24.5.linux-${ARCH}.tar.gz
+ RUN ARCH=$(dpkg --print-architecture) && tar -C /usr/local -xzf go1.24.5.linux-${ARCH}.tar.gz
+ RUN ARCH=$(dpkg --print-architecture) && rm go1.24.5.linux-${ARCH}.tar.gz
+
+ # Set Go environment variables
+ ENV PATH="/usr/local/go/bin:${PATH}"
+ ENV GOPATH="/go"
+ RUN mkdir -p /go/bin /go/src /go/pkg
+
+ # Setting the working directory inside the container
+ WORKDIR /unaiverse
+
+ # Dependencies
+ RUN <create_requirements.txt>
+ RUN pip install --no-cache-dir -r requirements.txt --break-system-packages
+ """
+
+
+ def sandbox(file_to_run: str,
+             read_only_paths: tuple[str] | list[str] | None = None,
+             writable_paths: tuple[str] | list[str] | None = None) -> None:
+
+     # Path of this file
+     absolute_path_of_this_file = os.path.abspath(__file__)
+
+     # Folders composing the path (and file name at the end)
+     path_components = list(Path(absolute_path_of_this_file).parts)
+
+     # Ensuring the folder/file structure was not manipulated
+     assert path_components[-1] == 'sandbox.py', "Major security issue, stopping."
+     assert path_components[-2] == 'utils', "Major security issue, stopping."
+     assert path_components[-3] == 'unaiverse', "Major security issue, stopping."
+
+     # Main folder of UNaIVERSE
+     abspath_of_unaiverse_code = str(Path(*path_components[0:-3]))
+
+     # Clean up any remnants from previous runs first (safety)
+     cleanup_docker_artifacts(where=abspath_of_unaiverse_code)
+
+     # Requirements
+     echoed_contents_of_requirements = 'printf "'
+     with open(os.path.join(abspath_of_unaiverse_code, "requirements.txt"), 'r') as req_file:
+         req_lines = req_file.readlines()
+         for i, req_line in enumerate(req_lines):
+             if i != (len(req_lines) - 1) and len(req_line.strip()) > 0:
+                 echoed_contents_of_requirements += req_line.strip() + "\\n"
+             else:
+                 echoed_contents_of_requirements += req_line.strip() + "\\n\" > requirements.txt"
+
+     # Create Dockerfile
+     print("Creating Dockerfile...")
+     with open(os.path.join(abspath_of_unaiverse_code, "Dockerfile"), "w") as f:
+         f.write(DOCKERFILE_CONTENT.replace('<create_requirements.txt>', echoed_contents_of_requirements))
+
+     # Building Docker image
+     if not build_docker_image(where=abspath_of_unaiverse_code):
+         print("Exiting due to Docker image build failure")
+         cleanup_docker_artifacts(where=abspath_of_unaiverse_code)  # Try to clean up what was created (if any)
+         sys.exit(1)
+
+     # Read only folders from the host machine
+     read_only_mount_paths = ([abspath_of_unaiverse_code] +
+                              (list(read_only_paths) if read_only_paths is not None else []))
+
+     # Writable folders in host machine
+     writable_mount_paths = ([os.path.join(abspath_of_unaiverse_code, 'runners'),
+                              os.path.join(abspath_of_unaiverse_code, 'unaiverse', 'library'),
+                              os.path.join(abspath_of_unaiverse_code, 'unaiverse', 'networking', 'p2p')] +
+                             (list(writable_paths) if writable_paths is not None else []))
+
+     # Running
+     if not run_in_docker(file_to_run=os.path.abspath(file_to_run),
+                          read_only_host_paths=read_only_mount_paths,
+                          writable_host_paths=writable_mount_paths):
+         print("Exiting due to Docker container run failure")
+         sys.exit(1)
+
+     # Final cleanup
+     cleanup_docker_artifacts(where=abspath_of_unaiverse_code)
+
+
+ def build_docker_image(where: str):
+     """Builds the Docker image."""
+     print(f"Building Docker image '{DOCKER_IMAGE_NAME}'...")
+
+     try:
+
+         # The '.' at the end means build from the current directory
+         subprocess.run(["docker", "build", "-t", DOCKER_IMAGE_NAME, where], check=True)
+         print(f"Docker image '{DOCKER_IMAGE_NAME}' built successfully.")
+         return True
+     except subprocess.CalledProcessError as e:
+         print(f"Error building Docker image: {e}")
+         return False
+
+
+ def cleanup_docker_artifacts(where: str):
+     """Cleans up the generated files and Docker image."""
+     print("Cleaning...")
+
+     # Stop and remove container if it's still running (e.g., if previous run failed)
+     try:
+         print(f"Attempting to stop and remove container '{CONTAINER_NAME}' (if running)...")
+         subprocess.run(["docker", "stop", CONTAINER_NAME],
+                        check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+         subprocess.run(["docker", "rm", CONTAINER_NAME],
+                        check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+     except Exception as e:
+         print(f"Error during preliminary container cleanup: {e}")
+
+     # Remove the Docker image
+     try:
+         print(f"Removing Docker image '{DOCKER_IMAGE_NAME}'...")
+         subprocess.run(["docker", "rmi", DOCKER_IMAGE_NAME], check=True)
+         print("Docker image removed.")
+     except subprocess.CalledProcessError as e:
+         print(f"Error removing Docker image '{DOCKER_IMAGE_NAME}': {e}")
+
+     # Remove the generated Dockerfile
+     if os.path.exists(os.path.join(where, "Dockerfile")):
+         os.remove(os.path.join(where, "Dockerfile"))
+         print("Removed Dockerfile.")
+
+
+ def run_in_docker(file_to_run: str, read_only_host_paths: list[str] = None, writable_host_paths: list[str] = None):
+     """Runs the code in a Docker container with optional mounts."""
+     print(f"\nRunning code in Docker container '{CONTAINER_NAME}'...")
+
+     # Building command (it will continue below...)
+     command = ["docker", "run",
+                "--rm",  # Automatically remove the container when it exits
+                "-e", "PYTHONUNBUFFERED=1",  # Ensure Python output is unbuffered
+                "-e", "NODE_STARTING_PORT",
+                "--name", CONTAINER_NAME]
+
+     if sys.platform.startswith('linux'):
+
+         # Linux
+         command.extend(["--net", "host"]),  # Expose the host network (in macOS and Windows it is still a virtual host)
+     else:
+
+         # Not-linux: check ports (adding -p port:port)
+         port_int = int(os.getenv("NODE_STARTING_PORT", "0"))
+         if port_int > 0:
+             command.extend(["-p", str(port_int + 0) + ":" + str(port_int + 0)])
+             command.extend(["-p", str(port_int + 1) + ":" + str(port_int + 1) + "/udp"])
+             command.extend(["-p", str(port_int + 2) + ":" + str(port_int + 2)])
+             command.extend(["-p", str(port_int + 3) + ":" + str(port_int + 3) + "/udp"])
+
+     # Add read-only mount if path is provided
+     if read_only_host_paths is not None and len(read_only_host_paths) > 0:
+         for path in read_only_host_paths:
+
+             # Ensure the host path exists and is a directory
+             if not os.path.isdir(path):
+                 print(
+                     f"Error: Read-only host path '{path}' does not exist or is not a directory. Cannot mount.")
+                 return False
+             else:
+
+                 # Augmenting command
+                 path = os.path.abspath(path)
+                 command.extend(["-v", f"{path}:{path}:ro"])
+                 print(f"Mounted host '{path}' as read-only to container")
+
+     # Add writable mount if path is provided
+     if writable_host_paths is not None and len(writable_host_paths) > 0:
+         for path in writable_host_paths:
+
+             # Ensure the host path exists and is a directory
+             if not os.path.isdir(path):
+                 print(
+                     f"Error: Writable host path '{path}' does not exist or is not a directory. Cannot mount.")
+                 return False
+             else:
+
+                 # Augmenting command
+                 path = os.path.abspath(path)
+                 command.extend(["-v", f"{path}:{path}"])
+                 print(f"Mounted host '{path}' as writable to container")
+
+     # Completing command
+     command.append(DOCKER_IMAGE_NAME)
+
+     try:
+
+         # Running the prepared command... (using Popen to stream output in real-time)
+         try:
+             command.extend(["python3", file_to_run])
+             process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
+             for line in iter(process.stdout.readline, ''):
+                 sys.stdout.write(line)
+             process.wait()  # Wait for the process to finish
+             if process.returncode != 0:
+                 print(f"Container exited with non-zero status code: {process.returncode}")
+         except KeyboardInterrupt:
+             pass
+
+         print(f"\nContainer '{CONTAINER_NAME}' finished execution.")
+         return True
+     except FileNotFoundError:
+         print("Error: Docker command not found. Is Docker installed and in your PATH?")
+         print("Please ensure Docker is installed and running.")
+         return False
+     except subprocess.CalledProcessError as e:
+         print(f"Error running Docker container: {e}")
+         return False
+
+
+ # Entry point
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(
+         description="Run a Python script adding customizable read-only and writable paths.",
+         formatter_class=argparse.RawTextHelpFormatter,
+         epilog="""
+ Examples:
+   python utils/sandbox.py my_script.py -r /home/user/data:/opt/app/data -p 1234
+   python utils/sandbox.py another_script.py -w /tmp/output:/mnt/results
+   python utils/sandbox.py script_with_both.py -r /input:/app/in -w /output:/app/out -p 8082
+ """)
+     parser.add_argument(help="Path to the Python script to execute.", dest="script_to_run",
+                         type=str)
+     parser.add_argument("-p", "--port", dest="port",
+                         help="The starting port of the node(s) (each node uses 4 ports, consecutive port numbers)",
+                         type=str, required=True)
+     parser.add_argument("-r", "--read-only", dest="read_only_folders",
+                         help="One or multiple paths to mount as read-only. "
+                              "Use a colon to separate multiple paths (e.g., /path/a:/path/b).",
+                         type=str, default=None)
+     parser.add_argument("-w", "--writable", dest="writable_folders",
+                         help="One or multiple paths to mount as writable. "
+                              "Use a colon to separate multiple paths (e.g., /path/c:/path/d).",
+                         type=str, default=None)
+     args = parser.parse_args()
+
+     if not args.script_to_run.endswith(".py"):
+         parser.error(f"The script '{args.script_to_run}' must be a Python file (e.g., ending with .py)")
+     script_to_run = args.script_to_run
+     if not int(args.port) > 0:
+         parser.error(f"Invalid port")
+
+     read_only_folders = None
+     if args.read_only_folders:
+         read_only_folders = args.read_only_folders.split(':')
+     writable_folders = None
+     if args.writable_folders:
+         writable_folders = args.writable_folders.split(':')
+
+     print("\n Running in sandbox...")
+     print(f"- Script to run: {script_to_run}")
+     print(f"- Starting port (+0, +1, +2, +3): {args.port}")
+     print(f"- Read only paths to mount (the UNaIVERSE code folder will be automatically mounted): {read_only_folders}")
+     print(f"- Writable paths to mount: {writable_folders}\n")
+
+     # Marking
+     os.environ["NODE_STARTING_PORT"] = args.port
+
+     # Running the sandbox and the script
+     sandbox(script_to_run, read_only_paths=read_only_folders, writable_paths=writable_folders)
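
sandbox.py above builds a temporary Docker image (Debian-based Python 3.12 plus a Go toolchain and the project's requirements.txt), mounts the UNaIVERSE source tree read-only together with any user-supplied paths, runs the given script inside the container, and then removes the image and the generated Dockerfile. Besides the CLI shown in the epilog, sandbox() can be called programmatically; the sketch below uses placeholder paths and assumes Docker is available and that the module is run from a source checkout (an installed wheel has no requirements.txt next to the package, so the build step would likely fail there):

    # Sketch only: programmatic counterpart of the CLI examples in the epilog above.
    import os
    from unaiverse.utils.sandbox import sandbox

    os.environ["NODE_STARTING_PORT"] = "9000"       # same marker the CLI sets via -p
    sandbox("my_script.py",                         # placeholder script to run in the container
            read_only_paths=["/home/user/data"],    # placeholder read-only mount
            writable_paths=["/tmp/unaiverse_out"])  # placeholder writable mount
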