unaiverse 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unaiverse/__init__.py +19 -0
- unaiverse/agent.py +2226 -0
- unaiverse/agent_basics.py +2389 -0
- unaiverse/clock.py +234 -0
- unaiverse/dataprops.py +1282 -0
- unaiverse/hsm.py +2471 -0
- unaiverse/modules/__init__.py +18 -0
- unaiverse/modules/cnu/__init__.py +17 -0
- unaiverse/modules/cnu/cnus.py +536 -0
- unaiverse/modules/cnu/layers.py +261 -0
- unaiverse/modules/cnu/psi.py +60 -0
- unaiverse/modules/hl/__init__.py +15 -0
- unaiverse/modules/hl/hl_utils.py +411 -0
- unaiverse/modules/networks.py +1509 -0
- unaiverse/modules/utils.py +748 -0
- unaiverse/networking/__init__.py +16 -0
- unaiverse/networking/node/__init__.py +18 -0
- unaiverse/networking/node/connpool.py +1332 -0
- unaiverse/networking/node/node.py +2752 -0
- unaiverse/networking/node/profile.py +446 -0
- unaiverse/networking/node/tokens.py +79 -0
- unaiverse/networking/p2p/__init__.py +188 -0
- unaiverse/networking/p2p/go.mod +127 -0
- unaiverse/networking/p2p/go.sum +548 -0
- unaiverse/networking/p2p/golibp2p.py +18 -0
- unaiverse/networking/p2p/golibp2p.pyi +136 -0
- unaiverse/networking/p2p/lib.go +2765 -0
- unaiverse/networking/p2p/lib_types.py +311 -0
- unaiverse/networking/p2p/message_pb2.py +50 -0
- unaiverse/networking/p2p/messages.py +360 -0
- unaiverse/networking/p2p/mylogger.py +78 -0
- unaiverse/networking/p2p/p2p.py +900 -0
- unaiverse/networking/p2p/proto-go/message.pb.go +846 -0
- unaiverse/stats.py +1506 -0
- unaiverse/streamlib/__init__.py +15 -0
- unaiverse/streamlib/streamlib.py +210 -0
- unaiverse/streams.py +804 -0
- unaiverse/utils/__init__.py +16 -0
- unaiverse/utils/lone_wolf.json +28 -0
- unaiverse/utils/misc.py +441 -0
- unaiverse/utils/sandbox.py +292 -0
- unaiverse/world.py +384 -0
- unaiverse-0.1.12.dist-info/METADATA +366 -0
- unaiverse-0.1.12.dist-info/RECORD +47 -0
- unaiverse-0.1.12.dist-info/WHEEL +5 -0
- unaiverse-0.1.12.dist-info/licenses/LICENSE +177 -0
- unaiverse-0.1.12.dist-info/top_level.txt +1 -0
unaiverse/utils/__init__.py
ADDED

@@ -0,0 +1,16 @@
+"""
+█████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
+░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
+░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
+░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
+░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
+░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
+░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
+░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
+A Collectionless AI Project (https://collectionless.ai)
+Registration/Login: https://unaiverse.io
+Code Repositories: https://github.com/collectionlessai/
+Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
+"""
+from . import misc
+from . import sandbox
unaiverse/utils/lone_wolf.json
ADDED

@@ -0,0 +1,28 @@
+{
+    "initial_state": "ready",
+    "state": "ready",
+    "prev_state": null,
+    "limbo_state": null,
+    "state_actions": {
+        "ready": [null, null, 0, true, 0.0],
+        "generated": [null, null, 1, false, 0.0],
+        "self_generated": [null, null, 2, false, 0.0],
+        "asked": [null, null, 3, false, 0.0]
+    },
+    "transitions": {
+        "ready": {
+            "generated": [["do_gen", {}, false, 0]],
+            "self_generated": [["do_gen", {"u_hashes": ["<agent>:processor_in"], "samples": 1}, true, 1]]
+        },
+        "self_generated": {
+            "asked": [["ask_gen", {"agent": "<partner>", "u_hashes": ["<agent>:processor"], "samples": 1, "ignore_uuid": true}, true, 2]]
+        },
+        "asked": {
+            "ready": [["nop", {}, true, 3]]
+        },
+        "generated": {
+            "ready": [["nop", {}, true, 4]]
+        }
+    },
+    "cur_action": null
+}
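The JSON above is a serialized behaviour: a finite-state machine with an initial state, per-state action records, and a transition table whose entries pair an action name ("do_gen", "ask_gen", "nop") with its arguments; placeholders such as "<agent>" and "<partner>" are presumably bound at runtime, and the authoritative consumer is presumably unaiverse/hsm.py. A minimal sketch that loads the file and lists its transitions (stdlib only; the trailing flag/step values in each entry are read off the structure, not from a documented schema):

import json

# Load the serialized state machine and enumerate its transitions.
with open("unaiverse/utils/lone_wolf.json") as f:
    hsm = json.load(f)

print("initial state:", hsm["initial_state"])
for src, edges in hsm["transitions"].items():
    for dst, entries in edges.items():
        for name, args, *rest in entries:
            print(f"  {src} -> {dst}: action={name}, args={args}, extra={rest}")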
unaiverse/utils/misc.py
ADDED

@@ -0,0 +1,441 @@
+"""
+█████ █████ ██████ █████ █████ █████ █████ ██████████ ███████████ █████████ ██████████
+░░███ ░░███ ░░██████ ░░███ ░░███ ░░███ ░░███ ░░███░░░░░█░░███░░░░░███ ███░░░░░███░░███░░░░░█
+░███ ░███ ░███░███ ░███ ██████ ░███ ░███ ░███ ░███ █ ░ ░███ ░███ ░███ ░░░ ░███ █ ░
+░███ ░███ ░███░░███░███ ░░░░░███ ░███ ░███ ░███ ░██████ ░██████████ ░░█████████ ░██████
+░███ ░███ ░███ ░░██████ ███████ ░███ ░░███ ███ ░███░░█ ░███░░░░░███ ░░░░░░░░███ ░███░░█
+░███ ░███ ░███ ░░█████ ███░░███ ░███ ░░░█████░ ░███ ░ █ ░███ ░███ ███ ░███ ░███ ░ █
+░░████████ █████ ░░█████░░████████ █████ ░░███ ██████████ █████ █████░░█████████ ██████████
+░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░ ░░░░░ ░░░ ░░░░░░░░░░ ░░░░░ ░░░░░ ░░░░░░░░░ ░░░░░░░░░░
+A Collectionless AI Project (https://collectionless.ai)
+Registration/Login: https://unaiverse.io
+Code Repositories: https://github.com/collectionlessai/
+Main Developers: Stefano Melacci (Project Leader), Christian Di Maio, Tommaso Guidi
+"""
+import os
+import ast
+import sys
+import time
+import json
+import math
+import random
+import threading
+from tqdm import tqdm
+from pathlib import Path
+from datetime import datetime
+from unaiverse.modules.utils import HumanModule
+
+
+class GenException(Exception):
+    """Base exception for this application (a simple wrapper around a generic Exception)."""
+    pass
+
+
+def save_node_addresses_to_file(node, dir_path: str, public: bool,
+                                filename: str = "addresses.txt", append: bool = False):
+    address_file = os.path.join(dir_path, filename)
+    with open(address_file, "w" if not append else "a") as file:
+        file.write(node.hosted.get_name() + ";" +
+                   str(node.get_public_addresses() if public else node.get_world_addresses()) + "\n")
+        file.flush()
+
+
+def get_node_addresses_from_file(dir_path: str, filename: str = "addresses.txt") -> dict[str, list[str]]:
+    ret = {}
+    with open(os.path.join(dir_path, filename)) as file:
+        lines = file.readlines()
+
+    # Old file format
+    if lines[0].strip() == "/":
+        addresses = []
+        for line in lines:
+            _line = line.strip()
+            if len(_line) > 0:
+                addresses.append(_line)
+        ret["unk"] = addresses
+        return ret
+
+    # New file format
+    for line in lines:
+        if line.strip().startswith("***"):  # Header marker
+            continue
+        semicolon_separated_values = [v.strip() for v in line.split(';')]
+        node_name, addresses_str = semicolon_separated_values
+        ret[node_name] = ast.literal_eval(addresses_str)  # If a name appears multiple times, the last entry wins
+
+    return ret
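The two helpers above define a tiny line-oriented format: one node per line, with the node name and a Python-literal list of addresses separated by ';'; lines starting with "***" are skipped as headers, and a legacy all-addresses format is detected from the first line. A minimal round-trip sketch (the file content and the multiaddr-style address are hypothetical; real entries come from save_node_addresses_to_file):

# Write one entry in the "new" format by hand, then parse it back.
with open("addresses.txt", "w") as f:
    f.write("node-a;['/ip4/127.0.0.1/tcp/4001']\n")

print(get_node_addresses_from_file("."))  # {'node-a': ['/ip4/127.0.0.1/tcp/4001']}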
+
+
+class Silent:
+    def __init__(self, ignore: bool = False):
+        self.ignore = ignore
+
+    def __enter__(self):
+        if not self.ignore:
+            self._original_stdout = sys.stdout
+            sys.stdout = open(os.devnull, "w")
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not self.ignore:
+            sys.stdout.close()
+            sys.stdout = self._original_stdout
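Silent is a small context manager that swallows stdout unless ignore=True. Usage sketch:

with Silent():
    print("never shown")       # Redirected to os.devnull
with Silent(ignore=True):
    print("shown as usual")    # Suppression disabled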
+
+
+# The countdown function
+def countdown_start(seconds: int, msg: str):
+    class TqdmPrintRedirector:
+        def __init__(self, tqdm_instance):
+            self.tqdm_instance = tqdm_instance
+            self.original_stdout = sys.__stdout__
+
+        def write(self, s):
+            if s.strip():  # Ignore empty lines (needed for the way tqdm works)
+                self.tqdm_instance.write(s, file=self.original_stdout)
+
+        def flush(self):
+            pass  # Tqdm handles flushing
+
+    def drawing(secs: int, message: str):
+        with tqdm(total=secs, desc=message, file=sys.__stdout__) as t:
+            sys.stdout = TqdmPrintRedirector(t)  # Redirect prints to tqdm.write
+            for i in range(secs):
+                time.sleep(1)
+                t.update(1.)
+            sys.stdout = sys.__stdout__  # Restore original stdout
+
+        sys.stdout.flush()
+    handle = threading.Thread(target=drawing, args=(seconds, msg))
+    handle.start()
+    return handle
+
+
+def countdown_wait(handle):
+    handle.join()
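countdown_start draws a non-blocking tqdm bar in a background thread (one tick per second) while routing concurrent prints through tqdm.write; countdown_wait just joins that thread. A usage sketch:

handle = countdown_start(3, "Warming up")
print("this line is routed through tqdm.write while the bar ticks")
countdown_wait(handle)  # Blocks until the 3-second bar completes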
+
+
+def check_json_start(file: str, msg: str, delete_existing: bool = False):
+    from rich.json import JSON
+    from rich.console import Console
+    cons = Console(file=sys.__stdout__)
+
+    if delete_existing:
+        if os.path.exists(file):
+            os.remove(file)
+
+    def checking(file_path: str, console: Console):
+        print(msg)
+        prev_dict = {}
+        while True:
+            if os.path.exists(file_path):
+                try:
+                    with open(file_path, "r", encoding='utf-8') as f:
+                        json_dict = json.load(f)
+                    if json_dict != prev_dict:
+                        now = datetime.now()
+                        console.print("─" * 80)
+                        console.print("Printing updated file "
+                                      "(print time: " + now.strftime("%Y-%m-%d %H:%M:%S") + ")")
+                        console.print("─" * 80)
+                        console.print(JSON.from_data(json_dict))
+                        prev_dict = json_dict
+                except KeyboardInterrupt:
+                    break
+                except Exception:
+                    pass
+            time.sleep(1)
+
+    handle = threading.Thread(target=checking, args=(file, cons), daemon=True)
+    handle.start()
+    return handle
+
+
+def check_json_start_wait(handle):
+    handle.join()
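check_json_start spawns a daemon thread that polls a JSON file once per second and pretty-prints it with rich whenever its parsed content changes. In practice the watcher loops until the process exits (it is a daemon thread), so joining it effectively blocks the caller. Sketch (file name hypothetical):

handle = check_json_start("agent_state.json", "Watching agent_state.json ...")
# ... some other component keeps rewriting agent_state.json ...
check_json_start_wait(handle)  # Joins the (endless) daemon watcher thread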
+
+
+def show_images_grid(image_paths, max_cols=3):
+    import matplotlib.pyplot as plt
+    import matplotlib.image as mpimg
+
+    n = len(image_paths)
+    cols = min(max_cols, n)
+    rows = math.ceil(n / cols)
+
+    # Load images
+    images = [mpimg.imread(p) for p in image_paths]
+
+    # Determine figure size based on image sizes
+    widths, heights = zip(*[(img.shape[1], img.shape[0]) for img in images])
+
+    # Use average width/height for scaling
+    avg_width = sum(widths) / len(widths)
+    avg_height = sum(heights) / len(heights)
+
+    fig_width = cols * avg_width / 100
+    fig_height = rows * avg_height / 100
+
+    fig, axes = plt.subplots(rows, cols, figsize=(fig_width, fig_height))
+    axes = axes.flatten() if n > 1 else [axes]
+
+    fig.canvas.manager.set_window_title("Image Grid")
+
+    # Hide unused axes
+    for ax in axes[n:]:
+        ax.axis('off')
+
+    for idx, (ax, img) in enumerate(zip(axes, images)):
+        ax.imshow(img)
+        ax.axis('off')
+        ax.set_title(str(idx), fontsize=12, fontweight='bold')
+
+    # Display images
+    for ax, img in zip(axes, images):
+        ax.imshow(img)
+        ax.axis('off')
+
+    plt.subplots_adjust(wspace=0, hspace=0)
+
+    # Turn on interactive mode
+    plt.ion()
+    plt.show()
+
+    fig.canvas.draw()
+    plt.pause(0.1)
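show_images_grid lays the images out on a rows × cols grid (at most max_cols columns), titles each panel with its index in bold, and shows the figure in interactive mode. Usage sketch (paths hypothetical):

show_images_grid(["generated_0.png", "generated_1.png", "generated_2.png"], max_cols=2)
# -> a 2x2 grid with panels titled 0, 1, 2 and one hidden (unused) axis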
+
+
+class FileTracker:
+    def __init__(self, folder, ext=".json", prefix=None, skip=None):
+        self.folder = Path(folder)
+        self.ext = ext.lower()
+        self.skip = skip
+        self.prefix = prefix
+        self.last_state = self.__scan_files()
+
+    def __scan_files(self):
+        state = {}
+        for file in self.folder.iterdir():
+            if ((file.is_file() and file.suffix.lower() == self.ext and
+                    (self.skip is None or file.name != self.skip)) and
+                    (self.prefix is None or file.name.startswith(self.prefix))):
+                state[file.name] = os.path.getmtime(file)
+        return state
+
+    def something_changed(self):
+        new_state = self.__scan_files()
+
+        created = [f for f in new_state if f not in self.last_state]
+        modified = [f for f in new_state if f in self.last_state and new_state[f] != self.last_state[f]]
+        deleted = [f for f in self.last_state if f not in new_state]  # Track deletions
+
+        has_changed = bool(created or modified or deleted)
+        self.last_state = new_state
+        return has_changed
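FileTracker snapshots the modification times of matching files at construction, and each call to something_changed() reports whether any matching file was created, modified, or deleted since the previous call. Polling sketch (folder, prefix, and skip values are hypothetical):

tracker = FileTracker("outputs", ext=".json", prefix="agent_", skip="addresses.json")
while True:
    if tracker.something_changed():
        print("something was created/modified/deleted in outputs/")
    time.sleep(1)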
+
+
+def prepare_app_dir(app_name: str = "unaiverse"):
+    app_name = app_name.lower()
+    if os.name == "nt":  # Windows
+        if os.getenv("APPDATA") is not None:
+            key_dir = os.path.join(os.getenv("APPDATA"), "Local", app_name)  # Expected
+        else:
+            key_dir = os.path.join(str(Path.home()), f".{app_name}")  # Fallback
+    else:  # Linux/macOS
+        key_dir = os.path.join(str(Path.home()), f".{app_name}")
+    os.makedirs(key_dir, exist_ok=True)
+    return key_dir
+
+
+def get_key_considering_multiple_sources(key_variable: str | None) -> str:
+
+    # Creating folder (if needed) to store the key
+    try:
+        key_dir = prepare_app_dir(app_name="UNaIVERSE")
+    except Exception:
+        raise GenException("Cannot create folder to store the key file")
+    key_file = os.path.join(key_dir, "key")
+
+    # Getting from an existing file
+    key_from_file = None
+    if os.path.exists(key_file):
+        with open(key_file, "r") as f:
+            key_from_file = f.read().strip()
+
+    # Getting from env variable
+    key_from_env = os.getenv("NODE_KEY", None)
+
+    # Getting from code-specified option
+    if key_variable is not None and len(key_variable.strip()) > 0:
+        key_from_var = key_variable.strip()
+        if key_from_var.startswith("<") and key_from_var.endswith(">"):  # Something like <UNAIVERSE_KEY_GOES_HERE>
+            key_from_var = None
+    else:
+        key_from_var = None
+
+    # Finding valid sources and checking if multiple keys were provided
+    _keys = [key_from_var, key_from_env, key_from_file]
+    _source_names = ["your code", "env variable 'NODE_KEY'", f"cache file {key_file}"]
+    source_names = []
+    mismatching = False
+    multiple_source = False
+    first_key = None
+    first_source = None
+    _prev_key = None
+    for i, (_key, _source_name) in enumerate(zip(_keys, _source_names)):
+        if _key is not None:
+            source_names.append(_source_name)
+            if _prev_key is not None:
+                if _key != _prev_key:
+                    mismatching = True
+                multiple_source = True
+            else:
+                _prev_key = _key
+                first_key = _key
+                first_source = _source_name
+
+    if len(source_names) > 0:
+        msg = ""
+        if multiple_source and not mismatching:
+            msg = "UNaIVERSE key (the exact same key) present in multiple locations: " + ", ".join(source_names)
+        if multiple_source and mismatching:
+            msg = "UNaIVERSE keys (different keys) present in multiple locations: " + ", ".join(source_names)
+            msg += "\nLoaded the one stored in " + first_source
+        if not multiple_source:
+            msg = f"UNaIVERSE key loaded from {first_source}"
+        print(msg)
+        return first_key
+    else:
+
+        # If no key present, ask user and save to file
+        print("UNaIVERSE key not present in " + ", ".join(_source_names))
+        print("If you did not already do it, go to https://unaiverse.io, login, and generate a key")
+        key = input("Enter your UNaIVERSE key, that will be saved to the cache file: ").strip()
+        with open(key_file, "w") as f:
+            f.write(key)
+        return key
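Source precedence follows the order of _keys above: a key passed in code wins over the NODE_KEY environment variable, which wins over the cache file; placeholder-looking values such as "<...>" are ignored, and mismatching keys from several sources are reported. For example (hypothetical key value):

import os
os.environ["NODE_KEY"] = "abc123"
get_key_considering_multiple_sources(None)                         # -> "abc123" (from the env variable)
get_key_considering_multiple_sources("<UNAIVERSE_KEY_GOES_HERE>")  # Placeholder ignored; env still wins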
+
+
+class PolicyFilterSelfGen:
+    def __init__(self, wait: float, add_random_up_to: float = 0.):
+        self.wait = wait
+        self.add_random_up_to = max(add_random_up_to, 0.)
+        if wait <= 0.:
+            raise GenException("Invalid number of seconds ('wait' must be > 0)")
+
+    def __call__(self, action_id, request, all_actions, policy_filter_opts):
+        """Run the policy filter."""
+
+        # Getting basic info from the policy options (reference to agent, and to the last time do_gen was approved)
+        if 'first_t' not in policy_filter_opts:
+            policy_filter_opts['first_t'] = -1
+        _agent, _first_t = policy_filter_opts['agent'], policy_filter_opts['first_t']
+
+        # If the agent lives in the TuringHotel world...
+        action = all_actions[action_id]
+        action_name = action.name
+
+        # We want to handle as an exception the case of "do_gen" with "u_hashes=[...processor_in]" (self-generation)
+        if action_name == "do_gen" or action_name == "do_learn":
+
+            # Saving the time when the action we were looking for was actually selected by the policy
+            if _first_t < 0:
+                _first_t = time.monotonic()
+                policy_filter_opts['first_t'] = _first_t
+
+            # Don't generate (don't do anything) if fewer than 'wait' seconds (plus random extra) have passed
+            if time.monotonic() - _first_t < (self.wait + random.uniform(0, self.add_random_up_to)):
+                return -1, None
+            else:
+                policy_filter_opts['first_t'] = -1  # Clearing
+
+        # Returning the revised policy decision
+        return action_id, request
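The filter debounces "do_gen"/"do_learn": the first time the policy proposes one of them it records the time and vetoes the action (returning -1, None), and it keeps vetoing until 'wait' seconds (plus an optional random extra) have elapsed, after which the original decision passes through and the timer is cleared. A minimal harness (SimpleNamespace stands in for the real action objects, which only need a 'name' attribute here):

from types import SimpleNamespace

actions = [SimpleNamespace(name="do_gen")]
opts = {"agent": None}  # The filter only reads opts['agent'] and opts['first_t']
filt = PolicyFilterSelfGen(wait=2.0)

print(filt(0, None, actions, opts))  # (-1, None): vetoed, timer started
time.sleep(2.1)
print(filt(0, None, actions, opts))  # (0, None): approved after 'wait' seconds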
+
+
+class PolicyFilterHuman:
+    def __init__(self):
+        pass
+
+    def __call__(self, action_id, request, all_actions, policy_filter_opts):
+        """Run the policy filter."""
+
+        # Getting basic info from the policy options (reference to agent)
+        agent = policy_filter_opts['agent']
+        public = policy_filter_opts['public']
+
+        # Ensuring the input stream is disabled (important)
+        agent.disable_proc_input(public=public)
+
+        # If the agent lives in the TuringHotel world...
+        action = all_actions[action_id]
+        action_name = action.name
+
+        # We want to handle as an exception the case of "do_gen"
+        if action_name == "do_gen" or action_name == "do_learn":
+
+            # Checking the type of action (dashed or solid)
+            if request is not None:
+                is_dashed = True
+                mark = request.get_mark()
+                already_altered_request = False
+                if mark is not None and mark == "altered_by_policy_filter":
+                    already_altered_request = True
+            else:
+                is_dashed = False
+                already_altered_request = False  # Unused (dashed only)
+
+            # We alter the original request, forcing the input hashes to be the processor input
+            if is_dashed:
+                proc_input_net_hash = agent.get_proc_input_net_hash(public=public)
+
+                if not already_altered_request:
+
+                    # Getting the original u_hashes of the request
+                    u_hashes = request.get_arg("u_hashes")
+
+                    # Moving original u_hashes of the request to extra_hashes
+                    if u_hashes is not None:
+                        extra_hashes = request.get_arg("extra_hashes")
+                        if extra_hashes is not None:
+
+                            # Arg 'extra_hashes' could have been already there for some world-specific reasons
+                            request.alter_arg("extra_hashes", extra_hashes + u_hashes)
+                        else:
+
+                            # If arg 'extra_hashes' was not there
+                            request.set_arg("extra_hashes", u_hashes)
+
+                    request.alter_arg("u_hashes", [proc_input_net_hash])
+                    request.set_mark("altered_by_policy_filter")  # Marking to avoid doing this again
+
+                # Separately: checking if 'extra_hashes' is part of the request
+                extra_hashes = request.get_arg("extra_hashes")
+                data_tag_from_extra_hashes = None
+
+                if extra_hashes is not None and extra_hashes[0] not in agent.known_streams:
+
+                    # Fallback: when the other agent disconnects and the stream in extra_hashes is not known anymore
+                    agent.set_uuid(proc_input_net_hash, None, expected=False)
+                    agent.set_uuid(proc_input_net_hash, None, expected=True)
+                    agent.set_tag(proc_input_net_hash, -1)
+
+                else:
+                    if extra_hashes is not None:
+                        extra_hashes_0 = extra_hashes[0]  # Assuming the first extra hash dictates the tag
+
+                        # Guessing the (max) data tag of the whole stream (heuristic)
+                        data_tag_from_extra_hashes = agent.get_tag(extra_hashes_0)
+
+                    # Preparing the input stream with the request UUID
+                    agent.set_uuid(proc_input_net_hash, request.uuid, expected=False)
+                    agent.set_uuid(proc_input_net_hash, request.uuid, expected=True)
+
+                    # We also force the data tag that was/is in 'extra_hashes', if 'extra_hashes' is present
+                    if data_tag_from_extra_hashes is not None:
+                        agent.set_tag(proc_input_net_hash, data_tag_from_extra_hashes)
+
+        # Returning the revised policy decision
+        return action_id, request
+
+
+def has_human_processor(agent):
+    return agent.proc is not None and isinstance(agent.proc.module, HumanModule)