plato-sdk-v2 2.6.1__py3-none-any.whl → 2.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plato/_generated/__init__.py +1 -1
- plato/_generated/api/v2/__init__.py +2 -1
- plato/_generated/api/v2/networks/__init__.py +23 -0
- plato/_generated/api/v2/networks/add_member.py +75 -0
- plato/_generated/api/v2/networks/create_network.py +70 -0
- plato/_generated/api/v2/networks/delete_network.py +68 -0
- plato/_generated/api/v2/networks/get_network.py +69 -0
- plato/_generated/api/v2/networks/list_members.py +69 -0
- plato/_generated/api/v2/networks/list_networks.py +74 -0
- plato/_generated/api/v2/networks/remove_member.py +73 -0
- plato/_generated/api/v2/networks/update_member.py +80 -0
- plato/_generated/api/v2/sessions/__init__.py +4 -0
- plato/_generated/api/v2/sessions/add_ssh_key.py +81 -0
- plato/_generated/api/v2/sessions/connect_network.py +89 -0
- plato/_generated/models/__init__.py +150 -24
- plato/v1/cli/agent.py +45 -52
- plato/v1/cli/chronos.py +46 -58
- plato/v1/cli/main.py +14 -25
- plato/v1/cli/pm.py +37 -92
- plato/v1/cli/proxy.py +343 -0
- plato/v1/cli/sandbox.py +305 -385
- plato/v1/cli/ssh.py +12 -167
- plato/v1/cli/verify.py +79 -55
- plato/v1/cli/world.py +13 -12
- plato/v2/async_/client.py +24 -2
- plato/v2/async_/session.py +48 -0
- plato/v2/sync/client.py +24 -2
- plato/v2/sync/session.py +48 -0
- {plato_sdk_v2-2.6.1.dist-info → plato_sdk_v2-2.7.0.dist-info}/METADATA +1 -1
- {plato_sdk_v2-2.6.1.dist-info → plato_sdk_v2-2.7.0.dist-info}/RECORD +32 -20
- {plato_sdk_v2-2.6.1.dist-info → plato_sdk_v2-2.7.0.dist-info}/WHEEL +0 -0
- {plato_sdk_v2-2.6.1.dist-info → plato_sdk_v2-2.7.0.dist-info}/entry_points.txt +0 -0
plato/v1/cli/ssh.py
CHANGED
@@ -1,8 +1,5 @@
-"""SSH utilities for Plato CLI
+"""SSH utilities for Plato CLI."""
 
-import os
-import random
-import shutil
 from pathlib import Path
 
 from cryptography.hazmat.primitives import serialization
@@ -21,37 +18,22 @@ def get_plato_dir(working_dir: Path | str | None = None) -> Path:
     return Path.home() / ".plato"
 
 
-def get_next_sandbox_number(working_dir: Path | str | None = None) -> int:
-    """Find next available sandbox number by checking existing config files."""
-    plato_dir = get_plato_dir(working_dir)
-    if not plato_dir.exists():
-        return 1
-
-    max_num = 0
-    for file in plato_dir.iterdir():
-        if file.name.startswith("ssh_") and file.name.endswith(".conf"):
-            # Extract number from ssh_N.conf
-            try:
-                num_str = file.name[4:-5]  # Remove "ssh_" prefix and ".conf" suffix
-                num = int(num_str)
-                if num > max_num:
-                    max_num = num
-            except ValueError:
-                continue
-    return max_num + 1
-
-
-def generate_ssh_key_pair(sandbox_num: int, working_dir: Path | str | None = None) -> tuple[str, str]:
+def generate_ssh_key_pair(identifier: str, working_dir: Path | str | None = None) -> tuple[str, str]:
     """
-    Generate a new ed25519 SSH key pair
+    Generate a new ed25519 SSH key pair.
+
+    Args:
+        identifier: A unique identifier for naming the key files (e.g., session_id prefix)
+        working_dir: Optional working directory for the .plato folder
 
-    Returns
+    Returns:
+        Tuple of (public_key_str, private_key_path)
     """
     plato_dir = get_plato_dir(working_dir)
     plato_dir.mkdir(mode=0o700, exist_ok=True)
 
-    private_key_path = plato_dir / f"ssh_{sandbox_num}_key"
-    public_key_path = plato_dir / f"ssh_{sandbox_num}_key.pub"
+    private_key_path = plato_dir / f"ssh_{identifier}_key"
+    public_key_path = plato_dir / f"ssh_{identifier}_key.pub"
 
     # Remove existing keys if they exist
     private_key_path.unlink(missing_ok=True)
@@ -75,7 +57,7 @@ def generate_ssh_key_pair(sandbox_num: int, working_dir: Path | str | None = Non
     )
 
     # Add comment to public key
-    comment = f"plato-sandbox-{sandbox_num}"
+    comment = f"plato-sandbox-{identifier}"
     public_key_str = f"{public_key_bytes.decode('utf-8')} {comment}"
 
     # Write private key with 0600 permissions
@@ -87,140 +69,3 @@ def generate_ssh_key_pair(sandbox_num: int, working_dir: Path | str | None = Non
     public_key_path.chmod(0o644)
 
     return public_key_str, str(private_key_path)
-
-
-def get_proxy_config(base_url: str) -> tuple[str, bool]:
-    """
-    Get proxy server configuration based on base URL.
-
-    Returns (proxy_server, is_secure).
-    """
-    # Match Go hub's GetProxyConfig behavior
-    if "localhost" in base_url:
-        return "proxy.localhost:9000", False
-    elif "staging" in base_url:
-        return "staging.proxy.plato.so:9000", True
-    else:
-        # Default to production
-        return "proxy.plato.so:9000", True
-
-
-def find_proxytunnel() -> str | None:
-    """Find proxytunnel binary - checks bundled location first, then PATH."""
-    # Check bundled location - go up from cli/ to v1/
-    package_dir = Path(__file__).resolve().parent.parent
-    bin_dir = package_dir / "bin"
-    bundled_path = bin_dir / "proxytunnel"
-    if bundled_path.exists() and os.access(bundled_path, os.X_OK):
-        return str(bundled_path)
-
-    # Check plato-client/cli/bin for development
-    plato_client_dir = package_dir.parent.parent.parent
-    dev_path = plato_client_dir / "cli" / "bin" / "proxytunnel"
-    if dev_path.exists() and os.access(dev_path, os.X_OK):
-        return str(dev_path)
-
-    # Check PATH
-    which_result = shutil.which("proxytunnel")
-    if which_result:
-        return which_result
-
-    return None
-
-
-def create_ssh_config(
-    base_url: str,
-    hostname: str,
-    local_port: int,
-    job_public_id: str,
-    username: str,
-    private_key_path: str,
-    sandbox_num: int,
-    working_dir: Path | str | None = None,
-) -> str:
-    """
-    Create a temporary SSH config file for a specific sandbox.
-
-    Returns the path to the config file.
-    """
-    proxytunnel_path = find_proxytunnel()
-    if not proxytunnel_path:
-        raise RuntimeError(
-            "proxytunnel not found. Install it with: brew install proxytunnel (macOS) "
-            "or apt-get install proxytunnel (Linux)"
-        )
-
-    proxy_server, is_secure = get_proxy_config(base_url)
-
-    # Build ProxyCommand
-    proxy_cmd = proxytunnel_path
-    if is_secure:
-        proxy_cmd += " -E"
-    proxy_cmd += f" -p {proxy_server} -P '{job_public_id}@22:newpass' -d %h:%p --no-check-certificate"
-
-    config_content = f"""Host {hostname}
-    HostName localhost
-    Port {local_port}
-    User {username}
-    IdentityFile {private_key_path}
-    IdentitiesOnly yes
-    StrictHostKeyChecking no
-    UserKnownHostsFile /dev/null
-    ConnectTimeout 10
-    ProxyCommand {proxy_cmd}
-    ServerAliveInterval 30
-    ServerAliveCountMax 3
-    TCPKeepAlive yes
-"""
-
-    plato_dir = get_plato_dir(working_dir)
-    plato_dir.mkdir(mode=0o700, exist_ok=True)
-
-    config_path = plato_dir / f"ssh_{sandbox_num}.conf"
-    config_path.write_text(config_content)
-    config_path.chmod(0o600)
-
-    return str(config_path)
-
-
-def setup_ssh_for_sandbox(
-    base_url: str,
-    job_public_id: str,
-    username: str = "plato",
-    working_dir: Path | str | None = None,
-) -> dict:
-    """
-    Set up SSH access for a sandbox - generates keys and creates config.
-
-    This replicates the Go hub's SetupSSHConfig function.
-
-    Returns dict with: ssh_host, config_path, public_key, private_key_path
-    """
-    sandbox_num = get_next_sandbox_number(working_dir)
-    ssh_host = f"sandbox-{sandbox_num}"
-
-    # Choose random port between 2200 and 2299
-    local_port = random.randint(2200, 2299)
-
-    # Generate SSH key pair
-    public_key, private_key_path = generate_ssh_key_pair(sandbox_num, working_dir)
-
-    # Create SSH config file
-    config_path = create_ssh_config(
-        base_url=base_url,
-        hostname=ssh_host,
-        local_port=local_port,
-        job_public_id=job_public_id,
-        username=username,
-        private_key_path=private_key_path,
-        sandbox_num=sandbox_num,
-        working_dir=working_dir,
-    )
-
-    return {
-        "ssh_host": ssh_host,
-        "config_path": config_path,
-        "public_key": public_key,
-        "private_key_path": private_key_path,
-        "sandbox_num": sandbox_num,
-    }
plato/v1/cli/verify.py
CHANGED
@@ -49,11 +49,13 @@ sandbox_verify_app = typer.Typer(help="Verify sandbox setup and state")
 
 @sandbox_verify_app.callback(invoke_without_command=True)
 def sandbox_verify_default(ctx: typer.Context):
-    """
-
+    """Verify sandbox is properly configured.
+
+    Checks that .sandbox.yaml exists and contains all required fields (job_id,
+    session_id, public_url, plato_config_path, service). Also verifies that the
+    plato_config_path file exists.
 
-    Exit 0
-    Exit 1 with stderr describing missing fields.
+    Exit code 0 = verification passed, exit code 1 = verification failed with error on stderr.
     """
     if ctx.invoked_subcommand is not None:
         return
@@ -92,11 +94,12 @@ def sandbox_verify_default(ctx: typer.Context):
 
 @sandbox_verify_app.command(name="services")
 def verify_services():
-    """
-
+    """Verify containers are running and public URL returns 200.
+
+    Checks container health via SSH (docker ps) and makes an HTTP HEAD request to
+    the public URL. Reports unhealthy/exited/dead containers and HTTP errors.
 
-    Exit 0
-    Exit 1 with stderr describing the issue (e.g., "HTTP 502 - check nginx config").
+    Exit code 0 = all healthy, exit code 1 = issues found with error on stderr.
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -174,11 +177,12 @@ def verify_services():
 
 @sandbox_verify_app.command(name="login")
 def verify_login():
-    """
-
+    """Verify login page is accessible.
+
+    Makes an HTTP GET request to the public URL from .sandbox.yaml and verifies
+    it returns HTTP 200.
 
-    Exit 0
-    Exit 1 if not accessible.
+    Exit code 0 = accessible, exit code 1 = not accessible with error on stderr.
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -210,11 +214,12 @@ def verify_login():
 
 @sandbox_verify_app.command(name="worker")
 def verify_worker():
-    """
-
+    """Verify Plato worker is connected and audit triggers installed.
+
+    Calls the state API and checks that the worker responds without errors.
+    A 502 error indicates the worker is not running.
 
-    Exit 0
-    Exit 1 with stderr describing the issue.
+    Exit code 0 = worker connected, exit code 1 = worker not connected.
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -274,11 +279,12 @@ def verify_worker():
 
 @sandbox_verify_app.command(name="audit-clear")
 def verify_audit_clear():
-    """
-
+    """Verify audit log is cleared (0 mutations).
+
+    Checks the state API to confirm no mutations are recorded. Use after
+    'plato sandbox clear-audit' to verify the audit tables were truncated.
 
-    Exit 0
-    Exit 1 if mutations exist.
+    Exit code 0 = no mutations, exit code 1 = mutations exist.
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -323,11 +329,12 @@ def verify_audit_clear():
 
 @sandbox_verify_app.command(name="flow")
 def verify_flow():
-    """
-
+    """Verify login flow exists and is valid.
+
+    Checks that flows.yml (or base/flows.yml) exists and contains a valid 'login'
+    flow definition with required fields (name, steps, etc.).
 
-    Exit 0
-    Exit 1 if missing or invalid.
+    Exit code 0 = valid flow found, exit code 1 = missing or invalid.
     """
     flow_paths = ["flows.yml", "base/flows.yml", "login-flow.yml"]
     flow_file = None
@@ -360,11 +367,12 @@ def verify_flow():
 
 @sandbox_verify_app.command(name="mutations")
 def verify_mutations():
-    """
-
+    """Verify no mutations after login flow.
+
+    Checks the state API to confirm no database mutations were recorded. Should
+    be run after executing the login flow to verify it doesn't cause mutations.
 
-    Exit 0
-    Exit 1 with stderr listing tables and counts.
+    Exit code 0 = no mutations, exit code 1 = mutations found (lists tables and counts).
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -422,10 +430,10 @@ def verify_mutations():
 
 @sandbox_verify_app.command(name="audit-active")
 def verify_audit_active():
-    """
-    Verify audit system is tracking changes.
+    """Verify audit system is tracking changes.
 
-    This is a manual verification step
+    This is a manual verification step that requires making a change in the app
+    and confirming mutations appear. Always exits 0 - actual verification is manual.
     """
     # This step requires manual verification - just pass
     pass
@@ -433,11 +441,12 @@ def verify_audit_active():
 
 @sandbox_verify_app.command(name="snapshot")
 def verify_snapshot():
-    """
-
+    """Verify snapshot was created.
+
+    Checks that .sandbox.yaml contains an artifact_id field, which is set by
+    'plato sandbox snapshot' after successfully creating a snapshot.
 
-    Exit 0
-    Exit 1 if missing.
+    Exit code 0 = artifact_id present, exit code 1 = missing.
     """
     if not Path(SANDBOX_FILE).exists():
         _fail(f"File not found: {SANDBOX_FILE}")
@@ -470,11 +479,12 @@ pm_verify_app = typer.Typer(help="Verify review and submit steps")
 
 @pm_verify_app.command(name="review")
 def verify_review():
-    """
-
+    """Verify review prerequisites.
+
+    Checks all prerequisites for submitting a simulator for review: PLATO_API_KEY set,
+    .sandbox.yaml exists with artifact_id, and flows.yml has a login flow.
 
-    Exit 0
-    Exit 1 if missing prerequisites.
+    Exit code 0 = ready for review, exit code 1 = missing prerequisites.
     """
     issues = []
 
@@ -506,11 +516,12 @@ def verify_review():
 
 @pm_verify_app.command(name="submit")
 def verify_submit():
-    """
-
+    """Verify submit prerequisites.
+
+    Checks all prerequisites for submission: PLATO_API_KEY set and .sandbox.yaml
+    exists with artifact_id.
 
-    Exit 0
-    Exit 1 if missing prerequisites.
+    Exit code 0 = ready to submit, exit code 1 = missing prerequisites.
     """
     issues = []
 
@@ -543,11 +554,15 @@ def verify_submit():
 def verify_research(
     report_path: str = typer.Option("research-report.yml", "--report", "-r"),
 ):
-    """
-
+    """Verify research report is complete.
+
+    Checks that the research report YAML file contains all required fields:
+    app_name, source, database.type, docker, and credentials.
 
-
-
+    Options:
+        -r, --report: Path to research report file (default: research-report.yml)
+
+    Exit code 0 = complete, exit code 1 = missing required fields.
     """
     if not Path(report_path).exists():
         _fail(f"Research report not found: {report_path}")
@@ -587,11 +602,15 @@ def verify_research(
 def verify_validation(
     report_path: str = typer.Option("research-report.yml", "--report", "-r"),
 ):
-    """
-
+    """Verify app can become a simulator.
+
+    Checks that the research report indicates a supported database type
+    (postgresql, mysql, mariadb, sqlite) and has no blocking issues.
+
+    Options:
+        -r, --report: Path to research report file (default: research-report.yml)
 
-    Exit 0
-    Exit 1 with stderr describing blocker.
+    Exit code 0 = can become simulator, exit code 1 = has blockers.
     """
     if not Path(report_path).exists():
         _fail(f"Research report not found: {report_path}")
@@ -622,11 +641,16 @@ def verify_config(
     config_path: str = typer.Option("plato-config.yml", "--config", "-c"),
     compose_path: str = typer.Option("base/docker-compose.yml", "--compose"),
 ):
-    """
-
+    """Verify configuration files are valid.
+
+    Checks that plato-config.yml and docker-compose.yml exist and contain valid
+    YAML. Validates required fields in plato-config.yml (service, datasets, etc.).
+
+    Options:
+        -c, --config: Path to plato-config.yml (default: plato-config.yml)
+        --compose: Path to docker-compose.yml (default: base/docker-compose.yml)
 
-    Exit 0
-    Exit 1 with stderr describing issues.
+    Exit code 0 = valid, exit code 1 = issues found.
     """
     issues = []
 
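These verify commands report results purely through exit codes and stderr, so they can be scripted. A sketch of consuming that contract from Python; the exact CLI spelling ("plato sandbox verify services") is inferred from the Typer app names and is not confirmed by this diff.

import subprocess

# Exit code 0 = all healthy, exit code 1 = issues found (details on stderr).
proc = subprocess.run(
    ["plato", "sandbox", "verify", "services"],
    capture_output=True,
    text=True,
)
if proc.returncode != 0:
    print("verification failed:", proc.stderr.strip())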
plato/v1/cli/world.py
CHANGED
@@ -67,22 +67,23 @@ def world_publish(
     path: str = typer.Argument(".", help="Path to the world package directory (default: current directory)"),
     dry_run: bool = typer.Option(False, "--dry-run", help="Build without uploading"),
 ):
-    """
-
+    """Build and publish a world package to the Plato worlds repository.
+
+    Reads pyproject.toml for package info, builds with 'uv build', extracts the
+    config schema from schema.json in the wheel, and uploads to the Plato worlds
+    repository via uv publish.
 
-
-
-    to the Plato worlds repository.
+    The schema.json is automatically generated during build by a hatch build hook
+    that calls the world's get_schema() method.
 
-
-
+    Arguments:
+        path: Path to the world package directory containing pyproject.toml
+            (default: current directory)
 
-
+    Options:
+        --dry-run: Build the package and show schema without uploading
 
-
-    plato world publish
-    plato world publish ./my-world-package
-    plato world publish --dry-run
+    Requires PLATO_API_KEY environment variable for upload.
     """
     try:
         import tomli
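The removed usage examples still describe the command shape; a hedged sketch of driving the publish flow from a script follows (the command spelling comes from the old docstring examples, and the PLATO_API_KEY requirement from the new one).

import os
import subprocess

# --dry-run builds the package and shows the extracted schema without uploading.
subprocess.run(["plato", "world", "publish", ".", "--dry-run"], check=True)

# A real publish additionally needs PLATO_API_KEY set for the upload step.
assert os.environ.get("PLATO_API_KEY"), "PLATO_API_KEY is required to upload"
subprocess.run(["plato", "world", "publish", "."], check=True)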
plato/v2/async_/client.py
CHANGED
@@ -31,6 +31,7 @@ class AsyncSessionManager:
         task: str | None = None,
         timeout: int = 1800,
         agent_artifact_id: str | None = None,
+        connect_network: bool = True,
     ) -> Session:
         """Create a new session.
 
@@ -41,6 +42,7 @@ class AsyncSessionManager:
             task: Task public ID to create session from
             timeout: VM timeout in seconds
             agent_artifact_id: Optional agent artifact ID to associate with the session
+            connect_network: If True, automatically connect all VMs to a WireGuard network
 
         Returns:
             A new Session instance with all environments ready
@@ -59,19 +61,22 @@
             >>>
             >>> # From task
             >>> session = await plato.sessions.create(task="abc123")
+            >>>
+            >>> # With networking enabled automatically
+            >>> session = await plato.sessions.create(envs=[...], connect_network=True)
         """
         if envs is not None and task is not None:
             raise ValueError("Cannot specify both envs and task")
 
         if task is not None:
-            return await Session.from_task(
+            session = await Session.from_task(
                 http_client=self._http,
                 api_key=self._api_key,
                 task_id=task,
                 timeout=timeout,
             )
         elif envs is not None:
-            return await Session.from_envs(
+            session = await Session.from_envs(
                 http_client=self._http,
                 api_key=self._api_key,
                 envs=envs,
@@ -81,6 +86,23 @@
         else:
             raise ValueError("Must specify either envs or task")
 
+        if connect_network:
+            try:
+                await session.connect_network()
+            except Exception:
+                # Clean up session if network connection fails
+                import logging
+
+                logging.getLogger(__name__).info(f"Network connection failed, closing session {session.session_id}")
+                try:
+                    await session.close()
+                    logging.getLogger(__name__).info(f"Session {session.session_id} closed")
+                except Exception as close_err:
+                    logging.getLogger(__name__).warning(f"Failed to close session: {close_err}")
+                raise
+
+        return session
+
 
 class AsyncPlato:
     """Asynchronous Plato client for v2 API.
plato/v2/async_/session.py
CHANGED
@@ -24,6 +24,7 @@ if TYPE_CHECKING:
 from plato._generated.api.v2.jobs import get_flows as jobs_get_flows
 from plato._generated.api.v2.jobs import public_url as jobs_public_url
 from plato._generated.api.v2.sessions import close as sessions_close
+from plato._generated.api.v2.sessions import connect_network as sessions_connect_network
 from plato._generated.api.v2.sessions import disk_snapshot as sessions_disk_snapshot
 from plato._generated.api.v2.sessions import evaluate as sessions_evaluate
 from plato._generated.api.v2.sessions import execute as sessions_execute
@@ -44,6 +45,7 @@ from plato._generated.models import (
     AppApiV2SchemasSessionHeartbeatResponse,
     AppApiV2SchemasSessionSetupSandboxRequest,
     AppApiV2SchemasSessionSetupSandboxResponse,
+    ConnectNetworkRequest,
     CreateDiskSnapshotRequest,
     CreateDiskSnapshotResponse,
     CreateSessionFromEnvs,
@@ -146,6 +148,15 @@ class Session:
         self._heartbeat_task: asyncio.Task | None = None
         self._heartbeat_interval = 30
         self._envs: list[Environment] | None = None
+        self._network_host_only: bool = False  # True if WireGuard not available (no VM-to-VM mesh)
+
+    @property
+    def network_host_only(self) -> bool:
+        """True if network is using host-only routing (WireGuard not available in VM).
+
+        When True, external SSH access works but VM-to-VM mesh networking does not.
+        """
+        return self._network_host_only
 
     @property
     def session_id(self) -> str:
@@ -751,6 +762,43 @@
 
         return urls
 
+    async def connect_network(self, host_only: bool = False) -> dict:
+        """Connect all VMs in this session to a WireGuard network.
+
+        Creates a full mesh WireGuard network between all VMs in the session.
+        Must be called after all environments are ready. This method is idempotent -
+        calling it multiple times will not reconnect already-connected VMs.
+
+        Args:
+            host_only: If True, force WireGuard to run on the worker instead of
+                inside the VM. Useful for VMs without WireGuard tools or
+                for testing worker-side WireGuard.
+
+        Returns:
+            Dict with:
+            - success: bool - True if all VMs connected successfully
+            - session_id: str - The session ID
+            - subnet: str - The network subnet (e.g., "10.100.0.0/24")
+            - results: dict[str, bool] - Success status per job_id
+
+        Raises:
+            RuntimeError: If session is closed or network connection fails.
+        """
+        self._check_closed()
+
+        # Server returns 500 with error detail if network connection fails
+        result = await sessions_connect_network.asyncio(
+            client=self._http,
+            session_id=self.session_id,
+            body=ConnectNetworkRequest(host_only=host_only),
+            x_api_key=self._api_key,
+        )
+
+        # Track if any VMs are using host-only routing (no VM-to-VM mesh)
+        self._network_host_only = result.get("host_only", False)
+
+        return result
+
     async def cleanup_databases(self) -> SessionCleanupResult:
         """Clean up database audit logs for all environments.
 
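And a sketch of calling the new Session.connect_network method directly on an already-created session; the surrounding setup is illustrative only, while the result keys and the network_host_only property come from the docstrings above.

async def ensure_mesh(session) -> None:
    # Re-running is safe: connect_network is documented as idempotent.
    result = await session.connect_network()
    if not result.get("success", False):
        raise RuntimeError(f"WireGuard mesh setup failed: {result.get('results')}")
    print("subnet:", result.get("subnet"))  # e.g. "10.100.0.0/24"

    # Host-only fallback: external SSH still works, but there is no VM-to-VM mesh.
    if session.network_host_only:
        print("WireGuard unavailable inside the VM; using host-only routing")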
|