plato-sdk-v2 2.0.64__py3-none-any.whl → 2.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plato/__init__.py +0 -9
- plato/_sims_generator/__init__.py +19 -4
- plato/_sims_generator/instruction.py +203 -0
- plato/_sims_generator/templates/instruction/helpers.py.jinja +161 -0
- plato/_sims_generator/templates/instruction/init.py.jinja +43 -0
- plato/agents/__init__.py +99 -430
- plato/agents/base.py +145 -0
- plato/agents/build.py +61 -0
- plato/agents/config.py +160 -0
- plato/agents/logging.py +515 -0
- plato/agents/runner.py +191 -0
- plato/agents/trajectory.py +266 -0
- plato/chronos/models/__init__.py +1 -1
- plato/sims/cli.py +299 -123
- plato/sims/registry.py +77 -4
- plato/v1/cli/agent.py +88 -84
- plato/v1/cli/pm.py +84 -44
- plato/v1/cli/sandbox.py +241 -61
- plato/v1/cli/ssh.py +16 -4
- plato/v1/cli/verify.py +685 -0
- plato/v1/cli/world.py +3 -0
- plato/v1/flow_executor.py +21 -17
- plato/v1/models/env.py +11 -11
- plato/v1/sdk.py +2 -2
- plato/v1/sync_env.py +11 -11
- plato/v1/sync_flow_executor.py +21 -17
- plato/v1/sync_sdk.py +4 -2
- plato/v2/__init__.py +2 -0
- plato/v2/async_/environment.py +31 -0
- plato/v2/async_/session.py +72 -4
- plato/v2/sync/environment.py +31 -0
- plato/v2/sync/session.py +72 -4
- plato/worlds/README.md +71 -56
- plato/worlds/__init__.py +56 -18
- plato/worlds/base.py +578 -93
- plato/worlds/config.py +276 -74
- plato/worlds/runner.py +475 -80
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/METADATA +3 -3
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/RECORD +41 -36
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/entry_points.txt +1 -0
- plato/agents/callback.py +0 -246
- plato/world/__init__.py +0 -44
- plato/world/base.py +0 -267
- plato/world/config.py +0 -139
- plato/world/types.py +0 -47
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/WHEEL +0 -0
plato/v1/cli/verify.py
ADDED
@@ -0,0 +1,685 @@
"""Verification CLI commands for Plato simulator creation pipeline.

All verification commands follow the convention:
- Exit 0 = verification passed
- Exit 1 = verification failed
- Stderr = actionable error message for agents

Usage:
    plato sandbox verify <check>
    plato pm verify <check>
"""

from __future__ import annotations

import os
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import NoReturn

import typer
import yaml

from plato.v1.cli.utils import (
    SANDBOX_FILE,
    get_http_client,
    require_api_key,
)


def _error(msg: str) -> None:
    """Write error to stderr."""
    sys.stderr.write(f"{msg}\n")


def _fail(msg: str) -> NoReturn:
    """Write error to stderr and exit 1."""
    _error(msg)
    raise typer.Exit(1)

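The module docstring above pins down the contract these checks expose to agents: exit 0 means the verification passed, exit 1 means it failed, and stderr carries the actionable message. As a minimal sketch of how a caller might consume that contract (harness-side illustration, not code from this package):

    # Illustrative only: invoke a verify check and surface its stderr on failure.
    import subprocess

    def run_check(*args: str) -> bool:
        # e.g. run_check("sandbox", "verify", "services")
        proc = subprocess.run(["plato", *args], capture_output=True, text=True)
        if proc.returncode == 0:
            return True  # verification passed
        print(f"verify failed: {proc.stderr.strip()}")  # actionable message from the check
        return False
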
# =============================================================================
# SANDBOX VERIFY COMMANDS
# =============================================================================

sandbox_verify_app = typer.Typer(help="Verify sandbox setup and state")


@sandbox_verify_app.callback(invoke_without_command=True)
def sandbox_verify_default(ctx: typer.Context):
    """
    Verify sandbox is properly configured.

    Exit 0 if .sandbox.yaml has all required fields.
    Exit 1 with stderr describing missing fields.
    """
    if ctx.invoked_subcommand is not None:
        return

    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    if not state:
        _fail(f"File is empty: {SANDBOX_FILE}")

    # Core required fields (ssh_config_path is optional - proxytunnel may not be installed)
    required_fields = ["job_id", "session_id", "public_url", "plato_config_path", "service"]
    missing = [f for f in required_fields if f not in state or not state[f]]

    # Check plato_config_path exists
    # Container paths like /workspace/foo.yml map to cwd/foo.yml when verifier runs from sim_dir
    plato_config = state.get("plato_config_path")
    if plato_config:
        # Convert container path to relative path for checking
        if plato_config.startswith("/workspace/"):
            check_path = Path(plato_config[len("/workspace/") :])
        else:
            check_path = Path(plato_config)

        if not check_path.exists():
            missing.append(f"plato_config_path (file): File not found: {plato_config}")

    if missing:
        _fail(f"Missing fields in {SANDBOX_FILE}: {missing}")

    # Success - exit 0

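For reference, the default check above requires non-empty job_id, session_id, public_url, plato_config_path, and service entries in .sandbox.yaml, and resolves /workspace/ paths against the current directory. A hedged sketch of writing a file that would satisfy it (every value below is a placeholder):

    # Hypothetical example: field names match required_fields above, values are made up.
    import yaml

    sandbox_state = {
        "job_id": "job-0000",
        "session_id": "sess-0000",
        "public_url": "https://example.invalid",
        "plato_config_path": "/workspace/plato-config.yml",  # checked as ./plato-config.yml
        "service": "example-service",
    }

    with open(".sandbox.yaml", "w") as f:
        yaml.safe_dump(sandbox_state, f, sort_keys=False)
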
@sandbox_verify_app.command(name="services")
def verify_services():
    """
    Verify containers are running and public URL returns 200.

    Exit 0 if all containers healthy and URL accessible.
    Exit 1 with stderr describing the issue (e.g., "HTTP 502 - check nginx config").
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    ssh_config = state.get("ssh_config_path")
    ssh_host = state.get("ssh_host", "sandbox")
    public_url = state.get("public_url")

    if not ssh_config:
        _fail("No ssh_config_path in .sandbox.yaml")

    # Check containers via SSH
    try:
        result = subprocess.run(
            [
                "ssh",
                "-F",
                os.path.expanduser(ssh_config),
                ssh_host,
                "docker ps -a --format '{{.Names}}\t{{.Status}}'",
            ],
            capture_output=True,
            text=True,
            timeout=30,
        )

        if result.returncode != 0:
            _fail(f"Failed to check containers via SSH: {result.stderr.strip()}")

        unhealthy = []
        for line in result.stdout.strip().split("\n"):
            if not line:
                continue
            parts = line.split("\t")
            if len(parts) >= 2:
                name, status = parts[0], parts[1]
                if "unhealthy" in status.lower() or "exited" in status.lower() or "dead" in status.lower():
                    unhealthy.append(f"{name}: {status}")

        if unhealthy:
            _fail(f"Unhealthy containers: {unhealthy}")

    except subprocess.TimeoutExpired:
        _fail("SSH connection timed out")
    except FileNotFoundError:
        _fail("SSH not found")

    # Check public URL
    if public_url:
        try:
            import urllib.error
            import urllib.request

            req = urllib.request.Request(public_url, method="HEAD")
            req.add_header("User-Agent", "plato-verify/1.0")

            try:
                with urllib.request.urlopen(req, timeout=10) as response:
                    if response.getcode() != 200:
                        _fail(f"HTTP {response.getcode()} from {public_url}")
            except urllib.error.HTTPError as e:
                if e.code == 502:
                    _fail("HTTP 502 Bad Gateway - check app_port in plato-config.yml and nginx config")
                else:
                    _fail(f"HTTP {e.code} from {public_url}")

        except Exception as e:
            _fail(f"Failed to check public URL: {e}")

    # Success - exit 0

@sandbox_verify_app.command(name="login")
def verify_login():
    """
    Verify login page is accessible.

    Exit 0 if public URL returns 200.
    Exit 1 if not accessible.
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    public_url = state.get("public_url")
    if not public_url:
        _fail("No public_url in .sandbox.yaml")

    try:
        import urllib.error
        import urllib.request

        req = urllib.request.Request(public_url, method="GET")
        req.add_header("User-Agent", "plato-verify/1.0")

        with urllib.request.urlopen(req, timeout=10) as response:
            if response.getcode() != 200:
                _fail(f"HTTP {response.getcode()} from {public_url}")
    except urllib.error.HTTPError as e:
        _fail(f"HTTP {e.code} from {public_url}")
    except Exception as e:
        _fail(f"Failed to check login page: {e}")

    # Success - exit 0

@sandbox_verify_app.command(name="worker")
def verify_worker():
    """
    Verify Plato worker is connected and audit triggers installed.

    Exit 0 if worker connected.
    Exit 1 with stderr describing the issue.
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    session_id = state.get("session_id")
    if not session_id:
        _fail("No session_id in .sandbox.yaml")

    api_key = require_api_key()

    try:
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

            if state_response is None:
                _fail("State API returned no data")

            if not state_response.results:
                _fail("State API returned empty results")

            for job_id, result in state_response.results.items():
                if hasattr(result, "error") and result.error:
                    _fail(f"Worker error: {result.error}")

                state_data = result.state if hasattr(result, "state") and result.state else {}
                if isinstance(state_data, dict):
                    if "error" in state_data:
                        _fail(f"Worker error: {state_data['error']}")

                    if "db" in state_data:
                        db_state = state_data["db"]
                        if not db_state.get("is_connected", False):
                            _fail("Worker not connected to database")
                        # Success - worker connected
                        return
                    else:
                        _fail("Worker not initialized (no db state)")

            _fail("No worker state found")

    except typer.Exit:
        raise
    except Exception as e:
        if "502" in str(e):
            _fail("Worker not ready (502)")
        _fail(f"Failed to check worker: {e}")

@sandbox_verify_app.command(name="audit-clear")
def verify_audit_clear():
    """
    Verify audit log is cleared (0 mutations).

    Exit 0 if 0 mutations.
    Exit 1 if mutations exist.
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    session_id = state.get("session_id")
    api_key = require_api_key()

    try:
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

            if state_response is None:
                _fail("State API returned no data")

            audit_count = 0
            if state_response.results:
                for job_id, result in state_response.results.items():
                    state_data = result.state if hasattr(result, "state") and result.state else {}
                    if isinstance(state_data, dict) and "db" in state_data:
                        audit_count = state_data["db"].get("audit_log_count", 0)
                        break

            if audit_count != 0:
                _fail(f"Audit log not clear: {audit_count} mutations")

            # Success - exit 0

    except typer.Exit:
        raise
    except Exception as e:
        _fail(f"Failed to check audit: {e}")

@sandbox_verify_app.command(name="flow")
def verify_flow():
    """
    Verify login flow exists and is valid.

    Exit 0 if flows.yml exists with login section.
    Exit 1 if missing or invalid.
    """
    flow_paths = ["flows.yml", "base/flows.yml", "login-flow.yml"]
    flow_file = None

    for path in flow_paths:
        if Path(path).exists():
            flow_file = path
            break

    if not flow_file:
        _fail(f"No flows.yml found. Searched: {flow_paths}")

    assert flow_file is not None  # for type checker

    try:
        with open(flow_file) as f:
            flows = yaml.safe_load(f)

        if not flows:
            _fail(f"Flows file is empty: {flow_file}")

        if "login" not in flows:
            _fail(f"No 'login' flow defined in {flow_file}")

        # Success - exit 0

    except yaml.YAMLError as e:
        _fail(f"Invalid YAML in {flow_file}: {e}")

@sandbox_verify_app.command(name="mutations")
def verify_mutations():
    """
    Verify no mutations after login flow.

    Exit 0 if 0 mutations.
    Exit 1 with stderr listing tables and counts.
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    session_id = state.get("session_id")
    api_key = require_api_key()

    try:
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

            if state_response is None:
                _fail("State API returned no data")

            mutations = []
            audit_count = 0
            if state_response.results:
                for job_id, result in state_response.results.items():
                    state_data = result.state if hasattr(result, "state") and result.state else {}
                    if isinstance(state_data, dict) and "db" in state_data:
                        audit_count = state_data["db"].get("audit_log_count", 0)
                        mutations = state_data["db"].get("mutations", [])
                        break

            if audit_count == 0:
                # Success - exit 0
                return

            # Build table breakdown
            table_ops: dict[str, dict[str, int]] = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
            for mutation in mutations:
                table = mutation.get("table", "unknown")
                op = mutation.get("operation", "UNKNOWN").upper()
                if op in table_ops[table]:
                    table_ops[table][op] += 1

            # Format error message
            table_summary = {t: dict(ops) for t, ops in table_ops.items()}
            _fail(f"Found {audit_count} mutations: {table_summary}")

    except typer.Exit:
        raise
    except Exception as e:
        _fail(f"Failed to check mutations: {e}")

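The breakdown above folds the reported mutation list into per-table INSERT/UPDATE/DELETE counts before failing. A standalone sketch of that aggregation on invented sample rows, to show the shape of the error summary:

    # Sample data is made up; the aggregation mirrors verify_mutations above.
    from collections import defaultdict

    sample_mutations = [
        {"table": "users", "operation": "update"},
        {"table": "users", "operation": "update"},
        {"table": "sessions", "operation": "insert"},
    ]

    table_ops = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
    for mutation in sample_mutations:
        table = mutation.get("table", "unknown")
        op = mutation.get("operation", "UNKNOWN").upper()
        if op in table_ops[table]:
            table_ops[table][op] += 1

    print({t: dict(ops) for t, ops in table_ops.items()})
    # {'users': {'INSERT': 0, 'UPDATE': 2, 'DELETE': 0}, 'sessions': {'INSERT': 1, 'UPDATE': 0, 'DELETE': 0}}
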
@sandbox_verify_app.command(name="audit-active")
def verify_audit_active():
    """
    Verify audit system is tracking changes.

    This is a manual verification step. Always exits 0.
    """
    # This step requires manual verification - just pass
    pass

@sandbox_verify_app.command(name="snapshot")
def verify_snapshot():
    """
    Verify snapshot was created.

    Exit 0 if artifact_id exists in .sandbox.yaml.
    Exit 1 if missing.
    """
    if not Path(SANDBOX_FILE).exists():
        _fail(f"File not found: {SANDBOX_FILE}")

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    artifact_id = state.get("artifact_id")

    if not artifact_id:
        _fail("No artifact_id - run 'plato sandbox snapshot' first")

    # Validate UUID format
    import re

    uuid_pattern = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)

    if not uuid_pattern.match(artifact_id):
        _fail(f"Invalid artifact_id format: {artifact_id}")

    # Success - exit 0

# =============================================================================
# PM VERIFY COMMANDS
# =============================================================================

pm_verify_app = typer.Typer(help="Verify review and submit steps")


@pm_verify_app.command(name="review")
def verify_review():
    """
    Verify review prerequisites.

    Exit 0 if ready for review.
    Exit 1 if missing prerequisites.
    """
    issues = []

    # Check API key
    if not os.environ.get("PLATO_API_KEY"):
        issues.append("PLATO_API_KEY not set")

    # Check .sandbox.yaml
    if not Path(SANDBOX_FILE).exists():
        issues.append(f"{SANDBOX_FILE} not found")
    else:
        with open(SANDBOX_FILE) as f:
            state = yaml.safe_load(f)

        if not state.get("artifact_id"):
            issues.append("No artifact_id - run 'plato sandbox snapshot' first")
        if not state.get("service"):
            issues.append("No service name in .sandbox.yaml")

    # Check plato-config.yml
    if not Path("plato-config.yml").exists() and not Path("plato-config.yaml").exists():
        issues.append("plato-config.yml not found")

    if issues:
        _fail(f"Review prerequisites not met: {issues}")

    # Success - exit 0

@pm_verify_app.command(name="submit")
def verify_submit():
    """
    Verify submit prerequisites.

    Exit 0 if ready to submit.
    Exit 1 if missing prerequisites.
    """
    issues = []

    if not os.environ.get("PLATO_API_KEY"):
        issues.append("PLATO_API_KEY not set")

    if not Path(SANDBOX_FILE).exists():
        issues.append(f"{SANDBOX_FILE} not found")
    else:
        with open(SANDBOX_FILE) as f:
            state = yaml.safe_load(f)

        required = ["artifact_id", "service", "plato_config_path"]
        for field in required:
            if not state.get(field):
                issues.append(f"Missing {field} in .sandbox.yaml")

    if issues:
        _fail(f"Submit prerequisites not met: {issues}")

    # Success - exit 0

# =============================================================================
# RESEARCH/VALIDATION/CONFIG VERIFY COMMANDS
# =============================================================================


@sandbox_verify_app.command(name="research")
def verify_research(
    report_path: str = typer.Option("research-report.yml", "--report", "-r"),
):
    """
    Verify research report is complete.

    Exit 0 if all required fields present.
    Exit 1 with stderr listing missing fields.
    """
    if not Path(report_path).exists():
        _fail(f"Research report not found: {report_path}")

    try:
        with open(report_path) as f:
            report = yaml.safe_load(f)
    except yaml.YAMLError as e:
        _fail(f"Invalid YAML in {report_path}: {e}")

    if not report:
        _fail(f"Research report is empty: {report_path}")

    required_fields = ["db_type", "docker_image", "docker_tag", "credentials", "github_url"]
    missing = [f for f in required_fields if f not in report or not report[f]]

    # Check credentials sub-fields
    if "credentials" in report and report["credentials"]:
        creds = report["credentials"]
        if not creds.get("username"):
            missing.append("credentials.username")
        if not creds.get("password"):
            missing.append("credentials.password")

    # Check db_type is valid
    valid_db_types = ["postgresql", "mysql", "mariadb"]
    if report.get("db_type") and report["db_type"].lower() not in valid_db_types:
        _fail(f"Invalid db_type: {report['db_type']}. Valid: {valid_db_types}")

    if missing:
        _fail(f"Missing fields in research report: {missing}")

    # Success - exit 0

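A research report that passes this check needs the five top-level fields, both credential sub-fields, and a db_type of postgresql, mysql, or mariadb. A hedged sketch of producing such a file (all values are placeholders):

    # Hypothetical research-report.yml contents; field names come from the checks above.
    import yaml

    report = {
        "db_type": "postgresql",  # must be postgresql, mysql, or mariadb
        "docker_image": "example/app",
        "docker_tag": "latest",
        "credentials": {"username": "admin", "password": "change-me"},
        "github_url": "https://github.com/example/app",
    }

    with open("research-report.yml", "w") as f:
        yaml.safe_dump(report, f, sort_keys=False)
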
@sandbox_verify_app.command(name="validation")
def verify_validation(
    report_path: str = typer.Option("research-report.yml", "--report", "-r"),
):
    """
    Verify app can become a simulator.

    Exit 0 if database type supported and no blockers.
    Exit 1 with stderr describing blocker.
    """
    if not Path(report_path).exists():
        _fail(f"Research report not found: {report_path}")

    with open(report_path) as f:
        report = yaml.safe_load(f)

    # Check database type
    db_type = report.get("db_type", "").lower()
    supported_dbs = ["postgresql", "mysql", "mariadb"]

    if db_type == "sqlite":
        _fail("SQLite not supported. Plato requires PostgreSQL, MySQL, or MariaDB")

    if db_type not in supported_dbs:
        _fail(f"Unknown database type: {db_type}. Supported: {supported_dbs}")

    # Check for blockers
    blockers = report.get("blockers", [])
    if blockers:
        _fail(f"Blockers found: {blockers}")

    # Success - exit 0

@sandbox_verify_app.command(name="config")
def verify_config(
    config_path: str = typer.Option("plato-config.yml", "--config", "-c"),
    compose_path: str = typer.Option("base/docker-compose.yml", "--compose"),
):
    """
    Verify configuration files are valid.

    Exit 0 if plato-config.yml and docker-compose.yml are valid.
    Exit 1 with stderr describing issues.
    """
    issues = []

    # Check plato-config.yml
    if not Path(config_path).exists():
        _fail(f"File not found: {config_path}")

    try:
        with open(config_path) as f:
            config = yaml.safe_load(f)
    except yaml.YAMLError as e:
        _fail(f"Invalid YAML in {config_path}: {e}")

    required_config_fields = ["service", "datasets"]
    for field in required_config_fields:
        if field not in config:
            issues.append(f"{config_path}: Missing '{field}'")

    # Check datasets.base structure
    if "datasets" in config and "base" in config.get("datasets", {}):
        base = config["datasets"]["base"]

        if "metadata" not in base:
            issues.append(f"{config_path}: Missing datasets.base.metadata")

        if "listeners" not in base:
            issues.append(f"{config_path}: Missing datasets.base.listeners")
        elif "db" not in base.get("listeners", {}):
            issues.append(f"{config_path}: Missing listeners.db")

    # Check docker-compose.yml
    if not Path(compose_path).exists():
        _fail(f"File not found: {compose_path}")

    try:
        with open(compose_path) as f:
            compose = yaml.safe_load(f)
    except yaml.YAMLError as e:
        _fail(f"Invalid YAML in {compose_path}: {e}")

    services = compose.get("services", {})
    standard_db_images = ["postgres:", "mysql:", "mariadb:"]

    for svc_name, svc_config in services.items():
        if svc_config.get("network_mode") != "host":
            issues.append(f"{compose_path}: '{svc_name}' missing 'network_mode: host'")

        image = svc_config.get("image", "")
        for std_img in standard_db_images:
            if image.startswith(std_img):
                issues.append(f"{compose_path}: '{svc_name}' uses standard DB image '{image}' - use Plato DB image")

    if issues:
        _fail(f"Config issues: {issues}")

    # Success - exit 0
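The compose checks in verify_config require every service to use network_mode: host and reject stock postgres:/mysql:/mariadb: images in favor of the Plato-provided database image. A sketch of a services mapping that would pass both checks, expressed as the dict the command inspects (the image names are stand-ins; the concrete Plato DB image is not named in this diff):

    # Placeholder images; the assertions mirror the two compose checks above.
    compose = {
        "services": {
            "app": {"image": "example/app:1.0", "network_mode": "host"},
            "db": {"image": "plato-db:placeholder", "network_mode": "host"},
        }
    }

    for name, svc in compose["services"].items():
        assert svc.get("network_mode") == "host", name
        assert not any(svc.get("image", "").startswith(p) for p in ("postgres:", "mysql:", "mariadb:")), name
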
plato/v1/cli/world.py
CHANGED
@@ -235,6 +235,8 @@ def world_publish(
     upload_url = f"{api_url}/v2/pypi/worlds/"
     console.print(f"\n[cyan]Uploading to {upload_url}...[/cyan]")
 
+    # api_key is guaranteed to be set (checked earlier when not dry_run)
+    assert api_key is not None, "api_key must be set when not in dry_run mode"
     try:
         result = subprocess.run(
             [
@@ -250,6 +252,7 @@ def world_publish(
             ],
             capture_output=True,
             text=True,
+            check=False,
         )
 
         if result.returncode == 0:
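Both additions are small hardening changes: the assert records, for readers and for type checkers, that api_key (an optional value validated earlier when not in dry-run mode) is non-None at the point of use, and check=False spells out that the code inspects result.returncode itself rather than relying on a raised CalledProcessError. A compressed sketch of the same pattern with illustrative names, not the package's code:

    # Illustrative pattern only.
    import subprocess
    from typing import Optional

    def publish(api_key: Optional[str], dry_run: bool) -> None:
        if not dry_run and api_key is None:
            raise SystemExit("API key required")
        if dry_run:
            return
        assert api_key is not None  # narrows Optional[str] to str for the type checker
        result = subprocess.run(
            ["echo", api_key],  # placeholder command
            capture_output=True,
            text=True,
            check=False,  # handle the return code manually, as in the diff
        )
        if result.returncode != 0:
            print(result.stderr)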