plato-sdk-v2 2.2.4__py3-none-any.whl → 2.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plato/agents/__init__.py +4 -0
- plato/agents/logging.py +114 -0
- plato/agents/runner.py +32 -2
- plato/v1/cli/main.py +0 -2
- plato/v1/cli/sandbox.py +192 -50
- plato/v1/cli/ssh.py +16 -4
- plato/v1/cli/verify.py +243 -827
- plato/v2/async_/environment.py +31 -0
- plato/v2/async_/session.py +33 -0
- plato/v2/sync/environment.py +31 -0
- plato/v2/sync/session.py +33 -0
- plato/worlds/__init__.py +3 -1
- plato/worlds/base.py +276 -2
- plato/worlds/config.py +38 -1
- plato/worlds/runner.py +97 -45
- {plato_sdk_v2-2.2.4.dist-info → plato_sdk_v2-2.3.3.dist-info}/METADATA +3 -1
- {plato_sdk_v2-2.2.4.dist-info → plato_sdk_v2-2.3.3.dist-info}/RECORD +19 -20
- plato/v1/cli/sim.py +0 -11
- {plato_sdk_v2-2.2.4.dist-info → plato_sdk_v2-2.3.3.dist-info}/WHEEL +0 -0
- {plato_sdk_v2-2.2.4.dist-info → plato_sdk_v2-2.3.3.dist-info}/entry_points.txt +0 -0
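
The bulk of the change is the `plato/v1/cli/verify.py` rewrite shown below: Rich console output is dropped and every verify command now reports through its exit code, with the actionable reason written to stderr (Exit 0 = passed, Exit 1 = failed). As a rough sketch of how a caller could consume that convention — the `run_verify` wrapper and the loop are illustrative, not part of the package:

```python
import subprocess

def run_verify(check: str) -> bool:
    """Run `plato sandbox verify <check>` and report the stderr reason on failure."""
    result = subprocess.run(
        ["plato", "sandbox", "verify", check],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        # Exit 1 = failed; the actionable message is written to stderr.
        print(f"{check}: FAILED - {result.stderr.strip()}")
        return False
    # Exit 0 = passed.
    return True

# Walk the pipeline checks in order, stopping at the first failure.
for check in ["services", "login", "worker", "audit-clear", "flow", "mutations", "snapshot"]:
    if not run_verify(check):
        break
```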
plato/v1/cli/verify.py
CHANGED
@@ -1,21 +1,26 @@
 """Verification CLI commands for Plato simulator creation pipeline.
 
-All verification commands follow the
+All verification commands follow the convention:
+- Exit 0 = verification passed
+- Exit 1 = verification failed
+- Stderr = actionable error message for agents
+
+Usage:
     plato sandbox verify <check>
     plato pm verify <check>
-
-Each command verifies a specific step in the pipeline completed successfully.
 """
 
+from __future__ import annotations
+
 import os
 import subprocess
+import sys
 from collections import defaultdict
 from pathlib import Path
+from typing import NoReturn
 
 import typer
 import yaml
-from rich.console import Console
-from rich.table import Table
 
 from plato.v1.cli.utils import (
     SANDBOX_FILE,
@@ -23,7 +28,16 @@ from plato.v1.cli.utils import (
     require_api_key,
 )
 
-
+
+def _error(msg: str) -> None:
+    """Write error to stderr."""
+    sys.stderr.write(f"{msg}\n")
+
+
+def _fail(msg: str) -> NoReturn:
+    """Write error to stderr and exit 1."""
+    _error(msg)
+    raise typer.Exit(1)
 
 
 # =============================================================================
@@ -38,116 +52,54 @@ def sandbox_verify_default(ctx: typer.Context):
     """
     Verify sandbox is properly configured.
 
-    [... old docstring field list removed: session_id, public_url, ssh_config_path, plato_config_path, service ...]
+    Exit 0 if .sandbox.yaml has all required fields.
+    Exit 1 with stderr describing missing fields.
     """
     if ctx.invoked_subcommand is not None:
         return
 
-    console.print("\n[cyan]Verifying sandbox configuration...[/cyan]\n")
-
-    # Check .sandbox.yaml exists
     if not Path(SANDBOX_FILE).exists():
-        console.print("\n[yellow]No active sandbox. Start one with:[/yellow]")
-        console.print("  plato sandbox start -c")
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
-    # Load sandbox state
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
 
     if not state:
-        raise typer.Exit(1)
-    [... ~21 lines removed: old required_fields dict and missing/present bookkeeping ...]
-    [... ~4 lines removed: old ssh_config file-existence check ...]
+        _fail(f"File is empty: {SANDBOX_FILE}")
 
-    # Check plato_config_path
+    # Core required fields (ssh_config_path is optional - proxytunnel may not be installed)
+    required_fields = ["job_id", "session_id", "public_url", "plato_config_path", "service"]
+    missing = [f for f in required_fields if f not in state or not state[f]]
+
+    # Check plato_config_path exists
+    # Container paths like /workspace/foo.yml map to cwd/foo.yml when verifier runs from sim_dir
     plato_config = state.get("plato_config_path")
-    [... ~37 lines removed: old Rich/console report of missing and present fields, fix hints, and success messages ...]
+    if plato_config:
+        # Convert container path to relative path for checking
+        if plato_config.startswith("/workspace/"):
+            check_path = Path(plato_config[len("/workspace/") :])
+        else:
+            check_path = Path(plato_config)
+
+        if not check_path.exists():
+            missing.append(f"plato_config_path (file): File not found: {plato_config}")
+
+    if missing:
+        _fail(f"Missing fields in {SANDBOX_FILE}: {missing}")
+
+    # Success - exit 0
 
 
 @sandbox_verify_app.command(name="services")
 def verify_services():
     """
-    Verify containers are running and
+    Verify containers are running and public URL returns 200.
 
-    - Required containers are healthy
-    - Public URL returns 200 (not 502)
+    Exit 0 if all containers healthy and URL accessible.
+    Exit 1 with stderr describing the issue (e.g., "HTTP 502 - check nginx config").
     """
-    console.print("\n[cyan]Verifying services...[/cyan]\n")
-
-    # Load sandbox state
     if not Path(SANDBOX_FILE).exists():
-        console.print("[yellow]Run: plato sandbox verify[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
@@ -157,13 +109,9 @@ def verify_services():
     public_url = state.get("public_url")
 
     if not ssh_config:
-        raise typer.Exit(1)
-
-    issues = []
+        _fail("No ssh_config_path in .sandbox.yaml")
 
     # Check containers via SSH
-    console.print("[cyan]Checking container status...[/cyan]")
     try:
         result = subprocess.run(
             [
@@ -171,7 +119,7 @@ def verify_services():
                 "-F",
                 os.path.expanduser(ssh_config),
                 ssh_host,
-                "
+                "docker ps -a --format '{{.Names}}\t{{.Status}}'",
             ],
             capture_output=True,
             text=True,
@@ -179,58 +127,28 @@ def verify_services():
         )
 
         if result.returncode != 0:
-            raise typer.Exit(1)
+            _fail(f"Failed to check containers via SSH: {result.stderr.strip()}")
 
-        containers = []
         unhealthy = []
-
         for line in result.stdout.strip().split("\n"):
             if not line:
                 continue
             parts = line.split("\t")
             if len(parts) >= 2:
                 name, status = parts[0], parts[1]
-                [... ~26 lines removed: old healthy/unhealthy classification and Rich "Container Status" table ...]
+                if "unhealthy" in status.lower() or "exited" in status.lower() or "dead" in status.lower():
+                    unhealthy.append(f"{name}: {status}")
 
         if unhealthy:
-
+            _fail(f"Unhealthy containers: {unhealthy}")
 
     except subprocess.TimeoutExpired:
-        raise typer.Exit(1)
+        _fail("SSH connection timed out")
     except FileNotFoundError:
-        raise typer.Exit(1)
+        _fail("SSH not found")
 
     # Check public URL
     if public_url:
-        console.print(f"\n[cyan]Checking public URL: {public_url}[/cyan]")
         try:
             import urllib.error
             import urllib.request
@@ -240,107 +158,75 @@ def verify_services():
 
             try:
                 with urllib.request.urlopen(req, timeout=10) as response:
-                    [... 5 lines removed: old status_code console report ...]
+                    if response.getcode() != 200:
+                        _fail(f"HTTP {response.getcode()} from {public_url}")
             except urllib.error.HTTPError as e:
                 if e.code == 502:
-                    [... ~23 lines removed: old 502 diagnostics (netstat/ss listing over SSH, Rich fix hints) ...]
+                    _fail("HTTP 502 Bad Gateway - check app_port in plato-config.yml and nginx config")
                 else:
-
+                    _fail(f"HTTP {e.code} from {public_url}")
 
         except Exception as e:
-            issues.append(f"Public URL check failed: {e}")
-
-    # Report results
-    if issues:
-        [... ~6 lines removed: old console report of issues and exit ...]
+            _fail(f"Failed to check public URL: {e}")
 
-    console.print("[green]All containers healthy, public URL accessible.[/green]")
-    console.print("\n[green]Ready for next step: plato sandbox verify login[/green]")
+    # Success - exit 0
 
 
 @sandbox_verify_app.command(name="login")
 def verify_login():
     """
-    Verify
+    Verify login page is accessible.
 
-    - Dashboard/home page visible (not login page)
-    - No setup wizards or onboarding screens
-    - Credentials saved for flows.yml
+    Exit 0 if public URL returns 200.
+    Exit 1 if not accessible.
     """
-    [... 7 lines removed: old implementation (content truncated in source) ...]
+    if not Path(SANDBOX_FILE).exists():
+        _fail(f"File not found: {SANDBOX_FILE}")
+
+    with open(SANDBOX_FILE) as f:
+        state = yaml.safe_load(f)
+
+    public_url = state.get("public_url")
+    if not public_url:
+        _fail("No public_url in .sandbox.yaml")
+
+    try:
+        import urllib.error
+        import urllib.request
+
+        req = urllib.request.Request(public_url, method="GET")
+        req.add_header("User-Agent", "plato-verify/1.0")
+
+        with urllib.request.urlopen(req, timeout=10) as response:
+            if response.getcode() != 200:
+                _fail(f"HTTP {response.getcode()} from {public_url}")
+    except urllib.error.HTTPError as e:
+        _fail(f"HTTP {e.code} from {public_url}")
+    except Exception as e:
+        _fail(f"Failed to check login page: {e}")
+
+    # Success - exit 0
 
 
 @sandbox_verify_app.command(name="worker")
 def verify_worker():
     """
-    Verify Plato worker is
+    Verify Plato worker is connected and audit triggers installed.
 
-    - State API responds (not 502)
-    - connected: true
-    - audit_log_count field exists (triggers installed)
+    Exit 0 if worker connected.
+    Exit 1 with stderr describing the issue.
     """
-    console.print("\n[cyan]Verifying worker...[/cyan]\n")
-
-    # Load sandbox state
     if not Path(SANDBOX_FILE).exists():
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
 
     session_id = state.get("session_id")
     if not session_id:
-        raise typer.Exit(1)
+        _fail("No session_id in .sandbox.yaml")
 
     api_key = require_api_key()
-    issues = []
-
-    # Check state API
-    console.print("[cyan]Checking state API...[/cyan]")
 
     try:
         from plato._generated.api.v2.sessions import state as sessions_state
@@ -353,89 +239,49 @@ def verify_worker():
         )
 
         if state_response is None:
-            [... 10 lines removed (content truncated in source) ...]
+            _fail("State API returned no data")
+
+        if not state_response.results:
+            _fail("State API returned empty results")
+
+        for job_id, result in state_response.results.items():
+            if hasattr(result, "error") and result.error:
+                _fail(f"Worker error: {result.error}")
+
+            state_data = result.state if hasattr(result, "state") and result.state else {}
+            if isinstance(state_data, dict):
+                if "error" in state_data:
+                    _fail(f"Worker error: {state_data['error']}")
+
+                if "db" in state_data:
+                    db_state = state_data["db"]
+                    if not db_state.get("is_connected", False):
+                        _fail("Worker not connected to database")
+                    # Success - worker connected
+                    return
+                else:
+                    _fail("Worker not initialized (no db state)")
 
-        [... ~35 lines removed: old per-job state parsing with Rich output (connected flag, audit_log_count, tables tracked) ...]
+        _fail("No worker state found")
 
+    except typer.Exit:
+        raise
     except Exception as e:
-        [... ~25 lines removed: old 502 handling, issues[] report, fix suggestions, and success messages ...]
+        if "502" in str(e):
+            _fail("Worker not ready (502)")
+        _fail(f"Failed to check worker: {e}")
 
 
 @sandbox_verify_app.command(name="audit-clear")
 def verify_audit_clear():
     """
-    Verify audit log
-    """
-    console.print("\n[cyan]Verifying audit log cleared...[/cyan]\n")
+    Verify audit log is cleared (0 mutations).
 
+    Exit 0 if 0 mutations.
+    Exit 1 if mutations exist.
+    """
     if not Path(SANDBOX_FILE).exists():
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
@@ -454,10 +300,8 @@ def verify_audit_clear():
         )
 
         if state_response is None:
-            raise typer.Exit(1)
+            _fail("State API returned no data")
 
-        # Extract audit count from response
         audit_count = 0
         if state_response.results:
             for job_id, result in state_response.results.items():
@@ -466,28 +310,25 @@ def verify_audit_clear():
                     audit_count = state_data["db"].get("audit_log_count", 0)
                     break
 
-        if audit_count
-            console.print("\n[green]Ready for next step: plato sandbox verify flow[/green]")
-        else:
-            console.print(f"[red]❌ Audit log not clear: {audit_count} mutations[/red]")
-            console.print("\n[yellow]Note: Mutation tracking starts fresh when worker starts.[/yellow]")
-            console.print("[yellow]Restart sandbox if you need a clean baseline.[/yellow]")
-            raise typer.Exit(1)
+        if audit_count != 0:
+            _fail(f"Audit log not clear: {audit_count} mutations")
 
+        # Success - exit 0
+
+    except typer.Exit:
+        raise
     except Exception as e:
-        raise typer.Exit(1)
+        _fail(f"Failed to check audit: {e}")
 
 
 @sandbox_verify_app.command(name="flow")
 def verify_flow():
     """
-    Verify login flow exists and
-    """
-    console.print("\n[cyan]Verifying login flow...[/cyan]\n")
+    Verify login flow exists and is valid.
 
-
+    Exit 0 if flows.yml exists with login section.
+    Exit 1 if missing or invalid.
+    """
     flow_paths = ["flows.yml", "base/flows.yml", "login-flow.yml"]
     flow_file = None
 
@@ -497,44 +338,24 @@ def verify_flow():
             break
 
     if not flow_file:
-        console.print(f"[yellow]Searched: {', '.join(flow_paths)}[/yellow]")
-        console.print("\n[yellow]Create flows.yml with login flow definition.[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"No flows.yml found. Searched: {flow_paths}")
 
-
+    assert flow_file is not None  # for type checker
 
-    # Parse flows.yml
     try:
         with open(flow_file) as f:
             flows = yaml.safe_load(f)
 
         if not flows:
-            raise typer.Exit(1)
+            _fail(f"Flows file is empty: {flow_file}")
 
-        # Check for login flow
         if "login" not in flows:
-            [... 7 lines removed: old console message and login_flow/steps extraction ...]
+            _fail(f"No 'login' flow defined in {flow_file}")
 
-        [... ~9 lines removed: old per-step summary output and "Ready for next step" message ...]
+        # Success - exit 0
 
     except yaml.YAMLError as e:
-        raise typer.Exit(1)
+        _fail(f"Invalid YAML in {flow_file}: {e}")
 
 
 @sandbox_verify_app.command(name="mutations")
@@ -542,13 +363,11 @@ def verify_mutations():
     """
     Verify no mutations after login flow.
 
-
+    Exit 0 if 0 mutations.
+    Exit 1 with stderr listing tables and counts.
     """
-    console.print("\n[cyan]Verifying mutations...[/cyan]\n")
-
     if not Path(SANDBOX_FILE).exists():
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
@@ -567,151 +386,61 @@ def verify_mutations():
         )
 
         if state_response is None:
-            raise typer.Exit(1)
+            _fail("State API returned no data")
 
-        # Extract mutations from response
         mutations = []
         audit_count = 0
         if state_response.results:
             for job_id, result in state_response.results.items():
                 state_data = result.state if hasattr(result, "state") and result.state else {}
-                if isinstance(state_data, dict):
-                    [... 4 lines removed (content truncated in source) ...]
+                if isinstance(state_data, dict) and "db" in state_data:
+                    audit_count = state_data["db"].get("audit_log_count", 0)
+                    mutations = state_data["db"].get("mutations", [])
+                    break
 
         if audit_count == 0:
-            console.print("[green]Mutations after login: 0[/green]")
-            console.print("[green]Login is read-only - ready for snapshot.[/green]")
-            console.print("\n[green]Ready for next step: plato sandbox verify audit-active[/green]")
+            # Success - exit 0
             return
 
-        console.print(f"\n[red]Mutations after login: {audit_count}[/red]\n")
-
-        # Group by table and operation
-        table_ops = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
+        # Build table breakdown
+        table_ops: dict[str, dict[str, int]] = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
         for mutation in mutations:
             table = mutation.get("table", "unknown")
             op = mutation.get("operation", "UNKNOWN").upper()
             if op in table_ops[table]:
                 table_ops[table][op] += 1
 
-        [... ~67 lines removed: old Rich table of INSERT/UPDATE/DELETE counts, diagnosis text, suggested audit_ignore_tables fix for plato-config.yml, and follow-up instructions ...]
-
-        raise typer.Exit(1)
+        # Format error message
+        table_summary = {t: dict(ops) for t, ops in table_ops.items()}
+        _fail(f"Found {audit_count} mutations: {table_summary}")
 
     except typer.Exit:
         raise
     except Exception as e:
-        raise typer.Exit(1)
+        _fail(f"Failed to check mutations: {e}")
 
 
 @sandbox_verify_app.command(name="audit-active")
 def verify_audit_active():
     """
-    Verify audit system is
+    Verify audit system is tracking changes.
 
-    This
-    mutations CAN be recorded (not just that there are none).
+    This is a manual verification step. Always exits 0.
     """
-    [... ~15 lines removed: old step-by-step manual instructions printed to the console ...]
+    # This step requires manual verification - just pass
+    pass
 
 
 @sandbox_verify_app.command(name="snapshot")
 def verify_snapshot():
     """
-    Verify snapshot was created
-    """
-    console.print("\n[cyan]Verifying snapshot...[/cyan]\n")
+    Verify snapshot was created.
 
+    Exit 0 if artifact_id exists in .sandbox.yaml.
+    Exit 1 if missing.
+    """
     if not Path(SANDBOX_FILE).exists():
-        raise typer.Exit(1)
+        _fail(f"File not found: {SANDBOX_FILE}")
 
     with open(SANDBOX_FILE) as f:
         state = yaml.safe_load(f)
@@ -719,10 +448,7 @@ def verify_snapshot():
     artifact_id = state.get("artifact_id")
 
     if not artifact_id:
-        console.print("\n[yellow]Create a snapshot first:[/yellow]")
-        console.print("  plato sandbox snapshot")
-        raise typer.Exit(1)
+        _fail("No artifact_id - run 'plato sandbox snapshot' first")
 
     # Validate UUID format
     import re
@@ -730,12 +456,9 @@ def verify_snapshot():
     uuid_pattern = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)
 
     if not uuid_pattern.match(artifact_id):
-        raise typer.Exit(1)
+        _fail(f"Invalid artifact_id format: {artifact_id}")
 
-
-    console.print(f"[green]Artifact ID: {artifact_id}[/green]")
-    console.print("\n[green]Ready for next step: plato pm verify review[/green]")
+    # Success - exit 0
 
 
 # =============================================================================
@@ -748,80 +471,37 @@ pm_verify_app = typer.Typer(help="Verify review and submit steps")
 @pm_verify_app.command(name="review")
 def verify_review():
     """
-    Verify review
+    Verify review prerequisites.
 
-    - .sandbox.yaml has artifact_id
-    - plato-config.yml exists
+    Exit 0 if ready for review.
+    Exit 1 if missing prerequisites.
     """
-    console.print("\n[cyan]Verifying review prerequisites...[/cyan]\n")
-
     issues = []
 
     # Check API key
-    [... 5 lines removed: old api_key console report ...]
+    if not os.environ.get("PLATO_API_KEY"):
         issues.append("PLATO_API_KEY not set")
 
     # Check .sandbox.yaml
-    if Path(SANDBOX_FILE).exists():
+    if not Path(SANDBOX_FILE).exists():
+        issues.append(f"{SANDBOX_FILE} not found")
+    else:
         with open(SANDBOX_FILE) as f:
             state = yaml.safe_load(f)
 
-        [... ~10 lines removed: old per-field console report (artifact_id, service) ...]
+        if not state.get("artifact_id"):
+            issues.append("No artifact_id - run 'plato sandbox snapshot' first")
+        if not state.get("service"):
             issues.append("No service name in .sandbox.yaml")
-    else:
-        console.print(f"[red]❌ {SANDBOX_FILE}: not found[/red]")
-        issues.append("No .sandbox.yaml - start a sandbox first")
 
     # Check plato-config.yml
-    [... ~12 lines removed: old search over config_paths with console report ...]
+    if not Path("plato-config.yml").exists() and not Path("plato-config.yaml").exists():
+        issues.append("plato-config.yml not found")
 
-    # Report
     if issues:
-        [... ~8 lines removed: old console report and PLATO_API_KEY export hint ...]
+        _fail(f"Review prerequisites not met: {issues}")
 
-    console.print("\n[green]✅ Review verification passed[/green]")
-    console.print("[green]Ready to run review:[/green]")
-
-    service = state.get("service", "SERVICE")
-    artifact = state.get("artifact_id", "ARTIFACT_ID")
-    console.print(f"  plato pm review base -s {service} -a {artifact} --skip-review")
+    # Success - exit 0
 
 
 @pm_verify_app.command(name="submit")
@@ -829,441 +509,177 @@ def verify_submit():
     """
     Verify submit prerequisites.
 
-    - .sandbox.yaml is complete (artifact_id, service, plato_config_path)
+    Exit 0 if ready to submit.
+    Exit 1 if missing prerequisites.
     """
-    console.print("\n[cyan]Verifying submit prerequisites...[/cyan]\n")
-
     issues = []
 
-    api_key = os.environ.get("PLATO_API_KEY")
-    if api_key:
-        console.print("[green]✅ PLATO_API_KEY: set[/green]")
-    else:
-        console.print("[red]❌ PLATO_API_KEY: not set[/red]")
+    if not os.environ.get("PLATO_API_KEY"):
         issues.append("PLATO_API_KEY not set")
 
-    # Check .sandbox.yaml
     if not Path(SANDBOX_FILE).exists():
-        issues.append("No .sandbox.yaml")
+        issues.append(f"{SANDBOX_FILE} not found")
     else:
-        console.print(f"[green]✅ {SANDBOX_FILE}: exists[/green]")
-
         with open(SANDBOX_FILE) as f:
             state = yaml.safe_load(f)
 
         required = ["artifact_id", "service", "plato_config_path"]
         for field in required:
-            if state.get(field):
-                console.print(f"[green]✅ {field}: present[/green]")
-            else:
-                console.print(f"[red]❌ {field}: missing[/red]")
+            if not state.get(field):
                 issues.append(f"Missing {field} in .sandbox.yaml")
 
-    # Report
     if issues:
-        [... ~8 lines removed: old console report and PLATO_API_KEY export hint ...]
+        _fail(f"Submit prerequisites not met: {issues}")
 
-    console.print("\n[green]✅ Submit verification passed[/green]")
-    console.print("[green]Ready to submit:[/green]")
-    console.print("  plato pm submit base")
+    # Success - exit 0
 
 
 # =============================================================================
-#
+# RESEARCH/VALIDATION/CONFIG VERIFY COMMANDS
 # =============================================================================
 
 
 @sandbox_verify_app.command(name="research")
 def verify_research(
-    report_path: str = typer.Option("research-report.yml", "--report", "-r"
+    report_path: str = typer.Option("research-report.yml", "--report", "-r"),
 ):
     """
-    Verify
-
-    Checks that the research report has all required fields:
-    - db_type (postgresql, mysql, mariadb)
-    - docker_image
-    - docker_tag
-    - credentials (username, password)
-    - env_vars (required environment variables)
-    """
-    console.print("\n[cyan]Verifying research report...[/cyan]\n")
+    Verify research report is complete.
 
+    Exit 0 if all required fields present.
+    Exit 1 with stderr listing missing fields.
+    """
     if not Path(report_path).exists():
-        console.print("\n[yellow]Run sim-research skill first to create the report.[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"Research report not found: {report_path}")
 
-    # Load and parse report
     try:
         with open(report_path) as f:
             report = yaml.safe_load(f)
     except yaml.YAMLError as e:
-        raise typer.Exit(1)
+        _fail(f"Invalid YAML in {report_path}: {e}")
 
     if not report:
-        [... ~25 lines removed: old required_fields/recommended_fields dicts and missing_required bookkeeping ...]
+        _fail(f"Research report is empty: {report_path}")
+
+    required_fields = ["db_type", "docker_image", "docker_tag", "credentials", "github_url"]
+    missing = [f for f in required_fields if f not in report or not report[f]]
 
     # Check credentials sub-fields
     if "credentials" in report and report["credentials"]:
         creds = report["credentials"]
         if not creds.get("username"):
-
+            missing.append("credentials.username")
         if not creds.get("password"):
-
+            missing.append("credentials.password")
 
     # Check db_type is valid
     valid_db_types = ["postgresql", "mysql", "mariadb"]
     if report.get("db_type") and report["db_type"].lower() not in valid_db_types:
-        console.print(f"   Valid options: {', '.join(valid_db_types)}")
-        raise typer.Exit(1)
+        _fail(f"Invalid db_type: {report['db_type']}. Valid: {valid_db_types}")
 
-    [... ~45 lines removed: old recommended-field check, console suggestions, and Rich "Research Report Summary" table ...]
+    if missing:
+        _fail(f"Missing fields in research report: {missing}")
 
+    # Success - exit 0
 
 
 @sandbox_verify_app.command(name="validation")
 def verify_validation(
-    report_path: str = typer.Option("research-report.yml", "--report", "-r"
+    report_path: str = typer.Option("research-report.yml", "--report", "-r"),
 ):
     """
-    Verify
-
-    - Database type is supported (PostgreSQL, MySQL, MariaDB)
-    - No blockers (SQLite, commercial-only, etc.)
+    Verify app can become a simulator.
+
+    Exit 0 if database type supported and no blockers.
+    Exit 1 with stderr describing blocker.
     """
-    console.print("\n[cyan]Verifying app can be simulated...[/cyan]\n")
-
-    # Load report
     if not Path(report_path).exists():
-        console.print("[yellow]Run: plato sandbox verify research[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"Research report not found: {report_path}")
 
     with open(report_path) as f:
         report = yaml.safe_load(f)
 
-    issues = []
-
     # Check database type
     db_type = report.get("db_type", "").lower()
     supported_dbs = ["postgresql", "mysql", "mariadb"]
 
     if db_type == "sqlite":
-        [... ~21 lines removed: old BLOCKER issue dicts for SQLite/unknown databases and console report ...]
+        _fail("SQLite not supported. Plato requires PostgreSQL, MySQL, or MariaDB")
 
-    [... ~24 lines removed: old optional "docker manifest inspect" image check ...]
+    if db_type not in supported_dbs:
+        _fail(f"Unknown database type: {db_type}. Supported: {supported_dbs}")
 
-    # Check for known blockers
+    # Check for blockers
     blockers = report.get("blockers", [])
-    [... ~30 lines removed: old issue dicts, blockers/errors report, and success messages ...]
+    if blockers:
+        _fail(f"Blockers found: {blockers}")
 
+    # Success - exit 0
 
 
 @sandbox_verify_app.command(name="config")
 def verify_config(
-    config_path: str = typer.Option("plato-config.yml", "--config", "-c"
-    compose_path: str = typer.Option("base/docker-compose.yml", "--compose"
+    config_path: str = typer.Option("plato-config.yml", "--config", "-c"),
+    compose_path: str = typer.Option("base/docker-compose.yml", "--compose"),
 ):
     """
-    Verify
-
-    Checks plato-config.yml:
-    - Valid YAML syntax
-    - Required fields present (service, datasets, metadata, listeners)
-    - Correct Plato database images used
-
-    Checks docker-compose.yml:
-    - Valid YAML syntax
-    - network_mode: host on all services
-    - db_signals volume mounted
-    - Signal-based healthchecks for database
-    """
-    console.print("\n[cyan]Verifying configuration files...[/cyan]\n")
+    Verify configuration files are valid.
 
+    Exit 0 if plato-config.yml and docker-compose.yml are valid.
+    Exit 1 with stderr describing issues.
+    """
     issues = []
 
     # Check plato-config.yml
     if not Path(config_path).exists():
-        console.print("[yellow]Run sim-config skill to create it.[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"File not found: {config_path}")
 
     try:
         with open(config_path) as f:
             config = yaml.safe_load(f)
     except yaml.YAMLError as e:
-        raise typer.Exit(1)
+        _fail(f"Invalid YAML in {config_path}: {e}")
 
-    console.print("[green]✅ plato-config.yml: Valid YAML[/green]")
-
-    # Check required fields
     required_config_fields = ["service", "datasets"]
     for field in required_config_fields:
         if field not in config:
-            issues.append(f"
+            issues.append(f"{config_path}: Missing '{field}'")
 
     # Check datasets.base structure
     if "datasets" in config and "base" in config.get("datasets", {}):
         base = config["datasets"]["base"]
 
-        # Check metadata
         if "metadata" not in base:
-            [... 7 lines removed: old metadata field checks (name, description, flows_path) ...]
+            issues.append(f"{config_path}: Missing datasets.base.metadata")
 
-        # Check listeners
         if "listeners" not in base:
-            [... ~16 lines removed: old listeners.db field checks and db_host == 127.0.0.1 check ...]
+            issues.append(f"{config_path}: Missing datasets.base.listeners")
+        elif "db" not in base.get("listeners", {}):
+            issues.append(f"{config_path}: Missing listeners.db")
 
     # Check docker-compose.yml
     if not Path(compose_path).exists():
-        console.print("[yellow]Run sim-config skill to create it.[/yellow]")
-        raise typer.Exit(1)
+        _fail(f"File not found: {compose_path}")
 
     try:
         with open(compose_path) as f:
             compose = yaml.safe_load(f)
     except yaml.YAMLError as e:
-        raise typer.Exit(1)
-
-    console.print("[green]✅ docker-compose.yml: Valid YAML[/green]")
+        _fail(f"Invalid YAML in {compose_path}: {e}")
 
     services = compose.get("services", {})
-
-    # Standard images that should NOT be used (should use Plato DB images instead)
-    standard_db_images = ["postgres:", "mysql:", "mariadb:", "mongo:"]
+    standard_db_images = ["postgres:", "mysql:", "mariadb:"]
 
     for svc_name, svc_config in services.items():
-        # Check network_mode
         if svc_config.get("network_mode") != "host":
-            issues.append(f"
+            issues.append(f"{compose_path}: '{svc_name}' missing 'network_mode: host'")
 
-        # Check for standard database images
         image = svc_config.get("image", "")
         for std_img in standard_db_images:
             if image.startswith(std_img):
-                [... ~29 lines removed: old Plato-image fix hint, db_signals volume check, and signal-based healthcheck check ...]
+                issues.append(f"{compose_path}: '{svc_name}' uses standard DB image '{image}' - use Plato DB image")
 
-    # Report results
     if issues:
-        [... 5 lines removed: old console report of issues and exit ...]
-
-    console.print("\n[green]✅ Config verification passed[/green]")
-    console.print("[green]All configuration files are valid.[/green]")
-    console.print("\n[green]Ready for next step: plato sandbox start[/green]")
+        _fail(f"Config issues: {issues}")
+
+    # Success - exit 0