plato-sdk-v2 2.0.50__py3-none-any.whl → 2.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plato/__init__.py +7 -6
- plato/_generated/__init__.py +1 -1
- plato/_generated/api/v1/env/evaluate_session.py +3 -3
- plato/_generated/api/v1/env/log_state_mutation.py +4 -4
- plato/_generated/api/v1/sandbox/checkpoint_vm.py +3 -3
- plato/_generated/api/v1/sandbox/save_vm_snapshot.py +3 -3
- plato/_generated/api/v1/sandbox/setup_sandbox.py +8 -8
- plato/_generated/api/v1/session/__init__.py +2 -0
- plato/_generated/api/v1/session/get_sessions_for_archival.py +100 -0
- plato/_generated/api/v1/testcases/__init__.py +6 -2
- plato/_generated/api/v1/testcases/get_mutation_groups_for_testcase.py +98 -0
- plato/_generated/api/v1/testcases/{get_next_output_testcase_for_scoring.py → get_next_testcase_for_scoring.py} +23 -10
- plato/_generated/api/v1/testcases/get_testcase_metadata_for_scoring.py +74 -0
- plato/_generated/api/v2/__init__.py +2 -1
- plato/_generated/api/v2/jobs/__init__.py +4 -0
- plato/_generated/api/v2/jobs/checkpoint.py +3 -3
- plato/_generated/api/v2/jobs/disk_snapshot.py +3 -3
- plato/_generated/api/v2/jobs/log_for_job.py +4 -39
- plato/_generated/api/v2/jobs/make.py +4 -4
- plato/_generated/api/v2/jobs/setup_sandbox.py +97 -0
- plato/_generated/api/v2/jobs/snapshot.py +3 -3
- plato/_generated/api/v2/jobs/snapshot_store.py +91 -0
- plato/_generated/api/v2/sessions/__init__.py +4 -0
- plato/_generated/api/v2/sessions/checkpoint.py +3 -3
- plato/_generated/api/v2/sessions/disk_snapshot.py +3 -3
- plato/_generated/api/v2/sessions/evaluate.py +3 -3
- plato/_generated/api/v2/sessions/log_job_mutation.py +4 -39
- plato/_generated/api/v2/sessions/make.py +4 -4
- plato/_generated/api/v2/sessions/setup_sandbox.py +98 -0
- plato/_generated/api/v2/sessions/snapshot.py +3 -3
- plato/_generated/api/v2/sessions/snapshot_store.py +94 -0
- plato/_generated/api/v2/user/__init__.py +7 -0
- plato/_generated/api/v2/user/get_current_user.py +76 -0
- plato/_generated/models/__init__.py +174 -23
- plato/_sims_generator/__init__.py +19 -4
- plato/_sims_generator/instruction.py +203 -0
- plato/_sims_generator/templates/instruction/helpers.py.jinja +161 -0
- plato/_sims_generator/templates/instruction/init.py.jinja +43 -0
- plato/agents/__init__.py +107 -517
- plato/agents/base.py +145 -0
- plato/agents/build.py +61 -0
- plato/agents/config.py +160 -0
- plato/agents/logging.py +401 -0
- plato/agents/runner.py +161 -0
- plato/agents/trajectory.py +266 -0
- plato/chronos/__init__.py +37 -0
- plato/chronos/api/__init__.py +3 -0
- plato/chronos/api/agents/__init__.py +13 -0
- plato/chronos/api/agents/create_agent.py +63 -0
- plato/chronos/api/agents/delete_agent.py +61 -0
- plato/chronos/api/agents/get_agent.py +62 -0
- plato/chronos/api/agents/get_agent_schema.py +72 -0
- plato/chronos/api/agents/get_agent_versions.py +62 -0
- plato/chronos/api/agents/list_agents.py +57 -0
- plato/chronos/api/agents/lookup_agent.py +74 -0
- plato/chronos/api/auth/__init__.py +9 -0
- plato/chronos/api/auth/debug_auth_api_auth_debug_get.py +43 -0
- plato/chronos/api/auth/get_auth_status_api_auth_status_get.py +61 -0
- plato/chronos/api/auth/get_current_user_route_api_auth_me_get.py +60 -0
- plato/chronos/api/callback/__init__.py +11 -0
- plato/chronos/api/callback/push_agent_logs.py +61 -0
- plato/chronos/api/callback/update_agent_status.py +57 -0
- plato/chronos/api/callback/upload_artifacts.py +59 -0
- plato/chronos/api/callback/upload_logs_zip.py +57 -0
- plato/chronos/api/callback/upload_trajectory.py +57 -0
- plato/chronos/api/default/__init__.py +7 -0
- plato/chronos/api/default/health.py +43 -0
- plato/chronos/api/jobs/__init__.py +7 -0
- plato/chronos/api/jobs/launch_job.py +63 -0
- plato/chronos/api/registry/__init__.py +19 -0
- plato/chronos/api/registry/get_agent_schema_api_registry_agents__agent_name__schema_get.py +62 -0
- plato/chronos/api/registry/get_agent_versions_api_registry_agents__agent_name__versions_get.py +52 -0
- plato/chronos/api/registry/get_world_schema_api_registry_worlds__package_name__schema_get.py +68 -0
- plato/chronos/api/registry/get_world_versions_api_registry_worlds__package_name__versions_get.py +52 -0
- plato/chronos/api/registry/list_registry_agents_api_registry_agents_get.py +44 -0
- plato/chronos/api/registry/list_registry_worlds_api_registry_worlds_get.py +44 -0
- plato/chronos/api/runtimes/__init__.py +11 -0
- plato/chronos/api/runtimes/create_runtime.py +63 -0
- plato/chronos/api/runtimes/delete_runtime.py +61 -0
- plato/chronos/api/runtimes/get_runtime.py +62 -0
- plato/chronos/api/runtimes/list_runtimes.py +57 -0
- plato/chronos/api/runtimes/test_runtime.py +67 -0
- plato/chronos/api/secrets/__init__.py +11 -0
- plato/chronos/api/secrets/create_secret.py +63 -0
- plato/chronos/api/secrets/delete_secret.py +61 -0
- plato/chronos/api/secrets/get_secret.py +62 -0
- plato/chronos/api/secrets/list_secrets.py +57 -0
- plato/chronos/api/secrets/update_secret.py +68 -0
- plato/chronos/api/sessions/__init__.py +10 -0
- plato/chronos/api/sessions/get_session.py +62 -0
- plato/chronos/api/sessions/get_session_logs.py +72 -0
- plato/chronos/api/sessions/get_session_logs_download.py +62 -0
- plato/chronos/api/sessions/list_sessions.py +57 -0
- plato/chronos/api/status/__init__.py +8 -0
- plato/chronos/api/status/get_status_api_status_get.py +44 -0
- plato/chronos/api/status/get_version_info_api_version_get.py +44 -0
- plato/chronos/api/templates/__init__.py +11 -0
- plato/chronos/api/templates/create_template.py +63 -0
- plato/chronos/api/templates/delete_template.py +61 -0
- plato/chronos/api/templates/get_template.py +62 -0
- plato/chronos/api/templates/list_templates.py +57 -0
- plato/chronos/api/templates/update_template.py +68 -0
- plato/chronos/api/trajectories/__init__.py +8 -0
- plato/chronos/api/trajectories/get_trajectory.py +62 -0
- plato/chronos/api/trajectories/list_trajectories.py +62 -0
- plato/chronos/api/worlds/__init__.py +10 -0
- plato/chronos/api/worlds/create_world.py +63 -0
- plato/chronos/api/worlds/delete_world.py +61 -0
- plato/chronos/api/worlds/get_world.py +62 -0
- plato/chronos/api/worlds/list_worlds.py +57 -0
- plato/chronos/client.py +171 -0
- plato/chronos/errors.py +141 -0
- plato/chronos/models/__init__.py +647 -0
- plato/chronos/py.typed +0 -0
- plato/sims/cli.py +299 -123
- plato/sims/registry.py +77 -4
- plato/v1/cli/agent.py +88 -84
- plato/v1/cli/main.py +2 -0
- plato/v1/cli/pm.py +441 -119
- plato/v1/cli/sandbox.py +747 -191
- plato/v1/cli/sim.py +11 -0
- plato/v1/cli/verify.py +1269 -0
- plato/v1/cli/world.py +3 -0
- plato/v1/flow_executor.py +21 -17
- plato/v1/models/env.py +11 -11
- plato/v1/sdk.py +2 -2
- plato/v1/sync_env.py +11 -11
- plato/v1/sync_flow_executor.py +21 -17
- plato/v1/sync_sdk.py +4 -2
- plato/v2/__init__.py +2 -0
- plato/v2/async_/environment.py +20 -1
- plato/v2/async_/session.py +54 -3
- plato/v2/sync/environment.py +2 -1
- plato/v2/sync/session.py +52 -2
- plato/worlds/README.md +218 -0
- plato/worlds/__init__.py +54 -18
- plato/worlds/base.py +304 -93
- plato/worlds/config.py +239 -73
- plato/worlds/runner.py +391 -80
- {plato_sdk_v2-2.0.50.dist-info → plato_sdk_v2-2.2.4.dist-info}/METADATA +1 -3
- {plato_sdk_v2-2.0.50.dist-info → plato_sdk_v2-2.2.4.dist-info}/RECORD +143 -68
- {plato_sdk_v2-2.0.50.dist-info → plato_sdk_v2-2.2.4.dist-info}/entry_points.txt +1 -0
- plato/_generated/api/v2/interfaces/__init__.py +0 -27
- plato/_generated/api/v2/interfaces/v2_interface_browser_create.py +0 -68
- plato/_generated/api/v2/interfaces/v2_interface_cdp_url.py +0 -65
- plato/_generated/api/v2/interfaces/v2_interface_click.py +0 -64
- plato/_generated/api/v2/interfaces/v2_interface_close.py +0 -59
- plato/_generated/api/v2/interfaces/v2_interface_computer_create.py +0 -68
- plato/_generated/api/v2/interfaces/v2_interface_cursor.py +0 -64
- plato/_generated/api/v2/interfaces/v2_interface_key.py +0 -68
- plato/_generated/api/v2/interfaces/v2_interface_screenshot.py +0 -65
- plato/_generated/api/v2/interfaces/v2_interface_scroll.py +0 -70
- plato/_generated/api/v2/interfaces/v2_interface_type.py +0 -64
- plato/world/__init__.py +0 -44
- plato/world/base.py +0 -267
- plato/world/config.py +0 -139
- plato/world/types.py +0 -47
- {plato_sdk_v2-2.0.50.dist-info → plato_sdk_v2-2.2.4.dist-info}/WHEEL +0 -0
plato/v1/cli/verify.py
ADDED
|
@@ -0,0 +1,1269 @@
|
|
|
1
|
+
"""Verification CLI commands for Plato simulator creation pipeline.
|
|
2
|
+
|
|
3
|
+
All verification commands follow the pattern:
|
|
4
|
+
plato sandbox verify <check>
|
|
5
|
+
plato pm verify <check>
|
|
6
|
+
|
|
7
|
+
Each command verifies a specific step in the pipeline completed successfully.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import os
|
|
11
|
+
import subprocess
|
|
12
|
+
from collections import defaultdict
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
import typer
|
|
16
|
+
import yaml
|
|
17
|
+
from rich.console import Console
|
|
18
|
+
from rich.table import Table
|
|
19
|
+
|
|
20
|
+
from plato.v1.cli.utils import (
|
|
21
|
+
SANDBOX_FILE,
|
|
22
|
+
get_http_client,
|
|
23
|
+
require_api_key,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
# Single shared Rich console used by every verify command in this module.
console = Console()


# =============================================================================
# SANDBOX VERIFY COMMANDS
# =============================================================================

# Sub-app mounted as `plato sandbox verify ...`; the callback below runs the
# default configuration check when no subcommand is given.
sandbox_verify_app = typer.Typer(help="Verify sandbox setup and state")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@sandbox_verify_app.callback(invoke_without_command=True)
def sandbox_verify_default(ctx: typer.Context):
    """
    Verify sandbox is properly configured.

    Checks .sandbox.yaml has all required fields:
    - job_id
    - session_id
    - public_url
    - ssh_config_path
    - plato_config_path
    - service
    """
    # Only run the default check when no explicit subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return

    console.print("\n[cyan]Verifying sandbox configuration...[/cyan]\n")

    # A missing state file means no sandbox has been started yet.
    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        console.print("\n[yellow]No active sandbox. Start one with:[/yellow]")
        console.print(" plato sandbox start -c")
        raise typer.Exit(1)

    with open(SANDBOX_FILE) as fh:
        state = yaml.safe_load(fh)

    if not state:
        console.print(f"[red]❌ {SANDBOX_FILE} is empty[/red]")
        raise typer.Exit(1)

    # Field name -> human description; drives both validation and reporting.
    required_fields = {
        "job_id": "Sandbox job identifier",
        "session_id": "Session identifier",
        "public_url": "Public URL for browser access",
        "ssh_config_path": "SSH config file path",
        "plato_config_path": "Path to plato-config.yml",
        "service": "Simulator service name",
    }

    # Partition into missing (absent or falsy) and present fields, preserving
    # the declaration order of required_fields.
    missing = [(name, desc) for name, desc in required_fields.items() if not state.get(name)]
    present = [(name, state[name]) for name in required_fields if state.get(name)]

    # Referenced files must also exist on disk, not merely be set.
    ssh_config = state.get("ssh_config_path")
    if ssh_config and not Path(os.path.expanduser(ssh_config)).exists():
        missing.append(("ssh_config_path (file)", f"File not found: {ssh_config}"))

    plato_config = state.get("plato_config_path")
    if plato_config and not Path(plato_config).exists():
        missing.append(("plato_config_path (file)", f"File not found: {plato_config}"))

    if missing:
        console.print("[red]❌ Sandbox verification failed[/red]\n")
        console.print("[red]Missing or invalid fields:[/red]")
        for name, desc in missing:
            console.print(f" - {name}: {desc}")

        console.print("\n[yellow]Current .sandbox.yaml contents:[/yellow]")
        for name, value in present:
            console.print(f" {name}: {value}")

        console.print("\n[yellow]Fix by adding missing fields to .sandbox.yaml:[/yellow]")
        missing_names = [entry[0] for entry in missing]
        if any("plato_config_path" in n for n in missing_names):
            console.print(' plato_config_path: "/absolute/path/to/plato-config.yml"')
        if any("service" in n for n in missing_names):
            console.print(' service: "your-sim-name"')

        raise typer.Exit(1)

    console.print("[green]✅ Sandbox verification passed[/green]\n")

    table = Table(title="Sandbox Configuration")
    table.add_column("Field", style="cyan")
    table.add_column("Value", style="white")

    for name, value in present:
        # Keep the table readable by truncating very long values.
        text = str(value)
        table.add_row(name, text if len(text) <= 60 else text[:57] + "...")

    console.print(table)
    console.print("\n[green]Ready for next step: plato sandbox verify services[/green]")
+
|
|
134
|
+
def _services_check_containers(ssh_config: str, ssh_host: str, issues: list) -> None:
    """List docker containers on the sandbox VM via SSH and report their health.

    Renders a status table, and appends human-readable problems to *issues*.
    Exits the CLI directly on SSH-level failures (timeout, missing binary,
    non-zero exit status).
    """
    console.print("[cyan]Checking container status...[/cyan]")
    try:
        result = subprocess.run(
            [
                "ssh",
                "-F",
                os.path.expanduser(ssh_config),
                ssh_host,
                "DOCKER_HOST=unix:///var/run/docker-user.sock docker ps -a --format '{{.Names}}\t{{.Status}}'",
            ],
            capture_output=True,
            text=True,
            timeout=30,
        )
    except subprocess.TimeoutExpired:
        console.print("[red]❌ SSH connection timed out[/red]")
        raise typer.Exit(1)
    except FileNotFoundError:
        console.print("[red]❌ SSH not found[/red]")
        raise typer.Exit(1)

    if result.returncode != 0:
        console.print(f"[red]❌ Failed to check containers: {result.stderr}[/red]")
        raise typer.Exit(1)

    containers = []
    unhealthy = []
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        parts = line.split("\t")
        if len(parts) >= 2:
            name, status = parts[0], parts[1]
            containers.append((name, status))
            # Exited and dead containers count as unhealthy too.
            lowered = status.lower()
            if "unhealthy" in lowered or "exited" in lowered or "dead" in lowered:
                unhealthy.append(name)

    if containers:
        table = Table(title="Container Status")
        table.add_column("Container", style="cyan")
        table.add_column("Status", style="white")
        for name, status in containers:
            status_style = (
                "green"
                if "healthy" in status.lower() and "unhealthy" not in status.lower()
                else "red"
                if name in unhealthy
                else "yellow"
            )
            table.add_row(name, f"[{status_style}]{status}[/{status_style}]")
        console.print(table)
    else:
        console.print("[yellow]⚠️ No containers found[/yellow]")
        issues.append("No containers running")

    if unhealthy:
        issues.append(f"Unhealthy containers: {', '.join(unhealthy)}")


def _services_port_hint(ssh_config: str, ssh_host: str) -> None:
    """Best-effort diagnostics after a 502: list listening ports and print fixes.

    FIX: the port-listing SSH call is wrapped in its own try/except so that a
    slow or failed SSH here cannot bubble up as a misleading
    "Public URL check failed" issue and suppress the fix-option hints.
    """
    console.print("\n[yellow]Checking what ports are listening on VM...[/yellow]")
    try:
        port_result = subprocess.run(
            [
                "ssh",
                "-F",
                os.path.expanduser(ssh_config),
                ssh_host,
                "netstat -tlnp 2>/dev/null | grep LISTEN || ss -tlnp | grep LISTEN",
            ],
            capture_output=True,
            text=True,
            timeout=10,
        )
        if port_result.stdout:
            console.print(f"[dim]{port_result.stdout}[/dim]")
    except (subprocess.TimeoutExpired, OSError):
        # Diagnostics only — the 502 issue was already recorded by the caller.
        pass

    console.print("\n[yellow]Fix options:[/yellow]")
    console.print(" 1. Change app to listen on the expected port (check app_port in plato-config.yml)")
    console.print(" 2. Add nginx to proxy from app_port to your app's actual port")


def _services_check_public_url(public_url: str, ssh_config: str, ssh_host: str, issues: list) -> None:
    """HEAD the public URL; on a 502, show listening ports and fix hints."""
    console.print(f"\n[cyan]Checking public URL: {public_url}[/cyan]")
    try:
        import urllib.error
        import urllib.request

        req = urllib.request.Request(public_url, method="HEAD")
        req.add_header("User-Agent", "plato-verify/1.0")

        try:
            with urllib.request.urlopen(req, timeout=10) as response:
                status_code = response.getcode()
                if status_code == 200:
                    console.print(f"[green]✅ Public URL returns {status_code}[/green]")
                else:
                    console.print(f"[yellow]⚠️ Public URL returns {status_code}[/yellow]")
        except urllib.error.HTTPError as e:
            if e.code == 502:
                console.print("[red]❌ Public URL returns 502 Bad Gateway[/red]")
                issues.append("502 Bad Gateway - nothing listening on app_port")
                _services_port_hint(ssh_config, ssh_host)
            else:
                console.print(f"[yellow]⚠️ Public URL returns {e.code}[/yellow]")

    except Exception as e:
        console.print(f"[red]❌ Failed to check public URL: {e}[/red]")
        issues.append(f"Public URL check failed: {e}")


@sandbox_verify_app.command(name="services")
def verify_services():
    """
    Verify containers are running and healthy.

    Checks:
    - All containers in running state
    - Required containers are healthy
    - Public URL returns 200 (not 502)
    """
    console.print("\n[cyan]Verifying services...[/cyan]\n")

    # The sandbox state file supplies SSH config and the public URL.
    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        console.print("[yellow]Run: plato sandbox verify[/yellow]")
        raise typer.Exit(1)

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    ssh_config = state.get("ssh_config_path")
    ssh_host = state.get("ssh_host", "sandbox")
    public_url = state.get("public_url")

    if not ssh_config:
        console.print("[red]❌ No ssh_config_path in .sandbox.yaml[/red]")
        raise typer.Exit(1)

    issues: list = []

    _services_check_containers(ssh_config, ssh_host, issues)
    if public_url:
        _services_check_public_url(public_url, ssh_config, ssh_host, issues)

    # Report results
    if issues:
        console.print("\n[red]❌ Services verification failed[/red]\n")
        console.print("[red]Issues found:[/red]")
        for issue in issues:
            console.print(f" - {issue}")
        raise typer.Exit(1)

    console.print("\n[green]✅ Services verification passed[/green]")
    console.print("[green]All containers healthy, public URL accessible.[/green]")
    console.print("\n[green]Ready for next step: plato sandbox verify login[/green]")
|
|
293
|
+
@sandbox_verify_app.command(name="login")
def verify_login():
    """
    Verify manual login was successful (placeholder).

    This check requires browser verification. Run after manually
    logging in via Playwright to confirm:
    - Dashboard/home page visible (not login page)
    - No setup wizards or onboarding screens
    - Credentials saved for flows.yml
    """
    console.print("\n[cyan]Login verification[/cyan]\n")
    # This command is a checklist, not an automated probe: print the manual steps.
    checklist = (
        "[yellow]This step requires manual browser verification.[/yellow]",
        "\nAfter logging in via Playwright, confirm:",
        " 1. Dashboard or home page is visible (NOT login page)",
        " 2. No setup wizards or onboarding screens",
        " 3. Save the credentials you used for flows.yml",
        "\n[green]If login successful, proceed to: plato sandbox verify worker[/green]",
    )
    for message in checklist:
        console.print(message)
|
|
313
|
+
@sandbox_verify_app.command(name="worker")
def verify_worker():
    """
    Verify Plato worker is running and audit triggers installed.

    Checks:
    - Worker container running
    - State API responds (not 502)
    - connected: true
    - audit_log_count field exists (triggers installed)
    """
    console.print("\n[cyan]Verifying worker...[/cyan]\n")

    # The sandbox state file supplies the session to query.
    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        raise typer.Exit(1)

    with open(SANDBOX_FILE) as fh:
        sandbox_state = yaml.safe_load(fh)

    session_id = sandbox_state.get("session_id")
    if not session_id:
        console.print("[red]❌ No session_id in .sandbox.yaml[/red]")
        raise typer.Exit(1)

    api_key = require_api_key()
    issues: list = []

    console.print("[cyan]Checking state API...[/cyan]")

    try:
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

        if state_response is None:
            console.print("[red]❌ State API returned no data[/red]")
            issues.append("State API returned empty response")
        elif not state_response.results:
            console.print("[red]❌ State API returned empty results[/red]")
            issues.append("State API returned empty results")
        else:
            for job_id, job_result in state_response.results.items():
                # A per-job error short-circuits further inspection of that job.
                if hasattr(job_result, "error") and job_result.error:
                    console.print(f"[red]❌ State API error for job {job_id}: {job_result.error}[/red]")
                    issues.append(f"State API error: {job_result.error}")
                    continue

                payload = job_result.state if hasattr(job_result, "state") and job_result.state else {}
                if not isinstance(payload, dict):
                    continue

                # Errors from the API layer arrive wrapped inside the state dict.
                if "error" in payload:
                    console.print(f"[red]❌ Worker error: {payload['error']}[/red]")
                    issues.append(f"Worker error: {payload['error']}")
                    continue

                if "db" not in payload:
                    console.print("[yellow]⚠️ No db state found - worker may not be initialized[/yellow]")
                    issues.append("Worker not initialized (no db state)")
                    continue

                db_state = payload["db"]
                if db_state.get("is_connected", False):
                    console.print("[green]✅ Worker connected: true[/green]")
                else:
                    console.print("[red]❌ Worker not connected[/red]")
                    issues.append("Worker not connected to database")

                # Presence of audit_log_count indicates the audit triggers are installed.
                if "audit_log_count" in db_state:
                    audit_count = db_state.get("audit_log_count", 0)
                    console.print(f"[green]✅ Audit triggers installed (count: {audit_count})[/green]")
                else:
                    console.print("[yellow]⚠️ audit_log_count not found in state[/yellow]")

                if "tables" in db_state:
                    console.print(f"[cyan] Tables tracked: {len(db_state['tables'])}[/cyan]")

    except Exception as exc:
        if "502" in str(exc):
            console.print("[red]❌ State API returned 502 - worker not ready[/red]")
            issues.append("Worker not ready (502)")
        else:
            console.print(f"[red]❌ Failed to check state: {exc}[/red]")
            issues.append(f"State check failed: {exc}")

    # Report results
    if issues:
        console.print("\n[red]❌ Worker verification failed[/red]\n")
        console.print("[red]Issues found:[/red]")
        for issue in issues:
            console.print(f" - {issue}")

        console.print("\n[yellow]Fix:[/yellow]")
        console.print(" plato sandbox start-worker --wait")
        console.print(" plato sandbox verify worker")

        raise typer.Exit(1)

    console.print("\n[green]✅ Worker verification passed[/green]")
    console.print("[green]Worker connected, audit triggers installed.[/green]")
    console.print("\n[green]Ready for next step: plato sandbox verify audit-clear[/green]")
|
|
429
|
+
@sandbox_verify_app.command(name="audit-clear")
def verify_audit_clear():
    """
    Verify audit log was cleared (0 mutations).

    Fetches the session state via the v2 sessions state API and checks the
    worker-reported ``audit_log_count`` is zero. Exits non-zero when the
    count is non-zero or the state cannot be fetched.
    """
    console.print("\n[cyan]Verifying audit log cleared...[/cyan]\n")

    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        raise typer.Exit(1)

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    session_id = state.get("session_id")
    api_key = require_api_key()

    try:
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

        if state_response is None:
            console.print("[red]❌ State API returned no data[/red]")
            raise typer.Exit(1)

        # Extract audit count from the first job result that reports db state.
        audit_count = 0
        if state_response.results:
            for job_id, result in state_response.results.items():
                state_data = result.state if hasattr(result, "state") and result.state else {}
                if isinstance(state_data, dict) and "db" in state_data:
                    audit_count = state_data["db"].get("audit_log_count", 0)
                    break

        if audit_count == 0:
            console.print("[green]✅ Audit log clear: 0 mutations[/green]")
            console.print("\n[green]Ready for next step: plato sandbox verify flow[/green]")
        else:
            console.print(f"[red]❌ Audit log not clear: {audit_count} mutations[/red]")
            console.print("\n[yellow]Note: Mutation tracking starts fresh when worker starts.[/yellow]")
            console.print("[yellow]Restart sandbox if you need a clean baseline.[/yellow]")
            raise typer.Exit(1)

    except typer.Exit:
        # BUG FIX: typer.Exit subclasses Exception (via click's Exit, a
        # RuntimeError), so the generic handler below used to swallow our own
        # deliberate exits above and print a misleading
        # "Failed to check state" message. Re-raise it untouched.
        raise
    except Exception as e:
        console.print(f"[red]❌ Failed to check state: {e}[/red]")
        raise typer.Exit(1)
|
|
483
|
+
@sandbox_verify_app.command(name="flow")
def verify_flow():
    """
    Verify login flow exists and can be parsed.
    """
    console.print("\n[cyan]Verifying login flow...[/cyan]\n")

    # Accept the flow definition from any of the conventional locations,
    # first match wins.
    flow_paths = ["flows.yml", "base/flows.yml", "login-flow.yml"]
    flow_file = next((candidate for candidate in flow_paths if Path(candidate).exists()), None)

    if not flow_file:
        console.print("[red]❌ No flows.yml found[/red]")
        console.print(f"[yellow]Searched: {', '.join(flow_paths)}[/yellow]")
        console.print("\n[yellow]Create flows.yml with login flow definition.[/yellow]")
        raise typer.Exit(1)

    console.print(f"[green]✅ Found flow file: {flow_file}[/green]")

    try:
        with open(flow_file) as fh:
            flows = yaml.safe_load(fh)

        if not flows:
            console.print("[red]❌ Flows file is empty[/red]")
            raise typer.Exit(1)

        if "login" not in flows:
            console.print("[red]❌ No 'login' flow defined[/red]")
            console.print("[yellow]flows.yml must have a 'login' section[/yellow]")
            raise typer.Exit(1)

        steps = flows["login"].get("steps", [])
        console.print(f"[green]✅ Login flow found with {len(steps)} steps[/green]")

        # Print a short one-line summary per step.
        for index, step in enumerate(steps, start=1):
            action = step.get("action", "unknown")
            selector = step.get("selector", "")[:40]
            console.print(f" {index}. {action}: {selector}...")

        console.print("\n[green]Ready for next step: plato sandbox flow login[/green]")

    except yaml.YAMLError as e:
        console.print(f"[red]❌ Invalid YAML in flows file: {e}[/red]")
        raise typer.Exit(1)
|
540
|
+
@sandbox_verify_app.command(name="mutations")
def verify_mutations():
    """
    Verify no mutations after login flow.

    Analyzes mutations by type (INSERT vs UPDATE) and suggests fixes.
    """
    # Reads session_id from the local .sandbox.yaml, queries the v2 sessions
    # state API, and inspects the "db" section of the first job result that
    # has one. Exits 0 only when the audit log count is zero.
    console.print("\n[cyan]Verifying mutations...[/cyan]\n")

    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        raise typer.Exit(1)

    with open(SANDBOX_FILE) as f:
        state = yaml.safe_load(f)

    # NOTE(review): session_id may be None if .sandbox.yaml is incomplete;
    # the API call below is assumed to surface that as an exception — confirm.
    session_id = state.get("session_id")
    api_key = require_api_key()

    try:
        # Imported lazily so the CLI loads even if the generated client is stale.
        from plato._generated.api.v2.sessions import state as sessions_state

        with get_http_client() as client:
            state_response = sessions_state.sync(
                session_id=session_id,
                client=client,
                x_api_key=api_key,
            )

        if state_response is None:
            console.print("[red]❌ State API returned no data[/red]")
            raise typer.Exit(1)

        # Extract mutations from response: take the db stats from the first
        # job result that carries a dict state with a "db" key, then stop.
        mutations = []
        audit_count = 0
        if state_response.results:
            for job_id, result in state_response.results.items():
                state_data = result.state if hasattr(result, "state") and result.state else {}
                if isinstance(state_data, dict):
                    if "db" in state_data:
                        audit_count = state_data["db"].get("audit_log_count", 0)
                        mutations = state_data["db"].get("mutations", [])
                        break

        if audit_count == 0:
            # Success path: login produced no writes, safe to snapshot.
            console.print("[green]✅ Mutation verification passed[/green]")
            console.print("[green]Mutations after login: 0[/green]")
            console.print("[green]Login is read-only - ready for snapshot.[/green]")
            console.print("\n[green]Ready for next step: plato sandbox verify audit-active[/green]")
            return

        # Analyze mutations
        console.print("[red]❌ Mutation verification failed[/red]")
        console.print(f"\n[red]Mutations after login: {audit_count}[/red]\n")

        # Group by table and operation; unknown operations are silently
        # dropped by the membership check below.
        table_ops = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
        for mutation in mutations:
            table = mutation.get("table", "unknown")
            op = mutation.get("operation", "UNKNOWN").upper()
            if op in table_ops[table]:
                table_ops[table][op] += 1

        # Display table (note: rebinds `table` from the loop above to a rich Table)
        table = Table(title="Mutation Analysis")
        table.add_column("Table", style="cyan")
        table.add_column("INSERT", style="yellow")
        table.add_column("UPDATE", style="yellow")
        table.add_column("DELETE", style="yellow")

        total_inserts = 0
        total_updates = 0
        insert_tables = []
        update_tables = []

        for tbl_name, ops in sorted(table_ops.items()):
            table.add_row(
                tbl_name,
                str(ops["INSERT"]) if ops["INSERT"] else "-",
                str(ops["UPDATE"]) if ops["UPDATE"] else "-",
                str(ops["DELETE"]) if ops["DELETE"] else "-",
            )
            total_inserts += ops["INSERT"]
            total_updates += ops["UPDATE"]
            if ops["INSERT"] > 0:
                insert_tables.append(tbl_name)
            if ops["UPDATE"] > 0:
                update_tables.append(tbl_name)

        console.print(table)

        # Diagnosis and suggestions
        console.print("\n[yellow]Diagnosis:[/yellow]")

        if total_inserts > 0:
            console.print(f"\n [yellow]{total_inserts} INSERT operations (new rows created)[/yellow]")
            console.print(" This is likely lazy initialization - settings created on first access.")
            console.print("\n [red]⚠️ Column-level ignores will NOT work for INSERT operations.[/red]")
            console.print(" You must ignore the entire table.")

        if total_updates > 0:
            console.print(f"\n [yellow]{total_updates} UPDATE operations[/yellow]")
            console.print(" These can often be fixed with column-level ignores (e.g., last_login, updated_at).")

        # Suggested fix: emit a ready-to-paste audit_ignore_tables YAML snippet.
        console.print("\n[yellow]Suggested fix for plato-config.yml:[/yellow]")
        console.print("```yaml")
        console.print("audit_ignore_tables:")

        if insert_tables:
            console.print(" # Lazy-init tables (INSERT on first login) - must ignore entire table")
            for tbl in insert_tables:
                console.print(f" - {tbl}")

        if update_tables:
            console.print(" # Tables with timestamp updates - can use column-level ignore")
            for tbl in update_tables:
                # Tables already fully ignored above need no column-level entry.
                if tbl not in insert_tables:
                    console.print(f" - table: {tbl}")
                    console.print(" columns: [last_login, updated_at, modified_at]")

        console.print("```")

        console.print("\n[yellow]After updating config:[/yellow]")
        console.print(" 1. plato sandbox sync")
        console.print(" 2. plato sandbox flow")
        console.print(" 3. plato sandbox verify mutations")
        console.print("\n[yellow]Note: There is no stop-worker or clear-audit command.[/yellow]")
        console.print("[yellow]If sync doesn't work, restart sandbox (loses UI setup).[/yellow]")

        raise typer.Exit(1)

    except typer.Exit:
        # Re-raise our own exits untouched so the generic handler below
        # doesn't mask the intended exit code.
        raise
    except Exception as e:
        console.print(f"[red]❌ Failed to check mutations: {e}[/red]")
        raise typer.Exit(1)
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
@sandbox_verify_app.command(name="audit-active")
def verify_audit_active():
    """
    Verify audit system is actively tracking changes.

    This confirms the audit system works by checking that
    mutations CAN be recorded (not just that there are none).
    """
    # This check cannot be automated: it walks the operator through a
    # manual procedure and prints the decision criteria verbatim.
    guidance = (
        "\n[cyan]Verifying audit system is active...[/cyan]\n",
        "[yellow]This step requires manual verification:[/yellow]",
        "\n1. Make a small change in the app via browser:",
        " - Update a setting (language, timezone, theme)",
        " - Do NOT create test data (it pollutes the snapshot)",
        "\n2. Check state:",
        " plato sandbox state -v",
        "\n3. Verify mutations NOW appear:",
        " - You SHOULD see 1+ mutations after your change",
        " - If no mutations appear, audit system is BROKEN",
        "\n[green]If mutations appear after your change:[/green]",
        " ✅ Audit system is working correctly",
        " Proceed to: plato sandbox verify snapshot",
        "\n[red]If NO mutations appear after your change:[/red]",
        " ❌ Audit system is broken - restart worker",
    )
    for message in guidance:
        console.print(message)
|
|
703
|
+
|
|
704
|
+
|
|
705
|
+
@sandbox_verify_app.command(name="snapshot")
def verify_snapshot():
    """
    Verify snapshot was created successfully.
    """
    import re

    console.print("\n[cyan]Verifying snapshot...[/cyan]\n")

    sandbox_path = Path(SANDBOX_FILE)
    if not sandbox_path.exists():
        console.print(f"[red]❌ {SANDBOX_FILE} not found[/red]")
        raise typer.Exit(1)

    with sandbox_path.open() as fh:
        sandbox_state = yaml.safe_load(fh)

    artifact_id = sandbox_state.get("artifact_id")
    if not artifact_id:
        console.print("[red]❌ No artifact_id in .sandbox.yaml[/red]")
        console.print("\n[yellow]Create a snapshot first:[/yellow]")
        console.print(" plato sandbox snapshot")
        raise typer.Exit(1)

    # Artifact ids are UUIDs; reject anything that does not look like one.
    artifact_re = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)
    if not artifact_re.match(artifact_id):
        console.print(f"[red]❌ Invalid artifact_id format: {artifact_id}[/red]")
        raise typer.Exit(1)

    console.print("[green]✅ Snapshot verification passed[/green]")
    console.print(f"[green]Artifact ID: {artifact_id}[/green]")
    console.print("\n[green]Ready for next step: plato pm verify review[/green]")
|
|
739
|
+
|
|
740
|
+
|
|
741
|
+
# =============================================================================
# PM VERIFY COMMANDS
# =============================================================================

# Sub-app grouping the `plato pm verify ...` commands (review, submit).
pm_verify_app = typer.Typer(help="Verify review and submit steps")
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
@pm_verify_app.command(name="review")
def verify_review():
    """
    Verify review is ready to run.

    Checks:
    - PLATO_API_KEY is set
    - .sandbox.yaml has artifact_id
    - plato-config.yml exists
    """
    console.print("\n[cyan]Verifying review prerequisites...[/cyan]\n")

    issues = []
    # Bound up front so `state` always exists; it is only read on the success
    # path, which requires .sandbox.yaml to have been present and valid.
    state = {}

    # Check API key
    api_key = os.environ.get("PLATO_API_KEY")
    if api_key:
        console.print("[green]✅ PLATO_API_KEY: set[/green]")
    else:
        console.print("[red]❌ PLATO_API_KEY: not set[/red]")
        issues.append("PLATO_API_KEY not set")

    # Check .sandbox.yaml
    if Path(SANDBOX_FILE).exists():
        console.print(f"[green]✅ {SANDBOX_FILE}: exists[/green]")

        with open(SANDBOX_FILE) as f:
            # An empty file parses to None; treat it as an empty mapping so
            # the .get() checks below report missing fields instead of crashing.
            state = yaml.safe_load(f) or {}

        if state.get("artifact_id"):
            console.print(f"[green]✅ artifact_id: {state['artifact_id']}[/green]")
        else:
            console.print("[red]❌ artifact_id: missing[/red]")
            issues.append("No artifact_id - run plato sandbox snapshot first")

        if state.get("service"):
            console.print(f"[green]✅ service: {state['service']}[/green]")
        else:
            console.print("[red]❌ service: missing[/red]")
            issues.append("No service name in .sandbox.yaml")
    else:
        console.print(f"[red]❌ {SANDBOX_FILE}: not found[/red]")
        issues.append("No .sandbox.yaml - start a sandbox first")

    # Check plato-config.yml (accept both .yml and .yaml spellings)
    config_paths = ["plato-config.yml", "plato-config.yaml"]
    config_found = None
    for path in config_paths:
        if Path(path).exists():
            config_found = path
            break

    if config_found:
        console.print(f"[green]✅ plato-config.yml: {config_found}[/green]")
    else:
        console.print("[red]❌ plato-config.yml: not found[/red]")
        issues.append("No plato-config.yml")

    # Report
    if issues:
        console.print("\n[red]❌ Review verification failed[/red]\n")
        console.print("[red]Issues:[/red]")
        for issue in issues:
            console.print(f" - {issue}")

        if "PLATO_API_KEY" in str(issues):
            console.print("\n[yellow]Fix:[/yellow]")
            # SECURITY: show a placeholder, never a real credential. The
            # previous hint embedded an actual API key in source and output.
            console.print(' export PLATO_API_KEY="<your-plato-api-key>"')

        raise typer.Exit(1)

    console.print("\n[green]✅ Review verification passed[/green]")
    console.print("[green]Ready to run review:[/green]")

    service = state.get("service", "SERVICE")
    artifact = state.get("artifact_id", "ARTIFACT_ID")
    console.print(f" plato pm review base -s {service} -a {artifact} --skip-review")
|
|
825
|
+
|
|
826
|
+
|
|
827
|
+
@pm_verify_app.command(name="submit")
def verify_submit():
    """
    Verify submit prerequisites.

    Checks:
    - PLATO_API_KEY is set
    - .sandbox.yaml is complete (artifact_id, service, plato_config_path)
    """
    console.print("\n[cyan]Verifying submit prerequisites...[/cyan]\n")

    issues = []

    # Check API key
    api_key = os.environ.get("PLATO_API_KEY")
    if api_key:
        console.print("[green]✅ PLATO_API_KEY: set[/green]")
    else:
        console.print("[red]❌ PLATO_API_KEY: not set[/red]")
        issues.append("PLATO_API_KEY not set")

    # Check .sandbox.yaml
    if not Path(SANDBOX_FILE).exists():
        console.print(f"[red]❌ {SANDBOX_FILE}: not found[/red]")
        issues.append("No .sandbox.yaml")
    else:
        console.print(f"[green]✅ {SANDBOX_FILE}: exists[/green]")

        with open(SANDBOX_FILE) as f:
            # Empty file parses to None; fall back to {} so the field checks
            # below report every field as missing instead of raising.
            state = yaml.safe_load(f) or {}

        required = ["artifact_id", "service", "plato_config_path"]
        for field in required:
            if state.get(field):
                console.print(f"[green]✅ {field}: present[/green]")
            else:
                console.print(f"[red]❌ {field}: missing[/red]")
                issues.append(f"Missing {field} in .sandbox.yaml")

    # Report
    if issues:
        console.print("\n[red]❌ Submit verification failed[/red]\n")
        console.print("[red]Issues:[/red]")
        for issue in issues:
            console.print(f" - {issue}")

        if "PLATO_API_KEY" in str(issues):
            console.print("\n[yellow]Fix API key:[/yellow]")
            # SECURITY: show a placeholder, never a real credential. The
            # previous hint embedded an actual API key in source and output.
            console.print(' export PLATO_API_KEY="<your-plato-api-key>"')

        raise typer.Exit(1)

    console.print("\n[green]✅ Submit verification passed[/green]")
    console.print("[green]Ready to submit:[/green]")
    console.print(" plato pm submit base")
|
|
882
|
+
|
|
883
|
+
|
|
884
|
+
# =============================================================================
|
|
885
|
+
# SIMULATOR CONFIG VERIFY COMMANDS
|
|
886
|
+
# =============================================================================
|
|
887
|
+
|
|
888
|
+
|
|
889
|
+
@sandbox_verify_app.command(name="research")
def verify_research(
    report_path: str = typer.Option("research-report.yml", "--report", "-r", help="Path to research report file"),
):
    """
    Verify that simulator research is complete.

    Checks that the research report has all required fields:
    - db_type (postgresql, mysql, mariadb)
    - docker_image
    - docker_tag
    - credentials (username, password)
    - env_vars (required environment variables)
    """
    console.print("\n[cyan]Verifying research report...[/cyan]\n")

    # The report must exist before anything else can be checked.
    if not Path(report_path).exists():
        console.print(f"[red]❌ Research report not found: {report_path}[/red]")
        console.print("\n[yellow]Run sim-research skill first to create the report.[/yellow]")
        raise typer.Exit(1)

    # Parse the report; syntax errors and empty files are hard failures.
    try:
        with open(report_path) as f:
            report = yaml.safe_load(f)
    except yaml.YAMLError as e:
        console.print(f"[red]❌ Invalid YAML in research report: {e}[/red]")
        raise typer.Exit(1)

    if not report:
        console.print("[red]❌ Research report is empty[/red]")
        raise typer.Exit(1)

    # Fields that must be present and truthy for the report to pass.
    required_fields = {
        "db_type": "Database type (postgresql, mysql, mariadb)",
        "docker_image": "Docker image name",
        "docker_tag": "Docker image tag",
        "credentials": "Login credentials",
        "github_url": "GitHub repository URL",
    }

    # Fields that are nice to have but never block verification.
    recommended_fields = {
        "env_vars": "Environment variables",
        "license": "Software license",
        "description": "App description",
        "favicon_url": "Favicon URL",
    }

    # Absent keys and falsy values are treated identically.
    missing_required = [(fld, desc) for fld, desc in required_fields.items() if not report.get(fld)]

    # Credentials must additionally carry both username and password.
    creds = report.get("credentials")
    if creds:
        if not creds.get("username"):
            missing_required.append(("credentials.username", "Login username"))
        if not creds.get("password"):
            missing_required.append(("credentials.password", "Login password"))

    # A declared-but-unsupported database is an immediate failure.
    valid_db_types = ["postgresql", "mysql", "mariadb"]
    declared_db = report.get("db_type")
    if declared_db and declared_db.lower() not in valid_db_types:
        console.print(f"[red]❌ Invalid db_type: {report['db_type']}[/red]")
        console.print(f" Valid options: {', '.join(valid_db_types)}")
        raise typer.Exit(1)

    missing_recommended = [(fld, desc) for fld, desc in recommended_fields.items() if not report.get(fld)]

    # Report results
    if missing_required:
        console.print("[red]❌ Research verification failed[/red]\n")
        console.print("[red]Missing required fields:[/red]")
        for fld, desc in missing_required:
            console.print(f" - {fld}: {desc}")

        console.print("\n[yellow]Suggestions:[/yellow]")
        if any("docker" in entry[0] for entry in missing_required):
            console.print(" - Check GitHub packages: ghcr.io/{owner}/{repo}")
            console.print(" - Check Docker Hub: hub.docker.com/r/{owner}/{repo}")
        if any("credentials" in entry[0] for entry in missing_required):
            console.print(" - Look for INSTALL.md or docker-compose.yml in repo")
            console.print(" - Check documentation for default credentials")

        raise typer.Exit(1)

    # Success
    console.print("[green]✅ Research verification passed[/green]\n")

    # Render a compact summary of the key report fields.
    summary = Table(title="Research Report Summary")
    summary.add_column("Field", style="cyan")
    summary.add_column("Value", style="white")

    summary.add_row("Database", report.get("db_type", ""))
    summary.add_row("Docker Image", f"{report.get('docker_image', '')}:{report.get('docker_tag', '')}")
    summary.add_row("Username", report.get("credentials", {}).get("username", ""))
    summary.add_row("GitHub URL", report.get("github_url", ""))

    console.print(summary)

    if missing_recommended:
        console.print("\n[yellow]⚠️ Missing recommended fields (not blocking):[/yellow]")
        for fld, desc in missing_recommended:
            console.print(f" - {fld}: {desc}")

    console.print("\n[green]Ready for next step: plato sandbox verify validation[/green]")
|
|
1005
|
+
|
|
1006
|
+
|
|
1007
|
+
@sandbox_verify_app.command(name="validation")
def verify_validation(
    report_path: str = typer.Option("research-report.yml", "--report", "-r", help="Path to research report file"),
):
    """
    Verify that the app can become a Plato simulator.

    Checks:
    - Docker image exists and can be pulled
    - Database type is supported (PostgreSQL, MySQL, MariaDB)
    - No blockers (SQLite, commercial-only, etc.)
    """
    console.print("\n[cyan]Verifying app can be simulated...[/cyan]\n")

    # Load report
    if not Path(report_path).exists():
        console.print(f"[red]❌ Research report not found: {report_path}[/red]")
        console.print("[yellow]Run: plato sandbox verify research[/yellow]")
        raise typer.Exit(1)

    with open(report_path) as f:
        # Empty file parses to None; fall back to {} so the checks below
        # report problems instead of raising AttributeError.
        report = yaml.safe_load(f) or {}

    issues = []

    # Check database type. `or ""` also guards an explicit `db_type: null`,
    # which .get(..., "") would pass through as None and crash on .lower().
    db_type = (report.get("db_type") or "").lower()
    supported_dbs = ["postgresql", "mysql", "mariadb"]

    if db_type == "sqlite":
        issues.append(
            {
                "check": "Database",
                "status": "BLOCKER",
                "message": "SQLite is not supported. Plato requires PostgreSQL, MySQL, or MariaDB for state tracking.",
            }
        )
    elif db_type not in supported_dbs:
        issues.append(
            {
                "check": "Database",
                "status": "BLOCKER",
                "message": f"Unknown database type: {db_type}. Supported: {', '.join(supported_dbs)}",
            }
        )
    else:
        console.print(f"[green]✅ Database: {db_type} (supported)[/green]")

    # Check Docker image exists (optional - requires docker)
    docker_image = report.get("docker_image", "")
    docker_tag = report.get("docker_tag", "latest")

    if docker_image:
        full_image = f"{docker_image}:{docker_tag}"
        console.print(f"[cyan]Checking Docker image: {full_image}[/cyan]")

        # `docker manifest inspect` checks existence without pulling layers.
        try:
            result = subprocess.run(["docker", "manifest", "inspect", full_image], capture_output=True, timeout=30)
            if result.returncode == 0:
                console.print(f"[green]✅ Docker image exists: {full_image}[/green]")
            else:
                issues.append(
                    {
                        "check": "Docker Image",
                        "status": "ERROR",
                        "message": f"Image not found or not accessible: {full_image}",
                    }
                )
        except FileNotFoundError:
            # Docker CLI absent: skip rather than fail this best-effort check.
            console.print("[yellow]⚠️ Docker not installed, skipping image check[/yellow]")
        except subprocess.TimeoutExpired:
            console.print("[yellow]⚠️ Docker check timed out, skipping[/yellow]")

    # Check for known blockers declared in the report itself
    blockers = report.get("blockers", [])
    for blocker in blockers:
        issues.append(
            {
                "check": "Blocker",
                "status": "BLOCKER",
                "message": blocker,
            }
        )

    # Report results: blockers (unfixable) take priority over errors (fixable).
    blockers_found = [i for i in issues if i["status"] == "BLOCKER"]
    errors_found = [i for i in issues if i["status"] == "ERROR"]

    if blockers_found:
        console.print("\n[red]❌ Validation failed - BLOCKERS found[/red]\n")
        for issue in blockers_found:
            console.print(f"[red] {issue['check']}: {issue['message']}[/red]")
        console.print("\n[yellow]This application cannot become a Plato simulator.[/yellow]")
        raise typer.Exit(1)

    if errors_found:
        console.print("\n[red]❌ Validation failed[/red]\n")
        for issue in errors_found:
            console.print(f"[red] {issue['check']}: {issue['message']}[/red]")
        console.print("\n[yellow]Fix the issues above and re-run validation.[/yellow]")
        raise typer.Exit(1)

    console.print("\n[green]✅ Validation passed[/green]")
    console.print("[green]App can become a Plato simulator.[/green]")
    console.print("\n[green]Ready for next step: plato sandbox verify config[/green]")
|
|
1113
|
+
|
|
1114
|
+
|
|
1115
|
+
@sandbox_verify_app.command(name="config")
def verify_config(
    config_path: str = typer.Option("plato-config.yml", "--config", "-c", help="Path to plato-config.yml"),
    compose_path: str = typer.Option("base/docker-compose.yml", "--compose", help="Path to docker-compose.yml"),
):
    """
    Verify simulator configuration files.

    Checks plato-config.yml:
    - Valid YAML syntax
    - Required fields present (service, datasets, metadata, listeners)
    - Correct Plato database images used

    Checks docker-compose.yml:
    - Valid YAML syntax
    - network_mode: host on all services
    - db_signals volume mounted
    - Signal-based healthchecks for database
    """
    console.print("\n[cyan]Verifying configuration files...[/cyan]\n")

    # Collected problems; reported all at once at the end so one run
    # surfaces every issue instead of failing on the first.
    issues = []

    # ==========================================================================
    # Check plato-config.yml
    # ==========================================================================

    if not Path(config_path).exists():
        console.print(f"[red]❌ plato-config.yml not found: {config_path}[/red]")
        console.print("[yellow]Run sim-config skill to create it.[/yellow]")
        raise typer.Exit(1)

    try:
        with open(config_path) as f:
            config = yaml.safe_load(f)
    except yaml.YAMLError as e:
        console.print(f"[red]❌ Invalid YAML in plato-config.yml: {e}[/red]")
        raise typer.Exit(1)

    console.print("[green]✅ plato-config.yml: Valid YAML[/green]")

    # Check required fields
    required_config_fields = ["service", "datasets"]
    for field in required_config_fields:
        if field not in config:
            issues.append(f"plato-config.yml: Missing required field '{field}'")

    # Check datasets.base structure
    if "datasets" in config and "base" in config.get("datasets", {}):
        base = config["datasets"]["base"]

        # Check metadata
        if "metadata" not in base:
            issues.append("plato-config.yml: Missing datasets.base.metadata")
        else:
            metadata = base["metadata"]
            required_metadata = ["name", "description", "flows_path"]
            for field in required_metadata:
                if field not in metadata:
                    issues.append(f"plato-config.yml: Missing metadata.{field}")

        # Check listeners
        if "listeners" not in base:
            issues.append("plato-config.yml: Missing datasets.base.listeners")
        else:
            listeners = base["listeners"]
            if "db" not in listeners:
                issues.append("plato-config.yml: Missing listeners.db")
            else:
                db = listeners["db"]
                required_db = ["db_type", "db_host", "db_port", "db_user", "db_password", "db_database"]
                for field in required_db:
                    if field not in db:
                        issues.append(f"plato-config.yml: Missing listeners.db.{field}")

                # Check db_host is 127.0.0.1 (the compose file runs everything
                # with network_mode: host, so the DB is reached via loopback)
                if db.get("db_host") and db["db_host"] != "127.0.0.1":
                    issues.append(f"plato-config.yml: db_host should be '127.0.0.1', not '{db['db_host']}'")

    # ==========================================================================
    # Check docker-compose.yml
    # ==========================================================================

    if not Path(compose_path).exists():
        console.print(f"[red]❌ docker-compose.yml not found: {compose_path}[/red]")
        console.print("[yellow]Run sim-config skill to create it.[/yellow]")
        raise typer.Exit(1)

    try:
        with open(compose_path) as f:
            compose = yaml.safe_load(f)
    except yaml.YAMLError as e:
        console.print(f"[red]❌ Invalid YAML in docker-compose.yml: {e}[/red]")
        raise typer.Exit(1)

    console.print("[green]✅ docker-compose.yml: Valid YAML[/green]")

    services = compose.get("services", {})

    # Standard images that should NOT be used (should use Plato DB images instead)
    standard_db_images = ["postgres:", "mysql:", "mariadb:", "mongo:"]

    for svc_name, svc_config in services.items():
        # Check network_mode
        if svc_config.get("network_mode") != "host":
            issues.append(f"docker-compose.yml: Service '{svc_name}' missing 'network_mode: host'")

        # Check for standard database images
        image = svc_config.get("image", "")
        for std_img in standard_db_images:
            if image.startswith(std_img):
                # Suggest the correct Plato image
                db_type = std_img.rstrip(":")
                # NOTE(review): split(":")[1] assumes a simple name:tag form;
                # registry-with-port images would be mis-parsed — confirm
                # inputs never include a port.
                version = image.split(":")[1] if ":" in image else "latest"
                issues.append(
                    f"docker-compose.yml: Service '{svc_name}' uses standard image '{image}'\n"
                    f" Fix: Use 'public.ecr.aws/i3q4i1d7/app-sim/{db_type}-{version}:prod-latest'"
                )

        # Check db_signals volume for database containers
        if any(img in image for img in ["postgres", "mysql", "mariadb"]):
            volumes = svc_config.get("volumes", [])
            has_db_signals = any("/home/plato/db_signals:" in str(v) for v in volumes)
            if not has_db_signals:
                signal_path = "/tmp/postgres-signals" if "postgres" in image else "/tmp/mysql-signals"
                issues.append(
                    f"docker-compose.yml: Service '{svc_name}' missing db_signals volume\n"
                    f" Fix: Add '/home/plato/db_signals:{signal_path}' to volumes"
                )

            # Check healthcheck uses signal-based
            # NOTE(review): assumed nested under the db-image guard above,
            # since signal_file selection depends on `image` — confirm.
            healthcheck = svc_config.get("healthcheck", {})
            test = healthcheck.get("test", [])
            test_str = " ".join(test) if isinstance(test, list) else str(test)
            if "pg_isready" in test_str or "mysqladmin ping" in test_str:
                signal_file = "postgres.healthy" if "postgres" in image else "mysql.healthy"
                issues.append(
                    f"docker-compose.yml: Service '{svc_name}' uses standard healthcheck\n"
                    f" Fix: Use 'test -f /tmp/*-signals/{signal_file}'"
                )

    # ==========================================================================
    # Report results
    # ==========================================================================

    if issues:
        console.print("\n[red]❌ Config verification failed[/red]\n")
        console.print("[red]Issues found:[/red]")
        for issue in issues:
            console.print(f" - {issue}")
        raise typer.Exit(1)

    console.print("\n[green]✅ Config verification passed[/green]")
    console.print("[green]All configuration files are valid.[/green]")
    console.print("\n[green]Ready for next step: plato sandbox start[/green]")
|