plato-sdk-v2 2.3.0__py3-none-any.whl → 2.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plato/agents/__init__.py +25 -13
- plato/agents/artifacts.py +108 -0
- plato/agents/config.py +16 -13
- plato/agents/otel.py +261 -0
- plato/agents/runner.py +226 -122
- plato/chronos/models/__init__.py +9 -1
- plato/v1/cli/chronos.py +788 -0
- plato/v1/cli/main.py +2 -2
- plato/v1/cli/pm.py +3 -3
- plato/v1/cli/sandbox.py +246 -52
- plato/v1/cli/ssh.py +28 -9
- plato/v1/cli/templates/world-runner.Dockerfile +27 -0
- plato/v1/cli/utils.py +32 -12
- plato/v1/cli/verify.py +243 -827
- plato/worlds/README.md +2 -1
- plato/worlds/__init__.py +3 -1
- plato/worlds/base.py +462 -67
- plato/worlds/config.py +42 -3
- plato/worlds/runner.py +1 -339
- {plato_sdk_v2-2.3.0.dist-info → plato_sdk_v2-2.4.1.dist-info}/METADATA +4 -1
- {plato_sdk_v2-2.3.0.dist-info → plato_sdk_v2-2.4.1.dist-info}/RECORD +23 -27
- plato/agents/logging.py +0 -401
- plato/chronos/api/callback/__init__.py +0 -11
- plato/chronos/api/callback/push_agent_logs.py +0 -61
- plato/chronos/api/callback/update_agent_status.py +0 -57
- plato/chronos/api/callback/upload_artifacts.py +0 -59
- plato/chronos/api/callback/upload_logs_zip.py +0 -57
- plato/chronos/api/callback/upload_trajectory.py +0 -57
- plato/v1/cli/sim.py +0 -11
- {plato_sdk_v2-2.3.0.dist-info → plato_sdk_v2-2.4.1.dist-info}/WHEEL +0 -0
- {plato_sdk_v2-2.3.0.dist-info → plato_sdk_v2-2.4.1.dist-info}/entry_points.txt +0 -0
plato/v1/cli/main.py
CHANGED
@@ -9,9 +9,9 @@ import typer
 from dotenv import load_dotenv

 from plato.v1.cli.agent import agent_app
+from plato.v1.cli.chronos import chronos_app
 from plato.v1.cli.pm import pm_app
 from plato.v1.cli.sandbox import sandbox_app
-from plato.v1.cli.sim import sim_app
 from plato.v1.cli.utils import console
 from plato.v1.cli.world import world_app

@@ -70,9 +70,9 @@ app = typer.Typer(help="[bold blue]Plato CLI[/bold blue] - Manage Plato environm
 # Register sub-apps
 app.add_typer(sandbox_app, name="sandbox")
 app.add_typer(pm_app, name="pm")
-app.add_typer(sim_app, name="sim")
 app.add_typer(agent_app, name="agent")
 app.add_typer(world_app, name="world")
+app.add_typer(chronos_app, name="chronos")


 # =============================================================================
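Note: the two hunks above swap the retired sim sub-app for the new chronos one. For readers unfamiliar with typer composition, a minimal sketch of how add_typer shapes the CLI surface (the status command here is hypothetical, not part of the SDK):

    import typer

    chronos_app = typer.Typer(help="Chronos commands")

    @chronos_app.command()
    def status() -> None:  # hypothetical command, for illustration only
        typer.echo("chronos ok")

    app = typer.Typer()
    app.add_typer(chronos_app, name="chronos")  # exposed as: plato chronos status

    if __name__ == "__main__":
        app()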
plato/v1/cli/pm.py
CHANGED
@@ -753,16 +753,16 @@ def review_data(
     is_installed = "site-packages" in str(package_dir)

     if is_installed:
-        extension_source_path = package_dir / "extensions" / "envgen-recorder"
+        extension_source_path = package_dir / "extensions" / "envgen-recorder-old"
     else:
         repo_root = package_dir.parent.parent.parent  # plato-client/
-        extension_source_path = repo_root / "extensions" / "envgen-recorder"
+        extension_source_path = repo_root / "extensions" / "envgen-recorder-old"

     # Fallback to env var
     if not extension_source_path.exists():
         plato_client_dir_env = os.getenv("PLATO_CLIENT_DIR")
         if plato_client_dir_env:
-            env_path = Path(plato_client_dir_env) / "extensions" / "envgen-recorder"
+            env_path = Path(plato_client_dir_env) / "extensions" / "envgen-recorder-old"
             if env_path.exists():
                 extension_source_path = env_path

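Note: this hunk only retargets review_data at the renamed envgen-recorder-old extension; the three-tier lookup (installed package, repo checkout, then the PLATO_CLIENT_DIR environment variable) is unchanged. A condensed sketch of that resolution order, assuming the surrounding code behaves as shown:

    import os
    from pathlib import Path

    def resolve_extension(package_dir: Path, name: str = "envgen-recorder-old") -> Path | None:
        # 1-2: installed package dir, else repo checkout (plato-client/)
        if "site-packages" in str(package_dir):
            candidate = package_dir / "extensions" / name
        else:
            candidate = package_dir.parent.parent.parent / "extensions" / name
        if candidate.exists():
            return candidate
        # 3: explicit override via environment variable
        env_dir = os.getenv("PLATO_CLIENT_DIR")
        if env_dir and (Path(env_dir) / "extensions" / name).exists():
            return Path(env_dir) / "extensions" / name
        return None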
plato/v1/cli/sandbox.py
CHANGED
@@ -131,6 +131,9 @@ def sandbox_start(
     timeout: int = typer.Option(1800, "--timeout", help="VM lifetime in seconds (default: 30 minutes)"),
     no_reset: bool = typer.Option(False, "--no-reset", help="Skip initial reset after ready"),
     json_output: bool = typer.Option(False, "--json", "-j", help="Output as JSON"),
+    working_dir: Path = typer.Option(
+        None, "--working-dir", "-w", help="Working directory for .sandbox.yaml and .plato/"
+    ),
 ):
     """
     Start a sandbox environment.
@@ -377,7 +380,7 @@
     console.print("[cyan] Generating SSH key pair...[/cyan]")

     base_url = os.getenv("PLATO_BASE_URL", "https://plato.so")
-    ssh_info = setup_ssh_for_sandbox(base_url, job_id, username=ssh_username)
+    ssh_info = setup_ssh_for_sandbox(base_url, job_id, username=ssh_username, working_dir=working_dir)
     ssh_host = ssh_info["ssh_host"]
     ssh_config_path = ssh_info["config_path"]
     ssh_private_key_path = ssh_info["private_key_path"]
@@ -489,7 +492,7 @@
     # Add heartbeat PID
     if heartbeat_pid:
         state["heartbeat_pid"] = heartbeat_pid
-    save_sandbox_state(state)
+    save_sandbox_state(state, working_dir)

     # Close the plato client (heartbeat process keeps session alive)
     plato.close()
@@ -516,7 +519,6 @@
     console.print(f" [cyan]Public URL:[/cyan] {display_url}")
     if ssh_host and ssh_config_path:
         console.print(f" [cyan]SSH:[/cyan] ssh -F {ssh_config_path} {ssh_host}")
-        console.print(" [cyan]Docker:[/cyan] export DOCKER_HOST=unix:///var/run/docker-user.sock")
     console.print(f"\n[dim]State saved to {SANDBOX_FILE}[/dim]")

 except Exception as e:
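Note: the three hunks above thread the new --working-dir option through SSH setup and state persistence, so a sandbox started by an agent can keep .sandbox.yaml and .plato/ outside the current directory. A sketch of where files land, based on the utils.py and ssh.py changes later in this diff:

    from pathlib import Path

    def sandbox_paths(working_dir: Path | None) -> tuple[Path, Path]:
        # state file follows working_dir (or cwd); SSH material follows
        # working_dir/.plato (or ~/.plato) per get_plato_dir in ssh.py below
        state_file = (working_dir or Path.cwd()) / ".sandbox.yaml"
        plato_dir = (working_dir / ".plato") if working_dir else (Path.home() / ".plato")
        return state_file, plato_dir

    # default:                  (<cwd>/.sandbox.yaml, ~/.plato)
    # --working-dir /tmp/agent: (/tmp/agent/.sandbox.yaml, /tmp/agent/.plato)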
@@ -1642,6 +1644,8 @@ def sandbox_state_cmd(
             return True, False, None
         return False, False, None

+    all_mutations = []
+
     if session_id:
         if not json_output:
             console.print(f"[cyan]Getting state for session: {session_id}[/cyan]")
@@ -1649,6 +1653,7 @@
         response = sessions_state.sync(
             client=client,
             session_id=session_id,
+            merge_mutations=True,
             x_api_key=api_key,
         )
         if response and response.results:
@@ -1662,6 +1667,13 @@
                 has_error = has_error or e
                 if msg:
                     error_message = msg
+                # Extract mutations from state
+                if isinstance(result, dict) and "state" in result:
+                    state_data = result.get("state", {})
+                    if isinstance(state_data, dict):
+                        mutations = state_data.get("mutations", [])
+                        if mutations:
+                            all_mutations.extend(mutations)
     elif job_id:
         if not json_output:
             console.print(f"[cyan]Getting state for job: {job_id}[/cyan]")
@@ -1677,6 +1689,13 @@
         has_mutations = m
         has_error = e
         error_message = msg
+        # Extract mutations from state
+        if isinstance(state_dict, dict) and "state" in state_dict:
+            state_data = state_dict.get("state", {})
+            if isinstance(state_data, dict):
+                mutations = state_data.get("mutations", [])
+                if mutations:
+                    all_mutations.extend(mutations)
     elif job_group_id:
         if not json_output:
             console.print(f"[cyan]Getting state for job_group: {job_group_id}[/cyan]")
@@ -1684,6 +1703,7 @@
         response = sessions_state.sync(
             client=client,
             session_id=job_group_id,
+            merge_mutations=True,
             x_api_key=api_key,
         )
         if response and response.results:
@@ -1697,6 +1717,13 @@
                 has_error = has_error or e
                 if msg:
                     error_message = msg
+                # Extract mutations from state
+                if isinstance(result, dict) and "state" in result:
+                    state_data = result.get("state", {})
+                    if isinstance(state_data, dict):
+                        mutations = state_data.get("mutations", [])
+                        if mutations:
+                            all_mutations.extend(mutations)
     else:
         console.print("[red]❌ .sandbox.yaml missing session_id, job_id, or job_group_id[/red]")
         raise typer.Exit(1)
@@ -1717,6 +1744,26 @@
     elif state_dict:
         console.print("\n[bold]Environment State:[/bold]")
         console.print(json.dumps(state_dict, indent=2, default=str))
+
+        # Display mutations if any
+        if all_mutations:
+            console.print(f"\n[bold red]Mutations ({len(all_mutations)}):[/bold red]")
+            # Group by table and action for summary
+            from collections import defaultdict
+
+            table_ops: dict[str, dict[str, int]] = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
+            for mutation in all_mutations:
+                table = mutation.get("table_name", mutation.get("table", "unknown"))
+                op = mutation.get("action", mutation.get("operation", "UNKNOWN")).upper()
+                if op in table_ops[table]:
+                    table_ops[table][op] += 1
+
+            console.print("\n  [dim]Table                          INSERT UPDATE DELETE[/dim]")
+            console.print("  [dim]───────────────────────────────────────────────────────[/dim]")
+            for table, ops in sorted(table_ops.items(), key=lambda x: sum(x[1].values()), reverse=True):
+                console.print(f"  {table:<30} {ops['INSERT']:>6} {ops['UPDATE']:>6} {ops['DELETE']:>6}")
+        else:
+            console.print("\n[green]No mutations recorded[/green]")
     else:
         console.print("[yellow]No state returned[/yellow]")

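Note: the mutation summary above groups raw mutation dicts by table and operation, tolerating both table_name/table and action/operation key spellings. A standalone sketch of the aggregation with invented sample data:

    from collections import defaultdict

    mutations = [
        {"table_name": "users", "action": "insert"},
        {"table_name": "users", "action": "update"},
        {"table": "orders", "operation": "delete"},  # fallback keys also handled
    ]

    table_ops: dict[str, dict[str, int]] = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
    for m in mutations:
        table = m.get("table_name", m.get("table", "unknown"))
        op = m.get("action", m.get("operation", "UNKNOWN")).upper()
        if op in table_ops[table]:
            table_ops[table][op] += 1

    # print one row per table, busiest tables first
    for table, ops in sorted(table_ops.items(), key=lambda x: sum(x[1].values()), reverse=True):
        print(f"{table:<10} {ops['INSERT']:>3} {ops['UPDATE']:>3} {ops['DELETE']:>3}")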
@@ -1737,6 +1784,152 @@ def sandbox_state_cmd(
         raise typer.Exit(1)


+@sandbox_app.command(name="clear-audit")
+def sandbox_clear_audit(
+    config_path: Path | None = typer.Option(None, "--config-path", help="Path to plato-config.yml"),
+    dataset: str = typer.Option("base", "--dataset", "-d", help="Dataset name"),
+    json_output: bool = typer.Option(False, "--json", "-j", help="Output as JSON"),
+):
+    """
+    Clear the audit_log table(s) in the sandbox database.
+
+    Truncates all audit_log tables to reset mutation tracking. Use this after
+    initial setup/login to clear any mutations before running a clean login flow.
+
+    REQUIRES:
+
+        .sandbox.yaml in current directory (created by 'plato sandbox start')
+        plato-config.yml with database listener config
+
+    USAGE:
+
+        plato sandbox clear-audit           # Uses plato-config.yml in cwd
+        plato sandbox clear-audit -d base   # Specify dataset
+        plato sandbox clear-audit --json    # JSON output
+
+    WORKFLOW POSITION:
+
+        1. plato sandbox start -c
+        2. plato sandbox start-services
+        3. plato sandbox start-worker --wait
+        4. (agent does initial login/setup, generating mutations)
+        5. plato sandbox clear-audit                   ← you are here
+        6. plato sandbox flow                          ← clean login flow
+        7. plato sandbox state --verify-no-mutations   ← should pass now
+        8. plato sandbox snapshot
+    """
+    state = require_sandbox_state()
+
+    # Get SSH info
+    ssh_host = state.get("ssh_host")
+    ssh_config_path = state.get("ssh_config_path")
+
+    if not ssh_host or not ssh_config_path:
+        console.print("[red]❌ SSH not configured. Missing ssh_host or ssh_config_path in .sandbox.yaml[/red]")
+        raise typer.Exit(1)
+
+    # Find plato-config.yml
+    if not config_path:
+        config_path = Path.cwd() / "plato-config.yml"
+        if not config_path.exists():
+            config_path = Path.cwd() / "plato-config.yaml"
+    if not config_path.exists():
+        console.print("[red]❌ plato-config.yml not found[/red]")
+        raise typer.Exit(1)
+
+    with open(config_path) as f:
+        plato_config = yaml.safe_load(f)
+
+    # Get dataset config
+    datasets = plato_config.get("datasets", {})
+    if dataset not in datasets:
+        console.print(f"[red]❌ Dataset '{dataset}' not found[/red]")
+        raise typer.Exit(1)
+
+    dataset_config = datasets[dataset]
+    listeners = dataset_config.get("listeners", {})
+
+    # Find DB listeners
+    db_listeners = []
+    for name, listener in listeners.items():
+        if isinstance(listener, dict) and listener.get("type") == "db":
+            db_listeners.append((name, listener))
+
+    if not db_listeners:
+        console.print("[red]❌ No database listeners found in plato-config.yml[/red]")
+        console.print("[yellow]Expected: datasets.<dataset>.listeners.<name>.type = 'db'[/yellow]")
+        raise typer.Exit(1)
+
+    results = []
+
+    for name, db_config in db_listeners:
+        db_type = db_config.get("db_type", "postgresql").lower()
+        db_user = db_config.get("db_user", "postgres" if db_type == "postgresql" else "root")
+        db_password = db_config.get("db_password", "")
+        db_database = db_config.get("db_database", "postgres")
+
+        if not json_output:
+            console.print(f"[cyan]Clearing audit_log for listener '{name}' ({db_type})...[/cyan]")
+
+        # Build SQL command based on db_type
+        # Use docker exec since psql/mysql aren't installed on the VM directly
+        if db_type == "postgresql":
+            # Find the postgres container and truncate all audit_log tables across all schemas
+            # Use $body$ delimiter instead of $$ to avoid shell expansion
+            truncate_sql = "DO \\$body\\$ DECLARE r RECORD; BEGIN FOR r IN SELECT schemaname FROM pg_tables WHERE tablename = 'audit_log' LOOP EXECUTE format('TRUNCATE TABLE %I.audit_log RESTART IDENTITY CASCADE', r.schemaname); END LOOP; END \\$body\\$;"
+            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -i postgres | head -1 | cut -f1) && docker exec $CONTAINER psql -U {db_user} -d {db_database} -c \"{truncate_sql}\""
+        elif db_type in ("mysql", "mariadb"):
+            # Find the mysql/mariadb container and exec into it
+            # Use mariadb client (mysql is a symlink or may not exist in newer mariadb images)
+            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -iE 'mysql|mariadb' | head -1 | cut -f1) && docker exec $CONTAINER mariadb -u {db_user} -p'{db_password}' {db_database} -e 'SET FOREIGN_KEY_CHECKS=0; DELETE FROM audit_log; SET FOREIGN_KEY_CHECKS=1;'"
+        else:
+            if not json_output:
+                console.print(f"[yellow]⚠ Unsupported db_type '{db_type}' for listener '{name}'[/yellow]")
+            results.append({"listener": name, "success": False, "error": f"Unsupported db_type: {db_type}"})
+            continue

+        # Run via SSH
+        ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, sql_cmd)
+
+        if ret == 0:
+            if not json_output:
+                console.print(f"[green]✅ Cleared audit_log for '{name}'[/green]")
+            results.append({"listener": name, "success": True})
+        else:
+            if not json_output:
+                console.print(f"[red]❌ Failed to clear audit_log for '{name}': {stderr}[/red]")
+            results.append({"listener": name, "success": False, "error": stderr})
+
+    # Call state API to refresh in-memory mutation cache
+    session_id = state.get("session_id")
+    api_key = require_api_key()
+    if session_id:
+        if not json_output:
+            console.print("[dim]Refreshing state cache...[/dim]")
+        try:
+            with get_http_client() as client:
+                sessions_state.sync(
+                    client=client,
+                    session_id=session_id,
+                    x_api_key=api_key,
+                )
+        except Exception as e:
+            if not json_output:
+                console.print(f"[yellow]⚠ Failed to refresh state cache: {e}[/yellow]")
+
+    if json_output:
+        console.print(json.dumps({"results": results}))
+    else:
+        # Summary
+        success_count = sum(1 for r in results if r["success"])
+        total = len(results)
+        if success_count == total:
+            console.print(f"\n[green]✅ All {total} audit logs cleared successfully[/green]")
+        else:
+            console.print(f"\n[yellow]⚠ {success_count}/{total} audit logs cleared[/yellow]")
+            raise typer.Exit(1)
+
+
 @sandbox_app.command(name="audit-ui")
 def sandbox_audit_ui():
     """
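Note on the PostgreSQL branch above: the TRUNCATE loop is wrapped in a DO block whose dollar-quote delimiter is written as \$body\$ rather than $$, because the whole command travels through a remote shell where an unescaped $$ inside double quotes expands to the shell's PID. A minimal illustration of the escaping (command text invented):

    # inside shell double quotes, \$ reaches the remote program as a literal $
    sql = "DO \\$body\\$ BEGIN RAISE NOTICE 'hello'; END \\$body\\$;"
    cmd = f'psql -U postgres -c "{sql}"'
    print(cmd)  # the remote psql receives: DO $body$ BEGIN ... END $body$;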
@@ -1786,31 +1979,24 @@ def sandbox_audit_ui():


 def _copy_files_respecting_gitignore(src_dir: Path, dst_dir: Path) -> None:
-    """Copy files from src to dst
-
-
-
-
-
-
-    def
-        """Check if
-
-        # Skip .git
-        if
-            return
-        #
-
-        result = subprocess.run(
-            ["git", "check-ignore", "-q", str(file_path)],
-            cwd=src_dir,
-            capture_output=True,
-        )
-        # git check-ignore returns 0 if path IS ignored, 1 if NOT ignored
-        return result.returncode != 0
-    except Exception:
+    """Copy files from src to dst, skipping .git/ and .plato-hub.json.
+
+    Note: This function intentionally does NOT respect .gitignore because
+    start-services needs to copy all workspace files to the VM, including
+    config files that might be gitignored locally (like docker-compose.yml
+    in a 'base/' directory).
+    """
+
+    def should_skip(rel_path: Path) -> bool:
+        """Check if path should be skipped."""
+        parts = rel_path.parts
+        # Skip anything inside .git/ directory
+        if ".git" in parts:
+            return True
+        # Skip .plato-hub.json
+        if rel_path.name == ".plato-hub.json":
             return True
+        return False

     # Walk through source directory
     for src_path in src_dir.rglob("*"):
@@ -1820,8 +2006,8 @@ def _copy_files_respecting_gitignore(src_dir: Path, dst_dir: Path) -> None:
         if str(rel_path) == ".":
             continue

-        # Check if should
-        if
+        # Check if should skip
+        if should_skip(rel_path):
             continue

         dst_path = dst_dir / rel_path
@@ -2007,6 +2193,34 @@ def sandbox_start_services(

     try:
         with get_http_client() as client:
+
+            def start_services_on_vm(repo_dir: str) -> list[dict[str, str]]:
+                """Start docker compose services on the VM."""
+                services_started: list[dict[str, str]] = []
+                for svc_name, svc_config in services_config.items():
+                    svc_type = svc_config.get("type", "")
+                    if svc_type == "docker-compose":
+                        compose_file = svc_config.get("file", "docker-compose.yml")
+                        compose_cmd = f"cd {repo_dir} && docker compose -f {compose_file} up -d"
+
+                        if not json_output:
+                            console.print(f"[cyan]  Starting docker compose service: {svc_name}...[/cyan]")
+
+                        ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, compose_cmd)
+                        if ret != 0:
+                            console.print(f"[red]❌ Failed to start service '{svc_name}': {stderr}[/red]")
+                            raise typer.Exit(1)
+
+                        services_started.append({"name": svc_name, "type": "docker-compose", "file": compose_file})
+                        if not json_output:
+                            console.print(f"[green]  ✓ Started docker compose service: {svc_name}[/green]")
+                    else:
+                        if not json_output:
+                            console.print(
+                                f"[yellow]  ⚠ Skipped service '{svc_name}' (unknown type: {svc_type})[/yellow]"
+                            )
+                return services_started
+
             # Step 1: Get Gitea credentials
             if not json_output:
                 console.print("[cyan]Step 1: Getting Gitea credentials...[/cyan]")
@@ -2064,6 +2278,8 @@
             if not json_output:
                 console.print("[cyan]Step 4: Pushing code to hub...[/cyan]")

+            repo_dir = f"/home/plato/worktree/{service_name}"
+
             with tempfile.TemporaryDirectory(prefix="plato-hub-") as temp_dir:
                 temp_repo = Path(temp_dir) / "repo"

@@ -2148,8 +2364,6 @@
             if not json_output:
                 console.print("[cyan]Step 5: Cloning repo on VM...[/cyan]")

-            repo_dir = f"/home/plato/worktree/{service_name}"
-
             # Create worktree directory
             _run_ssh_command(ssh_config_path, ssh_host, "mkdir -p /home/plato/worktree")

@@ -2170,27 +2384,7 @@
             if not json_output:
                 console.print("[cyan]Step 6: Starting services...[/cyan]")

-            services_started =
-            for svc_name, svc_config in services_config.items():
-                svc_type = svc_config.get("type", "")
-                if svc_type == "docker-compose":
-                    compose_file = svc_config.get("file", "docker-compose.yml")
-                    compose_cmd = f"cd {repo_dir} && DOCKER_HOST=unix:///var/run/docker-user.sock docker compose -f {compose_file} up -d"
-
-                    if not json_output:
-                        console.print(f"[cyan]  Starting docker compose service: {svc_name}...[/cyan]")
-
-                    ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, compose_cmd)
-                    if ret != 0:
-                        console.print(f"[red]❌ Failed to start service '{svc_name}': {stderr}[/red]")
-                        raise typer.Exit(1)
-
-                    services_started.append({"name": svc_name, "type": "docker-compose", "file": compose_file})
-                    if not json_output:
-                        console.print(f"[green]  ✓ Started docker compose service: {svc_name}[/green]")
-                else:
-                    if not json_output:
-                        console.print(f"[yellow]  ⚠ Skipped service '{svc_name}' (unknown type: {svc_type})[/yellow]")
+            services_started = start_services_on_vm(repo_dir)

             # Output results
             if json_output:
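Note: the last three hunks are a refactor with one behavior change: repo_dir is now computed at Step 4 (before the temp-dir push) instead of Step 5, the inline Step 6 loop becomes the start_services_on_vm helper, and the compose command no longer forces DOCKER_HOST=unix:///var/run/docker-user.sock (consistent with the Docker hint removed from the sandbox start output earlier in this diff). A before/after sketch of the command sent over SSH (path and file illustrative):

    repo_dir = "/home/plato/worktree/example"  # hypothetical service checkout
    compose_file = "docker-compose.yml"

    old_cmd = f"cd {repo_dir} && DOCKER_HOST=unix:///var/run/docker-user.sock docker compose -f {compose_file} up -d"
    new_cmd = f"cd {repo_dir} && docker compose -f {compose_file} up -d"  # VM's default Docker socket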
plato/v1/cli/ssh.py
CHANGED
@@ -9,9 +9,21 @@ from cryptography.hazmat.primitives import serialization
 from cryptography.hazmat.primitives.asymmetric import ed25519


-def
-    """
-
+def get_plato_dir(working_dir: Path | str | None = None) -> Path:
+    """Get the directory for plato config/SSH files.
+
+    Args:
+        working_dir: If provided, returns working_dir/.plato (for container/agent use).
+            If None, returns ~/.plato (local development).
+    """
+    if working_dir is not None:
+        return Path(working_dir) / ".plato"
+    return Path.home() / ".plato"
+
+
+def get_next_sandbox_number(working_dir: Path | str | None = None) -> int:
+    """Find next available sandbox number by checking existing config files."""
+    plato_dir = get_plato_dir(working_dir)
     if not plato_dir.exists():
         return 1

@@ -29,13 +41,13 @@ def get_next_sandbox_number() -> int:
     return max_num + 1


-def generate_ssh_key_pair(sandbox_num: int) -> tuple[str, str]:
+def generate_ssh_key_pair(sandbox_num: int, working_dir: Path | str | None = None) -> tuple[str, str]:
     """
     Generate a new ed25519 SSH key pair for a specific sandbox.

     Returns (public_key_str, private_key_path).
     """
-    plato_dir =
+    plato_dir = get_plato_dir(working_dir)
     plato_dir.mkdir(mode=0o700, exist_ok=True)

     private_key_path = plato_dir / f"ssh_{sandbox_num}_key"
@@ -124,6 +136,7 @@ def create_ssh_config(
     username: str,
     private_key_path: str,
     sandbox_num: int,
+    working_dir: Path | str | None = None,
 ) -> str:
     """
     Create a temporary SSH config file for a specific sandbox.
@@ -160,7 +173,7 @@
     TCPKeepAlive yes
 """

-    plato_dir =
+    plato_dir = get_plato_dir(working_dir)
     plato_dir.mkdir(mode=0o700, exist_ok=True)

     config_path = plato_dir / f"ssh_{sandbox_num}.conf"
@@ -170,7 +183,12 @@
     return str(config_path)


-def setup_ssh_for_sandbox(
+def setup_ssh_for_sandbox(
+    base_url: str,
+    job_public_id: str,
+    username: str = "plato",
+    working_dir: Path | str | None = None,
+) -> dict:
     """
     Set up SSH access for a sandbox - generates keys and creates config.

@@ -178,14 +196,14 @@ def setup_ssh_for_sandbox(base_url: str, job_public_id: str, username: str = "pl

     Returns dict with: ssh_host, config_path, public_key, private_key_path
     """
-    sandbox_num = get_next_sandbox_number()
+    sandbox_num = get_next_sandbox_number(working_dir)
     ssh_host = f"sandbox-{sandbox_num}"

     # Choose random port between 2200 and 2299
     local_port = random.randint(2200, 2299)

     # Generate SSH key pair
-    public_key, private_key_path = generate_ssh_key_pair(sandbox_num)
+    public_key, private_key_path = generate_ssh_key_pair(sandbox_num, working_dir)

     # Create SSH config file
     config_path = create_ssh_config(
@@ -196,6 +214,7 @@ def setup_ssh_for_sandbox(base_url: str, job_public_id: str, username: str = "pl
         username=username,
         private_key_path=private_key_path,
         sandbox_num=sandbox_num,
+        working_dir=working_dir,
     )

     return {
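Note: every ssh.py change above funnels through the new get_plato_dir helper, which relocates keys and configs when a working directory is supplied. A tiny self-check mirroring the helper's behavior as shown in the diff:

    from pathlib import Path

    def get_plato_dir(working_dir: Path | str | None = None) -> Path:
        # per-job directory when given, else the user-level default
        return Path(working_dir) / ".plato" if working_dir is not None else Path.home() / ".plato"

    assert get_plato_dir() == Path.home() / ".plato"
    assert get_plato_dir("/work") == Path("/work/.plato")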
plato/v1/cli/templates/world-runner.Dockerfile
ADDED
@@ -0,0 +1,27 @@
+# World runner image for plato chronos dev
+# Includes git, docker CLI, and Python dependencies
+
+FROM python:3.12-slim
+
+# Install git and docker CLI
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    git \
+    curl \
+    ca-certificates \
+    && curl -fsSL https://get.docker.com -o get-docker.sh \
+    && sh get-docker.sh \
+    && rm get-docker.sh \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install uv for fast package installation
+RUN pip install --no-cache-dir uv
+
+WORKDIR /world
+
+# Entry point expects:
+# - /world mounted with world source
+# - /python-sdk mounted with plato SDK source (optional, for dev)
+# - /config.json mounted with config
+# - WORLD_NAME env var set
+CMD ["bash", "-c", "if [ -d /python-sdk ]; then uv pip install --system /python-sdk; fi && uv pip install --system . 2>/dev/null || pip install -q . && plato-world-runner run --world $WORLD_NAME --config /config.json"]
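Note: the CMD above installs the optionally mounted SDK, then the world package (preferring uv, falling back to pip), and finally hands off to plato-world-runner. A hypothetical invocation matching the mount contract described in the comments (image tag and host paths invented):

    import subprocess

    subprocess.run([
        "docker", "run", "--rm",
        "-v", "/path/to/world:/world",            # world source
        "-v", "/path/to/python-sdk:/python-sdk",  # optional dev SDK
        "-v", "/path/to/config.json:/config.json",
        "-e", "WORLD_NAME=my-world",              # hypothetical world name
        "world-runner:dev",                       # hypothetical image tag
    ], check=True)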
plato/v1/cli/utils.py
CHANGED
@@ -15,32 +15,52 @@ console = Console()
 SANDBOX_FILE = ".sandbox.yaml"


-def get_sandbox_state() -> dict | None:
-    """Read sandbox state from .sandbox.yaml
-
+def get_sandbox_state(working_dir: Path | str | None = None) -> dict | None:
+    """Read sandbox state from .sandbox.yaml.
+
+    Args:
+        working_dir: Directory containing .sandbox.yaml. If None, uses cwd.
+    """
+    base_dir = Path(working_dir) if working_dir else Path.cwd()
+    sandbox_file = base_dir / SANDBOX_FILE
     if not sandbox_file.exists():
         return None
     with open(sandbox_file) as f:
         return yaml.safe_load(f)


-def save_sandbox_state(state: dict) -> None:
-    """Save sandbox state to .sandbox.yaml
-
+def save_sandbox_state(state: dict, working_dir: Path | str | None = None) -> None:
+    """Save sandbox state to .sandbox.yaml.
+
+    Args:
+        state: State dict to save.
+        working_dir: Directory to save .sandbox.yaml in. If None, uses cwd.
+    """
+    base_dir = Path(working_dir) if working_dir else Path.cwd()
+    sandbox_file = base_dir / SANDBOX_FILE
     with open(sandbox_file, "w") as f:
         yaml.dump(state, f, default_flow_style=False)


-def remove_sandbox_state() -> None:
-    """Remove .sandbox.yaml
-
+def remove_sandbox_state(working_dir: Path | str | None = None) -> None:
+    """Remove .sandbox.yaml.
+
+    Args:
+        working_dir: Directory containing .sandbox.yaml. If None, uses cwd.
+    """
+    base_dir = Path(working_dir) if working_dir else Path.cwd()
+    sandbox_file = base_dir / SANDBOX_FILE
     if sandbox_file.exists():
         sandbox_file.unlink()


-def require_sandbox_state() -> dict:
-    """Get sandbox state or exit with error.
-
+def require_sandbox_state(working_dir: Path | str | None = None) -> dict:
+    """Get sandbox state or exit with error.
+
+    Args:
+        working_dir: Directory containing .sandbox.yaml. If None, uses cwd.
+    """
+    state = get_sandbox_state(working_dir)
     if not state:
         console.print("[red]No sandbox found in current directory[/red]")
         console.print("\n[yellow]Start a sandbox with:[/yellow]")
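Note: with these changes every state helper accepts the same optional working_dir, so callers such as sandbox start --working-dir can round-trip state from a fixed directory. A quick round-trip sketch (the state value is invented; a temp dir stands in for the working directory):

    import tempfile
    from pathlib import Path

    from plato.v1.cli.utils import get_sandbox_state, remove_sandbox_state, save_sandbox_state

    with tempfile.TemporaryDirectory() as tmp:
        work = Path(tmp)
        save_sandbox_state({"session_id": "sess_123"}, work)   # writes <tmp>/.sandbox.yaml
        assert get_sandbox_state(work) == {"session_id": "sess_123"}
        remove_sandbox_state(work)
        assert get_sandbox_state(work) is None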