plato-sdk-v2 2.0.64__py3-none-any.whl → 2.3.4__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- plato/__init__.py +0 -9
- plato/_sims_generator/__init__.py +19 -4
- plato/_sims_generator/instruction.py +203 -0
- plato/_sims_generator/templates/instruction/helpers.py.jinja +161 -0
- plato/_sims_generator/templates/instruction/init.py.jinja +43 -0
- plato/agents/__init__.py +99 -430
- plato/agents/base.py +145 -0
- plato/agents/build.py +61 -0
- plato/agents/config.py +160 -0
- plato/agents/logging.py +515 -0
- plato/agents/runner.py +191 -0
- plato/agents/trajectory.py +266 -0
- plato/chronos/models/__init__.py +1 -1
- plato/sims/cli.py +299 -123
- plato/sims/registry.py +77 -4
- plato/v1/cli/agent.py +88 -84
- plato/v1/cli/pm.py +84 -44
- plato/v1/cli/sandbox.py +241 -61
- plato/v1/cli/ssh.py +16 -4
- plato/v1/cli/verify.py +685 -0
- plato/v1/cli/world.py +3 -0
- plato/v1/flow_executor.py +21 -17
- plato/v1/models/env.py +11 -11
- plato/v1/sdk.py +2 -2
- plato/v1/sync_env.py +11 -11
- plato/v1/sync_flow_executor.py +21 -17
- plato/v1/sync_sdk.py +4 -2
- plato/v2/__init__.py +2 -0
- plato/v2/async_/environment.py +31 -0
- plato/v2/async_/session.py +72 -4
- plato/v2/sync/environment.py +31 -0
- plato/v2/sync/session.py +72 -4
- plato/worlds/README.md +71 -56
- plato/worlds/__init__.py +56 -18
- plato/worlds/base.py +578 -93
- plato/worlds/config.py +276 -74
- plato/worlds/runner.py +475 -80
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/METADATA +3 -3
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/RECORD +41 -36
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/entry_points.txt +1 -0
- plato/agents/callback.py +0 -246
- plato/world/__init__.py +0 -44
- plato/world/base.py +0 -267
- plato/world/config.py +0 -139
- plato/world/types.py +0 -47
- {plato_sdk_v2-2.0.64.dist-info → plato_sdk_v2-2.3.4.dist-info}/WHEEL +0 -0
plato/v1/cli/sandbox.py
CHANGED
@@ -17,12 +17,8 @@ from urllib.parse import quote
 
 import typer
 import yaml
-from playwright.async_api import async_playwright
 from rich.logging import RichHandler
 
-# UUID pattern for detecting artifact IDs in colon notation
-UUID_PATTERN = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)
-
 from plato._generated.api.v1.gitea import (
     create_simulator_repository,
     get_accessible_simulators,
@@ -51,6 +47,7 @@ from plato._generated.api.v2.sessions import (
     state as sessions_state,
 )
 from plato._generated.models import (
+    AppSchemasBuildModelsSetupSandboxRequest,
     AppSchemasBuildModelsSimConfigCompute,
     AppSchemasBuildModelsSimConfigDataset,
     AppSchemasBuildModelsSimConfigMetadata,
@@ -58,7 +55,6 @@ from plato._generated.models import (
     ExecuteCommandRequest,
     Flow,
     SetupRootPasswordRequest,
-    SetupSandboxRequest,
     VMManagementRequest,
 )
 from plato.v1.cli.ssh import setup_ssh_for_sandbox
@@ -74,11 +70,16 @@ from plato.v1.cli.utils import (
     require_sandbox_state,
     save_sandbox_state,
 )
+from plato.v1.cli.verify import sandbox_verify_app
 from plato.v2.async_.flow_executor import FlowExecutor
 from plato.v2.sync.client import Plato as PlatoV2
 from plato.v2.types import Env, SimConfigCompute
 
+# UUID pattern for detecting artifact IDs in colon notation
+UUID_PATTERN = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)
+
 sandbox_app = typer.Typer(help="Manage sandboxes for simulator development")
+sandbox_app.add_typer(sandbox_verify_app, name="verify")
 
 
 def format_public_url_with_router_target(public_url: str | None, service_name: str | None) -> str | None:
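The relocated UUID_PATTERN drives "colon notation" parsing, where an artifact ID prefixes a path. A minimal sketch of how such a check could work; looks_like_artifact_ref is a hypothetical helper, not a function from this file:

    import re

    UUID_PATTERN = re.compile(
        r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE
    )

    def looks_like_artifact_ref(value: str) -> bool:
        # Hypothetical: treat "<uuid>:<path>" as an artifact reference.
        prefix, _, _ = value.partition(":")
        return bool(UUID_PATTERN.match(prefix))

    assert looks_like_artifact_ref("123e4567-e89b-12d3-a456-426614174000:data.csv")
    assert not looks_like_artifact_ref("not-a-uuid:data.csv")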
@@ -423,9 +424,10 @@ def sandbox_start(
             listeners=listeners_dict,
         )
 
-        setup_request = SetupSandboxRequest(
+        dataset_value = dataset_name or state_extras.get("dataset", "base")
+        setup_request = AppSchemasBuildModelsSetupSandboxRequest(
             service=sim_name or "",
-            dataset=
+            dataset=str(dataset_value) if dataset_value else "",
             plato_dataset_config=dataset_config_obj,
             ssh_public_key=ssh_public_key,
         )
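The new dataset resolution prefers an explicit --dataset value, then the dataset recorded in .sandbox.yaml, then "base". A small illustration of the precedence; the variable values here are hypothetical:

    # Hypothetical values: state_extras is the extras mapping loaded from .sandbox.yaml.
    dataset_name = None                      # no --dataset flag given
    state_extras = {"dataset": "large"}
    dataset_value = dataset_name or state_extras.get("dataset", "base")
    assert dataset_value == "large"          # flag > saved state > "base" default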
@@ -514,7 +516,6 @@ def sandbox_start(
             console.print(f" [cyan]Public URL:[/cyan] {display_url}")
         if ssh_host and ssh_config_path:
             console.print(f" [cyan]SSH:[/cyan] ssh -F {ssh_config_path} {ssh_host}")
-            console.print(" [cyan]Docker:[/cyan] export DOCKER_HOST=unix:///var/run/docker-user.sock")
         console.print(f"\n[dim]State saved to {SANDBOX_FILE}[/dim]")
 
     except Exception as e:
@@ -1047,6 +1048,9 @@ def sandbox_start_worker(
     console.print(f"[cyan]Waiting for worker to be ready (timeout: {wait_timeout}s)...[/cyan]")
 
     session_id = state.get("session_id")
+    if not session_id:
+        console.print("[red]Session ID not found in .sandbox.yaml[/red]")
+        raise typer.Exit(1)
     start_time = time.time()
     poll_interval = 10  # seconds between polls
     worker_ready = False
@@ -1530,17 +1534,31 @@ def sandbox_flow(
             console.print(f"[red]❌ Failed to fetch flows from API: {e}[/red]")
             raise typer.Exit(1) from e
 
+    # At this point, url and flow_obj must be set (validated above)
+    if not url:
+        console.print("[red]❌ URL is not set[/red]")
+        raise typer.Exit(1)
+    if not flow_obj:
+        console.print("[red]❌ Flow object could not be loaded[/red]")
+        raise typer.Exit(1)
+
     console.print(f"[cyan]URL: {url}[/cyan]")
     console.print(f"[cyan]Flow name: {flow_name}[/cyan]")
 
+    # Capture for closure (narrowed types)
+    _url: str = url
+    _flow_obj: Flow = flow_obj
+
     async def _run():
+        from playwright.async_api import async_playwright
+
         browser = None
         try:
             async with async_playwright() as p:
                 browser = await p.chromium.launch(headless=False)
                 page = await browser.new_page()
-                await page.goto(
-                executor = FlowExecutor(page,
+                await page.goto(_url)
+                executor = FlowExecutor(page, _flow_obj, screenshots_dir, log=_flow_logger)
                 await executor.execute()
                 console.print("[green]✅ Flow executed successfully[/green]")
         except Exception as e:
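Rebinding url and flow_obj to fresh locals before defining _run() is a common pattern for satisfying type checkers: the guards narrow the optionals, and the new names carry the narrowed types into the closure, where mypy/pyright would otherwise reset the narrowing. A standalone sketch of the same idea; run_flow and its URL are illustrative only:

    import asyncio

    def run_flow(url: str | None) -> None:
        if not url:
            raise ValueError("URL is not set")
        # Bind the narrowed value to a fresh local: type checkers discard
        # narrowing inside nested function bodies, so the closure captures
        # `_url: str` instead of the still-Optional `url`.
        _url: str = url

        async def _run() -> None:
            print(f"navigating to {_url}")

        asyncio.run(_run())

    run_flow("http://localhost:8080")  # hypothetical URL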
@@ -1601,8 +1619,23 @@ def sandbox_state_cmd(
     def check_mutations(result_dict: dict) -> tuple[bool, bool, str | None]:
         """Check if result has mutations or errors. Returns (has_mutations, has_error, error_msg)."""
         if isinstance(result_dict, dict):
-
-
+            # Check for state
+            state = result_dict.get("state", {})
+            if isinstance(state, dict):
+                # Check for error wrapped in state (from API layer transformation)
+                if "error" in state:
+                    return False, True, state["error"]
+                # Check for db state
+                db_state = state.get("db", {})
+                if isinstance(db_state, dict):
+                    mutations = db_state.get("mutations", [])
+                    if mutations:
+                        return True, False, None
+                    # Also check audit_log_count
+                    audit_count = db_state.get("audit_log_count", 0)
+                    if audit_count > 0:
+                        return True, False, None
+            # Check top-level mutations as fallback
             mutations = result_dict.get("mutations", [])
             if mutations:
                 return True, False, None
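For reference, the payload shapes check_mutations now handles, sketched as literal dicts with hypothetical field values:

    # Nested shape from the state API: mutations live under state.db.
    nested = {"state": {"db": {"mutations": [{"table": "users", "op": "INSERT"}]}}}
    # Error wrapped into state by the API layer.
    errored = {"state": {"error": "worker not ready"}}
    # Legacy top-level shape, still handled as a fallback.
    flat = {"mutations": [{"table": "users", "op": "INSERT"}]}

    # check_mutations(nested)  -> (True, False, None)
    # check_mutations(errored) -> (False, True, "worker not ready")
    # check_mutations(flat)    -> (True, False, None)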
@@ -1703,6 +1736,148 @@ def sandbox_state_cmd(
         raise typer.Exit(1)
 
 
+@sandbox_app.command(name="clear-audit")
+def sandbox_clear_audit(
+    config_path: Path | None = typer.Option(None, "--config-path", help="Path to plato-config.yml"),
+    dataset: str = typer.Option("base", "--dataset", "-d", help="Dataset name"),
+    json_output: bool = typer.Option(False, "--json", "-j", help="Output as JSON"),
+):
+    """
+    Clear the audit_log table(s) in the sandbox database.
+
+    Truncates all audit_log tables to reset mutation tracking. Use this after
+    initial setup/login to clear any mutations before running a clean login flow.
+
+    REQUIRES:
+
+        .sandbox.yaml in current directory (created by 'plato sandbox start')
+        plato-config.yml with database listener config
+
+    USAGE:
+
+        plato sandbox clear-audit           # Uses plato-config.yml in cwd
+        plato sandbox clear-audit -d base   # Specify dataset
+        plato sandbox clear-audit --json    # JSON output
+
+    WORKFLOW POSITION:
+
+        1. plato sandbox start -c
+        2. plato sandbox start-services
+        3. plato sandbox start-worker --wait
+        4. (agent does initial login/setup, generating mutations)
+        5. plato sandbox clear-audit                   ← you are here
+        6. plato sandbox flow                          ← clean login flow
+        7. plato sandbox state --verify-no-mutations   ← should pass now
+        8. plato sandbox snapshot
+    """
+    state = require_sandbox_state()
+
+    # Get SSH info
+    ssh_host = state.get("ssh_host")
+    ssh_config_path = state.get("ssh_config_path")
+
+    if not ssh_host or not ssh_config_path:
+        console.print("[red]❌ SSH not configured. Missing ssh_host or ssh_config_path in .sandbox.yaml[/red]")
+        raise typer.Exit(1)
+
+    # Find plato-config.yml
+    if not config_path:
+        config_path = Path.cwd() / "plato-config.yml"
+        if not config_path.exists():
+            config_path = Path.cwd() / "plato-config.yaml"
+    if not config_path.exists():
+        console.print("[red]❌ plato-config.yml not found[/red]")
+        raise typer.Exit(1)
+
+    with open(config_path) as f:
+        plato_config = yaml.safe_load(f)
+
+    # Get dataset config
+    datasets = plato_config.get("datasets", {})
+    if dataset not in datasets:
+        console.print(f"[red]❌ Dataset '{dataset}' not found[/red]")
+        raise typer.Exit(1)
+
+    dataset_config = datasets[dataset]
+    listeners = dataset_config.get("listeners", {})
+
+    # Find DB listeners
+    db_listeners = []
+    for name, listener in listeners.items():
+        if isinstance(listener, dict) and listener.get("type") == "db":
+            db_listeners.append((name, listener))
+
+    if not db_listeners:
+        console.print("[red]❌ No database listeners found in plato-config.yml[/red]")
+        console.print("[yellow]Expected: datasets.<dataset>.listeners.<name>.type = 'db'[/yellow]")
+        raise typer.Exit(1)
+
+    results = []
+
+    for name, db_config in db_listeners:
+        db_type = db_config.get("db_type", "postgresql").lower()
+        db_host = db_config.get("db_host", "127.0.0.1")
+        db_port = db_config.get("db_port", 5432 if db_type == "postgresql" else 3306)
+        db_user = db_config.get("db_user", "postgres" if db_type == "postgresql" else "root")
+        db_password = db_config.get("db_password", "")
+        db_database = db_config.get("db_database", "postgres")
+
+        if not json_output:
+            console.print(f"[cyan]Clearing audit_log for listener '{name}' ({db_type})...[/cyan]")
+
+        # Build SQL command based on db_type
+        if db_type == "postgresql":
+            sql_cmd = f"PGPASSWORD='{db_password}' psql -h {db_host} -p {db_port} -U {db_user} -d {db_database} -c 'TRUNCATE TABLE audit_log RESTART IDENTITY CASCADE'"
+        elif db_type in ("mysql", "mariadb"):
+            sql_cmd = f"mysql -h {db_host} -P {db_port} -u {db_user} -p'{db_password}' {db_database} -e 'SET FOREIGN_KEY_CHECKS=0; DELETE FROM audit_log; SET FOREIGN_KEY_CHECKS=1;'"
+        else:
+            if not json_output:
+                console.print(f"[yellow]⚠ Unsupported db_type '{db_type}' for listener '{name}'[/yellow]")
+            results.append({"listener": name, "success": False, "error": f"Unsupported db_type: {db_type}"})
+            continue
+
+        # Run via SSH
+        ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, sql_cmd)
+
+        if ret == 0:
+            if not json_output:
+                console.print(f"[green]✅ Cleared audit_log for '{name}'[/green]")
+            results.append({"listener": name, "success": True})
+        else:
+            if not json_output:
+                console.print(f"[red]❌ Failed to clear audit_log for '{name}': {stderr}[/red]")
+            results.append({"listener": name, "success": False, "error": stderr})
+
+    # Call state API to refresh in-memory mutation cache
+    session_id = state.get("session_id")
+    api_key = require_api_key()
+    if session_id:
+        if not json_output:
+            console.print("[dim]Refreshing state cache...[/dim]")
+        try:
+            with get_http_client() as client:
+                sessions_state.sync(
+                    client=client,
+                    session_id=session_id,
+                    x_api_key=api_key,
+                )
+        except Exception as e:
+            if not json_output:
+                console.print(f"[yellow]⚠ Failed to refresh state cache: {e}[/yellow]")
+
+    if json_output:
+        console.print(json.dumps({"results": results}))
+    else:
+        # Summary
+        success_count = sum(1 for r in results if r["success"])
+        total = len(results)
+        if success_count == total:
+            console.print(f"\n[green]✅ All {total} audit logs cleared successfully[/green]")
+        else:
+            console.print(f"\n[yellow]⚠ {success_count}/{total} audit logs cleared[/yellow]")
+            raise typer.Exit(1)
+
+
 @sandbox_app.command(name="audit-ui")
 def sandbox_audit_ui():
     """
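A sketch of the --json output this command emits, assuming a single PostgreSQL listener named "db"; the values are hypothetical:

    # Success:
    {"results": [{"listener": "db", "success": True}]}
    # Failure entries carry the stderr text from the SSH command:
    {"results": [{"listener": "db", "success": False, "error": "psql: connection refused"}]}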
@@ -1752,31 +1927,24 @@ def sandbox_audit_ui():
 
 
 def _copy_files_respecting_gitignore(src_dir: Path, dst_dir: Path) -> None:
-    """Copy files from src to dst
-
-
-
-
-
-
-
-    def
-        """Check if
-
-        # Skip .git
-        if
-            return
-        #
-
-        result = subprocess.run(
-            ["git", "check-ignore", "-q", str(file_path)],
-            cwd=src_dir,
-            capture_output=True,
-        )
-        # git check-ignore returns 0 if path IS ignored, 1 if NOT ignored
-        return result.returncode != 0
-    except Exception:
+    """Copy files from src to dst, skipping .git/ and .plato-hub.json.
+
+    Note: This function intentionally does NOT respect .gitignore because
+    start-services needs to copy all workspace files to the VM, including
+    config files that might be gitignored locally (like docker-compose.yml
+    in a 'base/' directory).
+    """
+
+    def should_skip(rel_path: Path) -> bool:
+        """Check if path should be skipped."""
+        parts = rel_path.parts
+        # Skip anything inside .git/ directory
+        if ".git" in parts:
+            return True
+        # Skip .plato-hub.json
+        if rel_path.name == ".plato-hub.json":
             return True
+        return False
 
     # Walk through source directory
     for src_path in src_dir.rglob("*"):
@@ -1786,8 +1954,8 @@ def _copy_files_respecting_gitignore(src_dir: Path, dst_dir: Path) -> None:
         if str(rel_path) == ".":
             continue
 
-        # Check if should
-        if
+        # Check if should skip
+        if should_skip(rel_path):
            continue
 
         dst_path = dst_dir / rel_path
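A quick illustration of the new skip rules; the function below mirrors should_skip from the diff, and the sample paths are hypothetical:

    from pathlib import Path

    def should_skip(rel_path: Path) -> bool:
        # Same rules as above: anything under .git/, plus the hub metadata file.
        if ".git" in rel_path.parts:
            return True
        if rel_path.name == ".plato-hub.json":
            return True
        return False

    assert should_skip(Path(".git/config"))
    assert should_skip(Path(".plato-hub.json"))
    assert not should_skip(Path("base/docker-compose.yml"))  # gitignored files still copy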
@@ -1973,6 +2141,34 @@ def sandbox_start_services(
 
     try:
         with get_http_client() as client:
+
+            def start_services_on_vm(repo_dir: str) -> list[dict[str, str]]:
+                """Start docker compose services on the VM."""
+                services_started: list[dict[str, str]] = []
+                for svc_name, svc_config in services_config.items():
+                    svc_type = svc_config.get("type", "")
+                    if svc_type == "docker-compose":
+                        compose_file = svc_config.get("file", "docker-compose.yml")
+                        compose_cmd = f"cd {repo_dir} && docker compose -f {compose_file} up -d"
+
+                        if not json_output:
+                            console.print(f"[cyan]  Starting docker compose service: {svc_name}...[/cyan]")
+
+                        ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, compose_cmd)
+                        if ret != 0:
+                            console.print(f"[red]❌ Failed to start service '{svc_name}': {stderr}[/red]")
+                            raise typer.Exit(1)
+
+                        services_started.append({"name": svc_name, "type": "docker-compose", "file": compose_file})
+                        if not json_output:
+                            console.print(f"[green]  ✓ Started docker compose service: {svc_name}[/green]")
+                    else:
+                        if not json_output:
+                            console.print(
+                                f"[yellow]  ⚠ Skipped service '{svc_name}' (unknown type: {svc_type})[/yellow]"
+                            )
+                return services_started
+
             # Step 1: Get Gitea credentials
             if not json_output:
                 console.print("[cyan]Step 1: Getting Gitea credentials...[/cyan]")
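For context, the services_config mapping that start_services_on_vm iterates is read from plato-config.yml. A hypothetical shape, written as the parsed Python dict (the service names and file names are made up):

    # Hypothetical parsed `services` section of plato-config.yml:
    services_config = {
        "app": {"type": "docker-compose", "file": "docker-compose.yml"},
        "cron": {"type": "docker-compose", "file": "docker-compose.cron.yml"},
    }
    # Each docker-compose entry becomes, on the VM:
    #   cd /home/plato/worktree/<service> && docker compose -f <file> up -d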
@@ -2004,6 +2200,10 @@ def sandbox_start_services(
             if not json_output:
                 console.print("[cyan]Step 3: Getting/creating repository...[/cyan]")
 
+            if sim_id is None:
+                console.print("[red]❌ Simulator ID not available[/red]")
+                raise typer.Exit(1)
+
             if has_repo:
                 repo = get_simulator_repository.sync(client=client, simulator_id=sim_id, x_api_key=api_key)
             else:
@@ -2026,6 +2226,8 @@ def sandbox_start_services(
             if not json_output:
                 console.print("[cyan]Step 4: Pushing code to hub...[/cyan]")
 
+            repo_dir = f"/home/plato/worktree/{service_name}"
+
             with tempfile.TemporaryDirectory(prefix="plato-hub-") as temp_dir:
                 temp_repo = Path(temp_dir) / "repo"
 
@@ -2110,8 +2312,6 @@ def sandbox_start_services(
             if not json_output:
                 console.print("[cyan]Step 5: Cloning repo on VM...[/cyan]")
 
-            repo_dir = f"/home/plato/worktree/{service_name}"
-
             # Create worktree directory
             _run_ssh_command(ssh_config_path, ssh_host, "mkdir -p /home/plato/worktree")
 
@@ -2132,27 +2332,7 @@ def sandbox_start_services(
             if not json_output:
                 console.print("[cyan]Step 6: Starting services...[/cyan]")
 
-            services_started = []
-            for svc_name, svc_config in services_config.items():
-                svc_type = svc_config.get("type", "")
-                if svc_type == "docker-compose":
-                    compose_file = svc_config.get("file", "docker-compose.yml")
-                    compose_cmd = f"cd {repo_dir} && DOCKER_HOST=unix:///var/run/docker-user.sock docker compose -f {compose_file} up -d"
-
-                    if not json_output:
-                        console.print(f"[cyan]  Starting docker compose service: {svc_name}...[/cyan]")
-
-                    ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, compose_cmd)
-                    if ret != 0:
-                        console.print(f"[red]❌ Failed to start service '{svc_name}': {stderr}[/red]")
-                        raise typer.Exit(1)
-
-                    services_started.append({"name": svc_name, "type": "docker-compose", "file": compose_file})
-                    if not json_output:
-                        console.print(f"[green]  ✓ Started docker compose service: {svc_name}[/green]")
-                else:
-                    if not json_output:
-                        console.print(f"[yellow]  ⚠ Skipped service '{svc_name}' (unknown type: {svc_type})[/yellow]")
+            services_started = start_services_on_vm(repo_dir)
 
             # Output results
             if json_output:
plato/v1/cli/ssh.py
CHANGED
@@ -9,9 +9,21 @@ from cryptography.hazmat.primitives import serialization
 from cryptography.hazmat.primitives.asymmetric import ed25519
 
 
+def get_plato_dir() -> Path:
+    """Get the directory for plato config/SSH files.
+
+    Uses /workspace/.plato if /workspace exists (container environment),
+    otherwise uses ~/.plato (local development).
+    """
+    workspace = Path("/workspace")
+    if workspace.exists() and workspace.is_dir():
+        return workspace / ".plato"
+    return Path.home() / ".plato"
+
+
 def get_next_sandbox_number() -> int:
-    """Find next available sandbox number by checking existing config files
-    plato_dir = Path.home() / ".plato"
+    """Find next available sandbox number by checking existing config files."""
+    plato_dir = get_plato_dir()
     if not plato_dir.exists():
         return 1
 
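A short sketch of how the new helper resolves the directory in the two environments its docstring names; the file names shown are the per-sandbox patterns used elsewhere in this module:

    from plato.v1.cli.ssh import get_plato_dir

    # In a container where /workspace exists: Path("/workspace/.plato")
    # In local development:                   Path.home() / ".plato"
    plato_dir = get_plato_dir()
    plato_dir.mkdir(mode=0o700, exist_ok=True)  # callers create it with owner-only perms
    print(plato_dir / "ssh_1.conf")             # per-sandbox SSH configs live here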
@@ -35,7 +47,7 @@ def generate_ssh_key_pair(sandbox_num: int) -> tuple[str, str]:
 
     Returns (public_key_str, private_key_path).
     """
-    plato_dir = Path.home() / ".plato"
+    plato_dir = get_plato_dir()
     plato_dir.mkdir(mode=0o700, exist_ok=True)
 
     private_key_path = plato_dir / f"ssh_{sandbox_num}_key"
@@ -160,7 +172,7 @@ def create_ssh_config(
     TCPKeepAlive yes
 """
 
-    plato_dir = Path.home() / ".plato"
+    plato_dir = get_plato_dir()
     plato_dir.mkdir(mode=0o700, exist_ok=True)
 
     config_path = plato_dir / f"ssh_{sandbox_num}.conf"