plato-sdk-v2 2.7.0__py3-none-any.whl → 2.7.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- plato/v1/cli/pm.py +92 -6
- plato/v1/cli/sandbox.py +116 -40
- {plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/METADATA +1 -1
- {plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/RECORD +6 -6
- {plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/WHEEL +0 -0
- {plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/entry_points.txt +0 -0
plato/v1/cli/pm.py
CHANGED

@@ -6,10 +6,12 @@ import os
 import re
 import shutil
 import tempfile
+from datetime import datetime, timedelta
 from pathlib import Path
 
 import httpx
 import typer
+import yaml
 from rich.table import Table
 
 from plato._generated.api.v1.env import get_simulator_by_name, get_simulators

@@ -25,6 +27,7 @@ from plato._generated.models import (
     AddReviewRequest,
     AppApiV1SimulatorRoutesUpdateSimulatorRequest,
     Authentication,
+    Flow,
     Outcome,
     ReviewType,
     UpdateStatusRequest,

@@ -41,6 +44,7 @@ from plato.v1.cli.utils import (
 )
 from plato.v1.cli.verify import pm_verify_app
 from plato.v2.async_.client import AsyncPlato
+from plato.v2.async_.flow_executor import FlowExecutor
 from plato.v2.types import Env
 
 # =============================================================================

@@ -282,6 +286,17 @@ def review_base(
         "--skip-review",
         help="Run login flow and check state, but skip interactive review. For automated verification.",
     ),
+    local: str = typer.Option(
+        None,
+        "--local",
+        "-l",
+        help="Path to a local flow YAML file to run instead of the default login flow.",
+    ),
+    clock: str = typer.Option(
+        None,
+        "--clock",
+        help="Set fake browser time (ISO format or offset like '-30d' for 30 days ago).",
+    ),
 ):
     """Review base/environment artifact for a simulator.
 

@@ -373,16 +388,87 @@ def review_base(
         playwright = await async_playwright().start()
         browser = await playwright.chromium.launch(headless=False)
 
-
-
-
-
-
-
+        # Install fake clock if requested
+        fake_time = None
+        if clock:
+            # Parse clock option: ISO format or offset like '-30d'
+            if clock.startswith("-") and clock[-1] in "dhms":
+                # Offset format: -30d, -1h, -30m, -60s
+                unit = clock[-1]
+                amount = int(clock[1:-1])
+                if unit == "d":
+                    fake_time = datetime.now() - timedelta(days=amount)
+                elif unit == "h":
+                    fake_time = datetime.now() - timedelta(hours=amount)
+                elif unit == "m":
+                    fake_time = datetime.now() - timedelta(minutes=amount)
+                elif unit == "s":
+                    fake_time = datetime.now() - timedelta(seconds=amount)
+            else:
+                # ISO format
+                fake_time = datetime.fromisoformat(clock)
+
+            console.print(f"[cyan]Setting fake browser time to:[/cyan] {fake_time.isoformat()}")
+
+        if local:
+            # Use local flow file instead of default login
+            local_path = Path(local)
+            if not local_path.exists():
+                console.print(f"[red]❌ Local flow file not found: {local}[/red]")
+                raise typer.Exit(1)
+
+            console.print(f"[cyan]Loading local flow from: {local}[/cyan]")
+            with open(local_path) as f:
+                flow_dict = yaml.safe_load(f)
+
+            # Find login flow (or first flow if only one)
+            flows = flow_dict.get("flows", [])
+            if not flows:
+                console.print("[red]❌ No flows found in flow file[/red]")
+                raise typer.Exit(1)
+
+            # Try to find 'login' flow, otherwise use first flow
+            flow_data = next((f for f in flows if f.get("name") == "login"), flows[0])
+            flow = Flow.model_validate(flow_data)
+            console.print(f"[cyan]Running flow: {flow.name}[/cyan]")
+
+            # Create page and navigate to public URL
             page = await browser.new_page()
+
+            # Install fake clock if requested
+            if fake_time:
+                await page.clock.install(time=fake_time)
+                console.print(f"[green]✅ Fake clock installed: {fake_time.isoformat()}[/green]")
+
             if public_url:
                 await page.goto(public_url)
 
+            # Execute the flow
+            try:
+                executor = FlowExecutor(page, flow)
+                await executor.execute()
+                console.print("[green]✅ Local flow executed successfully[/green]")
+            except Exception as e:
+                console.print(f"[yellow]⚠️ Flow execution error: {e}[/yellow]")
+        else:
+            # Use default login via session.login()
+            if fake_time:
+                console.print("[yellow]⚠️ --clock with default login may not work correctly.[/yellow]")
+                console.print("[yellow] Use --local with a flow file for reliable clock testing.[/yellow]")
+            try:
+                login_result = await session.login(browser, dataset="base")
+                page = list(login_result.pages.values())[0] if login_result.pages else None
+                console.print("[green]✅ Logged into environment[/green]")
+            except Exception as e:
+                console.print(f"[yellow]⚠️ Login error: {e}[/yellow]")
+                page = await browser.new_page()
+                # Install fake clock on fallback page
+                if fake_time:
+                    await page.clock.install(time=fake_time)
+                    console.print(f"[green]✅ Fake clock installed: {fake_time.isoformat()}[/green]")
+                if public_url:
+                    await page.goto(public_url)
+
         # ALWAYS check state after login to verify no mutations
         console.print("\n[cyan]Checking environment state after login...[/cyan]")
         has_mutations = False
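For readers skimming the diff: a minimal standalone sketch of the new --clock grammar and the flow-file shape that --local consumes, distilled from the hunk above (the example flow name and empty steps list are placeholders, not part of the package):

from datetime import datetime, timedelta

import yaml


def resolve_clock(clock: str) -> datetime:
    """Mirrors the parsing added to review_base: '-30d'/'-1h'/'-30m'/'-60s' offsets, else an ISO timestamp."""
    if clock.startswith("-") and clock[-1] in "dhms":
        unit = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}[clock[-1]]
        return datetime.now() - timedelta(**{unit: int(clock[1:-1])})
    return datetime.fromisoformat(clock)


print(resolve_clock("-30d"))                 # 30 days before now
print(resolve_clock("2024-01-15T09:00:00"))  # explicit ISO timestamp

# The --local file is YAML with a top-level "flows" list; the flow named
# "login" is preferred, otherwise the first entry is used. The steps here
# are placeholders.
doc = yaml.safe_load("""
flows:
  - name: login
    steps: []
""")
flow_data = next((f for f in doc["flows"] if f.get("name") == "login"), doc["flows"][0])
print(flow_data["name"])  # -> login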
plato/v1/cli/sandbox.py
CHANGED

@@ -1,5 +1,6 @@
 """Sandbox CLI commands for Plato."""
 
+import asyncio
 import base64
 import io
 import json

@@ -13,11 +14,12 @@ import tempfile
 import time
 from datetime import datetime, timezone
 from pathlib import Path
-from urllib.parse import quote
+from urllib.parse import quote, quote_plus
 
 import typer
 import yaml
 from rich.logging import RichHandler
+from sqlalchemy import create_engine, text
 
 from plato._generated.api.v1.gitea import (
     create_simulator_repository,

@@ -82,6 +84,7 @@ from plato.v1.cli.verify import sandbox_verify_app
 from plato.v2.async_.flow_executor import FlowExecutor
 from plato.v2.sync.client import Plato as PlatoV2
 from plato.v2.types import Env, SimConfigCompute
+from plato.v2.utils.proxy_tunnel import ProxyTunnel, find_free_port
 
 # UUID pattern for detecting artifact IDs in colon notation
 UUID_PATTERN = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE)

@@ -141,7 +144,6 @@ def sandbox_start(
     disk: int = typer.Option(10240, "--disk", help="Disk in MB (blank VM)"),
     # Common options
     timeout: int = typer.Option(1800, "--timeout", help="VM lifetime in seconds (default: 30 minutes)"),
-    no_reset: bool = typer.Option(False, "--no-reset", help="Skip initial reset after ready"),
     connect_network: bool = typer.Option(
         True, "--network/--no-network", help="Connect VMs to WireGuard network for SSH access (default: enabled)"
     ),

@@ -377,11 +379,8 @@ def sandbox_start(
         if not json_output:
             console.print(f"[yellow]Could not get public URL: {e}[/yellow]")
 
-    #
-    if
-    if not json_output:
-        console.print("[cyan]Resetting environment...[/cyan]")
-    session.reset()
+    # Note: We don't reset here - start just launches the sandbox.
+    # Reset is a separate action the user can take later if needed.
 
     # Setup SSH for ALL modes (so you can SSH into any sandbox)
     ssh_private_key_path = None

@@ -1770,13 +1769,10 @@ def sandbox_clear_audit(
     -j, --json: Output results as JSON instead of formatted text
     """
     state = require_sandbox_state()
+    job_id = state.get("job_id")
 
-
-
-    ssh_config_path = state.get("ssh_config_path")
-
-    if not ssh_host or not ssh_config_path:
-        console.print("[red]❌ SSH not configured. Missing ssh_host or ssh_config_path in .sandbox.yaml[/red]")
+    if not job_id:
+        console.print("[red]❌ No job_id found in .sandbox.yaml[/red]")
         raise typer.Exit(1)
 
     # Find plato-config.yml

@@ -1813,43 +1809,99 @@ def sandbox_clear_audit(
     results = []
 
-
+    def _execute_db_cleanup(name: str, db_config: dict, local_port: int) -> dict:
+        """Execute DB cleanup using sync SQLAlchemy (called after tunnel is up)."""
         db_type = db_config.get("db_type", "postgresql").lower()
         db_user = db_config.get("db_user", "postgres" if db_type == "postgresql" else "root")
         db_password = db_config.get("db_password", "")
         db_database = db_config.get("db_database", "postgres")
 
-
-
+        # Build SQLAlchemy URL based on db_type (sync drivers)
+        user = quote_plus(db_user)
+        password = quote_plus(db_password)
+        database = quote_plus(db_database)
 
-        # Build SQL command based on db_type
-        # Use docker exec since psql/mysql aren't installed on the VM directly
         if db_type == "postgresql":
-
-            # Use $body$ delimiter instead of $$ to avoid shell expansion
-            truncate_sql = "DO \\$body\\$ DECLARE r RECORD; BEGIN FOR r IN SELECT schemaname FROM pg_tables WHERE tablename = 'audit_log' LOOP EXECUTE format('TRUNCATE TABLE %I.audit_log RESTART IDENTITY CASCADE', r.schemaname); END LOOP; END \\$body\\$;"
-            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -i postgres | head -1 | cut -f1) && docker exec $CONTAINER psql -U {db_user} -d {db_database} -c \"{truncate_sql}\""
+            db_url = f"postgresql+psycopg2://{user}:{password}@127.0.0.1:{local_port}/{database}"
         elif db_type in ("mysql", "mariadb"):
-
-            # Use mariadb client (mysql is a symlink or may not exist in newer mariadb images)
-            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -iE 'mysql|mariadb' | head -1 | cut -f1) && docker exec $CONTAINER mariadb -u {db_user} -p'{db_password}' {db_database} -e 'SET FOREIGN_KEY_CHECKS=0; DELETE FROM audit_log; SET FOREIGN_KEY_CHECKS=1;'"
+            db_url = f"mysql+pymysql://{user}:{password}@127.0.0.1:{local_port}/{database}"
         else:
-
-
-
-
+            return {"listener": name, "success": False, "error": f"Unsupported db_type: {db_type}"}
+
+        engine = create_engine(db_url, pool_pre_ping=True)
+        tables_truncated = []
+
+        with engine.begin() as conn:
+            if db_type == "postgresql":
+                # Find and truncate audit_log tables in all schemas
+                result = conn.execute(text("SELECT schemaname, tablename FROM pg_tables WHERE tablename = 'audit_log'"))
+                tables = result.fetchall()
+                for schema, table in tables:
+                    conn.execute(text(f"TRUNCATE TABLE {schema}.{table} RESTART IDENTITY CASCADE"))
+                    tables_truncated.append(f"{schema}.{table}")
+
+            elif db_type in ("mysql", "mariadb"):
+                # Find and delete from audit_log tables
+                result = conn.execute(
+                    text(
+                        "SELECT table_schema, table_name FROM information_schema.tables "
+                        "WHERE table_name = 'audit_log' AND table_schema = DATABASE()"
+                    )
+                )
+                tables = result.fetchall()
+                conn.execute(text("SET FOREIGN_KEY_CHECKS = 0"))
+                for schema, table in tables:
+                    conn.execute(text(f"DELETE FROM `{table}`"))
+                    tables_truncated.append(table)
+                conn.execute(text("SET FOREIGN_KEY_CHECKS = 1"))
+
+        engine.dispose()
+        return {"listener": name, "success": True, "tables_truncated": tables_truncated}
+
+    async def clear_audit_via_tunnel(name: str, db_config: dict) -> dict:
+        """Clear audit_log by connecting via proxy tunnel."""
+        db_type = db_config.get("db_type", "postgresql").lower()
+        db_port = db_config.get("db_port", 5432 if db_type == "postgresql" else 3306)
+
+        if not json_output:
+            console.print(f"[cyan]Clearing audit_log for listener '{name}' ({db_type})...[/cyan]")
 
-        #
-
+        # Find a free local port for the tunnel
+        local_port = find_free_port()
 
-
-
-
-
-
+        # Create tunnel and connect
+        tunnel = ProxyTunnel(
+            env_id=job_id,
+            db_port=db_port,
+            temp_password="newpass",
+            host_port=local_port,
+        )
+
+        try:
+            await tunnel.start()
+
+            # Run sync DB cleanup in a thread to avoid blocking the event loop
+            result = await asyncio.to_thread(_execute_db_cleanup, name, db_config, local_port)
+
+            if result["success"]:
+                tables_truncated = result.get("tables_truncated", [])
+                if not json_output:
+                    console.print(f"[green]✅ Cleared audit_log for '{name}' ({len(tables_truncated)} tables)[/green]")
+            return result
+
+        except Exception as e:
             if not json_output:
-                console.print(f"[red]❌ Failed to clear audit_log for '{name}': {
-
+                console.print(f"[red]❌ Failed to clear audit_log for '{name}': {e}[/red]")
+            return {"listener": name, "success": False, "error": str(e)}
+        finally:
+            await tunnel.stop()
+
+    # Run async cleanup for each listener
+    async def run_all():
+        tasks = [clear_audit_via_tunnel(name, db_config) for name, db_config in db_listeners]
+        return await asyncio.gather(*tasks)
+
+    results = asyncio.run(run_all())
 
     # Call state API to refresh in-memory mutation cache
     session_id = state.get("session_id")
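The rewritten cleanup swaps shell-escaped docker exec psql/mariadb invocations for a direct SQLAlchemy connection through the proxy tunnel. A minimal sketch of the URL-building concern for the Postgres path, assuming a tunnel forwarded to a hypothetical local port 55432 (the credentials here are made up):

from urllib.parse import quote_plus

from sqlalchemy import create_engine, text

# Passwords can contain '@', ':' or '/' that would corrupt the URL, hence
# quote_plus on every component (hypothetical credentials and port).
user, password, database = (quote_plus(s) for s in ("postgres", "p@ss:word", "postgres"))
engine = create_engine(
    f"postgresql+psycopg2://{user}:{password}@127.0.0.1:55432/{database}",
    pool_pre_ping=True,  # re-validate pooled connections, useful over a tunnel
)

# engine.begin() opens a transaction that commits on success and rolls back
# on error, so the truncates are applied atomically.
with engine.begin() as conn:
    rows = conn.execute(
        text("SELECT schemaname, tablename FROM pg_tables WHERE tablename = 'audit_log'")
    ).fetchall()
    for schema, table in rows:
        conn.execute(text(f"TRUNCATE TABLE {schema}.{table} RESTART IDENTITY CASCADE"))

Because the SQLAlchemy calls are synchronous, the diff runs them via asyncio.to_thread so the event loop keeping the tunnel alive is never blocked.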
@@ -2300,9 +2352,33 @@ def sandbox_start_services(
     if not json_output:
         console.print(f"[green]✓ Code cloned to {repo_dir}[/green]")
 
-    # Step 6:
+    # Step 6: Authenticate ECR
+    if not json_output:
+        console.print("[cyan]Step 6: Authenticating Docker with ECR...[/cyan]")
+
+    ecr_registry = "383806609161.dkr.ecr.us-west-1.amazonaws.com"
+    ecr_token_result = subprocess.run(
+        ["aws", "ecr", "get-login-password", "--region", "us-west-1"],
+        capture_output=True,
+        text=True,
+    )
+    if ecr_token_result.returncode != 0:
+        console.print(f"[red]❌ Failed to get ECR token: {ecr_token_result.stderr}[/red]")
+        raise typer.Exit(1)
+
+    ecr_token = ecr_token_result.stdout.strip()
+    docker_login_cmd = f"echo '{ecr_token}' | docker login --username AWS --password-stdin {ecr_registry}"
+    ret, stdout, stderr = _run_ssh_command(ssh_config_path, ssh_host, docker_login_cmd)
+    if ret != 0:
+        console.print(f"[red]❌ Failed to authenticate Docker with ECR: {stderr}[/red]")
+        raise typer.Exit(1)
+
+    if not json_output:
+        console.print("[green]✓ Docker authenticated with ECR[/green]")
+
+    # Step 7: Start services
     if not json_output:
-        console.print("[cyan]Step
+        console.print("[cyan]Step 7: Starting services...[/cyan]")
 
     services_started = start_services_on_vm(repo_dir)
 
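Step 6 fetches a short-lived ECR token on the operator's machine and pipes it to docker login on the VM over SSH. A local-machine equivalent of the same handshake, passing the token via stdin rather than interpolating it into a remote shell command (the registry URL is the one hard-coded in the diff; assumes the AWS CLI is configured):

import subprocess

ECR_REGISTRY = "383806609161.dkr.ecr.us-west-1.amazonaws.com"

# `aws ecr get-login-password` prints a token valid for 12 hours.
token = subprocess.run(
    ["aws", "ecr", "get-login-password", "--region", "us-west-1"],
    capture_output=True, text=True, check=True,
).stdout.strip()

# Feeding the token through stdin keeps it out of the process argument list.
subprocess.run(
    ["docker", "login", "--username", "AWS", "--password-stdin", ECR_REGISTRY],
    input=token, text=True, check=True,
)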
{plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/RECORD
CHANGED

@@ -425,9 +425,9 @@ plato/v1/cli/__init__.py,sha256=om4b7PxgsoI7rEwuQelmQkqPdhMVn53_5qEN8kvksYw,105
 plato/v1/cli/agent.py,sha256=r5Eh2e2-rUIGjK5uevnGKqScABtFK-Spomrrytj-3og,44053
 plato/v1/cli/chronos.py,sha256=lzFY0nomP1AY14i8oc8OvWOdq9ydCiE3dN2XrSupvA4,27827
 plato/v1/cli/main.py,sha256=Yqy1vn4sGyAWKNpDVcLl9pbzkMn89tYVBIxFU30ZtPk,6905
-plato/v1/cli/pm.py,sha256=
+plato/v1/cli/pm.py,sha256=zrWGwYRC4e0d_KjOLNOqVcnJVMgS_Iw5vJ2F61jLX5s,52921
 plato/v1/cli/proxy.py,sha256=WmCt0R9Gos1q0FZTQSsbloNC3-Cnx6Yb60RZF1BzC18,12178
-plato/v1/cli/sandbox.py,sha256=
+plato/v1/cli/sandbox.py,sha256=SQb5XCdYvTHEyZxOv9ECtafTdkxpjfq45pYd-m1z7k0,101506
 plato/v1/cli/ssh.py,sha256=9ypjn5kQuaTcVjsWMDIUDyehXRH9fauk_z-C3mXzYJ8,2381
 plato/v1/cli/utils.py,sha256=ba7Crv4OjDmgCv4SeB8UeZDin-iOdQw_3N6fd-g5XVk,4572
 plato/v1/cli/verify.py,sha256=D-hyiCBPL_G_9uTIEugUsq_B9y6mRVAUWILpfUx4YAo,22814

@@ -503,7 +503,7 @@ plato/worlds/base.py,sha256=-RR71bSxEFI5yydtrtq-AAbuw98CIjvmrbztqzB9oIc,31041
 plato/worlds/build_hook.py,sha256=KSoW0kqa5b7NyZ7MYOw2qsZ_2FkWuz0M3Ru7AKOP7Qw,3486
 plato/worlds/config.py,sha256=O1lUXzxp-Z_M7izslT8naXgE6XujjzwYFFrDDzUOueI,12736
 plato/worlds/runner.py,sha256=r9B2BxBae8_dM7y5cJf9xhThp_I1Qvf_tlPq2rs8qC8,4013
-plato_sdk_v2-2.7.
-plato_sdk_v2-2.7.
-plato_sdk_v2-2.7.
-plato_sdk_v2-2.7.
+plato_sdk_v2-2.7.1.dist-info/METADATA,sha256=W64dXq4E_YTbyTp5SBJJBm3sxSryOHQSB6oXU8x5_mI,8652
+plato_sdk_v2-2.7.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+plato_sdk_v2-2.7.1.dist-info/entry_points.txt,sha256=upGMbJCx6YWUTKrPoYvYUYfFCqYr75nHDwhA-45m6p8,136
+plato_sdk_v2-2.7.1.dist-info/RECORD,,
{plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/WHEEL
File without changes

{plato_sdk_v2-2.7.0.dist-info → plato_sdk_v2-2.7.1.dist-info}/entry_points.txt
File without changes
|