plato-sdk-v2 2.3.11__py3-none-any.whl → 2.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
plato/v1/cli/chronos.py CHANGED
@@ -2,14 +2,21 @@
 
 from __future__ import annotations
 
+import asyncio
 import json
+import logging
+import os
+import subprocess
+import tempfile
 from pathlib import Path
+from typing import Annotated
 
 import typer
 
 from plato.v1.cli.utils import console
 
 chronos_app = typer.Typer(help="Chronos job management commands.")
+logger = logging.getLogger(__name__)
 
 
 @chronos_app.command()
@@ -178,14 +185,14 @@ def example(
             "max_attempts": 3,
             "use_backtrack": True,
             "skill_runner": {
-                "image": "claude-code:2.1.6",
+                "image": "claude-code:2.1.5",
                 "config": {"model_name": "anthropic/claude-sonnet-4-20250514", "max_turns": 100},
             },
         },
         "agent_configs": {
             "skill_runner": {
                 "agent": "claude-code",
-                "version": "2.1.6",
+                "version": "2.1.5",
                 "config": {"model_name": "anthropic/claude-sonnet-4-20250514", "max_turns": 100},
             }
         },
@@ -217,3 +224,527 @@ def example(
         console.print(f"[green]✅ Example config written to {output}[/green]")
     else:
         console.print(json_output)
+
+
+def _get_world_runner_dockerfile() -> Path:
+    """Get the path to the world runner Dockerfile template."""
+    return Path(__file__).parent / "templates" / "world-runner.Dockerfile"
+
+
+def _build_world_runner_image(platform_override: str | None = None) -> str:
+    """Build the world runner Docker image if needed."""
+    image_tag = "plato-world-runner:latest"
+    dockerfile_path = _get_world_runner_dockerfile()
+
+    if not dockerfile_path.exists():
+        raise FileNotFoundError(f"World runner Dockerfile not found: {dockerfile_path}")
+
+    docker_platform = _get_docker_platform(platform_override)
+
+    # Check if image exists
+    result = subprocess.run(
+        ["docker", "images", "-q", image_tag],
+        capture_output=True,
+        text=True,
+    )
+
+    if result.stdout.strip():
+        # Image exists
+        return image_tag
+
+    console.print("[blue]Building world runner image...[/blue]")
+
+    cmd = [
+        "docker",
+        "build",
+        "--platform",
+        docker_platform,
+        "-t",
+        image_tag,
+        "-f",
+        str(dockerfile_path),
+        str(dockerfile_path.parent),
+    ]
+
+    result = subprocess.run(cmd)
+    if result.returncode != 0:
+        raise RuntimeError("Failed to build world runner image")
+
+    console.print(f"[green]✅ Built {image_tag}[/green]")
+    return image_tag
+
+
+def _get_docker_platform(override: str | None = None) -> str:
+    """Get the appropriate Docker platform for the current system."""
+    if override:
+        return override
+
+    import platform as plat
+
+    system = plat.system()
+    machine = plat.machine().lower()
+
+    if system == "Darwin" and machine in ("arm64", "aarch64"):
+        return "linux/arm64"
+    elif system == "Linux" and machine in ("arm64", "aarch64"):
+        return "linux/arm64"
+    else:
+        return "linux/amd64"
+
+
+def _get_docker_host_ip() -> str:
+    """Get the Docker host IP address accessible from containers."""
+    try:
+        result = subprocess.run(
+            ["docker", "network", "inspect", "bridge", "--format", "{{range .IPAM.Config}}{{.Gateway}}{{end}}"],
+            capture_output=True,
+            text=True,
+        )
+        if result.returncode == 0 and result.stdout.strip():
+            return result.stdout.strip()
+    except Exception:
+        pass
+    # Fallback to common Docker gateway IP
+    return "172.17.0.1"
+
+
+def _build_agent_image(
+    agent_name: str,
+    agents_dir: Path,
+    platform_override: str | None = None,
+) -> bool:
+    """Build a local agent Docker image."""
+    agents_dir = agents_dir.expanduser().resolve()
+    agent_path = agents_dir / agent_name
+    dockerfile_path = agent_path / "Dockerfile"
+
+    if not dockerfile_path.exists():
+        logger.warning(f"No Dockerfile found for agent '{agent_name}' at {dockerfile_path}")
+        return False
+
+    image_tag = f"{agent_name}:latest"
+    docker_platform = _get_docker_platform(platform_override)
+
+    # Determine build context - check if we're in plato-client structure
+    plato_client_root = agents_dir.parent if agents_dir.name == "agents" else None
+
+    if plato_client_root and (plato_client_root / "python-sdk").exists():
+        build_context = str(plato_client_root)
+        target = "dev"
+        console.print(f"[blue]Building {image_tag} (dev mode from {build_context})...[/blue]")
+    else:
+        build_context = str(agent_path)
+        target = "prod"
+        console.print(f"[blue]Building {image_tag} (prod mode from {build_context})...[/blue]")
+
+    console.print(f"[dim]Platform: {docker_platform}[/dim]")
+
+    cmd = [
+        "docker",
+        "build",
+        "--platform",
+        docker_platform,
+        "--build-arg",
+        f"PLATFORM={docker_platform}",
+        "--target",
+        target,
+        "-t",
+        image_tag,
+        "-f",
+        str(dockerfile_path),
+        build_context,
+    ]
+
+    result = subprocess.run(cmd)
+    if result.returncode != 0:
+        console.print(f"[red]❌ Failed to build {image_tag}[/red]")
+        return False
+
+    console.print(f"[green]✅ Built {image_tag}[/green]")
+    return True
+
+
+def _extract_agent_images_from_config(config_data: dict) -> list[str]:
+    """Extract local agent image names from config data."""
+    images = []
+
+    # Check agents section
+    agents = config_data.get("agents", {})
+    for agent_config in agents.values():
+        if isinstance(agent_config, dict):
+            image = agent_config.get("image", "")
+            # Only include local images (no registry prefix)
+            if image and "/" not in image.split(":")[0]:
+                name = image.split(":")[0]
+                if name not in images:
+                    images.append(name)
+
+    # Also check direct coder/verifier fields
+    for field in ["coder", "verifier", "skill_runner"]:
+        agent_config = config_data.get(field, {})
+        if isinstance(agent_config, dict):
+            image = agent_config.get("image", "")
+            if image and "/" not in image.split(":")[0]:
+                name = image.split(":")[0]
+                if name not in images:
+                    images.append(name)
+
+    return images
+
+
+async def _create_chronos_session(
+    chronos_url: str,
+    api_key: str,
+    world_name: str,
+    world_config: dict,
+    plato_session_id: str | None = None,
+) -> dict:
+    """Create a session in Chronos."""
+    import httpx
+
+    url = f"{chronos_url.rstrip('/')}/api/sessions"
+
+    async with httpx.AsyncClient(timeout=30.0) as client:
+        response = await client.post(
+            url,
+            json={
+                "world_name": world_name,
+                "world_config": world_config,
+                "plato_session_id": plato_session_id,
+            },
+            headers={"x-api-key": api_key},
+        )
+        response.raise_for_status()
+        return response.json()
+
+
+async def _close_chronos_session(
+    chronos_url: str,
+    api_key: str,
+    session_id: str,
+) -> None:
+    """Close a Chronos session."""
+    import httpx
+
+    url = f"{chronos_url.rstrip('/')}/api/sessions/{session_id}/close"
+
+    try:
+        async with httpx.AsyncClient(timeout=30.0) as client:
+            response = await client.post(url, headers={"x-api-key": api_key})
+            response.raise_for_status()
+        logger.info(f"Closed Chronos session: {session_id}")
+    except Exception as e:
+        logger.warning(f"Failed to close Chronos session: {e}")
+
+
+async def _run_dev_impl(
+    world_dir: Path,
+    config_path: Path,
+    agents_dir: Path | None = None,
+    platform_override: str | None = None,
+    env_timeout: int = 7200,
+) -> None:
+    """Run a world locally in a Docker container.
+
+    This:
+    1. Builds local agent images if --agents-dir is provided
+    2. Creates Plato environments
+    3. Creates Chronos session for OTel traces
+    4. Runs the world in a Docker container with docker.sock mounted
+    """
+    from plato._generated.models import Envs
+    from plato.v2 import AsyncPlato
+    from plato.worlds.config import EnvConfig
+
+    # Get required env vars
+    chronos_url = os.environ.get("CHRONOS_URL", "https://chronos.plato.so")
+    api_key = os.environ.get("PLATO_API_KEY")
+
+    if not api_key:
+        raise ValueError("PLATO_API_KEY environment variable is required")
+
+    # Resolve paths
+    world_dir = world_dir.expanduser().resolve()
+    config_path = config_path.expanduser().resolve()
+
+    # Load config
+    with open(config_path) as f:
+        raw_config = json.load(f)
+
+    # Handle Chronos-style config format
+    if "world_config" in raw_config:
+        config_data = raw_config["world_config"].copy()
+        top_level_secrets = raw_config.get("secrets", {})
+        if top_level_secrets:
+            config_data.setdefault("secrets", {})
+            config_data["secrets"].update(top_level_secrets)
+    else:
+        config_data = raw_config.copy()
+
+    # Determine world name from config
+    world_package = raw_config.get("world_package", "")
+    if world_package.startswith("plato-world-"):
+        world_name = world_package[len("plato-world-") :]
+    else:
+        world_name = world_package or "unknown"
+
+    # Build local agent images if agents_dir is provided
+    if agents_dir:
+        agents_dir = agents_dir.expanduser().resolve()
+        agent_images = _extract_agent_images_from_config(config_data)
+        if agent_images:
+            console.print(f"[blue]Building agent images: {agent_images}[/blue]")
+            for agent_name in agent_images:
+                success = _build_agent_image(agent_name, agents_dir, platform_override)
+                if not success:
+                    raise RuntimeError(f"Failed to build agent image: {agent_name}")
+
+    # Import world module to get config class for environment detection
+    # We need to dynamically load the world from world_dir
+    import sys
+
+    sys.path.insert(0, str(world_dir / "src"))
+
+    try:
+        # Try to import the world module
+
+        world_module_path = list((world_dir / "src").glob("*_world/*.py"))
+        if not world_module_path:
+            world_module_path = list((world_dir / "src").glob("*/__init__.py"))
+
+        env_configs: list[EnvConfig] = []
+
+        # Try to extract env configs from world config
+        if "envs" in config_data:
+            for env_cfg in config_data["envs"]:
+                env_configs.append(Envs.model_validate(env_cfg).root)
+    finally:
+        if str(world_dir / "src") in sys.path:
+            sys.path.remove(str(world_dir / "src"))
+
+    # Create Plato client and session
+    plato = AsyncPlato()
+    session = None
+    plato_session_id: str | None = None
+    chronos_session_id: str | None = None
+
+    try:
+        if env_configs:
+            console.print(f"[blue]Creating {len(env_configs)} Plato environments...[/blue]")
+            session = await plato.sessions.create(envs=env_configs, timeout=env_timeout)
+            plato_session_id = session.session_id
+            console.print(f"[green]✅ Created Plato session: {plato_session_id}[/green]")
+
+            # Add session to config
+            config_data["plato_session"] = session.dump()
+
+        # Create Chronos session
+        console.print("[blue]Creating Chronos session...[/blue]")
+        chronos_session = await _create_chronos_session(
+            chronos_url=chronos_url,
+            api_key=api_key,
+            world_name=world_name,
+            world_config=config_data,
+            plato_session_id=plato_session_id,
+        )
+        chronos_session_id = chronos_session["public_id"]
+        console.print(f"[green]✅ Created Chronos session: {chronos_session_id}[/green]")
+        console.print(f"[dim]View at: {chronos_url}/sessions/{chronos_session_id}[/dim]")
+
+        # Add session info to config
+        config_data["session_id"] = chronos_session_id
+        # Use otel_url from backend response (uses tunnel if available), or construct it
+        otel_url = chronos_session.get("otel_url") or f"{chronos_url.rstrip('/')}/api/otel"
+        # For Docker containers, replace localhost with Docker gateway IP
+        if "localhost" in otel_url or "127.0.0.1" in otel_url:
+            docker_host_ip = _get_docker_host_ip()
+            otel_url = otel_url.replace("localhost", docker_host_ip).replace("127.0.0.1", docker_host_ip)
+        config_data["otel_url"] = otel_url
+        config_data["upload_url"] = chronos_session.get("upload_url", "")
+
+        # Write updated config to temp file
+        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
+            # Write in direct format (not Chronos format) for the world runner
+            json.dump(config_data, f)
+            container_config_path = f.name
+
+        # Create shared workspace volume for DIND compatibility
+        import uuid as uuid_mod
+
+        workspace_volume = f"plato-workspace-{uuid_mod.uuid4().hex[:8]}"
+        subprocess.run(
+            ["docker", "volume", "create", workspace_volume],
+            capture_output=True,
+        )
+        console.print(f"[blue]Created workspace volume: {workspace_volume}[/blue]")
+
+        try:
+            # Run world in Docker container
+            console.print("[blue]Starting world in Docker container...[/blue]")
+
+            docker_platform = _get_docker_platform(platform_override)
+
+            # Build world runner image if needed
+            world_runner_image = _build_world_runner_image(platform_override)
+
+            # Find python-sdk relative to world_dir (assumes plato-client structure)
+            # world_dir: plato-client/worlds/structured-execution
+            # python_sdk: plato-client/python-sdk
+            python_sdk_dir = world_dir.parent.parent / "python-sdk"
+
+            # For Docker containers, replace localhost with Docker gateway IP
+            docker_chronos_url = chronos_url
+            if "localhost" in docker_chronos_url or "127.0.0.1" in docker_chronos_url:
+                docker_host_ip = _get_docker_host_ip()
+                docker_chronos_url = docker_chronos_url.replace("localhost", docker_host_ip).replace(
+                    "127.0.0.1", docker_host_ip
+                )
+
+            docker_cmd = [
+                "docker",
+                "run",
+                "--rm",
+                "--platform",
+                docker_platform,
+                "--privileged",
+                "-v",
+                "/var/run/docker.sock:/var/run/docker.sock",
+                "-v",
+                f"{world_dir}:/world:ro",
+                "-v",
+                f"{python_sdk_dir}:/python-sdk:ro",  # Mount local SDK for dev
+                "-v",
+                f"{container_config_path}:/config.json:ro",
+                "-v",
+                f"{workspace_volume}:/tmp/workspace",  # Shared workspace volume
+                "-e",
+                f"WORLD_NAME={world_name}",
+                "-e",
+                f"WORKSPACE_VOLUME={workspace_volume}",  # Pass volume name for run_agent
+                "-e",
+                f"CHRONOS_URL={docker_chronos_url}",
+                "-e",
+                f"PLATO_API_KEY={api_key}",
+                "-e",
+                f"SESSION_ID={chronos_session_id}",
+                "-e",
+                f"OTEL_EXPORTER_OTLP_ENDPOINT={otel_url}",
+                "-e",
+                f"UPLOAD_URL={chronos_session.get('upload_url', '')}",
+            ]
+
+            # Add secrets as env vars
+            for key, value in config_data.get("secrets", {}).items():
+                docker_cmd.extend(["-e", f"{key.upper()}={value}"])
+
+            # Use world runner image
+            docker_cmd.append(world_runner_image)
+
+            console.print(f"[dim]Running: docker run ... {world_runner_image}[/dim]")
+
+            # Run and stream output
+            process = subprocess.Popen(
+                docker_cmd,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                text=True,
+            )
+
+            if process.stdout:
+                for line in process.stdout:
+                    print(line, end="")
+
+            process.wait()
+
+            if process.returncode != 0:
+                raise RuntimeError(f"World execution failed with exit code {process.returncode}")
+
+        finally:
+            os.unlink(container_config_path)
+            # Clean up workspace volume
+            subprocess.run(
+                ["docker", "volume", "rm", "-f", workspace_volume],
+                capture_output=True,
+            )
+            console.print(f"[dim]Cleaned up workspace volume: {workspace_volume}[/dim]")
+
+    finally:
+        if session:
+            console.print("[blue]Closing Plato session...[/blue]")
+            await session.close()
+        await plato.close()
+
+        if chronos_session_id:
+            await _close_chronos_session(chronos_url, api_key, chronos_session_id)
+
+
+@chronos_app.command()
+def dev(
+    config: Annotated[
+        Path,
+        typer.Argument(help="Path to config JSON file", exists=True, readable=True),
+    ],
+    world_dir: Annotated[
+        Path,
+        typer.Option("--world-dir", "-w", help="Directory containing world source code"),
+    ],
+    agents_dir: Annotated[
+        Path | None,
+        typer.Option("--agents-dir", "-a", help="Directory containing agent source code"),
+    ] = None,
+    platform: Annotated[
+        str | None,
+        typer.Option("--platform", "-p", help="Docker platform (e.g., linux/amd64)"),
+    ] = None,
+    env_timeout: Annotated[
+        int,
+        typer.Option("--env-timeout", help="Timeout for environment creation (seconds)"),
+    ] = 7200,
+):
+    """
+    Run a world locally for development/debugging.
+
+    This runs the world in a Docker container with docker.sock mounted,
+    allowing the world to spawn agent containers.
+
+    \b
+    Config format (same as Chronos launch):
+        {
+            "world_package": "plato-world-structured-execution",
+            "world_config": {
+                "sim_name": "my-task",
+                "steps": [...],
+                "agents": {
+                    "skill_runner": {
+                        "image": "computer-use:latest",
+                        "config": {"model_name": "..."}
+                    }
+                }
+            },
+            "secrets": {
+                "gemini_api_key": "...",
+                "plato_api_key": "..."
+            }
+        }
+
+    Examples:
+        plato chronos dev config.json --world-dir ~/worlds/my-world
+        plato chronos dev config.json -w ~/worlds/my-world -a ~/agents
+        plato chronos dev config.json -w ~/worlds/my-world --platform linux/amd64
+    """
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
+        datefmt="%H:%M:%S",
+    )
+
+    if not os.environ.get("PLATO_API_KEY"):
+        console.print("[red]❌ PLATO_API_KEY environment variable required[/red]")
+        raise typer.Exit(1)
+
+    try:
+        asyncio.run(_run_dev_impl(world_dir, config, agents_dir, platform, env_timeout))
+    except Exception as e:
+        console.print(f"[red]❌ Failed: {e}[/red]")
+        logger.exception("World execution failed")
+        raise typer.Exit(1)
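
The new `dev` command reads the same config format that Chronos launch accepts (documented in the command's docstring above). A minimal sketch of producing such a file with Python — the world package, image tag, and secret values here are illustrative placeholders rather than values shipped with the SDK:

    import json
    from pathlib import Path

    # Hypothetical config for `plato chronos dev`; field names follow the
    # docstring above, concrete values are placeholders.
    config = {
        "world_package": "plato-world-structured-execution",
        "world_config": {
            "sim_name": "my-task",
            "steps": [],
            "agents": {
                "skill_runner": {
                    # No registry prefix, so _extract_agent_images_from_config treats
                    # this as a local image to be built from --agents-dir.
                    "image": "computer-use:latest",
                    "config": {"model_name": "anthropic/claude-sonnet-4-20250514"},
                }
            },
        },
        # Each secret is passed to the container as an upper-cased env var,
        # e.g. plato_api_key -> PLATO_API_KEY.
        "secrets": {"plato_api_key": "..."},
    }

    Path("config.json").write_text(json.dumps(config, indent=2))
    # Then: plato chronos dev config.json --world-dir ~/worlds/my-world -a ~/agents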
plato/v1/cli/sandbox.py CHANGED
@@ -1644,6 +1644,8 @@ def sandbox_state_cmd(
             return True, False, None
         return False, False, None
 
+    all_mutations = []
+
     if session_id:
         if not json_output:
             console.print(f"[cyan]Getting state for session: {session_id}[/cyan]")
@@ -1651,6 +1653,7 @@ def sandbox_state_cmd(
         response = sessions_state.sync(
             client=client,
             session_id=session_id,
+            merge_mutations=True,
             x_api_key=api_key,
         )
         if response and response.results:
@@ -1664,6 +1667,13 @@ def sandbox_state_cmd(
                 has_error = has_error or e
                 if msg:
                     error_message = msg
+                # Extract mutations from state
+                if isinstance(result, dict) and "state" in result:
+                    state_data = result.get("state", {})
+                    if isinstance(state_data, dict):
+                        mutations = state_data.get("mutations", [])
+                        if mutations:
+                            all_mutations.extend(mutations)
     elif job_id:
         if not json_output:
             console.print(f"[cyan]Getting state for job: {job_id}[/cyan]")
@@ -1679,6 +1689,13 @@ def sandbox_state_cmd(
             has_mutations = m
             has_error = e
             error_message = msg
+            # Extract mutations from state
+            if isinstance(state_dict, dict) and "state" in state_dict:
+                state_data = state_dict.get("state", {})
+                if isinstance(state_data, dict):
+                    mutations = state_data.get("mutations", [])
+                    if mutations:
+                        all_mutations.extend(mutations)
     elif job_group_id:
         if not json_output:
             console.print(f"[cyan]Getting state for job_group: {job_group_id}[/cyan]")
@@ -1686,6 +1703,7 @@ def sandbox_state_cmd(
         response = sessions_state.sync(
             client=client,
             session_id=job_group_id,
+            merge_mutations=True,
             x_api_key=api_key,
         )
         if response and response.results:
@@ -1699,6 +1717,13 @@ def sandbox_state_cmd(
                 has_error = has_error or e
                 if msg:
                     error_message = msg
+                # Extract mutations from state
+                if isinstance(result, dict) and "state" in result:
+                    state_data = result.get("state", {})
+                    if isinstance(state_data, dict):
+                        mutations = state_data.get("mutations", [])
+                        if mutations:
+                            all_mutations.extend(mutations)
     else:
         console.print("[red]❌ .sandbox.yaml missing session_id, job_id, or job_group_id[/red]")
         raise typer.Exit(1)
@@ -1719,6 +1744,26 @@ def sandbox_state_cmd(
     elif state_dict:
         console.print("\n[bold]Environment State:[/bold]")
         console.print(json.dumps(state_dict, indent=2, default=str))
+
+        # Display mutations if any
+        if all_mutations:
+            console.print(f"\n[bold red]Mutations ({len(all_mutations)}):[/bold red]")
+            # Group by table and action for summary
+            from collections import defaultdict
+
+            table_ops: dict[str, dict[str, int]] = defaultdict(lambda: {"INSERT": 0, "UPDATE": 0, "DELETE": 0})
+            for mutation in all_mutations:
+                table = mutation.get("table_name", mutation.get("table", "unknown"))
+                op = mutation.get("action", mutation.get("operation", "UNKNOWN")).upper()
+                if op in table_ops[table]:
+                    table_ops[table][op] += 1
+
+            console.print("\n  [dim]Table INSERT UPDATE DELETE[/dim]")
+            console.print("  [dim]───────────────────────────────────────────────────────[/dim]")
+            for table, ops in sorted(table_ops.items(), key=lambda x: sum(x[1].values()), reverse=True):
+                console.print(f"  {table:<30} {ops['INSERT']:>6} {ops['UPDATE']:>6} {ops['DELETE']:>6}")
+        else:
+            console.print("\n[green]No mutations recorded[/green]")
     else:
         console.print("[yellow]No state returned[/yellow]")
 
@@ -1835,7 +1880,8 @@ def sandbox_clear_audit(
             sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -i postgres | head -1 | cut -f1) && docker exec $CONTAINER psql -U {db_user} -d {db_database} -c \"{truncate_sql}\""
         elif db_type in ("mysql", "mariadb"):
             # Find the mysql/mariadb container and exec into it
-            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -iE 'mysql|mariadb' | head -1 | cut -f1) && docker exec $CONTAINER mysql -u {db_user} -p'{db_password}' {db_database} -e 'SET FOREIGN_KEY_CHECKS=0; DELETE FROM audit_log; SET FOREIGN_KEY_CHECKS=1;'"
+            # Use mariadb client (mysql is a symlink or may not exist in newer mariadb images)
+            sql_cmd = f"CONTAINER=$(docker ps --format '{{{{.Names}}}}\\t{{{{.Image}}}}' | grep -iE 'mysql|mariadb' | head -1 | cut -f1) && docker exec $CONTAINER mariadb -u {db_user} -p'{db_password}' {db_database} -e 'SET FOREIGN_KEY_CHECKS=0; DELETE FROM audit_log; SET FOREIGN_KEY_CHECKS=1;'"
         else:
             if not json_output:
                 console.print(f"[yellow]⚠ Unsupported db_type '{db_type}' for listener '{name}'[/yellow]")
plato/v1/cli/templates/world-runner.Dockerfile ADDED
@@ -0,0 +1,27 @@
+# World runner image for plato chronos dev
+# Includes git, docker CLI, and Python dependencies
+
+FROM python:3.12-slim
+
+# Install git and docker CLI
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    git \
+    curl \
+    ca-certificates \
+    && curl -fsSL https://get.docker.com -o get-docker.sh \
+    && sh get-docker.sh \
+    && rm get-docker.sh \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install uv for fast package installation
+RUN pip install --no-cache-dir uv
+
+WORKDIR /world
+
+# Entry point expects:
+#   - /world mounted with world source
+#   - /python-sdk mounted with plato SDK source (optional, for dev)
+#   - /config.json mounted with config
+#   - WORLD_NAME env var set
+CMD ["bash", "-c", "if [ -d /python-sdk ]; then uv pip install --system /python-sdk; fi && uv pip install --system . 2>/dev/null || pip install -q . && plato-world-runner run --world $WORLD_NAME --config /config.json"]