smartify-ai 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. smartify/__init__.py +3 -0
  2. smartify/agents/__init__.py +0 -0
  3. smartify/agents/adapters/__init__.py +13 -0
  4. smartify/agents/adapters/anthropic.py +253 -0
  5. smartify/agents/adapters/openai.py +289 -0
  6. smartify/api/__init__.py +26 -0
  7. smartify/api/auth.py +352 -0
  8. smartify/api/errors.py +380 -0
  9. smartify/api/events.py +345 -0
  10. smartify/api/server.py +992 -0
  11. smartify/cli/__init__.py +1 -0
  12. smartify/cli/main.py +430 -0
  13. smartify/engine/__init__.py +64 -0
  14. smartify/engine/approval.py +479 -0
  15. smartify/engine/orchestrator.py +1365 -0
  16. smartify/engine/scheduler.py +380 -0
  17. smartify/engine/spark.py +294 -0
  18. smartify/guardrails/__init__.py +22 -0
  19. smartify/guardrails/breakers.py +409 -0
  20. smartify/models/__init__.py +61 -0
  21. smartify/models/grid.py +625 -0
  22. smartify/notifications/__init__.py +22 -0
  23. smartify/notifications/webhook.py +556 -0
  24. smartify/state/__init__.py +46 -0
  25. smartify/state/checkpoint.py +558 -0
  26. smartify/state/resume.py +301 -0
  27. smartify/state/store.py +370 -0
  28. smartify/tools/__init__.py +17 -0
  29. smartify/tools/base.py +196 -0
  30. smartify/tools/builtin/__init__.py +79 -0
  31. smartify/tools/builtin/file.py +464 -0
  32. smartify/tools/builtin/http.py +195 -0
  33. smartify/tools/builtin/shell.py +137 -0
  34. smartify/tools/mcp/__init__.py +33 -0
  35. smartify/tools/mcp/adapter.py +157 -0
  36. smartify/tools/mcp/client.py +334 -0
  37. smartify/tools/mcp/registry.py +130 -0
  38. smartify/validator/__init__.py +0 -0
  39. smartify/validator/validate.py +271 -0
  40. smartify/workspace/__init__.py +5 -0
  41. smartify/workspace/manager.py +248 -0
  42. smartify_ai-0.1.0.dist-info/METADATA +201 -0
  43. smartify_ai-0.1.0.dist-info/RECORD +46 -0
  44. smartify_ai-0.1.0.dist-info/WHEEL +4 -0
  45. smartify_ai-0.1.0.dist-info/entry_points.txt +2 -0
  46. smartify_ai-0.1.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,130 @@
1
+ """MCP registry integration for Smartify.
2
+
3
+ Functions for registering MCP server tools into the Smartify ToolRegistry.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ import logging
9
+ from typing import List, Optional, TYPE_CHECKING
10
+
11
+ if TYPE_CHECKING:
12
+ from smartify.tools.base import ToolRegistry
13
+ from smartify.tools.mcp.client import McpClient, McpServerConfig
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
async def register_mcp_server(
    registry: "ToolRegistry",
    config: "McpServerConfig",
    client: Optional["McpClient"] = None,
) -> List[str]:
    """Expose an MCP server's tools through a Smartify ToolRegistry.

    The server is contacted (connecting first when needed), its tool
    definitions are fetched, and each definition is wrapped in an
    ``McpToolWrapper`` and registered.

    Args:
        registry: Target registry that receives the wrapped tools.
        config: Connection and naming configuration for the MCP server.
        client: Optionally reuse an existing client; a fresh one is
            created from ``config`` when omitted.

    Returns:
        Names under which the tools were registered (carrying the
        configured prefix, if any).

    Raises:
        ImportError: If the MCP SDK is not installed.
        ConnectionError: If the MCP server cannot be reached.

    Example:
        from smartify.tools import ToolRegistry
        from smartify.tools.mcp import McpServerConfig, McpTransport, register_mcp_server

        registry = ToolRegistry()
        config = McpServerConfig(
            id="filesystem",
            transport=McpTransport.STDIO,
            command="npx",
            args=["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
            prefix="fs",
        )
        tool_names = await register_mcp_server(registry, config)
        # -> ["fs_read_file", "fs_write_file", "fs_list_directory", ...]
    """
    # Imported lazily so the MCP SDK is only required when MCP is used.
    from smartify.tools.mcp.adapter import McpToolWrapper
    from smartify.tools.mcp.client import McpClient

    active_client = McpClient(config) if client is None else client

    # Idempotent connect: a pre-connected client is used as-is.
    if not active_client.is_connected:
        await active_client.connect()

    definitions = await active_client.list_tools()
    if not definitions:
        logger.warning(f"MCP server '{config.id}' has no tools")
        return []

    names: List[str] = []
    for definition in definitions:
        wrapped = McpToolWrapper(
            client=active_client,
            tool_def=definition,
            prefix=config.prefix,
        )
        registry.register(wrapped)
        names.append(wrapped.name)

    logger.info(
        f"Registered {len(names)} tools from MCP server '{config.id}': "
        f"{names}"
    )

    return names
96
+
97
+
98
async def register_mcp_servers(
    registry: "ToolRegistry",
    configs: List["McpServerConfig"],
) -> dict[str, List[str]]:
    """Register tools from several MCP servers, one server at a time.

    Failures are isolated per server: a server that cannot be registered
    contributes an empty list instead of aborting the whole batch.

    Args:
        registry: Target ToolRegistry for every server's tools.
        configs: Server configurations, processed in order.

    Returns:
        Mapping of server ID to the tool names registered for it.

    Example:
        configs = [
            McpServerConfig(id="fs", transport=McpTransport.STDIO, ...),
            McpServerConfig(id="git", transport=McpTransport.STDIO, ...),
        ]
        results = await register_mcp_servers(registry, configs)
        # -> {"fs": ["fs_read_file", ...], "git": ["git_status", ...]}
    """
    outcome: dict[str, List[str]] = {}

    for server_config in configs:
        try:
            outcome[server_config.id] = await register_mcp_server(
                registry, server_config
            )
        except Exception as e:
            # Best-effort batch registration: log and move on.
            logger.error(f"Failed to register MCP server '{server_config.id}': {e}")
            outcome[server_config.id] = []

    return outcome
File without changes
@@ -0,0 +1,271 @@
1
+ """Grid YAML validation."""
2
+
3
+ from dataclasses import dataclass, field
4
+ from pathlib import Path
5
+ from typing import List, Optional, Set
6
+ import yaml
7
+ from pydantic import ValidationError
8
+
9
+ from smartify.models.grid import GridSpec, NodeKind
10
+
11
+
12
@dataclass
class ValidationResult:
    """Result of grid validation.

    Aggregates the outcome of YAML parsing, schema validation, and
    semantic checks. ``grid`` is populated only when no errors occurred.
    """
    # True when no errors were collected; warnings alone do not fail validation.
    is_valid: bool
    # Human-readable error messages; non-empty implies is_valid is False.
    errors: List[str] = field(default_factory=list)
    # Non-fatal advisories (e.g. missing guardrails).
    warnings: List[str] = field(default_factory=list)
    # Parsed GridSpec on success, None otherwise.
    grid: Optional[GridSpec] = None
19
+
20
+
21
def validate_grid_file(path: Path) -> ValidationResult:
    """Validate a Grid YAML file.

    Reads the file, parses the YAML, validates it against the GridSpec
    schema, then runs semantic checks (node references, controller count,
    and so on).

    Args:
        path: Path to the Grid YAML file.

    Returns:
        ValidationResult with collected errors and warnings; ``grid`` is
        set only when validation fully succeeds.
    """
    errors: List[str] = []
    warnings: List[str] = []

    # Read file. Use an explicit encoding so parsing does not depend on
    # the platform's locale default (YAML documents are UTF-8 by convention).
    try:
        with open(path, encoding="utf-8") as f:
            data = yaml.safe_load(f)
    except yaml.YAMLError as e:
        return ValidationResult(
            is_valid=False,
            errors=[f"YAML parse error: {e}"],
        )
    except Exception as e:
        # Covers I/O failures (missing file, permissions) and decode errors.
        return ValidationResult(
            is_valid=False,
            errors=[f"Failed to read file: {e}"],
        )

    if not data:
        return ValidationResult(
            is_valid=False,
            errors=["File is empty or contains no valid YAML"],
        )

    # Schema validation with Pydantic.
    try:
        grid = GridSpec.model_validate(data)
    except ValidationError as e:
        # Flatten Pydantic's structured errors into "loc.path: message" lines.
        for error in e.errors():
            loc = ".".join(str(l) for l in error["loc"])
            msg = error["msg"]
            errors.append(f"{loc}: {msg}")
        return ValidationResult(
            is_valid=False,
            errors=errors,
        )

    # Cross-field semantic validation on the parsed model.
    semantic_errors, semantic_warnings = validate_semantics(grid)
    errors.extend(semantic_errors)
    warnings.extend(semantic_warnings)

    return ValidationResult(
        is_valid=len(errors) == 0,
        errors=errors,
        warnings=warnings,
        grid=grid if len(errors) == 0 else None,
    )
71
+
72
+
73
def validate_semantics(grid: GridSpec) -> tuple[List[str], List[str]]:
    """Perform semantic validation on a parsed grid.

    Runs cross-field checks that the schema alone cannot express:
    duplicate node IDs, controller cardinality, parent/runAfter/edge/agent
    reference integrity, and per-kind configuration requirements.

    Args:
        grid: A GridSpec that already passed Pydantic schema validation.

    Returns:
        Tuple of (errors, warnings). Errors make the grid invalid;
        warnings are advisory only.
    """
    errors: List[str] = []
    warnings: List[str] = []

    nodes = grid.topology.nodes
    node_ids: Set[str] = set()
    node_map = {}

    # Build node map and check for duplicate IDs.
    # NOTE(review): on a duplicate ID the later node overwrites the earlier
    # entry in node_map; the duplicate is still reported as an error.
    for node in nodes:
        if node.id in node_ids:
            errors.append(f"Duplicate node ID: {node.id}")
        node_ids.add(node.id)
        node_map[node.id] = node

    # Exactly one controller is required.
    controllers = [n for n in nodes if n.kind == NodeKind.CONTROLLER]
    if len(controllers) == 0:
        errors.append("Grid must have exactly one controller node")
    elif len(controllers) > 1:
        errors.append(f"Grid must have exactly one controller node, found {len(controllers)}")

    # Validate parent references (existence and no self-parenting).
    for node in nodes:
        if node.parent:
            if node.parent not in node_ids:
                errors.append(f"Node '{node.id}' references non-existent parent '{node.parent}'")
            elif node.parent == node.id:
                errors.append(f"Node '{node.id}' cannot be its own parent")

    # The controller is the hierarchy root, so it must not have a parent.
    for node in nodes:
        if node.kind == NodeKind.CONTROLLER and node.parent:
            errors.append(f"Controller node '{node.id}' should not have a parent")

    # Relay parents should be a controller or another relay (warning only;
    # an unknown parent is already reported as an error above).
    for node in nodes:
        if node.kind == NodeKind.RELAY and node.parent:
            parent = node_map.get(node.parent)
            if parent and parent.kind not in (NodeKind.CONTROLLER, NodeKind.RELAY):
                warnings.append(
                    f"Relay '{node.id}' has parent '{node.parent}' of kind '{parent.kind}', "
                    "typically relays have controller or relay parents"
                )

    # Substation parents should be a relay or controller (warning only).
    for node in nodes:
        if node.kind == NodeKind.SUBSTATION and node.parent:
            parent = node_map.get(node.parent)
            if parent and parent.kind not in (NodeKind.CONTROLLER, NodeKind.RELAY):
                warnings.append(
                    f"Substation '{node.id}' has parent '{node.parent}' of kind '{parent.kind}', "
                    "typically substations have relay or controller parents"
                )

    # runAfter dependencies must exist and must not be self-references.
    for node in nodes:
        if node.runAfter:
            for dep in node.runAfter:
                if dep not in node_ids:
                    errors.append(f"Node '{node.id}' runAfter references non-existent node '{dep}'")
                if dep == node.id:
                    errors.append(f"Node '{node.id}' cannot depend on itself in runAfter")

    # Edge endpoints must reference existing nodes. 'to' may be a single
    # node ID or a list of IDs; normalize to a list before checking.
    if grid.topology.edges:
        for edge in grid.topology.edges:
            from_id = edge.from_
            if from_id not in node_ids:
                errors.append(f"Edge from '{from_id}' references non-existent node")

            targets = edge.to if isinstance(edge.to, list) else [edge.to]
            for target in targets:
                if target not in node_ids:
                    errors.append(f"Edge to '{target}' references non-existent node")

    # Check for cycles (simplified check)
    # TODO: Implement full cycle detection

    # Every node-level agent reference must name a declared agent.
    if grid.agents:
        agent_names = set(grid.agents.keys())
        for node in nodes:
            if node.agent and node.agent not in agent_names:
                errors.append(f"Node '{node.id}' references non-existent agent '{node.agent}'")

    # Foreach nodes need a foreach block with a body.to target.
    for node in nodes:
        if node.kind == NodeKind.FOREACH:
            if not node.foreach:
                errors.append(f"Foreach node '{node.id}' missing foreach configuration")
            elif not node.foreach.body.get("to"):
                errors.append(f"Foreach node '{node.id}' missing body.to target")

    # Aggregate nodes need an aggregate block.
    for node in nodes:
        if node.kind == NodeKind.AGGREGATE:
            if not node.aggregate:
                errors.append(f"Aggregate node '{node.id}' missing aggregate configuration")

    # Warn if no guardrails defined.
    if not grid.guardrails:
        warnings.append("No guardrails defined - grid will run without limits")

    # Warn about explicit execution mode without edges.
    if grid.topology.executionMode.value == "explicit" and not grid.topology.edges:
        warnings.append(
            "executionMode is 'explicit' but no edges defined - "
            "consider using 'parent' mode or adding edges"
        )

    return errors, warnings
186
+
187
+
188
+ def validate_grid(data: dict) -> List[str]:
189
+ """Validate a Grid specification dict.
190
+
191
+ Returns list of error messages (empty if valid).
192
+ """
193
+ errors: List[str] = []
194
+
195
+ if not data:
196
+ return ["Empty grid specification"]
197
+
198
+ # Pre-validation: check required top-level fields
199
+ if "apiVersion" not in data:
200
+ errors.append("Missing required field: apiVersion")
201
+ if "metadata" not in data:
202
+ errors.append("Missing required field: metadata")
203
+ elif isinstance(data.get("metadata"), dict):
204
+ if "id" not in data["metadata"]:
205
+ errors.append("Missing required field: metadata.id")
206
+ if "topology" not in data:
207
+ errors.append("Missing required field: topology")
208
+ elif isinstance(data.get("topology"), dict):
209
+ if "nodes" not in data["topology"]:
210
+ errors.append("Missing required field: topology.nodes")
211
+
212
+ # If pre-validation fails, return early
213
+ if errors:
214
+ return errors
215
+
216
+ try:
217
+ grid = GridSpec.model_validate(data)
218
+ except ValidationError as e:
219
+ for error in e.errors():
220
+ loc = ".".join(str(l) for l in error["loc"])
221
+ msg = error["msg"]
222
+ errors.append(f"{loc}: {msg}")
223
+ return errors
224
+
225
+ semantic_errors, _ = validate_semantics(grid)
226
+ errors.extend(semantic_errors)
227
+
228
+ return errors
229
+
230
+
231
def validate_yaml_string(yaml_str: str) -> ValidationResult:
    """Validate a Grid YAML string."""
    # Parse the raw YAML text first.
    try:
        data = yaml.safe_load(yaml_str)
    except yaml.YAMLError as e:
        return ValidationResult(
            is_valid=False,
            errors=[f"YAML parse error: {e}"],
        )

    if not data:
        return ValidationResult(
            is_valid=False,
            errors=["Empty or invalid YAML"],
        )

    errors: List[str] = []

    # Schema validation via Pydantic.
    try:
        grid = GridSpec.model_validate(data)
    except ValidationError as e:
        # Flatten structured errors into "loc.path: message" strings.
        for err in e.errors():
            loc = ".".join(str(piece) for piece in err["loc"])
            msg = err["msg"]
            errors.append(f"{loc}: {msg}")
        return ValidationResult(
            is_valid=False,
            errors=errors,
        )

    # Semantic checks on the parsed model.
    semantic_errors, semantic_warnings = validate_semantics(grid)
    errors.extend(semantic_errors)
    warnings: List[str] = list(semantic_warnings)

    ok = not errors
    return ValidationResult(
        is_valid=ok,
        errors=errors,
        warnings=warnings,
        grid=grid if ok else None,
    )
@@ -0,0 +1,5 @@
1
+ """Workspace management for isolated grid execution."""
2
+
3
+ from smartify.workspace.manager import WorkspaceManager, Workspace
4
+
5
+ __all__ = ["WorkspaceManager", "Workspace"]
@@ -0,0 +1,248 @@
1
+ """Workspace management for isolated grid execution.
2
+
3
+ Each grid run gets its own isolated workspace directory for:
4
+ - File operations (sandboxed)
5
+ - Temporary files
6
+ - Output artifacts
7
+ """
8
+
9
+ import logging
10
+ import shutil
11
+ import tempfile
12
+ from pathlib import Path
13
+ from typing import Optional
14
+ from datetime import datetime
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
class WorkspaceManager:
    """Creates and disposes of isolated per-run workspace directories.

    Every run is given its own directory beneath the base path, organised
    as ``base/grid_id/run_id``, so file activity during execution stays
    isolated. Workspaces of failed runs can be preserved for debugging.

    Usage:
        manager = WorkspaceManager(base_path="/tmp/smartify")

        # Create workspace for a run
        ws = manager.create("run-123", "my-grid")

        # Use workspace
        print(ws.path)  # /tmp/smartify/my-grid/run-123
        ws.write_file("output.txt", "Hello!")

        # Cleanup when done
        manager.cleanup("run-123")
    """

    def __init__(
        self,
        base_path: Optional[str] = None,
        auto_cleanup: bool = True,
        preserve_on_error: bool = True,
    ):
        """Initialize the manager and ensure the base directory exists.

        Args:
            base_path: Root directory for workspaces; a directory under
                the system temp dir is used when omitted.
            auto_cleanup: Automatically cleanup completed runs.
            preserve_on_error: Keep workspace if run failed (for debugging).
        """
        default_root = Path(tempfile.gettempdir()) / "smartify-workspaces"
        self.base_path = Path(base_path) if base_path else default_root
        self.base_path.mkdir(parents=True, exist_ok=True)

        self.auto_cleanup = auto_cleanup
        self.preserve_on_error = preserve_on_error
        # Active workspaces keyed by run_id.
        self._workspaces: dict[str, "Workspace"] = {}

        logger.info(f"WorkspaceManager initialized at {self.base_path}")

    def create(self, run_id: str, grid_id: str) -> "Workspace":
        """Create a new isolated workspace for a run.

        Args:
            run_id: Unique run identifier.
            grid_id: Grid identifier, used to group runs on disk.

        Returns:
            The freshly created Workspace.
        """
        # Directory layout: base/grid_id/run_id
        target = self.base_path / grid_id / run_id
        target.mkdir(parents=True, exist_ok=True)

        ws = Workspace(
            run_id=run_id,
            grid_id=grid_id,
            path=target,
        )
        self._workspaces[run_id] = ws
        logger.info(f"Created workspace for run {run_id}: {target}")
        return ws

    def get(self, run_id: str) -> Optional["Workspace"]:
        """Return the workspace for a run, or None if unknown."""
        return self._workspaces.get(run_id)

    def cleanup(self, run_id: str, force: bool = False) -> bool:
        """Delete a run's workspace directory and stop tracking it.

        Args:
            run_id: Run identifier.
            force: Delete even when preserve_on_error would keep it.

        Returns:
            True if cleaned up; False when the run is unknown, the
            workspace was preserved, or deletion failed.
        """
        ws = self._workspaces.get(run_id)
        if ws is None:
            return False

        # Keep failed runs around for inspection unless forced.
        if ws.has_error and self.preserve_on_error and not force:
            logger.info(f"Preserving workspace for failed run {run_id}")
            return False

        try:
            if ws.path.exists():
                shutil.rmtree(ws.path)
                logger.info(f"Cleaned up workspace for run {run_id}")
            del self._workspaces[run_id]
            return True
        except Exception as e:
            logger.error(f"Failed to cleanup workspace {run_id}: {e}")
            return False

    def list_workspaces(self) -> list[str]:
        """Run IDs of all currently tracked workspaces."""
        return list(self._workspaces)

    def cleanup_all(self, force: bool = False) -> int:
        """Cleanup all workspaces.

        Args:
            force: Force cleanup even for failed runs.

        Returns:
            Number of workspaces cleaned up.
        """
        # Iterate over a snapshot: cleanup() mutates the dict.
        return sum(
            1
            for run_id in list(self._workspaces)
            if self.cleanup(run_id, force=force)
        )
144
+
145
+
146
class Workspace:
    """An isolated workspace for a single grid run.

    Provides sandboxed file operations: every path handed to the helper
    methods is resolved and rejected if it would escape the workspace
    directory.
    """

    def __init__(self, run_id: str, grid_id: str, path: Path):
        self.run_id = run_id
        self.grid_id = grid_id
        self.path = path
        self.created_at = datetime.now()
        # Set via mark_error(); managers may preserve errored workspaces.
        self.has_error = False

        # Standard layout: persisted artifacts plus scratch space.
        for sub in ("output", "temp"):
            (self.path / sub).mkdir(exist_ok=True)

    @property
    def output_dir(self) -> Path:
        """Directory for output artifacts."""
        return self.path / "output"

    @property
    def temp_dir(self) -> Path:
        """Directory for temporary files."""
        return self.path / "temp"

    def resolve(self, relative_path: str) -> Path:
        """Resolve a relative path within the workspace.

        Raises ValueError if path escapes workspace.
        """
        candidate = (self.path / relative_path).resolve()

        # Security check: both sides are resolved so symlinked roots
        # (e.g. macOS /var -> /private/var) compare consistently.
        try:
            candidate.relative_to(self.path.resolve())
        except ValueError:
            raise ValueError(f"Path escapes workspace: {relative_path}")

        return candidate

    def write_file(self, path: str, content: str, encoding: str = "utf-8") -> Path:
        """Write a file within the workspace.

        Args:
            path: Relative path within workspace.
            content: File content.
            encoding: Text encoding.

        Returns:
            Absolute path to written file.
        """
        target = self.resolve(path)
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(content, encoding=encoding)
        return target

    def read_file(self, path: str, encoding: str = "utf-8") -> str:
        """Read a file from the workspace.

        Args:
            path: Relative path within workspace.
            encoding: Text encoding.

        Returns:
            File content.
        """
        return self.resolve(path).read_text(encoding=encoding)

    def exists(self, path: str) -> bool:
        """Check if a path exists within the workspace."""
        try:
            return self.resolve(path).exists()
        except ValueError:
            # Paths escaping the sandbox are reported as absent.
            return False

    def list_files(self, subdir: str = ".") -> list[Path]:
        """List files in a subdirectory."""
        target = self.resolve(subdir)
        return list(target.iterdir()) if target.is_dir() else []

    def mark_error(self) -> None:
        """Mark workspace as having an error (for preservation)."""
        self.has_error = True

    def cleanup_temp(self) -> None:
        """Cleanup temporary files."""
        scratch = self.temp_dir
        if not scratch.exists():
            return
        for entry in scratch.iterdir():
            if entry.is_file():
                entry.unlink()
            elif entry.is_dir():
                shutil.rmtree(entry)

    def __repr__(self) -> str:
        return f"Workspace(run_id={self.run_id!r}, path={self.path})"