ai-computer-client 0.3.2__tar.gz → 0.3.4__tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/PKG-INFO +28 -5
- {ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/README.md +24 -4
- ai_computer_client-0.3.4/ai_computer/__init__.py +14 -0
- ai_computer_client-0.3.4/ai_computer/client.py +400 -0
- ai_computer_client-0.3.4/ai_computer/models.py +45 -0
- ai_computer_client-0.3.4/ai_computer/submodules/__init__.py +5 -0
- ai_computer_client-0.3.4/ai_computer/submodules/base.py +81 -0
- ai_computer_client-0.3.4/ai_computer/submodules/code.py +295 -0
- ai_computer_client-0.3.4/ai_computer/submodules/filesystem.py +438 -0
- ai_computer_client-0.3.4/ai_computer/submodules/shell.py +52 -0
- {ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/pyproject.toml +8 -2
- ai_computer_client-0.3.2/ai_computer/__init__.py +0 -4
- ai_computer_client-0.3.2/ai_computer/client.py +0 -761
- {ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/.gitignore +0 -0
- {ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/LICENSE +0 -0
{ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ai-computer-client
-Version: 0.3.2
+Version: 0.3.4
 Summary: Python client for interacting with the AI Computer service
 Project-URL: Homepage, https://github.com/ColeMurray/ai-computer-client-python
 Project-URL: Documentation, https://github.com/ColeMurray/ai-computer-client-python#readme
@@ -23,6 +23,9 @@ Provides-Extra: dev
 Requires-Dist: pytest-asyncio>=0.21.0; extra == 'dev'
 Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
 Requires-Dist: pytest>=7.0.0; extra == 'dev'
+Provides-Extra: integration
+Requires-Dist: pytest-asyncio>=0.21.0; extra == 'integration'
+Requires-Dist: pytest>=7.0.0; extra == 'integration'
 Description-Content-Type: text/markdown
 
 # AI Computer Python Client
@@ -201,14 +204,34 @@ class StreamEvent:
 
 ### Running Tests
 
-
-# Install development dependencies
-pip install -e ".[dev]"
+To run the unit tests:
 
-
+```bash
 pytest
 ```
 
+### Running Integration Tests
+
+We have a comprehensive suite of integration tests that validate the client against the live API. These tests are automatically run as part of our CI/CD pipeline before each release.
+
+To run the integration tests locally:
+
+1. Set the required environment variables:
+
+```bash
+export AI_COMPUTER_API_KEY="your_api_key_here"
+# Optional: Use a specific sandbox ID (if not provided, a new one will be created)
+export AI_COMPUTER_SANDBOX_ID="optional_sandbox_id"
+```
+
+2. Run the tests:
+
+```bash
+python -m integration_tests.test_integration
+```
+
+For more details, see the [Integration Tests README](integration_tests/README.md).
+
 ### Contributing
 
 1. Fork the repository
{ai_computer_client-0.3.2 → ai_computer_client-0.3.4}/README.md

@@ -174,14 +174,34 @@ class StreamEvent:
 
 ### Running Tests
 
-
-# Install development dependencies
-pip install -e ".[dev]"
+To run the unit tests:
 
-
+```bash
 pytest
 ```
 
+### Running Integration Tests
+
+We have a comprehensive suite of integration tests that validate the client against the live API. These tests are automatically run as part of our CI/CD pipeline before each release.
+
+To run the integration tests locally:
+
+1. Set the required environment variables:
+
+```bash
+export AI_COMPUTER_API_KEY="your_api_key_here"
+# Optional: Use a specific sandbox ID (if not provided, a new one will be created)
+export AI_COMPUTER_SANDBOX_ID="optional_sandbox_id"
+```
+
+2. Run the tests:
+
+```bash
+python -m integration_tests.test_integration
+```
+
+For more details, see the [Integration Tests README](integration_tests/README.md).
+
 ### Contributing
 
 1. Fork the repository
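The README excerpt above only describes how to invoke the integration tests; the `integration_tests` package itself is not part of this diff. As a rough illustration of how such a test could exercise the `SandboxClient` API added in 0.3.4, here is a hedged sketch — the test name, fixture logic, and the assumption that `AI_COMPUTER_API_KEY` is passed straight through as the bearer token are illustrative, not the project's actual suite:

```python
# Hypothetical integration-test sketch; not taken from the package's integration_tests module.
import os

import pytest

from ai_computer import SandboxClient


@pytest.mark.asyncio
async def test_execute_code_against_live_api():
    # AI_COMPUTER_API_KEY / AI_COMPUTER_SANDBOX_ID are the env vars named in the README.
    token = os.environ["AI_COMPUTER_API_KEY"]
    client = SandboxClient(token=token)

    sandbox_id = os.environ.get("AI_COMPUTER_SANDBOX_ID")
    if sandbox_id:
        # Reuse an existing sandbox instead of creating a new one.
        client.sandbox_id = sandbox_id
        ready = await client.wait_for_ready()
    else:
        ready = await client.setup()
    assert ready.success, ready.error

    result = await client.execute_code("print(2 + 2)")
    assert result.success, result.error

    if not sandbox_id:
        await client.cleanup()
```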
ai_computer_client-0.3.4/ai_computer/__init__.py

@@ -0,0 +1,14 @@
+from .client import SandboxClient
+from .models import SandboxResponse, StreamEvent, FileOperationResponse
+from .submodules import FileSystemModule, ShellModule, CodeModule
+
+__version__ = "0.3.3"
+__all__ = [
+    "SandboxClient",
+    "SandboxResponse",
+    "StreamEvent",
+    "FileOperationResponse",
+    "FileSystemModule",
+    "ShellModule",
+    "CodeModule"
+]
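Everything in `__all__` is re-exported at the package root, so callers can import the client, the response dataclasses, and the submodule classes from `ai_computer` directly. A minimal sketch of what that buys you:

```python
from ai_computer import SandboxClient, SandboxResponse, StreamEvent, FileOperationResponse
from ai_computer import FileSystemModule, ShellModule, CodeModule  # mainly useful for type hints

client = SandboxClient()

# The fs/shell/code properties expose the submodule instances created in __init__.
fs: FileSystemModule = client.fs
shell: ShellModule = client.shell
code: CodeModule = client.code
```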
ai_computer_client-0.3.4/ai_computer/client.py

@@ -0,0 +1,400 @@
+import aiohttp
+import json
+import asyncio
+from typing import Optional, Dict, AsyncGenerator, Union, List, BinaryIO
+from dataclasses import dataclass
+import os
+import mimetypes
+from pathlib import Path
+import logging
+
+from .models import SandboxResponse, StreamEvent, FileOperationResponse
+from .submodules import FileSystemModule, ShellModule, CodeModule
+
+# Set up logging
+logger = logging.getLogger(__name__)
+
+@dataclass
+class SandboxResponse:
+    """Response from sandbox operations.
+
+    Attributes:
+        success: Whether the operation was successful
+        data: Optional response data
+        error: Optional error message if operation failed
+    """
+    success: bool
+    data: Optional[Dict] = None
+    error: Optional[str] = None
+
+@dataclass
+class StreamEvent:
+    """Event from streaming code execution.
+
+    Attributes:
+        type: Type of event ('stdout', 'stderr', 'info', 'error', 'completed', 'keepalive')
+        data: Event data
+    """
+    type: str
+    data: str
+
+@dataclass
+class FileOperationResponse:
+    """Response from file operations.
+
+    Attributes:
+        success: Whether the operation was successful
+        filename: Name of the file
+        size: Size of the file in bytes
+        path: Path where the file was saved
+        message: Optional status message
+        error: Optional error message if operation failed
+    """
+    success: bool
+    filename: Optional[str] = None
+    size: Optional[int] = None
+    path: Optional[str] = None
+    message: Optional[str] = None
+    error: Optional[str] = None
+
+class SandboxClient:
+    """Client for interacting with the AI Sandbox service.
+
+    This client provides methods to execute Python code in an isolated sandbox environment.
+    It handles authentication, sandbox creation/deletion, and code execution.
+
+    The client is organized into submodules for different types of operations:
+    - fs: File system operations (upload, download, read, write)
+    - shell: Shell command execution
+    - code: Python code execution
+
+    Args:
+        base_url: The base URL of the sandbox service
+        token: Optional pre-existing authentication token
+    """
+
+    def __init__(
+        self,
+        base_url: str = "http://api.aicomputer.dev",
+        token: Optional[str] = None
+    ):
+        self.base_url = base_url.rstrip('/')
+        self.token = token
+        self.sandbox_id = None
+
+        # Initialize submodules
+        self._fs = FileSystemModule(self)
+        self._shell = ShellModule(self)
+        self._code = CodeModule(self)
+
+    @property
+    def fs(self) -> FileSystemModule:
+        """File system operations submodule."""
+        return self._fs
+
+    @property
+    def shell(self) -> ShellModule:
+        """Shell operations submodule."""
+        return self._shell
+
+    @property
+    def code(self) -> CodeModule:
+        """Code execution operations submodule."""
+        return self._code
+
+    async def setup(self) -> SandboxResponse:
+        """Initialize the client and create a sandbox.
+
+        This method:
+        1. Gets a development token (if not provided)
+        2. Creates a new sandbox
+        3. Waits for the sandbox to be ready
+
+        Returns:
+            SandboxResponse indicating success/failure
+        """
+        async with aiohttp.ClientSession() as session:
+            # Get development token if not provided
+            if not self.token:
+                async with session.post(f"{self.base_url}/dev/token") as response:
+                    if response.status == 200:
+                        data = await response.json()
+                        self.token = data["access_token"]
+                    else:
+                        text = await response.text()
+                        return SandboxResponse(success=False, error=text)
+
+            # Create sandbox
+            headers = {"Authorization": f"Bearer {self.token}"}
+            async with session.post(f"{self.base_url}/api/v1/sandbox/create", headers=headers) as response:
+                if response.status == 200:
+                    data = await response.json()
+                    self.sandbox_id = data["sandbox_id"]
+                else:
+                    text = await response.text()
+                    return SandboxResponse(success=False, error=text)
+
+            # Wait for sandbox to be ready
+            return await self.wait_for_ready()
+
+    async def wait_for_ready(self, max_attempts: int = 10, delay: float = 1.0) -> SandboxResponse:
+        """Wait for the sandbox to be ready.
+
+        Args:
+            max_attempts: Maximum number of attempts to check status
+            delay: Delay between attempts in seconds
+
+        Returns:
+            SandboxResponse with success=True if sandbox is ready
+        """
+        if not self.sandbox_id:
+            return SandboxResponse(
+                success=False,
+                error="Sandbox ID not set. Call setup() first."
+            )
+
+        headers = {"Authorization": f"Bearer {self.token}"}
+
+        for attempt in range(max_attempts):
+            try:
+                logger.debug(f"Checking sandbox status (attempt {attempt + 1}/{max_attempts})...")
+                async with aiohttp.ClientSession() as session:
+                    async with session.get(
+                        f"{self.base_url}/api/v1/sandbox/{self.sandbox_id}/status",
+                        headers=headers
+                    ) as response:
+                        if response.status != 200:
+                            # If we get an error, wait and try again
+                            logger.debug(f"Waiting {delay}s before next attempt...")
+                            await asyncio.sleep(delay)
+                            continue
+
+                        data = await response.json()
+                        status = data.get("status", "").lower()
+                        logger.debug(f"Current sandbox status: {status}")
+
+                        # Check for both 'ready' and 'running' status as indicators that the sandbox is ready
+                        if status == "ready" or status == "running":
+                            return SandboxResponse(success=True, data=data)
+                        elif status == "error":
+                            return SandboxResponse(
+                                success=False,
+                                error=data.get("error", "Unknown error initializing sandbox")
+                            )
+
+                        # If not ready yet, wait and try again
+                        logger.debug(f"Waiting {delay}s before next attempt...")
+                        await asyncio.sleep(delay)
+
+            except Exception as e:
+                # If we get an exception, wait and try again
+                logger.error(f"Error checking sandbox status: {str(e)}")
+                await asyncio.sleep(delay)
+
+        return SandboxResponse(
+            success=False,
+            error=f"Sandbox not ready after {max_attempts} attempts"
+        )
+
+    async def cleanup(self) -> SandboxResponse:
+        """Delete the sandbox.
+
+        Returns:
+            SandboxResponse indicating success/failure
+        """
+        if not self.token or not self.sandbox_id:
+            return SandboxResponse(success=False, error="Client not properly initialized. Call setup() first")
+
+        headers = {"Authorization": f"Bearer {self.token}"}
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.delete(
+                    f"{self.base_url}/api/v1/sandbox/{self.sandbox_id}",
+                    headers=headers
+                ) as response:
+                    if response.status != 200:
+                        text = await response.text()
+                        return SandboxResponse(success=False, error=text)
+
+                    # Reset sandbox ID
+                    self.sandbox_id = None
+                    return SandboxResponse(success=True)
+
+        except Exception as e:
+            return SandboxResponse(success=False, error=f"Connection error: {str(e)}")
+
+    # Backward compatibility methods
+
+    async def execute_code(self, code: str, timeout: int = 30) -> SandboxResponse:
+        """Execute Python code in the sandbox.
+
+        This is a backward compatibility method that delegates to the code submodule.
+
+        Args:
+            code: The Python code to execute
+            timeout: Maximum execution time in seconds
+
+        Returns:
+            SandboxResponse containing execution results
+        """
+        return await self.code.execute(code, timeout)
+
+    async def execute_code_stream(self, code: str, timeout: int = 30) -> AsyncGenerator[StreamEvent, None]:
+        """Execute Python code in the sandbox with streaming output.
+
+        This is a backward compatibility method that delegates to the code submodule.
+
+        Args:
+            code: The Python code to execute
+            timeout: Maximum execution time in seconds
+
+        Yields:
+            StreamEvent objects containing execution output
+        """
+        async for event in self.code.execute_stream(code, timeout):
+            yield event
+
+    async def execute_shell(self, command: str, args: Optional[List[str]] = None, timeout: int = 30) -> SandboxResponse:
+        """Execute a shell command in the sandbox.
+
+        This is a backward compatibility method that delegates to the shell submodule.
+
+        Args:
+            command: The shell command to execute
+            args: Optional list of arguments for the command
+            timeout: Maximum execution time in seconds
+
+        Returns:
+            SandboxResponse containing execution results
+        """
+        return await self.shell.execute(command, args, timeout)
+
+    async def upload_file(
+        self,
+        file_path: Union[str, Path],
+        destination: str = "/workspace",
+        chunk_size: int = 1024 * 1024,
+        timeout: int = 300
+    ) -> FileOperationResponse:
+        """Upload a file to the sandbox environment.
+
+        This is a backward compatibility method that delegates to the fs submodule.
+
+        Args:
+            file_path: Path to the file to upload
+            destination: Destination path in the sandbox (absolute path starting with /)
+            chunk_size: Size of chunks for reading large files
+            timeout: Maximum upload time in seconds
+
+        Returns:
+            FileOperationResponse containing upload results
+        """
+        return await self.fs.upload_file(file_path, destination, chunk_size, timeout)
+
+    async def download_file(
+        self,
+        remote_path: str,
+        local_path: Optional[Union[str, Path]] = None,
+        timeout: int = 300
+    ) -> FileOperationResponse:
+        """Download a file from the sandbox.
+
+        This is a backward compatibility method that delegates to the fs submodule.
+
+        Args:
+            remote_path: Path to the file in the sandbox
+            local_path: Local path to save the file (if None, uses the filename from remote_path)
+            timeout: Maximum download time in seconds
+
+        Returns:
+            FileOperationResponse containing download results
+        """
+        return await self.fs.download_file(remote_path, local_path, timeout)
+
+    async def upload_bytes(
+        self,
+        content: Union[bytes, BinaryIO],
+        filename: str,
+        destination: str = "/workspace",
+        content_type: Optional[str] = None,
+        timeout: int = 300
+    ) -> FileOperationResponse:
+        """Upload bytes or a file-like object to the sandbox environment.
+
+        This is a backward compatibility method that delegates to the fs submodule.
+
+        Args:
+            content: Bytes or file-like object to upload
+            filename: Name to give the file in the sandbox
+            destination: Destination path in the sandbox (absolute path starting with /)
+            content_type: Optional MIME type (will be guessed from filename if not provided)
+            timeout: Maximum upload time in seconds
+
+        Returns:
+            FileOperationResponse containing upload results
+        """
+        # Create a temporary file with the content
+        import tempfile
+        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+            if isinstance(content, bytes):
+                temp_file.write(content)
+            else:
+                # Ensure we're at the start of the file
+                if hasattr(content, 'seek'):
+                    content.seek(0)
+                # Read and write in chunks to handle large files
+                chunk = content.read(1024 * 1024)  # 1MB chunks
+                while chunk:
+                    temp_file.write(chunk)
+                    chunk = content.read(1024 * 1024)
+
+        try:
+            # Upload the temporary file
+            temp_path = Path(temp_file.name)
+            result = await self.fs.upload_file(
+                file_path=temp_path,
+                destination=os.path.join(destination, filename),
+                timeout=timeout
+            )
+
+            # If successful, update the filename in the response
+            if result.success:
+                result.filename = filename
+
+            return result
+        finally:
+            # Clean up the temporary file
+            if os.path.exists(temp_file.name):
+                os.unlink(temp_file.name)
+
+    async def download_bytes(self, remote_path: str, timeout: Optional[float] = None) -> Union[bytes, FileOperationResponse]:
+        """
+        Download a file from the sandbox into memory.
+
+        Args:
+            remote_path: Path to the file in the sandbox.
+            timeout: Timeout in seconds for the operation.
+
+        Returns:
+            bytes: The file contents as bytes if successful.
+            FileOperationResponse: On failure, returns a FileOperationResponse with error details.
+        """
+        await self.wait_for_ready()
+
+        try:
+            response = await self.fs.download_bytes(remote_path, timeout=timeout or 300)
+            if response.success:
+                return response.data.get('content')
+            else:
+                return FileOperationResponse(
+                    success=False,
+                    error=response.error or "Failed to download file"
+                )
+        except Exception as e:
+            return FileOperationResponse(
+                success=False,
+                error=f"Error downloading file: {str(e)}"
+            )
+
+# Additional backward compatibility methods can be added as needed
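Putting the new client together end to end: the sketch below exercises the lifecycle and the backward-compatible wrappers defined above (`setup`, `execute_code`, `execute_code_stream`, `execute_shell`, `upload_bytes`, `download_bytes`, `cleanup`). The submodule methods they delegate to (`code.execute`, `shell.execute`, `fs.upload_file`, `fs.download_bytes`) live in files listed in this release but not reproduced here, and the exact shape of the `data` payloads is not shown in this diff, so treat the prints as illustrative:

```python
import asyncio

from ai_computer import SandboxClient


async def main() -> None:
    client = SandboxClient()           # defaults to http://api.aicomputer.dev
    setup = await client.setup()       # dev token + sandbox creation + readiness poll
    if not setup.success:
        raise RuntimeError(setup.error)

    # Simple execution via the backward-compatible wrapper (delegates to client.code.execute).
    result = await client.execute_code("print('hello from the sandbox')")
    print(result.success, result.data)

    # Streaming execution yields StreamEvent(type=..., data=...) objects.
    async for event in client.execute_code_stream("for i in range(3): print(i)"):
        if event.type in ("stdout", "stderr"):
            print(f"[{event.type}] {event.data}")

    # Shell command (delegates to client.shell.execute).
    ls = await client.execute_shell("ls", ["-la", "/workspace"])
    print(ls.success)

    # Upload in-memory bytes, then read them back from the default /workspace destination.
    up = await client.upload_bytes(b"col_a,col_b\n1,2\n", filename="data.csv")
    if up.success:
        content = await client.download_bytes("/workspace/data.csv")
        print(content)

    await client.cleanup()


if __name__ == "__main__":
    asyncio.run(main())
```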
ai_computer_client-0.3.4/ai_computer/models.py

@@ -0,0 +1,45 @@
+from dataclasses import dataclass
+from typing import Optional, Dict
+
+@dataclass
+class SandboxResponse:
+    """Response from sandbox operations.
+
+    Attributes:
+        success: Whether the operation was successful
+        data: Optional response data
+        error: Optional error message if operation failed
+    """
+    success: bool
+    data: Optional[Dict] = None
+    error: Optional[str] = None
+
+@dataclass
+class StreamEvent:
+    """Event from streaming code execution.
+
+    Attributes:
+        type: Type of event ('stdout', 'stderr', 'info', 'error', 'completed', 'keepalive')
+        data: Event data
+    """
+    type: str
+    data: str
+
+@dataclass
+class FileOperationResponse:
+    """Response from file operations.
+
+    Attributes:
+        success: Whether the operation was successful
+        filename: Name of the file
+        size: Size of the file in bytes
+        path: Path where the file was saved
+        message: Optional status message
+        error: Optional error message if operation failed
+    """
+    success: bool
+    filename: Optional[str] = None
+    size: Optional[int] = None
+    path: Optional[str] = None
+    message: Optional[str] = None
+    error: Optional[str] = None
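These dataclasses carry no behaviour of their own; callers branch on `success` and then read `data` or `error`. A small illustrative helper for turning a failed `SandboxResponse` into an exception (the `unwrap` function is an assumption for this example, not part of the package):

```python
from ai_computer.models import SandboxResponse


def unwrap(response: SandboxResponse) -> dict:
    """Return response.data, raising if the operation reported failure."""
    if not response.success:
        raise RuntimeError(response.error or "sandbox operation failed")
    return response.data or {}


# Example with a status payload like the one wait_for_ready() returns:
ready = SandboxResponse(success=True, data={"status": "running"})
print(unwrap(ready)["status"])
```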
ai_computer_client-0.3.4/ai_computer/submodules/base.py

@@ -0,0 +1,81 @@
+from typing import Optional, Dict, Any
+import aiohttp
+from ..models import SandboxResponse
+
+class BaseSubmodule:
+    """Base class for all submodules.
+
+    This class provides common functionality for all submodules, including
+    access to the parent client's authentication token and sandbox ID.
+
+    Attributes:
+        _client: Reference to the parent SandboxClient
+    """
+
+    def __init__(self, client):
+        """Initialize the submodule.
+
+        Args:
+            client: The parent SandboxClient instance
+        """
+        self._client = client
+
+    @property
+    def base_url(self) -> str:
+        """Get the base URL from the parent client."""
+        return self._client.base_url
+
+    @property
+    def token(self) -> Optional[str]:
+        """Get the authentication token from the parent client."""
+        return self._client.token
+
+    @property
+    def sandbox_id(self) -> Optional[str]:
+        """Get the sandbox ID from the parent client."""
+        return self._client.sandbox_id
+
+    async def _ensure_ready(self) -> SandboxResponse:
+        """Ensure the sandbox is ready for operations.
+
+        Returns:
+            SandboxResponse indicating if the sandbox is ready
+        """
+        if not self.token or not self.sandbox_id:
+            return SandboxResponse(
+                success=False,
+                error="Client not properly initialized. Call setup() first"
+            )
+
+        # Ensure sandbox is ready
+        return await self._client.wait_for_ready()
+
+    def _get_headers(self, content_type: str = "application/json") -> Dict[str, str]:
+        """Get the headers for API requests.
+
+        Args:
+            content_type: The content type for the request
+
+        Returns:
+            Dictionary of headers
+        """
+        return {
+            "Authorization": f"Bearer {self.token}",
+            "Content-Type": content_type
+        }
+
+    async def _handle_response(self, response: aiohttp.ClientResponse) -> SandboxResponse:
+        """Handle the API response.
+
+        Args:
+            response: The aiohttp response object
+
+        Returns:
+            SandboxResponse with the parsed response data
+        """
+        if response.status != 200:
+            error_text = await response.text()
+            return SandboxResponse(success=False, error=error_text)
+
+        result = await response.json()
+        return SandboxResponse(success=True, data=result)
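The concrete submodules (`code.py`, `shell.py`, `filesystem.py`) are listed in this release but not reproduced above. The following sketch shows how a submodule would typically compose the `BaseSubmodule` helpers (`_ensure_ready`, `_get_headers`, `_handle_response`); the class, its `echo` method, and the endpoint path are placeholders invented for illustration, not the service's real API:

```python
import aiohttp

from ai_computer.models import SandboxResponse
from ai_computer.submodules.base import BaseSubmodule


class EchoModule(BaseSubmodule):
    """Illustrative submodule; not part of the released package."""

    async def echo(self, message: str) -> SandboxResponse:
        # Refuse to run until the parent client has a token, a sandbox ID,
        # and the sandbox reports a ready/running status.
        ready = await self._ensure_ready()
        if not ready.success:
            return ready

        # Placeholder endpoint; the real submodules target the service's
        # /api/v1/sandbox/{id}/... routes.
        url = f"{self.base_url}/api/v1/sandbox/{self.sandbox_id}/echo"
        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(
                    url,
                    headers=self._get_headers(),
                    json={"message": message},
                ) as response:
                    return await self._handle_response(response)
        except aiohttp.ClientError as e:
            return SandboxResponse(success=False, error=f"Connection error: {e}")
```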