blaxel 0.2.35__py3-none-any.whl → 0.2.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/api/compute/create_sandbox.py +21 -1
  3. blaxel/core/client/api/jobs/create_job_execution.py +12 -12
  4. blaxel/core/client/api/volumes/update_volume.py +187 -0
  5. blaxel/core/client/models/__init__.py +10 -6
  6. blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py} +11 -13
  7. blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py} +5 -5
  8. blaxel/core/client/models/create_job_execution_request.py +31 -0
  9. blaxel/core/client/models/create_job_execution_request_env.py +50 -0
  10. blaxel/core/client/models/function_runtime.py +18 -0
  11. blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py} +2 -2
  12. blaxel/core/client/models/function_spec.py +0 -18
  13. blaxel/core/client/models/job_execution_spec.py +35 -0
  14. blaxel/core/client/models/job_execution_spec_env_override.py +50 -0
  15. blaxel/core/client/models/port_protocol.py +1 -0
  16. blaxel/core/client/models/preview.py +48 -1
  17. blaxel/core/client/models/sandbox.py +10 -0
  18. blaxel/core/common/settings.py +5 -0
  19. blaxel/core/jobs/__init__.py +60 -88
  20. blaxel/core/sandbox/__init__.py +12 -0
  21. blaxel/core/{client/api/invitations/list_all_pending_invitations.py → sandbox/client/api/system/get_health.py} +26 -34
  22. blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
  23. blaxel/core/sandbox/client/models/__init__.py +8 -0
  24. blaxel/core/sandbox/client/models/content_search_match.py +24 -25
  25. blaxel/core/sandbox/client/models/content_search_response.py +25 -29
  26. blaxel/core/sandbox/client/models/find_match.py +13 -14
  27. blaxel/core/sandbox/client/models/find_response.py +21 -24
  28. blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
  29. blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
  30. blaxel/core/sandbox/client/models/health_response.py +159 -0
  31. blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
  32. blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
  33. blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
  34. blaxel/core/sandbox/default/__init__.py +2 -0
  35. blaxel/core/sandbox/default/filesystem.py +20 -6
  36. blaxel/core/sandbox/default/preview.py +48 -1
  37. blaxel/core/sandbox/default/process.py +66 -21
  38. blaxel/core/sandbox/default/sandbox.py +104 -6
  39. blaxel/core/sandbox/default/system.py +71 -0
  40. blaxel/core/sandbox/sync/__init__.py +2 -0
  41. blaxel/core/sandbox/sync/filesystem.py +19 -2
  42. blaxel/core/sandbox/sync/preview.py +50 -3
  43. blaxel/core/sandbox/sync/process.py +38 -15
  44. blaxel/core/sandbox/sync/sandbox.py +97 -5
  45. blaxel/core/sandbox/sync/system.py +71 -0
  46. blaxel/core/sandbox/types.py +212 -5
  47. blaxel/core/volume/volume.py +209 -4
  48. blaxel/langgraph/model.py +25 -14
  49. blaxel/langgraph/tools.py +15 -12
  50. blaxel/llamaindex/model.py +33 -24
  51. blaxel/llamaindex/tools.py +9 -4
  52. blaxel/pydantic/model.py +26 -12
  53. blaxel-0.2.37.dist-info/METADATA +569 -0
  54. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/RECORD +57 -47
  55. blaxel-0.2.35.dist-info/METADATA +0 -228
  56. /blaxel/core/{client/api/invitations → sandbox/client/api/system}/__init__.py +0 -0
  57. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/WHEEL +0 -0
  58. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/licenses/LICENSE +0 -0
blaxel/core/sandbox/default/preview.py
@@ -1,3 +1,4 @@
+import asyncio
 from dataclasses import dataclass
 from datetime import datetime
 from typing import Any, Dict, List, Union
@@ -15,6 +16,9 @@ from ...client.api.compute.delete_sandbox_preview import (
 from ...client.api.compute.delete_sandbox_preview_token import (
     asyncio as delete_sandbox_preview_token,
 )
+from ...client.api.compute.get_sandbox_preview import (
+    asyncio_detailed as get_sandbox_preview_detailed,
+)
 from ...client.api.compute.get_sandbox_preview import (
     asyncio as get_sandbox_preview,
 )
@@ -176,15 +180,58 @@ class SandboxPreviews:
         )
         return SandboxPreview(response)

-    async def delete(self, preview_name: str) -> dict:
+    async def delete(self, preview_name: str) -> Preview:
         """Delete a preview."""
         response: Preview = await delete_sandbox_preview(
             self.sandbox_name,
             preview_name,
             client=client,
         )
+
+        # If the preview is in DELETING state, wait for it to be fully deleted
+        if response and response.status == "DELETING":
+            await self._wait_for_deletion(preview_name)
+
         return response

+    async def _wait_for_deletion(self, preview_name: str, timeout_ms: int = 10000) -> None:
+        """Wait for a preview to be fully deleted.
+
+        Args:
+            preview_name: Name of the preview to wait for
+            timeout_ms: Timeout in milliseconds (default: 10000)
+
+        Raises:
+            Exception: If the preview is still in DELETING state after timeout
+        """
+        print(f"Waiting for preview deletion: {preview_name}")
+        poll_interval = 0.5  # Poll every 500ms
+        elapsed = 0.0
+        timeout_seconds = timeout_ms / 1000.0
+
+        while elapsed < timeout_seconds:
+            try:
+                response = await get_sandbox_preview_detailed(
+                    self.sandbox_name,
+                    preview_name,
+                    client=client,
+                )
+                if response.status_code == 404:
+                    return
+            except errors.UnexpectedStatus as e:
+                # 404 means the preview is deleted
+                if e.status_code == 404:
+                    return
+                raise
+            # Preview still exists, wait and retry
+            await asyncio.sleep(poll_interval)
+            elapsed += poll_interval
+
+        # Timeout reached, but deletion was initiated
+        raise Exception(
+            f"Preview deletion timeout: {preview_name} is still in DELETING state after {timeout_ms}ms"
+        )
+

 def to_utc_z(dt: datetime) -> str:
     """Convert datetime to UTC Z format string."""
blaxel/core/sandbox/default/process.py
@@ -6,7 +6,12 @@ import httpx
 from ...common.settings import settings
 from ..client.models import ProcessResponse, SuccessResponse
 from ..client.models.process_request import ProcessRequest
-from ..types import ProcessRequestWithLog, ProcessResponseWithLog, SandboxConfiguration
+from ..types import (
+    AsyncStreamHandle,
+    ProcessRequestWithLog,
+    ProcessResponseWithLog,
+    SandboxConfiguration,
+)
 from .action import SandboxAction


@@ -18,13 +23,28 @@ class SandboxProcess(SandboxAction):
         self,
         process_name: str,
         options: Dict[str, Callable[[str], None]] | None = None,
-    ) -> Dict[str, Callable[[], None]]:
-        """Stream logs from a process with automatic reconnection and deduplication."""
+    ) -> AsyncStreamHandle:
+        """Stream logs from a process with automatic reconnection and deduplication.
+
+        Returns an AsyncStreamHandle that can be used as a context manager:
+
+            async with sandbox.process.stream_logs(name, options) as handle:
+                # do something
+            # handle is automatically closed
+
+        Or manually:
+
+            handle = sandbox.process.stream_logs(name, options)
+            try:
+                # do something
+            finally:
+                handle.close()
+        """
         if options is None:
             options = {}

         reconnect_interval = 30  # 30 seconds in Python (TypeScript uses milliseconds)
-        current_stream = None
+        current_stream: AsyncStreamHandle | None = None
         is_running = True
         reconnect_timer = None

@@ -37,7 +57,7 @@

             # Close existing stream if any
             if current_stream:
-                current_stream["close"]()
+                current_stream.close()

             # Create wrapper options with deduplication
             wrapped_options = {}
@@ -104,19 +124,19 @@

             # Close current stream
             if current_stream:
-                current_stream["close"]()
+                current_stream.close()
                 current_stream = None

             # Clear seen logs
             seen_logs.clear()

-        return {"close": close}
+        return AsyncStreamHandle(close)

     def _stream_logs(
         self,
         identifier: str,
         options: Dict[str, Callable[[str], None]] | None = None,
-    ) -> Dict[str, Callable[[], None]]:
+    ) -> AsyncStreamHandle:
         """Private method to stream logs from a process with callbacks for different output types."""
         if options is None:
             options = {}
@@ -165,7 +185,9 @@
                         options["on_log"](line)
             except Exception as e:
                 # Suppress AbortError when closing
-                if not (hasattr(e, "name") and e.name == "AbortError"):
+                if hasattr(e, "name") and getattr(e, "name") == "AbortError":
+                    pass
+                else:
                     raise e

         # Start streaming in the background
@@ -176,7 +198,7 @@
             closed = True
             task.cancel()

-        return {"close": close}
+        return AsyncStreamHandle(close)

     async def exec(
         self,
@@ -203,7 +225,9 @@
             if "on_stderr" in process:
                 on_stderr = process["on_stderr"]
                 del process["on_stderr"]
-            process = ProcessRequest.from_dict(process)
+            tmp_process = ProcessRequest.from_dict(process)
+            assert tmp_process is not None
+            process = tmp_process

         # Store original wait_for_completion setting
         should_wait_for_completion = process.wait_for_completion
@@ -221,18 +245,27 @@
             self.handle_response_error(response)
             import json

-            response_data = json.loads(content_bytes) if content_bytes else None
-            result = ProcessResponse.from_dict(response_data)
+            if content_bytes:
+                response_data = json.loads(content_bytes)
+                result = ProcessResponse.from_dict(response_data)
+                assert result is not None
+            else:
+                raise Exception("No content received from response")
         finally:
             await response.aclose()

         if on_log or on_stdout or on_stderr:
-            stream_control = self._stream_logs(
-                result.pid, {"on_log": on_log, "on_stdout": on_stdout, "on_stderr": on_stderr}
-            )
+            stream_options: dict[str, Callable[[str], None]] = {}
+            if on_log:
+                stream_options["on_log"] = on_log
+            if on_stdout:
+                stream_options["on_stdout"] = on_stdout
+            if on_stderr:
+                stream_options["on_stderr"] = on_stderr
+            stream_control = self._stream_logs(result.pid, stream_options)
             return ProcessResponseWithLog(
                 result,
-                lambda: stream_control["close"]() if stream_control else None,
+                lambda: stream_control.close() if stream_control else None,
             )

         return result
@@ -277,6 +310,7 @@
             content = await response.aread()
             data = json.loads(content)
             result = ProcessResponse.from_dict(data)
+            assert result is not None

             # If process already completed (server waited), emit logs through callbacks
             if result.status == "completed" or result.status == "failed":
@@ -378,7 +412,9 @@
         try:
             data = json.loads(await response.aread())
             self.handle_response_error(response)
-            return ProcessResponse.from_dict(data)
+            result = ProcessResponse.from_dict(data)
+            assert result is not None
+            return result
         finally:
             await response.aclose()

@@ -390,7 +426,12 @@
         try:
             data = json.loads(await response.aread())
             self.handle_response_error(response)
-            return [ProcessResponse.from_dict(item) for item in data]
+            results = []
+            for item in data:
+                result = ProcessResponse.from_dict(item)
+                assert result is not None
+                results.append(result)
+            return results
         finally:
             await response.aclose()

@@ -402,7 +443,9 @@
         try:
             data = json.loads(await response.aread())
             self.handle_response_error(response)
-            return SuccessResponse.from_dict(data)
+            result = SuccessResponse.from_dict(data)
+            assert result is not None
+            return result
         finally:
             await response.aclose()

@@ -414,7 +457,9 @@
         try:
             data = json.loads(await response.aread())
             self.handle_response_error(response)
-            return SuccessResponse.from_dict(data)
+            result = SuccessResponse.from_dict(data)
+            assert result is not None
+            return result
         finally:
             await response.aclose()

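Illustrative note (not part of the diff): a consumer-side sketch of the AsyncStreamHandle change. The sandbox and process names and the top-level import path are assumptions; the context-manager form follows the docstring in the hunk above.

import asyncio

from blaxel.core.sandbox import SandboxInstance  # import path assumed


async def main() -> None:
    sandbox = await SandboxInstance.get("my-sandbox")  # hypothetical sandbox name

    # stream_logs() now returns an AsyncStreamHandle instead of a {"close": ...} dict.
    async with sandbox.process.stream_logs("my-process", {"on_log": print}) as handle:
        await asyncio.sleep(5)  # logs arrive via the callback while the handle is open
    # handle.close() is called automatically when the block exits


asyncio.run(main())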
blaxel/core/sandbox/default/sandbox.py
@@ -8,10 +8,18 @@ from ...client.api.compute.get_sandbox import asyncio as get_sandbox
 from ...client.api.compute.list_sandboxes import asyncio as list_sandboxes
 from ...client.api.compute.update_sandbox import asyncio as update_sandbox
 from ...client.client import client
-from ...client.models import Metadata, Sandbox, SandboxRuntime, SandboxSpec
+from ...client.models import (
+    Metadata,
+    MetadataLabels,
+    Sandbox,
+    SandboxLifecycle,
+    SandboxRuntime,
+    SandboxSpec,
+)
 from ...client.models.error import Error
 from ...client.models.sandbox_error import SandboxError
 from ...client.types import UNSET
+from ...common.settings import settings
 from ..types import (
     SandboxConfiguration,
     SandboxCreateConfiguration,
@@ -24,6 +32,7 @@ from .network import SandboxNetwork
 from .preview import SandboxPreviews
 from .process import SandboxProcess
 from .session import SandboxSessions
+from .system import SandboxSystem


 class SandboxAPIError(Exception):
@@ -86,6 +95,7 @@ class SandboxInstance:
         self.sessions = SandboxSessions(self.config)
         self.network = SandboxNetwork(self.config)
         self.codegen = SandboxCodegen(self.config)
+        self.system = SandboxSystem(self.config)

     @property
     def metadata(self):
@@ -103,6 +113,14 @@
     def spec(self):
         return self.sandbox.spec

+    @property
+    def last_used_at(self):
+        return self.sandbox.last_used_at
+
+    @property
+    def expires_in(self):
+        return self.sandbox.expires_in
+
     async def wait(self, max_wait: int = 60000, interval: int = 1000) -> "SandboxInstance":
         logger.warning(
             "⚠️ Warning: sandbox.wait() is deprecated. You don't need to wait for the sandbox to be deployed anymore."
@@ -162,13 +180,17 @@
             volumes = config._normalize_volumes() or UNSET
             ttl = config.ttl
             expires = config.expires
-            region = config.region
+            region = config.region or settings.region
             lifecycle = config.lifecycle
             # snapshot_enabled = sandbox.snapshot_enabled

+            labels = MetadataLabels.from_dict(config.labels) if config.labels else UNSET
+            if labels is None:
+                labels = UNSET
+
             # Create full Sandbox object
             sandbox = Sandbox(
-                metadata=Metadata(name=name, labels=config.labels),
+                metadata=Metadata(name=name, labels=labels),
                 spec=SandboxSpec(
                     runtime=SandboxRuntime(
                         image=image,
@@ -181,19 +203,28 @@
             )

             # Set ttl and expires if provided
-            if ttl:
+            if ttl and sandbox.spec.runtime:
                 sandbox.spec.runtime.ttl = ttl
-            if expires:
+            if expires and sandbox.spec.runtime:
                 sandbox.spec.runtime.expires = expires.isoformat()
             if region:
                 sandbox.spec.region = region
             if lifecycle:
-                sandbox.spec.lifecycle = lifecycle
+                if type(lifecycle) is dict:
+                    lifecycle = SandboxLifecycle.from_dict(lifecycle)
+                    assert lifecycle is not None
+                    sandbox.spec.lifecycle = lifecycle
+                elif type(lifecycle) is SandboxLifecycle:
+                    sandbox.spec.lifecycle = lifecycle
+                else:
+                    raise ValueError(f"Invalid lifecycle type: {type(lifecycle)}")
         else:
             # Handle existing Sandbox object or dict conversion
             if isinstance(sandbox, dict):
                 sandbox = Sandbox.from_dict(sandbox)
+                assert sandbox is not None

+        assert isinstance(sandbox, Sandbox)
         # Set defaults for missing fields
         if not sandbox.metadata:
             sandbox.metadata = Metadata(name=default_name)
@@ -219,6 +250,7 @@
             message = response.message if response.message else str(response)
             raise SandboxAPIError(message, status_code=status_code, code=code)

+        assert response is not None
         instance = cls(response)
         # TODO remove this part once we have a better way to handle this
         if safe:
@@ -304,6 +336,72 @@
         # Return new instance with updated sandbox
         return cls(response)

+    @classmethod
+    async def update_ttl(cls, sandbox_name: str, ttl: str) -> "SandboxInstance":
+        """Update sandbox TTL without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            ttl: The new TTL value (e.g., "5m", "1h", "30s")
+
+        Returns:
+            A new SandboxInstance with updated TTL
+        """
+        # Get the existing sandbox
+        sandbox_instance = await cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None or updated_sandbox.spec.runtime is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update TTL
+        updated_sandbox.spec.runtime.ttl = ttl
+
+        # Call the update API
+        response = await update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
+    @classmethod
+    async def update_lifecycle(
+        cls, sandbox_name: str, lifecycle: SandboxLifecycle
+    ) -> "SandboxInstance":
+        """Update sandbox lifecycle configuration without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            lifecycle: The new lifecycle configuration
+
+        Returns:
+            A new SandboxInstance with updated lifecycle
+        """
+        # Get the existing sandbox
+        sandbox_instance = await cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update lifecycle
+        updated_sandbox.spec.lifecycle = lifecycle
+
+        # Call the update API
+        response = await update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
     @classmethod
     async def create_if_not_exists(
         cls, sandbox: Union[Sandbox, SandboxCreateConfiguration, Dict[str, Any]]
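Illustrative note (not part of the diff): a short usage sketch for the new class-level helpers. The sandbox name, TTL value, and top-level import path are assumptions; the behavior described in the comments comes from the hunk above.

import asyncio

from blaxel.core.sandbox import SandboxInstance  # import path assumed


async def main() -> None:
    # update_ttl() fetches the sandbox, patches spec.runtime.ttl and calls the
    # update API, so the sandbox is not recreated.
    updated = await SandboxInstance.update_ttl("my-sandbox", "1h")  # hypothetical name
    print(updated.spec.runtime.ttl)

    # update_lifecycle() works the same way but replaces spec.lifecycle with the
    # SandboxLifecycle object passed in.


asyncio.run(main())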
blaxel/core/sandbox/default/system.py
@@ -0,0 +1,71 @@
+from typing import Union
+
+from ...common.settings import settings
+from ..client.api.system.get_health import asyncio as get_health
+from ..client.api.system.post_upgrade import asyncio as post_upgrade
+from ..client.client import Client
+from ..client.models import ErrorResponse, HealthResponse, SuccessResponse, UpgradeRequest
+from ..types import SandboxConfiguration
+from .action import SandboxAction
+
+
+class SandboxSystem(SandboxAction):
+    """System operations for sandbox including upgrade functionality."""
+
+    def __init__(self, sandbox_config: SandboxConfiguration):
+        super().__init__(sandbox_config)
+
+    async def upgrade(
+        self,
+        version: str | None = None,
+        base_url: str | None = None,
+    ) -> SuccessResponse:
+        """Upgrade the sandbox-api to a new version.
+
+        Triggers an upgrade of the sandbox-api process. Returns immediately before upgrading.
+        The upgrade will: download the specified binary from GitHub releases, validate it, and restart.
+        All running processes will be preserved across the upgrade.
+
+        Args:
+            version: Version to upgrade to - "develop" (default), "main", "latest",
+                or specific tag like "v1.0.0"
+            base_url: Base URL for releases (useful for forks, defaults to
+                https://github.com/blaxel-ai/sandbox/releases)
+
+        Returns:
+            SuccessResponse with status information
+        """
+        request = UpgradeRequest(version=version, base_url=base_url)
+
+        client = Client(
+            base_url=self.url,
+            headers={**settings.headers, **self.sandbox_config.headers},
+        )
+
+        async with client:
+            response = await post_upgrade(client=client, body=request)
+            if response is None:
+                raise Exception("Failed to upgrade sandbox")
+            if isinstance(response, ErrorResponse):
+                raise Exception(f"Upgrade failed: {response.error}")
+            return response
+
+    async def health(self) -> HealthResponse:
+        """Get health status and system information.
+
+        Returns health status and system information including upgrade count and binary details.
+        Also includes last upgrade attempt status with detailed error information if available.
+
+        Returns:
+            HealthResponse with system status information
+        """
+        client = Client(
+            base_url=self.url,
+            headers={**settings.headers, **self.sandbox_config.headers},
+        )
+
+        async with client:
+            response = await get_health(client=client)
+            if response is None:
+                raise Exception("Failed to get health status")
+            return response
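Illustrative note (not part of the diff): a hedged usage sketch for the new system API. The sandbox name and top-level import path are assumptions; the upgrade and health semantics in the comments come from the docstrings above.

import asyncio

from blaxel.core.sandbox import SandboxInstance  # import path assumed


async def main() -> None:
    sandbox = await SandboxInstance.get("my-sandbox")  # hypothetical name

    # Trigger an in-place upgrade of the sandbox-api binary; the call returns
    # immediately and running processes are preserved across the restart.
    await sandbox.system.upgrade(version="latest")

    # health() reports system status, including the last upgrade attempt.
    health = await sandbox.system.health()
    print(health)


asyncio.run(main())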
blaxel/core/sandbox/sync/__init__.py
@@ -6,6 +6,7 @@ from .sandbox import (
     SyncSandboxPreviews,
     SyncSandboxProcess,
 )
+from .system import SyncSandboxSystem

 __all__ = [
     "SyncSandboxInstance",
@@ -13,5 +14,6 @@ __all__ = [
     "SyncSandboxPreviews",
     "SyncSandboxProcess",
     "SyncSandboxCodegen",
+    "SyncSandboxSystem",
     "SyncCodeInterpreter",
 ]
blaxel/core/sandbox/sync/filesystem.py
@@ -14,6 +14,7 @@ from ..types import (
     SandboxConfiguration,
     SandboxFilesystemFile,
     WatchEvent,
+    WatchHandle,
 )
 from .action import SyncSandboxAction

@@ -162,7 +163,23 @@ class SyncSandboxFileSystem(SyncSandboxAction):
         path: str,
         callback: Callable[[WatchEvent], None],
         options: Dict[str, Any] | None = None,
-    ) -> Dict[str, Callable]:
+    ) -> WatchHandle:
+        """Watch for file system changes.
+
+        Returns a WatchHandle that can be used as a context manager:
+
+            with sandbox.fs.watch(path, callback) as handle:
+                # do something
+            # handle is automatically closed
+
+        Or manually:
+
+            handle = sandbox.fs.watch(path, callback)
+            try:
+                # do something
+            finally:
+                handle.close()
+        """
         path = self.format_path(path)
         closed = threading.Event()
         if options is None:
@@ -226,7 +243,7 @@ class SyncSandboxFileSystem(SyncSandboxAction):
         def close():
             closed.set()

-        return {"close": close}
+        return WatchHandle(close)

     def format_path(self, path: str) -> str:
         return path
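Illustrative note (not part of the diff): a minimal sketch of the sync watch handle in use. The SyncSandboxInstance accessor, sandbox name, path, and import path are assumptions; the fs.watch context-manager form comes from the docstring above.

import time

from blaxel.core.sandbox.sync import SyncSandboxInstance  # import path assumed


def on_event(event):
    print(event)


sandbox = SyncSandboxInstance.get("my-sandbox")  # hypothetical accessor and name

# watch() now returns a WatchHandle instead of a {"close": ...} dict.
with sandbox.fs.watch("/tmp", on_event):
    time.sleep(5)  # events are delivered to on_event while the handle is open
# the handle is closed automatically when the block exits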
blaxel/core/sandbox/sync/preview.py
@@ -1,3 +1,4 @@
+import time
 from dataclasses import dataclass
 from datetime import datetime
 from typing import Any, Dict, List, Union
@@ -11,6 +12,9 @@ from ...client.api.compute.delete_sandbox_preview import sync as delete_sandbox_
 from ...client.api.compute.delete_sandbox_preview_token import (
     sync as delete_sandbox_preview_token,
 )
+from ...client.api.compute.get_sandbox_preview import (
+    sync_detailed as get_sandbox_preview_detailed,
+)
 from ...client.api.compute.get_sandbox_preview import sync as get_sandbox_preview
 from ...client.api.compute.list_sandbox_preview_tokens import (
     sync as list_sandbox_preview_tokens,
@@ -188,9 +192,52 @@ class SyncSandboxPreviews:
             preview_name,
             client=client,
         )
-        if response:
-            return response
-        raise errors.UnexpectedStatus(400, b"Failed to delete preview")
+        if not response:
+            raise errors.UnexpectedStatus(400, b"Failed to delete preview")
+
+        # If the preview is in DELETING state, wait for it to be fully deleted
+        if response.status == "DELETING":
+            self._wait_for_deletion(preview_name)
+
+        return response
+
+    def _wait_for_deletion(self, preview_name: str, timeout_ms: int = 10000) -> None:
+        """Wait for a preview to be fully deleted.
+
+        Args:
+            preview_name: Name of the preview to wait for
+            timeout_ms: Timeout in milliseconds (default: 10000)
+
+        Raises:
+            Exception: If the preview is still in DELETING state after timeout
+        """
+        print(f"Waiting for preview deletion: {preview_name}")
+        poll_interval = 0.5  # Poll every 500ms
+        elapsed = 0.0
+        timeout_seconds = timeout_ms / 1000.0
+
+        while elapsed < timeout_seconds:
+            try:
+                response = get_sandbox_preview_detailed(
+                    self.sandbox_name,
+                    preview_name,
+                    client=client,
+                )
+                if response.status_code == 404:
+                    return
+            except errors.UnexpectedStatus as e:
+                # 404 means the preview is deleted
+                if e.status_code == 404:
+                    return
+                raise
+            # Preview still exists, wait and retry
+            time.sleep(poll_interval)
+            elapsed += poll_interval
+
+        # Timeout reached, but deletion was initiated
+        raise Exception(
+            f"Preview deletion timeout: {preview_name} is still in DELETING state after {timeout_ms}ms"
+        )


 def to_utc_z(dt: datetime) -> str: