blaxel 0.2.36__py3-none-any.whl → 0.2.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/models/create_job_execution_request_env.py +3 -3
  3. blaxel/core/client/models/preview.py +48 -1
  4. blaxel/core/client/models/sandbox.py +10 -0
  5. blaxel/core/jobs/__init__.py +2 -2
  6. blaxel/core/sandbox/__init__.py +12 -0
  7. blaxel/core/sandbox/client/api/system/__init__.py +0 -0
  8. blaxel/core/sandbox/client/api/system/get_health.py +134 -0
  9. blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
  10. blaxel/core/sandbox/client/models/__init__.py +8 -0
  11. blaxel/core/sandbox/client/models/content_search_match.py +24 -25
  12. blaxel/core/sandbox/client/models/content_search_response.py +25 -29
  13. blaxel/core/sandbox/client/models/find_match.py +13 -14
  14. blaxel/core/sandbox/client/models/find_response.py +21 -24
  15. blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
  16. blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
  17. blaxel/core/sandbox/client/models/health_response.py +159 -0
  18. blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
  19. blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
  20. blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
  21. blaxel/core/sandbox/default/__init__.py +2 -0
  22. blaxel/core/sandbox/default/filesystem.py +20 -6
  23. blaxel/core/sandbox/default/preview.py +48 -1
  24. blaxel/core/sandbox/default/process.py +66 -21
  25. blaxel/core/sandbox/default/sandbox.py +36 -5
  26. blaxel/core/sandbox/default/system.py +71 -0
  27. blaxel/core/sandbox/sync/__init__.py +2 -0
  28. blaxel/core/sandbox/sync/filesystem.py +19 -2
  29. blaxel/core/sandbox/sync/preview.py +50 -3
  30. blaxel/core/sandbox/sync/process.py +38 -15
  31. blaxel/core/sandbox/sync/sandbox.py +29 -4
  32. blaxel/core/sandbox/sync/system.py +71 -0
  33. blaxel/core/sandbox/types.py +212 -5
  34. blaxel/core/volume/volume.py +6 -0
  35. blaxel/langgraph/tools.py +0 -1
  36. blaxel-0.2.37.dist-info/METADATA +569 -0
  37. {blaxel-0.2.36.dist-info → blaxel-0.2.37.dist-info}/RECORD +39 -30
  38. blaxel-0.2.36.dist-info/METADATA +0 -228
  39. {blaxel-0.2.36.dist-info → blaxel-0.2.37.dist-info}/WHEEL +0 -0
  40. {blaxel-0.2.36.dist-info → blaxel-0.2.37.dist-info}/licenses/LICENSE +0 -0
blaxel/core/sandbox/client/models/upgrade_request.py
@@ -0,0 +1,71 @@
+ from typing import Any, TypeVar, Union
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..types import UNSET, Unset
+
+ T = TypeVar("T", bound="UpgradeRequest")
+
+
+ @_attrs_define
+ class UpgradeRequest:
+     """
+     Attributes:
+         base_url (Union[Unset, str]): Base URL for releases (useful for forks) Example: https://github.com/blaxel-
+             ai/sandbox/releases.
+         version (Union[Unset, str]): Version to upgrade to: "develop", "main", "latest", or specific tag like "v1.0.0"
+             Example: develop.
+     """
+
+     base_url: Union[Unset, str] = UNSET
+     version: Union[Unset, str] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         base_url = self.base_url
+
+         version = self.version
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update({})
+         if base_url is not UNSET:
+             field_dict["baseUrl"] = base_url
+         if version is not UNSET:
+             field_dict["version"] = version
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+         if not src_dict:
+             return None
+         d = src_dict.copy()
+         base_url = d.pop("baseUrl", d.pop("base_url", UNSET))
+
+         version = d.pop("version", UNSET)
+
+         upgrade_request = cls(
+             base_url=base_url,
+             version=version,
+         )
+
+         upgrade_request.additional_properties = d
+         return upgrade_request
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
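
A minimal usage sketch of the generated model above (illustrative, not part of the package): the module path follows the files-changed list, and the camelCase key mapping plus the None return for an empty payload follow from to_dict/from_dict as shown. Example values are hypothetical.

    # Sketch only: round-tripping UpgradeRequest through its dict representation.
    from blaxel.core.sandbox.client.models.upgrade_request import UpgradeRequest

    req = UpgradeRequest(version="latest")       # base_url stays UNSET and is omitted when serialized
    payload = req.to_dict()                      # {"version": "latest"}
    restored = UpgradeRequest.from_dict(payload)
    assert restored is not None and restored.version == "latest"
    assert UpgradeRequest.from_dict({}) is None  # empty payloads return None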
blaxel/core/sandbox/client/models/upgrade_status.py
@@ -0,0 +1,125 @@
+ from typing import Any, TypeVar, Union
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..models.process_upgrade_state import ProcessUpgradeState
+ from ..types import UNSET, Unset
+
+ T = TypeVar("T", bound="UpgradeStatus")
+
+
+ @_attrs_define
+ class UpgradeStatus:
+     """
+     Attributes:
+         status (ProcessUpgradeState):
+         step (str): Current/last step (none, starting, download, validate, replace, completed, skipped) Example:
+             download.
+         version (str): Version being upgraded to Example: latest.
+         binary_path (Union[Unset, str]): Path to downloaded binary Example: /tmp/sandbox-api-new.
+         bytes_downloaded (Union[Unset, int]): Bytes downloaded Example: 25034936.
+         download_url (Union[Unset, str]): URL used for download Example: https://github.com/....
+         error (Union[Unset, str]): Error message if failed Example: Failed to download binary.
+         last_attempt (Union[Unset, str]): When the upgrade was attempted
+     """
+
+     status: ProcessUpgradeState
+     step: str
+     version: str
+     binary_path: Union[Unset, str] = UNSET
+     bytes_downloaded: Union[Unset, int] = UNSET
+     download_url: Union[Unset, str] = UNSET
+     error: Union[Unset, str] = UNSET
+     last_attempt: Union[Unset, str] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         status = self.status.value
+
+         step = self.step
+
+         version = self.version
+
+         binary_path = self.binary_path
+
+         bytes_downloaded = self.bytes_downloaded
+
+         download_url = self.download_url
+
+         error = self.error
+
+         last_attempt = self.last_attempt
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update(
+             {
+                 "status": status,
+                 "step": step,
+                 "version": version,
+             }
+         )
+         if binary_path is not UNSET:
+             field_dict["binaryPath"] = binary_path
+         if bytes_downloaded is not UNSET:
+             field_dict["bytesDownloaded"] = bytes_downloaded
+         if download_url is not UNSET:
+             field_dict["downloadUrl"] = download_url
+         if error is not UNSET:
+             field_dict["error"] = error
+         if last_attempt is not UNSET:
+             field_dict["lastAttempt"] = last_attempt
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+         if not src_dict:
+             return None
+         d = src_dict.copy()
+         status = ProcessUpgradeState(d.pop("status"))
+
+         step = d.pop("step")
+
+         version = d.pop("version")
+
+         binary_path = d.pop("binaryPath", d.pop("binary_path", UNSET))
+
+         bytes_downloaded = d.pop("bytesDownloaded", d.pop("bytes_downloaded", UNSET))
+
+         download_url = d.pop("downloadUrl", d.pop("download_url", UNSET))
+
+         error = d.pop("error", UNSET)
+
+         last_attempt = d.pop("lastAttempt", d.pop("last_attempt", UNSET))
+
+         upgrade_status = cls(
+             status=status,
+             step=step,
+             version=version,
+             binary_path=binary_path,
+             bytes_downloaded=bytes_downloaded,
+             download_url=download_url,
+             error=error,
+             last_attempt=last_attempt,
+         )
+
+         upgrade_status.additional_properties = d
+         return upgrade_status
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
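
A hedged sketch of parsing a raw upgrade-status payload with the model above (illustrative only): the concrete ProcessUpgradeState members live in process_upgrade_state.py, which is not expanded in this diff, so the helper stays agnostic about them.

    # Sketch only: camelCase keys such as "bytesDownloaded" and "downloadUrl" are
    # mapped back onto snake_case fields; unknown keys land in additional_properties.
    from typing import Any

    from blaxel.core.sandbox.client.models.upgrade_status import UpgradeStatus


    def parse_upgrade_status(payload: dict[str, Any]) -> UpgradeStatus | None:
        # from_dict returns None for an empty payload; an unrecognized "status"
        # value would raise when ProcessUpgradeState(...) is constructed.
        return UpgradeStatus.from_dict(payload)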
blaxel/core/sandbox/default/__init__.py
@@ -7,6 +7,7 @@ from .sandbox import (
      SandboxPreviews,
      SandboxProcess,
  )
+ from .system import SandboxSystem

  __all__ = [
      "SandboxInstance",
@@ -15,5 +16,6 @@ __all__ = [
      "SandboxPreviews",
      "SandboxProcess",
      "SandboxCodegen",
+     "SandboxSystem",
      "CodeInterpreter",
  ]
blaxel/core/sandbox/default/filesystem.py
@@ -10,6 +10,7 @@ import httpx
  from ...common.settings import settings
  from ..client.models import Directory, FileRequest, SuccessResponse
  from ..types import (
+     AsyncWatchHandle,
      CopyResponse,
      SandboxConfiguration,
      SandboxFilesystemFile,
@@ -327,9 +328,7 @@ class SandboxFileSystem(SandboxAction):
              data = json.loads(await response.aread())
              self.handle_response_error(response)

-             from ..client.models.content_search_response import (
-                 ContentSearchResponse,
-             )
+             from ..client.models.content_search_response import ContentSearchResponse

              return ContentSearchResponse.from_dict(data)
          finally:
@@ -364,8 +363,23 @@ class SandboxFileSystem(SandboxAction):
          path: str,
          callback: Callable[[WatchEvent], None],
          options: Dict[str, Any] | None = None,
-     ) -> Dict[str, Callable]:
-         """Watch for file system changes."""
+     ) -> AsyncWatchHandle:
+         """Watch for file system changes.
+
+         Returns an AsyncWatchHandle that can be used as a context manager:
+
+             async with sandbox.fs.watch(path, callback) as handle:
+                 # do something
+             # handle is automatically closed
+
+         Or manually:
+
+             handle = sandbox.fs.watch(path, callback)
+             try:
+                 # do something
+             finally:
+                 handle.close()
+         """
          path = self.format_path(path)
          closed = False

@@ -444,7 +458,7 @@
              closed = True
              task.cancel()

-         return {"close": close}
+         return AsyncWatchHandle(close)

      def format_path(self, path: str) -> str:
          """Format path for filesystem operations.
blaxel/core/sandbox/default/preview.py
@@ -1,3 +1,4 @@
+ import asyncio
  from dataclasses import dataclass
  from datetime import datetime
  from typing import Any, Dict, List, Union
@@ -15,6 +16,9 @@ from ...client.api.compute.delete_sandbox_preview import (
  from ...client.api.compute.delete_sandbox_preview_token import (
      asyncio as delete_sandbox_preview_token,
  )
+ from ...client.api.compute.get_sandbox_preview import (
+     asyncio_detailed as get_sandbox_preview_detailed,
+ )
  from ...client.api.compute.get_sandbox_preview import (
      asyncio as get_sandbox_preview,
  )
@@ -176,15 +180,58 @@ class SandboxPreviews:
          )
          return SandboxPreview(response)

-     async def delete(self, preview_name: str) -> dict:
+     async def delete(self, preview_name: str) -> Preview:
          """Delete a preview."""
          response: Preview = await delete_sandbox_preview(
              self.sandbox_name,
              preview_name,
              client=client,
          )
+
+         # If the preview is in DELETING state, wait for it to be fully deleted
+         if response and response.status == "DELETING":
+             await self._wait_for_deletion(preview_name)
+
          return response

+     async def _wait_for_deletion(self, preview_name: str, timeout_ms: int = 10000) -> None:
+         """Wait for a preview to be fully deleted.
+
+         Args:
+             preview_name: Name of the preview to wait for
+             timeout_ms: Timeout in milliseconds (default: 10000)
+
+         Raises:
+             Exception: If the preview is still in DELETING state after timeout
+         """
+         print(f"Waiting for preview deletion: {preview_name}")
+         poll_interval = 0.5  # Poll every 500ms
+         elapsed = 0.0
+         timeout_seconds = timeout_ms / 1000.0
+
+         while elapsed < timeout_seconds:
+             try:
+                 response = await get_sandbox_preview_detailed(
+                     self.sandbox_name,
+                     preview_name,
+                     client=client,
+                 )
+                 if response.status_code == 404:
+                     return
+             except errors.UnexpectedStatus as e:
+                 # 404 means the preview is deleted
+                 if e.status_code == 404:
+                     return
+                 raise
+             # Preview still exists, wait and retry
+             await asyncio.sleep(poll_interval)
+             elapsed += poll_interval
+
+         # Timeout reached, but deletion was initiated
+         raise Exception(
+             f"Preview deletion timeout: {preview_name} is still in DELETING state after {timeout_ms}ms"
+         )
+

  def to_utc_z(dt: datetime) -> str:
      """Convert datetime to UTC Z format string."""
blaxel/core/sandbox/default/process.py
@@ -6,7 +6,12 @@ import httpx
  from ...common.settings import settings
  from ..client.models import ProcessResponse, SuccessResponse
  from ..client.models.process_request import ProcessRequest
- from ..types import ProcessRequestWithLog, ProcessResponseWithLog, SandboxConfiguration
+ from ..types import (
+     AsyncStreamHandle,
+     ProcessRequestWithLog,
+     ProcessResponseWithLog,
+     SandboxConfiguration,
+ )
  from .action import SandboxAction


@@ -18,13 +23,28 @@ class SandboxProcess(SandboxAction):
          self,
          process_name: str,
          options: Dict[str, Callable[[str], None]] | None = None,
-     ) -> Dict[str, Callable[[], None]]:
-         """Stream logs from a process with automatic reconnection and deduplication."""
+     ) -> AsyncStreamHandle:
+         """Stream logs from a process with automatic reconnection and deduplication.
+
+         Returns an AsyncStreamHandle that can be used as a context manager:
+
+             async with sandbox.process.stream_logs(name, options) as handle:
+                 # do something
+             # handle is automatically closed
+
+         Or manually:
+
+             handle = sandbox.process.stream_logs(name, options)
+             try:
+                 # do something
+             finally:
+                 handle.close()
+         """
          if options is None:
              options = {}

          reconnect_interval = 30  # 30 seconds in Python (TypeScript uses milliseconds)
-         current_stream = None
+         current_stream: AsyncStreamHandle | None = None
          is_running = True
          reconnect_timer = None

@@ -37,7 +57,7 @@

              # Close existing stream if any
              if current_stream:
-                 current_stream["close"]()
+                 current_stream.close()

              # Create wrapper options with deduplication
              wrapped_options = {}
@@ -104,19 +124,19 @@

              # Close current stream
              if current_stream:
-                 current_stream["close"]()
+                 current_stream.close()
                  current_stream = None

              # Clear seen logs
              seen_logs.clear()

-         return {"close": close}
+         return AsyncStreamHandle(close)

      def _stream_logs(
          self,
          identifier: str,
          options: Dict[str, Callable[[str], None]] | None = None,
-     ) -> Dict[str, Callable[[], None]]:
+     ) -> AsyncStreamHandle:
          """Private method to stream logs from a process with callbacks for different output types."""
          if options is None:
              options = {}
@@ -165,7 +185,9 @@
                              options["on_log"](line)
              except Exception as e:
                  # Suppress AbortError when closing
-                 if not (hasattr(e, "name") and e.name == "AbortError"):
+                 if hasattr(e, "name") and getattr(e, "name") == "AbortError":
+                     pass
+                 else:
                      raise e

          # Start streaming in the background
@@ -176,7 +198,7 @@
              closed = True
              task.cancel()

-         return {"close": close}
+         return AsyncStreamHandle(close)

      async def exec(
          self,
@@ -203,7 +225,9 @@
          if "on_stderr" in process:
              on_stderr = process["on_stderr"]
              del process["on_stderr"]
-         process = ProcessRequest.from_dict(process)
+         tmp_process = ProcessRequest.from_dict(process)
+         assert tmp_process is not None
+         process = tmp_process

          # Store original wait_for_completion setting
          should_wait_for_completion = process.wait_for_completion
@@ -221,18 +245,27 @@
              self.handle_response_error(response)
              import json

-             response_data = json.loads(content_bytes) if content_bytes else None
-             result = ProcessResponse.from_dict(response_data)
+             if content_bytes:
+                 response_data = json.loads(content_bytes)
+                 result = ProcessResponse.from_dict(response_data)
+                 assert result is not None
+             else:
+                 raise Exception("No content received from response")
          finally:
              await response.aclose()

          if on_log or on_stdout or on_stderr:
-             stream_control = self._stream_logs(
-                 result.pid, {"on_log": on_log, "on_stdout": on_stdout, "on_stderr": on_stderr}
-             )
+             stream_options: dict[str, Callable[[str], None]] = {}
+             if on_log:
+                 stream_options["on_log"] = on_log
+             if on_stdout:
+                 stream_options["on_stdout"] = on_stdout
+             if on_stderr:
+                 stream_options["on_stderr"] = on_stderr
+             stream_control = self._stream_logs(result.pid, stream_options)
              return ProcessResponseWithLog(
                  result,
-                 lambda: stream_control["close"]() if stream_control else None,
+                 lambda: stream_control.close() if stream_control else None,
              )

          return result
@@ -277,6 +310,7 @@
              content = await response.aread()
              data = json.loads(content)
              result = ProcessResponse.from_dict(data)
+             assert result is not None

              # If process already completed (server waited), emit logs through callbacks
              if result.status == "completed" or result.status == "failed":
@@ -378,7 +412,9 @@
          try:
              data = json.loads(await response.aread())
              self.handle_response_error(response)
-             return ProcessResponse.from_dict(data)
+             result = ProcessResponse.from_dict(data)
+             assert result is not None
+             return result
          finally:
              await response.aclose()

@@ -390,7 +426,12 @@
          try:
              data = json.loads(await response.aread())
              self.handle_response_error(response)
-             return [ProcessResponse.from_dict(item) for item in data]
+             results = []
+             for item in data:
+                 result = ProcessResponse.from_dict(item)
+                 assert result is not None
+                 results.append(result)
+             return results
          finally:
              await response.aclose()

@@ -402,7 +443,9 @@
          try:
              data = json.loads(await response.aread())
              self.handle_response_error(response)
-             return SuccessResponse.from_dict(data)
+             result = SuccessResponse.from_dict(data)
+             assert result is not None
+             return result
          finally:
              await response.aclose()

@@ -414,7 +457,9 @@
          try:
              data = json.loads(await response.aread())
              self.handle_response_error(response)
-             return SuccessResponse.from_dict(data)
+             result = SuccessResponse.from_dict(data)
+             assert result is not None
+             return result
          finally:
              await response.aclose()
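
A hedged usage sketch of the reworked log streaming (illustrative only): the on_log/on_stdout/on_stderr option keys and the sandbox.process.stream_logs call come from the code above, while the process name and the re-export of SandboxInstance from blaxel.core.sandbox are assumptions.

    # Sketch only: stream_logs() now returns an AsyncStreamHandle instead of a dict.
    from blaxel.core.sandbox import SandboxInstance


    async def tail_process(sandbox: SandboxInstance) -> None:
        options = {
            "on_stdout": lambda line: print("out:", line),
            "on_stderr": lambda line: print("err:", line),
        }
        async with sandbox.process.stream_logs("my-process", options):  # hypothetical name
            ...  # log lines arrive through the callbacks until the handle closes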
 
blaxel/core/sandbox/default/sandbox.py
@@ -8,7 +8,14 @@ from ...client.api.compute.get_sandbox import asyncio as get_sandbox
  from ...client.api.compute.list_sandboxes import asyncio as list_sandboxes
  from ...client.api.compute.update_sandbox import asyncio as update_sandbox
  from ...client.client import client
- from ...client.models import Metadata, Sandbox, SandboxLifecycle, SandboxRuntime, SandboxSpec
+ from ...client.models import (
+     Metadata,
+     MetadataLabels,
+     Sandbox,
+     SandboxLifecycle,
+     SandboxRuntime,
+     SandboxSpec,
+ )
  from ...client.models.error import Error
  from ...client.models.sandbox_error import SandboxError
  from ...client.types import UNSET
@@ -25,6 +32,7 @@ from .network import SandboxNetwork
  from .preview import SandboxPreviews
  from .process import SandboxProcess
  from .session import SandboxSessions
+ from .system import SandboxSystem


  class SandboxAPIError(Exception):
@@ -87,6 +95,7 @@ class SandboxInstance:
          self.sessions = SandboxSessions(self.config)
          self.network = SandboxNetwork(self.config)
          self.codegen = SandboxCodegen(self.config)
+         self.system = SandboxSystem(self.config)

      @property
      def metadata(self):
@@ -104,6 +113,14 @@
      def spec(self):
          return self.sandbox.spec

+     @property
+     def last_used_at(self):
+         return self.sandbox.last_used_at
+
+     @property
+     def expires_in(self):
+         return self.sandbox.expires_in
+
      async def wait(self, max_wait: int = 60000, interval: int = 1000) -> "SandboxInstance":
          logger.warning(
              "⚠️ Warning: sandbox.wait() is deprecated. You don't need to wait for the sandbox to be deployed anymore."
@@ -167,9 +184,13 @@
              lifecycle = config.lifecycle
              # snapshot_enabled = sandbox.snapshot_enabled

+             labels = MetadataLabels.from_dict(config.labels) if config.labels else UNSET
+             if labels is None:
+                 labels = UNSET
+
              # Create full Sandbox object
              sandbox = Sandbox(
-                 metadata=Metadata(name=name, labels=config.labels),
+                 metadata=Metadata(name=name, labels=labels),
                  spec=SandboxSpec(
                      runtime=SandboxRuntime(
                          image=image,
@@ -182,19 +203,28 @@
              )

              # Set ttl and expires if provided
-             if ttl:
+             if ttl and sandbox.spec.runtime:
                  sandbox.spec.runtime.ttl = ttl
-             if expires:
+             if expires and sandbox.spec.runtime:
                  sandbox.spec.runtime.expires = expires.isoformat()
              if region:
                  sandbox.spec.region = region
              if lifecycle:
-                 sandbox.spec.lifecycle = lifecycle
+                 if type(lifecycle) is dict:
+                     lifecycle = SandboxLifecycle.from_dict(lifecycle)
+                     assert lifecycle is not None
+                     sandbox.spec.lifecycle = lifecycle
+                 elif type(lifecycle) is SandboxLifecycle:
+                     sandbox.spec.lifecycle = lifecycle
+                 else:
+                     raise ValueError(f"Invalid lifecycle type: {type(lifecycle)}")
          else:
              # Handle existing Sandbox object or dict conversion
              if isinstance(sandbox, dict):
                  sandbox = Sandbox.from_dict(sandbox)
+                 assert sandbox is not None

+         assert isinstance(sandbox, Sandbox)
          # Set defaults for missing fields
          if not sandbox.metadata:
              sandbox.metadata = Metadata(name=default_name)
@@ -220,6 +250,7 @@
              message = response.message if response.message else str(response)
              raise SandboxAPIError(message, status_code=status_code, code=code)

+         assert response is not None
          instance = cls(response)
          # TODO remove this part once we have a better way to handle this
          if safe:
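
A hedged sketch touching the SandboxInstance additions above (illustrative only): the system attribute and the last_used_at / expires_in pass-throughs are shown in this hunk, while the SandboxSystem methods themselves live in default/system.py, which this section does not expand; the import path assumes the usual re-export from blaxel.core.sandbox.

    # Sketch only: the new attributes are plain pass-throughs over the underlying Sandbox model.
    from blaxel.core.sandbox import SandboxInstance


    def describe(instance: SandboxInstance) -> None:
        print("last used at:", instance.last_used_at)
        print("expires in:", instance.expires_in)
        print("system client:", instance.system)  # SandboxSystem; its methods are not shown in this diff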