modal 1.0.6.dev61__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.

Files changed (75)
  1. modal/__main__.py +2 -2
  2. modal/_clustered_functions.py +3 -0
  3. modal/_clustered_functions.pyi +3 -2
  4. modal/_functions.py +78 -26
  5. modal/_object.py +9 -1
  6. modal/_output.py +14 -25
  7. modal/_runtime/gpu_memory_snapshot.py +158 -54
  8. modal/_utils/async_utils.py +6 -4
  9. modal/_utils/auth_token_manager.py +1 -1
  10. modal/_utils/blob_utils.py +16 -21
  11. modal/_utils/function_utils.py +16 -4
  12. modal/_utils/time_utils.py +8 -4
  13. modal/app.py +0 -4
  14. modal/app.pyi +0 -4
  15. modal/cli/_traceback.py +3 -2
  16. modal/cli/app.py +4 -4
  17. modal/cli/cluster.py +4 -4
  18. modal/cli/config.py +2 -2
  19. modal/cli/container.py +2 -2
  20. modal/cli/dict.py +4 -4
  21. modal/cli/entry_point.py +2 -2
  22. modal/cli/import_refs.py +3 -3
  23. modal/cli/network_file_system.py +8 -9
  24. modal/cli/profile.py +2 -2
  25. modal/cli/queues.py +5 -5
  26. modal/cli/secret.py +5 -5
  27. modal/cli/utils.py +3 -4
  28. modal/cli/volume.py +8 -9
  29. modal/client.py +8 -1
  30. modal/client.pyi +9 -10
  31. modal/container_process.py +2 -2
  32. modal/dict.py +47 -3
  33. modal/dict.pyi +55 -0
  34. modal/exception.py +4 -0
  35. modal/experimental/__init__.py +1 -1
  36. modal/experimental/flash.py +18 -2
  37. modal/experimental/flash.pyi +19 -0
  38. modal/functions.pyi +6 -7
  39. modal/image.py +26 -10
  40. modal/image.pyi +12 -4
  41. modal/mount.py +1 -1
  42. modal/object.pyi +4 -0
  43. modal/parallel_map.py +432 -4
  44. modal/parallel_map.pyi +28 -0
  45. modal/queue.py +46 -3
  46. modal/queue.pyi +53 -0
  47. modal/sandbox.py +105 -25
  48. modal/sandbox.pyi +108 -18
  49. modal/secret.py +48 -5
  50. modal/secret.pyi +55 -0
  51. modal/token_flow.py +3 -3
  52. modal/volume.py +49 -18
  53. modal/volume.pyi +50 -8
  54. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/METADATA +2 -2
  55. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/RECORD +75 -75
  56. modal_proto/api.proto +140 -14
  57. modal_proto/api_grpc.py +80 -0
  58. modal_proto/api_pb2.py +927 -756
  59. modal_proto/api_pb2.pyi +488 -34
  60. modal_proto/api_pb2_grpc.py +166 -0
  61. modal_proto/api_pb2_grpc.pyi +52 -0
  62. modal_proto/modal_api_grpc.py +5 -0
  63. modal_version/__init__.py +1 -1
  64. /modal/{requirements → builder}/2023.12.312.txt +0 -0
  65. /modal/{requirements → builder}/2023.12.txt +0 -0
  66. /modal/{requirements → builder}/2024.04.txt +0 -0
  67. /modal/{requirements → builder}/2024.10.txt +0 -0
  68. /modal/{requirements → builder}/2025.06.txt +0 -0
  69. /modal/{requirements → builder}/PREVIEW.txt +0 -0
  70. /modal/{requirements → builder}/README.md +0 -0
  71. /modal/{requirements → builder}/base-images.json +0 -0
  72. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/WHEEL +0 -0
  73. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/entry_points.txt +0 -0
  74. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/licenses/LICENSE +0 -0
  75. {modal-1.0.6.dev61.dist-info → modal-1.1.1.dist-info}/top_level.txt +0 -0
modal/queue.py CHANGED
@@ -3,8 +3,11 @@ import queue  # The system library
 import time
 import warnings
 from collections.abc import AsyncGenerator, AsyncIterator
+from dataclasses import dataclass
+from datetime import datetime
 from typing import Any, Optional
 
+from google.protobuf.message import Message
 from grpclib import GRPCError, Status
 from synchronicity.async_wrap import asynccontextmanager
 
@@ -17,10 +20,23 @@ from ._utils.async_utils import TaskContext, synchronize_api, warn_if_generator_
 from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
 from ._utils.grpc_utils import retry_transient_errors
 from ._utils.name_utils import check_object_name
+from ._utils.time_utils import timestamp_to_localized_dt
 from .client import _Client
 from .exception import InvalidError, RequestSizeError
 
 
+@dataclass
+class QueueInfo:
+    """Information about the Queue object."""
+
+    # This dataclass should be limited to information that is unchanging over the lifetime of the Queue,
+    # since it is transmitted from the server when the object is hydrated and could be stale when accessed.
+
+    name: Optional[str]
+    created_at: datetime
+    created_by: Optional[str]
+
+
 class _Queue(_Object, type_prefix="qu"):
     """Distributed, FIFO queue for data flow in Modal apps.
 
@@ -94,10 +110,26 @@ class _Queue(_Object, type_prefix="qu"):
     Partition keys must be non-empty and must not exceed 64 bytes.
     """
 
+    _metadata: Optional[api_pb2.QueueMetadata] = None
+
     def __init__(self):
         """mdmd:hidden"""
         raise RuntimeError("Queue() is not allowed. Please use `Queue.from_name(...)` or `Queue.ephemeral()` instead.")
 
+    @property
+    def name(self) -> Optional[str]:
+        return self._name
+
+    def _hydrate_metadata(self, metadata: Optional[Message]):
+        if metadata:
+            assert isinstance(metadata, api_pb2.QueueMetadata)
+            self._metadata = metadata
+            self._name = metadata.name
+
+    def _get_metadata(self) -> api_pb2.QueueMetadata:
+        assert self._metadata
+        return self._metadata
+
     @staticmethod
     def validate_partition_key(partition: Optional[str]) -> bytes:
         if partition is not None:
@@ -142,7 +174,7 @@ class _Queue(_Object, type_prefix="qu"):
         async with TaskContext() as tc:
             request = api_pb2.QueueHeartbeatRequest(queue_id=response.queue_id)
             tc.infinite_loop(lambda: client.stub.QueueHeartbeat(request), sleep=_heartbeat_sleep)
-            yield cls._new_hydrated(response.queue_id, client, None, is_another_app=True)
+            yield cls._new_hydrated(response.queue_id, client, response.metadata, is_another_app=True)
 
     @staticmethod
     def from_name(
@@ -173,9 +205,9 @@ class _Queue(_Object, type_prefix="qu"):
                 object_creation_type=(api_pb2.OBJECT_CREATION_TYPE_CREATE_IF_MISSING if create_if_missing else None),
             )
             response = await resolver.client.stub.QueueGetOrCreate(req)
-            self._hydrate(response.queue_id, resolver.client, None)
+            self._hydrate(response.queue_id, resolver.client, response.metadata)
 
-        return _Queue._from_loader(_load, "Queue()", is_another_app=True, hydrate_lazily=True)
+        return _Queue._from_loader(_load, "Queue()", is_another_app=True, hydrate_lazily=True, name=name)
 
     @staticmethod
     async def lookup(
@@ -222,6 +254,17 @@ class _Queue(_Object, type_prefix="qu"):
         req = api_pb2.QueueDeleteRequest(queue_id=obj.object_id)
         await retry_transient_errors(obj._client.stub.QueueDelete, req)
 
+    @live_method
+    async def info(self) -> QueueInfo:
+        """Return information about the Queue object."""
+        metadata = self._get_metadata()
+        creation_info = metadata.creation_info
+        return QueueInfo(
+            name=metadata.name or None,
+            created_at=timestamp_to_localized_dt(creation_info.created_at),
+            created_by=creation_info.created_by or None,
+        )
+
     async def _get_nonblocking(self, partition: Optional[str], n_values: int) -> list[Any]:
         request = api_pb2.QueueGetRequest(
             queue_id=self.object_id,
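
The queue.py hunks above add a QueueInfo dataclass and a Queue.info() method, backed by QueueMetadata that the server now returns when the object is hydrated. A minimal usage sketch inferred from this diff (the queue name "my-queue" is hypothetical):

    import modal

    q = modal.Queue.from_name("my-queue", create_if_missing=True)
    info = q.info()  # hydrates lazily via @live_method, then reads the stored metadata
    print(info.name, info.created_at, info.created_by)

Because the metadata is captured at hydration time, info() reflects creation-time facts (name, created_at, created_by) rather than live state.
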
modal/queue.pyi CHANGED
@@ -1,11 +1,35 @@
 import collections.abc
+import datetime
+import google.protobuf.message
 import modal._object
 import modal.client
 import modal.object
+import modal_proto.api_pb2
 import synchronicity.combined_types
 import typing
 import typing_extensions
 
+class QueueInfo:
+    """Information about the Queue object."""
+
+    name: typing.Optional[str]
+    created_at: datetime.datetime
+    created_by: typing.Optional[str]
+
+    def __init__(
+        self, name: typing.Optional[str], created_at: datetime.datetime, created_by: typing.Optional[str]
+    ) -> None:
+        """Initialize self. See help(type(self)) for accurate signature."""
+        ...
+
+    def __repr__(self):
+        """Return repr(self)."""
+        ...
+
+    def __eq__(self, other):
+        """Return self==value."""
+        ...
+
 class _Queue(modal._object._Object):
     """Distributed, FIFO queue for data flow in Modal apps.
 
@@ -78,10 +102,17 @@ class _Queue(modal._object._Object):
 
     Partition keys must be non-empty and must not exceed 64 bytes.
     """
+
+    _metadata: typing.Optional[modal_proto.api_pb2.QueueMetadata]
+
     def __init__(self):
         """mdmd:hidden"""
         ...
 
+    @property
+    def name(self) -> typing.Optional[str]: ...
+    def _hydrate_metadata(self, metadata: typing.Optional[google.protobuf.message.Message]): ...
+    def _get_metadata(self) -> modal_proto.api_pb2.QueueMetadata: ...
     @staticmethod
     def validate_partition_key(partition: typing.Optional[str]) -> bytes: ...
     @classmethod
@@ -155,6 +186,10 @@ class _Queue(modal._object._Object):
         client: typing.Optional[modal.client._Client] = None,
         environment_name: typing.Optional[str] = None,
     ): ...
+    async def info(self) -> QueueInfo:
+        """Return information about the Queue object."""
+        ...
+
     async def _get_nonblocking(self, partition: typing.Optional[str], n_values: int) -> list[typing.Any]: ...
     async def _get_blocking(
         self, partition: typing.Optional[str], timeout: typing.Optional[float], n_values: int
@@ -335,10 +370,17 @@ class Queue(modal.object.Object):
 
     Partition keys must be non-empty and must not exceed 64 bytes.
     """
+
+    _metadata: typing.Optional[modal_proto.api_pb2.QueueMetadata]
+
     def __init__(self):
         """mdmd:hidden"""
         ...
 
+    @property
+    def name(self) -> typing.Optional[str]: ...
+    def _hydrate_metadata(self, metadata: typing.Optional[google.protobuf.message.Message]): ...
+    def _get_metadata(self) -> modal_proto.api_pb2.QueueMetadata: ...
     @staticmethod
     def validate_partition_key(partition: typing.Optional[str]) -> bytes: ...
     @classmethod
@@ -453,6 +495,17 @@ class Queue(modal.object.Object):
 
     delete: __delete_spec
 
+    class __info_spec(typing_extensions.Protocol[SUPERSELF]):
+        def __call__(self, /) -> QueueInfo:
+            """Return information about the Queue object."""
+            ...
+
+        async def aio(self, /) -> QueueInfo:
+            """Return information about the Queue object."""
+            ...
+
+    info: __info_spec[typing_extensions.Self]
+
     class ___get_nonblocking_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /, partition: typing.Optional[str], n_values: int) -> list[typing.Any]: ...
         async def aio(self, /, partition: typing.Optional[str], n_values: int) -> list[typing.Any]: ...
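
The stubs above also show how the new method is exposed through synchronicity: on the wrapped Queue class, info is an __info_spec protocol attribute whose __call__ is blocking and whose .aio coroutine serves async callers. A hedged sketch of the async form, assuming a queue named "my-queue" already exists:

    import asyncio
    import modal

    async def show_queue_info() -> None:
        q = modal.Queue.from_name("my-queue")
        info = await q.info.aio()  # async variant per the __info_spec stub
        print(info.created_at, info.created_by)

    asyncio.run(show_queue_info())
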
modal/sandbox.py CHANGED
@@ -27,7 +27,7 @@ from ._utils.mount_utils import validate_network_file_systems, validate_volumes
 from .client import _Client
 from .config import config
 from .container_process import _ContainerProcess
-from .exception import ExecutionError, InvalidError, SandboxTerminatedError, SandboxTimeoutError
+from .exception import AlreadyExistsError, ExecutionError, InvalidError, SandboxTerminatedError, SandboxTimeoutError
 from .file_io import FileWatchEvent, FileWatchEventType, _FileIO
 from .gpu import GPU_T
 from .image import _Image
@@ -60,19 +60,33 @@ if TYPE_CHECKING:
     import modal.app
 
 
-def _validate_exec_args(entrypoint_args: Sequence[str]) -> None:
+def _validate_exec_args(args: Sequence[str]) -> None:
     # Entrypoint args must be strings.
-    if not all(isinstance(arg, str) for arg in entrypoint_args):
+    if not all(isinstance(arg, str) for arg in args):
         raise InvalidError("All entrypoint arguments must be strings")
     # Avoid "[Errno 7] Argument list too long" errors.
-    total_arg_len = sum(len(arg) for arg in entrypoint_args)
+    total_arg_len = sum(len(arg) for arg in args)
     if total_arg_len > ARG_MAX_BYTES:
         raise InvalidError(
-            f"Total length of entrypoint arguments must be less than {ARG_MAX_BYTES} bytes (ARG_MAX). "
+            f"Total length of CMD arguments must be less than {ARG_MAX_BYTES} bytes (ARG_MAX). "
             f"Got {total_arg_len} bytes."
         )
 
 
+class DefaultSandboxNameOverride(str):
+    """A singleton class that represents the default sandbox name override.
+
+    It is used to indicate that the sandbox name should not be overridden.
+    """
+
+    def __repr__(self) -> str:
+        # NOTE: this must match the instance var name below in order for type stubs to work 😬
+        return "_DEFAULT_SANDBOX_NAME_OVERRIDE"
+
+
+_DEFAULT_SANDBOX_NAME_OVERRIDE = DefaultSandboxNameOverride()
+
+
 class _Sandbox(_Object, type_prefix="sb"):
     """A `Sandbox` object lets you interact with a running sandbox. This API is similar to Python's
     [asyncio.subprocess.Process](https://docs.python.org/3/library/asyncio-subprocess.html#asyncio.subprocess.Process).
@@ -90,9 +104,10 @@ class _Sandbox(_Object, type_prefix="sb"):
 
     @staticmethod
     def _new(
-        entrypoint_args: Sequence[str],
+        args: Sequence[str],
         image: _Image,
         secrets: Sequence[_Secret],
+        name: Optional[str] = None,
         timeout: Optional[int] = None,
         workdir: Optional[str] = None,
         gpu: GPU_T = None,
@@ -110,6 +125,7 @@ class _Sandbox(_Object, type_prefix="sb"):
         h2_ports: Sequence[int] = [],
         unencrypted_ports: Sequence[int] = [],
         proxy: Optional[_Proxy] = None,
+        experimental_options: Optional[dict[str, bool]] = None,
         _experimental_scheduler_placement: Optional[SchedulerPlacement] = None,
         enable_snapshot: bool = False,
         verbose: bool = False,
@@ -192,7 +208,7 @@ class _Sandbox(_Object, type_prefix="sb"):
 
             ephemeral_disk = None  # Ephemeral disk requests not supported on Sandboxes.
             definition = api_pb2.Sandbox(
-                entrypoint_args=entrypoint_args,
+                entrypoint_args=args,
                 image_id=image.object_id,
                 mount_ids=[mount.object_id for mount in mounts] + [mount.object_id for mount in image._mount_layers],
                 secret_ids=[secret.object_id for secret in secrets],
@@ -215,10 +231,17 @@ class _Sandbox(_Object, type_prefix="sb"):
                 proxy_id=(proxy.object_id if proxy else None),
                 enable_snapshot=enable_snapshot,
                 verbose=verbose,
+                name=name,
+                experimental_options=experimental_options,
             )
 
             create_req = api_pb2.SandboxCreateRequest(app_id=resolver.app_id, definition=definition)
-            create_resp = await retry_transient_errors(resolver.client.stub.SandboxCreate, create_req)
+            try:
+                create_resp = await retry_transient_errors(resolver.client.stub.SandboxCreate, create_req)
+            except GRPCError as exc:
+                if exc.status == Status.ALREADY_EXISTS:
+                    raise AlreadyExistsError(exc.message)
+                raise exc
 
             sandbox_id = create_resp.sandbox_id
             self._hydrate(sandbox_id, resolver.client, None)
@@ -227,8 +250,10 @@ class _Sandbox(_Object, type_prefix="sb"):
 
     @staticmethod
     async def create(
-        *entrypoint_args: str,
-        app: Optional["modal.app._App"] = None,  # Optionally associate the sandbox with an app
+        *args: str,  # Set the CMD of the Sandbox, overriding any CMD of the container image.
+        # Associate the sandbox with an app. Required unless creating from a container.
+        app: Optional["modal.app._App"] = None,
+        name: Optional[str] = None,  # Optionally give the sandbox a name. Unique within an app.
         image: Optional[_Image] = None,  # The image to run as the container for the sandbox.
         secrets: Sequence[_Secret] = (),  # Environment variables to inject into the sandbox.
         network_file_systems: dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
@@ -261,6 +286,7 @@ class _Sandbox(_Object, type_prefix="sb"):
         proxy: Optional[_Proxy] = None,
         # Enable verbose logging for sandbox operations.
         verbose: bool = False,
+        experimental_options: Optional[dict[str, bool]] = None,
         # Enable memory snapshots.
         _experimental_enable_snapshot: bool = False,
         _experimental_scheduler_placement: Optional[
@@ -290,8 +316,9 @@ class _Sandbox(_Object, type_prefix="sb"):
         )
 
         return await _Sandbox._create(
-            *entrypoint_args,
+            *args,
             app=app,
+            name=name,
             image=image,
             secrets=secrets,
             network_file_systems=network_file_systems,
@@ -310,6 +337,7 @@ class _Sandbox(_Object, type_prefix="sb"):
             h2_ports=h2_ports,
             unencrypted_ports=unencrypted_ports,
             proxy=proxy,
+            experimental_options=experimental_options,
             _experimental_enable_snapshot=_experimental_enable_snapshot,
             _experimental_scheduler_placement=_experimental_scheduler_placement,
             client=client,
@@ -318,8 +346,10 @@ class _Sandbox(_Object, type_prefix="sb"):
 
     @staticmethod
     async def _create(
-        *entrypoint_args: str,
-        app: Optional["modal.app._App"] = None,  # Optionally associate the sandbox with an app
+        *args: str,  # Set the CMD of the Sandbox, overriding any CMD of the container image.
+        # Associate the sandbox with an app. Required unless creating from a container.
+        app: Optional["modal.app._App"] = None,
+        name: Optional[str] = None,  # Optionally give the sandbox a name. Unique within an app.
         image: Optional[_Image] = None,  # The image to run as the container for the sandbox.
         secrets: Sequence[_Secret] = (),  # Environment variables to inject into the sandbox.
         mounts: Sequence[_Mount] = (),
@@ -351,6 +381,7 @@ class _Sandbox(_Object, type_prefix="sb"):
         unencrypted_ports: Sequence[int] = [],
         # Reference to a Modal Proxy to use in front of this Sandbox.
         proxy: Optional[_Proxy] = None,
+        experimental_options: Optional[dict[str, bool]] = None,
         # Enable memory snapshots.
         _experimental_enable_snapshot: bool = False,
         _experimental_scheduler_placement: Optional[
@@ -364,13 +395,14 @@ class _Sandbox(_Object, type_prefix="sb"):
         # sandbox that runs the shell session
        from .app import _App
 
-        _validate_exec_args(entrypoint_args)
+        _validate_exec_args(args)
 
         # TODO(erikbern): Get rid of the `_new` method and create an already-hydrated object
         obj = _Sandbox._new(
-            entrypoint_args,
+            args,
             image=image or _default_image,
             secrets=secrets,
+            name=name,
             timeout=timeout,
             workdir=workdir,
             gpu=gpu,
@@ -388,6 +420,7 @@ class _Sandbox(_Object, type_prefix="sb"):
             h2_ports=h2_ports,
             unencrypted_ports=unencrypted_ports,
             proxy=proxy,
+            experimental_options=experimental_options,
             _experimental_scheduler_placement=_experimental_scheduler_placement,
             enable_snapshot=_experimental_enable_snapshot,
             verbose=verbose,
@@ -437,6 +470,27 @@ class _Sandbox(_Object, type_prefix="sb"):
         self._stdin = StreamWriter(self.object_id, "sandbox", self._client)
         self._result = None
 
+    @staticmethod
+    async def from_name(
+        app_name: str,
+        name: str,
+        *,
+        environment_name: Optional[str] = None,
+        client: Optional[_Client] = None,
+    ) -> "_Sandbox":
+        """Get a running Sandbox by name from the given app.
+
+        Raises a modal.exception.NotFoundError if no running sandbox is found with the given name.
+        A Sandbox's name is the `name` argument passed to `Sandbox.create`.
+        """
+        if client is None:
+            client = await _Client.from_env()
+        env_name = _get_environment_name(environment_name)
+
+        req = api_pb2.SandboxGetFromNameRequest(sandbox_name=name, app_name=app_name, environment_name=env_name)
+        resp = await retry_transient_errors(client.stub.SandboxGetFromName, req)
+        return _Sandbox._new_hydrated(resp.sandbox_id, client, None)
+
     @staticmethod
     async def from_id(sandbox_id: str, client: Optional[_Client] = None) -> "_Sandbox":
         """Construct a Sandbox from an id and look up the Sandbox result.
@@ -594,7 +648,7 @@ class _Sandbox(_Object, type_prefix="sb"):
     @overload
     async def exec(
         self,
-        *cmds: str,
+        *args: str,
         pty_info: Optional[api_pb2.PTYInfo] = None,
         stdout: StreamType = StreamType.PIPE,
         stderr: StreamType = StreamType.PIPE,
@@ -609,7 +663,7 @@ class _Sandbox(_Object, type_prefix="sb"):
     @overload
     async def exec(
         self,
-        *cmds: str,
+        *args: str,
         pty_info: Optional[api_pb2.PTYInfo] = None,
         stdout: StreamType = StreamType.PIPE,
         stderr: StreamType = StreamType.PIPE,
@@ -623,7 +677,7 @@ class _Sandbox(_Object, type_prefix="sb"):
 
     async def exec(
         self,
-        *cmds: str,
+        *args: str,
         pty_info: Optional[api_pb2.PTYInfo] = None,  # Deprecated: internal use only
         stdout: StreamType = StreamType.PIPE,
         stderr: StreamType = StreamType.PIPE,
@@ -659,7 +713,7 @@ class _Sandbox(_Object, type_prefix="sb"):
 
         if workdir is not None and not workdir.startswith("/"):
             raise InvalidError(f"workdir must be an absolute path, got: {workdir}")
-        _validate_exec_args(cmds)
+        _validate_exec_args(args)
 
         # Force secret resolution so we can pass the secret IDs to the backend.
         secret_coros = [secret.hydrate(client=self._client) for secret in secrets]
@@ -668,7 +722,7 @@ class _Sandbox(_Object, type_prefix="sb"):
         task_id = await self._get_task_id()
         req = api_pb2.ContainerExecRequest(
             task_id=task_id,
-            command=cmds,
+            command=args,
             pty_info=_pty_info or pty_info,
             runtime_debug=config.get("function_runtime_debug"),
             timeout_secs=timeout or 0,
@@ -713,13 +767,39 @@ class _Sandbox(_Object, type_prefix="sb"):
         return obj
 
     @staticmethod
-    async def _experimental_from_snapshot(snapshot: _SandboxSnapshot, client: Optional[_Client] = None):
+    async def _experimental_from_snapshot(
+        snapshot: _SandboxSnapshot,
+        client: Optional[_Client] = None,
+        *,
+        name: Optional[str] = _DEFAULT_SANDBOX_NAME_OVERRIDE,
+    ):
         client = client or await _Client.from_env()
 
-        restore_req = api_pb2.SandboxRestoreRequest(snapshot_id=snapshot.object_id)
-        restore_resp: api_pb2.SandboxRestoreResponse = await retry_transient_errors(
-            client.stub.SandboxRestore, restore_req
-        )
+        if name is _DEFAULT_SANDBOX_NAME_OVERRIDE:
+            restore_req = api_pb2.SandboxRestoreRequest(
+                snapshot_id=snapshot.object_id,
+                sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_UNSPECIFIED,
+            )
+        elif name is None:
+            restore_req = api_pb2.SandboxRestoreRequest(
+                snapshot_id=snapshot.object_id,
+                sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_NONE,
+            )
+        else:
+            restore_req = api_pb2.SandboxRestoreRequest(
+                snapshot_id=snapshot.object_id,
+                sandbox_name_override=name,
+                sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_STRING,
+            )
+        try:
+            restore_resp: api_pb2.SandboxRestoreResponse = await retry_transient_errors(
+                client.stub.SandboxRestore, restore_req
+            )
+        except GRPCError as exc:
+            if exc.status == Status.ALREADY_EXISTS:
+                raise AlreadyExistsError(exc.message)
+            raise exc
+
         sandbox = await _Sandbox.from_id(restore_resp.sandbox_id, client)
 
         task_id_req = api_pb2.SandboxGetTaskIdRequest(
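
Taken together, the sandbox.py hunks above introduce named Sandboxes: Sandbox.create() accepts a name that must be unique within the app (a clash surfaces as modal.exception.AlreadyExistsError instead of a raw GRPCError), and the new Sandbox.from_name() looks up a running sandbox by app name and sandbox name. A rough usage sketch inferred from the diff; the app and sandbox names are hypothetical:

    import modal

    app = modal.App.lookup("example-app", create_if_missing=True)

    # Creating a second sandbox with the same name in this app raises AlreadyExistsError.
    sb = modal.Sandbox.create("sleep", "infinity", app=app, name="worker-1")

    # Elsewhere, fetch the running sandbox by name; NotFoundError if it is not running.
    same_sb = modal.Sandbox.from_name("example-app", "worker-1")
    same_sb.terminate()
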