modal 1.0.3.dev10__py3-none-any.whl → 1.2.3.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of modal might be problematic.

Files changed (160)
  1. modal/__init__.py +0 -2
  2. modal/__main__.py +3 -4
  3. modal/_billing.py +80 -0
  4. modal/_clustered_functions.py +7 -3
  5. modal/_clustered_functions.pyi +15 -3
  6. modal/_container_entrypoint.py +51 -69
  7. modal/_functions.py +508 -240
  8. modal/_grpc_client.py +171 -0
  9. modal/_load_context.py +105 -0
  10. modal/_object.py +81 -21
  11. modal/_output.py +58 -45
  12. modal/_partial_function.py +48 -73
  13. modal/_pty.py +7 -3
  14. modal/_resolver.py +26 -46
  15. modal/_runtime/asgi.py +4 -3
  16. modal/_runtime/container_io_manager.py +358 -220
  17. modal/_runtime/container_io_manager.pyi +296 -101
  18. modal/_runtime/execution_context.py +18 -2
  19. modal/_runtime/execution_context.pyi +64 -7
  20. modal/_runtime/gpu_memory_snapshot.py +262 -57
  21. modal/_runtime/user_code_imports.py +28 -58
  22. modal/_serialization.py +90 -6
  23. modal/_traceback.py +42 -1
  24. modal/_tunnel.pyi +380 -12
  25. modal/_utils/async_utils.py +84 -29
  26. modal/_utils/auth_token_manager.py +111 -0
  27. modal/_utils/blob_utils.py +181 -58
  28. modal/_utils/deprecation.py +19 -0
  29. modal/_utils/function_utils.py +91 -47
  30. modal/_utils/grpc_utils.py +89 -66
  31. modal/_utils/mount_utils.py +26 -1
  32. modal/_utils/name_utils.py +17 -3
  33. modal/_utils/task_command_router_client.py +536 -0
  34. modal/_utils/time_utils.py +34 -6
  35. modal/app.py +256 -88
  36. modal/app.pyi +909 -92
  37. modal/billing.py +5 -0
  38. modal/builder/2025.06.txt +18 -0
  39. modal/builder/PREVIEW.txt +18 -0
  40. modal/builder/base-images.json +58 -0
  41. modal/cli/_download.py +19 -3
  42. modal/cli/_traceback.py +3 -2
  43. modal/cli/app.py +4 -4
  44. modal/cli/cluster.py +15 -7
  45. modal/cli/config.py +5 -3
  46. modal/cli/container.py +7 -6
  47. modal/cli/dict.py +22 -16
  48. modal/cli/entry_point.py +12 -5
  49. modal/cli/environment.py +5 -4
  50. modal/cli/import_refs.py +3 -3
  51. modal/cli/launch.py +102 -5
  52. modal/cli/network_file_system.py +11 -12
  53. modal/cli/profile.py +3 -2
  54. modal/cli/programs/launch_instance_ssh.py +94 -0
  55. modal/cli/programs/run_jupyter.py +1 -1
  56. modal/cli/programs/run_marimo.py +95 -0
  57. modal/cli/programs/vscode.py +1 -1
  58. modal/cli/queues.py +57 -26
  59. modal/cli/run.py +91 -23
  60. modal/cli/secret.py +48 -22
  61. modal/cli/token.py +7 -8
  62. modal/cli/utils.py +4 -7
  63. modal/cli/volume.py +31 -25
  64. modal/client.py +15 -85
  65. modal/client.pyi +183 -62
  66. modal/cloud_bucket_mount.py +5 -3
  67. modal/cloud_bucket_mount.pyi +197 -5
  68. modal/cls.py +200 -126
  69. modal/cls.pyi +446 -68
  70. modal/config.py +29 -11
  71. modal/container_process.py +319 -19
  72. modal/container_process.pyi +190 -20
  73. modal/dict.py +290 -71
  74. modal/dict.pyi +835 -83
  75. modal/environments.py +15 -27
  76. modal/environments.pyi +46 -24
  77. modal/exception.py +14 -2
  78. modal/experimental/__init__.py +194 -40
  79. modal/experimental/flash.py +618 -0
  80. modal/experimental/flash.pyi +380 -0
  81. modal/experimental/ipython.py +11 -7
  82. modal/file_io.py +29 -36
  83. modal/file_io.pyi +251 -53
  84. modal/file_pattern_matcher.py +56 -16
  85. modal/functions.pyi +673 -92
  86. modal/gpu.py +1 -1
  87. modal/image.py +528 -176
  88. modal/image.pyi +1572 -145
  89. modal/io_streams.py +458 -128
  90. modal/io_streams.pyi +433 -52
  91. modal/mount.py +216 -151
  92. modal/mount.pyi +225 -78
  93. modal/network_file_system.py +45 -62
  94. modal/network_file_system.pyi +277 -56
  95. modal/object.pyi +93 -17
  96. modal/parallel_map.py +942 -129
  97. modal/parallel_map.pyi +294 -15
  98. modal/partial_function.py +0 -2
  99. modal/partial_function.pyi +234 -19
  100. modal/proxy.py +17 -8
  101. modal/proxy.pyi +36 -3
  102. modal/queue.py +270 -65
  103. modal/queue.pyi +817 -57
  104. modal/runner.py +115 -101
  105. modal/runner.pyi +205 -49
  106. modal/sandbox.py +512 -136
  107. modal/sandbox.pyi +845 -111
  108. modal/schedule.py +1 -1
  109. modal/secret.py +300 -70
  110. modal/secret.pyi +589 -34
  111. modal/serving.py +7 -11
  112. modal/serving.pyi +7 -8
  113. modal/snapshot.py +11 -8
  114. modal/snapshot.pyi +25 -4
  115. modal/token_flow.py +4 -4
  116. modal/token_flow.pyi +28 -8
  117. modal/volume.py +416 -158
  118. modal/volume.pyi +1117 -121
  119. {modal-1.0.3.dev10.dist-info → modal-1.2.3.dev7.dist-info}/METADATA +10 -9
  120. modal-1.2.3.dev7.dist-info/RECORD +195 -0
  121. modal_docs/mdmd/mdmd.py +17 -4
  122. modal_proto/api.proto +534 -79
  123. modal_proto/api_grpc.py +337 -1
  124. modal_proto/api_pb2.py +1522 -968
  125. modal_proto/api_pb2.pyi +1619 -134
  126. modal_proto/api_pb2_grpc.py +699 -4
  127. modal_proto/api_pb2_grpc.pyi +226 -14
  128. modal_proto/modal_api_grpc.py +175 -154
  129. modal_proto/sandbox_router.proto +145 -0
  130. modal_proto/sandbox_router_grpc.py +105 -0
  131. modal_proto/sandbox_router_pb2.py +149 -0
  132. modal_proto/sandbox_router_pb2.pyi +333 -0
  133. modal_proto/sandbox_router_pb2_grpc.py +203 -0
  134. modal_proto/sandbox_router_pb2_grpc.pyi +75 -0
  135. modal_proto/task_command_router.proto +144 -0
  136. modal_proto/task_command_router_grpc.py +105 -0
  137. modal_proto/task_command_router_pb2.py +149 -0
  138. modal_proto/task_command_router_pb2.pyi +333 -0
  139. modal_proto/task_command_router_pb2_grpc.py +203 -0
  140. modal_proto/task_command_router_pb2_grpc.pyi +75 -0
  141. modal_version/__init__.py +1 -1
  142. modal/requirements/PREVIEW.txt +0 -16
  143. modal/requirements/base-images.json +0 -26
  144. modal-1.0.3.dev10.dist-info/RECORD +0 -179
  145. modal_proto/modal_options_grpc.py +0 -3
  146. modal_proto/options.proto +0 -19
  147. modal_proto/options_grpc.py +0 -3
  148. modal_proto/options_pb2.py +0 -35
  149. modal_proto/options_pb2.pyi +0 -20
  150. modal_proto/options_pb2_grpc.py +0 -4
  151. modal_proto/options_pb2_grpc.pyi +0 -7
  152. /modal/{requirements → builder}/2023.12.312.txt +0 -0
  153. /modal/{requirements → builder}/2023.12.txt +0 -0
  154. /modal/{requirements → builder}/2024.04.txt +0 -0
  155. /modal/{requirements → builder}/2024.10.txt +0 -0
  156. /modal/{requirements → builder}/README.md +0 -0
  157. {modal-1.0.3.dev10.dist-info → modal-1.2.3.dev7.dist-info}/WHEEL +0 -0
  158. {modal-1.0.3.dev10.dist-info → modal-1.2.3.dev7.dist-info}/entry_points.txt +0 -0
  159. {modal-1.0.3.dev10.dist-info → modal-1.2.3.dev7.dist-info}/licenses/LICENSE +0 -0
  160. {modal-1.0.3.dev10.dist-info → modal-1.2.3.dev7.dist-info}/top_level.txt +0 -0
modal/sandbox.py CHANGED
@@ -1,8 +1,15 @@
  # Copyright Modal Labs 2022
  import asyncio
+ import json
  import os
- from collections.abc import AsyncGenerator, Sequence
- from typing import TYPE_CHECKING, AsyncIterator, Literal, Optional, Union, overload
+ import time
+ import uuid
+ from collections.abc import AsyncGenerator, Collection, Sequence
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any, AsyncIterator, Literal, Optional, Union, overload
+
+ from ._pty import get_pty_info
+ from .config import config, logger

  if TYPE_CHECKING:
  import _typeshed
@@ -14,18 +21,20 @@ from modal._tunnel import Tunnel
  from modal.cloud_bucket_mount import _CloudBucketMount, cloud_bucket_mounts_to_proto
  from modal.mount import _Mount
  from modal.volume import _Volume
- from modal_proto import api_pb2
+ from modal_proto import api_pb2, task_command_router_pb2 as sr_pb2

+ from ._load_context import LoadContext
  from ._object import _get_environment_name, _Object
  from ._resolver import Resolver
  from ._resources import convert_fn_config_to_resources_config
  from ._utils.async_utils import TaskContext, synchronize_api
- from ._utils.grpc_utils import retry_transient_errors
+ from ._utils.deprecation import deprecation_warning
  from ._utils.mount_utils import validate_network_file_systems, validate_volumes
+ from ._utils.name_utils import check_object_name
+ from ._utils.task_command_router_client import TaskCommandRouterClient
  from .client import _Client
- from .config import config
  from .container_process import _ContainerProcess
- from .exception import ExecutionError, InvalidError, SandboxTerminatedError, SandboxTimeoutError
+ from .exception import AlreadyExistsError, ExecutionError, InvalidError, SandboxTerminatedError, SandboxTimeoutError
  from .file_io import FileWatchEvent, FileWatchEventType, _FileIO
  from .gpu import GPU_T
  from .image import _Image
@@ -49,44 +58,78 @@ _default_image: _Image = _Image.debian_slim()
  # e.g. 'runsc exec ...'. So we use 2**16 as the limit.
  ARG_MAX_BYTES = 2**16

+ # This buffer extends the user-supplied timeout on ContainerExec-related RPCs. This was introduced to
+ # give any in-flight status codes/IO data more time to reach the client before the stream is closed.
+ CONTAINER_EXEC_TIMEOUT_BUFFER = 5
+
+
  if TYPE_CHECKING:
  import modal.app


- def _validate_exec_args(entrypoint_args: Sequence[str]) -> None:
+ def _validate_exec_args(args: Sequence[str]) -> None:
  # Entrypoint args must be strings.
- if not all(isinstance(arg, str) for arg in entrypoint_args):
+ if not all(isinstance(arg, str) for arg in args):
  raise InvalidError("All entrypoint arguments must be strings")
  # Avoid "[Errno 7] Argument list too long" errors.
- total_arg_len = sum(len(arg) for arg in entrypoint_args)
+ total_arg_len = sum(len(arg) for arg in args)
  if total_arg_len > ARG_MAX_BYTES:
  raise InvalidError(
- f"Total length of entrypoint arguments must be less than {ARG_MAX_BYTES} bytes (ARG_MAX). "
+ f"Total length of CMD arguments must be less than {ARG_MAX_BYTES} bytes (ARG_MAX). "
  f"Got {total_arg_len} bytes."
  )


+ class DefaultSandboxNameOverride(str):
+ """A singleton class that represents the default sandbox name override.
+
+ It is used to indicate that the sandbox name should not be overridden.
+ """
+
+ def __repr__(self) -> str:
+ # NOTE: this must match the instance var name below in order for type stubs to work 😬
+ return "_DEFAULT_SANDBOX_NAME_OVERRIDE"
+
+
+ _DEFAULT_SANDBOX_NAME_OVERRIDE = DefaultSandboxNameOverride()
+
+
+ @dataclass(frozen=True)
+ class SandboxConnectCredentials:
+ """Simple data structure storing credentials for making HTTP connections to a sandbox."""
+
+ url: str
+ token: str
+
+
  class _Sandbox(_Object, type_prefix="sb"):
  """A `Sandbox` object lets you interact with a running sandbox. This API is similar to Python's
  [asyncio.subprocess.Process](https://docs.python.org/3/library/asyncio-subprocess.html#asyncio.subprocess.Process).

- Refer to the [guide](/docs/guide/sandbox) on how to spawn and use sandboxes.
+ Refer to the [guide](https://modal.com/docs/guide/sandbox) on how to spawn and use sandboxes.
  """

  _result: Optional[api_pb2.GenericResult]
  _stdout: _StreamReader[str]
  _stderr: _StreamReader[str]
  _stdin: _StreamWriter
- _task_id: Optional[str] = None
- _tunnels: Optional[dict[int, Tunnel]] = None
- _enable_snapshot: bool = False
+ _task_id: Optional[str]
+ _tunnels: Optional[dict[int, Tunnel]]
+ _enable_snapshot: bool
+ _command_router_client: Optional[TaskCommandRouterClient]
+
+ @staticmethod
+ def _default_pty_info() -> api_pb2.PTYInfo:
+ return get_pty_info(shell=True, no_terminate_on_idle_stdin=True)

  @staticmethod
  def _new(
- entrypoint_args: Sequence[str],
+ args: Sequence[str],
  image: _Image,
- secrets: Sequence[_Secret],
- timeout: Optional[int] = None,
+ secrets: Collection[_Secret],
+ name: Optional[str] = None,
+ timeout: int = 300,
+ idle_timeout: Optional[int] = None,
  workdir: Optional[str] = None,
  gpu: GPU_T = None,
  cloud: Optional[str] = None,
@@ -98,12 +141,16 @@ class _Sandbox(_Object, type_prefix="sb"):
  block_network: bool = False,
  cidr_allowlist: Optional[Sequence[str]] = None,
  volumes: dict[Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]] = {},
- pty_info: Optional[api_pb2.PTYInfo] = None,
+ pty: bool = False,
+ pty_info: Optional[api_pb2.PTYInfo] = None, # deprecated
  encrypted_ports: Sequence[int] = [],
+ h2_ports: Sequence[int] = [],
  unencrypted_ports: Sequence[int] = [],
  proxy: Optional[_Proxy] = None,
+ experimental_options: Optional[dict[str, bool]] = None,
  _experimental_scheduler_placement: Optional[SchedulerPlacement] = None,
  enable_snapshot: bool = False,
+ verbose: bool = False,
  ) -> "_Sandbox":
  """mdmd:hidden"""

@@ -129,6 +176,9 @@ class _Sandbox(_Object, type_prefix="sb"):
  cloud_bucket_mounts = [(k, v) for k, v in validated_volumes if isinstance(v, _CloudBucketMount)]
  validated_volumes = [(k, v) for k, v in validated_volumes if isinstance(v, _Volume)]

+ if pty:
+ pty_info = _Sandbox._default_pty_info()
+
  def _deps() -> list[_Object]:
  deps: list[_Object] = [image] + list(mounts) + list(secrets)
  for _, vol in validated_network_file_systems:
@@ -142,19 +192,28 @@ class _Sandbox(_Object, type_prefix="sb"):
  deps.append(proxy)
  return deps

- async def _load(self: _Sandbox, resolver: Resolver, _existing_object_id: Optional[str]):
+ async def _load(
+ self: _Sandbox, resolver: Resolver, load_context: LoadContext, _existing_object_id: Optional[str]
+ ):
  # Relies on dicts being ordered (true as of Python 3.6).
  volume_mounts = [
  api_pb2.VolumeMount(
  mount_path=path,
  volume_id=volume.object_id,
  allow_background_commits=True,
+ read_only=volume._read_only,
  )
  for path, volume in validated_volumes
  ]

  open_ports = [api_pb2.PortSpec(port=port, unencrypted=False) for port in encrypted_ports]
  open_ports.extend([api_pb2.PortSpec(port=port, unencrypted=True) for port in unencrypted_ports])
+ open_ports.extend(
+ [
+ api_pb2.PortSpec(port=port, unencrypted=False, tunnel_type=api_pb2.TUNNEL_TYPE_H2)
+ for port in h2_ports
+ ]
+ )

  if block_network:
  # If the network is blocked, cidr_allowlist is invalid as we don't allow any network access.
@@ -176,11 +235,12 @@ class _Sandbox(_Object, type_prefix="sb"):

  ephemeral_disk = None # Ephemeral disk requests not supported on Sandboxes.
  definition = api_pb2.Sandbox(
- entrypoint_args=entrypoint_args,
+ entrypoint_args=args,
  image_id=image.object_id,
  mount_ids=[mount.object_id for mount in mounts] + [mount.object_id for mount in image._mount_layers],
  secret_ids=[secret.object_id for secret in secrets],
  timeout_secs=timeout,
+ idle_timeout_secs=idle_timeout,
  workdir=workdir,
  resources=convert_fn_config_to_resources_config(
  cpu=cpu, memory=memory, gpu=gpu, ephemeral_disk=ephemeral_disk
@@ -198,28 +258,37 @@ class _Sandbox(_Object, type_prefix="sb"):
  network_access=network_access,
  proxy_id=(proxy.object_id if proxy else None),
  enable_snapshot=enable_snapshot,
+ verbose=verbose,
+ name=name,
+ experimental_options=experimental_options,
  )

- # Note - `resolver.app_id` will be `None` for app-less sandboxes
- create_req = api_pb2.SandboxCreateRequest(
- app_id=resolver.app_id, definition=definition, environment_name=resolver.environment_name
- )
- create_resp = await retry_transient_errors(resolver.client.stub.SandboxCreate, create_req)
+ create_req = api_pb2.SandboxCreateRequest(app_id=load_context.app_id, definition=definition)
+ try:
+ create_resp = await load_context.client.stub.SandboxCreate(create_req)
+ except GRPCError as exc:
+ if exc.status == Status.ALREADY_EXISTS:
+ raise AlreadyExistsError(exc.message)
+ raise exc

  sandbox_id = create_resp.sandbox_id
- self._hydrate(sandbox_id, resolver.client, None)
+ self._hydrate(sandbox_id, load_context.client, None)

- return _Sandbox._from_loader(_load, "Sandbox()", deps=_deps)
+ return _Sandbox._from_loader(_load, "Sandbox()", deps=_deps, load_context_overrides=LoadContext.empty())

  @staticmethod
  async def create(
- *entrypoint_args: str,
- app: Optional["modal.app._App"] = None, # Optionally associate the sandbox with an app
- environment_name: Optional[str] = None, # Optionally override the default environment
+ *args: str, # Set the CMD of the Sandbox, overriding any CMD of the container image.
+ # Associate the sandbox with an app. Required unless creating from a container.
+ app: Optional["modal.app._App"] = None,
+ name: Optional[str] = None, # Optionally give the sandbox a name. Unique within an app.
  image: Optional[_Image] = None, # The image to run as the container for the sandbox.
- secrets: Sequence[_Secret] = (), # Environment variables to inject into the sandbox.
+ env: Optional[dict[str, Optional[str]]] = None, # Environment variables to set in the Sandbox.
+ secrets: Optional[Collection[_Secret]] = None, # Secrets to inject into the Sandbox as environment variables.
  network_file_systems: dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
- timeout: Optional[int] = None, # Maximum execution time of the sandbox in seconds.
+ timeout: int = 300, # Maximum lifetime of the sandbox in seconds.
+ # The amount of time in seconds that a sandbox can be idle before being terminated.
+ idle_timeout: Optional[int] = None,
  workdir: Optional[str] = None, # Working directory of the sandbox.
  gpu: GPU_T = None,
  cloud: Optional[str] = None,
@@ -237,23 +306,31 @@ class _Sandbox(_Object, type_prefix="sb"):
  volumes: dict[
  Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]
  ] = {}, # Mount points for Modal Volumes and CloudBucketMounts
- pty_info: Optional[api_pb2.PTYInfo] = None,
+ pty: bool = False, # Enable a PTY for the Sandbox
  # List of ports to tunnel into the sandbox. Encrypted ports are tunneled with TLS.
  encrypted_ports: Sequence[int] = [],
+ # List of encrypted ports to tunnel into the sandbox, using HTTP/2.
+ h2_ports: Sequence[int] = [],
  # List of ports to tunnel into the sandbox without encryption.
  unencrypted_ports: Sequence[int] = [],
  # Reference to a Modal Proxy to use in front of this Sandbox.
  proxy: Optional[_Proxy] = None,
+ # Enable verbose logging for sandbox operations.
+ verbose: bool = False,
+ experimental_options: Optional[dict[str, bool]] = None,
  # Enable memory snapshots.
  _experimental_enable_snapshot: bool = False,
  _experimental_scheduler_placement: Optional[
  SchedulerPlacement
  ] = None, # Experimental controls over fine-grained scheduling (alpha).
  client: Optional[_Client] = None,
+ environment_name: Optional[str] = None, # *DEPRECATED* Optionally override the default environment
+ pty_info: Optional[api_pb2.PTYInfo] = None, # *DEPRECATED* Use `pty` instead. `pty` will override `pty_info`.
  ) -> "_Sandbox":
  """
- Create a new Sandbox to run untrusted, arbitrary code. The Sandbox's corresponding container
- will be created asynchronously.
+ Create a new Sandbox to run untrusted, arbitrary code.
+
+ The Sandbox's corresponding container will be created asynchronously.

  **Usage**

@@ -264,14 +341,33 @@ class _Sandbox(_Object, type_prefix="sb"):
  sandbox.wait()
  ```
  """
+ if environment_name is not None:
+ deprecation_warning(
+ (2025, 7, 16),
+ "Passing `environment_name` to `Sandbox.create` is deprecated and will be removed in a future release. "
+ "A sandbox's environment is determined by the app it is associated with.",
+ )
+
+ if pty_info is not None:
+ deprecation_warning(
+ (2025, 9, 12),
+ "The `pty_info` parameter is deprecated and will be removed in a future release. "
+ "Set the `pty` parameter to `True` instead.",
+ )
+
+ secrets = secrets or []
+ if env:
+ secrets = [*secrets, _Secret.from_dict(env)]
+
  return await _Sandbox._create(
- *entrypoint_args,
+ *args,
  app=app,
- environment_name=environment_name,
+ name=name,
  image=image,
  secrets=secrets,
  network_file_systems=network_file_systems,
  timeout=timeout,
+ idle_timeout=idle_timeout,
  workdir=workdir,
  gpu=gpu,
  cloud=cloud,
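Editor's note: the hunks above add `name`, `env`, `idle_timeout`, and `pty` to `Sandbox.create` and route `env` through an ephemeral Secret. A minimal usage sketch (not part of the diff; the app name and values are placeholders):

```python
import modal

app = modal.App.lookup("sandbox-demo", create_if_missing=True)  # placeholder app name

sb = modal.Sandbox.create(
    "python", "-m", "http.server", "8080",
    app=app,
    name="web-sandbox",    # optional; unique within the app, enables Sandbox.from_name
    env={"MODE": "demo"},  # converted to a Secret via _Secret.from_dict per the diff
    timeout=600,           # maximum lifetime in seconds (default is now 300)
    idle_timeout=120,      # terminate after 120s of idleness
    encrypted_ports=[8080],
)
print(sb.object_id)
```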
@@ -281,71 +377,79 @@ class _Sandbox(_Object, type_prefix="sb"):
  block_network=block_network,
  cidr_allowlist=cidr_allowlist,
  volumes=volumes,
- pty_info=pty_info,
+ pty=pty,
  encrypted_ports=encrypted_ports,
+ h2_ports=h2_ports,
  unencrypted_ports=unencrypted_ports,
  proxy=proxy,
+ experimental_options=experimental_options,
  _experimental_enable_snapshot=_experimental_enable_snapshot,
  _experimental_scheduler_placement=_experimental_scheduler_placement,
  client=client,
+ verbose=verbose,
+ pty_info=pty_info,
  )

  @staticmethod
  async def _create(
- *entrypoint_args: str,
- app: Optional["modal.app._App"] = None, # Optionally associate the sandbox with an app
- environment_name: Optional[str] = None, # Optionally override the default environment
- image: Optional[_Image] = None, # The image to run as the container for the sandbox.
- secrets: Sequence[_Secret] = (), # Environment variables to inject into the sandbox.
+ *args: str,
+ app: Optional["modal.app._App"] = None,
+ name: Optional[str] = None,
+ image: Optional[_Image] = None,
+ env: Optional[dict[str, Optional[str]]] = None,
+ secrets: Optional[Collection[_Secret]] = None,
  mounts: Sequence[_Mount] = (),
  network_file_systems: dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
- timeout: Optional[int] = None, # Maximum execution time of the sandbox in seconds.
- workdir: Optional[str] = None, # Working directory of the sandbox.
+ timeout: int = 300,
+ idle_timeout: Optional[int] = None,
+ workdir: Optional[str] = None,
  gpu: GPU_T = None,
  cloud: Optional[str] = None,
- region: Optional[Union[str, Sequence[str]]] = None, # Region or regions to run the sandbox on.
- # Specify, in fractional CPU cores, how many CPU cores to request.
- # Or, pass (request, limit) to additionally specify a hard limit in fractional CPU cores.
- # CPU throttling will prevent a container from exceeding its specified limit.
+ region: Optional[Union[str, Sequence[str]]] = None,
  cpu: Optional[Union[float, tuple[float, float]]] = None,
- # Specify, in MiB, a memory request which is the minimum memory required.
- # Or, pass (request, limit) to additionally specify a hard limit in MiB.
  memory: Optional[Union[int, tuple[int, int]]] = None,
- block_network: bool = False, # Whether to block network access
- # List of CIDRs the sandbox is allowed to access. If None, all CIDRs are allowed.
+ block_network: bool = False,
  cidr_allowlist: Optional[Sequence[str]] = None,
- volumes: dict[
- Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]
- ] = {}, # Mount points for Modal Volumes and CloudBucketMounts
- pty_info: Optional[api_pb2.PTYInfo] = None,
- # List of ports to tunnel into the sandbox. Encrypted ports are tunneled with TLS.
+ volumes: dict[Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]] = {},
+ pty: bool = False,
  encrypted_ports: Sequence[int] = [],
- # List of ports to tunnel into the sandbox without encryption.
+ h2_ports: Sequence[int] = [],
  unencrypted_ports: Sequence[int] = [],
- # Reference to a Modal Proxy to use in front of this Sandbox.
  proxy: Optional[_Proxy] = None,
- # Enable memory snapshots.
+ experimental_options: Optional[dict[str, bool]] = None,
  _experimental_enable_snapshot: bool = False,
- _experimental_scheduler_placement: Optional[
- SchedulerPlacement
- ] = None, # Experimental controls over fine-grained scheduling (alpha).
+ _experimental_scheduler_placement: Optional[SchedulerPlacement] = None,
  client: Optional[_Client] = None,
+ verbose: bool = False,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
  ):
- # This method exposes some internal arguments (currently `mounts`) which are not in the public API
- # `mounts` is currently only used by modal shell (cli) to provide a function's mounts to the
- # sandbox that runs the shell session
+ """Private method used internally.
+
+ This method exposes some internal arguments (currently `mounts`) which are not in the public API.
+ `mounts` is currently only used by modal shell (cli) to provide a function's mounts to the
+ sandbox that runs the shell session.
+ """
  from .app import _App

- environment_name = _get_environment_name(environment_name)
+ _validate_exec_args(args)
+ if name is not None:
+ check_object_name(name, "Sandbox")
+
+ if block_network and (encrypted_ports or h2_ports or unencrypted_ports):
+ raise InvalidError("Cannot specify open ports when `block_network` is enabled")

- _validate_exec_args(entrypoint_args)
+ secrets = secrets or []
+ if env:
+ secrets = [*secrets, _Secret.from_dict(env)]

  # TODO(erikbern): Get rid of the `_new` method and create an already-hydrated object
  obj = _Sandbox._new(
- entrypoint_args,
+ args,
  image=image or _default_image,
  secrets=secrets,
+ name=name,
  timeout=timeout,
+ idle_timeout=idle_timeout,
  workdir=workdir,
  gpu=gpu,
  cloud=cloud,
@@ -357,12 +461,16 @@ class _Sandbox(_Object, type_prefix="sb"):
  block_network=block_network,
  cidr_allowlist=cidr_allowlist,
  volumes=volumes,
+ pty=pty,
  pty_info=pty_info,
  encrypted_ports=encrypted_ports,
+ h2_ports=h2_ports,
  unencrypted_ports=unencrypted_ports,
  proxy=proxy,
+ experimental_options=experimental_options,
  _experimental_scheduler_placement=_experimental_scheduler_placement,
  enable_snapshot=_experimental_enable_snapshot,
+ verbose=verbose,
  )
  obj._enable_snapshot = _experimental_enable_snapshot

@@ -381,6 +489,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  app_id = app.app_id
  app_client = app._client
  elif (container_app := _App._get_container_app()) is not None:
+ # implicit app/client provided by running in a modal Function
  app_id = container_app.app_id
  app_client = container_app._client
  else:
@@ -393,21 +502,47 @@ class _Sandbox(_Object, type_prefix="sb"):
  "```",
  )

- client = client or app_client or await _Client.from_env()
+ client = client or app_client

- resolver = Resolver(client, environment_name=environment_name, app_id=app_id)
- await resolver.load(obj)
+ resolver = Resolver()
+ load_context = LoadContext(client=client, app_id=app_id)
+ await resolver.load(obj, load_context)
  return obj

  def _hydrate_metadata(self, handle_metadata: Optional[Message]):
- self._stdout: _StreamReader[str] = StreamReader[str](
+ self._stdout = StreamReader(
  api_pb2.FILE_DESCRIPTOR_STDOUT, self.object_id, "sandbox", self._client, by_line=True
  )
- self._stderr: _StreamReader[str] = StreamReader[str](
+ self._stderr = StreamReader(
  api_pb2.FILE_DESCRIPTOR_STDERR, self.object_id, "sandbox", self._client, by_line=True
  )
  self._stdin = StreamWriter(self.object_id, "sandbox", self._client)
  self._result = None
+ self._task_id = None
+ self._tunnels = None
+ self._enable_snapshot = False
+ self._command_router_client = None
+
+ @staticmethod
+ async def from_name(
+ app_name: str,
+ name: str,
+ *,
+ environment_name: Optional[str] = None,
+ client: Optional[_Client] = None,
+ ) -> "_Sandbox":
+ """Get a running Sandbox by name from a deployed App.
+
+ Raises a modal.exception.NotFoundError if no running sandbox is found with the given name.
+ A Sandbox's name is the `name` argument passed to `Sandbox.create`.
+ """
+ if client is None:
+ client = await _Client.from_env()
+ env_name = _get_environment_name(environment_name)
+
+ req = api_pb2.SandboxGetFromNameRequest(sandbox_name=name, app_name=app_name, environment_name=env_name)
+ resp = await client.stub.SandboxGetFromName(req)
+ return _Sandbox._new_hydrated(resp.sandbox_id, client, None)

  @staticmethod
  async def from_id(sandbox_id: str, client: Optional[_Client] = None) -> "_Sandbox":
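Editor's note: a short sketch (not part of the diff) of looking up a sandbox with the new `Sandbox.from_name` added above; the app and sandbox names are placeholders.

```python
import modal

# Looks up a *running* sandbox that was created with Sandbox.create(..., name="web-sandbox").
# Raises modal.exception.NotFoundError if no running sandbox with that name exists.
sb = modal.Sandbox.from_name("sandbox-demo", "web-sandbox")
print(sb.object_id)
```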
@@ -419,7 +554,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  client = await _Client.from_env()

  req = api_pb2.SandboxWaitRequest(sandbox_id=sandbox_id, timeout=0)
- resp = await retry_transient_errors(client.stub.SandboxWait, req)
+ resp = await client.stub.SandboxWait(req)

  obj = _Sandbox._new_hydrated(sandbox_id, client, None)

@@ -428,11 +563,25 @@ class _Sandbox(_Object, type_prefix="sb"):

  return obj

- async def set_tags(self, tags: dict[str, str], *, client: Optional[_Client] = None):
+ async def get_tags(self) -> dict[str, str]:
+ """Fetches any tags (key-value pairs) currently attached to this Sandbox from the server."""
+ req = api_pb2.SandboxTagsGetRequest(sandbox_id=self.object_id)
+ try:
+ resp = await self._client.stub.SandboxTagsGet(req)
+ except GRPCError as exc:
+ raise InvalidError(exc.message) if exc.status == Status.INVALID_ARGUMENT else exc
+
+ return {tag.tag_name: tag.tag_value for tag in resp.tags}
+
+ async def set_tags(self, tags: dict[str, str], *, client: Optional[_Client] = None) -> None:
  """Set tags (key-value pairs) on the Sandbox. Tags can be used to filter results in `Sandbox.list`."""
  environment_name = _get_environment_name()
- if client is None:
- client = await _Client.from_env()
+ if client is not None:
+ deprecation_warning(
+ (2025, 9, 18),
+ "The `client` parameter is deprecated. Set `client` when creating the Sandbox instead "
+ "(in e.g. `Sandbox.create()`/`.from_id()`/`.from_name()`).",
+ )

  tags_list = [api_pb2.SandboxTag(tag_name=name, tag_value=value) for name, value in tags.items()]

@@ -442,7 +591,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  tags=tags_list,
  )
  try:
- await retry_transient_errors(client.stub.SandboxTagsSet, req)
+ await self._client.stub.SandboxTagsSet(req)
  except GRPCError as exc:
  raise InvalidError(exc.message) if exc.status == Status.INVALID_ARGUMENT else exc

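Editor's note: a sketch (not part of the diff) pairing the new `get_tags` with `set_tags`; it assumes `sb` is a Sandbox handle obtained elsewhere.

```python
# Assumes `sb` is a Sandbox handle (e.g. from Sandbox.create or Sandbox.from_name).
sb.set_tags({"team": "ml", "stage": "dev"})
print(sb.get_tags())  # expected: {"team": "ml", "stage": "dev"}
# Per the set_tags docstring, tags can then be used to filter results in Sandbox.list.
```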
@@ -454,7 +603,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  """
  await self._get_task_id() # Ensure the sandbox has started
  req = api_pb2.SandboxSnapshotFsRequest(sandbox_id=self.object_id, timeout=timeout)
- resp = await retry_transient_errors(self._client.stub.SandboxSnapshotFs, req)
+ resp = await self._client.stub.SandboxSnapshotFs(req)

  if resp.result.status != api_pb2.GenericResult.GENERIC_STATUS_SUCCESS:
  raise ExecutionError(resp.result.exception)
@@ -462,12 +611,13 @@ class _Sandbox(_Object, type_prefix="sb"):
  image_id = resp.image_id
  metadata = resp.image_metadata

- async def _load(self: _Image, resolver: Resolver, existing_object_id: Optional[str]):
+ async def _load(self: _Image, resolver: Resolver, load_context: LoadContext, existing_object_id: Optional[str]):
  # no need to hydrate again since we do it eagerly below
  pass

  rep = "Image()"
- image = _Image._from_loader(_load, rep, hydrate_lazily=True)
+ # TODO: use ._new_hydrated instead
+ image = _Image._from_loader(_load, rep, hydrate_lazily=True, load_context_overrides=LoadContext.empty())
  image._hydrate(image_id, self._client, metadata) # hydrating eagerly since we have all of the data

  return image
@@ -479,8 +629,9 @@ class _Sandbox(_Object, type_prefix="sb"):

  while True:
  req = api_pb2.SandboxWaitRequest(sandbox_id=self.object_id, timeout=10)
- resp = await retry_transient_errors(self._client.stub.SandboxWait, req)
+ resp = await self._client.stub.SandboxWait(req)
  if resp.result.status:
+ logger.debug(f"Sandbox {self.object_id} wait completed with status {resp.result.status}")
  self._result = resp.result

  if resp.result.status == api_pb2.GenericResult.GENERIC_STATUS_TIMEOUT:
@@ -496,7 +647,7 @@ class _Sandbox(_Object, type_prefix="sb"):

  Returns a dictionary of `Tunnel` objects which are keyed by the container port.

- NOTE: Previous to client [v0.64.153](/docs/reference/changelog#064153-2024-09-30), this
+ NOTE: Previous to client [v0.64.153](https://modal.com/docs/reference/changelog#064153-2024-09-30), this
  returned a list of `TunnelData` objects.
  """

@@ -504,7 +655,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  return self._tunnels

  req = api_pb2.SandboxGetTunnelsRequest(sandbox_id=self.object_id, timeout=timeout)
- resp = await retry_transient_errors(self._client.stub.SandboxGetTunnels, req)
+ resp = await self._client.stub.SandboxGetTunnels(req)

  # If we couldn't get the tunnels in time, report the timeout.
  if resp.result.status == api_pb2.GenericResult.GENERIC_STATUS_TIMEOUT:
@@ -517,14 +668,42 @@ class _Sandbox(_Object, type_prefix="sb"):

  return self._tunnels

+ async def create_connect_token(
+ self, user_metadata: Optional[Union[str, dict[str, Any]]] = None
+ ) -> SandboxConnectCredentials:
+ """
+ [Alpha] Create a token for making HTTP connections to the Sandbox.
+
+ Also accepts an optional user_metadata string or dict to associate with the token. This metadata
+ will be added to the headers by the proxy when forwarding requests to the Sandbox."""
+ if user_metadata is not None and isinstance(user_metadata, dict):
+ try:
+ user_metadata = json.dumps(user_metadata)
+ except Exception as e:
+ raise InvalidError(f"Failed to serialize user_metadata: {e}")
+
+ req = api_pb2.SandboxCreateConnectTokenRequest(sandbox_id=self.object_id, user_metadata=user_metadata)
+ resp = await self._client.stub.SandboxCreateConnectToken(req)
+ return SandboxConnectCredentials(resp.url, resp.token)
+
+ async def reload_volumes(self) -> None:
+ """Reload all Volumes mounted in the Sandbox.
+
+ Added in v1.1.0.
+ """
+ task_id = await self._get_task_id()
+ await self._client.stub.ContainerReloadVolumes(
+ api_pb2.ContainerReloadVolumesRequest(
+ task_id=task_id,
+ ),
+ )
+
  async def terminate(self) -> None:
  """Terminate Sandbox execution.

  This is a no-op if the Sandbox has already finished running."""

- await retry_transient_errors(
- self._client.stub.SandboxTerminate, api_pb2.SandboxTerminateRequest(sandbox_id=self.object_id)
- )
+ await self._client.stub.SandboxTerminate(api_pb2.SandboxTerminateRequest(sandbox_id=self.object_id))

  async def poll(self) -> Optional[int]:
  """Check if the Sandbox has finished running.
@@ -533,7 +712,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  """

  req = api_pb2.SandboxWaitRequest(sandbox_id=self.object_id, timeout=0)
- resp = await retry_transient_errors(self._client.stub.SandboxWait, req)
+ resp = await self._client.stub.SandboxWait(req)

  if resp.result.status:
  self._result = resp.result
@@ -548,132 +727,329 @@ class _Sandbox(_Object, type_prefix="sb"):
  await asyncio.sleep(0.5)
  return self._task_id

+ async def _get_command_router_client(self, task_id: str) -> Optional[TaskCommandRouterClient]:
+ if self._command_router_client is None:
+ # Attempt to initialize a router client. Returns None if the new exec path not enabled
+ # for this sandbox.
+ self._command_router_client = await TaskCommandRouterClient.try_init(self._client, task_id)
+ return self._command_router_client
+
  @overload
  async def exec(
  self,
- *cmds: str,
- pty_info: Optional[api_pb2.PTYInfo] = None,
+ *args: str,
  stdout: StreamType = StreamType.PIPE,
  stderr: StreamType = StreamType.PIPE,
  timeout: Optional[int] = None,
  workdir: Optional[str] = None,
- secrets: Sequence[_Secret] = (),
+ env: Optional[dict[str, Optional[str]]] = None,
+ secrets: Optional[Collection[_Secret]] = None,
  text: Literal[True] = True,
  bufsize: Literal[-1, 1] = -1,
+ pty: bool = False,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
  _pty_info: Optional[api_pb2.PTYInfo] = None,
  ) -> _ContainerProcess[str]: ...

  @overload
  async def exec(
  self,
- *cmds: str,
- pty_info: Optional[api_pb2.PTYInfo] = None,
+ *args: str,
  stdout: StreamType = StreamType.PIPE,
  stderr: StreamType = StreamType.PIPE,
  timeout: Optional[int] = None,
  workdir: Optional[str] = None,
- secrets: Sequence[_Secret] = (),
+ env: Optional[dict[str, Optional[str]]] = None,
+ secrets: Optional[Collection[_Secret]] = None,
  text: Literal[False] = False,
  bufsize: Literal[-1, 1] = -1,
+ pty: bool = False,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
  _pty_info: Optional[api_pb2.PTYInfo] = None,
  ) -> _ContainerProcess[bytes]: ...

  async def exec(
  self,
- *cmds: str,
- pty_info: Optional[api_pb2.PTYInfo] = None, # Deprecated: internal use only
+ *args: str,
  stdout: StreamType = StreamType.PIPE,
  stderr: StreamType = StreamType.PIPE,
  timeout: Optional[int] = None,
  workdir: Optional[str] = None,
- secrets: Sequence[_Secret] = (),
+ env: Optional[dict[str, Optional[str]]] = None, # Environment variables to set during command execution.
+ secrets: Optional[
+ Collection[_Secret]
+ ] = None, # Secrets to inject as environment variables during command execution.
  # Encode output as text.
  text: bool = True,
  # Control line-buffered output.
  # -1 means unbuffered, 1 means line-buffered (only available if `text=True`).
  bufsize: Literal[-1, 1] = -1,
- # Internal option to set terminal size and metadata
- _pty_info: Optional[api_pb2.PTYInfo] = None,
+ pty: bool = False, # Enable a PTY for the command
+ _pty_info: Optional[api_pb2.PTYInfo] = None, # *DEPRECATED* Use `pty` instead. `pty` will override `pty_info`.
+ pty_info: Optional[api_pb2.PTYInfo] = None, # *DEPRECATED* Use `pty` instead. `pty` will override `pty_info`.
  ):
  """Execute a command in the Sandbox and return a ContainerProcess handle.

- See the [`ContainerProcess`](/docs/reference/modal.container_process#modalcontainer_processcontainerprocess)
+ See the [`ContainerProcess`](https://modal.com/docs/reference/modal.container_process#modalcontainer_processcontainerprocess)
  docs for more information.

  **Usage**

- ```python
- app = modal.App.lookup("my-app", create_if_missing=True)
-
- sandbox = modal.Sandbox.create("sleep", "infinity", app=app)
-
- process = sandbox.exec("bash", "-c", "for i in $(seq 1 10); do echo foo $i; sleep 0.5; done")
-
+ ```python fixture:sandbox
+ process = sandbox.exec("bash", "-c", "for i in $(seq 1 3); do echo foo $i; sleep 0.1; done")
  for line in process.stdout:
  print(line)
  ```
  """
+ if pty_info is not None or _pty_info is not None:
+ deprecation_warning(
+ (2025, 9, 12),
+ "The `_pty_info` and `pty_info` parameters are deprecated and will be removed in a future release. "
+ "Set the `pty` parameter to `True` instead.",
+ )
+ pty_info = _pty_info or pty_info
+ if pty:
+ pty_info = self._default_pty_info()
+
+ return await self._exec(
+ *args,
+ pty_info=pty_info,
+ stdout=stdout,
+ stderr=stderr,
+ timeout=timeout,
+ workdir=workdir,
+ env=env,
+ secrets=secrets,
+ text=text,
+ bufsize=bufsize,
+ )
+
+ async def _exec(
+ self,
+ *args: str,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
+ stdout: StreamType = StreamType.PIPE,
+ stderr: StreamType = StreamType.PIPE,
+ timeout: Optional[int] = None,
+ workdir: Optional[str] = None,
+ env: Optional[dict[str, Optional[str]]] = None,
+ secrets: Optional[Collection[_Secret]] = None,
+ text: bool = True,
+ bufsize: Literal[-1, 1] = -1,
+ ) -> Union[_ContainerProcess[bytes], _ContainerProcess[str]]:
+ """Private method used internally.

+ This method exposes some internal arguments (currently `pty_info`) which are not in the public API.
+ """
  if workdir is not None and not workdir.startswith("/"):
  raise InvalidError(f"workdir must be an absolute path, got: {workdir}")
- _validate_exec_args(cmds)
+ _validate_exec_args(args)
+
+ secrets = secrets or []
+ if env:
+ secrets = [*secrets, _Secret.from_dict(env)]

  # Force secret resolution so we can pass the secret IDs to the backend.
  secret_coros = [secret.hydrate(client=self._client) for secret in secrets]
  await TaskContext.gather(*secret_coros)

  task_id = await self._get_task_id()
+ kwargs = {
+ "task_id": task_id,
+ "pty_info": pty_info,
+ "stdout": stdout,
+ "stderr": stderr,
+ "timeout": timeout,
+ "workdir": workdir,
+ "secret_ids": [secret.object_id for secret in secrets],
+ "text": text,
+ "bufsize": bufsize,
+ "runtime_debug": config.get("function_runtime_debug"),
+ }
+ # NB: This must come after the task ID is set, since the sandbox must be
+ # scheduled before we can create a router client.
+ if (command_router_client := await self._get_command_router_client(task_id)) is not None:
+ kwargs["command_router_client"] = command_router_client
+ return await self._exec_through_command_router(*args, **kwargs)
+ else:
+ return await self._exec_through_server(*args, **kwargs)
+
+ async def _exec_through_server(
+ self,
+ *args: str,
+ task_id: str,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
+ stdout: StreamType = StreamType.PIPE,
+ stderr: StreamType = StreamType.PIPE,
+ timeout: Optional[int] = None,
+ workdir: Optional[str] = None,
+ secret_ids: Optional[Collection[str]] = None,
+ text: bool = True,
+ bufsize: Literal[-1, 1] = -1,
+ runtime_debug: bool = False,
+ ) -> Union[_ContainerProcess[bytes], _ContainerProcess[str]]:
+ """Execute a command through the Modal server."""
  req = api_pb2.ContainerExecRequest(
  task_id=task_id,
- command=cmds,
- pty_info=_pty_info or pty_info,
- runtime_debug=config.get("function_runtime_debug"),
+ command=args,
+ pty_info=pty_info,
+ runtime_debug=runtime_debug,
  timeout_secs=timeout or 0,
  workdir=workdir,
- secret_ids=[secret.object_id for secret in secrets],
+ secret_ids=secret_ids,
  )
- resp = await retry_transient_errors(self._client.stub.ContainerExec, req)
+ resp = await self._client.stub.ContainerExec(req)
  by_line = bufsize == 1
- return _ContainerProcess(resp.exec_id, self._client, stdout=stdout, stderr=stderr, text=text, by_line=by_line)
+ exec_deadline = time.monotonic() + int(timeout) + CONTAINER_EXEC_TIMEOUT_BUFFER if timeout else None
+ logger.debug(f"Created ContainerProcess for exec_id {resp.exec_id} on Sandbox {self.object_id}")
+ return _ContainerProcess(
+ resp.exec_id,
+ task_id,
+ self._client,
+ stdout=stdout,
+ stderr=stderr,
+ text=text,
+ exec_deadline=exec_deadline,
+ by_line=by_line,
+ )
+
+ async def _exec_through_command_router(
+ self,
+ *args: str,
+ task_id: str,
+ command_router_client: TaskCommandRouterClient,
+ pty_info: Optional[api_pb2.PTYInfo] = None,
+ stdout: StreamType = StreamType.PIPE,
+ stderr: StreamType = StreamType.PIPE,
+ timeout: Optional[int] = None,
+ workdir: Optional[str] = None,
+ secret_ids: Optional[Collection[str]] = None,
+ text: bool = True,
+ bufsize: Literal[-1, 1] = -1,
+ runtime_debug: bool = False,
+ ) -> Union[_ContainerProcess[bytes], _ContainerProcess[str]]:
+ """Execute a command through a task command router running on the Modal worker."""
+
+ # Generate a random process ID to use as a combination of idempotency key/process identifier.
+ process_id = str(uuid.uuid4())
+ if stdout == StreamType.PIPE:
+ stdout_config = sr_pb2.TaskExecStdoutConfig.TASK_EXEC_STDOUT_CONFIG_PIPE
+ elif stdout == StreamType.DEVNULL:
+ stdout_config = sr_pb2.TaskExecStdoutConfig.TASK_EXEC_STDOUT_CONFIG_DEVNULL
+ elif stdout == StreamType.STDOUT:
+ # TODO(saltzm): This is a behavior change from the old implementation. We should
+ # probably implement the old behavior of printing to stdout before moving out of beta.
+ raise NotImplementedError(
+ "Currently the STDOUT stream type is not supported when using exec "
+ "through a task command router, which is currently in beta."
+ )
+ else:
+ raise ValueError("Unsupported StreamType for stdout")
+
+ if stderr == StreamType.PIPE:
+ stderr_config = sr_pb2.TaskExecStderrConfig.TASK_EXEC_STDERR_CONFIG_PIPE
+ elif stderr == StreamType.DEVNULL:
+ stderr_config = sr_pb2.TaskExecStderrConfig.TASK_EXEC_STDERR_CONFIG_DEVNULL
+ elif stderr == StreamType.STDOUT:
+ stderr_config = sr_pb2.TaskExecStderrConfig.TASK_EXEC_STDERR_CONFIG_STDOUT
+ else:
+ raise ValueError("Unsupported StreamType for stderr")
+
+ # Start the process.
+ start_req = sr_pb2.TaskExecStartRequest(
+ task_id=task_id,
+ exec_id=process_id,
+ command_args=args,
+ stdout_config=stdout_config,
+ stderr_config=stderr_config,
+ timeout_secs=timeout,
+ workdir=workdir,
+ secret_ids=secret_ids,
+ pty_info=pty_info,
+ runtime_debug=runtime_debug,
+ )
+ _ = await command_router_client.exec_start(start_req)
+
+ return _ContainerProcess(
+ process_id,
+ task_id,
+ self._client,
+ command_router_client=command_router_client,
+ stdout=stdout,
+ stderr=stderr,
+ text=text,
+ by_line=bufsize == 1,
+ exec_deadline=time.monotonic() + int(timeout) if timeout else None,
+ )

  async def _experimental_snapshot(self) -> _SandboxSnapshot:
  await self._get_task_id()
  snap_req = api_pb2.SandboxSnapshotRequest(sandbox_id=self.object_id)
- snap_resp = await retry_transient_errors(self._client.stub.SandboxSnapshot, snap_req)
+ snap_resp = await self._client.stub.SandboxSnapshot(snap_req)

  snapshot_id = snap_resp.snapshot_id

  # wait for the snapshot to succeed. this is implemented as a second idempotent rpc
  # because the snapshot itself may take a while to complete.
  wait_req = api_pb2.SandboxSnapshotWaitRequest(snapshot_id=snapshot_id, timeout=55.0)
- wait_resp = await retry_transient_errors(self._client.stub.SandboxSnapshotWait, wait_req)
+ wait_resp = await self._client.stub.SandboxSnapshotWait(wait_req)
  if wait_resp.result.status != api_pb2.GenericResult.GENERIC_STATUS_SUCCESS:
  raise ExecutionError(wait_resp.result.exception)

- async def _load(self: _SandboxSnapshot, resolver: Resolver, existing_object_id: Optional[str]):
+ async def _load(
+ self: _SandboxSnapshot, resolver: Resolver, load_context: LoadContext, existing_object_id: Optional[str]
+ ):
  # we eagerly hydrate the sandbox snapshot below
  pass

  rep = "SandboxSnapshot()"
- obj = _SandboxSnapshot._from_loader(_load, rep, hydrate_lazily=True)
+ # TODO: use ._new_hydrated instead
+ obj = _SandboxSnapshot._from_loader(_load, rep, hydrate_lazily=True, load_context_overrides=LoadContext.empty())
  obj._hydrate(snapshot_id, self._client, None)

  return obj

  @staticmethod
- async def _experimental_from_snapshot(snapshot: _SandboxSnapshot, client: Optional[_Client] = None):
+ async def _experimental_from_snapshot(
+ snapshot: _SandboxSnapshot,
+ client: Optional[_Client] = None,
+ *,
+ name: Optional[str] = _DEFAULT_SANDBOX_NAME_OVERRIDE,
+ ):
  client = client or await _Client.from_env()

- restore_req = api_pb2.SandboxRestoreRequest(snapshot_id=snapshot.object_id)
- restore_resp: api_pb2.SandboxRestoreResponse = await retry_transient_errors(
- client.stub.SandboxRestore, restore_req
- )
+ if name is not None and name != _DEFAULT_SANDBOX_NAME_OVERRIDE:
+ check_object_name(name, "Sandbox")
+
+ if name is _DEFAULT_SANDBOX_NAME_OVERRIDE:
+ restore_req = api_pb2.SandboxRestoreRequest(
+ snapshot_id=snapshot.object_id,
+ sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_UNSPECIFIED,
+ )
+ elif name is None:
+ restore_req = api_pb2.SandboxRestoreRequest(
+ snapshot_id=snapshot.object_id,
+ sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_NONE,
+ )
+ else:
+ restore_req = api_pb2.SandboxRestoreRequest(
+ snapshot_id=snapshot.object_id,
+ sandbox_name_override=name,
+ sandbox_name_override_type=api_pb2.SandboxRestoreRequest.SANDBOX_NAME_OVERRIDE_TYPE_STRING,
+ )
+ try:
+ restore_resp: api_pb2.SandboxRestoreResponse = await client.stub.SandboxRestore(restore_req)
+ except GRPCError as exc:
+ if exc.status == Status.ALREADY_EXISTS:
+ raise AlreadyExistsError(exc.message)
+ raise exc
+
  sandbox = await _Sandbox.from_id(restore_resp.sandbox_id, client)

  task_id_req = api_pb2.SandboxGetTaskIdRequest(
  sandbox_id=restore_resp.sandbox_id, wait_until_ready=True, timeout=55.0
  )
- resp = await retry_transient_errors(client.stub.SandboxGetTaskId, task_id_req)
+ resp = await client.stub.SandboxGetTaskId(task_id_req)
  if resp.task_result.status not in [
  api_pb2.GenericResult.GENERIC_STATUS_UNSPECIFIED,
  api_pb2.GenericResult.GENERIC_STATUS_SUCCESS,
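Editor's note: a sketch (not part of the diff) of `exec` with the new `env` and `pty` parameters; whether the command is routed through the server or the new task command router is decided internally by `_exec`, and `ContainerProcess.wait()` is assumed for collecting the exit code.

```python
# Assumes `sb` is a running Sandbox handle.
proc = sb.exec(
    "bash", "-lc", "echo $GREETING",
    env={"GREETING": "hello"},  # injected via an ephemeral Secret, as in the diff
    timeout=30,                 # the server path extends the stream deadline by CONTAINER_EXEC_TIMEOUT_BUFFER
    pty=False,
)
for line in proc.stdout:
    print(line, end="")
print("exit code:", proc.wait())
```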
@@ -700,9 +1076,9 @@ class _Sandbox(_Object, type_prefix="sb"):
  path: str,
  mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"] = "r",
  ):
- """Open a file in the Sandbox and return a FileIO handle.
+ """[Alpha] Open a file in the Sandbox and return a FileIO handle.

- See the [`FileIO`](/docs/reference/modal.file_io#modalfile_iofileio) docs for more information.
+ See the [`FileIO`](https://modal.com/docs/reference/modal.file_io#modalfile_iofileio) docs for more information.

  **Usage**

@@ -717,17 +1093,17 @@ class _Sandbox(_Object, type_prefix="sb"):
  return await _FileIO.create(path, mode, self._client, task_id)

  async def ls(self, path: str) -> list[str]:
- """List the contents of a directory in the Sandbox."""
+ """[Alpha] List the contents of a directory in the Sandbox."""
  task_id = await self._get_task_id()
  return await _FileIO.ls(path, self._client, task_id)

  async def mkdir(self, path: str, parents: bool = False) -> None:
- """Create a new directory in the Sandbox."""
+ """[Alpha] Create a new directory in the Sandbox."""
  task_id = await self._get_task_id()
  return await _FileIO.mkdir(path, self._client, task_id, parents)

  async def rm(self, path: str, recursive: bool = False) -> None:
- """Remove a file or directory in the Sandbox."""
+ """[Alpha] Remove a file or directory in the Sandbox."""
  task_id = await self._get_task_id()
  return await _FileIO.rm(path, self._client, task_id, recursive)

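Editor's note: a small sketch (not part of the diff) exercising the [Alpha] filesystem helpers marked above; the paths are placeholders.

```python
# Assumes `sb` is a running Sandbox handle.
sb.mkdir("/data", parents=True)
with sb.open("/data/notes.txt", "w") as f:
    f.write("hello")
print(sb.ls("/data"))  # expected: ["notes.txt"]
sb.rm("/data/notes.txt")
```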
@@ -738,7 +1114,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  recursive: Optional[bool] = None,
  timeout: Optional[int] = None,
  ) -> AsyncIterator[FileWatchEvent]:
- """Watch a file or directory in the Sandbox for changes."""
+ """[Alpha] Watch a file or directory in the Sandbox for changes."""
  task_id = await self._get_task_id()
  async for event in _FileIO.watch(path, self._client, task_id, filter, recursive, timeout):
  yield event
@@ -746,7 +1122,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  @property
  def stdout(self) -> _StreamReader[str]:
  """
- [`StreamReader`](/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
+ [`StreamReader`](https://modal.com/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
  the sandbox's stdout stream.
  """

@@ -754,7 +1130,7 @@ class _Sandbox(_Object, type_prefix="sb"):

  @property
  def stderr(self) -> _StreamReader[str]:
- """[`StreamReader`](/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
+ """[`StreamReader`](https://modal.com/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
  the Sandbox's stderr stream.
  """

@@ -763,7 +1139,7 @@ class _Sandbox(_Object, type_prefix="sb"):
  @property
  def stdin(self) -> _StreamWriter:
  """
- [`StreamWriter`](/docs/reference/modal.io_streams#modalio_streamsstreamwriter) for
+ [`StreamWriter`](https://modal.com/docs/reference/modal.io_streams#modalio_streamsstreamwriter) for
  the Sandbox's stdin stream.
  """

@@ -808,7 +1184,7 @@ class _Sandbox(_Object, type_prefix="sb"):

  # Fetches a batch of sandboxes.
  try:
- resp = await retry_transient_errors(client.stub.SandboxList, req)
+ resp = await client.stub.SandboxList(req)
  except GRPCError as exc:
  raise InvalidError(exc.message) if exc.status == Status.INVALID_ARGUMENT else exc