modal 0.62.16__py3-none-any.whl → 0.72.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. modal/__init__.py +17 -13
  2. modal/__main__.py +41 -3
  3. modal/_clustered_functions.py +80 -0
  4. modal/_clustered_functions.pyi +22 -0
  5. modal/_container_entrypoint.py +420 -937
  6. modal/_ipython.py +3 -13
  7. modal/_location.py +17 -10
  8. modal/_output.py +243 -99
  9. modal/_pty.py +2 -2
  10. modal/_resolver.py +55 -59
  11. modal/_resources.py +51 -0
  12. modal/_runtime/__init__.py +1 -0
  13. modal/_runtime/asgi.py +519 -0
  14. modal/_runtime/container_io_manager.py +1036 -0
  15. modal/_runtime/execution_context.py +89 -0
  16. modal/_runtime/telemetry.py +169 -0
  17. modal/_runtime/user_code_imports.py +356 -0
  18. modal/_serialization.py +134 -9
  19. modal/_traceback.py +47 -187
  20. modal/_tunnel.py +52 -16
  21. modal/_tunnel.pyi +19 -36
  22. modal/_utils/app_utils.py +3 -17
  23. modal/_utils/async_utils.py +479 -100
  24. modal/_utils/blob_utils.py +157 -186
  25. modal/_utils/bytes_io_segment_payload.py +97 -0
  26. modal/_utils/deprecation.py +89 -0
  27. modal/_utils/docker_utils.py +98 -0
  28. modal/_utils/function_utils.py +460 -171
  29. modal/_utils/grpc_testing.py +47 -31
  30. modal/_utils/grpc_utils.py +62 -109
  31. modal/_utils/hash_utils.py +61 -19
  32. modal/_utils/http_utils.py +39 -9
  33. modal/_utils/logger.py +2 -1
  34. modal/_utils/mount_utils.py +34 -16
  35. modal/_utils/name_utils.py +58 -0
  36. modal/_utils/package_utils.py +14 -1
  37. modal/_utils/pattern_utils.py +205 -0
  38. modal/_utils/rand_pb_testing.py +5 -7
  39. modal/_utils/shell_utils.py +15 -49
  40. modal/_vendor/a2wsgi_wsgi.py +62 -72
  41. modal/_vendor/cloudpickle.py +1 -1
  42. modal/_watcher.py +14 -12
  43. modal/app.py +1003 -314
  44. modal/app.pyi +540 -264
  45. modal/call_graph.py +7 -6
  46. modal/cli/_download.py +63 -53
  47. modal/cli/_traceback.py +200 -0
  48. modal/cli/app.py +205 -45
  49. modal/cli/config.py +12 -5
  50. modal/cli/container.py +62 -14
  51. modal/cli/dict.py +128 -0
  52. modal/cli/entry_point.py +26 -13
  53. modal/cli/environment.py +40 -9
  54. modal/cli/import_refs.py +64 -58
  55. modal/cli/launch.py +32 -18
  56. modal/cli/network_file_system.py +64 -83
  57. modal/cli/profile.py +1 -1
  58. modal/cli/programs/run_jupyter.py +35 -10
  59. modal/cli/programs/vscode.py +60 -10
  60. modal/cli/queues.py +131 -0
  61. modal/cli/run.py +234 -131
  62. modal/cli/secret.py +8 -7
  63. modal/cli/token.py +7 -2
  64. modal/cli/utils.py +79 -10
  65. modal/cli/volume.py +110 -109
  66. modal/client.py +250 -144
  67. modal/client.pyi +157 -118
  68. modal/cloud_bucket_mount.py +108 -34
  69. modal/cloud_bucket_mount.pyi +32 -38
  70. modal/cls.py +535 -148
  71. modal/cls.pyi +190 -146
  72. modal/config.py +41 -19
  73. modal/container_process.py +177 -0
  74. modal/container_process.pyi +82 -0
  75. modal/dict.py +111 -65
  76. modal/dict.pyi +136 -131
  77. modal/environments.py +106 -5
  78. modal/environments.pyi +77 -25
  79. modal/exception.py +34 -43
  80. modal/experimental.py +61 -2
  81. modal/extensions/ipython.py +5 -5
  82. modal/file_io.py +537 -0
  83. modal/file_io.pyi +235 -0
  84. modal/file_pattern_matcher.py +197 -0
  85. modal/functions.py +906 -911
  86. modal/functions.pyi +466 -430
  87. modal/gpu.py +57 -44
  88. modal/image.py +1089 -479
  89. modal/image.pyi +584 -228
  90. modal/io_streams.py +434 -0
  91. modal/io_streams.pyi +122 -0
  92. modal/mount.py +314 -101
  93. modal/mount.pyi +241 -235
  94. modal/network_file_system.py +92 -92
  95. modal/network_file_system.pyi +152 -110
  96. modal/object.py +67 -36
  97. modal/object.pyi +166 -143
  98. modal/output.py +63 -0
  99. modal/parallel_map.py +434 -0
  100. modal/parallel_map.pyi +75 -0
  101. modal/partial_function.py +282 -117
  102. modal/partial_function.pyi +222 -129
  103. modal/proxy.py +15 -12
  104. modal/proxy.pyi +3 -8
  105. modal/queue.py +182 -65
  106. modal/queue.pyi +218 -118
  107. modal/requirements/2024.04.txt +29 -0
  108. modal/requirements/2024.10.txt +16 -0
  109. modal/requirements/README.md +21 -0
  110. modal/requirements/base-images.json +22 -0
  111. modal/retries.py +48 -7
  112. modal/runner.py +459 -156
  113. modal/runner.pyi +135 -71
  114. modal/running_app.py +38 -0
  115. modal/sandbox.py +514 -236
  116. modal/sandbox.pyi +397 -169
  117. modal/schedule.py +4 -4
  118. modal/scheduler_placement.py +20 -3
  119. modal/secret.py +56 -31
  120. modal/secret.pyi +62 -42
  121. modal/serving.py +51 -56
  122. modal/serving.pyi +44 -36
  123. modal/stream_type.py +15 -0
  124. modal/token_flow.py +5 -3
  125. modal/token_flow.pyi +37 -32
  126. modal/volume.py +285 -157
  127. modal/volume.pyi +249 -184
  128. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/METADATA +7 -7
  129. modal-0.72.11.dist-info/RECORD +174 -0
  130. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/top_level.txt +0 -1
  131. modal_docs/gen_reference_docs.py +3 -1
  132. modal_docs/mdmd/mdmd.py +0 -1
  133. modal_docs/mdmd/signatures.py +5 -2
  134. modal_global_objects/images/base_images.py +28 -0
  135. modal_global_objects/mounts/python_standalone.py +2 -2
  136. modal_proto/__init__.py +1 -1
  137. modal_proto/api.proto +1288 -533
  138. modal_proto/api_grpc.py +856 -456
  139. modal_proto/api_pb2.py +2165 -1157
  140. modal_proto/api_pb2.pyi +8859 -0
  141. modal_proto/api_pb2_grpc.py +1674 -855
  142. modal_proto/api_pb2_grpc.pyi +1416 -0
  143. modal_proto/modal_api_grpc.py +149 -0
  144. modal_proto/modal_options_grpc.py +3 -0
  145. modal_proto/options_pb2.pyi +20 -0
  146. modal_proto/options_pb2_grpc.pyi +7 -0
  147. modal_proto/py.typed +0 -0
  148. modal_version/__init__.py +1 -1
  149. modal_version/_version_generated.py +2 -2
  150. modal/_asgi.py +0 -370
  151. modal/_container_entrypoint.pyi +0 -378
  152. modal/_container_exec.py +0 -128
  153. modal/_sandbox_shell.py +0 -49
  154. modal/shared_volume.py +0 -23
  155. modal/shared_volume.pyi +0 -24
  156. modal/stub.py +0 -783
  157. modal/stub.pyi +0 -332
  158. modal-0.62.16.dist-info/RECORD +0 -198
  159. modal_global_objects/images/conda.py +0 -15
  160. modal_global_objects/images/debian_slim.py +0 -15
  161. modal_global_objects/images/micromamba.py +0 -15
  162. test/__init__.py +0 -1
  163. test/aio_test.py +0 -12
  164. test/async_utils_test.py +0 -262
  165. test/blob_test.py +0 -67
  166. test/cli_imports_test.py +0 -149
  167. test/cli_test.py +0 -659
  168. test/client_test.py +0 -194
  169. test/cls_test.py +0 -630
  170. test/config_test.py +0 -137
  171. test/conftest.py +0 -1420
  172. test/container_app_test.py +0 -32
  173. test/container_test.py +0 -1389
  174. test/cpu_test.py +0 -23
  175. test/decorator_test.py +0 -85
  176. test/deprecation_test.py +0 -34
  177. test/dict_test.py +0 -33
  178. test/e2e_test.py +0 -68
  179. test/error_test.py +0 -7
  180. test/function_serialization_test.py +0 -32
  181. test/function_test.py +0 -653
  182. test/function_utils_test.py +0 -101
  183. test/gpu_test.py +0 -159
  184. test/grpc_utils_test.py +0 -141
  185. test/helpers.py +0 -42
  186. test/image_test.py +0 -669
  187. test/live_reload_test.py +0 -80
  188. test/lookup_test.py +0 -70
  189. test/mdmd_test.py +0 -329
  190. test/mount_test.py +0 -162
  191. test/mounted_files_test.py +0 -329
  192. test/network_file_system_test.py +0 -181
  193. test/notebook_test.py +0 -66
  194. test/object_test.py +0 -41
  195. test/package_utils_test.py +0 -25
  196. test/queue_test.py +0 -97
  197. test/resolver_test.py +0 -58
  198. test/retries_test.py +0 -67
  199. test/runner_test.py +0 -85
  200. test/sandbox_test.py +0 -191
  201. test/schedule_test.py +0 -15
  202. test/scheduler_placement_test.py +0 -29
  203. test/secret_test.py +0 -78
  204. test/serialization_test.py +0 -42
  205. test/stub_composition_test.py +0 -10
  206. test/stub_test.py +0 -360
  207. test/test_asgi_wrapper.py +0 -234
  208. test/token_flow_test.py +0 -18
  209. test/traceback_test.py +0 -135
  210. test/tunnel_test.py +0 -29
  211. test/utils_test.py +0 -88
  212. test/version_test.py +0 -14
  213. test/volume_test.py +0 -341
  214. test/watcher_test.py +0 -30
  215. test/webhook_test.py +0 -146
  216. /modal/{requirements.312.txt → requirements/2023.12.312.txt} +0 -0
  217. /modal/{requirements.txt → requirements/2023.12.txt} +0 -0
  218. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/LICENSE +0 -0
  219. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/WHEEL +0 -0
  220. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/entry_points.txt +0 -0
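
The most consequential entries in this list are the removal of `modal/stub.py` and `modal/stub.pyi` alongside the large growth of `modal/app.py`: across this version range, the client's top-level `Stub` object was renamed to `App`. A minimal before/after sketch of that rename (the app and function names here are illustrative, not taken from this diff):

```python
import modal

# 0.62.x style (modal/stub.py, deleted below):
#   stub = modal.Stub("example-app")
#   @stub.function()

# 0.72.x style (modal/app.py):
app = modal.App("example-app")

@app.function()
def square(x: int) -> int:
    return x * x
```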
modal/sandbox.py CHANGED
@@ -1,221 +1,48 @@
  # Copyright Modal Labs 2022
  import asyncio
  import os
- from typing import AsyncIterator, Dict, List, Optional, Sequence, Union
+ from collections.abc import AsyncGenerator, Sequence
+ from typing import TYPE_CHECKING, AsyncIterator, Literal, Optional, Union, overload
+
+ if TYPE_CHECKING:
+     import _typeshed

  from google.protobuf.message import Message
- from grpclib.exceptions import GRPCError, StreamTerminatedError
+ from grpclib import GRPCError, Status

+ from modal._tunnel import Tunnel
  from modal.cloud_bucket_mount import _CloudBucketMount, cloud_bucket_mounts_to_proto
- from modal.exception import InvalidError, SandboxTerminatedError, SandboxTimeoutError
  from modal.volume import _Volume
  from modal_proto import api_pb2

  from ._location import parse_cloud_provider
  from ._resolver import Resolver
+ from ._resources import convert_fn_config_to_resources_config
  from ._utils.async_utils import synchronize_api
- from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors, unary_stream
- from ._utils.mount_utils import validate_mount_points, validate_volumes
+ from ._utils.deprecation import deprecation_error
+ from ._utils.grpc_utils import retry_transient_errors
+ from ._utils.mount_utils import validate_network_file_systems, validate_volumes
  from .client import _Client
  from .config import config
- from .gpu import GPU_T, parse_gpu_config
+ from .container_process import _ContainerProcess
+ from .exception import ExecutionError, InvalidError, SandboxTerminatedError, SandboxTimeoutError
+ from .file_io import FileWatchEvent, FileWatchEventType, _FileIO
+ from .gpu import GPU_T
  from .image import _Image
+ from .io_streams import StreamReader, StreamWriter, _StreamReader, _StreamWriter
  from .mount import _Mount
  from .network_file_system import _NetworkFileSystem, network_file_system_mount_protos
- from .object import _Object
+ from .object import _get_environment_name, _Object
+ from .proxy import _Proxy
+ from .scheduler_placement import SchedulerPlacement
  from .secret import _Secret
+ from .stream_type import StreamType

+ _default_image: _Image = _Image.debian_slim()

- class _LogsReader:
-     """Provides an interface to buffer and fetch logs from a sandbox stream (`stdout` or `stderr`).
-
-     As an asynchronous iterable, the object supports the async for statement.
-
-     **Usage**
-
-     ```python
-     @stub.function()
-     async def my_fn():
-         sandbox = stub.spawn_sandbox(
-             "bash",
-             "-c",
-             "while true; do echo foo; sleep 1; done"
-         )
-         async for message in sandbox.stdout:
-             print(f"Message: {message}")
-     ```
-     """
-
-     def __init__(self, file_descriptor: int, sandbox_id: str, client: _Client) -> None:
-         """mdmd:hidden"""
-
-         self._file_descriptor = file_descriptor
-         self._sandbox_id = sandbox_id
-         self._client = client
-         self._stream = None
-         self._last_log_batch_entry_id = ""
-         # Whether the reader received an EOF. Once EOF is True, it returns
-         # an empty string for any subsequent reads (including async for)
-         self.eof = False
-
-     async def read(self) -> str:
-         """Fetch and return contents of the entire stream. If EOF was received,
-         return an empty string.
-
-         **Usage**
-
-         ```python
-         sandbox = stub.app.spawn_sandbox("echo", "hello")
-         sandbox.wait()

-         print(sandbox.stdout.read())
-         ```
-
-         """
-         data = ""
-         # TODO: maybe combine this with get_app_logs_loop
-         async for message in self._get_logs():
-             if message is None:
-                 break
-             data += message.data
-
-         return data
-
-     async def _get_logs(self) -> AsyncIterator[Optional[api_pb2.TaskLogs]]:
-         """mdmd:hidden
-         Streams sandbox logs from the server to the reader.
-
-         When the stream receives an EOF, it yields None. Once an EOF is received,
-         subsequent invocations will not yield logs.
-         """
-         if self.eof:
-             yield None
-             return
-
-         completed = False
-
-         retries_remaining = 10
-         while not completed:
-             req = api_pb2.SandboxGetLogsRequest(
-                 sandbox_id=self._sandbox_id,
-                 file_descriptor=self._file_descriptor,
-                 timeout=55,
-                 last_entry_id=self._last_log_batch_entry_id,
-             )
-             try:
-                 async for log_batch in unary_stream(self._client.stub.SandboxGetLogs, req):
-                     self._last_log_batch_entry_id = log_batch.entry_id
-
-                     for message in log_batch.items:
-                         yield message
-                     if log_batch.eof:
-                         self.eof = True
-                         completed = True
-                         yield None
-                         break
-             except (GRPCError, StreamTerminatedError) as exc:
-                 if retries_remaining > 0:
-                     retries_remaining -= 1
-                     if isinstance(exc, GRPCError):
-                         if exc.status in RETRYABLE_GRPC_STATUS_CODES:
-                             await asyncio.sleep(1.0)
-                             continue
-                     elif isinstance(exc, StreamTerminatedError):
-                         continue
-                 raise
-
-     def __aiter__(self):
-         """mdmd:hidden"""
-         self._stream = self._get_logs()
-         return self
-
-     async def __anext__(self):
-         """mdmd:hidden"""
-         value = await self._stream.__anext__()
-
-         # The stream yields None if it receives an EOF batch.
-         if value is None:
-             raise StopAsyncIteration
-
-         return value.data
-
-
- MAX_BUFFER_SIZE = 128 * 1024
-
-
- class _StreamWriter:
-     """Provides an interface to buffer and write logs to a sandbox stream (`stdin`)."""
-
-     def __init__(self, sandbox_id: str, client: _Client):
-         self._index = 1
-         self._sandbox_id = sandbox_id
-         self._client = client
-         self._is_closed = False
-         self._buffer = bytearray()
-
-     def get_next_index(self):
-         """mdmd:hidden"""
-         index = self._index
-         self._index += 1
-         return index
-
-     def write(self, data: Union[bytes, bytearray, memoryview]):
-         """
-         Writes data to stream's internal buffer, but does not drain/flush the write.
-
-         This method needs to be used along with the `drain()` method which flushes the buffer.
-
-         **Usage**
-
-         ```python
-         @stub.local_entrypoint()
-         def main():
-             sandbox = stub.spawn_sandbox(
-                 "bash",
-                 "-c",
-                 "while read line; do echo $line; done",
-             )
-             sandbox.stdin.write(b"foo\\n")
-             sandbox.stdin.write(b"bar\\n")
-             sandbox.stdin.write_eof()
-
-             sandbox.stdin.drain()
-             sandbox.wait()
-         ```
-         """
-         if self._is_closed:
-             raise EOFError("Stdin is closed. Cannot write to it.")
-         if isinstance(data, (bytes, bytearray, memoryview)):
-             if len(self._buffer) + len(data) > MAX_BUFFER_SIZE:
-                 raise BufferError("Buffer size exceed limit. Call drain to clear the buffer.")
-             self._buffer.extend(data)
-         else:
-             raise TypeError(f"data argument must be a bytes-like object, not {type(data).__name__}")
-
-     def write_eof(self):
-         """
-         Closes the write end of the stream after the buffered write data is drained.
-         If the sandbox process was blocked on input, it will become unblocked after `write_eof()`.
-
-         This method needs to be used along with the `drain()` method which flushes the EOF to the process.
-         """
-         self._is_closed = True
-
-     async def drain(self):
-         """
-         Flushes the write buffer and EOF to the running Sandbox process.
-         """
-         data = bytes(self._buffer)
-         self._buffer.clear()
-         index = self.get_next_index()
-         await retry_transient_errors(
-             self._client.stub.SandboxStdinWrite,
-             api_pb2.SandboxStdinWriteRequest(sandbox_id=self._sandbox_id, index=index, eof=self._is_closed, input=data),
-         )
-
-
- LogsReader = synchronize_api(_LogsReader)
- StreamWriter = synchronize_api(_StreamWriter)
+ if TYPE_CHECKING:
+     import modal.app


  class _Sandbox(_Object, type_prefix="sb"):
@@ -226,9 +53,11 @@ class _Sandbox(_Object, type_prefix="sb"):
      """

      _result: Optional[api_pb2.GenericResult]
-     _stdout: _LogsReader
-     _stderr: _LogsReader
+     _stdout: _StreamReader[str]
+     _stderr: _StreamReader[str]
      _stdin: _StreamWriter
+     _task_id: Optional[str] = None
+     _tunnels: Optional[dict[int, Tunnel]] = None

      @staticmethod
      def _new(
@@ -240,30 +69,48 @@ class _Sandbox(_Object, type_prefix="sb"):
          workdir: Optional[str] = None,
          gpu: GPU_T = None,
          cloud: Optional[str] = None,
+         region: Optional[Union[str, Sequence[str]]] = None,
          cpu: Optional[float] = None,
-         memory: Optional[int] = None,
-         network_file_systems: Dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
+         memory: Optional[Union[int, tuple[int, int]]] = None,
+         network_file_systems: dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
          block_network: bool = False,
-         volumes: Dict[Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]] = {},
-         allow_background_volume_commits: bool = False,
+         cidr_allowlist: Optional[Sequence[str]] = None,
+         volumes: dict[Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]] = {},
          pty_info: Optional[api_pb2.PTYInfo] = None,
+         encrypted_ports: Sequence[int] = [],
+         unencrypted_ports: Sequence[int] = [],
+         proxy: Optional[_Proxy] = None,
+         _experimental_scheduler_placement: Optional[SchedulerPlacement] = None,
      ) -> "_Sandbox":
          """mdmd:hidden"""

          if len(entrypoint_args) == 0:
              raise InvalidError("entrypoint_args must not be empty")

-         if not isinstance(network_file_systems, dict):
-             raise InvalidError("network_file_systems must be a dict[str, NetworkFileSystem] where the keys are paths")
-         validated_network_file_systems = validate_mount_points("Network file system", network_file_systems)
+         validated_network_file_systems = validate_network_file_systems(network_file_systems)
+
+         scheduler_placement: Optional[SchedulerPlacement] = _experimental_scheduler_placement
+         if region:
+             if scheduler_placement:
+                 raise InvalidError("`region` and `_experimental_scheduler_placement` cannot be used together")
+             scheduler_placement = SchedulerPlacement(region=region)
+
+         if isinstance(gpu, list):
+             raise InvalidError(
+                 "Sandboxes do not support configuring a list of GPUs. "
+                 "Specify a single GPU configuration, e.g. gpu='a10g'"
+             )
+
+         if workdir is not None and not workdir.startswith("/"):
+             raise InvalidError(f"workdir must be an absolute path, got: {workdir}")

          # Validate volumes
          validated_volumes = validate_volumes(volumes)
          cloud_bucket_mounts = [(k, v) for k, v in validated_volumes if isinstance(v, _CloudBucketMount)]
          validated_volumes = [(k, v) for k, v in validated_volumes if isinstance(v, _Volume)]

-         def _deps() -> List[_Object]:
-             deps: List[_Object] = [image] + list(mounts) + list(secrets)
+         def _deps() -> list[_Object]:
+             deps: list[_Object] = [image] + list(mounts) + list(secrets)
              for _, vol in validated_network_file_systems:
                  deps.append(vol)
              for _, vol in validated_volumes:
@@ -271,45 +118,70 @@ class _Sandbox(_Object, type_prefix="sb"):
              for _, cloud_bucket_mount in cloud_bucket_mounts:
                  if cloud_bucket_mount.secret:
                      deps.append(cloud_bucket_mount.secret)
+             if proxy:
+                 deps.append(proxy)
              return deps

          async def _load(self: _Sandbox, resolver: Resolver, _existing_object_id: Optional[str]):
-             gpu_config = parse_gpu_config(gpu)
-
-             cloud_provider = parse_cloud_provider(cloud) if cloud else None
-
-             if cpu is not None and cpu < 0.25:
-                 raise InvalidError(f"Invalid fractional CPU value {cpu}. Cannot have less than 0.25 CPU resources.")
-             milli_cpu = int(1000 * cpu) if cpu is not None else None
-
              # Relies on dicts being ordered (true as of Python 3.6).
              volume_mounts = [
                  api_pb2.VolumeMount(
                      mount_path=path,
                      volume_id=volume.object_id,
-                     allow_background_commits=allow_background_volume_commits,
+                     allow_background_commits=True,
                  )
                  for path, volume in validated_volumes
              ]

+             open_ports = [api_pb2.PortSpec(port=port, unencrypted=False) for port in encrypted_ports]
+             open_ports.extend([api_pb2.PortSpec(port=port, unencrypted=True) for port in unencrypted_ports])
+
+             if block_network:
+                 # If the network is blocked, cidr_allowlist is invalid as we don't allow any network access.
+                 if cidr_allowlist is not None:
+                     raise InvalidError("`cidr_allowlist` cannot be used when `block_network` is enabled")
+                 network_access = api_pb2.NetworkAccess(
+                     network_access_type=api_pb2.NetworkAccess.NetworkAccessType.BLOCKED,
+                 )
+             elif cidr_allowlist is None:
+                 # If the allowlist is empty, we allow all network access.
+                 network_access = api_pb2.NetworkAccess(
+                     network_access_type=api_pb2.NetworkAccess.NetworkAccessType.OPEN,
+                 )
+             else:
+                 network_access = api_pb2.NetworkAccess(
+                     network_access_type=api_pb2.NetworkAccess.NetworkAccessType.ALLOWLIST,
+                     allowed_cidrs=cidr_allowlist,
+                 )
+
+             ephemeral_disk = None  # Ephemeral disk requests not supported on Sandboxes.
              definition = api_pb2.Sandbox(
                  entrypoint_args=entrypoint_args,
                  image_id=image.object_id,
-                 mount_ids=[mount.object_id for mount in mounts],
+                 mount_ids=[mount.object_id for mount in mounts] + [mount.object_id for mount in image._mount_layers],
                  secret_ids=[secret.object_id for secret in secrets],
                  timeout_secs=timeout,
                  workdir=workdir,
-                 resources=api_pb2.Resources(gpu_config=gpu_config, milli_cpu=milli_cpu, memory_mb=memory),
-                 cloud_provider=cloud_provider,
+                 resources=convert_fn_config_to_resources_config(
+                     cpu=cpu, memory=memory, gpu=gpu, ephemeral_disk=ephemeral_disk
+                 ),
+                 cloud_provider=parse_cloud_provider(cloud) if cloud else None,
                  nfs_mounts=network_file_system_mount_protos(validated_network_file_systems, False),
                  runtime_debug=config.get("function_runtime_debug"),
-                 block_network=block_network,
                  cloud_bucket_mounts=cloud_bucket_mounts_to_proto(cloud_bucket_mounts),
                  volume_mounts=volume_mounts,
                  pty_info=pty_info,
+                 scheduler_placement=scheduler_placement.proto if scheduler_placement else None,
+                 worker_id=config.get("worker_id"),
+                 open_ports=api_pb2.PortSpecs(ports=open_ports),
+                 network_access=network_access,
+                 proxy_id=(proxy.object_id if proxy else None),
              )

-             create_req = api_pb2.SandboxCreateRequest(app_id=resolver.app_id, definition=definition)
+             # Note - `resolver.app_id` will be `None` for app-less sandboxes
+             create_req = api_pb2.SandboxCreateRequest(
+                 app_id=resolver.app_id, definition=definition, environment_name=resolver.environment_name
+             )
              create_resp = await retry_transient_errors(resolver.client.stub.SandboxCreate, create_req)

              sandbox_id = create_resp.sandbox_id
@@ -317,15 +189,127 @@ class _Sandbox(_Object, type_prefix="sb"):

          return _Sandbox._from_loader(_load, "Sandbox()", deps=_deps)

+     @staticmethod
+     async def create(
+         *entrypoint_args: str,
+         app: Optional["modal.app._App"] = None,  # Optionally associate the sandbox with an app
+         environment_name: Optional[str] = None,  # Optionally override the default environment
+         image: Optional[_Image] = None,  # The image to run as the container for the sandbox.
+         mounts: Sequence[_Mount] = (),  # Mounts to attach to the sandbox.
+         secrets: Sequence[_Secret] = (),  # Environment variables to inject into the sandbox.
+         network_file_systems: dict[Union[str, os.PathLike], _NetworkFileSystem] = {},
+         timeout: Optional[int] = None,  # Maximum execution time of the sandbox in seconds.
+         workdir: Optional[str] = None,  # Working directory of the sandbox.
+         gpu: GPU_T = None,
+         cloud: Optional[str] = None,
+         region: Optional[Union[str, Sequence[str]]] = None,  # Region or regions to run the sandbox on.
+         # Specify, in fractional CPU cores, how many CPU cores to request.
+         # Or, pass (request, limit) to additionally specify a hard limit in fractional CPU cores.
+         # CPU throttling will prevent a container from exceeding its specified limit.
+         cpu: Optional[Union[float, tuple[float, float]]] = None,
+         # Specify, in MiB, a memory request which is the minimum memory required.
+         # Or, pass (request, limit) to additionally specify a hard limit in MiB.
+         memory: Optional[Union[int, tuple[int, int]]] = None,
+         block_network: bool = False,  # Whether to block network access
+         # List of CIDRs the sandbox is allowed to access. If None, all CIDRs are allowed.
+         cidr_allowlist: Optional[Sequence[str]] = None,
+         volumes: dict[
+             Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]
+         ] = {},  # Mount points for Modal Volumes and CloudBucketMounts
+         pty_info: Optional[api_pb2.PTYInfo] = None,
+         # List of ports to tunnel into the sandbox. Encrypted ports are tunneled with TLS.
+         encrypted_ports: Sequence[int] = [],
+         # List of ports to tunnel into the sandbox without encryption.
+         unencrypted_ports: Sequence[int] = [],
+         # Reference to a Modal Proxy to use in front of this Sandbox.
+         proxy: Optional[_Proxy] = None,
+         _experimental_scheduler_placement: Optional[
+             SchedulerPlacement
+         ] = None,  # Experimental controls over fine-grained scheduling (alpha).
+         client: Optional[_Client] = None,
+     ) -> "_Sandbox":
+         from .app import _App
+
+         environment_name = _get_environment_name(environment_name)
+
+         # If there are no entrypoint args, we'll sleep forever so that the sandbox will stay
+         # alive long enough for the user to interact with it.
+         if len(entrypoint_args) == 0:
+             max_sleep_time = 60 * 60 * 24 * 2  # 2 days is plenty since workers roll every 24h
+             entrypoint_args = ("sleep", str(max_sleep_time))
+
+         # TODO(erikbern): Get rid of the `_new` method and create an already-hydrated object
+         obj = _Sandbox._new(
+             entrypoint_args,
+             image=image or _default_image,
+             mounts=mounts,
+             secrets=secrets,
+             timeout=timeout,
+             workdir=workdir,
+             gpu=gpu,
+             cloud=cloud,
+             region=region,
+             cpu=cpu,
+             memory=memory,
+             network_file_systems=network_file_systems,
+             block_network=block_network,
+             cidr_allowlist=cidr_allowlist,
+             volumes=volumes,
+             pty_info=pty_info,
+             encrypted_ports=encrypted_ports,
+             unencrypted_ports=unencrypted_ports,
+             proxy=proxy,
+             _experimental_scheduler_placement=_experimental_scheduler_placement,
+         )
+
+         app_id: Optional[str] = None
+         app_client: Optional[_Client] = None
+
+         if app is not None:
+             if app.app_id is None:
+                 raise ValueError(
+                     "App has not been initialized yet. To create an App lazily, use `App.lookup`: \n"
+                     "app = modal.App.lookup('my-app', create_if_missing=True)\n"
+                     "modal.Sandbox.create('echo', 'hi', app=app)\n"
+                     "In order to initialize an existing `App` object, refer to our docs: https://modal.com/docs/guide/apps"
+                 )
+
+             app_id = app.app_id
+             app_client = app._client
+         elif (container_app := _App._get_container_app()) is not None:
+             app_id = container_app.app_id
+             app_client = container_app._client
+         else:
+             arglist = ", ".join(repr(s) for s in entrypoint_args)
+             deprecation_error(
+                 (2024, 9, 14),
+                 "Creating a `Sandbox` without an `App` is deprecated.\n\n"
+                 "You may pass in an `App` object, or reference one by name with `App.lookup`:\n\n"
+                 "```\n"
+                 "app = modal.App.lookup('sandbox-app', create_if_missing=True)\n"
+                 f"sb = modal.Sandbox.create({arglist}, app=app)\n"
+                 "```",
+             )
+
+         client = client or app_client or await _Client.from_env()
+
+         resolver = Resolver(client, environment_name=environment_name, app_id=app_id)
+         await resolver.load(obj)
+         return obj
+
      def _hydrate_metadata(self, handle_metadata: Optional[Message]):
-         self._stdout = LogsReader(api_pb2.FILE_DESCRIPTOR_STDOUT, self.object_id, self._client)
-         self._stderr = LogsReader(api_pb2.FILE_DESCRIPTOR_STDERR, self.object_id, self._client)
-         self._stdin = StreamWriter(self.object_id, self._client)
+         self._stdout: _StreamReader[str] = StreamReader[str](
+             api_pb2.FILE_DESCRIPTOR_STDOUT, self.object_id, "sandbox", self._client, by_line=True
+         )
+         self._stderr: _StreamReader[str] = StreamReader[str](
+             api_pb2.FILE_DESCRIPTOR_STDERR, self.object_id, "sandbox", self._client, by_line=True
+         )
+         self._stdin = StreamWriter(self.object_id, "sandbox", self._client)
          self._result = None

      @staticmethod
      async def from_id(sandbox_id: str, client: Optional[_Client] = None) -> "_Sandbox":
-         """Construct a Sandbox from an id and look up the sandbox result.
+         """Construct a Sandbox from an id and look up the Sandbox result.

          The ID of a Sandbox object can be accessed using `.object_id`.
          """
@@ -336,14 +320,57 @@ class _Sandbox(_Object, type_prefix="sb"):
          resp = await retry_transient_errors(client.stub.SandboxWait, req)

          obj = _Sandbox._new_hydrated(sandbox_id, client, None)
-         obj._result = resp.result
+
+         if resp.result.status:
+             obj._result = resp.result

          return obj

+     async def set_tags(self, tags: dict[str, str], *, client: Optional[_Client] = None):
+         """Set tags (key-value pairs) on the Sandbox. Tags can be used to filter results in `Sandbox.list`."""
+         environment_name = _get_environment_name()
+         if client is None:
+             client = await _Client.from_env()
+
+         tags_list = [api_pb2.SandboxTag(tag_name=name, tag_value=value) for name, value in tags.items()]
+
+         req = api_pb2.SandboxTagsSetRequest(
+             environment_name=environment_name,
+             sandbox_id=self.object_id,
+             tags=tags_list,
+         )
+         try:
+             await retry_transient_errors(client.stub.SandboxTagsSet, req)
+         except GRPCError as exc:
+             raise InvalidError(exc.message) if exc.status == Status.INVALID_ARGUMENT else exc
+
+     async def snapshot_filesystem(self, timeout: int = 55) -> _Image:
+         """Snapshot the filesystem of the Sandbox.
+
+         Returns an [`Image`](https://modal.com/docs/reference/modal.Image) object which
+         can be used to spawn a new Sandbox with the same filesystem.
+         """
+         req = api_pb2.SandboxSnapshotFsRequest(sandbox_id=self.object_id, timeout=timeout)
+         resp = await retry_transient_errors(self._client.stub.SandboxSnapshotFs, req)
+
+         if resp.result.status != api_pb2.GenericResult.GENERIC_STATUS_SUCCESS:
+             raise ExecutionError(resp.result.exception)
+
+         image_id = resp.image_id
+         metadata = resp.image_metadata
+
+         async def _load(self: _Image, resolver: Resolver, existing_object_id: Optional[str]):
+             self._hydrate(image_id, resolver.client, metadata)
+
+         rep = "Image()"
+         image = _Image._from_loader(_load, rep)
+
+         return image
+
      # Live handle methods

      async def wait(self, raise_on_termination: bool = True):
-         """Wait for the sandbox to finish running."""
+         """Wait for the Sandbox to finish running."""

          while True:
              req = api_pb2.SandboxWaitRequest(sandbox_id=self.object_id, timeout=50)
@@ -357,10 +384,37 @@ class _Sandbox(_Object, type_prefix="sb"):
                  raise SandboxTerminatedError()
              break

+     async def tunnels(self, timeout: int = 50) -> dict[int, Tunnel]:
+         """Get tunnel metadata for the sandbox.
+
+         Raises `SandboxTimeoutError` if the tunnels are not available after the timeout.
+
+         Returns a dictionary of `Tunnel` objects which are keyed by the container port.
+
+         NOTE: Previous to client v0.64.152, this returned a list of `TunnelData` objects.
+         """
+
+         if self._tunnels:
+             return self._tunnels
+
+         req = api_pb2.SandboxGetTunnelsRequest(sandbox_id=self.object_id, timeout=timeout)
+         resp = await retry_transient_errors(self._client.stub.SandboxGetTunnels, req)
+
+         # If we couldn't get the tunnels in time, report the timeout.
+         if resp.result.status == api_pb2.GenericResult.GENERIC_STATUS_TIMEOUT:
+             raise SandboxTimeoutError()
+
+         # Otherwise, we got the tunnels and can report the result.
+         self._tunnels = {
+             t.container_port: Tunnel(t.host, t.port, t.unencrypted_host, t.unencrypted_port) for t in resp.tunnels
+         }
+
+         return self._tunnels
+
      async def terminate(self):
-         """Terminate sandbox execution.
+         """Terminate Sandbox execution.

-         This is a no-op if the sandbox has already finished running."""
+         This is a no-op if the Sandbox has already finished running."""

          await retry_transient_errors(
              self._client.stub.SandboxTerminate, api_pb2.SandboxTerminateRequest(sandbox_id=self.object_id)
@@ -368,9 +422,9 @@ class _Sandbox(_Object, type_prefix="sb"):
          await self.wait(raise_on_termination=False)

      async def poll(self) -> Optional[int]:
-         """Check if the sandbox has finished running.
+         """Check if the Sandbox has finished running.

-         Returns `None` if the sandbox is still running, else returns the exit code.
+         Returns `None` if the Sandbox is still running, else returns the exit code.
          """

          req = api_pb2.SandboxWaitRequest(sandbox_id=self.object_id, timeout=0)
@@ -381,21 +435,186 @@ class _Sandbox(_Object, type_prefix="sb"):

          return self.returncode

+     async def _get_task_id(self):
+         while not self._task_id:
+             resp = await self._client.stub.SandboxGetTaskId(api_pb2.SandboxGetTaskIdRequest(sandbox_id=self.object_id))
+             self._task_id = resp.task_id
+             if not self._task_id:
+                 await asyncio.sleep(0.5)
+         return self._task_id
+
+     @overload
+     async def exec(
+         self,
+         *cmds: str,
+         pty_info: Optional[api_pb2.PTYInfo] = None,
+         stdout: StreamType = StreamType.PIPE,
+         stderr: StreamType = StreamType.PIPE,
+         timeout: Optional[int] = None,
+         workdir: Optional[str] = None,
+         secrets: Sequence[_Secret] = (),
+         text: Literal[True] = True,
+         bufsize: Literal[-1, 1] = -1,
+         _pty_info: Optional[api_pb2.PTYInfo] = None,
+     ) -> _ContainerProcess[str]:
+         ...
+
+     @overload
+     async def exec(
+         self,
+         *cmds: str,
+         pty_info: Optional[api_pb2.PTYInfo] = None,
+         stdout: StreamType = StreamType.PIPE,
+         stderr: StreamType = StreamType.PIPE,
+         timeout: Optional[int] = None,
+         workdir: Optional[str] = None,
+         secrets: Sequence[_Secret] = (),
+         text: Literal[False] = False,
+         bufsize: Literal[-1, 1] = -1,
+         _pty_info: Optional[api_pb2.PTYInfo] = None,
+     ) -> _ContainerProcess[bytes]:
+         ...
+
+     async def exec(
+         self,
+         *cmds: str,
+         pty_info: Optional[api_pb2.PTYInfo] = None,  # Deprecated: internal use only
+         stdout: StreamType = StreamType.PIPE,
+         stderr: StreamType = StreamType.PIPE,
+         timeout: Optional[int] = None,
+         workdir: Optional[str] = None,
+         secrets: Sequence[_Secret] = (),
+         # Encode output as text.
+         text: bool = True,
+         # Control line-buffered output.
+         # -1 means unbuffered, 1 means line-buffered (only available if `text=True`).
+         bufsize: Literal[-1, 1] = -1,
+         # Internal option to set terminal size and metadata
+         _pty_info: Optional[api_pb2.PTYInfo] = None,
+     ):
+         """Execute a command in the Sandbox and return
+         a [`ContainerProcess`](/docs/reference/modal.ContainerProcess#modalcontainer_process) handle.
+
+         **Usage**
+
+         ```python
+         app = modal.App.lookup("my-app", create_if_missing=True)
+
+         sandbox = modal.Sandbox.create("sleep", "infinity", app=app)
+
+         process = sandbox.exec("bash", "-c", "for i in $(seq 1 10); do echo foo $i; sleep 0.5; done")
+
+         for line in process.stdout:
+             print(line)
+         ```
+         """
+
+         if workdir is not None and not workdir.startswith("/"):
+             raise InvalidError(f"workdir must be an absolute path, got: {workdir}")
+
+         # Force secret resolution so we can pass the secret IDs to the backend.
+         for secret in secrets:
+             await secret.resolve(client=self._client)
+
+         task_id = await self._get_task_id()
+         req = api_pb2.ContainerExecRequest(
+             task_id=task_id,
+             command=cmds,
+             pty_info=_pty_info or pty_info,
+             runtime_debug=config.get("function_runtime_debug"),
+             timeout_secs=timeout or 0,
+             workdir=workdir,
+             secret_ids=[secret.object_id for secret in secrets],
+         )
+         resp = await retry_transient_errors(self._client.stub.ContainerExec, req)
+         by_line = bufsize == 1
+         return _ContainerProcess(resp.exec_id, self._client, stdout=stdout, stderr=stderr, text=text, by_line=by_line)
+
+     @overload
+     async def open(
+         self,
+         path: str,
+         mode: "_typeshed.OpenTextMode",
+     ) -> _FileIO[str]:
+         ...
+
+     @overload
+     async def open(
+         self,
+         path: str,
+         mode: "_typeshed.OpenBinaryMode",
+     ) -> _FileIO[bytes]:
+         ...
+
+     async def open(
+         self,
+         path: str,
+         mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"] = "r",
+     ):
+         """Open a file in the Sandbox and return
+         a [`FileIO`](/docs/reference/modal.FileIO#modalfile_io) handle.
+
+         **Usage**
+
+         ```python notest
+         sb = modal.Sandbox.create(app=sb_app)
+         f = sb.open("/test.txt", "w")
+         f.write("hello")
+         f.close()
+         ```
+         """
+         task_id = await self._get_task_id()
+         return await _FileIO.create(path, mode, self._client, task_id)
+
+     async def ls(self, path: str) -> list[str]:
+         """List the contents of a directory in the Sandbox."""
+         task_id = await self._get_task_id()
+         return await _FileIO.ls(path, self._client, task_id)
+
+     async def mkdir(self, path: str, parents: bool = False) -> None:
+         """Create a new directory in the Sandbox."""
+         task_id = await self._get_task_id()
+         return await _FileIO.mkdir(path, self._client, task_id, parents)
+
+     async def rm(self, path: str, recursive: bool = False) -> None:
+         """Remove a file or directory in the Sandbox."""
+         task_id = await self._get_task_id()
+         return await _FileIO.rm(path, self._client, task_id, recursive)
+
+     async def watch(
+         self,
+         path: str,
+         filter: Optional[list[FileWatchEventType]] = None,
+         recursive: Optional[bool] = None,
+         timeout: Optional[int] = None,
+     ) -> AsyncIterator[FileWatchEvent]:
+         task_id = await self._get_task_id()
+         async for event in _FileIO.watch(path, self._client, task_id, filter, recursive, timeout):
+             yield event
+
      @property
-     def stdout(self) -> _LogsReader:
-         """`LogsReader` for the sandbox's stdout stream."""
+     def stdout(self) -> _StreamReader[str]:
+         """
+         [`StreamReader`](/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
+         the sandbox's stdout stream.
+         """

          return self._stdout

      @property
-     def stderr(self) -> _LogsReader:
-         """`LogsReader` for the sandbox's stderr stream."""
+     def stderr(self) -> _StreamReader[str]:
+         """[`StreamReader`](/docs/reference/modal.io_streams#modalio_streamsstreamreader) for
+         the sandbox's stderr stream.
+         """

          return self._stderr

      @property
      def stdin(self) -> _StreamWriter:
-         """`StreamWriter` for the sandbox's stdin stream."""
+         """
+         [`StreamWriter`](/docs/reference/modal.io_streams#modalio_streamsstreamwriter) for
+         the sandbox's stdin stream.
+         """

          return self._stdin

@@ -414,5 +633,64 @@ class _Sandbox(_Object, type_prefix="sb"):
          else:
              return self._result.exitcode

+     @staticmethod
+     async def list(
+         *, app_id: Optional[str] = None, tags: Optional[dict[str, str]] = None, client: Optional[_Client] = None
+     ) -> AsyncGenerator["_Sandbox", None]:
+         """List all sandboxes for the current environment or app ID (if specified). If tags are specified, only
+         sandboxes that have at least those tags are returned. Returns an iterator over `Sandbox` objects."""
+         before_timestamp = None
+         environment_name = _get_environment_name()
+         if client is None:
+             client = await _Client.from_env()
+
+         tags_list = [api_pb2.SandboxTag(tag_name=name, tag_value=value) for name, value in tags.items()] if tags else []
+
+         while True:
+             req = api_pb2.SandboxListRequest(
+                 app_id=app_id,
+                 before_timestamp=before_timestamp,
+                 environment_name=environment_name,
+                 include_finished=False,
+                 tags=tags_list,
+             )
+
+             # Fetches a batch of sandboxes.
+             try:
+                 resp = await retry_transient_errors(client.stub.SandboxList, req)
+             except GRPCError as exc:
+                 raise InvalidError(exc.message) if exc.status == Status.INVALID_ARGUMENT else exc
+
+             if not resp.sandboxes:
+                 return
+
+             for sandbox_info in resp.sandboxes:
+                 obj = _Sandbox._new_hydrated(sandbox_info.id, client, None)
+                 obj._result = sandbox_info.task_info.result
+                 yield obj
+
+             # Fetch the next batch starting from the end of the current one.
+             before_timestamp = resp.sandboxes[-1].created_at
+

  Sandbox = synchronize_api(_Sandbox)
+
+
+ def __getattr__(name):
+     if name == "LogsReader":
+         deprecation_error(
+             (2024, 8, 12),
+             "`modal.sandbox.LogsReader` is deprecated. Please import `modal.io_streams.StreamReader` instead.",
+         )
+         from .io_streams import StreamReader
+
+         return StreamReader
+     elif name == "StreamWriter":
+         deprecation_error(
+             (2024, 8, 12),
+             "`modal.sandbox.StreamWriter` is deprecated. Please import `modal.io_streams.StreamWriter` instead.",
+         )
+         from .io_streams import StreamWriter
+
+         return StreamWriter
+     raise AttributeError(f"module {__name__} has no attribute {name}")
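
Taken together, the `sandbox.py` changes replace the old `LogsReader`/`StreamWriter` pair with generic IO streams and turn `Sandbox` into an App-scoped handle with `exec`, tunnels, and filesystem helpers. A minimal end-to-end sketch of the new surface, using the synchronous wrappers that `synchronize_api` generates for these methods (the app name, command, and paths are illustrative):

```python
import modal

# Sandboxes must now be tied to an App; creating one without an App is a deprecation error.
app = modal.App.lookup("sandbox-demo", create_if_missing=True)

# With no entrypoint args, the sandbox runs `sleep` so it stays alive for interactive use.
sb = modal.Sandbox.create(app=app, encrypted_ports=[8080])

# exec() returns a ContainerProcess; bufsize=1 requests line-buffered text output.
p = sb.exec("bash", "-c", "echo hello from the sandbox", bufsize=1)
for line in p.stdout:
    print(line, end="")

# Filesystem helpers are routed through the sandbox's task.
sb.mkdir("/data")
f = sb.open("/data/notes.txt", "w")
f.write("hello")
f.close()
print(sb.ls("/data"))

# Tunnel metadata is keyed by container port.
tunnel = sb.tunnels()[8080]
print(tunnel.host, tunnel.port)

sb.terminate()
```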