modal 1.2.1.dev8__py3-none-any.whl → 1.2.2.dev19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. modal/_clustered_functions.py +1 -3
  2. modal/_container_entrypoint.py +4 -1
  3. modal/_functions.py +33 -49
  4. modal/_grpc_client.py +148 -0
  5. modal/_output.py +3 -4
  6. modal/_partial_function.py +22 -2
  7. modal/_runtime/container_io_manager.py +21 -22
  8. modal/_utils/async_utils.py +12 -3
  9. modal/_utils/auth_token_manager.py +1 -4
  10. modal/_utils/blob_utils.py +3 -4
  11. modal/_utils/function_utils.py +4 -0
  12. modal/_utils/grpc_utils.py +80 -51
  13. modal/_utils/mount_utils.py +26 -1
  14. modal/_utils/task_command_router_client.py +536 -0
  15. modal/app.py +7 -5
  16. modal/cli/cluster.py +4 -2
  17. modal/cli/config.py +3 -1
  18. modal/cli/container.py +5 -4
  19. modal/cli/entry_point.py +1 -0
  20. modal/cli/launch.py +1 -2
  21. modal/cli/network_file_system.py +1 -4
  22. modal/cli/queues.py +1 -2
  23. modal/cli/secret.py +1 -2
  24. modal/client.py +5 -115
  25. modal/client.pyi +2 -91
  26. modal/cls.py +1 -2
  27. modal/config.py +3 -1
  28. modal/container_process.py +287 -11
  29. modal/container_process.pyi +95 -32
  30. modal/dict.py +12 -12
  31. modal/environments.py +1 -2
  32. modal/exception.py +4 -0
  33. modal/experimental/__init__.py +2 -3
  34. modal/experimental/flash.py +27 -57
  35. modal/experimental/flash.pyi +6 -20
  36. modal/file_io.py +13 -27
  37. modal/functions.pyi +6 -6
  38. modal/image.py +24 -3
  39. modal/image.pyi +4 -0
  40. modal/io_streams.py +433 -127
  41. modal/io_streams.pyi +236 -171
  42. modal/mount.py +4 -4
  43. modal/network_file_system.py +5 -6
  44. modal/parallel_map.py +29 -31
  45. modal/parallel_map.pyi +3 -9
  46. modal/partial_function.pyi +4 -1
  47. modal/queue.py +17 -18
  48. modal/runner.py +12 -11
  49. modal/sandbox.py +148 -42
  50. modal/sandbox.pyi +139 -0
  51. modal/secret.py +4 -5
  52. modal/snapshot.py +1 -4
  53. modal/token_flow.py +1 -1
  54. modal/volume.py +22 -22
  55. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/METADATA +1 -1
  56. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/RECORD +70 -68
  57. modal_proto/api.proto +2 -24
  58. modal_proto/api_grpc.py +0 -32
  59. modal_proto/api_pb2.py +838 -878
  60. modal_proto/api_pb2.pyi +8 -70
  61. modal_proto/api_pb2_grpc.py +0 -67
  62. modal_proto/api_pb2_grpc.pyi +0 -22
  63. modal_proto/modal_api_grpc.py +175 -177
  64. modal_proto/sandbox_router.proto +0 -4
  65. modal_proto/sandbox_router_pb2.pyi +0 -4
  66. modal_version/__init__.py +1 -1
  67. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/WHEEL +0 -0
  68. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/entry_points.txt +0 -0
  69. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/licenses/LICENSE +0 -0
  70. {modal-1.2.1.dev8.dist-info → modal-1.2.2.dev19.dist-info}/top_level.txt +0 -0
modal/container_process.py CHANGED
@@ -7,18 +7,18 @@ from typing import Generic, Optional, TypeVar
  from modal_proto import api_pb2
 
  from ._utils.async_utils import TaskContext, synchronize_api
- from ._utils.grpc_utils import retry_transient_errors
  from ._utils.shell_utils import stream_from_stdin, write_to_fd
+ from ._utils.task_command_router_client import TaskCommandRouterClient
  from .client import _Client
  from .config import logger
- from .exception import InteractiveTimeoutError, InvalidError
+ from .exception import ExecTimeoutError, InteractiveTimeoutError, InvalidError
  from .io_streams import _StreamReader, _StreamWriter
  from .stream_type import StreamType
 
  T = TypeVar("T", str, bytes)
 
 
- class _ContainerProcess(Generic[T]):
+ class _ContainerProcessThroughServer(Generic[T]):
  _process_id: Optional[str] = None
  _stdout: _StreamReader[T]
  _stderr: _StreamReader[T]
@@ -31,6 +31,7 @@ class _ContainerProcess(Generic[T]):
  def __init__(
  self,
  process_id: str,
+ task_id: str,
  client: _Client,
  stdout: StreamType = StreamType.PIPE,
  stderr: StreamType = StreamType.PIPE,
@@ -52,6 +53,7 @@ class _ContainerProcess(Generic[T]):
  text=text,
  by_line=by_line,
  deadline=exec_deadline,
+ task_id=task_id,
  )
  self._stderr = _StreamReader[T](
  api_pb2.FILE_DESCRIPTOR_STDERR,
@@ -62,6 +64,7 @@ class _ContainerProcess(Generic[T]):
  text=text,
  by_line=by_line,
  deadline=exec_deadline,
+ task_id=task_id,
  )
  self._stdin = _StreamWriter(process_id, "container_process", self._client)
 
@@ -97,6 +100,7 @@ class _ContainerProcess(Generic[T]):
 
  Returns `None` if the process is still running, else returns the exit code.
  """
+ assert self._process_id
  if self._returncode is not None:
  return self._returncode
  if self._exec_deadline and time.monotonic() >= self._exec_deadline:
@@ -106,7 +110,7 @@ class _ContainerProcess(Generic[T]):
  return self._returncode
 
  req = api_pb2.ContainerExecWaitRequest(exec_id=self._process_id, timeout=0)
- resp: api_pb2.ContainerExecWaitResponse = await retry_transient_errors(self._client.stub.ContainerExecWait, req)
+ resp = await self._client.stub.ContainerExecWait(req)
 
  if resp.completed:
  self._returncode = resp.exit_code
@@ -115,11 +119,10 @@ class _ContainerProcess(Generic[T]):
  return None
 
  async def _wait_for_completion(self) -> int:
+ assert self._process_id
  while True:
  req = api_pb2.ContainerExecWaitRequest(exec_id=self._process_id, timeout=10)
- resp: api_pb2.ContainerExecWaitResponse = await retry_transient_errors(
- self._client.stub.ContainerExecWait, req
- )
+ resp = await self._client.stub.ContainerExecWait(req)
  if resp.completed:
  return resp.exit_code
 
@@ -155,11 +158,16 @@ class _ContainerProcess(Generic[T]):
  on_connect = asyncio.Event()
 
  async def _write_to_fd_loop(stream: _StreamReader):
+ # This is required to make modal shell to an existing task work,
+ # since that uses ContainerExec RPCs directly, but this is hacky.
+ #
+ # TODO(saltzm): Once we use the new exec path for that use case, this code can all be removed.
+ from .io_streams import _StreamReaderThroughServer
+
+ assert isinstance(stream._impl, _StreamReaderThroughServer)
+ stream_impl = stream._impl
  # Don't skip empty messages so we can detect when the process has booted.
- async for chunk in stream._get_logs(skip_empty_messages=False):
- if chunk is None:
- break
-
+ async for chunk in stream_impl._get_logs(skip_empty_messages=False):
  if not on_connect.is_set():
  connecting_status.stop()
  on_connect.set()
@@ -193,4 +201,272 @@ class _ContainerProcess(Generic[T]):
  raise InteractiveTimeoutError("Failed to establish connection to container. Please try again.")
 
 
+ async def _iter_stream_as_bytes(stream: _StreamReader[T]):
+ """Yield raw bytes from a StreamReader regardless of text mode/backend."""
+ async for part in stream:
+ if isinstance(part, str):
+ yield part.encode("utf-8")
+ else:
+ yield part
+
+
+ class _ContainerProcessThroughCommandRouter(Generic[T]):
+ """
+ Container process implementation that works via direct communication with
+ the Modal worker where the container is running.
+ """
+
+ def __init__(
+ self,
+ process_id: str,
+ client: _Client,
+ command_router_client: TaskCommandRouterClient,
+ task_id: str,
+ *,
+ stdout: StreamType = StreamType.PIPE,
+ stderr: StreamType = StreamType.PIPE,
+ exec_deadline: Optional[float] = None,
+ text: bool = True,
+ by_line: bool = False,
+ ) -> None:
+ self._client = client
+ self._command_router_client = command_router_client
+ self._process_id = process_id
+ self._exec_deadline = exec_deadline
+ self._text = text
+ self._by_line = by_line
+ self._task_id = task_id
+ self._stdout = _StreamReader[T](
+ api_pb2.FILE_DESCRIPTOR_STDOUT,
+ process_id,
+ "container_process",
+ self._client,
+ stream_type=stdout,
+ text=text,
+ by_line=by_line,
+ deadline=exec_deadline,
+ command_router_client=self._command_router_client,
+ task_id=self._task_id,
+ )
+ self._stderr = _StreamReader[T](
+ api_pb2.FILE_DESCRIPTOR_STDERR,
+ process_id,
+ "container_process",
+ self._client,
+ stream_type=stderr,
+ text=text,
+ by_line=by_line,
+ deadline=exec_deadline,
+ command_router_client=self._command_router_client,
+ task_id=self._task_id,
+ )
+ self._stdin = _StreamWriter(
+ process_id,
+ "container_process",
+ self._client,
+ command_router_client=self._command_router_client,
+ task_id=self._task_id,
+ )
+ self._returncode = None
+
+ def __repr__(self) -> str:
+ return f"ContainerProcess(process_id={self._process_id!r})"
+
+ @property
+ def stdout(self) -> _StreamReader[T]:
+ return self._stdout
+
+ @property
+ def stderr(self) -> _StreamReader[T]:
+ return self._stderr
+
+ @property
+ def stdin(self) -> _StreamWriter:
+ return self._stdin
+
+ @property
+ def returncode(self) -> int:
+ if self._returncode is None:
+ raise InvalidError(
+ "You must call wait() before accessing the returncode. "
+ "To poll for the status of a running process, use poll() instead."
+ )
+ return self._returncode
+
+ async def poll(self) -> Optional[int]:
+ if self._returncode is not None:
+ return self._returncode
+ try:
+ resp = await self._command_router_client.exec_poll(self._task_id, self._process_id, self._exec_deadline)
+ which = resp.WhichOneof("exit_status")
+ if which is None:
+ return None
+
+ if which == "code":
+ self._returncode = int(resp.code)
+ return self._returncode
+ elif which == "signal":
+ self._returncode = 128 + int(resp.signal)
+ return self._returncode
+ else:
+ logger.debug(f"ContainerProcess {self._process_id} exited with unexpected status: {which}")
+ raise InvalidError("Unexpected exit status")
+ except ExecTimeoutError:
+ logger.debug(f"ContainerProcess poll for {self._process_id} did not complete within deadline")
+ # TODO(saltzm): This is a weird API, but customers currently may rely on it. This
+ # should probably raise an ExecTimeoutError instead.
+ self._returncode = -1
+ return self._returncode
+ except Exception as e:
+ # Re-raise non-transient errors or errors resulting from exceeding retries on transient errors.
+ logger.warning(f"ContainerProcess poll for {self._process_id} failed: {e}")
+ raise
+
+ async def wait(self) -> int:
+ if self._returncode is not None:
+ return self._returncode
+
+ try:
+ resp = await self._command_router_client.exec_wait(self._task_id, self._process_id, self._exec_deadline)
+ which = resp.WhichOneof("exit_status")
+ if which == "code":
+ self._returncode = int(resp.code)
+ elif which == "signal":
+ self._returncode = 128 + int(resp.signal)
+ else:
+ logger.debug(f"ContainerProcess {self._process_id} exited with unexpected status: {which}")
+ self._returncode = -1
+ raise InvalidError("Unexpected exit status")
+ except ExecTimeoutError:
+ logger.debug(f"ContainerProcess {self._process_id} did not complete within deadline")
+ # TODO(saltzm): This is a weird API, but customers currently may rely on it. This
+ # should be a ExecTimeoutError.
+ self._returncode = -1
+
+ return self._returncode
+
+ async def attach(self):
+ if platform.system() == "Windows":
+ print("interactive exec is not currently supported on Windows.")
+ return
+
+ from ._output import make_console
+
+ console = make_console()
+
+ connecting_status = console.status("Connecting...")
+ connecting_status.start()
+ on_connect = asyncio.Event()
+
+ async def _write_to_fd_loop(stream: _StreamReader[T]):
+ async for chunk in _iter_stream_as_bytes(stream):
+ if chunk is None:
+ break
+
+ if not on_connect.is_set():
+ connecting_status.stop()
+ on_connect.set()
+
+ await write_to_fd(stream.file_descriptor, chunk)
+
+ async def _handle_input(data: bytes, message_index: int):
+ self.stdin.write(data)
+ await self.stdin.drain()
+
+ async with TaskContext() as tc:
+ stdout_task = tc.create_task(_write_to_fd_loop(self.stdout))
+ stderr_task = tc.create_task(_write_to_fd_loop(self.stderr))
+
+ try:
+ # Time out if we can't connect fast enough.
+ await asyncio.wait_for(on_connect.wait(), timeout=60)
+
+ async with stream_from_stdin(_handle_input, use_raw_terminal=True):
+ await stdout_task
+ await stderr_task
+
+ except (asyncio.TimeoutError, TimeoutError):
+ connecting_status.stop()
+ stdout_task.cancel()
+ stderr_task.cancel()
+ raise InteractiveTimeoutError("Failed to establish connection to container. Please try again.")
+
+
+ class _ContainerProcess(Generic[T]):
+ """Represents a running process in a container."""
+
+ def __init__(
+ self,
+ process_id: str,
+ task_id: str,
+ client: _Client,
+ stdout: StreamType = StreamType.PIPE,
+ stderr: StreamType = StreamType.PIPE,
+ exec_deadline: Optional[float] = None,
+ text: bool = True,
+ by_line: bool = False,
+ command_router_client: Optional[TaskCommandRouterClient] = None,
+ ) -> None:
+ if command_router_client is None:
+ self._impl = _ContainerProcessThroughServer(
+ process_id,
+ task_id,
+ client,
+ stdout=stdout,
+ stderr=stderr,
+ exec_deadline=exec_deadline,
+ text=text,
+ by_line=by_line,
+ )
+ else:
+ self._impl = _ContainerProcessThroughCommandRouter(
+ process_id,
+ client,
+ command_router_client,
+ task_id,
+ stdout=stdout,
+ stderr=stderr,
+ exec_deadline=exec_deadline,
+ text=text,
+ by_line=by_line,
+ )
+
+ def __repr__(self) -> str:
+ return self._impl.__repr__()
+
+ @property
+ def stdout(self) -> _StreamReader[T]:
+ """StreamReader for the container process's stdout stream."""
+ return self._impl.stdout
+
+ @property
+ def stderr(self) -> _StreamReader[T]:
+ """StreamReader for the container process's stderr stream."""
+ return self._impl.stderr
+
+ @property
+ def stdin(self) -> _StreamWriter:
+ """StreamWriter for the container process's stdin stream."""
+ return self._impl.stdin
+
+ @property
+ def returncode(self) -> int:
+ return self._impl.returncode
+
+ async def poll(self) -> Optional[int]:
+ """Check if the container process has finished running.
+
+ Returns `None` if the process is still running, else returns the exit code.
+ """
+ return await self._impl.poll()
+
+ async def wait(self) -> int:
+ """Wait for the container process to finish running. Returns the exit code."""
+ return await self._impl.wait()
+
+ async def attach(self):
+ """mdmd:hidden"""
+ await self._impl.attach()
+
+
  ContainerProcess = synchronize_api(_ContainerProcess)
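The container_process.py change above splits the old `_ContainerProcess` into two backends behind a thin facade: `_ContainerProcessThroughServer` keeps the existing `ContainerExecWait` flow, while `_ContainerProcessThroughCommandRouter` talks directly to the worker and converts the `code`/`signal` exit-status oneof into a return code (`128 + signal` for signal exits). Below is a minimal, self-contained sketch of that dispatch-and-mapping pattern; every name in it is a simplified stand-in, not the Modal API.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class ExitStatus:
    # Mirrors the "exit_status" oneof in the router response: at most one of
    # `code` or `signal` is set once the process has exited.
    code: Optional[int] = None
    signal: Optional[int] = None


def exit_status_to_returncode(status: ExitStatus) -> int:
    # Same convention as the new wait()/poll(): a signal exit is reported as
    # 128 + signal number, matching common shell behavior.
    if status.code is not None:
        return int(status.code)
    if status.signal is not None:
        return 128 + int(status.signal)
    raise ValueError("process has not exited yet")


class ServerBackend:
    """Stand-in for the server-mediated path (ContainerExecWait RPCs)."""

    def wait(self) -> int:
        return exit_status_to_returncode(ExitStatus(code=0))


class RouterBackend:
    """Stand-in for the direct-to-worker path via a command router client."""

    def wait(self) -> int:
        return exit_status_to_returncode(ExitStatus(signal=9))  # SIGKILL -> 137


class Process:
    """Facade that picks a backend, like the new _ContainerProcess wrapper."""

    def __init__(self, router: Optional[RouterBackend] = None) -> None:
        # No router client configured -> fall back to the server path.
        self._impl = router if router is not None else ServerBackend()

    def wait(self) -> int:
        return self._impl.wait()


if __name__ == "__main__":
    print(Process().wait())                 # 0, via the server path
    print(Process(RouterBackend()).wait())  # 137, via the router path
```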
modal/container_process.pyi CHANGED
@@ -1,3 +1,4 @@
+ import modal._utils.task_command_router_client
  import modal.client
  import modal.io_streams
  import modal.stream_type
@@ -6,7 +7,7 @@ import typing_extensions
 
  T = typing.TypeVar("T")
 
- class _ContainerProcess(typing.Generic[T]):
+ class _ContainerProcessThroughServer(typing.Generic[T]):
  """Abstract base class for generic types.
 
  A generic type is typically declared by inheriting from
@@ -39,6 +40,7 @@ class _ContainerProcess(typing.Generic[T]):
  def __init__(
  self,
  process_id: str,
+ task_id: str,
  client: modal.client._Client,
  stdout: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
  stderr: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
@@ -86,47 +88,114 @@ class _ContainerProcess(typing.Generic[T]):
  """mdmd:hidden"""
  ...
 
- SUPERSELF = typing.TypeVar("SUPERSELF", covariant=True)
+ def _iter_stream_as_bytes(stream: modal.io_streams._StreamReader[T]):
+ """Yield raw bytes from a StreamReader regardless of text mode/backend."""
+ ...
 
- class ContainerProcess(typing.Generic[T]):
- """Abstract base class for generic types.
+ class _ContainerProcessThroughCommandRouter(typing.Generic[T]):
+ """Container process implementation that works via direct communication with
+ the Modal worker where the container is running.
+ """
+ def __init__(
+ self,
+ process_id: str,
+ client: modal.client._Client,
+ command_router_client: modal._utils.task_command_router_client.TaskCommandRouterClient,
+ task_id: str,
+ *,
+ stdout: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+ stderr: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+ exec_deadline: typing.Optional[float] = None,
+ text: bool = True,
+ by_line: bool = False,
+ ) -> None:
+ """Initialize self. See help(type(self)) for accurate signature."""
+ ...
 
- A generic type is typically declared by inheriting from
- this class parameterized with one or more type variables.
- For example, a generic mapping type might be defined as::
+ def __repr__(self) -> str:
+ """Return repr(self)."""
+ ...
 
- class Mapping(Generic[KT, VT]):
- def __getitem__(self, key: KT) -> VT:
- ...
- # Etc.
+ @property
+ def stdout(self) -> modal.io_streams._StreamReader[T]: ...
+ @property
+ def stderr(self) -> modal.io_streams._StreamReader[T]: ...
+ @property
+ def stdin(self) -> modal.io_streams._StreamWriter: ...
+ @property
+ def returncode(self) -> int: ...
+ async def poll(self) -> typing.Optional[int]: ...
+ async def wait(self) -> int: ...
+ async def attach(self): ...
 
- This class can then be used as follows::
+ class _ContainerProcess(typing.Generic[T]):
+ """Represents a running process in a container."""
+ def __init__(
+ self,
+ process_id: str,
+ task_id: str,
+ client: modal.client._Client,
+ stdout: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+ stderr: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+ exec_deadline: typing.Optional[float] = None,
+ text: bool = True,
+ by_line: bool = False,
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
+ ) -> None:
+ """Initialize self. See help(type(self)) for accurate signature."""
+ ...
 
- def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
- try:
- return mapping[key]
- except KeyError:
- return default
- """
+ def __repr__(self) -> str:
+ """Return repr(self)."""
+ ...
 
- _process_id: typing.Optional[str]
- _stdout: modal.io_streams.StreamReader[T]
- _stderr: modal.io_streams.StreamReader[T]
- _stdin: modal.io_streams.StreamWriter
- _exec_deadline: typing.Optional[float]
- _text: bool
- _by_line: bool
- _returncode: typing.Optional[int]
+ @property
+ def stdout(self) -> modal.io_streams._StreamReader[T]:
+ """StreamReader for the container process's stdout stream."""
+ ...
+
+ @property
+ def stderr(self) -> modal.io_streams._StreamReader[T]:
+ """StreamReader for the container process's stderr stream."""
+ ...
+
+ @property
+ def stdin(self) -> modal.io_streams._StreamWriter:
+ """StreamWriter for the container process's stdin stream."""
+ ...
+
+ @property
+ def returncode(self) -> int: ...
+ async def poll(self) -> typing.Optional[int]:
+ """Check if the container process has finished running.
+
+ Returns `None` if the process is still running, else returns the exit code.
+ """
+ ...
+
+ async def wait(self) -> int:
+ """Wait for the container process to finish running. Returns the exit code."""
+ ...
+
+ async def attach(self):
+ """mdmd:hidden"""
+ ...
+
+ SUPERSELF = typing.TypeVar("SUPERSELF", covariant=True)
 
+ class ContainerProcess(typing.Generic[T]):
+ """Represents a running process in a container."""
  def __init__(
  self,
  process_id: str,
+ task_id: str,
  client: modal.client.Client,
  stdout: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
  stderr: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
  exec_deadline: typing.Optional[float] = None,
  text: bool = True,
  by_line: bool = False,
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
  ) -> None: ...
  def __repr__(self) -> str: ...
  @property
@@ -164,12 +233,6 @@ class ContainerProcess(typing.Generic[T]):
 
  poll: __poll_spec[typing_extensions.Self]
 
- class ___wait_for_completion_spec(typing_extensions.Protocol[SUPERSELF]):
- def __call__(self, /) -> int: ...
- async def aio(self, /) -> int: ...
-
- _wait_for_completion: ___wait_for_completion_spec[typing_extensions.Self]
-
  class __wait_spec(typing_extensions.Protocol[SUPERSELF]):
  def __call__(self, /) -> int:
  """Wait for the container process to finish running. Returns the exit code."""
modal/dict.py CHANGED
@@ -9,6 +9,7 @@ from grpclib import GRPCError, Status
  from synchronicity import classproperty
  from synchronicity.async_wrap import asynccontextmanager
 
+ from modal._utils.grpc_utils import Retry
  from modal_proto import api_pb2
 
  from ._object import (
@@ -22,7 +23,6 @@ from ._resolver import Resolver
  from ._serialization import deserialize, serialize
  from ._utils.async_utils import TaskContext, synchronize_api
  from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
- from ._utils.grpc_utils import retry_transient_errors
  from ._utils.name_utils import check_object_name
  from ._utils.time_utils import as_timestamp, timestamp_to_localized_dt
  from .client import _Client
@@ -105,7 +105,7 @@ class _DictManager:
  object_creation_type=object_creation_type,
  )
  try:
- await retry_transient_errors(client.stub.DictGetOrCreate, req)
+ await client.stub.DictGetOrCreate(req)
  except GRPCError as exc:
  if exc.status == Status.ALREADY_EXISTS and not allow_existing:
  raise AlreadyExistsError(exc.message)
@@ -157,7 +157,7 @@ class _DictManager:
  req = api_pb2.DictListRequest(
  environment_name=_get_environment_name(environment_name), pagination=pagination
  )
- resp = await retry_transient_errors(client.stub.DictList, req)
+ resp = await client.stub.DictList(req)
  items.extend(resp.dicts)
  finished = (len(resp.dicts) < max_page_size) or (max_objects is not None and len(items) >= max_objects)
  return finished
@@ -215,7 +215,7 @@ class _DictManager:
  raise
  else:
  req = api_pb2.DictDeleteRequest(dict_id=obj.object_id)
- await retry_transient_errors(obj._client.stub.DictDelete, req)
+ await obj._client.stub.DictDelete(req)
 
 
  DictManager = synchronize_api(_DictManager)
@@ -327,7 +327,7 @@ class _Dict(_Object, type_prefix="di"):
  environment_name=_get_environment_name(environment_name),
  data=serialized,
  )
- response = await retry_transient_errors(client.stub.DictGetOrCreate, request, total_timeout=10.0)
+ response = await client.stub.DictGetOrCreate(request, retry=Retry(total_timeout=10.0))
  async with TaskContext() as tc:
  request = api_pb2.DictHeartbeatRequest(dict_id=response.dict_id)
  tc.infinite_loop(lambda: client.stub.DictHeartbeat(request), sleep=_heartbeat_sleep)
@@ -418,7 +418,7 @@ class _Dict(_Object, type_prefix="di"):
  async def clear(self) -> None:
  """Remove all items from the Dict."""
  req = api_pb2.DictClearRequest(dict_id=self.object_id)
- await retry_transient_errors(self._client.stub.DictClear, req)
+ await self._client.stub.DictClear(req)
 
  @live_method
  async def get(self, key: Any, default: Optional[Any] = None) -> Any:
@@ -427,7 +427,7 @@ class _Dict(_Object, type_prefix="di"):
  Returns `default` if key does not exist.
  """
  req = api_pb2.DictGetRequest(dict_id=self.object_id, key=serialize(key))
- resp = await retry_transient_errors(self._client.stub.DictGet, req)
+ resp = await self._client.stub.DictGet(req)
  if not resp.found:
  return default
  return deserialize(resp.value, self._client)
@@ -436,7 +436,7 @@ class _Dict(_Object, type_prefix="di"):
  async def contains(self, key: Any) -> bool:
  """Return if a key is present."""
  req = api_pb2.DictContainsRequest(dict_id=self.object_id, key=serialize(key))
- resp = await retry_transient_errors(self._client.stub.DictContains, req)
+ resp = await self._client.stub.DictContains(req)
  return resp.found
 
  @live_method
@@ -446,7 +446,7 @@ class _Dict(_Object, type_prefix="di"):
  Note: This is an expensive operation and will return at most 100,000.
  """
  req = api_pb2.DictLenRequest(dict_id=self.object_id)
- resp = await retry_transient_errors(self._client.stub.DictLen, req)
+ resp = await self._client.stub.DictLen(req)
  return resp.len
 
  @live_method
@@ -475,7 +475,7 @@ class _Dict(_Object, type_prefix="di"):
  serialized = _serialize_dict(contents)
  req = api_pb2.DictUpdateRequest(dict_id=self.object_id, updates=serialized)
  try:
- await retry_transient_errors(self._client.stub.DictUpdate, req)
+ await self._client.stub.DictUpdate(req)
  except GRPCError as exc:
  if "status = '413'" in exc.message:
  raise RequestSizeError("Dict.update request is too large") from exc
@@ -493,7 +493,7 @@ class _Dict(_Object, type_prefix="di"):
  serialized = _serialize_dict(updates)
  req = api_pb2.DictUpdateRequest(dict_id=self.object_id, updates=serialized, if_not_exists=skip_if_exists)
  try:
- resp = await retry_transient_errors(self._client.stub.DictUpdate, req)
+ resp = await self._client.stub.DictUpdate(req)
  return resp.created
  except GRPCError as exc:
  if "status = '413'" in exc.message:
@@ -516,7 +516,7 @@ class _Dict(_Object, type_prefix="di"):
  If key is not found, return default if provided, otherwise raise KeyError.
  """
  req = api_pb2.DictPopRequest(dict_id=self.object_id, key=serialize(key))
- resp = await retry_transient_errors(self._client.stub.DictPop, req)
+ resp = await self._client.stub.DictPop(req)
  if not resp.found:
  if default is not _NO_DEFAULT:
  return default
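Throughout dict.py (and the other modules touched in this release), explicit `retry_transient_errors(stub.Method, req)` wrappers are dropped in favor of calling the stub directly, with retry behavior attached to the call itself where needed (`retry=Retry(total_timeout=10.0)` in the ephemeral-dict path above). As a rough illustration of that calling convention, here is a minimal retry-wrapper sketch; apart from `Retry(total_timeout=...)`, which appears in the diff, all names and knobs are hypothetical and not Modal's implementation.

```python
import asyncio
from dataclasses import dataclass
from typing import Awaitable, Callable, Optional, TypeVar

T = TypeVar("T")


class TransientError(Exception):
    """Stand-in for a retryable gRPC failure (e.g. UNAVAILABLE)."""


@dataclass
class Retry:
    # Hypothetical knobs; the diff only shows `Retry(total_timeout=10.0)`.
    total_timeout: Optional[float] = None
    base_delay: float = 0.1
    max_attempts: int = 5


async def call_with_retry(rpc: Callable[[], Awaitable[T]], retry: Optional[Retry] = None) -> T:
    """Retry transient failures with exponential backoff, bounded by a total timeout."""
    policy = retry or Retry()
    deadline = None
    if policy.total_timeout is not None:
        deadline = asyncio.get_running_loop().time() + policy.total_timeout
    for attempt in range(policy.max_attempts):
        try:
            return await rpc()
        except TransientError:
            if attempt == policy.max_attempts - 1:
                raise
            delay = policy.base_delay * (2 ** attempt)
            if deadline is not None and asyncio.get_running_loop().time() + delay > deadline:
                raise
            await asyncio.sleep(delay)
    raise RuntimeError("unreachable")


attempts = {"n": 0}


async def flaky_rpc() -> str:
    # Fails transiently on the first two calls, then succeeds.
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise TransientError("UNAVAILABLE")
    return "ok"


if __name__ == "__main__":
    print(asyncio.run(call_with_retry(flaky_rpc, retry=Retry(total_timeout=10.0))))
```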
modal/environments.py CHANGED
@@ -11,7 +11,6 @@ from modal_proto import api_pb2
  from ._object import _Object
  from ._resolver import Resolver
  from ._utils.async_utils import synchronize_api, synchronizer
- from ._utils.grpc_utils import retry_transient_errors
  from ._utils.name_utils import check_object_name
  from .client import _Client
  from .config import config, logger
@@ -71,7 +70,7 @@ class _Environment(_Object, type_prefix="en"):
  else api_pb2.OBJECT_CREATION_TYPE_UNSPECIFIED
  ),
  )
- response = await retry_transient_errors(resolver.client.stub.EnvironmentGetOrCreate, request)
+ response = await resolver.client.stub.EnvironmentGetOrCreate(request)
  logger.debug(f"Created environment with id {response.environment_id}")
  self._hydrate(response.environment_id, resolver.client, response.metadata)
 
modal/exception.py CHANGED
@@ -42,6 +42,10 @@ class SandboxTimeoutError(TimeoutError):
  """Raised when a Sandbox exceeds its execution duration limit and times out."""
 
 
+ class ExecTimeoutError(TimeoutError):
+ """Raised when a container process exceeds its execution duration limit and times out."""
+
+
  class SandboxTerminatedError(Error):
  """Raised when a Sandbox is terminated for an internal reason."""
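The new `ExecTimeoutError` subclasses the built-in `TimeoutError`, just as `SandboxTimeoutError` does, so callers that already catch `TimeoutError` around exec waits will also catch exec timeouts. A minimal sketch, assuming a hypothetical `wait_for_exec` helper in place of the real container-process API:

```python
class ExecTimeoutError(TimeoutError):
    """Raised when a container process exceeds its execution duration limit and times out."""


def wait_for_exec(deadline_exceeded: bool) -> int:
    # Hypothetical helper standing in for ContainerProcess.wait().
    if deadline_exceeded:
        raise ExecTimeoutError("exec did not finish before its deadline")
    return 0


try:
    wait_for_exec(deadline_exceeded=True)
except TimeoutError as exc:  # also catches ExecTimeoutError
    print(f"timed out: {exc}")
```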