modal 1.2.1.dev19-py3-none-any.whl → 1.2.2.dev19-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. modal/_clustered_functions.py +1 -3
  2. modal/_container_entrypoint.py +4 -1
  3. modal/_functions.py +33 -49
  4. modal/_grpc_client.py +148 -0
  5. modal/_output.py +3 -4
  6. modal/_runtime/container_io_manager.py +21 -22
  7. modal/_utils/async_utils.py +12 -3
  8. modal/_utils/auth_token_manager.py +1 -4
  9. modal/_utils/blob_utils.py +3 -4
  10. modal/_utils/grpc_utils.py +80 -51
  11. modal/_utils/mount_utils.py +26 -1
  12. modal/_utils/task_command_router_client.py +3 -4
  13. modal/app.py +3 -4
  14. modal/cli/config.py +3 -1
  15. modal/cli/container.py +1 -2
  16. modal/cli/entry_point.py +1 -0
  17. modal/cli/launch.py +1 -2
  18. modal/cli/network_file_system.py +1 -4
  19. modal/cli/queues.py +1 -2
  20. modal/cli/secret.py +1 -2
  21. modal/client.py +5 -115
  22. modal/client.pyi +2 -91
  23. modal/cls.py +1 -2
  24. modal/config.py +1 -1
  25. modal/container_process.py +4 -8
  26. modal/dict.py +12 -12
  27. modal/environments.py +1 -2
  28. modal/experimental/__init__.py +2 -3
  29. modal/experimental/flash.py +6 -10
  30. modal/file_io.py +13 -27
  31. modal/functions.pyi +6 -6
  32. modal/image.py +24 -3
  33. modal/image.pyi +4 -0
  34. modal/io_streams.py +61 -91
  35. modal/io_streams.pyi +33 -95
  36. modal/mount.py +4 -4
  37. modal/network_file_system.py +5 -6
  38. modal/parallel_map.py +29 -31
  39. modal/parallel_map.pyi +3 -9
  40. modal/queue.py +17 -18
  41. modal/runner.py +8 -8
  42. modal/sandbox.py +23 -36
  43. modal/secret.py +4 -5
  44. modal/snapshot.py +1 -4
  45. modal/token_flow.py +1 -1
  46. modal/volume.py +20 -22
  47. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/METADATA +1 -1
  48. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/RECORD +57 -56
  49. modal_proto/api.proto +2 -0
  50. modal_proto/api_pb2.py +838 -838
  51. modal_proto/api_pb2.pyi +8 -2
  52. modal_proto/modal_api_grpc.py +175 -175
  53. modal_version/__init__.py +1 -1
  54. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/WHEEL +0 -0
  55. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/entry_points.txt +0 -0
  56. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/licenses/LICENSE +0 -0
  57. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev19.dist-info}/top_level.txt +0 -0
modal/image.py CHANGED
@@ -38,7 +38,8 @@ from ._utils.docker_utils import (
     find_dockerignore_file,
 )
 from ._utils.function_utils import FunctionInfo
-from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
+from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
+from ._utils.mount_utils import validate_only_modal_volumes
 from .client import _Client
 from .cloud_bucket_mount import _CloudBucketMount
 from .config import config, logger, user_config_path
@@ -487,6 +488,7 @@ class _Image(_Object, type_prefix="im"):
         context_mount_function: Optional[Callable[[], Optional[_Mount]]] = None,
         force_build: bool = False,
         build_args: dict[str, str] = {},
+        validated_volumes: Optional[Sequence[tuple[str, _Volume]]] = None,
         # For internal use only.
         _namespace: "api_pb2.DeploymentNamespace.ValueType" = api_pb2.DEPLOYMENT_NAMESPACE_WORKSPACE,
         _do_assert_no_mount_layers: bool = True,
@@ -494,6 +496,9 @@ class _Image(_Object, type_prefix="im"):
         if base_images is None:
             base_images = {}
 
+        if validated_volumes is None:
+            validated_volumes = []
+
         if secrets is None:
             secrets = []
         if gpu_config is None:
@@ -514,6 +519,8 @@ class _Image(_Object, type_prefix="im"):
                 deps += (build_function,)
             if image_registry_config and image_registry_config.secret:
                 deps += (image_registry_config.secret,)
+            for _, vol in validated_volumes:
+                deps += (vol,)
             return deps
 
         async def _load(self: _Image, resolver: Resolver, existing_object_id: Optional[str]):
@@ -592,6 +599,17 @@ class _Image(_Object, type_prefix="im"):
                 build_function_id = ""
                 _build_function = None
 
+            # Relies on dicts being ordered (true as of Python 3.6).
+            volume_mounts = [
+                api_pb2.VolumeMount(
+                    mount_path=path,
+                    volume_id=volume.object_id,
+                    allow_background_commits=True,
+                    read_only=volume._read_only,
+                )
+                for path, volume in validated_volumes
+            ]
+
             image_definition = api_pb2.Image(
                 base_images=base_images_pb2s,
                 dockerfile_commands=dockerfile.commands,
@@ -604,6 +622,7 @@ class _Image(_Object, type_prefix="im"):
                 runtime_debug=config.get("function_runtime_debug"),
                 build_function=_build_function,
                 build_args=build_args,
+                volume_mounts=volume_mounts,
             )
 
             req = api_pb2.ImageGetOrCreateRequest(
@@ -619,7 +638,7 @@ class _Image(_Object, type_prefix="im"):
                 allow_global_deployment=os.environ.get("MODAL_IMAGE_ALLOW_GLOBAL_DEPLOYMENT") == "1",
                 ignore_cache=config.get("ignore_cache"),
             )
-            resp = await retry_transient_errors(resolver.client.stub.ImageGetOrCreate, req)
+            resp = await resolver.client.stub.ImageGetOrCreate(req)
             image_id = resp.image_id
             result: api_pb2.GenericResult
             metadata: Optional[api_pb2.ImageMetadata] = None
@@ -848,7 +867,7 @@ class _Image(_Object, type_prefix="im"):
         client = await _Client.from_env()
 
         async def _load(self: _Image, resolver: Resolver, existing_object_id: Optional[str]):
-            resp = await retry_transient_errors(client.stub.ImageFromId, api_pb2.ImageFromIdRequest(image_id=image_id))
+            resp = await client.stub.ImageFromId(api_pb2.ImageFromIdRequest(image_id=image_id))
             self._hydrate(resp.image_id, resolver.client, resp.metadata)
 
         rep = f"Image.from_id({image_id!r})"
@@ -1690,6 +1709,7 @@ class _Image(_Object, type_prefix="im"):
         *commands: Union[str, list[str]],
         env: Optional[dict[str, Optional[str]]] = None,
         secrets: Optional[Collection[_Secret]] = None,
+        volumes: Optional[dict[Union[str, PurePosixPath], _Volume]] = None,
         gpu: GPU_T = None,
         force_build: bool = False,  # Ignore cached builds, similar to 'docker build --no-cache'
     ) -> "_Image":
@@ -1712,6 +1732,7 @@ class _Image(_Object, type_prefix="im"):
             secrets=secrets,
             gpu_config=parse_gpu_config(gpu),
             force_build=self.force_build or force_build,
+            validated_volumes=validate_only_modal_volumes(volumes, "Image.run_commands"),
         )
 
     @staticmethod
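The upshot of these image.py changes is that `Image.run_commands` can now mount Modal Volumes for the duration of a build step: `validate_only_modal_volumes` rejects anything that is not a `modal.Volume`, and the validated pairs are forwarded into the image definition as `VolumeMount` messages. A minimal sketch of the new call pattern; the volume name and paths here are hypothetical:

```python
import modal

# Hypothetical volume holding files we want to copy into the image at build time.
weights = modal.Volume.from_name("my-weights", create_if_missing=True)

image = modal.Image.debian_slim().run_commands(
    # The volume is only mounted while these commands run during the build.
    "cp /weights/model.bin /opt/model.bin",
    volumes={"/weights": weights},
)
```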
modal/image.pyi CHANGED
@@ -176,6 +176,7 @@ class _Image(modal._object._Object):
         ] = None,
         force_build: bool = False,
         build_args: dict[str, str] = {},
+        validated_volumes: typing.Optional[collections.abc.Sequence[tuple[str, modal.volume._Volume]]] = None,
         _namespace: int = 1,
         _do_assert_no_mount_layers: bool = True,
     ): ...
@@ -668,6 +669,7 @@ class _Image(modal._object._Object):
         *commands: typing.Union[str, list[str]],
         env: typing.Optional[dict[str, typing.Optional[str]]] = None,
         secrets: typing.Optional[collections.abc.Collection[modal.secret._Secret]] = None,
+        volumes: typing.Optional[dict[typing.Union[str, pathlib.PurePosixPath], modal.volume._Volume]] = None,
         gpu: typing.Union[None, str, modal.gpu._GPUConfig] = None,
         force_build: bool = False,
     ) -> _Image:
@@ -1091,6 +1093,7 @@ class Image(modal.object.Object):
         ] = None,
         force_build: bool = False,
         build_args: dict[str, str] = {},
+        validated_volumes: typing.Optional[collections.abc.Sequence[tuple[str, modal.volume.Volume]]] = None,
         _namespace: int = 1,
         _do_assert_no_mount_layers: bool = True,
     ): ...
@@ -1648,6 +1651,7 @@ class Image(modal.object.Object):
         *commands: typing.Union[str, list[str]],
         env: typing.Optional[dict[str, typing.Optional[str]]] = None,
         secrets: typing.Optional[collections.abc.Collection[modal.secret.Secret]] = None,
+        volumes: typing.Optional[dict[typing.Union[str, pathlib.PurePosixPath], modal.volume.Volume]] = None,
         gpu: typing.Union[None, str, modal.gpu._GPUConfig] = None,
         force_build: bool = False,
     ) -> Image:
modal/io_streams.py CHANGED
@@ -21,7 +21,7 @@ from modal.exception import ClientClosed, ExecTimeoutError, InvalidError
 from modal_proto import api_pb2
 
 from ._utils.async_utils import synchronize_api
-from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
+from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
 from ._utils.task_command_router_client import TaskCommandRouterClient
 from .client import _Client
 from .config import logger
@@ -64,7 +64,6 @@ async def _container_process_logs_iterator(
         get_raw_bytes=True,
         last_batch_index=last_index,
     )
-
    stream = client.stub.ContainerExecGetOutput.unary_stream(req)
    while True:
        # Check deadline before attempting to receive the next batch
@@ -76,11 +75,13 @@ async def _container_process_logs_iterator(
                break
        except StopAsyncIteration:
            break
+
+        for item in batch.items:
+            yield item.message_bytes, batch.batch_index
+
        if batch.HasField("exit_code"):
            yield None, batch.batch_index
            break
-        for item in batch.items:
-            yield item.message_bytes, batch.batch_index
 
 
 T = TypeVar("T", str, bytes)
@@ -89,7 +90,7 @@ T = TypeVar("T", str, bytes)
 class _StreamReaderThroughServer(Generic[T]):
     """A StreamReader implementation that reads from the server."""
 
-    _stream: Optional[AsyncGenerator[Optional[bytes], None]]
+    _stream: Optional[AsyncGenerator[T, None]]
 
     def __init__(
         self,
@@ -134,10 +135,9 @@ class _StreamReaderThroughServer(Generic[T]):
         self._stream_type = stream_type
 
         if self._object_type == "container_process":
-            # Container process streams need to be consumed as they are produced,
-            # otherwise the process will block. Use a buffer to store the stream
-            # until the client consumes it.
-            self._container_process_buffer: list[Optional[bytes]] = []
+            # TODO: we should not have this async code in constructors!
+            # it only works as long as all the construction happens inside of synchronicity code
+            self._container_process_buffer: list[Optional[bytes]] = []  # TODO: change this to an asyncio.Queue
             self._consume_container_process_task = asyncio.create_task(self._consume_container_process_stream())
 
     @property
@@ -147,21 +147,18 @@ class _StreamReaderThroughServer(Generic[T]):
 
     async def read(self) -> T:
         """Fetch the entire contents of the stream until EOF."""
-        data_str = ""
-        data_bytes = b""
         logger.debug(f"{self._object_id} StreamReader fd={self._file_descriptor} read starting")
-        async for message in self._get_logs():
-            if message is None:
-                break
-            if self._text:
-                data_str += message.decode("utf-8")
-            else:
-                data_bytes += message
-
-        logger.debug(f"{self._object_id} StreamReader fd={self._file_descriptor} read completed after EOF")
         if self._text:
+            data_str = ""
+            async for message in _decode_bytes_stream_to_str(self._get_logs()):
+                data_str += message
+            logger.debug(f"{self._object_id} StreamReader fd={self._file_descriptor} read completed after EOF")
             return cast(T, data_str)
         else:
+            data_bytes = b""
+            async for message in self._get_logs():
+                data_bytes += message
+            logger.debug(f"{self._object_id} StreamReader fd={self._file_descriptor} read completed after EOF")
             return cast(T, data_bytes)
 
     async def _consume_container_process_stream(self):
@@ -181,6 +178,7 @@ class _StreamReaderThroughServer(Generic[T]):
         )
         async for message, batch_index in iterator:
             if self._stream_type == StreamType.STDOUT and message:
+                # TODO: rearchitect this, since these bytes aren't necessarily decodable
                 print(message.decode("utf-8"), end="")
             elif self._stream_type == StreamType.PIPE:
                 self._container_process_buffer.append(message)
@@ -208,6 +206,9 @@ class _StreamReaderThroughServer(Generic[T]):
 
     async def _stream_container_process(self) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
         """Streams the container process buffer to the reader."""
+        # Container process streams need to be consumed as they are produced,
+        # otherwise the process will block. Use a buffer to store the stream
+        # until the client consumes it.
         entry_id = 0
         if self._last_entry_id:
             entry_id = int(self._last_entry_id) + 1
@@ -225,7 +226,7 @@ class _StreamReaderThroughServer(Generic[T]):
 
             entry_id += 1
 
-    async def _get_logs(self, skip_empty_messages: bool = True) -> AsyncGenerator[Optional[bytes], None]:
+    async def _get_logs(self, skip_empty_messages: bool = True) -> AsyncGenerator[bytes, None]:
         """Streams sandbox or process logs from the server to the reader.
 
         Logs returned by this method may contain partial or multiple lines at a time.
@@ -237,7 +238,6 @@ class _StreamReaderThroughServer(Generic[T]):
             raise InvalidError("Logs can only be retrieved using the PIPE stream type.")
 
         if self.eof:
-            yield None
             return
 
         completed = False
@@ -262,6 +262,8 @@ class _StreamReaderThroughServer(Generic[T]):
                 if message is None:
                     completed = True
                     self.eof = True
+                    return
+
                 yield message
 
             except (GRPCError, StreamTerminatedError) as exc:
@@ -275,43 +277,37 @@ class _StreamReaderThroughServer(Generic[T]):
                     continue
                 raise
 
-    async def _get_logs_by_line(self) -> AsyncGenerator[Optional[bytes], None]:
+    async def _get_logs_by_line(self) -> AsyncGenerator[bytes, None]:
         """Process logs from the server and yield complete lines only."""
         async for message in self._get_logs():
-            if message is None:
-                if self._line_buffer:
-                    yield self._line_buffer
-                    self._line_buffer = b""
-                yield None
-            else:
-                assert isinstance(message, bytes)
-                self._line_buffer += message
-                while b"\n" in self._line_buffer:
-                    line, self._line_buffer = self._line_buffer.split(b"\n", 1)
-                    yield line + b"\n"
+            assert isinstance(message, bytes)
+            self._line_buffer += message
+            while b"\n" in self._line_buffer:
+                line, self._line_buffer = self._line_buffer.split(b"\n", 1)
+                yield line + b"\n"
+
+        if self._line_buffer:
+            yield self._line_buffer
+            self._line_buffer = b""
 
-    def _ensure_stream(self) -> AsyncGenerator[Optional[bytes], None]:
+    def _ensure_stream(self) -> AsyncGenerator[T, None]:
         if not self._stream:
             if self._by_line:
-                self._stream = self._get_logs_by_line()
+                # TODO: This is quite odd - it does line buffering in binary mode
+                # but we then always add the buffered text decoding on top of that.
+                # feels a bit upside down...
+                stream = self._get_logs_by_line()
             else:
-                self._stream = self._get_logs()
+                stream = self._get_logs()
+            if self._text:
+                stream = _decode_bytes_stream_to_str(stream)
+            self._stream = cast(AsyncGenerator[T, None], stream)
         return self._stream
 
     async def __anext__(self) -> T:
         """mdmd:hidden"""
         stream = self._ensure_stream()
-
-        value = await stream.__anext__()
-
-        # The stream yields None if it receives an EOF batch.
-        if value is None:
-            raise StopAsyncIteration
-
-        if self._text:
-            return cast(T, value.decode("utf-8"))
-        else:
-            return cast(T, value)
+        return cast(T, await stream.__anext__())
 
     async def aclose(self):
         """mdmd:hidden"""
@@ -330,6 +326,7 @@ async def _decode_bytes_stream_to_str(stream: AsyncGenerator[bytes, None]) -> AsyncGenerator[str, None]:
         text = decoder.decode(item, final=False)
         if text:
             yield text
+
     # Flush any buffered partial character at end-of-stream
     tail = decoder.decode(b"", final=True)
     if tail:
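`_decode_bytes_stream_to_str`, now layered onto the stream by both `read()` and `_ensure_stream()` in text mode, uses an incremental decoder so that a multi-byte UTF-8 character split across chunk boundaries is not mangled. A minimal illustration of that behavior in isolation:

```python
import codecs

decoder = codecs.getincrementaldecoder("utf-8")()

# "é" is encoded as two bytes (0xC3 0xA9), split here across two chunks.
out = ""
for chunk in (b"caf\xc3", b"\xa9"):
    # final=False buffers the dangling lead byte instead of raising.
    out += decoder.decode(chunk, final=False)
out += decoder.decode(b"", final=True)  # flush any buffered partial character
print(out)  # café
```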
@@ -495,23 +492,15 @@ class _StreamReader(Generic[T]):
 
     As an asynchronous iterable, the object supports the `for` and `async for`
     statements. Just loop over the object to read in chunks.
-
-    **Usage**
-
-    ```python fixture:running_app
-    from modal import Sandbox
-
-    sandbox = Sandbox.create(
-        "bash",
-        "-c",
-        "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
-        app=running_app,
-    )
-    for message in sandbox.stdout:
-        print(f"Message: {message}")
-    ```
     """
 
+    _impl: Union[
+        _StreamReaderThroughServer,
+        _DevnullStreamReader,
+        _TextStreamReaderThroughCommandRouter,
+        _BytesStreamReaderThroughCommandRouter,
+    ]
+
     def __init__(
         self,
         file_descriptor: "api_pb2.FileDescriptor.ValueType",
@@ -570,19 +559,7 @@ class _StreamReader(Generic[T]):
         return self._impl.file_descriptor
 
     async def read(self) -> T:
-        """Fetch the entire contents of the stream until EOF.
-
-        **Usage**
-
-        ```python fixture:running_app
-        from modal import Sandbox
-
-        sandbox = Sandbox.create("echo", "hello", app=running_app)
-        sandbox.wait()
-
-        print(sandbox.stdout.read())
-        ```
-        """
+        """Fetch the entire contents of the stream until EOF."""
         return await self._impl.read()
 
     # TODO(saltzm): I'd prefer to have the implementation classes only implement __aiter__
@@ -660,15 +637,13 @@ class _StreamWriterThroughServer:
 
         try:
             if self._object_type == "sandbox":
-                await retry_transient_errors(
-                    self._client.stub.SandboxStdinWrite,
+                await self._client.stub.SandboxStdinWrite(
                     api_pb2.SandboxStdinWriteRequest(
                         sandbox_id=self._object_id, index=index, eof=self._is_closed, input=data
                     ),
                 )
             else:
-                await retry_transient_errors(
-                    self._client.stub.ContainerExecPutInput,
+                await self._client.stub.ContainerExecPutInput(
                     api_pb2.ContainerExecPutInputRequest(
                         exec_id=self._object_id,
                         input=api_pb2.RuntimeInputMessage(message=data, message_index=index, eof=self._is_closed),
@@ -753,21 +728,16 @@ class _StreamWriter:
 
         **Usage**
 
-        ```python fixture:running_app
-        from modal import Sandbox
-
-        sandbox = Sandbox.create(
+        ```python fixture:sandbox
+        proc = sandbox.exec(
             "bash",
             "-c",
             "while read line; do echo $line; done",
-            app=running_app,
         )
-        sandbox.stdin.write(b"foo\\n")
-        sandbox.stdin.write(b"bar\\n")
-        sandbox.stdin.write_eof()
-
-        sandbox.stdin.drain()
-        sandbox.wait()
+        proc.stdin.write(b"foo\\n")
+        proc.stdin.write(b"bar\\n")
+        proc.stdin.write_eof()
+        proc.stdin.drain()
         ```
         """
         self._impl.write(data)
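With decoding folded into `_ensure_stream`, iterating a text-mode reader yields `str` and a bytes-mode reader yields `bytes`, and the `None` EOF sentinel no longer leaks to callers. A sketch of the public-facing pattern, assuming an already-created `sandbox` object (as in the `fixture:sandbox` docstrings above) and the `text=` flag of `Sandbox.exec`:

```python
# Text mode (the default): chunks arrive as str.
proc = sandbox.exec("bash", "-c", "for i in 1 2 3; do echo line $i; done")
for chunk in proc.stdout:
    print(chunk, end="")

# Bytes mode: chunks arrive as bytes and read() returns bytes.
proc = sandbox.exec("cat", "/etc/os-release", text=False)
print(proc.stdout.read())
```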
modal/io_streams.pyi CHANGED
@@ -21,7 +21,7 @@ T = typing.TypeVar("T")
 class _StreamReaderThroughServer(typing.Generic[T]):
     """A StreamReader implementation that reads from the server."""
 
-    _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
+    _stream: typing.Optional[collections.abc.AsyncGenerator[T, None]]
 
     def __init__(
         self,
@@ -54,9 +54,7 @@ class _StreamReaderThroughServer(typing.Generic[T]):
         """Streams the container process buffer to the reader."""
         ...
 
-    def _get_logs(
-        self, skip_empty_messages: bool = True
-    ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
+    def _get_logs(self, skip_empty_messages: bool = True) -> collections.abc.AsyncGenerator[bytes, None]:
         """Streams sandbox or process logs from the server to the reader.
 
         Logs returned by this method may contain partial or multiple lines at a time.
@@ -66,11 +64,11 @@ class _StreamReaderThroughServer(typing.Generic[T]):
         """
         ...
 
-    def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
+    def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[bytes, None]:
         """Process logs from the server and yield complete lines only."""
         ...
 
-    def _ensure_stream(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+    def _ensure_stream(self) -> collections.abc.AsyncGenerator[T, None]: ...
     async def __anext__(self) -> T:
         """mdmd:hidden"""
         ...
@@ -184,22 +182,15 @@ class _StreamReader(typing.Generic[T]):
 
     As an asynchronous iterable, the object supports the `for` and `async for`
     statements. Just loop over the object to read in chunks.
-
-    **Usage**
-
-    ```python fixture:running_app
-    from modal import Sandbox
-
-    sandbox = Sandbox.create(
-        "bash",
-        "-c",
-        "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
-        app=running_app,
-    )
-    for message in sandbox.stdout:
-        print(f"Message: {message}")
-    ```
     """
+
+    _impl: typing.Union[
+        _StreamReaderThroughServer,
+        _DevnullStreamReader,
+        _TextStreamReaderThroughCommandRouter,
+        _BytesStreamReaderThroughCommandRouter,
+    ]
 
     def __init__(
         self,
         file_descriptor: int,
@@ -222,19 +213,7 @@ class _StreamReader(typing.Generic[T]):
         ...
 
     async def read(self) -> T:
-        """Fetch the entire contents of the stream until EOF.
-
-        **Usage**
-
-        ```python fixture:running_app
-        from modal import Sandbox
-
-        sandbox = Sandbox.create("echo", "hello", app=running_app)
-        sandbox.wait()
-
-        print(sandbox.stdout.read())
-        ```
-        """
+        """Fetch the entire contents of the stream until EOF."""
         ...
 
     def __aiter__(self) -> collections.abc.AsyncIterator[T]:
@@ -318,21 +297,16 @@ class _StreamWriter:
 
         **Usage**
 
-        ```python fixture:running_app
-        from modal import Sandbox
-
-        sandbox = Sandbox.create(
+        ```python fixture:sandbox
+        proc = sandbox.exec(
             "bash",
             "-c",
             "while read line; do echo $line; done",
-            app=running_app,
         )
-        sandbox.stdin.write(b"foo\n")
-        sandbox.stdin.write(b"bar\n")
-        sandbox.stdin.write_eof()
-
-        sandbox.stdin.drain()
-        sandbox.wait()
+        proc.stdin.write(b"foo\n")
+        proc.stdin.write(b"bar\n")
+        proc.stdin.write_eof()
+        proc.stdin.drain()
         ```
         """
         ...
@@ -376,22 +350,15 @@ class StreamReader(typing.Generic[T]):
 
     As an asynchronous iterable, the object supports the `for` and `async for`
     statements. Just loop over the object to read in chunks.
-
-    **Usage**
-
-    ```python fixture:running_app
-    from modal import Sandbox
-
-    sandbox = Sandbox.create(
-        "bash",
-        "-c",
-        "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
-        app=running_app,
-    )
-    for message in sandbox.stdout:
-        print(f"Message: {message}")
-    ```
     """
+
+    _impl: typing.Union[
+        _StreamReaderThroughServer,
+        _DevnullStreamReader,
+        _TextStreamReaderThroughCommandRouter,
+        _BytesStreamReaderThroughCommandRouter,
+    ]
 
     def __init__(
         self,
         file_descriptor: int,
@@ -415,35 +382,11 @@ class StreamReader(typing.Generic[T]):
 
     class __read_spec(typing_extensions.Protocol[T_INNER, SUPERSELF]):
         def __call__(self, /) -> T_INNER:
-            """Fetch the entire contents of the stream until EOF.
-
-            **Usage**
-
-            ```python fixture:running_app
-            from modal import Sandbox
-
-            sandbox = Sandbox.create("echo", "hello", app=running_app)
-            sandbox.wait()
-
-            print(sandbox.stdout.read())
-            ```
-            """
+            """Fetch the entire contents of the stream until EOF."""
             ...
 
         async def aio(self, /) -> T_INNER:
-            """Fetch the entire contents of the stream until EOF.
-
-            **Usage**
-
-            ```python fixture:running_app
-            from modal import Sandbox
-
-            sandbox = Sandbox.create("echo", "hello", app=running_app)
-            sandbox.wait()
-
-            print(sandbox.stdout.read())
-            ```
-            """
+            """Fetch the entire contents of the stream until EOF."""
             ...
 
     read: __read_spec[T, typing_extensions.Self]
@@ -493,21 +436,16 @@ class StreamWriter:
 
         **Usage**
 
-        ```python fixture:running_app
-        from modal import Sandbox
-
-        sandbox = Sandbox.create(
+        ```python fixture:sandbox
+        proc = sandbox.exec(
             "bash",
             "-c",
             "while read line; do echo $line; done",
-            app=running_app,
        )
-        sandbox.stdin.write(b"foo\n")
-        sandbox.stdin.write(b"bar\n")
-        sandbox.stdin.write_eof()
-
-        sandbox.stdin.drain()
-        sandbox.wait()
+        proc.stdin.write(b"foo\n")
+        proc.stdin.write(b"bar\n")
+        proc.stdin.write_eof()
+        proc.stdin.drain()
         ```
         """
         ...
modal/mount.py CHANGED
@@ -24,7 +24,7 @@ from ._object import _get_environment_name, _Object
 from ._resolver import Resolver
 from ._utils.async_utils import TaskContext, aclosing, async_map, synchronize_api
 from ._utils.blob_utils import FileUploadSpec, blob_upload_file, get_file_upload_spec_from_path
-from ._utils.grpc_utils import retry_transient_errors
+from ._utils.grpc_utils import Retry
 from ._utils.name_utils import check_object_name
 from ._utils.package_utils import get_module_mount_info
 from .client import _Client
@@ -518,7 +518,7 @@ class _Mount(_Object, type_prefix="mo"):
 
         request = api_pb2.MountPutFileRequest(sha256_hex=file_spec.sha256_hex)
         accounted_hashes.add(file_spec.sha256_hex)
-        response = await retry_transient_errors(resolver.client.stub.MountPutFile, request, base_delay=1)
+        response = await resolver.client.stub.MountPutFile(request, retry=Retry(base_delay=1))
 
         if response.exists:
             n_finished += 1
@@ -544,7 +544,7 @@ class _Mount(_Object, type_prefix="mo"):
 
         start_time = time.monotonic()
         while time.monotonic() - start_time < MOUNT_PUT_FILE_CLIENT_TIMEOUT:
-            response = await retry_transient_errors(resolver.client.stub.MountPutFile, request2, base_delay=1)
+            response = await resolver.client.stub.MountPutFile(request2, retry=Retry(base_delay=1))
             if response.exists:
                 n_finished += 1
                 return mount_file
@@ -591,7 +591,7 @@ class _Mount(_Object, type_prefix="mo"):
             environment_name=resolver.environment_name,
         )
 
-        resp = await retry_transient_errors(resolver.client.stub.MountGetOrCreate, req, base_delay=1)
+        resp = await resolver.client.stub.MountGetOrCreate(req, retry=Retry(base_delay=1))
         status_row.finish(f"Created mount {message_label}")
 
         logger.debug(f"Uploaded {total_uploads} new files and {total_bytes} bytes in {time.monotonic() - t0}s")