modal 0.68.50__py3-none-any.whl → 0.71.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. modal/_container_entrypoint.py +24 -16
  2. modal/_runtime/container_io_manager.py +11 -23
  3. modal/_utils/docker_utils.py +64 -0
  4. modal/_utils/function_utils.py +10 -1
  5. modal/app.py +25 -23
  6. modal/app.pyi +6 -2
  7. modal/cli/launch.py +2 -0
  8. modal/cli/programs/vscode.py +27 -2
  9. modal/cli/run.py +1 -1
  10. modal/client.pyi +2 -2
  11. modal/exception.py +6 -0
  12. modal/experimental.py +3 -0
  13. modal/file_io.py +102 -10
  14. modal/file_io.pyi +59 -0
  15. modal/file_pattern_matcher.py +11 -1
  16. modal/functions.py +20 -5
  17. modal/functions.pyi +2 -2
  18. modal/image.py +95 -39
  19. modal/image.pyi +11 -2
  20. modal/io_streams.py +15 -27
  21. modal/io_streams_helper.py +53 -0
  22. modal/mount.py +3 -5
  23. modal/mount.pyi +4 -4
  24. modal/partial_function.py +4 -4
  25. modal/runner.py +34 -37
  26. modal/runner.pyi +6 -3
  27. modal/running_app.py +23 -4
  28. modal/sandbox.py +19 -6
  29. modal/sandbox.pyi +25 -0
  30. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/METADATA +1 -1
  31. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/RECORD +44 -42
  32. modal_proto/api.proto +13 -0
  33. modal_proto/api_grpc.py +16 -0
  34. modal_proto/api_pb2.py +456 -436
  35. modal_proto/api_pb2.pyi +41 -1
  36. modal_proto/api_pb2_grpc.py +34 -1
  37. modal_proto/api_pb2_grpc.pyi +13 -3
  38. modal_proto/modal_api_grpc.py +1 -0
  39. modal_version/__init__.py +1 -1
  40. modal_version/_version_generated.py +2 -2
  41. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/LICENSE +0 -0
  42. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/WHEEL +0 -0
  43. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/entry_points.txt +0 -0
  44. {modal-0.68.50.dist-info → modal-0.71.5.dist-info}/top_level.txt +0 -0
modal/file_io.pyi CHANGED
@@ -1,4 +1,5 @@
  import _typeshed
+ import enum
  import modal.client
  import modal_proto.api_pb2
  import typing
@@ -13,14 +14,33 @@ async def _replace_bytes(
      file: _FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
  ) -> None: ...

+ class FileWatchEventType(enum.Enum):
+     Unknown = "Unknown"
+     Access = "Access"
+     Create = "Create"
+     Modify = "Modify"
+     Remove = "Remove"
+
+ class FileWatchEvent:
+     paths: list[str]
+     type: FileWatchEventType
+
+     def __init__(self, paths: list[str], type: FileWatchEventType) -> None: ...
+     def __repr__(self): ...
+     def __eq__(self, other): ...
+
  class _FileIO(typing.Generic[T]):
      _task_id: str
      _file_descriptor: str
      _client: typing.Optional[modal.client._Client]
+     _watch_output_buffer: list[typing.Optional[bytes]]

      def _validate_mode(self, mode: str) -> None: ...
      def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...
      def _consume_output(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...
+     async def _consume_watch_output(self, exec_id: str) -> None: ...
+     async def _parse_watch_output(self, event: bytes) -> typing.Optional[FileWatchEvent]: ...
+     def _stream_watch_output(self) -> typing.AsyncIterator[FileWatchEvent]: ...
      async def _wait(self, exec_id: str) -> bytes: ...
      def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
      async def _open_file(self, path: str, mode: str) -> None: ...
@@ -49,6 +69,16 @@ class _FileIO(typing.Generic[T]):
      async def mkdir(cls, path: str, client: modal.client._Client, task_id: str, parents: bool = False) -> None: ...
      @classmethod
      async def rm(cls, path: str, client: modal.client._Client, task_id: str, recursive: bool = False) -> None: ...
+     @classmethod
+     def watch(
+         cls,
+         path: str,
+         client: modal.client._Client,
+         task_id: str,
+         filter: typing.Optional[list[FileWatchEventType]] = None,
+         recursive: bool = False,
+         timeout: typing.Optional[int] = None,
+     ) -> typing.AsyncIterator[FileWatchEvent]: ...
      async def _close(self) -> None: ...
      async def close(self) -> None: ...
      def _check_writable(self) -> None: ...
@@ -79,6 +109,7 @@ class FileIO(typing.Generic[T]):
      _task_id: str
      _file_descriptor: str
      _client: typing.Optional[modal.client.Client]
+     _watch_output_buffer: list[typing.Optional[bytes]]

      def __init__(self, /, *args, **kwargs): ...
      def _validate_mode(self, mode: str) -> None: ...
@@ -90,6 +121,24 @@ class FileIO(typing.Generic[T]):

      _consume_output: ___consume_output_spec

+     class ___consume_watch_output_spec(typing_extensions.Protocol):
+         def __call__(self, exec_id: str) -> None: ...
+         async def aio(self, exec_id: str) -> None: ...
+
+     _consume_watch_output: ___consume_watch_output_spec
+
+     class ___parse_watch_output_spec(typing_extensions.Protocol):
+         def __call__(self, event: bytes) -> typing.Optional[FileWatchEvent]: ...
+         async def aio(self, event: bytes) -> typing.Optional[FileWatchEvent]: ...
+
+     _parse_watch_output: ___parse_watch_output_spec
+
+     class ___stream_watch_output_spec(typing_extensions.Protocol):
+         def __call__(self) -> typing.Iterator[FileWatchEvent]: ...
+         def aio(self) -> typing.AsyncIterator[FileWatchEvent]: ...
+
+     _stream_watch_output: ___stream_watch_output_spec
+
      class ___wait_spec(typing_extensions.Protocol):
          def __call__(self, exec_id: str) -> bytes: ...
          async def aio(self, exec_id: str) -> bytes: ...
@@ -173,6 +222,16 @@ class FileIO(typing.Generic[T]):
      def mkdir(cls, path: str, client: modal.client.Client, task_id: str, parents: bool = False) -> None: ...
      @classmethod
      def rm(cls, path: str, client: modal.client.Client, task_id: str, recursive: bool = False) -> None: ...
+     @classmethod
+     def watch(
+         cls,
+         path: str,
+         client: modal.client.Client,
+         task_id: str,
+         filter: typing.Optional[list[FileWatchEventType]] = None,
+         recursive: bool = False,
+         timeout: typing.Optional[int] = None,
+     ) -> typing.Iterator[FileWatchEvent]: ...

      class ___close_spec(typing_extensions.Protocol):
          def __call__(self) -> None: ...
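
The new `FileWatchEvent` types and `watch` classmethod point to an inotify-style API for observing filesystem changes inside a running container. A minimal sketch of how the synchronous variant might be consumed, based only on the stub signature above; how the `Client` and sandbox `task_id` are obtained (for example from a running Sandbox) is outside this diff:

```python
import modal
from modal.file_io import FileIO, FileWatchEventType

def tail_config_changes(client: modal.Client, task_id: str) -> None:
    # Iterate over events for /etc/app until the 60 second timeout elapses.
    # `filter` narrows the stream to modifications; `recursive` includes subdirectories.
    for event in FileIO.watch(
        "/etc/app",
        client,
        task_id,
        filter=[FileWatchEventType.Modify],
        recursive=True,
        timeout=60,
    ):
        print(event.type, event.paths)
```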
modal/file_pattern_matcher.py CHANGED
@@ -12,7 +12,7 @@ then asking it whether file paths match any of its patterns.
  import os
  from abc import abstractmethod
  from pathlib import Path
- from typing import Callable, Optional
+ from typing import Callable, Optional, Sequence, Union

  from ._utils.pattern_utils import Pattern

@@ -152,3 +152,13 @@ class FilePatternMatcher(_AbstractPatternMatcher):
  # with_repr allows us to use this matcher as a default value in a function signature
  # and get a nice repr in the docs and auto-generated type stubs:
  NON_PYTHON_FILES = (~FilePatternMatcher("**/*.py")).with_repr(f"{__name__}.NON_PYTHON_FILES")
+ _NOTHING = (~FilePatternMatcher()).with_repr(f"{__name__}._NOTHING")  # match everything = ignore nothing
+
+
+ def _ignore_fn(ignore: Union[Sequence[str], Callable[[Path], bool]]) -> Callable[[Path], bool]:
+     # if a callable is passed, return it
+     # otherwise, treat input as a sequence of patterns and return a callable pattern matcher for those
+     if callable(ignore):
+         return ignore
+
+     return FilePatternMatcher(*ignore)
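
The new `_ignore_fn` helper normalizes the `ignore=` argument that several image methods now accept: a sequence of dockerignore-style patterns is wrapped in a `FilePatternMatcher`, while a user-supplied predicate is passed through untouched. A small illustrative sketch (the file names are hypothetical):

```python
from pathlib import Path

from modal.file_pattern_matcher import _ignore_fn

# A sequence of glob patterns becomes a FilePatternMatcher acting as a Path -> bool predicate.
ignore = _ignore_fn(["**/*.pyc", "**/__pycache__"])
print(ignore(Path("pkg/mod.pyc")))  # True for paths matching the patterns

# An existing callable is returned unchanged.
custom = lambda p: p.suffix == ".log"
assert _ignore_fn(custom) is custom
```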
modal/functions.py CHANGED
@@ -59,7 +59,14 @@ from .call_graph import InputInfo, _reconstruct_call_graph
  from .client import _Client
  from .cloud_bucket_mount import _CloudBucketMount, cloud_bucket_mounts_to_proto
  from .config import config
- from .exception import ExecutionError, FunctionTimeoutError, InvalidError, NotFoundError, OutputExpiredError
+ from .exception import (
+     ExecutionError,
+     FunctionTimeoutError,
+     InternalFailure,
+     InvalidError,
+     NotFoundError,
+     OutputExpiredError,
+ )
  from .gpu import GPU_T, parse_gpu_config
  from .image import _Image
  from .mount import _get_client_mount, _Mount, get_auto_mounts
@@ -174,7 +181,7 @@ class _Invocation:
          return _Invocation(client.stub, function_call_id, client, retry_context)

      async def pop_function_call_outputs(
-         self, timeout: Optional[float], clear_on_success: bool
+         self, timeout: Optional[float], clear_on_success: bool, input_jwts: Optional[list[str]] = None
      ) -> api_pb2.FunctionGetOutputsResponse:
          t0 = time.time()
          if timeout is None:
@@ -190,6 +197,7 @@
              last_entry_id="0-0",
              clear_on_success=clear_on_success,
              requested_at=time.time(),
+             input_jwts=input_jwts,
          )
          response: api_pb2.FunctionGetOutputsResponse = await retry_transient_errors(
              self.stub.FunctionGetOutputs,
@@ -219,10 +227,14 @@
              request,
          )

-     async def _get_single_output(self) -> Any:
+     async def _get_single_output(self, expected_jwt: Optional[str] = None) -> Any:
          # waits indefinitely for a single result for the function, and clear the outputs buffer after
          item: api_pb2.FunctionGetOutputsItem = (
-             await self.pop_function_call_outputs(timeout=None, clear_on_success=True)
+             await self.pop_function_call_outputs(
+                 timeout=None,
+                 clear_on_success=True,
+                 input_jwts=[expected_jwt] if expected_jwt else None,
+             )
          ).outputs[0]
          return await _process_result(item.result, item.data_format, self.stub, self.client)

@@ -242,9 +254,12 @@

          while True:
              try:
-                 return await self._get_single_output()
+                 return await self._get_single_output(ctx.input_jwt)
              except (UserCodeException, FunctionTimeoutError) as exc:
                  await user_retry_manager.raise_or_sleep(exc)
+             except InternalFailure:
+                 # For system failures on the server, we retry immediately.
+                 pass
              await self._retry_input()

      async def poll_function(self, timeout: Optional[float] = None):
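
The `InternalFailure` branch changes retry behavior: exceptions raised by user code (and timeouts) still flow through the configured retry policy, while system failures reported by the server are retried immediately after the input is re-enqueued. A schematic restatement of that control flow, with the invocation internals abstracted behind callables; the helper below is illustrative, not part of the package:

```python
from modal.exception import FunctionTimeoutError, InternalFailure

async def run_with_retries(get_single_output, retry_input, user_retry_manager):
    # get_single_output/retry_input stand in for _Invocation._get_single_output and
    # _Invocation._retry_input; user_retry_manager mirrors raise_or_sleep semantics.
    while True:
        try:
            return await get_single_output()
        except FunctionTimeoutError as exc:
            await user_retry_manager.raise_or_sleep(exc)  # may back off or re-raise
        except InternalFailure:
            pass  # server-side failure: retry immediately, no backoff
        await retry_input()
```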
modal/functions.pyi CHANGED
@@ -61,10 +61,10 @@ class _Invocation:
          function: _Function, args, kwargs, *, client: modal.client._Client, function_call_invocation_type: int
      ) -> _Invocation: ...
      async def pop_function_call_outputs(
-         self, timeout: typing.Optional[float], clear_on_success: bool
+         self, timeout: typing.Optional[float], clear_on_success: bool, input_jwts: typing.Optional[list[str]] = None
      ) -> modal_proto.api_pb2.FunctionGetOutputsResponse: ...
      async def _retry_input(self) -> None: ...
-     async def _get_single_output(self) -> typing.Any: ...
+     async def _get_single_output(self, expected_jwt: typing.Optional[str] = None) -> typing.Any: ...
      async def run_function(self) -> typing.Any: ...
      async def poll_function(self, timeout: typing.Optional[float] = None): ...
      def run_generator(self): ...
modal/image.py CHANGED
@@ -31,6 +31,9 @@ from ._serialization import serialize
  from ._utils.async_utils import synchronize_api
  from ._utils.blob_utils import MAX_OBJECT_SIZE_BYTES
  from ._utils.deprecation import deprecation_error, deprecation_warning
+ from ._utils.docker_utils import (
+     extract_copy_command_patterns,
+ )
  from ._utils.function_utils import FunctionInfo
  from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
  from .client import _Client
@@ -38,7 +41,7 @@ from .cloud_bucket_mount import _CloudBucketMount
  from .config import config, logger, user_config_path
  from .environments import _get_environment_cached
  from .exception import InvalidError, NotFoundError, RemoteError, VersionError
- from .file_pattern_matcher import NON_PYTHON_FILES
+ from .file_pattern_matcher import NON_PYTHON_FILES, FilePatternMatcher, _ignore_fn
  from .gpu import GPU_T, parse_gpu_config
  from .mount import _Mount, python_standalone_mount_name
  from .network_file_system import _NetworkFileSystem
@@ -236,6 +239,33 @@ def _get_image_builder_version(server_version: ImageBuilderVersion) -> ImageBuil
      return version


+ def _create_context_mount(
+     docker_commands: Sequence[str],
+     ignore_fn: Callable[[Path], bool],
+     context_dir: Path,
+ ) -> Optional[_Mount]:
+     """
+     Creates a context mount from a list of docker commands.
+
+     1. Paths are evaluated relative to context_dir.
+     2. First selects inclusions based on COPY commands in the list of commands.
+     3. Then ignore any files as per the ignore predicate.
+     """
+     copy_patterns = extract_copy_command_patterns(docker_commands)
+     if not copy_patterns:
+         return None  # no mount needed
+     include_fn = FilePatternMatcher(*copy_patterns)
+
+     def ignore_with_include(source: Path) -> bool:
+         relative_source = source.relative_to(context_dir)
+         if not include_fn(relative_source) or ignore_fn(relative_source):
+             return True
+
+         return False
+
+     return _Mount._add_local_dir(Path("./"), PurePosixPath("/"), ignore=ignore_with_include)
+
+
  class _ImageRegistryConfig:
      """mdmd:hidden"""

@@ -396,7 +426,7 @@ class _Image(_Object, type_prefix="im"):
          build_function: Optional["modal.functions._Function"] = None,
          build_function_input: Optional[api_pb2.FunctionInput] = None,
          image_registry_config: Optional[_ImageRegistryConfig] = None,
-         context_mount: Optional[_Mount] = None,
+         context_mount_function: Optional[Callable[[], Optional[_Mount]]] = None,
          force_build: bool = False,
          # For internal use only.
          _namespace: "api_pb2.DeploymentNamespace.ValueType" = api_pb2.DEPLOYMENT_NAMESPACE_WORKSPACE,
@@ -423,13 +453,15 @@ class _Image(_Object, type_prefix="im"):
              deps = tuple(base_images.values()) + tuple(secrets)
              if build_function:
                  deps += (build_function,)
-             if context_mount:
-                 deps += (context_mount,)
              if image_registry_config and image_registry_config.secret:
                  deps += (image_registry_config.secret,)
              return deps

          async def _load(self: _Image, resolver: Resolver, existing_object_id: Optional[str]):
+             context_mount = context_mount_function() if context_mount_function else None
+             if context_mount:
+                 await resolver.load(context_mount)
+
              if _do_assert_no_mount_layers:
                  for image in base_images.values():
                      # base images can't have
@@ -596,7 +628,7 @@ class _Image(_Object, type_prefix="im"):
          return _Image._from_args(
              base_images={"base": self},
              dockerfile_function=build_dockerfile,
-             context_mount=mount,
+             context_mount_function=lambda: mount,
          )

      def add_local_file(self, local_path: Union[str, Path], remote_path: str, *, copy: bool = False) -> "_Image":
@@ -684,7 +716,7 @@ class _Image(_Object, type_prefix="im"):
              # + make default remote_path="./"
              raise InvalidError("image.add_local_dir() currently only supports absolute remote_path values")

-         mount = _Mount._add_local_dir(Path(local_path), Path(remote_path), ignore)
+         mount = _Mount._add_local_dir(Path(local_path), PurePosixPath(remote_path), ignore=_ignore_fn(ignore))
          return self._add_mount_layer_or_copy(mount, copy=copy)

      def copy_local_file(self, local_path: Union[str, Path], remote_path: Union[str, Path] = "./") -> "_Image":
@@ -695,7 +727,6 @@ class _Image(_Object, type_prefix="im"):
          """
          # TODO(elias): add pending deprecation with suggestion to use add_* instead
          basename = str(Path(local_path).name)
-         mount = _Mount.from_local_file(local_path, remote_path=f"/{basename}")

          def build_dockerfile(version: ImageBuilderVersion) -> DockerfileSpec:
              return DockerfileSpec(commands=["FROM base", f"COPY {basename} {remote_path}"], context_files={})
@@ -703,7 +734,7 @@ class _Image(_Object, type_prefix="im"):
          return _Image._from_args(
              base_images={"base": self},
              dockerfile_function=build_dockerfile,
-             context_mount=mount,
+             context_mount_function=lambda: _Mount.from_local_file(local_path, remote_path=f"/{basename}"),
          )

      def add_local_python_source(
@@ -790,15 +821,15 @@ class _Image(_Object, type_prefix="im"):
          ```
          """

-         mount = _Mount._add_local_dir(Path(local_path), Path("/"), ignore)
-
          def build_dockerfile(version: ImageBuilderVersion) -> DockerfileSpec:
              return DockerfileSpec(commands=["FROM base", f"COPY . {remote_path}"], context_files={})

          return _Image._from_args(
              base_images={"base": self},
              dockerfile_function=build_dockerfile,
-             context_mount=mount,
+             context_mount_function=lambda: _Mount._add_local_dir(
+                 Path(local_path), PurePosixPath("/"), ignore=_ignore_fn(ignore)
+             ),
          )

      def pip_install(
@@ -1156,12 +1187,29 @@ class _Image(_Object, type_prefix="im"):
          # modal.Mount with local files to supply as build context for COPY commands
          context_mount: Optional[_Mount] = None,
          force_build: bool = False,  # Ignore cached builds, similar to 'docker build --no-cache'
+         ignore: Union[Sequence[str], Callable[[Path], bool]] = (),
      ) -> "_Image":
          """Extend an image with arbitrary Dockerfile-like commands."""
          cmds = _flatten_str_args("dockerfile_commands", "dockerfile_commands", dockerfile_commands)
          if not cmds:
              return self

+         if context_mount:
+             if ignore:
+                 raise InvalidError("Cannot set both `context_mount` and `ignore`")
+
+             def identity_context_mount_fn() -> Optional[_Mount]:
+                 return context_mount
+
+             context_mount_function = identity_context_mount_fn
+         else:
+
+             def auto_created_context_mount_fn() -> Optional[_Mount]:
+                 # use COPY commands and ignore patterns to construct implicit context mount
+                 return _create_context_mount(cmds, ignore_fn=_ignore_fn(ignore), context_dir=Path.cwd())
+
+             context_mount_function = auto_created_context_mount_fn
+
          def build_dockerfile(version: ImageBuilderVersion) -> DockerfileSpec:
              return DockerfileSpec(commands=["FROM base", *cmds], context_files=context_files)

@@ -1170,7 +1218,7 @@ class _Image(_Object, type_prefix="im"):
              dockerfile_function=build_dockerfile,
              secrets=secrets,
              gpu_config=parse_gpu_config(gpu),
-             context_mount=context_mount,
+             context_mount_function=context_mount_function,
              force_build=self.force_build or force_build,
          )

@@ -1400,11 +1448,15 @@ class _Image(_Object, type_prefix="im"):
          modal.Image.from_registry("nvcr.io/nvidia/pytorch:22.12-py3")
          ```
          """
-         context_mount = None
-         if add_python:
-             context_mount = _Mount.from_name(
-                 python_standalone_mount_name(add_python),
-                 namespace=api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL,
+
+         def context_mount_function() -> Optional[_Mount]:
+             return (
+                 _Mount.from_name(
+                     python_standalone_mount_name(add_python),
+                     namespace=api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL,
+                 )
+                 if add_python
+                 else None
              )

          if "image_registry_config" not in kwargs and secret is not None:
@@ -1417,7 +1469,7 @@

          return _Image._from_args(
              dockerfile_function=build_dockerfile,
-             context_mount=context_mount,
+             context_mount_function=context_mount_function,
              force_build=force_build,
              **kwargs,
          )
@@ -1531,6 +1583,7 @@
          secrets: Sequence[_Secret] = [],
          gpu: GPU_T = None,
          add_python: Optional[str] = None,
+         ignore: Union[Sequence[str], Callable[[Path], bool]] = (),
      ) -> "_Image":
          """Build a Modal image from a local Dockerfile.

@@ -1542,22 +1595,23 @@
          ```python
          image = modal.Image.from_dockerfile("./Dockerfile", add_python="3.12")
          ```
+         """

-         If your Dockerfile uses `COPY` instructions which copy data from the local context of the
-         build into the image, this local data must be uploaded to Modal via a context mount:
+         if context_mount:
+             if ignore:
+                 raise InvalidError("Cannot set both `context_mount` and `ignore`")

-         ```python
-         image = modal.Image.from_dockerfile(
-             "./Dockerfile",
-             context_mount=modal.Mount.from_local_dir(
-                 local_path="src",
-                 remote_path=".",  # to current WORKDIR
-             ),
-         )
-         ```
+             def identity_context_mount_fn() -> Optional[_Mount]:
+                 return context_mount

-         The context mount will allow a `COPY src/ src/` instruction to succeed in Modal's remote builder.
-         """
+             context_mount_function = identity_context_mount_fn
+         else:
+
+             def auto_created_context_mount_fn() -> Optional[_Mount]:
+                 lines = Path(path).read_text("utf8").splitlines()
+                 return _create_context_mount(lines, ignore_fn=_ignore_fn(ignore), context_dir=Path.cwd())
+
+             context_mount_function = auto_created_context_mount_fn

          # --- Build the base dockerfile

@@ -1569,7 +1623,7 @@
          gpu_config = parse_gpu_config(gpu)
          base_image = _Image._from_args(
              dockerfile_function=build_dockerfile_base,
-             context_mount=context_mount,
+             context_mount_function=context_mount_function,
              gpu_config=gpu_config,
              secrets=secrets,
          )
@@ -1578,13 +1632,15 @@
          # This happening in two steps is probably a vestigial consequence of previous limitations,
          # but it will be difficult to merge them without forcing rebuilds of images.

-         if add_python:
-             context_mount = _Mount.from_name(
-                 python_standalone_mount_name(add_python),
-                 namespace=api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL,
+         def add_python_mount():
+             return (
+                 _Mount.from_name(
+                     python_standalone_mount_name(add_python),
+                     namespace=api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL,
+                 )
+                 if add_python
+                 else None
              )
-         else:
-             context_mount = None

          def build_dockerfile_python(version: ImageBuilderVersion) -> DockerfileSpec:
              commands = _Image._registry_setup_commands("base", version, [], add_python)
@@ -1595,7 +1651,7 @@
          return _Image._from_args(
              base_images={"base": base_image},
              dockerfile_function=build_dockerfile_python,
-             context_mount=context_mount,
+             context_mount_function=add_python_mount,
              force_build=force_build,
          )

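With `context_mount_function`, the build context for `COPY` instructions is now assembled lazily from the COPY patterns themselves, and the new `ignore=` parameter filters which local files are uploaded; passing both `context_mount` and `ignore` raises `InvalidError`. A short sketch of the new surface (the `src` directory and `Dockerfile` paths are hypothetical):

```python
import modal

# Inline Dockerfile commands: the context is inferred from COPY patterns,
# and `ignore` accepts dockerignore-style patterns or a Path -> bool predicate.
image = modal.Image.debian_slim().dockerfile_commands(
    ["COPY src /app/src"],
    ignore=["**/*.pyc", "**/__pycache__"],
)

# Same idea for a full Dockerfile.
image_from_file = modal.Image.from_dockerfile(
    "./Dockerfile",
    ignore=lambda p: p.suffix == ".log",
)
```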
modal/image.pyi CHANGED
@@ -44,6 +44,11 @@ def _make_pip_install_args(
  def _get_image_builder_version(
      server_version: typing.Literal["2023.12", "2024.04", "2024.10"],
  ) -> typing.Literal["2023.12", "2024.04", "2024.10"]: ...
+ def _create_context_mount(
+     docker_commands: collections.abc.Sequence[str],
+     ignore_fn: typing.Callable[[pathlib.Path], bool],
+     context_dir: pathlib.Path,
+ ) -> typing.Optional[modal.mount._Mount]: ...

  class _ImageRegistryConfig:
      def __init__(self, registry_auth_type: int = 0, secret: typing.Optional[modal.secret._Secret] = None): ...
@@ -87,7 +92,7 @@ class _Image(modal.object._Object):
          build_function: typing.Optional[modal.functions._Function] = None,
          build_function_input: typing.Optional[modal_proto.api_pb2.FunctionInput] = None,
          image_registry_config: typing.Optional[_ImageRegistryConfig] = None,
-         context_mount: typing.Optional[modal.mount._Mount] = None,
+         context_mount_function: typing.Optional[typing.Callable[[], typing.Optional[modal.mount._Mount]]] = None,
          force_build: bool = False,
          _namespace: int = 1,
          _do_assert_no_mount_layers: bool = True,
@@ -195,6 +200,7 @@ class _Image(modal.object._Object):
          gpu: typing.Union[None, bool, str, modal.gpu._GPUConfig] = None,
          context_mount: typing.Optional[modal.mount._Mount] = None,
          force_build: bool = False,
+         ignore: typing.Union[collections.abc.Sequence[str], typing.Callable[[pathlib.Path], bool]] = (),
      ) -> _Image: ...
      def entrypoint(self, entrypoint_commands: list[str]) -> _Image: ...
      def shell(self, shell_commands: list[str]) -> _Image: ...
@@ -280,6 +286,7 @@ class _Image(modal.object._Object):
          secrets: collections.abc.Sequence[modal.secret._Secret] = [],
          gpu: typing.Union[None, bool, str, modal.gpu._GPUConfig] = None,
          add_python: typing.Optional[str] = None,
+         ignore: typing.Union[collections.abc.Sequence[str], typing.Callable[[pathlib.Path], bool]] = (),
      ) -> _Image: ...
      @staticmethod
      def debian_slim(python_version: typing.Optional[str] = None, force_build: bool = False) -> _Image: ...
@@ -346,7 +353,7 @@ class Image(modal.object.Object):
          build_function: typing.Optional[modal.functions.Function] = None,
          build_function_input: typing.Optional[modal_proto.api_pb2.FunctionInput] = None,
          image_registry_config: typing.Optional[_ImageRegistryConfig] = None,
-         context_mount: typing.Optional[modal.mount.Mount] = None,
+         context_mount_function: typing.Optional[typing.Callable[[], typing.Optional[modal.mount.Mount]]] = None,
          force_build: bool = False,
          _namespace: int = 1,
          _do_assert_no_mount_layers: bool = True,
@@ -454,6 +461,7 @@ class Image(modal.object.Object):
          gpu: typing.Union[None, bool, str, modal.gpu._GPUConfig] = None,
          context_mount: typing.Optional[modal.mount.Mount] = None,
          force_build: bool = False,
+         ignore: typing.Union[collections.abc.Sequence[str], typing.Callable[[pathlib.Path], bool]] = (),
      ) -> Image: ...
      def entrypoint(self, entrypoint_commands: list[str]) -> Image: ...
      def shell(self, shell_commands: list[str]) -> Image: ...
@@ -539,6 +547,7 @@ class Image(modal.object.Object):
          secrets: collections.abc.Sequence[modal.secret.Secret] = [],
          gpu: typing.Union[None, bool, str, modal.gpu._GPUConfig] = None,
          add_python: typing.Optional[str] = None,
+         ignore: typing.Union[collections.abc.Sequence[str], typing.Callable[[pathlib.Path], bool]] = (),
      ) -> Image: ...
      @staticmethod
      def debian_slim(python_version: typing.Optional[str] = None, force_build: bool = False) -> Image: ...
modal/io_streams.py CHANGED
@@ -14,7 +14,8 @@ from typing import (
  from grpclib import Status
  from grpclib.exceptions import GRPCError, StreamTerminatedError

- from modal.exception import ClientClosed, InvalidError
+ from modal.exception import InvalidError
+ from modal.io_streams_helper import consume_stream_with_retries
  from modal_proto import api_pb2

  from ._utils.async_utils import synchronize_api
@@ -176,34 +177,21 @@ class _StreamReader(Generic[T]):
          if self._stream_type == StreamType.DEVNULL:
              return

-         completed = False
-         retries_remaining = 10
-         while not completed:
-             try:
-                 iterator = _container_process_logs_iterator(self._object_id, self._file_descriptor, self._client)
+         def item_handler(item: Optional[bytes]):
+             if self._stream_type == StreamType.STDOUT and item is not None:
+                 print(item.decode("utf-8"), end="")
+             elif self._stream_type == StreamType.PIPE:
+                 self._container_process_buffer.append(item)

-                 async for message in iterator:
-                     if self._stream_type == StreamType.STDOUT and message:
-                         print(message.decode("utf-8"), end="")
-                     elif self._stream_type == StreamType.PIPE:
-                         self._container_process_buffer.append(message)
-                     if message is None:
-                         completed = True
-                         break
+         def completion_check(item: Optional[bytes]):
+             return item is None

-             except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
-                 if retries_remaining > 0:
-                     retries_remaining -= 1
-                     if isinstance(exc, GRPCError):
-                         if exc.status in RETRYABLE_GRPC_STATUS_CODES:
-                             await asyncio.sleep(1.0)
-                             continue
-                     elif isinstance(exc, StreamTerminatedError):
-                         continue
-                     elif isinstance(exc, ClientClosed):
-                         # If the client was closed, the user has triggered a cleanup.
-                         break
-                 raise exc
+         iterator = _container_process_logs_iterator(self._object_id, self._file_descriptor, self._client)
+         await consume_stream_with_retries(
+             iterator,
+             item_handler,
+             completion_check,
+         )

      async def _stream_container_process(self) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
          """Streams the container process buffer to the reader."""
modal/io_streams_helper.py CHANGED
@@ -0,0 +1,53 @@
+ # Copyright Modal Labs 2024
+ import asyncio
+ from typing import AsyncIterator, Callable, TypeVar
+
+ from grpclib.exceptions import GRPCError, StreamTerminatedError
+
+ from modal.exception import ClientClosed
+
+ from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
+
+ T = TypeVar("T")
+
+
+ async def consume_stream_with_retries(
+     stream: AsyncIterator[T],
+     item_handler: Callable[[T], None],
+     completion_check: Callable[[T], bool],
+     max_retries: int = 10,
+     retry_delay: float = 1.0,
+ ) -> None:
+     """mdmd:hidden
+     Helper function to consume a stream with retry logic for transient errors.
+
+     Args:
+         stream_generator: Function that returns an AsyncIterator to consume
+         item_handler: Callback function to handle each item from the stream
+         completion_check: Callback function to check if the stream is complete
+         max_retries: Maximum number of retry attempts
+         retry_delay: Delay in seconds between retries
+     """
+     completed = False
+     retries_remaining = max_retries
+
+     while not completed:
+         try:
+             async for item in stream:
+                 item_handler(item)
+                 if completion_check(item):
+                     completed = True
+                     break
+
+         except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
+             if retries_remaining > 0:
+                 retries_remaining -= 1
+                 if isinstance(exc, GRPCError):
+                     if exc.status in RETRYABLE_GRPC_STATUS_CODES:
+                         await asyncio.sleep(retry_delay)
+                         continue
+                 elif isinstance(exc, StreamTerminatedError):
+                     continue
+                 elif isinstance(exc, ClientClosed):
+                     break
+             raise
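
`consume_stream_with_retries` factors the retry loop out of `_StreamReader` so any async stream with an end-of-stream sentinel can reuse it. A minimal sketch of calling the helper directly; it is an internal (`mdmd:hidden`) utility, so treat this as illustration rather than public API:

```python
import asyncio
from typing import Optional

from modal.io_streams_helper import consume_stream_with_retries

async def demo() -> None:
    async def log_stream():
        # Toy stream: yields chunks, then None as the end-of-stream sentinel,
        # mimicking how container process logs are consumed in io_streams.py.
        for chunk in (b"hello ", b"world\n", None):
            yield chunk

    received: list[Optional[bytes]] = []
    await consume_stream_with_retries(
        log_stream(),
        item_handler=received.append,                # called for every item, including the sentinel
        completion_check=lambda item: item is None,  # stop cleanly once the sentinel arrives
    )
    print(received)  # [b'hello ', b'world\n', None]

asyncio.run(demo())
```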