modal 1.0.1.dev4__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
modal/_functions.py CHANGED
@@ -99,6 +99,7 @@ if TYPE_CHECKING:
     import modal.cls
     import modal.partial_function
 
+MAX_INTERNAL_FAILURE_COUNT = 8
 
 @dataclasses.dataclass
 class _RetryContext:
@@ -348,10 +349,14 @@ class _InputPlaneInvocation:
         stub: ModalClientModal,
         attempt_token: str,
         client: _Client,
+        input_item: api_pb2.FunctionPutInputsItem,
+        function_id: str,
     ):
         self.stub = stub
         self.client = client  # Used by the deserializer.
         self.attempt_token = attempt_token
+        self.input_item = input_item
+        self.function_id = function_id
 
     @staticmethod
     async def create(
@@ -365,36 +370,55 @@ class _InputPlaneInvocation:
         stub = await client.get_stub(input_plane_url)
 
         function_id = function.object_id
-        item = await _create_input(args, kwargs, stub, method_name=function._use_method_name)
+        input_item = await _create_input(args, kwargs, stub, method_name=function._use_method_name)
 
         request = api_pb2.AttemptStartRequest(
             function_id=function_id,
             parent_input_id=current_input_id() or "",
-            input=item,
+            input=input_item,
         )
         response = await retry_transient_errors(stub.AttemptStart, request)
         attempt_token = response.attempt_token
 
-        return _InputPlaneInvocation(stub, attempt_token, client)
+        return _InputPlaneInvocation(stub, attempt_token, client, input_item, function_id)
 
     async def run_function(self) -> Any:
-        # TODO(nathan): add retry logic
+        # This will retry when the server returns GENERIC_STATUS_INTERNAL_FAILURE, i.e. lost inputs or worker preemption
+        # TODO(ryan): add logic to retry for user defined retry policy
+        internal_failure_count = 0
         while True:
-            request = api_pb2.AttemptAwaitRequest(
+            await_request = api_pb2.AttemptAwaitRequest(
                 attempt_token=self.attempt_token,
                 timeout_secs=OUTPUTS_TIMEOUT,
                 requested_at=time.time(),
             )
-            response: api_pb2.AttemptAwaitResponse = await retry_transient_errors(
+            await_response: api_pb2.AttemptAwaitResponse = await retry_transient_errors(
                 self.stub.AttemptAwait,
-                request,
+                await_request,
                 attempt_timeout=OUTPUTS_TIMEOUT + ATTEMPT_TIMEOUT_GRACE_PERIOD,
             )
 
-            if response.HasField("output"):
-                return await _process_result(
-                    response.output.result, response.output.data_format, self.stub, self.client
+            try:
+                if await_response.HasField("output"):
+                    return await _process_result(
+                        await_response.output.result, await_response.output.data_format, self.stub, self.client
+                    )
+            except InternalFailure as e:
+                internal_failure_count += 1
+                # Limit the number of times we retry
+                if internal_failure_count >= MAX_INTERNAL_FAILURE_COUNT:
+                    raise e
+                # For system failures on the server, we retry immediately,
+                # and the failure does not count towards the retry policy.
+                retry_request = api_pb2.AttemptRetryRequest(
+                    function_id=self.function_id,
+                    parent_input_id=current_input_id() or "",
+                    input=self.input_item,
+                    attempt_token=self.attempt_token,
                 )
+                # TODO(ryan): Add exponential backoff?
+                retry_response = await retry_transient_errors(self.stub.AttemptRetry, retry_request)
+                self.attempt_token = retry_response.attempt_token
 
 
 # Wrapper type for api_pb2.FunctionStats
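The retry loop added above can be read as the following simplified, standalone sketch (the awaiting/retrying helpers and the InternalFailure placeholder are illustrative stand-ins, not this package's actual symbols): an output is awaited in a loop, and each server-side internal failure re-submits the original input with a fresh attempt token, up to MAX_INTERNAL_FAILURE_COUNT times.

# Illustrative sketch only; `await_output` and `request_retry` stand in for the
# AttemptAwait / AttemptRetry RPCs shown in the hunk above.
class InternalFailure(Exception):  # placeholder for the package's InternalFailure error
    pass

MAX_INTERNAL_FAILURE_COUNT = 8

async def run_with_internal_retries(await_output, request_retry, attempt_token):
    internal_failure_count = 0
    while True:
        try:
            return await await_output(attempt_token)  # raises InternalFailure on lost inputs / preemption
        except InternalFailure:
            internal_failure_count += 1
            if internal_failure_count >= MAX_INTERNAL_FAILURE_COUNT:
                raise  # give up once the cap is reached
            # Internal failures retry immediately and do not count against the user retry policy.
            attempt_token = await request_retry(attempt_token)  # re-submit the input, get a new token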
@@ -791,6 +815,11 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
         if app and app.name:
             app_name = app.name
 
+        # on builder > 2024.10 we mount client dependencies at runtime
+        mount_client_dependencies = False
+        if image._metadata is not None:
+            mount_client_dependencies = image._metadata.image_builder_version > "2024.10"
+
         # Relies on dicts being ordered (true as of Python 3.6).
         volume_mounts = [
             api_pb2.VolumeMount(
@@ -860,6 +889,7 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
             schedule=schedule.proto_message if schedule is not None else None,
             snapshot_debug=config.get("snapshot_debug"),
             experimental_options=experimental_options or {},
+            mount_client_dependencies=mount_client_dependencies,
             # ---
             _experimental_group_size=cluster_size or 0,  # Experimental: Clustered functions
             _experimental_concurrent_cancellations=True,
modal/_output.py CHANGED
@@ -32,6 +32,7 @@ from rich.progress import (
 from rich.spinner import Spinner
 from rich.text import Text
 
+from modal._utils.time_utils import timestamp_to_local
 from modal_proto import api_pb2
 
 from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
@@ -81,22 +82,27 @@ def download_progress_bar() -> Progress:
     )
 
 
-class LineBufferedOutput(io.StringIO):
+class LineBufferedOutput:
     """Output stream that buffers lines and passes them to a callback."""
 
     LINE_REGEX = re.compile("(\r\n|\r|\n)")
 
-    def __init__(self, callback: Callable[[str], None]):
+    def __init__(self, callback: Callable[[str], None], show_timestamps: bool):
         self._callback = callback
         self._buf = ""
+        self._show_timestamps = show_timestamps
 
-    def write(self, data: str):
-        chunks = self.LINE_REGEX.split(self._buf + data)
+    def write(self, log: api_pb2.TaskLogs):
+        chunks = self.LINE_REGEX.split(self._buf + log.data)
 
         # re.split("(<exp>)") returns the matched groups, and also the separators.
         # e.g. re.split("(+)", "a+b") returns ["a", "+", "b"].
         # This means that chunks is guaranteed to be odd in length.
 
+        if self._show_timestamps:
+            for i in range(0, len(chunks) - 1, 2):
+                chunks[i] = f"{timestamp_to_local(log.timestamp)} {chunks[i]}"
+
         completed_lines = "".join(chunks[:-1])
         remainder = chunks[-1]
@@ -136,12 +142,14 @@ class OutputManager:
     _app_page_url: str | None
     _show_image_logs: bool
     _status_spinner_live: Live | None
+    _show_timestamps: bool
 
     def __init__(
         self,
         *,
         stdout: io.TextIOWrapper | None = None,
         status_spinner_text: str = "Running app...",
+        show_timestamps: bool = False,
     ):
         self._stdout = stdout or sys.stdout
         self._console = Console(file=stdout, highlight=False)
@@ -156,6 +164,7 @@
         self._app_page_url = None
         self._show_image_logs = False
         self._status_spinner_live = None
+        self._show_timestamps = show_timestamps
 
     @classmethod
     def disable(cls):
@@ -355,9 +364,9 @@
     async def put_log_content(self, log: api_pb2.TaskLogs):
         stream = self._line_buffers.get(log.file_descriptor)
         if stream is None:
-            stream = LineBufferedOutput(functools.partial(self._print_log, log.file_descriptor))
+            stream = LineBufferedOutput(functools.partial(self._print_log, log.file_descriptor), self._show_timestamps)
             self._line_buffers[log.file_descriptor] = stream
-        stream.write(log.data)
+        stream.write(log)
 
     def flush_lines(self):
         for stream in self._line_buffers.values():
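A minimal sketch of what the timestamp option does to a buffered line, using a tiny stand-in for api_pb2.TaskLogs (only the data and timestamp fields that LineBufferedOutput reads are modeled; the callback receiving the completed line is how this class is described to behave):

from dataclasses import dataclass

from modal._output import LineBufferedOutput  # internal class shown in the hunk above

@dataclass
class FakeTaskLogs:  # stand-in for api_pb2.TaskLogs
    data: str
    timestamp: float

lines: list[str] = []
buf = LineBufferedOutput(lines.append, show_timestamps=True)
buf.write(FakeTaskLogs(data="starting training...\n", timestamp=1718000000.0))
# lines[0] is now roughly "2024-06-10 06:13:20+00:00 starting training...\n"
# (the exact prefix depends on the local timezone).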
@@ -8,14 +8,14 @@ from typing import BinaryIO, Callable, Optional
 # Note: this module needs to import aiohttp in global scope
 # This takes about 50ms and isn't needed in many cases for Modal execution
 # To avoid this, we import it in local scope when needed (blob_utils.py)
-from aiohttp import BytesIOPayload
+from aiohttp import Payload
 from aiohttp.abc import AbstractStreamWriter
 
 # read ~16MiB chunks by default
 DEFAULT_SEGMENT_CHUNK_SIZE = 2**24
 
 
-class BytesIOSegmentPayload(BytesIOPayload):
+class BytesIOSegmentPayload(Payload):
     """Modified bytes payload for concurrent sends of chunks from the same file.
 
     Adds:
@@ -26,6 +26,8 @@ class BytesIOSegmentPayload(BytesIOPayload):
     Feels like this should be in some standard lib...
     """
 
+    _value: BinaryIO
+
     def __init__(
         self,
         bytes_io: BinaryIO,  # should *not* be shared as IO position modification is not locked
@@ -36,6 +38,7 @@ class BytesIOSegmentPayload(BytesIOPayload):
     ):
         # not thread safe constructor!
         super().__init__(bytes_io)
+        self._size = segment_length
         self.initial_seek_pos = bytes_io.tell()
         self.segment_start = segment_start
         self.segment_length = segment_length
@@ -46,18 +49,26 @@ class BytesIOSegmentPayload(BytesIOPayload):
         self.progress_report_cb = progress_report_cb or (lambda *_, **__: None)
         self.reset_state()
 
+    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+        self._value.seek(self.initial_seek_pos)
+        return self._value.read().decode(encoding, errors)
+
     def reset_state(self):
         self._md5_checksum = hashlib.md5()
         self.num_bytes_read = 0
         self._value.seek(self.initial_seek_pos)
 
     @contextmanager
-    def reset_on_error(self):
+    def reset_on_error(self, subtract_progress: bool = False):
         try:
             yield
         except Exception as exc:
             try:
-                self.progress_report_cb(reset=True)
+                if subtract_progress:
+                    negative_progress = -self.num_bytes_read
+                    self.progress_report_cb(advance=negative_progress)
+                else:
+                    self.progress_report_cb(reset=True)
             except Exception as cb_exc:
                 raise cb_exc from exc
             raise exc
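A hedged sketch of how the new subtract_progress flag could be used by a chunk-upload retry loop (the send coroutine and retry policy here are hypothetical, not this package's uploader): rather than resetting the progress bar, the bytes counted by the failed attempt are subtracted so concurrently uploading chunks keep a consistent total.

# Hypothetical retry wrapper; `send_chunk` is an assumed coroutine that streams the payload.
async def upload_with_retry(payload, send_chunk, attempts=3):
    for attempt in range(attempts):
        try:
            with payload.reset_on_error(subtract_progress=True):
                return await send_chunk(payload)
        except Exception:
            if attempt == attempts - 1:
                raise
            payload.reset_state()  # rewind to the segment start before retrying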
@@ -72,14 +83,21 @@ class BytesIOSegmentPayload(BytesIOPayload):
         return self._md5_checksum
 
     async def write(self, writer: "AbstractStreamWriter"):
+        # On aiohttp < 3.12.0 - this is the method that's being called on a custom payload,
+        # but on aiohttp 3.12+ `write_with_length` is called directly.
+        await self.write_with_length(writer, None)
+
+    async def write_with_length(self, writer: AbstractStreamWriter, content_length: Optional[int]):
         loop = asyncio.get_event_loop()
 
         async def safe_read():
             read_start = self.initial_seek_pos + self.segment_start + self.num_bytes_read
             self._value.seek(read_start)
             num_bytes = min(self.chunk_size, self.remaining_bytes())
-            chunk = await loop.run_in_executor(None, self._value.read, num_bytes)
+            if content_length is not None:
+                num_bytes = min(num_bytes, content_length)
 
+            chunk = await loop.run_in_executor(None, self._value.read, num_bytes)
             await loop.run_in_executor(None, self._md5_checksum.update, chunk)
             self.num_bytes_read += len(chunk)
             return chunk
@@ -0,0 +1,15 @@
+# Copyright Modal Labs 2025
+from datetime import datetime
+from typing import Optional
+
+
+def timestamp_to_local(ts: float, isotz: bool = True) -> Optional[str]:
+    if ts > 0:
+        locale_tz = datetime.now().astimezone().tzinfo
+        dt = datetime.fromtimestamp(ts, tz=locale_tz)
+        if isotz:
+            return dt.isoformat(sep=" ", timespec="seconds")
+        else:
+            return f"{datetime.strftime(dt, '%Y-%m-%d %H:%M')} {locale_tz.tzname(dt)}"
+    else:
+        return None
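For reference, the two output modes of the new helper (values are illustrative; actual output depends on the local timezone, shown here as UTC):

from modal._utils.time_utils import timestamp_to_local

timestamp_to_local(1718000000.0)                # "2024-06-10 06:13:20+00:00" (ISO form, the default)
timestamp_to_local(1718000000.0, isotz=False)   # "2024-06-10 06:13 UTC"
timestamp_to_local(0.0)                         # None for unset timestamps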
modal/cli/app.py CHANGED
@@ -15,7 +15,8 @@ from modal.client import _Client
 from modal.environments import ensure_env
 from modal_proto import api_pb2
 
-from .utils import ENV_OPTION, display_table, get_app_id_from_name, stream_app_logs, timestamp_to_local
+from .._utils.time_utils import timestamp_to_local
+from .utils import ENV_OPTION, display_table, get_app_id_from_name, stream_app_logs
 
 APP_IDENTIFIER = Argument("", help="App name or ID")
 NAME_OPTION = typer.Option("", "-n", "--name", help="Deprecated: Pass App name as a positional argument")
@@ -84,6 +85,7 @@ def logs(
     app_identifier: str = APP_IDENTIFIER,
     *,
     env: Optional[str] = ENV_OPTION,
+    timestamps: bool = typer.Option(False, "--timestamps", help="Show timestamps for each log line"),
 ):
     """Show App logs, streaming while active.
 
@@ -103,7 +105,7 @@
 
     """
     app_id = get_app_id(app_identifier, env)
-    stream_app_logs(app_id)
+    stream_app_logs(app_id, show_timestamps=timestamps)
 
 
 @app_cli.command("rollback", no_args_is_help=True, context_settings={"ignore_unknown_options": True})
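With this option, an invocation along the lines of `modal app logs my-app --timestamps` (shown here for illustration) streams the same logs but prefixes each line with its local timestamp via the OutputManager changes above.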
modal/cli/cluster.py CHANGED
@@ -8,7 +8,8 @@ from rich.text import Text
 from modal._object import _get_environment_name
 from modal._pty import get_pty_info
 from modal._utils.async_utils import synchronizer
-from modal.cli.utils import ENV_OPTION, display_table, is_tty, timestamp_to_local
+from modal._utils.time_utils import timestamp_to_local
+from modal.cli.utils import ENV_OPTION, display_table, is_tty
 from modal.client import _Client
 from modal.config import config
 from modal.container_process import _ContainerProcess
modal/cli/container.py CHANGED
@@ -8,7 +8,8 @@ from modal._object import _get_environment_name
 from modal._pty import get_pty_info
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal.cli.utils import ENV_OPTION, display_table, is_tty, stream_app_logs, timestamp_to_local
+from modal._utils.time_utils import timestamp_to_local
+from modal.cli.utils import ENV_OPTION, display_table, is_tty, stream_app_logs
 from modal.client import _Client
 from modal.config import config
 from modal.container_process import _ContainerProcess
modal/cli/dict.py CHANGED
@@ -8,7 +8,8 @@ from typer import Argument, Option, Typer
 from modal._resolver import Resolver
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
+from modal._utils.time_utils import timestamp_to_local
+from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.dict import _Dict
 from modal.environments import ensure_env
@@ -17,8 +17,9 @@ from modal._location import display_location
 from modal._output import OutputManager, ProgressHandler
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
+from modal._utils.time_utils import timestamp_to_local
 from modal.cli._download import _volume_download
-from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
+from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
 from modal.network_file_system import _NetworkFileSystem
modal/cli/queues.py CHANGED
@@ -8,7 +8,8 @@ from typer import Argument, Option, Typer
 from modal._resolver import Resolver
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
+from modal._utils.time_utils import timestamp_to_local
+from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
 from modal.queue import _Queue
modal/cli/secret.py CHANGED
@@ -13,7 +13,8 @@ from typer import Argument
 
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
+from modal._utils.time_utils import timestamp_to_local
+from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
 from modal.secret import _Secret
modal/cli/utils.py CHANGED
@@ -1,7 +1,6 @@
 # Copyright Modal Labs 2022
 import asyncio
 from collections.abc import Sequence
-from datetime import datetime
 from json import dumps
 from typing import Optional, Union
 
@@ -23,10 +22,13 @@ from ..exception import NotFoundError
 
 @synchronizer.create_blocking
 async def stream_app_logs(
-    app_id: Optional[str] = None, task_id: Optional[str] = None, app_logs_url: Optional[str] = None
+    app_id: Optional[str] = None,
+    task_id: Optional[str] = None,
+    app_logs_url: Optional[str] = None,
+    show_timestamps: bool = False,
 ):
     client = await _Client.from_env()
-    output_mgr = OutputManager(status_spinner_text=f"Tailing logs for {app_id}")
+    output_mgr = OutputManager(status_spinner_text=f"Tailing logs for {app_id}", show_timestamps=show_timestamps)
     try:
         with output_mgr.show_status_spinner():
             await get_app_logs_loop(client, output_mgr, app_id=app_id, task_id=task_id, app_logs_url=app_logs_url)
@@ -61,18 +63,6 @@ async def get_app_id_from_name(name: str, env: Optional[str], client: Optional[_
     return resp.app_id
 
 
-def timestamp_to_local(ts: float, isotz: bool = True) -> str:
-    if ts > 0:
-        locale_tz = datetime.now().astimezone().tzinfo
-        dt = datetime.fromtimestamp(ts, tz=locale_tz)
-        if isotz:
-            return dt.isoformat(sep=" ", timespec="seconds")
-        else:
-            return f"{datetime.strftime(dt, '%Y-%m-%d %H:%M')} {locale_tz.tzname(dt)}"
-    else:
-        return None
-
-
 def _plain(text: Union[Text, str]) -> str:
     return text.plain if isinstance(text, Text) else text
 
modal/cli/volume.py CHANGED
@@ -15,8 +15,9 @@ import modal
 from modal._output import OutputManager, ProgressHandler
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
+from modal._utils.time_utils import timestamp_to_local
 from modal.cli._download import _volume_download
-from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
+from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
 from modal.volume import _AbstractVolumeUploadContextManager, _Volume
@@ -203,7 +204,7 @@ async def put(
             vol.object_id,
             vol._client,
             progress_cb=progress_handler.progress,
-            force=force
+            force=force,
         ) as batch:
             batch.put_directory(local_path, remote_path)
     except FileExistsError as exc:
@@ -219,7 +220,7 @@ async def put(
             vol.object_id,
             vol._client,
             progress_cb=progress_handler.progress,
-            force=force
+            force=force,
         ) as batch:
             batch.put_file(local_path, remote_path)
 
modal/client.pyi CHANGED
@@ -27,11 +27,7 @@ class _Client:
     _snapshotted: bool
 
     def __init__(
-        self,
-        server_url: str,
-        client_type: int,
-        credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.0.1.dev4",
+        self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "1.0.2"
     ): ...
     def is_closed(self) -> bool: ...
     @property
@@ -90,11 +86,7 @@ class Client:
     _snapshotted: bool
 
    def __init__(
-        self,
-        server_url: str,
-        client_type: int,
-        credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.0.1.dev4",
+        self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "1.0.2"
     ): ...
     def is_closed(self) -> bool: ...
     @property
modal/exception.py CHANGED
@@ -161,6 +161,8 @@ def simulate_preemption(wait_seconds: int, jitter_seconds: int = 0):
     See https://modal.com/docs/guide/preemption for more details on preemption
     handling.
     """
+    if wait_seconds <= 0:
+        raise ValueError("Time to wait must be greater than 0")
     signal.signal(signal.SIGALRM, _simulate_preemption_interrupt)
     jitter = random.randrange(0, jitter_seconds) if jitter_seconds else 0
     signal.alarm(wait_seconds + jitter)
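The new guard turns a zero or negative wait into an immediate error rather than a no-op alarm. A short illustration (import path assumed from where this diff defines the function):

from modal.exception import simulate_preemption

simulate_preemption(30)  # arms a SIGALRM roughly 30s out to trigger the simulated-preemption handler
simulate_preemption(0)   # now raises ValueError("Time to wait must be greater than 0")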
modal/functions.pyi CHANGED
@@ -227,11 +227,11 @@ class Function(
 
     _call_generator: ___call_generator_spec[typing_extensions.Self]
 
-    class __remote_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class __remote_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...
 
-    remote: __remote_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
+    remote: __remote_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
 
     class __remote_gen_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /, *args, **kwargs) -> typing.Generator[typing.Any, None, None]: ...
@@ -246,12 +246,12 @@ class Function(
         self, *args: modal._functions.P.args, **kwargs: modal._functions.P.kwargs
     ) -> modal._functions.OriginalReturnType: ...
 
-    class ___experimental_spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class ___experimental_spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
 
     _experimental_spawn: ___experimental_spawn_spec[
-        modal._functions.ReturnType, modal._functions.P, typing_extensions.Self
+        modal._functions.P, modal._functions.ReturnType, typing_extensions.Self
     ]
 
     class ___spawn_map_inner_spec(typing_extensions.Protocol[P_INNER, SUPERSELF]):
@@ -260,11 +260,11 @@ class Function(
 
     _spawn_map_inner: ___spawn_map_inner_spec[modal._functions.P, typing_extensions.Self]
 
-    class __spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class __spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
 
-    spawn: __spawn_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
+    spawn: __spawn_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
 
     def get_raw_f(self) -> collections.abc.Callable[..., typing.Any]: ...
 
modal/image.py CHANGED
@@ -1442,7 +1442,9 @@ class _Image(_Object, type_prefix="im"):
             if version > "2024.10":
                 # for convenience when launching in a sandbox: sleep for 48h
                 commands.append(f'CMD ["sleep", "{48 * 3600}"]')
-            context_files = {CONTAINER_REQUIREMENTS_PATH: _get_modal_requirements_path(version, python_version)}
+            context_files = {}
+            if version <= "2024.10":
+                context_files = {CONTAINER_REQUIREMENTS_PATH: _get_modal_requirements_path(version, python_version)}
             return DockerfileSpec(commands=commands, context_files=context_files)
 
         return _Image._from_args(
@@ -1517,12 +1519,15 @@ class _Image(_Object, type_prefix="im"):
 
         # Note: this change is because we install dependencies with uv in 2024.10+
         requirements_prefix = "python -m " if builder_version < "2024.10" else ""
-        modal_requirements_commands = [
-            f"COPY {CONTAINER_REQUIREMENTS_PATH} {CONTAINER_REQUIREMENTS_PATH}",
-            f"RUN python -m pip install --upgrade {_base_image_config('package_tools', builder_version)}",
-            f"RUN {requirements_prefix}{_get_modal_requirements_command(builder_version)}",
-        ]
-        if builder_version > "2023.12":
+        modal_requirements_commands = []
+        if builder_version <= "2024.10":
+            # past 2024.10, client dependencies are mounted at runtime
+            modal_requirements_commands.extend([
+                f"COPY {CONTAINER_REQUIREMENTS_PATH} {CONTAINER_REQUIREMENTS_PATH}",
+                f"RUN python -m pip install --upgrade {_base_image_config('package_tools', builder_version)}",
+                f"RUN {requirements_prefix}{_get_modal_requirements_command(builder_version)}",
+            ])
+        if "2024.10" >= builder_version > "2023.12":
             modal_requirements_commands.append(f"RUN rm {CONTAINER_REQUIREMENTS_PATH}")
 
         return [
@@ -1585,7 +1590,9 @@ class _Image(_Object, type_prefix="im"):
 
         def build_dockerfile(version: ImageBuilderVersion) -> DockerfileSpec:
             commands = _Image._registry_setup_commands(tag, version, setup_dockerfile_commands, add_python)
-            context_files = {CONTAINER_REQUIREMENTS_PATH: _get_modal_requirements_path(version, add_python)}
+            context_files = {}
+            if version <= "2024.10":
+                context_files = {CONTAINER_REQUIREMENTS_PATH: _get_modal_requirements_path(version, add_python)}
             return DockerfileSpec(commands=commands, context_files=context_files)
 
         return _Image._from_args(
@@ -1792,8 +1799,10 @@ class _Image(_Object, type_prefix="im"):
 
         def build_dockerfile_python(version: ImageBuilderVersion) -> DockerfileSpec:
             commands = _Image._registry_setup_commands("base", version, [], add_python)
-            requirements_path = _get_modal_requirements_path(version, add_python)
-            context_files = {CONTAINER_REQUIREMENTS_PATH: requirements_path}
+            context_files = {}
+            if version <= "2024.10":
+                requirements_path = _get_modal_requirements_path(version, add_python)
+                context_files = {CONTAINER_REQUIREMENTS_PATH: requirements_path}
             return DockerfileSpec(commands=commands, context_files=context_files)
 
         return _Image._from_args(
@@ -1810,22 +1819,35 @@ class _Image(_Object, type_prefix="im"):
             raise TypeError("The `python_version` argument should be a string, not a float.")
 
         def build_dockerfile(version: ImageBuilderVersion) -> DockerfileSpec:
-            requirements_path = _get_modal_requirements_path(version, python_version)
-            context_files = {CONTAINER_REQUIREMENTS_PATH: requirements_path}
+            context_files = {}
+            if version <= "2024.10":
+                requirements_path = _get_modal_requirements_path(version, python_version)
+                context_files = {CONTAINER_REQUIREMENTS_PATH: requirements_path}
             full_python_version = _dockerhub_python_version(version, python_version)
             debian_codename = _base_image_config("debian", version)
 
             commands = [
                 f"FROM python:{full_python_version}-slim-{debian_codename}",
-                f"COPY {CONTAINER_REQUIREMENTS_PATH} {CONTAINER_REQUIREMENTS_PATH}",
+            ]
+            if version <= "2024.10":
+                commands.extend([
+                    f"COPY {CONTAINER_REQUIREMENTS_PATH} {CONTAINER_REQUIREMENTS_PATH}",
+                ])
+            commands.extend([
                 "RUN apt-get update",
                 "RUN apt-get install -y gcc gfortran build-essential",
                 f"RUN pip install --upgrade {_base_image_config('package_tools', version)}",
-                f"RUN {_get_modal_requirements_command(version)}",
+            ])
+            if version <= "2024.10":
+                # after 2024.10, modal requirements are mounted at runtime
+                commands.extend([
+                    f"RUN {_get_modal_requirements_command(version)}",
+                ])
+            commands.extend([
                 # Set debian front-end to non-interactive to avoid users getting stuck with input prompts.
                 "RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections",
-            ]
-            if version > "2023.12":
+            ])
+            if "2024.10" >= version > "2023.12":
                 commands.append(f"RUN rm {CONTAINER_REQUIREMENTS_PATH}")
             if version > "2024.10":
                 # for convenience when launching in a sandbox: sleep for 48h
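All of the guards above compare ImageBuilderVersion values as plain strings, which is sound because the labels are zero-padded "YYYY.MM" strings that sort lexicographically in date order. A quick illustration (the specific labels are examples, not a statement of which builder versions exist):

# String comparison tracks date order for zero-padded "YYYY.MM" labels.
assert "2023.12" < "2024.04" < "2024.10" < "2025.06"
assert "2024.10" >= "2024.04" > "2023.12"   # matched by the `"2024.10" >= version > "2023.12"` branch
assert not ("2025.06" <= "2024.10")         # newer builders skip the requirements COPY/RUN steps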
modal/sandbox.py CHANGED
@@ -252,7 +252,8 @@ class _Sandbox(_Object, type_prefix="sb"):
         client: Optional[_Client] = None,
     ) -> "_Sandbox":
         """
-        Create a new Sandbox to run untrusted, arbitrary code.
+        Create a new Sandbox to run untrusted, arbitrary code. The Sandbox's corresponding container
+        will be created asynchronously.
 
         **Usage**
 