modal 1.0.3.dev8__py3-none-any.whl → 1.0.3.dev11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
modal/_runtime/container_io_manager.py CHANGED
@@ -323,6 +323,7 @@ class _ContainerIOManager:
         self._heartbeat_loop = None
         self._heartbeat_condition = None
         self._waiting_for_memory_snapshot = False
+        self._cuda_checkpoint_session = None
 
         self._is_interactivity_enabled = False
         self._fetching_inputs = True
@@ -881,13 +882,11 @@ class _ContainerIOManager:
         # Restore GPU memory.
         if self.function_def._experimental_enable_gpu_snapshot and self.function_def.resources.gpu_config.gpu_type:
             logger.debug("GPU memory snapshot enabled. Attempting to restore GPU memory.")
-            gpu_process_state = gpu_memory_snapshot.get_state()
-            if gpu_process_state != gpu_memory_snapshot.CudaCheckpointState.CHECKPOINTED:
-                raise ValueError(
-                    "Cannot restore GPU state if GPU isn't in a 'checkpointed' state. "
-                    f"Current GPU state: {gpu_process_state}"
-                )
-            gpu_memory_snapshot.toggle()
+
+            assert self._cuda_checkpoint_session, (
+                "CudaCheckpointSession not found when attempting to restore GPU memory"
+            )
+            self._cuda_checkpoint_session.restore()
 
         # Restore input to default state.
         self.current_input_id = None
@@ -907,14 +906,9 @@ class _ContainerIOManager:
         # Snapshot GPU memory.
         if self.function_def._experimental_enable_gpu_snapshot and self.function_def.resources.gpu_config.gpu_type:
             logger.debug("GPU memory snapshot enabled. Attempting to snapshot GPU memory.")
-            gpu_process_state = gpu_memory_snapshot.get_state()
-            if gpu_process_state != gpu_memory_snapshot.CudaCheckpointState.RUNNING:
-                raise ValueError(
-                    f"Cannot snapshot GPU state if it isn't running. Current GPU state: {gpu_process_state}"
-                )
 
-            gpu_memory_snapshot.toggle()
-            gpu_memory_snapshot.wait_for_state(gpu_memory_snapshot.CudaCheckpointState.CHECKPOINTED)
+            self._cuda_checkpoint_session = gpu_memory_snapshot.CudaCheckpointSession()
+            self._cuda_checkpoint_session.checkpoint()
 
         # Notify the heartbeat loop that the snapshot phase has begun in order to
         # prevent it from sending heartbeat RPCs
modal/_runtime/gpu_memory_snapshot.py CHANGED
@@ -6,10 +6,12 @@
 #
 # [1] https://github.com/NVIDIA/cuda-checkpoint
 
-import os
 import subprocess
 import time
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
 from enum import Enum
+from pathlib import Path
 
 from modal.config import config, logger
 
@@ -29,73 +31,169 @@ class CudaCheckpointException(Exception):
     pass
 
 
-def toggle():
-    """Toggle CUDA checkpoint state for current process, moving GPU memory to the
-    CPU and back depending on the current process state when called."""
-    pid = get_own_pid()
-    logger.debug(f"Toggling CUDA checkpoint state for PID {pid}")
+@dataclass
+class CudaCheckpointProcess:
+    """Contains a reference to a PID with active CUDA session. This also provides
+    methods for checkpointing and restoring GPU memory."""
 
-    try:
-        subprocess.run(
-            [
-                CUDA_CHECKPOINT_PATH,
-                "--toggle",
-                "--pid",
-                str(pid),
-            ],
-            check=True,
-            capture_output=True,
-            text=True,
-        )
-        logger.debug("Successfully toggled CUDA checkpoint state")
+    pid: int
+    state: CudaCheckpointState
 
-    except subprocess.CalledProcessError as e:
-        logger.debug(f"Failed to toggle CUDA checkpoint state: {e.stderr}")
-        raise CudaCheckpointException(e.stderr)
+    def toggle(self, target_state: CudaCheckpointState, timeout_secs: float = 5 * 60.0):
+        """Toggle CUDA checkpoint state for current process, moving GPU memory to the
+        CPU and back depending on the current process state when called."""
+        logger.debug(f"PID: {self.pid} Toggling CUDA checkpoint state to {target_state.value}")
 
+        start_time = time.monotonic()
 
-def get_state() -> CudaCheckpointState:
-    """Get current CUDA checkpoint state for this process."""
-    pid = get_own_pid()
+        while self._should_continue_toggle(target_state, start_time, timeout_secs):
+            self._execute_toggle_command()
+            time.sleep(0.1)
 
-    try:
-        result = subprocess.run(
-            [CUDA_CHECKPOINT_PATH, "--get-state", "--pid", str(pid)], check=True, capture_output=True, text=True
-        )
+        logger.debug(f"PID: {self.pid} Target state {target_state.value} reached")
 
-        # Parse output to get state
-        state_str = result.stdout.strip().lower()
-        return CudaCheckpointState(state_str)
+    def _should_continue_toggle(
+        self, target_state: CudaCheckpointState, start_time: float, timeout_secs: float
+    ) -> bool:
+        """Check if toggle operation should continue based on current state and timeout."""
+        self.refresh_state()
 
-    except subprocess.CalledProcessError as e:
-        logger.debug(f"Failed to get CUDA checkpoint state: {e.stderr}")
-        raise CudaCheckpointException(e.stderr)
+        if self.state == target_state:
+            return False
 
-
-def wait_for_state(target_state: CudaCheckpointState, timeout_secs: float = 5.0):
-    """Wait for CUDA checkpoint to reach a specific state."""
-    logger.debug(f"Waiting for CUDA checkpoint state {target_state.value}")
-    start_time = time.monotonic()
-
-    while True:
-        current_state = get_state()
-
-        if current_state == target_state:
-            logger.debug(f"Target state {target_state.value} reached")
-            break
-
-        if current_state == CudaCheckpointState.FAILED:
-            raise CudaCheckpointException(f"CUDA process state is {current_state}")
+        if self.state == CudaCheckpointState.FAILED:
+            raise CudaCheckpointException(f"PID: {self.pid} CUDA process state is {self.state}")
 
         elapsed = time.monotonic() - start_time
         if elapsed >= timeout_secs:
-            raise CudaCheckpointException(f"Timeout after {elapsed:.2f}s waiting for state {target_state.value}")
-
-        time.sleep(0.1)
-
-
-def get_own_pid():
-    """Returns the Process ID (PID) of the current Python process
-    using only the standard library.
-    """
-    return os.getpid()
+            raise CudaCheckpointException(
+                f"PID: {self.pid} Timeout after {elapsed:.2f}s waiting for state {target_state.value}. "
+                f"Current state: {self.state}"
+            )
+
+        return True
+
+    def _execute_toggle_command(self):
+        """Execute the cuda-checkpoint toggle command."""
+        try:
+            subprocess.run(
+                [CUDA_CHECKPOINT_PATH, "--toggle", "--pid", str(self.pid)],
+                check=True,
+                capture_output=True,
+                text=True,
+            )
+            logger.debug(f"PID: {self.pid} Successfully toggled CUDA checkpoint state")
+        except subprocess.CalledProcessError as e:
+            logger.debug(f"PID: {self.pid} Failed to toggle CUDA checkpoint state: {e.stderr}")
+            raise CudaCheckpointException(e.stderr)
+
+    def refresh_state(self) -> None:
+        """Refreshes the current CUDA checkpoint state for this process."""
+        try:
+            result = subprocess.run(
+                [CUDA_CHECKPOINT_PATH, "--get-state", "--pid", str(self.pid)],
+                check=True,
+                capture_output=True,
+                text=True,
+                timeout=5,
+            )
+
+            state_str = result.stdout.strip().lower()
+            self.state = CudaCheckpointState(state_str)
+
+        except subprocess.CalledProcessError as e:
+            logger.debug(f"PID: {self.pid} Failed to get CUDA checkpoint state: {e.stderr}")
+            raise CudaCheckpointException(e.stderr)
+
+
+class CudaCheckpointSession:
+    """Manages the checkpointing state of processes with active CUDA sessions."""
+
+    def __init__(self):
+        self.cuda_processes = self._get_cuda_pids()
+        logger.debug(f"PIDs with CUDA sessions: {[c.pid for c in self.cuda_processes]}")
+
+    def _get_cuda_pids(self) -> list[CudaCheckpointProcess]:
+        """Iterates over all PIDs and identifies the ones that have running
+        CUDA sessions."""
+        cuda_pids: list[CudaCheckpointProcess] = []
+
+        # Get all active process IDs from /proc directory
+        proc_dir = Path("/proc")
+        if not proc_dir.exists():
+            raise CudaCheckpointException(
+                "OS does not have /proc path rendering it incompatible with GPU memory snapshots."
+            )
+
+        for entry in proc_dir.iterdir():
+            if not entry.name.isdigit():
+                continue
+
+            pid = int(entry.name)
+            try:
+                # Call cuda-checkpoint to check if this PID has a CUDA session
+                result = subprocess.run(
+                    [CUDA_CHECKPOINT_PATH, "--get-state", "--pid", str(pid)],
+                    capture_output=True,
+                    text=True,
+                    timeout=10,
+                )
+
+                # If the command succeeds (return code 0), this PID has a CUDA session
+                if result.returncode == 0:
+                    state_str = result.stdout.strip().lower()
+                    state = CudaCheckpointState(state_str)
+
+                    cuda_checkpoint_process = CudaCheckpointProcess(pid=pid, state=state)
+                    cuda_pids.append(cuda_checkpoint_process)
+
+            # Command failed, which is expected for PIDs without CUDA sessions
+            except subprocess.CalledProcessError:
+                continue
+
+            # Raise other exceptions
+            except subprocess.TimeoutExpired:
+                raise CudaCheckpointException(f"Failed to get CUDA state for PID {pid}")
+            except Exception as e:
+                raise CudaCheckpointException(e)
+
+        # Sort PIDs for ordered checkpointing
+        cuda_pids.sort(key=lambda x: x.pid)
+        return cuda_pids
+
+    def checkpoint(self) -> None:
+        # Validate all states first
+        for proc in self.cuda_processes:
+            if proc.state != CudaCheckpointState.RUNNING:
+                raise CudaCheckpointException(f"CUDA session not in {CudaCheckpointState.RUNNING} state.")
+
+        # Moving state from GPU to CPU can take several seconds per CUDA session.
+        # Make a parallel call per CUDA session.
+        start = time.perf_counter()
+
+        def checkpoint_impl(proc: CudaCheckpointProcess):
+            proc.toggle(CudaCheckpointState.CHECKPOINTED)
+
+        with ThreadPoolExecutor() as executor:
+            list(executor.map(checkpoint_impl, self.cuda_processes))
+
+        elapsed = time.perf_counter() - start
+        logger.debug(f"Checkpointing CUDA sessions took => {elapsed:.3f}s")
+
+    def restore(self) -> None:
+        # Validate all states first
+        for proc in self.cuda_processes:
+            if proc.state != CudaCheckpointState.CHECKPOINTED:
+                raise CudaCheckpointException(f"CUDA session not in {CudaCheckpointState.CHECKPOINTED} state.")
+
+        # See checkpoint() for rationale about parallelism.
+        start = time.perf_counter()
+
+        def restore_process(proc: CudaCheckpointProcess):
+            proc.toggle(CudaCheckpointState.RUNNING)
+
+        with ThreadPoolExecutor() as executor:
+            list(executor.map(restore_process, self.cuda_processes))
+
+        elapsed = time.perf_counter() - start
+        logger.debug(f"Restoring CUDA sessions took => {elapsed:.3f}s")
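
Taken together, the two files above replace the single-process toggle/wait helpers with a session object that discovers and drives every PID holding a CUDA context. A minimal sketch of the flow the container_io_manager.py hunks implement (illustrative only; the surrounding snapshot machinery is omitted):

    # Illustrative sketch of the new checkpoint/restore flow shown above.
    from modal._runtime import gpu_memory_snapshot

    # Before the memory snapshot: find every PID with an active CUDA session and
    # move its GPU state to the CPU, one worker thread per process.
    session = gpu_memory_snapshot.CudaCheckpointSession()
    session.checkpoint()

    # ... container memory snapshot is taken here ...

    # On restore: move GPU state back and poll until each process reports RUNNING.
    session.restore()
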
modal/cli/secret.py CHANGED
@@ -1,7 +1,9 @@
 # Copyright Modal Labs 2022
+import json
 import os
 import platform
 import subprocess
+from pathlib import Path
 from tempfile import NamedTemporaryFile
 from typing import Optional
 
@@ -48,14 +50,17 @@ async def list_(env: Optional[str] = ENV_OPTION, json: bool = False):
 @secret_cli.command("create", help="Create a new secret.")
 @synchronizer.create_blocking
 async def create(
-    secret_name,
-    keyvalues: list[str] = typer.Argument(..., help="Space-separated KEY=VALUE items"),
+    secret_name: str,
+    keyvalues: Optional[list[str]] = typer.Argument(default=None, help="Space-separated KEY=VALUE items."),
     env: Optional[str] = ENV_OPTION,
+    from_dotenv: Optional[Path] = typer.Option(default=None, help="Path to a .env file to load secrets from."),
+    from_json: Optional[Path] = typer.Option(default=None, help="Path to a JSON file to load secrets from."),
     force: bool = typer.Option(False, "--force", help="Overwrite the secret if it already exists."),
 ):
     env = ensure_env(env)
     env_dict = {}
-    for arg in keyvalues:
+
+    for arg in keyvalues or []:
         if "=" in arg:
             key, value = arg.split("=", 1)
             if value == "-":
@@ -63,17 +68,51 @@ async def create(
             env_dict[key] = value
         else:
             raise click.UsageError(
-                """Each item should be of the form <KEY>=VALUE. To enter secrets using your $EDITOR, use `<KEY>=-`.
+                """Each item should be of the form <KEY>=VALUE. To enter secrets using your $EDITOR, use `<KEY>=-`. To
+enter secrets from environment variables, use `<KEY>="$ENV_VAR"`.
 
 E.g.
 
 modal secret create my-credentials username=john password=-
+modal secret create my-credentials username=john password="$PASSWORD"
 """
             )
 
+    if from_dotenv:
+        if not from_dotenv.is_file():
+            raise click.UsageError(f"Could not read .env file at {from_dotenv}")
+
+        try:
+            from dotenv import dotenv_values
+        except ImportError:
+            raise ImportError(
+                "Need the `python-dotenv` package installed. You can install it by running `pip install python-dotenv`."
+            )
+
+        try:
+            env_dict.update(dotenv_values(from_dotenv))
+        except Exception as e:
+            raise click.UsageError(f"Could not parse .env file at {from_dotenv}: {e}")
+
+    if from_json:
+        if not from_json.is_file():
+            raise click.UsageError(f"Could not read JSON file at {from_json}")
+
+        try:
+            with from_json.open("r") as f:
+                env_dict.update(json.load(f))
+        except Exception as e:
+            raise click.UsageError(f"Could not parse JSON file at {from_json}: {e}")
+
     if not env_dict:
         raise click.UsageError("You need to specify at least one key for your secret")
 
+    for k, v in env_dict.items():
+        if not isinstance(k, str) or not k:
+            raise click.UsageError(f"Invalid key: '{k}'")
+        if not isinstance(v, str):
+            raise click.UsageError(f"Non-string value for secret '{k}'")
+
     # Create secret
     await _Secret.create_deployed(secret_name, env_dict, overwrite=force)
 
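Based on the new typer options above, invocations along these lines should become possible (the flag spellings are inferred from typer's default parameter-name-to-flag conversion; the secret name and file paths are placeholders):

    modal secret create my-credentials --from-dotenv .env
    modal secret create my-credentials --from-json secrets.json

KEY=VALUE arguments can still be combined with either file source, since the parsed file contents are merged into the same env_dict before the secret is created.
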
modal/client.pyi CHANGED
@@ -31,7 +31,7 @@ class _Client:
         server_url: str,
         client_type: int,
         credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.0.3.dev8",
+        version: str = "1.0.3.dev11",
     ): ...
     def is_closed(self) -> bool: ...
     @property
@@ -94,7 +94,7 @@ class Client:
         server_url: str,
         client_type: int,
         credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.0.3.dev8",
+        version: str = "1.0.3.dev11",
     ): ...
     def is_closed(self) -> bool: ...
     @property
modal/functions.pyi CHANGED
@@ -227,11 +227,11 @@ class Function(
 
     _call_generator: ___call_generator_spec[typing_extensions.Self]
 
-    class __remote_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class __remote_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...
 
-    remote: __remote_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
+    remote: __remote_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
 
     class __remote_gen_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /, *args, **kwargs) -> typing.Generator[typing.Any, None, None]: ...
@@ -246,12 +246,12 @@ class Function(
         self, *args: modal._functions.P.args, **kwargs: modal._functions.P.kwargs
     ) -> modal._functions.OriginalReturnType: ...
 
-    class ___experimental_spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class ___experimental_spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
 
     _experimental_spawn: ___experimental_spawn_spec[
-        modal._functions.ReturnType, modal._functions.P, typing_extensions.Self
+        modal._functions.P, modal._functions.ReturnType, typing_extensions.Self
     ]
 
     class ___spawn_map_inner_spec(typing_extensions.Protocol[P_INNER, SUPERSELF]):
@@ -260,11 +260,11 @@ class Function(
 
     _spawn_map_inner: ___spawn_map_inner_spec[modal._functions.P, typing_extensions.Self]
 
-    class __spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
+    class __spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
         def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
         async def aio(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
 
-    spawn: __spawn_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
+    spawn: __spawn_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
 
     def get_raw_f(self) -> collections.abc.Callable[..., typing.Any]: ...
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: modal
-Version: 1.0.3.dev8
+Version: 1.0.3.dev11
 Summary: Python client library for Modal
 Author-email: Modal Labs <support@modal.com>
 License: Apache-2.0
@@ -22,7 +22,7 @@ modal/app.py,sha256=NZ_rJ9TuMfiNiLg8-gOFgufD5flGtXWPHOZI0gdD3hE,46585
 modal/app.pyi,sha256=4-b_vbe3lNAqQPcMRpQCEDsE1zsVkQRJGUql9B7HvbM,22659
 modal/call_graph.py,sha256=1g2DGcMIJvRy-xKicuf63IVE98gJSnQsr8R_NVMptNc,2581
 modal/client.py,sha256=OwISJvkgMb-rHm9Gc4i-7YcDgGiZgwJ7F_PzwZH7a6Q,16847
-modal/client.pyi,sha256=9Na2pTVt0dajHjqeQIslZ9l1qZV1oTEX0ZfxR2X9w4s,8457
+modal/client.pyi,sha256=No4MWgan--Ny6RM3uhTaKX9FK5QZHMOvhmgkUk3rZL8,8459
 modal/cloud_bucket_mount.py,sha256=YOe9nnvSr4ZbeCn587d7_VhE9IioZYRvF9VYQTQux08,5914
 modal/cloud_bucket_mount.pyi,sha256=30T3K1a89l6wzmEJ_J9iWv9SknoGqaZDx59Xs-ZQcmk,1607
 modal/cls.py,sha256=dBbeARwOWftlKd1cwtM0cHFtQWSWkwVXwVmOV4w0SyI,37907
@@ -39,7 +39,7 @@ modal/file_io.py,sha256=lcMs_E9Xfm0YX1t9U2wNIBPnqHRxmImqjLW1GHqVmyg,20945
 modal/file_io.pyi,sha256=oB7x-rKq7bmm8cA7Z7W9C9yeko7KK9m9i5GidFnkGK4,9569
 modal/file_pattern_matcher.py,sha256=wov-otB5M1oTdrYDtR2_VgacYin2srdtAP4McA1Cqzw,6516
 modal/functions.py,sha256=kcNHvqeGBxPI7Cgd57NIBBghkfbeFJzXO44WW0jSmao,325
-modal/functions.pyi,sha256=5T58OucdNU4I-LqhBdwsWSAGka-Wa8nP2GcZ5K1bOL0,16236
+modal/functions.pyi,sha256=iqdp5ixtOOlm8bF-QYbD_G8VKqSRt_AVLT7AWjpn6pQ,16236
 modal/gpu.py,sha256=Kbhs_u49FaC2Zi0TjCdrpstpRtT5eZgecynmQi5IZVE,6752
 modal/image.py,sha256=yrI9DCw7GAck3d788GCHJom-_yU55zNu7reNapBhlgE,93284
 modal/image.pyi,sha256=2xjB6XOZDtm_chDdd90UoIj8pnDt5hCg6bOmu5fNaA4,25625
@@ -82,11 +82,11 @@ modal/volume.py,sha256=XtOxzvdTJ3H7BEldl5Puj6vvXucev0i6KWI5tgkXe7I,44120
 modal/volume.pyi,sha256=YZqC3-z4bfiTgb1-uO7XylrONvIsHQUZd9pqW_Wxd48,21145
 modal/_runtime/__init__.py,sha256=MIEP8jhXUeGq_eCjYFcqN5b1bxBM4fdk0VESpjWR0fc,28
 modal/_runtime/asgi.py,sha256=_2xSTsDD27Cit7xnMs4lzkJA2wzer2_N4Oa3BkXFzVA,22521
-modal/_runtime/container_io_manager.py,sha256=6j0jO2-s9ShckM4SK45OapoQxWW9HQwQjFaBkXPJPwU,44763
+modal/_runtime/container_io_manager.py,sha256=qKYtd52I0JAmiw1Wfy_EQXHuHsbmt-XwLqKDLBhWrZc,44289
 modal/_runtime/container_io_manager.pyi,sha256=OKvrccBxawjF0PHZuN5eXeh266fS7qZH8yAIG0P02cY,16349
 modal/_runtime/execution_context.py,sha256=73Y5zH_o-MhVCrkJXakYVlFkKqCa2CWvqoHjOfJrJGg,3034
 modal/_runtime/execution_context.pyi,sha256=AlRGyocfQlb4UpEuI_VmRRtvaBTbhjgyKSRFUePi8J0,667
-modal/_runtime/gpu_memory_snapshot.py,sha256=tA3m1d1cwnmHpvpCeN_WijDd6n8byn7LWlpicbIxiOI,3144
+modal/_runtime/gpu_memory_snapshot.py,sha256=HXgqPHQj0LARhmie_h62V95L-M2R1Kg21INUm_IStn8,7574
 modal/_runtime/telemetry.py,sha256=T1RoAGyjBDr1swiM6pPsGRSITm7LI5FDK18oNXxY08U,5163
 modal/_runtime/user_code_imports.py,sha256=78wJyleqY2RVibqcpbDQyfWVBVT9BjyHPeoV9WdwV5Y,17720
 modal/_utils/__init__.py,sha256=waLjl5c6IPDhSsdWAm9Bji4e2PVxamYABKAze6CHVXY,28
@@ -131,7 +131,7 @@ modal/cli/network_file_system.py,sha256=DoIdY8I42DjFdTtaYuRKNm7GC6vY0QtA4mk6694f
 modal/cli/profile.py,sha256=0TYhgRSGUvQZ5LH9nkl6iZllEvAjDniES264dE57wOM,3201
 modal/cli/queues.py,sha256=1OzC9HdCkbNz6twF3US4FZmIhuVRQ01GOfBY42ux61A,4533
 modal/cli/run.py,sha256=DPa-yQ9o7vjqwvs_TAOvVJxS51yVn__ZGCnbkORL37g,23972
-modal/cli/secret.py,sha256=oLFEPZoyyeMUKPaJZ9JKKl5mfkQU80DGF9p0atotqig,5002
+modal/cli/secret.py,sha256=2bngl3Gb6THXkQ2eWZIN9pOHeOFJqiSNo_waUCVYgns,6611
 modal/cli/token.py,sha256=mxSgOWakXG6N71hQb1ko61XAR9ZGkTMZD-Txn7gmTac,1924
 modal/cli/utils.py,sha256=9Q7DIUX78-c19zBQNA7EtkgqIFatvHWUVGHwUIeBX_0,3366
 modal/cli/volume.py,sha256=h0Lk4CWe37L8X1kOpCj_KXfGGCEZf799fkX6Tbd6BPg,10734
@@ -147,7 +147,7 @@ modal/requirements/2024.10.txt,sha256=qD-5cVIVM9wXesJ6JC89Ew-3m2KjEElUz3jaw_MddR
 modal/requirements/PREVIEW.txt,sha256=qD-5cVIVM9wXesJ6JC89Ew-3m2KjEElUz3jaw_MddRo,296
 modal/requirements/README.md,sha256=9tK76KP0Uph7O0M5oUgsSwEZDj5y-dcUPsnpR0Sc-Ik,854
 modal/requirements/base-images.json,sha256=57vMSqzMbLBxw5tFWSaMiIkkVEps4JfX5PAtXGnkS4U,740
-modal-1.0.3.dev8.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
+modal-1.0.3.dev11.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
 modal_docs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
 modal_docs/gen_cli_docs.py,sha256=c1yfBS_x--gL5bs0N4ihMwqwX8l3IBWSkBAKNNIi6bQ,3801
 modal_docs/gen_reference_docs.py,sha256=d_CQUGQ0rfw28u75I2mov9AlS773z9rG40-yq5o7g2U,6359
@@ -170,10 +170,10 @@ modal_proto/options_pb2.pyi,sha256=l7DBrbLO7q3Ir-XDkWsajm0d0TQqqrfuX54i4BMpdQg,1
 modal_proto/options_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
 modal_proto/options_pb2_grpc.pyi,sha256=CImmhxHsYnF09iENPoe8S4J-n93jtgUYD2JPAc0yJSI,247
 modal_proto/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-modal_version/__init__.py,sha256=ILpE8xfs22gw9WWQLpPlTAtz4i-b9kO12KIIRnKTj9s,120
+modal_version/__init__.py,sha256=KxbRFCmx8jMBlr6spsirRW-qm9_wykDtXonmu1ok9wY,121
 modal_version/__main__.py,sha256=2FO0yYQQwDTh6udt1h-cBnGd1c4ZyHnHSI4BksxzVac,105
-modal-1.0.3.dev8.dist-info/METADATA,sha256=koyzYQIvvwuxQ47sZW7NzzKQ5XBE7YaWmhejysY_3xg,2454
-modal-1.0.3.dev8.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
-modal-1.0.3.dev8.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
-modal-1.0.3.dev8.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
-modal-1.0.3.dev8.dist-info/RECORD,,
+modal-1.0.3.dev11.dist-info/METADATA,sha256=7T4TyX6KkAkprzHb-YYziHh4WHb_6kQXZa0bNpgvEGM,2455
+modal-1.0.3.dev11.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+modal-1.0.3.dev11.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
+modal-1.0.3.dev11.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
+modal-1.0.3.dev11.dist-info/RECORD,,
modal_version/__init__.py CHANGED
@@ -1,4 +1,4 @@
 # Copyright Modal Labs 2025
 """Supplies the current version of the modal client library."""
 
-__version__ = "1.0.3.dev8"
+__version__ = "1.0.3.dev11"