modal 1.0.2.dev7__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of modal might be problematic.
- modal/_functions.py +3 -2
- modal/_runtime/container_io_manager.py +8 -14
- modal/_runtime/gpu_memory_snapshot.py +158 -60
- modal/_utils/bytes_io_segment_payload.py +17 -3
- modal/cli/run.py +12 -0
- modal/cli/secret.py +43 -4
- modal/cli/volume.py +6 -1
- modal/client.pyi +2 -10
- modal/experimental/__init__.py +6 -4
- modal/image.py +38 -22
- modal/mount.py +128 -4
- modal/mount.pyi +22 -0
- modal/parallel_map.py +47 -23
- modal/runner.py +2 -7
- modal/sandbox.py +15 -3
- modal/sandbox.pyi +14 -6
- modal/schedule.py +17 -4
- modal/volume.py +17 -49
- modal/volume.pyi +11 -43
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/METADATA +2 -2
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/RECORD +29 -29
- modal_proto/api.proto +19 -4
- modal_proto/api_pb2.py +591 -585
- modal_proto/api_pb2.pyi +32 -6
- modal_version/__init__.py +1 -1
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/WHEEL +0 -0
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/entry_points.txt +0 -0
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {modal-1.0.2.dev7.dist-info → modal-1.0.3.dist-info}/top_level.txt +0 -0
modal/mount.py
CHANGED
@@ -21,7 +21,7 @@ from modal_version import __version__
 
 from ._object import _get_environment_name, _Object
 from ._resolver import Resolver
-from ._utils.async_utils import aclosing, async_map, synchronize_api
+from ._utils.async_utils import TaskContext, aclosing, async_map, synchronize_api
 from ._utils.blob_utils import FileUploadSpec, blob_upload_file, get_file_upload_spec_from_path
 from ._utils.deprecation import deprecation_warning
 from ._utils.grpc_utils import retry_transient_errors
@@ -115,7 +115,8 @@ class _MountFile(_MountEntry):
     def get_files_to_upload(self):
         local_file = self.local_file.resolve()
         if not local_file.exists():
-            raise FileNotFoundError(f"local file {local_file} does not exist")
+            msg = f"local file {local_file} does not exist"
+            raise FileNotFoundError(msg)
 
         rel_filename = self.remote_path
         yield local_file, rel_filename
@@ -144,10 +145,12 @@ class _MountDir(_MountEntry):
         local_dir = self.local_dir.expanduser().absolute()
 
         if not local_dir.exists():
-            raise FileNotFoundError(f"local dir {local_dir} does not exist")
+            msg = f"local dir {local_dir} does not exist"
+            raise FileNotFoundError(msg)
 
         if not local_dir.is_dir():
-            raise NotADirectoryError(f"local dir {local_dir} is not a directory")
+            msg = f"local dir {local_dir} is not a directory"
+            raise NotADirectoryError(msg)
 
         if self.recursive:
             gen = (os.path.join(root, name) for root, dirs, files in os.walk(local_dir) for name in files)
@@ -810,3 +813,124 @@ def _is_modal_path(remote_path: PurePosixPath):
     if is_modal_path:
         return True
     return False
+
+
+REMOTE_PACKAGES_PATH = "/__modal/deps"
+REMOTE_SITECUSTOMIZE_PATH = "/pkg/sitecustomize.py"
+
+SITECUSTOMIZE_CONTENT = f"""
+# This file is automatically generated by Modal.
+# It ensures that Modal's python dependencies are available in the Python PATH,
+# while prioritizing user-installed packages.
+import sys; sys.path.append('{REMOTE_PACKAGES_PATH}')
+""".strip()
+
+
+async def _create_single_mount(
+    client: _Client,
+    builder_version: str,
+    python_version: str,
+    platform: str,
+    arch: str,
+    uv_python_platform: str = None,
+    check_if_exists: bool = True,
+):
+    import subprocess
+    import tempfile
+
+    profile_environment = config.get("environment")
+    abi_tag = "cp" + python_version.replace(".", "")
+    mount_name = f"{builder_version}-{abi_tag}-{platform}-{arch}"
+    uv_python_platform = uv_python_platform or f"{arch}-{platform}"
+
+    if check_if_exists:
+        try:
+            await Mount.from_name(mount_name, namespace=api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL).hydrate.aio(client)
+            print(f"✅ Found existing mount {mount_name} in global namespace.")
+            return
+        except modal.exception.NotFoundError:
+            pass
+
+    with tempfile.TemporaryDirectory() as tmpd:
+        print(f"📦 Building {mount_name}.")
+        requirements = os.path.join(os.path.dirname(__file__), f"requirements/{builder_version}.txt")
+        subprocess.run(
+            [
+                "uv",
+                "pip",
+                "install",
+                "--strict",
+                "--no-deps",
+                "--no-cache",
+                "-r",
+                requirements,
+                "--compile-bytecode",
+                "--target",
+                tmpd,
+                "--python-platform",
+                uv_python_platform,
+                "--python-version",
+                python_version,
+            ],
+            check=True,
+            capture_output=True,
+        )
+
+        print(f"🌐 Downloaded and unpacked packages to {tmpd}.")
+
+        python_mount = Mount._from_local_dir(tmpd, remote_path=REMOTE_PACKAGES_PATH)
+
+        with tempfile.NamedTemporaryFile() as sitecustomize:
+            sitecustomize.write(
+                SITECUSTOMIZE_CONTENT.encode("utf-8"),
+            )
+            sitecustomize.flush()
+
+            python_mount = python_mount.add_local_file(
+                sitecustomize.name,
+                remote_path=REMOTE_SITECUSTOMIZE_PATH,
+            )
+
+            await python_mount._deploy.aio(
+                mount_name,
+                api_pb2.DEPLOYMENT_NAMESPACE_GLOBAL,
+                environment_name=profile_environment,
+                client=client,
+            )
+            print(f"✅ Deployed mount {mount_name} to global namespace.")
+
+
+async def _create_client_dependency_mounts(
+    client=None,
+    check_if_exists=True,
+    python_versions: list[str] = list(PYTHON_STANDALONE_VERSIONS),
+):
+    coros = []
+    for python_version in python_versions:
+        # glibc >= 2.17
+        coros.append(
+            _create_single_mount(
+                client,
+                "PREVIEW",
+                python_version,
+                "manylinux_2_17",
+                "x86_64",
+                check_if_exists=check_if_exists,
+            )
+        )
+        # musl >= 1.2
+        coros.append(
+            _create_single_mount(
+                client,
+                "PREVIEW",
+                python_version,
+                "musllinux_1_2",
+                "x86_64",
+                uv_python_platform="x86_64-unknown-linux-musl",
+                check_if_exists=check_if_exists,
+            )
+        )
+    await TaskContext.gather(*coros)
+
+
+create_client_dependency_mounts = synchronize_api(_create_client_dependency_mounts)
modal/mount.pyi
CHANGED
@@ -308,6 +308,28 @@ def _create_client_mount(): ...
 def create_client_mount(): ...
 def _get_client_mount(): ...
 def _is_modal_path(remote_path: pathlib.PurePosixPath): ...
+async def _create_single_mount(
+    client: modal.client._Client,
+    builder_version: str,
+    python_version: str,
+    platform: str,
+    arch: str,
+    uv_python_platform: str = None,
+    check_if_exists: bool = True,
+): ...
+async def _create_client_dependency_mounts(
+    client=None, check_if_exists=True, python_versions: list[str] = ["3.9", "3.10", "3.11", "3.12", "3.13"]
+): ...
+
+class __create_client_dependency_mounts_spec(typing_extensions.Protocol):
+    def __call__(
+        self, /, client=None, check_if_exists=True, python_versions: list[str] = ["3.9", "3.10", "3.11", "3.12", "3.13"]
+    ): ...
+    async def aio(
+        self, /, client=None, check_if_exists=True, python_versions: list[str] = ["3.9", "3.10", "3.11", "3.12", "3.13"]
+    ): ...
+
+create_client_dependency_mounts: __create_client_dependency_mounts_spec
 
 ROOT_DIR: pathlib.PurePosixPath
 
modal/parallel_map.py
CHANGED
@@ -3,6 +3,7 @@ import asyncio
 import enum
 import time
 import typing
+from asyncio import FIRST_COMPLETED
 from dataclasses import dataclass
 from typing import Any, Callable, Optional
 
@@ -110,6 +111,7 @@ async def _map_invocation(
     max_inputs_outstanding = response.max_inputs_outstanding or MAX_INPUTS_OUTSTANDING_DEFAULT
 
     have_all_inputs = False
+    map_done_event = asyncio.Event()
     inputs_created = 0
     inputs_sent = 0
     inputs_retried = 0
@@ -122,10 +124,6 @@ async def _map_invocation(
     stale_retry_duplicates = 0
     no_context_duplicates = 0
 
-    def count_update():
-        if count_update_callback is not None:
-            count_update_callback(outputs_completed, inputs_created)
-
     retry_queue = TimestampPriorityQueue()
     completed_outputs: set[str] = set()  # Set of input_ids whose outputs are complete (expecting no more values)
     input_queue: asyncio.Queue[api_pb2.FunctionPutInputsItem | None] = asyncio.Queue()
@@ -134,9 +132,8 @@ async def _map_invocation(
     )
 
     async def create_input(argskwargs):
-        nonlocal inputs_created
         idx = inputs_created
-        inputs_created += 1
+        update_state(set_inputs_created=inputs_created + 1)
         (args, kwargs) = argskwargs
         return await _create_input(args, kwargs, client.stub, idx=idx, method_name=function._use_method_name)
 
@@ -147,9 +144,27 @@ async def _map_invocation(
             break
         yield raw_input  # args, kwargs
 
-    async def drain_input_generator():
-        nonlocal have_all_inputs
+    def update_state(set_have_all_inputs=None, set_inputs_created=None, set_outputs_completed=None):
+        # This should be the only method that needs nonlocal of the following vars
+        nonlocal have_all_inputs, inputs_created, outputs_completed
+        assert set_have_all_inputs is not False  # not allowed
+        assert set_inputs_created is None or set_inputs_created > inputs_created
+        assert set_outputs_completed is None or set_outputs_completed > outputs_completed
+        if set_have_all_inputs is not None:
+            have_all_inputs = set_have_all_inputs
+        if set_inputs_created is not None:
+            inputs_created = set_inputs_created
+        if set_outputs_completed is not None:
+            outputs_completed = set_outputs_completed
+
+        if count_update_callback is not None:
+            count_update_callback(outputs_completed, inputs_created)
+
+        if have_all_inputs and outputs_completed >= inputs_created:
+            # map is done
+            map_done_event.set()
 
+    async def drain_input_generator():
         # Parallelize uploading blobs
         async with aclosing(
             async_map_ordered(input_iter(), create_input, concurrency=BLOB_MAX_PARALLELISM)
@@ -159,12 +174,12 @@ async def _map_invocation(
 
         # close queue iterator
         await input_queue.put(None)
-        have_all_inputs = True
+        update_state(set_have_all_inputs=True)
        yield
 
     async def pump_inputs():
         assert client.stub
-        nonlocal
+        nonlocal inputs_sent
         async for items in queue_batch_iterator(input_queue, max_batch_size=MAP_INVOCATION_CHUNK_SIZE):
             # Add items to the manager. Their state will be SENDING.
             await map_items_manager.add_items(items)
@@ -178,7 +193,6 @@ async def _map_invocation(
             )
 
             resp = await send_inputs(client.stub.FunctionPutInputs, request)
-            count_update()
             inputs_sent += len(items)
             # Change item state to WAITING_FOR_OUTPUT, and set the input_id and input_jwt which are in the response.
             map_items_manager.handle_put_inputs_response(resp.inputs)
@@ -231,11 +245,8 @@ async def _map_invocation(
     async def get_all_outputs():
         assert client.stub
         nonlocal \
-            inputs_created, \
             successful_completions, \
             failed_completions, \
-            outputs_completed, \
-            have_all_inputs, \
             outputs_received, \
             already_complete_duplicates, \
             no_context_duplicates, \
@@ -244,7 +255,7 @@ async def _map_invocation(
 
         last_entry_id = "0-0"
 
-        while not have_all_inputs or outputs_completed < inputs_created:
+        while not map_done_event.is_set():
             logger.debug(f"Requesting outputs. Have {outputs_completed} outputs, {inputs_created} inputs.")
             # Get input_jwts of all items in the WAITING_FOR_OUTPUT state.
             # The server uses these to track for lost inputs.
@@ -258,12 +269,26 @@ async def _map_invocation(
                 requested_at=time.time(),
                 input_jwts=input_jwts,
             )
-            response = await retry_transient_errors(
-                client.stub.FunctionGetOutputs,
-                request,
-                max_retries=20,
-                attempt_timeout=OUTPUTS_TIMEOUT + ATTEMPT_TIMEOUT_GRACE_PERIOD,
+            get_response_task = asyncio.create_task(
+                retry_transient_errors(
+                    client.stub.FunctionGetOutputs,
+                    request,
+                    max_retries=20,
+                    attempt_timeout=OUTPUTS_TIMEOUT + ATTEMPT_TIMEOUT_GRACE_PERIOD,
+                )
             )
+            map_done_task = asyncio.create_task(map_done_event.wait())
+            done, pending = await asyncio.wait([get_response_task, map_done_task], return_when=FIRST_COMPLETED)
+            if get_response_task in done:
+                map_done_task.cancel()
+                response = get_response_task.result()
+            else:
+                assert map_done_event.is_set()
+                # map is done, cancel the pending call
+                get_response_task.cancel()
+                # not strictly necessary - don't leave dangling task
+                await asyncio.gather(get_response_task, return_exceptions=True)
+                return
 
             last_entry_id = response.last_entry_id
             now_seconds = int(time.time())
@@ -288,7 +313,7 @@ async def _map_invocation(
 
                 if output_type == _OutputType.SUCCESSFUL_COMPLETION or output_type == _OutputType.FAILED_COMPLETION:
                     completed_outputs.add(item.input_id)
-                    outputs_completed += 1
+                    update_state(set_outputs_completed=outputs_completed + 1)
                     yield item
 
     async def get_all_outputs_and_clean_up():
@@ -328,7 +353,6 @@ async def _map_invocation(
         async_map_ordered(get_all_outputs_and_clean_up(), fetch_output, concurrency=BLOB_MAX_PARALLELISM)
     ) as streamer:
         async for idx, output in streamer:
-            count_update()
             if not order_outputs:
                 yield _OutputValue(output)
             else:
@@ -401,7 +425,7 @@ async def _map_helper(
     """
 
     raw_input_queue: Any = SynchronizedQueue()  # type: ignore
-    raw_input_queue.init()
+    await raw_input_queue.init.aio()
 
     async def feed_queue():
         async with aclosing(async_input_gen) as streamer:
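
The interesting part of this refactor is how `get_all_outputs` now exits: instead of re-checking a counter condition each iteration, it races the long-poll RPC against `map_done_event` with `asyncio.wait(..., return_when=FIRST_COMPLETED)` and cancels whichever task lost. A self-contained sketch of that pattern (stdlib only; `long_poll` stands in for the `FunctionGetOutputs` call):

```python
import asyncio


async def long_poll() -> str:
    """Stands in for a slow server call such as FunctionGetOutputs."""
    await asyncio.sleep(10)
    return "outputs"


async def consume(done: asyncio.Event) -> None:
    while not done.is_set():
        poll_task = asyncio.create_task(long_poll())
        done_task = asyncio.create_task(done.wait())
        finished, _pending = await asyncio.wait(
            {poll_task, done_task}, return_when=asyncio.FIRST_COMPLETED
        )
        if poll_task in finished:
            done_task.cancel()  # drop the event waiter; the loop re-checks is_set()
            print(poll_task.result())
        else:
            # The "done" event fired first: cancel the in-flight poll and reap
            # it so it doesn't linger as a dangling task, then exit promptly.
            poll_task.cancel()
            await asyncio.gather(poll_task, return_exceptions=True)
            return


async def main() -> None:
    done = asyncio.Event()
    asyncio.get_running_loop().call_later(0.1, done.set)  # signal "done" shortly
    await consume(done)


asyncio.run(main())
```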
modal/runner.py
CHANGED
@@ -9,7 +9,6 @@ import dataclasses
 import os
 import time
 import typing
-import warnings
 from collections.abc import AsyncGenerator
 from multiprocessing.synchronize import Event
 from typing import TYPE_CHECKING, Any, Optional, TypeVar
@@ -296,12 +295,8 @@ async def _run_app(
 
     output_mgr = _get_output_manager()
     if interactive and output_mgr is None:
-        warnings.warn(
-
-            "Use 'with modal.enable_output():' to enable interactive mode and see logs.",
-            stacklevel=2,
-        )
-        interactive = False
+        msg = "Interactive mode requires output to be enabled. (Use the the `modal.enable_output()` context manager.)"
+        raise InvalidError(msg)
 
     running_app: RunningApp = await _init_local_app_new(
         client,
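
Behavioral note: `_run_app` previously downgraded `interactive=True` to non-interactive with a warning; it now raises `InvalidError` when no output manager is active. A hedged sketch of the caller-side fix, assuming the public `app.run(...)` forwards an `interactive` flag to `_run_app`:

```python
# Hypothetical caller-side adjustment for the stricter behavior above.
import modal

app = modal.App("interactive-example")  # illustrative app name

# Without enable_output(), an interactive run now raises InvalidError
# instead of silently falling back to non-interactive mode.
with modal.enable_output():
    with app.run(interactive=True):
        pass  # interactive session body
```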
modal/sandbox.py
CHANGED
@@ -100,6 +100,7 @@ class _Sandbox(_Object, type_prefix="sb"):
         volumes: dict[Union[str, os.PathLike], Union[_Volume, _CloudBucketMount]] = {},
         pty_info: Optional[api_pb2.PTYInfo] = None,
         encrypted_ports: Sequence[int] = [],
+        h2_ports: Sequence[int] = [],
         unencrypted_ports: Sequence[int] = [],
         proxy: Optional[_Proxy] = None,
         _experimental_scheduler_placement: Optional[SchedulerPlacement] = None,
@@ -155,6 +156,12 @@ class _Sandbox(_Object, type_prefix="sb"):
 
         open_ports = [api_pb2.PortSpec(port=port, unencrypted=False) for port in encrypted_ports]
         open_ports.extend([api_pb2.PortSpec(port=port, unencrypted=True) for port in unencrypted_ports])
+        open_ports.extend(
+            [
+                api_pb2.PortSpec(port=port, unencrypted=False, tunnel_type=api_pb2.TUNNEL_TYPE_H2)
+                for port in h2_ports
+            ]
+        )
 
         if block_network:
             # If the network is blocked, cidr_allowlist is invalid as we don't allow any network access.
@@ -240,6 +247,8 @@ class _Sandbox(_Object, type_prefix="sb"):
         pty_info: Optional[api_pb2.PTYInfo] = None,
         # List of ports to tunnel into the sandbox. Encrypted ports are tunneled with TLS.
         encrypted_ports: Sequence[int] = [],
+        # List of encrypted ports to tunnel into the sandbox, using HTTP/2.
+        h2_ports: Sequence[int] = [],
         # List of ports to tunnel into the sandbox without encryption.
         unencrypted_ports: Sequence[int] = [],
         # Reference to a Modal Proxy to use in front of this Sandbox.
@@ -283,6 +292,7 @@ class _Sandbox(_Object, type_prefix="sb"):
             volumes=volumes,
             pty_info=pty_info,
             encrypted_ports=encrypted_ports,
+            h2_ports=h2_ports,
             unencrypted_ports=unencrypted_ports,
             proxy=proxy,
             _experimental_enable_snapshot=_experimental_enable_snapshot,
@@ -320,6 +330,8 @@ class _Sandbox(_Object, type_prefix="sb"):
         pty_info: Optional[api_pb2.PTYInfo] = None,
         # List of ports to tunnel into the sandbox. Encrypted ports are tunneled with TLS.
         encrypted_ports: Sequence[int] = [],
+        # List of encrypted ports to tunnel into the sandbox, using HTTP/2.
+        h2_ports: Sequence[int] = [],
         # List of ports to tunnel into the sandbox without encryption.
         unencrypted_ports: Sequence[int] = [],
         # Reference to a Modal Proxy to use in front of this Sandbox.
@@ -359,6 +371,7 @@ class _Sandbox(_Object, type_prefix="sb"):
             volumes=volumes,
             pty_info=pty_info,
             encrypted_ports=encrypted_ports,
+            h2_ports=h2_ports,
             unencrypted_ports=unencrypted_ports,
             proxy=proxy,
             _experimental_scheduler_placement=_experimental_scheduler_placement,
@@ -517,7 +530,7 @@ class _Sandbox(_Object, type_prefix="sb"):
 
         return self._tunnels
 
-    async def terminate(self):
+    async def terminate(self) -> None:
         """Terminate Sandbox execution.
 
         This is a no-op if the Sandbox has already finished running."""
@@ -525,7 +538,6 @@ class _Sandbox(_Object, type_prefix="sb"):
         await retry_transient_errors(
             self._client.stub.SandboxTerminate, api_pb2.SandboxTerminateRequest(sandbox_id=self.object_id)
         )
-        await self.wait(raise_on_termination=False)
 
     async def poll(self) -> Optional[int]:
         """Check if the Sandbox has finished running.
@@ -541,7 +553,7 @@ class _Sandbox(_Object, type_prefix="sb"):
 
         return self.returncode
 
-    async def _get_task_id(self):
+    async def _get_task_id(self) -> str:
         while not self._task_id:
             resp = await self._client.stub.SandboxGetTaskId(api_pb2.SandboxGetTaskIdRequest(sandbox_id=self.object_id))
             self._task_id = resp.task_id
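
Usage-wise, `h2_ports` slots in next to the existing port lists: each listed port gets an encrypted tunnel whose `PortSpec` carries `tunnel_type=TUNNEL_TYPE_H2`. A hedged sketch (app name and entrypoint are illustrative):

```python
# Hedged sketch of the new h2_ports parameter on Sandbox.create.
import modal

app = modal.App.lookup("sandbox-h2-demo", create_if_missing=True)  # illustrative

sb = modal.Sandbox.create(
    "python", "-m", "http.server", "8080",
    app=app,
    encrypted_ports=[8080],  # TLS tunnel, as before
    h2_ports=[8443],         # new: encrypted tunnel negotiated as HTTP/2
)
tunnels = sb.tunnels()  # container port -> modal.Tunnel with a public URL
print(tunnels[8080].url)
sb.terminate()  # per the diff above, terminate() no longer waits for shutdown
```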
modal/sandbox.pyi
CHANGED
@@ -58,6 +58,7 @@ class _Sandbox(modal._object._Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy._Proxy] = None,
         _experimental_scheduler_placement: typing.Optional[modal.scheduler_placement.SchedulerPlacement] = None,
@@ -86,6 +87,7 @@ class _Sandbox(modal._object._Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy._Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -116,6 +118,7 @@ class _Sandbox(modal._object._Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy._Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -129,9 +132,9 @@ class _Sandbox(modal._object._Object):
     async def snapshot_filesystem(self, timeout: int = 55) -> modal.image._Image: ...
     async def wait(self, raise_on_termination: bool = True): ...
     async def tunnels(self, timeout: int = 50) -> dict[int, modal._tunnel.Tunnel]: ...
-    async def terminate(self): ...
+    async def terminate(self) -> None: ...
     async def poll(self) -> typing.Optional[int]: ...
-    async def _get_task_id(self): ...
+    async def _get_task_id(self) -> str: ...
     @typing.overload
     async def exec(
         self,
@@ -228,6 +231,7 @@ class Sandbox(modal.object.Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy.Proxy] = None,
         _experimental_scheduler_placement: typing.Optional[modal.scheduler_placement.SchedulerPlacement] = None,
@@ -261,6 +265,7 @@ class Sandbox(modal.object.Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy.Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -293,6 +298,7 @@ class Sandbox(modal.object.Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy.Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -330,6 +336,7 @@ class Sandbox(modal.object.Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy.Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -363,6 +370,7 @@ class Sandbox(modal.object.Object):
         ] = {},
         pty_info: typing.Optional[modal_proto.api_pb2.PTYInfo] = None,
         encrypted_ports: collections.abc.Sequence[int] = [],
+        h2_ports: collections.abc.Sequence[int] = [],
         unencrypted_ports: collections.abc.Sequence[int] = [],
         proxy: typing.Optional[modal.proxy.Proxy] = None,
         _experimental_enable_snapshot: bool = False,
@@ -405,8 +413,8 @@ class Sandbox(modal.object.Object):
     tunnels: __tunnels_spec[typing_extensions.Self]
 
     class __terminate_spec(typing_extensions.Protocol[SUPERSELF]):
-        def __call__(self, /): ...
-        async def aio(self, /): ...
+        def __call__(self, /) -> None: ...
+        async def aio(self, /) -> None: ...
 
     terminate: __terminate_spec[typing_extensions.Self]
 
@@ -417,8 +425,8 @@ class Sandbox(modal.object.Object):
     poll: __poll_spec[typing_extensions.Self]
 
     class ___get_task_id_spec(typing_extensions.Protocol[SUPERSELF]):
-        def __call__(self, /): ...
-        async def aio(self, /): ...
+        def __call__(self, /) -> str: ...
+        async def aio(self, /) -> str: ...
 
     _get_task_id: ___get_task_id_spec[typing_extensions.Self]
 
modal/schedule.py
CHANGED
@@ -30,15 +30,28 @@ class Cron(Schedule):
     We can specify different schedules with cron strings, for example:
 
     ```python
-    modal.Cron("5 4 * * *")  # run at 4:05am every night
-    modal.Cron("0 9 * * 4")  # runs every Thursday 9am
+    modal.Cron("5 4 * * *")  # run at 4:05am UTC every night
+    modal.Cron("0 9 * * 4")  # runs every Thursday at 9am UTC
     ```
 
+    We can also optionally specify a timezone, for example:
+
+    ```python
+    # Run daily at 6am New York time, regardless of whether daylight saving
+    # is in effect (i.e. at 11am UTC in the winter, and 10am UTC in the summer):
+    modal.Cron("0 6 * * *", timezone="America/New_York")
+    ```
+
+    If no timezone is specified, the default is UTC.
     """
 
-    def __init__(self, cron_string: str) -> None:
+    def __init__(
+        self,
+        cron_string: str,
+        timezone: str = "UTC",
+    ) -> None:
         """Construct a schedule that runs according to a cron expression string."""
-        cron = api_pb2.Schedule.Cron(cron_string=cron_string)
+        cron = api_pb2.Schedule.Cron(cron_string=cron_string, timezone=timezone)
         super().__init__(api_pb2.Schedule(cron=cron))
 
 
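
For completeness, a usage sketch of the new `timezone` parameter (IANA zone names; per the docstring above, the default remains UTC). The app name is illustrative:

```python
# Hedged usage sketch for Cron's new timezone parameter.
import modal

app = modal.App("scheduled-jobs")  # illustrative app name

@app.function(schedule=modal.Cron("0 6 * * *", timezone="America/New_York"))
def daily_report():
    # Runs at 6am New York wall-clock time year-round, i.e. 11am UTC in
    # winter and 10am UTC in summer.
    ...
```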