modal 0.74.15__py3-none-any.whl → 0.74.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,15 +1,25 @@
  # Copyright Modal Labs 2022
  import asyncio
  import dataclasses
+ import functools
  import hashlib
- import io
  import os
  import platform
  import time
  from collections.abc import AsyncIterator
  from contextlib import AbstractContextManager, contextmanager
+ from io import BytesIO, FileIO
  from pathlib import Path, PurePosixPath
- from typing import TYPE_CHECKING, Any, BinaryIO, Callable, Optional, Union
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     BinaryIO,
+     Callable,
+     ContextManager,
+     Optional,
+     Union,
+     cast,
+ )
  from urllib.parse import urlparse

  from modal_proto import api_pb2
@@ -43,6 +53,9 @@ DEFAULT_SEGMENT_CHUNK_SIZE = 2**24
  # TODO(dano): remove this once we stop requiring md5 for blobs
  MULTIPART_UPLOAD_THRESHOLD = 1024**3

+ # For block based storage like volumefs2: the size of a block
+ BLOCK_SIZE: int = 8 * 1024 * 1024
+

  @retry(n_attempts=5, base_delay=0.5, timeout=None)
  async def _upload_to_s3_url(
@@ -94,7 +107,7 @@ async def _upload_to_s3_url(


  async def perform_multipart_upload(
-     data_file: Union[BinaryIO, io.BytesIO, io.FileIO],
+     data_file: Union[BinaryIO, BytesIO, FileIO],
      *,
      content_length: int,
      max_part_size: int,
@@ -112,9 +125,9 @@ async def perform_multipart_upload(
      # Give each part its own IO reader object to avoid needing to
      # lock access to the reader's position pointer.
      data_file_readers: list[BinaryIO]
-     if isinstance(data_file, io.BytesIO):
+     if isinstance(data_file, BytesIO):
          view = data_file.getbuffer()  # does not copy data
-         data_file_readers = [io.BytesIO(view) for _ in range(len(part_urls))]
+         data_file_readers = [BytesIO(view) for _ in range(len(part_urls))]
      else:
          filename = data_file.name
          data_file_readers = [open(filename, "rb") for _ in range(len(part_urls))]
@@ -174,7 +187,7 @@ async def _blob_upload(
      upload_hashes: UploadHashes, data: Union[bytes, BinaryIO], stub, progress_report_cb: Optional[Callable] = None
  ) -> str:
      if isinstance(data, bytes):
-         data = io.BytesIO(data)
+         data = BytesIO(data)

      content_length = get_content_length(data)

@@ -368,6 +381,125 @@ def get_file_upload_spec_from_fileobj(fp: BinaryIO, mount_filename: PurePosixPat
          mode,
      )

+ _FileUploadSource2 = Callable[[], ContextManager[BinaryIO]]
+
+ @dataclasses.dataclass
+ class FileUploadSpec2:
+     source: _FileUploadSource2
+     source_description: Union[str, Path]
+
+     path: str
+     # Raw (unencoded 32 byte) SHA256 sum per 8MiB file block
+     blocks_sha256: list[bytes]
+     mode: int  # file permission bits (last 12 bits of st_mode)
+     size: int
+
+
+     @staticmethod
+     async def from_path(
+         filename: Path,
+         mount_filename: PurePosixPath,
+         mode: Optional[int] = None,
+     ) -> "FileUploadSpec2":
+         # Python appears to give files 0o666 bits on Windows (equal for user, group, and global),
+         # so we mask those out to 0o755 for compatibility with POSIX-based permissions.
+         mode = mode or os.stat(filename).st_mode & (0o7777 if platform.system() != "Windows" else 0o7755)
+
+         def source():
+             return open(filename, "rb")
+
+         return await FileUploadSpec2._create(
+             source,
+             filename,
+             mount_filename,
+             mode,
+         )
+
+
+     @staticmethod
+     async def from_fileobj(
+         source_fp: Union[BinaryIO, BytesIO],
+         mount_filename: PurePosixPath,
+         mode: int
+     ) -> "FileUploadSpec2":
+         try:
+             fileno = source_fp.fileno()
+             def source():
+                 new_fd = os.dup(fileno)
+                 fp = os.fdopen(new_fd, "rb")
+                 fp.seek(0)
+                 return fp
+
+         except OSError:
+             # `.fileno()` not available; assume BytesIO-like type
+             source_fp = cast(BytesIO, source_fp)
+             buffer = source_fp.getbuffer()
+             def source():
+                 return BytesIO(buffer)
+
+         return await FileUploadSpec2._create(
+             source,
+             str(source),
+             mount_filename,
+             mode,
+         )
+
+
+     @staticmethod
+     async def _create(
+         source: _FileUploadSource2,
+         source_description: Union[str, Path],
+         mount_filename: PurePosixPath,
+         mode: int,
+     ) -> "FileUploadSpec2":
+         # Current position is ignored - we always upload from position 0
+         with source() as source_fp:
+             source_fp.seek(0, os.SEEK_END)
+             size = source_fp.tell()
+
+         blocks_sha256 = await hash_blocks_sha256(source, size)
+
+         return FileUploadSpec2(
+             source=source,
+             source_description=source_description,
+             path=mount_filename.as_posix(),
+             blocks_sha256=blocks_sha256,
+             mode=mode & 0o7777,
+             size=size,
+         )
+
+
+ async def hash_blocks_sha256(
+     source: _FileUploadSource2,
+     size: int,
+ ) -> list[bytes]:
+     def ceildiv(a: int, b: int) -> int:
+         return -(a // -b)
+
+     num_blocks = ceildiv(size, BLOCK_SIZE)
+
+     def hash_block_sha256(block_idx: int) -> bytes:
+         sha256_hash = hashlib.sha256()
+         block_start = block_idx * BLOCK_SIZE
+
+         with source() as block_fp:
+             block_fp.seek(block_start)
+
+             num_bytes_read = 0
+             while num_bytes_read < BLOCK_SIZE:
+                 chunk = block_fp.read(BLOCK_SIZE - num_bytes_read)
+
+                 if not chunk:
+                     break
+
+                 num_bytes_read += len(chunk)
+                 sha256_hash.update(chunk)
+
+         return sha256_hash.digest()
+
+     tasks = (asyncio.to_thread(functools.partial(hash_block_sha256, idx)) for idx in range(num_blocks))
+     return await asyncio.gather(*tasks)
+

  def use_md5(url: str) -> bool:
      """This takes an upload URL in S3 and returns whether we should attach a checksum.
modal/cli/volume.py CHANGED
@@ -19,7 +19,7 @@ from modal.cli._download import _volume_download
  from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table, timestamp_to_local
  from modal.client import _Client
  from modal.environments import ensure_env
- from modal.volume import _Volume, _VolumeUploadContextManager
+ from modal.volume import _AbstractVolumeUploadContextManager, _Volume
  from modal_proto import api_pb2

  volume_cli = Typer(
@@ -198,8 +198,12 @@ async def put(
      if Path(local_path).is_dir():
          with progress_handler.live:
              try:
-                 async with _VolumeUploadContextManager(
-                     vol.object_id, vol._client, progress_cb=progress_handler.progress, force=force
+                 async with _AbstractVolumeUploadContextManager.resolve(
+                     vol._metadata.version,
+                     vol.object_id,
+                     vol._client,
+                     progress_cb=progress_handler.progress,
+                     force=force
                  ) as batch:
                      batch.put_directory(local_path, remote_path)
              except FileExistsError as exc:
@@ -210,8 +214,12 @@ async def put(
      else:
          with progress_handler.live:
              try:
-                 async with _VolumeUploadContextManager(
-                     vol.object_id, vol._client, progress_cb=progress_handler.progress, force=force
+                 async with _AbstractVolumeUploadContextManager.resolve(
+                     vol._metadata.version,
+                     vol.object_id,
+                     vol._client,
+                     progress_cb=progress_handler.progress,
+                     force=force
                  ) as batch:
                      batch.put_file(local_path, remote_path)

modal/client.pyi CHANGED
@@ -27,7 +27,7 @@ class _Client:
      _snapshotted: bool

      def __init__(
-         self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "0.74.15"
+         self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "0.74.16"
      ): ...
      def is_closed(self) -> bool: ...
      @property
@@ -85,7 +85,7 @@ class Client:
      _snapshotted: bool

      def __init__(
-         self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "0.74.15"
+         self, server_url: str, client_type: int, credentials: typing.Optional[tuple[str, str]], version: str = "0.74.16"
      ): ...
      def is_closed(self) -> bool: ...
      @property
modal/functions.pyi CHANGED
@@ -201,11 +201,11 @@ class Function(

      _call_generator_nowait: ___call_generator_nowait_spec[typing_extensions.Self]

-     class __remote_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
+     class __remote_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
          def __call__(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...
          async def aio(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER: ...

-     remote: __remote_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
+     remote: __remote_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]

      class __remote_gen_spec(typing_extensions.Protocol[SUPERSELF]):
          def __call__(self, *args, **kwargs) -> typing.Generator[typing.Any, None, None]: ...
@@ -220,19 +220,19 @@ class Function(
          self, *args: modal._functions.P.args, **kwargs: modal._functions.P.kwargs
      ) -> modal._functions.OriginalReturnType: ...

-     class ___experimental_spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
+     class ___experimental_spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
          def __call__(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
          async def aio(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...

      _experimental_spawn: ___experimental_spawn_spec[
-         modal._functions.P, modal._functions.ReturnType, typing_extensions.Self
+         modal._functions.ReturnType, modal._functions.P, typing_extensions.Self
      ]

-     class __spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
+     class __spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
          def __call__(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...
          async def aio(self, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]: ...

-     spawn: __spawn_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
+     spawn: __spawn_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]

      def get_raw_f(self) -> collections.abc.Callable[..., typing.Any]: ...

modal/volume.py CHANGED
@@ -10,16 +10,19 @@ import time
  import typing
  from collections.abc import AsyncGenerator, AsyncIterator, Generator, Sequence
  from dataclasses import dataclass
+ from io import BytesIO
  from pathlib import Path, PurePosixPath
  from typing import (
      IO,
      Any,
+     Awaitable,
      BinaryIO,
      Callable,
      Optional,
      Union,
  )

+ from google.protobuf.message import Message
  from grpclib import GRPCError, Status
  from synchronicity.async_wrap import asynccontextmanager

@@ -31,7 +34,9 @@ from ._object import EPHEMERAL_OBJECT_HEARTBEAT_SLEEP, _get_environment_name, _O
  from ._resolver import Resolver
  from ._utils.async_utils import TaskContext, aclosing, async_map, asyncnullcontext, synchronize_api
  from ._utils.blob_utils import (
+     BLOCK_SIZE,
      FileUploadSpec,
+     FileUploadSpec2,
      blob_iter,
      blob_upload_file,
      get_file_upload_spec_from_fileobj,
@@ -39,6 +44,7 @@ from ._utils.blob_utils import (
  )
  from ._utils.deprecation import deprecation_error, deprecation_warning, renamed_parameter
  from ._utils.grpc_utils import retry_transient_errors
+ from ._utils.http_utils import ClientSessionRegistry
  from ._utils.name_utils import check_object_name
  from .client import _Client
  from .config import logger
@@ -119,6 +125,7 @@ class _Volume(_Object, type_prefix="vo"):
      """

      _lock: Optional[asyncio.Lock] = None
+     _metadata: "typing.Optional[api_pb2.VolumeMetadata]"

      async def _get_lock(self):
          # To (mostly*) prevent multiple concurrent operations on the same volume, which can cause problems under
@@ -171,10 +178,21 @@ class _Volume(_Object, type_prefix="vo"):
                  version=version,
              )
              response = await resolver.client.stub.VolumeGetOrCreate(req)
-             self._hydrate(response.volume_id, resolver.client, None)
+             self._hydrate(response.volume_id, resolver.client, response.metadata)

          return _Volume._from_loader(_load, "Volume()", hydrate_lazily=True)

+     def _hydrate_metadata(self, metadata: Optional[Message]):
+         if metadata and isinstance(metadata, api_pb2.VolumeMetadata):
+             self._metadata = metadata
+         else:
+             raise TypeError(
+                 "_hydrate_metadata() requires an `api_pb2.VolumeMetadata` to determine volume version"
+             )
+
+     def _get_metadata(self) -> Optional[Message]:
+         return self._metadata
+
      @classmethod
      @asynccontextmanager
      async def ephemeral(
@@ -209,7 +227,7 @@ class _Volume(_Object, type_prefix="vo"):
          async with TaskContext() as tc:
              request = api_pb2.VolumeHeartbeatRequest(volume_id=response.volume_id)
              tc.infinite_loop(lambda: client.stub.VolumeHeartbeat(request), sleep=_heartbeat_sleep)
-             yield cls._new_hydrated(response.volume_id, client, None, is_another_app=True)
+             yield cls._new_hydrated(response.volume_id, client, response.metadata, is_another_app=True)

      @staticmethod
      @renamed_parameter((2024, 12, 18), "label", "name")
@@ -481,7 +499,7 @@ class _Volume(_Object, type_prefix="vo"):
          await retry_transient_errors(self._client.stub.VolumeCopyFiles, request, base_delay=1)

      @live_method
-     async def batch_upload(self, force: bool = False) -> "_VolumeUploadContextManager":
+     async def batch_upload(self, force: bool = False) -> "_AbstractVolumeUploadContextManager":
          """
          Initiate a batched upload to a volume.

@@ -499,7 +517,13 @@ class _Volume(_Object, type_prefix="vo"):
              batch.put_file(io.BytesIO(b"some data"), "/foobar")
          ```
          """
-         return _VolumeUploadContextManager(self.object_id, self._client, force=force)
+         return _AbstractVolumeUploadContextManager.resolve(
+             self._metadata.version,
+             self.object_id,
+             self._client,
+             force=force
+         )
+

      @live_method
      async def _instance_delete(self):
@@ -527,7 +551,57 @@ class _Volume(_Object, type_prefix="vo"):
          await retry_transient_errors(obj._client.stub.VolumeRename, req)


- class _VolumeUploadContextManager:
+ Volume = synchronize_api(_Volume)
+
+ # TODO(dflemstr): Find a way to add ABC or AbstractAsyncContextManager superclasses while keeping synchronicity happy.
+ class _AbstractVolumeUploadContextManager:
+     async def __aenter__(self):
+         ...
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         ...
+
+
+     def put_file(
+         self,
+         local_file: Union[Path, str, BinaryIO, BytesIO],
+         remote_path: Union[PurePosixPath, str],
+         mode: Optional[int] = None,
+     ):
+         ...
+
+     def put_directory(
+         self,
+         local_path: Union[Path, str],
+         remote_path: Union[PurePosixPath, str],
+         recursive: bool = True,
+     ):
+         ...
+
+     @staticmethod
+     def resolve(
+         version: "modal_proto.api_pb2.VolumeFsVersion.ValueType",
+         object_id: str,
+         client,
+         progress_cb: Optional[Callable[..., Any]] = None,
+         force: bool = False
+     ) -> "_AbstractVolumeUploadContextManager":
+
+         if version in [
+             None,
+             api_pb2.VolumeFsVersion.VOLUME_FS_VERSION_UNSPECIFIED,
+             api_pb2.VolumeFsVersion.VOLUME_FS_VERSION_V1
+         ]:
+             return _VolumeUploadContextManager(object_id, client, progress_cb=progress_cb, force=force)
+         elif version == api_pb2.VolumeFsVersion.VOLUME_FS_VERSION_V2:
+             return _VolumeUploadContextManager2(object_id, client, progress_cb=progress_cb, force=force)
+         else:
+             raise RuntimeError(f"unsupported volume version: {version}")
+
+
+ AbstractVolumeUploadContextManager = synchronize_api(_AbstractVolumeUploadContextManager)
+
+ class _VolumeUploadContextManager(_AbstractVolumeUploadContextManager):
      """Context manager for batch-uploading files to a Volume."""

      _volume_id: str
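
The new _AbstractVolumeUploadContextManager.resolve() shown above dispatches on the volume's filesystem version: V1 (or unspecified) volumes keep the existing _VolumeUploadContextManager, while V2 volumes get the block-based _VolumeUploadContextManager2. Calling code goes through Volume.batch_upload() and is unchanged; a minimal usage sketch (the volume name here is hypothetical, and the snippet would be driven by asyncio.run or a Modal app):

    import io
    import modal

    vol = modal.Volume.from_name("my-volume", create_if_missing=True)

    async def upload():
        # batch_upload() resolves to whichever upload context manager matches
        # the volume's filesystem version; the calling pattern stays the same.
        async with vol.batch_upload() as batch:
            batch.put_file(io.BytesIO(b"some data"), "/foobar")
            batch.put_directory("local-dir/", "/remote-dir")
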
@@ -585,7 +659,7 @@ class _VolumeUploadContextManager:

      def put_file(
          self,
-         local_file: Union[Path, str, BinaryIO],
+         local_file: Union[Path, str, BinaryIO, BytesIO],
          remote_path: Union[PurePosixPath, str],
          mode: Optional[int] = None,
      ):
@@ -678,9 +752,204 @@ class _VolumeUploadContextManager:
          )


- Volume = synchronize_api(_Volume)
  VolumeUploadContextManager = synchronize_api(_VolumeUploadContextManager)

+ _FileUploader2 = Callable[[], Awaitable[FileUploadSpec2]]
+
+ class _VolumeUploadContextManager2(_AbstractVolumeUploadContextManager):
+     """Context manager for batch-uploading files to a Volume version 2."""
+
+     _volume_id: str
+     _client: _Client
+     _force: bool
+     _progress_cb: Callable[..., Any]
+     _uploader_generators: list[Generator[_FileUploader2]]
+
+     def __init__(
+         self, volume_id: str, client: _Client, progress_cb: Optional[Callable[..., Any]] = None, force: bool = False
+     ):
+         """mdmd:hidden"""
+         self._volume_id = volume_id
+         self._client = client
+         self._uploader_generators = []
+         self._progress_cb = progress_cb or (lambda *_, **__: None)
+         self._force = force
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if not exc_val:
+             # Flatten all the uploads yielded by the upload generators in the batch
+             def gen_upload_providers():
+                 for gen in self._uploader_generators:
+                     yield from gen
+
+             async def gen_file_upload_specs() -> list[FileUploadSpec2]:
+                 uploads = [asyncio.create_task(fut()) for fut in gen_upload_providers()]
+                 logger.debug(f"Computing checksums for {len(uploads)} files")
+
+                 file_specs = []
+                 for file_spec in asyncio.as_completed(uploads):
+                     file_specs.append(await file_spec)
+                 return file_specs
+
+             upload_specs = await gen_file_upload_specs()
+             await self._put_file_specs(upload_specs)
+
+
+     def put_file(
+         self,
+         local_file: Union[Path, str, BinaryIO, BytesIO],
+         remote_path: Union[PurePosixPath, str],
+         mode: Optional[int] = None,
+     ):
+         """Upload a file from a local file or file-like object.
+
+         Will create any needed parent directories automatically.
+
+         If `local_file` is a file-like object it must remain readable for the lifetime of the batch.
+         """
+         remote_path = PurePosixPath(remote_path).as_posix()
+         if remote_path.endswith("/"):
+             raise ValueError(f"remote_path ({remote_path}) must refer to a file - cannot end with /")
+
+         def gen():
+             if isinstance(local_file, str) or isinstance(local_file, Path):
+                 yield lambda: FileUploadSpec2.from_path(local_file, PurePosixPath(remote_path), mode)
+             else:
+                 yield lambda: FileUploadSpec2.from_fileobj(local_file, PurePosixPath(remote_path), mode or 0o644)
+
+         self._uploader_generators.append(gen())
+
+     def put_directory(
+         self,
+         local_path: Union[Path, str],
+         remote_path: Union[PurePosixPath, str],
+         recursive: bool = True,
+     ):
+         """
+         Upload all files in a local directory.
+
+         Will create any needed parent directories automatically.
+         """
+         local_path = Path(local_path)
+         assert local_path.is_dir()
+         remote_path = PurePosixPath(remote_path)
+
+         def create_spec(subpath):
+             relpath_str = subpath.relative_to(local_path)
+             return lambda: FileUploadSpec2.from_path(subpath, remote_path / relpath_str)
+
+         def gen():
+             glob = local_path.rglob("*") if recursive else local_path.glob("*")
+             for subpath in glob:
+                 # Skip directories and unsupported file types (e.g. block devices)
+                 if subpath.is_file():
+                     yield create_spec(subpath)
+
+         self._uploader_generators.append(gen())
+
+     async def _put_file_specs(self, file_specs: list[FileUploadSpec2]):
+         put_responses = {}
+         # num_blocks_total = sum(len(file_spec.blocks_sha256) for file_spec in file_specs)
+
+         # We should only need two iterations: Once to possibly get some missing_blocks; the second time we should have
+         # all blocks uploaded
+         for _ in range(2):
+             files = []
+
+             for file_spec in file_specs:
+                 blocks = [
+                     api_pb2.VolumePutFiles2Request.Block(
+                         contents_sha256=block_sha256,
+                         put_response=put_responses.get(block_sha256)
+                     ) for block_sha256 in file_spec.blocks_sha256
+                 ]
+                 files.append(api_pb2.VolumePutFiles2Request.File(
+                     path=file_spec.path,
+                     mode=file_spec.mode,
+                     size=file_spec.size,
+                     blocks=blocks
+                 ))
+
+             request = api_pb2.VolumePutFiles2Request(
+                 volume_id=self._volume_id,
+                 files=files,
+                 disallow_overwrite_existing_files=not self._force,
+             )
+
+             try:
+                 response = await retry_transient_errors(self._client.stub.VolumePutFiles2, request, base_delay=1)
+             except GRPCError as exc:
+                 raise FileExistsError(exc.message) if exc.status == Status.ALREADY_EXISTS else exc
+
+             if not response.missing_blocks:
+                 break
+
+             await _put_missing_blocks(file_specs, response.missing_blocks, put_responses, self._progress_cb)
+         else:
+             raise RuntimeError("Did not succeed at uploading all files despite supplying all missing blocks")
+
+         self._progress_cb(complete=True)
+
+
+ VolumeUploadContextManager2 = synchronize_api(_VolumeUploadContextManager2)
+
+
+ async def _put_missing_blocks(
+     file_specs: list[FileUploadSpec2],
+     # TODO(dflemstr): Element type is `api_pb2.VolumePutFiles2Response.MissingBlock` but synchronicity gets confused
+     # by the nested class (?)
+     missing_blocks: list,
+     put_responses: dict[bytes, bytes],
+     progress_cb: Callable[..., Any]
+ ):
+     async def put_missing_block(
+         # TODO(dflemstr): Type is `api_pb2.VolumePutFiles2Response.MissingBlock` but synchronicity gets confused
+         # by the nested class (?)
+         missing_block
+     ) -> (bytes, bytes):
+         # Lazily import to keep the eager loading time of this module down
+         from ._utils.bytes_io_segment_payload import BytesIOSegmentPayload
+
+         assert isinstance(missing_block, api_pb2.VolumePutFiles2Response.MissingBlock)
+
+         file_spec = file_specs[missing_block.file_index]
+         # TODO(dflemstr): What if the underlying file has changed here in the meantime; should we check the
+         # hash again just to be sure?
+         block_sha256 = file_spec.blocks_sha256[missing_block.block_index]
+         block_start = missing_block.block_index * BLOCK_SIZE
+         block_length = min(BLOCK_SIZE, file_spec.size - block_start)
+
+         progress_name = f"{file_spec.path} block {missing_block.block_index + 1} / {len(file_spec.blocks_sha256)}"
+         progress_task_id = progress_cb(name=progress_name, size=file_spec.size)
+
+         with file_spec.source() as source_fp:
+             payload = BytesIOSegmentPayload(
+                 source_fp,
+                 block_start,
+                 block_length,
+                 progress_report_cb=functools.partial(progress_cb, progress_task_id)
+             )
+
+             async with ClientSessionRegistry.get_session().put(
+                 missing_block.put_url,
+                 data=payload,
+             ) as response:
+                 response.raise_for_status()
+                 resp_data = await response.content.read()
+
+         return block_sha256, resp_data
+
+     tasks = [
+         asyncio.create_task(put_missing_block(missing_block))
+         for missing_block in missing_blocks
+     ]
+     for task_result in asyncio.as_completed(tasks):
+         digest, resp = await task_result
+         put_responses[digest] = resp
+

  def _open_files_error_annotation(mount_path: str) -> Optional[str]:
      if platform.system() != "Linux":
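
The _put_file_specs and _put_missing_blocks code above implements a two-pass, content-addressed upload: the client sends every file's block digests in VolumePutFiles2, the server replies with the blocks it is missing, the client PUTs those blocks to the returned URLs, and the second VolumePutFiles2 call (now carrying the upload responses) is expected to succeed. The control flow, reduced to a schematic sketch with illustrative names (not Modal's API):

    from typing import Callable

    def put_files(
        manifest: list[bytes],                                 # per-block SHA-256 digests
        send_manifest: Callable[[list[bytes]], list[bytes]],   # returns digests the server lacks
        upload_block: Callable[[bytes], None],                 # e.g. HTTP PUT to a presigned URL
    ) -> None:
        # Two passes should suffice: after the first pass every missing block
        # has been uploaded, so the second manifest submission should go through.
        for _ in range(2):
            missing = send_manifest(manifest)
            if not missing:
                return
            for digest in missing:
                upload_block(digest)
        raise RuntimeError("blocks still missing after uploading them")
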
modal/volume.pyi CHANGED
@@ -1,6 +1,8 @@
+ import _io
  import asyncio.locks
  import collections.abc
  import enum
+ import google.protobuf.message
  import modal._object
  import modal._utils.blob_utils
  import modal.client
@@ -36,6 +38,7 @@ class FileEntry:

  class _Volume(modal._object._Object):
      _lock: typing.Optional[asyncio.locks.Lock]
+     _metadata: typing.Optional[modal_proto.api_pb2.VolumeMetadata]

      async def _get_lock(self): ...
      @staticmethod
@@ -46,6 +49,8 @@ class _Volume(modal._object._Object):
          create_if_missing: bool = False,
          version: typing.Optional[int] = None,
      ) -> _Volume: ...
+     def _hydrate_metadata(self, metadata: typing.Optional[google.protobuf.message.Message]): ...
+     def _get_metadata(self) -> typing.Optional[google.protobuf.message.Message]: ...
      @classmethod
      def ephemeral(
          cls: type[_Volume],
@@ -80,7 +85,7 @@ class _Volume(modal._object._Object):
      async def read_file_into_fileobj(self, path: str, fileobj: typing.IO[bytes]) -> int: ...
      async def remove_file(self, path: str, recursive: bool = False) -> None: ...
      async def copy_files(self, src_paths: collections.abc.Sequence[str], dst_path: str) -> None: ...
-     async def batch_upload(self, force: bool = False) -> _VolumeUploadContextManager: ...
+     async def batch_upload(self, force: bool = False) -> _AbstractVolumeUploadContextManager: ...
      async def _instance_delete(self): ...
      @staticmethod
      async def delete(
@@ -95,44 +100,11 @@ class _Volume(modal._object._Object):
          environment_name: typing.Optional[str] = None,
      ): ...

- class _VolumeUploadContextManager:
-     _volume_id: str
-     _client: modal.client._Client
-     _force: bool
-     progress_cb: collections.abc.Callable[..., typing.Any]
-     _upload_generators: list[
-         collections.abc.Generator[collections.abc.Callable[[], modal._utils.blob_utils.FileUploadSpec], None, None]
-     ]
-
-     def __init__(
-         self,
-         volume_id: str,
-         client: modal.client._Client,
-         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
-         force: bool = False,
-     ): ...
-     async def __aenter__(self): ...
-     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
-     def put_file(
-         self,
-         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO],
-         remote_path: typing.Union[pathlib.PurePosixPath, str],
-         mode: typing.Optional[int] = None,
-     ): ...
-     def put_directory(
-         self,
-         local_path: typing.Union[pathlib.Path, str],
-         remote_path: typing.Union[pathlib.PurePosixPath, str],
-         recursive: bool = True,
-     ): ...
-     async def _upload_file(
-         self, file_spec: modal._utils.blob_utils.FileUploadSpec
-     ) -> modal_proto.api_pb2.MountFile: ...
-
  SUPERSELF = typing.TypeVar("SUPERSELF", covariant=True)

  class Volume(modal.object.Object):
      _lock: typing.Optional[asyncio.locks.Lock]
+     _metadata: typing.Optional[modal_proto.api_pb2.VolumeMetadata]

      def __init__(self, *args, **kwargs): ...

@@ -150,6 +122,8 @@ class Volume(modal.object.Object):
          create_if_missing: bool = False,
          version: typing.Optional[int] = None,
      ) -> Volume: ...
+     def _hydrate_metadata(self, metadata: typing.Optional[google.protobuf.message.Message]): ...
+     def _get_metadata(self) -> typing.Optional[google.protobuf.message.Message]: ...
      @classmethod
      def ephemeral(
          cls: type[Volume],
@@ -256,8 +230,8 @@ class Volume(modal.object.Object):
      copy_files: __copy_files_spec[typing_extensions.Self]

      class __batch_upload_spec(typing_extensions.Protocol[SUPERSELF]):
-         def __call__(self, force: bool = False) -> VolumeUploadContextManager: ...
-         async def aio(self, force: bool = False) -> VolumeUploadContextManager: ...
+         def __call__(self, force: bool = False) -> AbstractVolumeUploadContextManager: ...
+         async def aio(self, force: bool = False) -> AbstractVolumeUploadContextManager: ...

      batch_upload: __batch_upload_spec[typing_extensions.Self]

@@ -303,7 +277,92 @@ class Volume(modal.object.Object):

      rename: __rename_spec

- class VolumeUploadContextManager:
+ class _AbstractVolumeUploadContextManager:
+     async def __aenter__(self): ...
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+     def put_file(
+         self,
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         mode: typing.Optional[int] = None,
+     ): ...
+     def put_directory(
+         self,
+         local_path: typing.Union[pathlib.Path, str],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         recursive: bool = True,
+     ): ...
+     @staticmethod
+     def resolve(
+         version: int,
+         object_id: str,
+         client,
+         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
+         force: bool = False,
+     ) -> _AbstractVolumeUploadContextManager: ...
+
+ class AbstractVolumeUploadContextManager:
+     def __init__(self, /, *args, **kwargs): ...
+     def __enter__(self): ...
+     async def __aenter__(self): ...
+     def __exit__(self, exc_type, exc_val, exc_tb): ...
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+     def put_file(
+         self,
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         mode: typing.Optional[int] = None,
+     ): ...
+     def put_directory(
+         self,
+         local_path: typing.Union[pathlib.Path, str],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         recursive: bool = True,
+     ): ...
+     @staticmethod
+     def resolve(
+         version: int,
+         object_id: str,
+         client,
+         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
+         force: bool = False,
+     ) -> AbstractVolumeUploadContextManager: ...
+
+ class _VolumeUploadContextManager(_AbstractVolumeUploadContextManager):
+     _volume_id: str
+     _client: modal.client._Client
+     _force: bool
+     progress_cb: collections.abc.Callable[..., typing.Any]
+     _upload_generators: list[
+         collections.abc.Generator[collections.abc.Callable[[], modal._utils.blob_utils.FileUploadSpec], None, None]
+     ]
+
+     def __init__(
+         self,
+         volume_id: str,
+         client: modal.client._Client,
+         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
+         force: bool = False,
+     ): ...
+     async def __aenter__(self): ...
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+     def put_file(
+         self,
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         mode: typing.Optional[int] = None,
+     ): ...
+     def put_directory(
+         self,
+         local_path: typing.Union[pathlib.Path, str],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         recursive: bool = True,
+     ): ...
+     async def _upload_file(
+         self, file_spec: modal._utils.blob_utils.FileUploadSpec
+     ) -> modal_proto.api_pb2.MountFile: ...
+
+ class VolumeUploadContextManager(AbstractVolumeUploadContextManager):
      _volume_id: str
      _client: modal.client.Client
      _force: bool
@@ -325,7 +384,7 @@ class VolumeUploadContextManager:
      async def __aexit__(self, exc_type, exc_val, exc_tb): ...
      def put_file(
          self,
-         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO],
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
          remote_path: typing.Union[pathlib.PurePosixPath, str],
          mode: typing.Optional[int] = None,
      ): ...
@@ -342,4 +401,85 @@ class VolumeUploadContextManager:

      _upload_file: ___upload_file_spec[typing_extensions.Self]

+ class _VolumeUploadContextManager2(_AbstractVolumeUploadContextManager):
+     _volume_id: str
+     _client: modal.client._Client
+     _force: bool
+     _progress_cb: collections.abc.Callable[..., typing.Any]
+     _uploader_generators: list[
+         collections.abc.Generator[
+             collections.abc.Callable[[], typing.Awaitable[modal._utils.blob_utils.FileUploadSpec2]]
+         ]
+     ]
+
+     def __init__(
+         self,
+         volume_id: str,
+         client: modal.client._Client,
+         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
+         force: bool = False,
+     ): ...
+     async def __aenter__(self): ...
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+     def put_file(
+         self,
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         mode: typing.Optional[int] = None,
+     ): ...
+     def put_directory(
+         self,
+         local_path: typing.Union[pathlib.Path, str],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         recursive: bool = True,
+     ): ...
+     async def _put_file_specs(self, file_specs: list[modal._utils.blob_utils.FileUploadSpec2]): ...
+
+ class VolumeUploadContextManager2(AbstractVolumeUploadContextManager):
+     _volume_id: str
+     _client: modal.client.Client
+     _force: bool
+     _progress_cb: collections.abc.Callable[..., typing.Any]
+     _uploader_generators: list[
+         collections.abc.Generator[
+             collections.abc.Callable[[], typing.Awaitable[modal._utils.blob_utils.FileUploadSpec2]]
+         ]
+     ]
+
+     def __init__(
+         self,
+         volume_id: str,
+         client: modal.client.Client,
+         progress_cb: typing.Optional[collections.abc.Callable[..., typing.Any]] = None,
+         force: bool = False,
+     ): ...
+     def __enter__(self): ...
+     async def __aenter__(self): ...
+     def __exit__(self, exc_type, exc_val, exc_tb): ...
+     async def __aexit__(self, exc_type, exc_val, exc_tb): ...
+     def put_file(
+         self,
+         local_file: typing.Union[pathlib.Path, str, typing.BinaryIO, _io.BytesIO],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         mode: typing.Optional[int] = None,
+     ): ...
+     def put_directory(
+         self,
+         local_path: typing.Union[pathlib.Path, str],
+         remote_path: typing.Union[pathlib.PurePosixPath, str],
+         recursive: bool = True,
+     ): ...
+
+     class ___put_file_specs_spec(typing_extensions.Protocol[SUPERSELF]):
+         def __call__(self, file_specs: list[modal._utils.blob_utils.FileUploadSpec2]): ...
+         async def aio(self, file_specs: list[modal._utils.blob_utils.FileUploadSpec2]): ...
+
+     _put_file_specs: ___put_file_specs_spec[typing_extensions.Self]
+
+ async def _put_missing_blocks(
+     file_specs: list[modal._utils.blob_utils.FileUploadSpec2],
+     missing_blocks: list,
+     put_responses: dict[bytes, bytes],
+     progress_cb: collections.abc.Callable[..., typing.Any],
+ ): ...
  def _open_files_error_annotation(mount_path: str) -> typing.Optional[str]: ...
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: modal
- Version: 0.74.15
+ Version: 0.74.16
  Summary: Python client library for Modal
  Author-email: Modal Labs <support@modal.com>
  License: Apache-2.0
@@ -22,7 +22,7 @@ modal/app.py,sha256=1GSPMMYRkG17FQwnA_PleK9YQrPQWXlN_4mpZ5VRA4Q,48468
  modal/app.pyi,sha256=bpC9uN_B4d_UtyXVuhGpXC2RM-IcsytS0ndu-GoKrHQ,27276
  modal/call_graph.py,sha256=1g2DGcMIJvRy-xKicuf63IVE98gJSnQsr8R_NVMptNc,2581
  modal/client.py,sha256=U-YKSw0n7J1ZLREt9cbEJCtmHe5YoPKFxl0xlkan2yc,15565
- modal/client.pyi,sha256=gTGLwo62ctAnv6FICBPjNpwC5GMyQ6CyL5fBhdNM0C4,7593
+ modal/client.pyi,sha256=MXDCG1c77_KDP7R66sApXRKz9ZnhLsZMjSETzBqGDzA,7593
  modal/cloud_bucket_mount.py,sha256=YOe9nnvSr4ZbeCn587d7_VhE9IioZYRvF9VYQTQux08,5914
  modal/cloud_bucket_mount.pyi,sha256=30T3K1a89l6wzmEJ_J9iWv9SknoGqaZDx59Xs-ZQcmk,1607
  modal/cls.py,sha256=GvaNl8R5UsH7Vg88WEOyerdjvZEPK7xxi3nqHlyOW_c,33497
@@ -39,7 +39,7 @@ modal/file_io.py,sha256=lcMs_E9Xfm0YX1t9U2wNIBPnqHRxmImqjLW1GHqVmyg,20945
  modal/file_io.pyi,sha256=NTRft1tbPSWf9TlWVeZmTlgB5AZ_Zhu2srWIrWr7brk,9445
  modal/file_pattern_matcher.py,sha256=trosX-Bp7dOubudN1bLLhRAoidWy1TcoaR4Pv8CedWw,6497
  modal/functions.py,sha256=kcNHvqeGBxPI7Cgd57NIBBghkfbeFJzXO44WW0jSmao,325
- modal/functions.pyi,sha256=2J7jPa2tQWhuUy67wg-MWvEx0rwrBNjX6UTEJbAUq_A,14855
+ modal/functions.pyi,sha256=I8dNJnvs6FguR8XAUqnMUg6X1214UzeLoAydZBrEa2g,14855
  modal/gpu.py,sha256=Kbhs_u49FaC2Zi0TjCdrpstpRtT5eZgecynmQi5IZVE,6752
  modal/image.py,sha256=I-9_YZL0SSfnuGPywa3-4PlxDmJ-53p7ce3gP74SrOA,92877
  modal/image.pyi,sha256=89zv12C1sFrJs7Es9SnX23_m208ASAdeNGCVTrhjzHI,25632
@@ -78,8 +78,8 @@ modal/snapshot.pyi,sha256=Ypd4NKsjOTnnnqXyTGGLKq5lkocRrUURYjY5Pi67_qA,670
  modal/stream_type.py,sha256=A6320qoAAWhEfwOCZfGtymQTu5AfLfJXXgARqooTPvY,417
  modal/token_flow.py,sha256=APNPQhuqy2avu6IY26U6OiN7erRhpo03Ztt1V60_wts,6776
  modal/token_flow.pyi,sha256=0XV3d-9CGQL3qjPdw3RgwIFVqqxo8Z-u044_mkgAM3o,2064
- modal/volume.py,sha256=JAWeDvoAG95tMBv-fYIERyHsJPS_X_xGpxRRmYtb6j0,30096
- modal/volume.pyi,sha256=kTsXarphjZILXci84LQy7EyC84eXUs5-7D62IM5q3eE,12491
+ modal/volume.py,sha256=3c5_aJNJtgpsFRZWBjc0jwn8Zs0jo9V6UDmh6ifrbdA,40145
+ modal/volume.pyi,sha256=juOVWGlgz7IeOY4M7jBhbeNRPA9xdGUwvA3AzlZUscQ,17958
  modal/_runtime/__init__.py,sha256=MIEP8jhXUeGq_eCjYFcqN5b1bxBM4fdk0VESpjWR0fc,28
  modal/_runtime/asgi.py,sha256=KNarxvZI9z8fnmZl2vbkWTjnoLXs9kqOahkrbsTLkyc,22429
  modal/_runtime/container_io_manager.py,sha256=0yNO3HTVIM4f338rxJavD8nrRN7KhDpjz1jLux71MRY,43842
@@ -92,7 +92,7 @@ modal/_runtime/user_code_imports.py,sha256=kAv37Pl1TmGKduv0Kozum0xNTD42bDLloSIsT
  modal/_utils/__init__.py,sha256=waLjl5c6IPDhSsdWAm9Bji4e2PVxamYABKAze6CHVXY,28
  modal/_utils/app_utils.py,sha256=88BT4TPLWfYAQwKTHcyzNQRHg8n9B-QE2UyJs96iV-0,108
  modal/_utils/async_utils.py,sha256=b2TJyKY1Hq7df7M-fo3qlFM95mGdo3dCuqRPPcV5hsE,27445
- modal/_utils/blob_utils.py,sha256=jWJovk4g-YNG3CvkvglOds4a6D1M0Tcal_59v7y9VsM,14591
+ modal/_utils/blob_utils.py,sha256=WhWaSFLcffVDchMyfKys-j0EFVbm9l7rebzeoa7Z2jM,18214
  modal/_utils/bytes_io_segment_payload.py,sha256=uunxVJS4PE1LojF_UpURMzVK9GuvmYWRqQo_bxEj5TU,3385
  modal/_utils/deprecation.py,sha256=EXP1beU4pmEqEzWMLw6E3kUfNfpmNA_VOp6i0EHi93g,4856
  modal/_utils/docker_utils.py,sha256=h1uETghR40mp_y3fSWuZAfbIASH1HMzuphJHghAL6DU,3722
@@ -132,7 +132,7 @@ modal/cli/run.py,sha256=NX2wWwj8HD6XUhnZRF808Qy9eeouv8KnvyOP57HqIXI,23637
  modal/cli/secret.py,sha256=WB_c-LE9-eDqleLpJxsJ9rZw62Eeza8ZFQFR10vNMEk,4197
  modal/cli/token.py,sha256=mxSgOWakXG6N71hQb1ko61XAR9ZGkTMZD-Txn7gmTac,1924
  modal/cli/utils.py,sha256=hZmjyzcPjDnQSkLvycZD2LhGdcsfdZshs_rOU78EpvI,3717
- modal/cli/volume.py,sha256=c2IuVNO2yJVaXmZkRh3xwQmznlRTgFoJr_BIzzqtVv0,10251
+ modal/cli/volume.py,sha256=_PJ5Vn_prkLk_x1Lksx4kZySlKWqIn36T2Edd1-h7Mg,10497
  modal/cli/programs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
  modal/cli/programs/run_jupyter.py,sha256=YVvJYu927A4ji72d6i27CKfyZ_uDWteeittARtJnf7E,2775
  modal/cli/programs/vscode.py,sha256=kfvhZQ4bJwtVm3MgC1V7AlygZOlKT1a33alr_uwrewA,3473
@@ -145,7 +145,7 @@ modal/requirements/2024.10.txt,sha256=qD-5cVIVM9wXesJ6JC89Ew-3m2KjEElUz3jaw_MddR
  modal/requirements/PREVIEW.txt,sha256=qD-5cVIVM9wXesJ6JC89Ew-3m2KjEElUz3jaw_MddRo,296
  modal/requirements/README.md,sha256=9tK76KP0Uph7O0M5oUgsSwEZDj5y-dcUPsnpR0Sc-Ik,854
  modal/requirements/base-images.json,sha256=57vMSqzMbLBxw5tFWSaMiIkkVEps4JfX5PAtXGnkS4U,740
- modal-0.74.15.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
+ modal-0.74.16.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
  modal_docs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
  modal_docs/gen_cli_docs.py,sha256=c1yfBS_x--gL5bs0N4ihMwqwX8l3IBWSkBAKNNIi6bQ,3801
  modal_docs/gen_reference_docs.py,sha256=cvTgltucqYLLIX84QxAwf51Z5Vc2n6cLxS8VcrxNCAo,6401
@@ -170,9 +170,9 @@ modal_proto/options_pb2_grpc.pyi,sha256=CImmhxHsYnF09iENPoe8S4J-n93jtgUYD2JPAc0y
  modal_proto/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  modal_version/__init__.py,sha256=m94xZNWIjH8oUtJk4l9xfovzDJede2o7X-q0MHVECtM,470
  modal_version/__main__.py,sha256=2FO0yYQQwDTh6udt1h-cBnGd1c4ZyHnHSI4BksxzVac,105
- modal_version/_version_generated.py,sha256=BwTNwunmOMlhLbdcrq2kBshM6ByivCgCW2YhrtCy1ss,149
- modal-0.74.15.dist-info/METADATA,sha256=1EpkkZUHT-Oleu-NfpBAaFlurY2PnvSTOXlkpWh12lg,2474
- modal-0.74.15.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
- modal-0.74.15.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
- modal-0.74.15.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
- modal-0.74.15.dist-info/RECORD,,
+ modal_version/_version_generated.py,sha256=iw6S4gIMMk_kxXdUThejt0bL01XM4XObW5lyo2KKxMA,149
+ modal-0.74.16.dist-info/METADATA,sha256=SOCGPNZKh82g8EnIIGaDKd-PefSR4ucoYk5mS7JCyoE,2474
+ modal-0.74.16.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+ modal-0.74.16.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
+ modal-0.74.16.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
+ modal-0.74.16.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  # Copyright Modal Labs 2025

  # Note: Reset this value to -1 whenever you make a minor `0.X` release of the client.
- build_number = 15  # git: f5929f1
+ build_number = 16  # git: 7430900