modal-1.2.1.dev19-py3-none-any.whl → modal-1.2.2.dev21-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. modal/_clustered_functions.py +1 -3
  2. modal/_container_entrypoint.py +4 -1
  3. modal/_functions.py +33 -49
  4. modal/_grpc_client.py +148 -0
  5. modal/_output.py +3 -4
  6. modal/_runtime/container_io_manager.py +21 -22
  7. modal/_utils/async_utils.py +12 -3
  8. modal/_utils/auth_token_manager.py +1 -4
  9. modal/_utils/blob_utils.py +3 -4
  10. modal/_utils/grpc_utils.py +80 -51
  11. modal/_utils/mount_utils.py +26 -1
  12. modal/_utils/task_command_router_client.py +3 -4
  13. modal/app.py +3 -4
  14. modal/cli/config.py +3 -1
  15. modal/cli/container.py +1 -2
  16. modal/cli/entry_point.py +1 -0
  17. modal/cli/launch.py +1 -2
  18. modal/cli/network_file_system.py +1 -4
  19. modal/cli/queues.py +1 -2
  20. modal/cli/secret.py +1 -2
  21. modal/client.py +5 -115
  22. modal/client.pyi +2 -91
  23. modal/cls.py +1 -2
  24. modal/config.py +1 -1
  25. modal/container_process.py +4 -8
  26. modal/dict.py +12 -12
  27. modal/environments.py +1 -2
  28. modal/experimental/__init__.py +2 -3
  29. modal/experimental/flash.py +6 -10
  30. modal/file_io.py +13 -27
  31. modal/functions.pyi +6 -6
  32. modal/image.py +24 -3
  33. modal/image.pyi +4 -0
  34. modal/io_streams.py +61 -91
  35. modal/io_streams.pyi +33 -95
  36. modal/mount.py +4 -4
  37. modal/network_file_system.py +5 -6
  38. modal/parallel_map.py +29 -31
  39. modal/parallel_map.pyi +3 -9
  40. modal/queue.py +17 -18
  41. modal/runner.py +8 -8
  42. modal/sandbox.py +23 -36
  43. modal/secret.py +4 -5
  44. modal/snapshot.py +1 -4
  45. modal/token_flow.py +1 -1
  46. modal/volume.py +20 -22
  47. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/METADATA +1 -1
  48. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/RECORD +57 -56
  49. modal_proto/api.proto +3 -0
  50. modal_proto/api_pb2.py +1028 -1015
  51. modal_proto/api_pb2.pyi +29 -3
  52. modal_proto/modal_api_grpc.py +175 -175
  53. modal_version/__init__.py +1 -1
  54. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/WHEEL +0 -0
  55. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/entry_points.txt +0 -0
  56. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/licenses/LICENSE +0 -0
  57. {modal-1.2.1.dev19.dist-info → modal-1.2.2.dev21.dist-info}/top_level.txt +0 -0
modal/client.pyi CHANGED
@@ -33,7 +33,7 @@ class _Client:
33
33
  server_url: str,
34
34
  client_type: int,
35
35
  credentials: typing.Optional[tuple[str, str]],
36
- version: str = "1.2.1.dev19",
36
+ version: str = "1.2.2.dev21",
37
37
  ):
38
38
  """mdmd:hidden
39
39
  The Modal client object is not intended to be instantiated directly by users.
@@ -164,7 +164,7 @@ class Client:
164
164
  server_url: str,
165
165
  client_type: int,
166
166
  credentials: typing.Optional[tuple[str, str]],
167
- version: str = "1.2.1.dev19",
167
+ version: str = "1.2.2.dev21",
168
168
  ):
169
169
  """mdmd:hidden
170
170
  The Modal client object is not intended to be instantiated directly by users.
@@ -339,95 +339,6 @@ class Client:
339
339
  ],
340
340
  ) -> collections.abc.AsyncGenerator[typing.Any, None]: ...
341
341
 
342
- class grpc_error_converter:
343
- def __enter__(self): ...
344
- def __exit__(self, exc_type, exc, traceback) -> bool: ...
345
-
346
- class UnaryUnaryWrapper(typing.Generic[RequestType, ResponseType]):
347
- """Abstract base class for generic types.
348
-
349
- A generic type is typically declared by inheriting from
350
- this class parameterized with one or more type variables.
351
- For example, a generic mapping type might be defined as::
352
-
353
- class Mapping(Generic[KT, VT]):
354
- def __getitem__(self, key: KT) -> VT:
355
- ...
356
- # Etc.
357
-
358
- This class can then be used as follows::
359
-
360
- def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
361
- try:
362
- return mapping[key]
363
- except KeyError:
364
- return default
365
- """
366
-
367
- wrapped_method: grpclib.client.UnaryUnaryMethod[RequestType, ResponseType]
368
- client: _Client
369
-
370
- def __init__(
371
- self,
372
- wrapped_method: grpclib.client.UnaryUnaryMethod[RequestType, ResponseType],
373
- client: _Client,
374
- server_url: str,
375
- ):
376
- """Initialize self. See help(type(self)) for accurate signature."""
377
- ...
378
-
379
- @property
380
- def name(self) -> str: ...
381
- async def __call__(
382
- self,
383
- req: RequestType,
384
- *,
385
- timeout: typing.Optional[float] = None,
386
- metadata: typing.Union[
387
- collections.abc.Mapping[str, typing.Union[str, bytes]],
388
- collections.abc.Collection[tuple[str, typing.Union[str, bytes]]],
389
- None,
390
- ] = None,
391
- ) -> ResponseType:
392
- """Call self as a function."""
393
- ...
394
-
395
- class UnaryStreamWrapper(typing.Generic[RequestType, ResponseType]):
396
- """Abstract base class for generic types.
397
-
398
- A generic type is typically declared by inheriting from
399
- this class parameterized with one or more type variables.
400
- For example, a generic mapping type might be defined as::
401
-
402
- class Mapping(Generic[KT, VT]):
403
- def __getitem__(self, key: KT) -> VT:
404
- ...
405
- # Etc.
406
-
407
- This class can then be used as follows::
408
-
409
- def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
410
- try:
411
- return mapping[key]
412
- except KeyError:
413
- return default
414
- """
415
-
416
- wrapped_method: grpclib.client.UnaryStreamMethod[RequestType, ResponseType]
417
-
418
- def __init__(
419
- self,
420
- wrapped_method: grpclib.client.UnaryStreamMethod[RequestType, ResponseType],
421
- client: _Client,
422
- server_url: str,
423
- ):
424
- """Initialize self. See help(type(self)) for accurate signature."""
425
- ...
426
-
427
- @property
428
- def name(self) -> str: ...
429
- def unary_stream(self, request, metadata: typing.Optional[typing.Any] = None): ...
430
-
431
342
  HEARTBEAT_INTERVAL: float
432
343
 
433
344
  HEARTBEAT_TIMEOUT: float
modal/cls.py CHANGED
@@ -30,7 +30,6 @@ from ._utils.deprecation import (
30
30
  warn_if_passing_namespace,
31
31
  warn_on_renamed_autoscaler_settings,
32
32
  )
33
- from ._utils.grpc_utils import retry_transient_errors
34
33
  from ._utils.mount_utils import validate_volumes
35
34
  from .cloud_bucket_mount import _CloudBucketMount
36
35
  from .config import config
@@ -643,7 +642,7 @@ More information on class parameterization can be found here: https://modal.com/
643
642
  only_class_function=True,
644
643
  )
645
644
  try:
646
- response = await retry_transient_errors(resolver.client.stub.ClassGet, request)
645
+ response = await resolver.client.stub.ClassGet(request)
647
646
  except NotFoundError as exc:
648
647
  env_context = f" (in the '{environment_name}' environment)" if environment_name else ""
649
648
  raise NotFoundError(
modal/config.py CHANGED
@@ -147,7 +147,7 @@ async def _lookup_workspace(server_url: str, token_id: str, token_secret: str) -
147
147
 
148
148
  credentials = (token_id, token_secret)
149
149
  async with _Client(server_url, api_pb2.CLIENT_TYPE_CLIENT, credentials) as client:
150
- return await client.stub.WorkspaceNameLookup(Empty(), timeout=3)
150
+ return await client.stub.WorkspaceNameLookup(Empty(), retry=None, timeout=3)
151
151
 
152
152
 
153
153
  def config_profiles():
@@ -7,7 +7,6 @@ from typing import Generic, Optional, TypeVar
7
7
  from modal_proto import api_pb2
8
8
 
9
9
  from ._utils.async_utils import TaskContext, synchronize_api
10
- from ._utils.grpc_utils import retry_transient_errors
11
10
  from ._utils.shell_utils import stream_from_stdin, write_to_fd
12
11
  from ._utils.task_command_router_client import TaskCommandRouterClient
13
12
  from .client import _Client
@@ -101,6 +100,7 @@ class _ContainerProcessThroughServer(Generic[T]):
101
100
 
102
101
  Returns `None` if the process is still running, else returns the exit code.
103
102
  """
103
+ assert self._process_id
104
104
  if self._returncode is not None:
105
105
  return self._returncode
106
106
  if self._exec_deadline and time.monotonic() >= self._exec_deadline:
@@ -110,7 +110,7 @@ class _ContainerProcessThroughServer(Generic[T]):
110
110
  return self._returncode
111
111
 
112
112
  req = api_pb2.ContainerExecWaitRequest(exec_id=self._process_id, timeout=0)
113
- resp: api_pb2.ContainerExecWaitResponse = await retry_transient_errors(self._client.stub.ContainerExecWait, req)
113
+ resp = await self._client.stub.ContainerExecWait(req)
114
114
 
115
115
  if resp.completed:
116
116
  self._returncode = resp.exit_code
@@ -119,11 +119,10 @@ class _ContainerProcessThroughServer(Generic[T]):
119
119
  return None
120
120
 
121
121
  async def _wait_for_completion(self) -> int:
122
+ assert self._process_id
122
123
  while True:
123
124
  req = api_pb2.ContainerExecWaitRequest(exec_id=self._process_id, timeout=10)
124
- resp: api_pb2.ContainerExecWaitResponse = await retry_transient_errors(
125
- self._client.stub.ContainerExecWait, req
126
- )
125
+ resp = await self._client.stub.ContainerExecWait(req)
127
126
  if resp.completed:
128
127
  return resp.exit_code
129
128
 
@@ -169,9 +168,6 @@ class _ContainerProcessThroughServer(Generic[T]):
169
168
  stream_impl = stream._impl
170
169
  # Don't skip empty messages so we can detect when the process has booted.
171
170
  async for chunk in stream_impl._get_logs(skip_empty_messages=False):
172
- if chunk is None:
173
- break
174
-
175
171
  if not on_connect.is_set():
176
172
  connecting_status.stop()
177
173
  on_connect.set()
modal/dict.py CHANGED
@@ -9,6 +9,7 @@ from grpclib import GRPCError, Status
9
9
  from synchronicity import classproperty
10
10
  from synchronicity.async_wrap import asynccontextmanager
11
11
 
12
+ from modal._utils.grpc_utils import Retry
12
13
  from modal_proto import api_pb2
13
14
 
14
15
  from ._object import (
@@ -22,7 +23,6 @@ from ._resolver import Resolver
22
23
  from ._serialization import deserialize, serialize
23
24
  from ._utils.async_utils import TaskContext, synchronize_api
24
25
  from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
25
- from ._utils.grpc_utils import retry_transient_errors
26
26
  from ._utils.name_utils import check_object_name
27
27
  from ._utils.time_utils import as_timestamp, timestamp_to_localized_dt
28
28
  from .client import _Client
@@ -105,7 +105,7 @@ class _DictManager:
105
105
  object_creation_type=object_creation_type,
106
106
  )
107
107
  try:
108
- await retry_transient_errors(client.stub.DictGetOrCreate, req)
108
+ await client.stub.DictGetOrCreate(req)
109
109
  except GRPCError as exc:
110
110
  if exc.status == Status.ALREADY_EXISTS and not allow_existing:
111
111
  raise AlreadyExistsError(exc.message)
@@ -157,7 +157,7 @@ class _DictManager:
157
157
  req = api_pb2.DictListRequest(
158
158
  environment_name=_get_environment_name(environment_name), pagination=pagination
159
159
  )
160
- resp = await retry_transient_errors(client.stub.DictList, req)
160
+ resp = await client.stub.DictList(req)
161
161
  items.extend(resp.dicts)
162
162
  finished = (len(resp.dicts) < max_page_size) or (max_objects is not None and len(items) >= max_objects)
163
163
  return finished
@@ -215,7 +215,7 @@ class _DictManager:
215
215
  raise
216
216
  else:
217
217
  req = api_pb2.DictDeleteRequest(dict_id=obj.object_id)
218
- await retry_transient_errors(obj._client.stub.DictDelete, req)
218
+ await obj._client.stub.DictDelete(req)
219
219
 
220
220
 
221
221
  DictManager = synchronize_api(_DictManager)
@@ -327,7 +327,7 @@ class _Dict(_Object, type_prefix="di"):
327
327
  environment_name=_get_environment_name(environment_name),
328
328
  data=serialized,
329
329
  )
330
- response = await retry_transient_errors(client.stub.DictGetOrCreate, request, total_timeout=10.0)
330
+ response = await client.stub.DictGetOrCreate(request, retry=Retry(total_timeout=10.0))
331
331
  async with TaskContext() as tc:
332
332
  request = api_pb2.DictHeartbeatRequest(dict_id=response.dict_id)
333
333
  tc.infinite_loop(lambda: client.stub.DictHeartbeat(request), sleep=_heartbeat_sleep)
@@ -418,7 +418,7 @@ class _Dict(_Object, type_prefix="di"):
418
418
  async def clear(self) -> None:
419
419
  """Remove all items from the Dict."""
420
420
  req = api_pb2.DictClearRequest(dict_id=self.object_id)
421
- await retry_transient_errors(self._client.stub.DictClear, req)
421
+ await self._client.stub.DictClear(req)
422
422
 
423
423
  @live_method
424
424
  async def get(self, key: Any, default: Optional[Any] = None) -> Any:
@@ -427,7 +427,7 @@ class _Dict(_Object, type_prefix="di"):
427
427
  Returns `default` if key does not exist.
428
428
  """
429
429
  req = api_pb2.DictGetRequest(dict_id=self.object_id, key=serialize(key))
430
- resp = await retry_transient_errors(self._client.stub.DictGet, req)
430
+ resp = await self._client.stub.DictGet(req)
431
431
  if not resp.found:
432
432
  return default
433
433
  return deserialize(resp.value, self._client)
@@ -436,7 +436,7 @@ class _Dict(_Object, type_prefix="di"):
436
436
  async def contains(self, key: Any) -> bool:
437
437
  """Return if a key is present."""
438
438
  req = api_pb2.DictContainsRequest(dict_id=self.object_id, key=serialize(key))
439
- resp = await retry_transient_errors(self._client.stub.DictContains, req)
439
+ resp = await self._client.stub.DictContains(req)
440
440
  return resp.found
441
441
 
442
442
  @live_method
@@ -446,7 +446,7 @@ class _Dict(_Object, type_prefix="di"):
446
446
  Note: This is an expensive operation and will return at most 100,000.
447
447
  """
448
448
  req = api_pb2.DictLenRequest(dict_id=self.object_id)
449
- resp = await retry_transient_errors(self._client.stub.DictLen, req)
449
+ resp = await self._client.stub.DictLen(req)
450
450
  return resp.len
451
451
 
452
452
  @live_method
@@ -475,7 +475,7 @@ class _Dict(_Object, type_prefix="di"):
475
475
  serialized = _serialize_dict(contents)
476
476
  req = api_pb2.DictUpdateRequest(dict_id=self.object_id, updates=serialized)
477
477
  try:
478
- await retry_transient_errors(self._client.stub.DictUpdate, req)
478
+ await self._client.stub.DictUpdate(req)
479
479
  except GRPCError as exc:
480
480
  if "status = '413'" in exc.message:
481
481
  raise RequestSizeError("Dict.update request is too large") from exc
@@ -493,7 +493,7 @@ class _Dict(_Object, type_prefix="di"):
493
493
  serialized = _serialize_dict(updates)
494
494
  req = api_pb2.DictUpdateRequest(dict_id=self.object_id, updates=serialized, if_not_exists=skip_if_exists)
495
495
  try:
496
- resp = await retry_transient_errors(self._client.stub.DictUpdate, req)
496
+ resp = await self._client.stub.DictUpdate(req)
497
497
  return resp.created
498
498
  except GRPCError as exc:
499
499
  if "status = '413'" in exc.message:
@@ -516,7 +516,7 @@ class _Dict(_Object, type_prefix="di"):
516
516
  If key is not found, return default if provided, otherwise raise KeyError.
517
517
  """
518
518
  req = api_pb2.DictPopRequest(dict_id=self.object_id, key=serialize(key))
519
- resp = await retry_transient_errors(self._client.stub.DictPop, req)
519
+ resp = await self._client.stub.DictPop(req)
520
520
  if not resp.found:
521
521
  if default is not _NO_DEFAULT:
522
522
  return default
modal/environments.py CHANGED
@@ -11,7 +11,6 @@ from modal_proto import api_pb2
11
11
  from ._object import _Object
12
12
  from ._resolver import Resolver
13
13
  from ._utils.async_utils import synchronize_api, synchronizer
14
- from ._utils.grpc_utils import retry_transient_errors
15
14
  from ._utils.name_utils import check_object_name
16
15
  from .client import _Client
17
16
  from .config import config, logger
@@ -71,7 +70,7 @@ class _Environment(_Object, type_prefix="en"):
71
70
  else api_pb2.OBJECT_CREATION_TYPE_UNSPECIFIED
72
71
  ),
73
72
  )
74
- response = await retry_transient_errors(resolver.client.stub.EnvironmentGetOrCreate, request)
73
+ response = await resolver.client.stub.EnvironmentGetOrCreate(request)
75
74
  logger.debug(f"Created environment with id {response.environment_id}")
76
75
  self._hydrate(response.environment_id, resolver.client, response.metadata)
77
76
 
@@ -13,7 +13,6 @@ from .._object import _get_environment_name
13
13
  from .._partial_function import _clustered
14
14
  from .._runtime.container_io_manager import _ContainerIOManager
15
15
  from .._utils.async_utils import synchronize_api, synchronizer
16
- from .._utils.grpc_utils import retry_transient_errors
17
16
  from ..app import _App
18
17
  from ..client import _Client
19
18
  from ..cls import _Cls
@@ -116,7 +115,7 @@ async def get_app_objects(
116
115
 
117
116
  app = await _App.lookup(app_name, environment_name=environment_name, client=client)
118
117
  req = api_pb2.AppGetLayoutRequest(app_id=app.app_id)
119
- app_layout_resp = await retry_transient_errors(client.stub.AppGetLayout, req)
118
+ app_layout_resp = await client.stub.AppGetLayout(req)
120
119
 
121
120
  app_objects: dict[str, Union[_Function, _Cls]] = {}
122
121
 
@@ -361,4 +360,4 @@ async def image_delete(
361
360
  client = await _Client.from_env()
362
361
 
363
362
  req = api_pb2.ImageDeleteRequest(image_id=image_id)
364
- await retry_transient_errors(client.stub.ImageDelete, req)
363
+ await client.stub.ImageDelete(req)
@@ -16,7 +16,6 @@ from modal_proto import api_pb2
16
16
 
17
17
  from .._tunnel import _forward as _forward_tunnel
18
18
  from .._utils.async_utils import synchronize_api, synchronizer
19
- from .._utils.grpc_utils import retry_transient_errors
20
19
  from ..client import _Client
21
20
  from ..config import logger
22
21
  from ..exception import InvalidError
@@ -126,10 +125,8 @@ class _FlashManager:
126
125
  f"due to error: {port_check_error}, num_failures: {self.num_failures}"
127
126
  )
128
127
  self.num_failures += 1
129
- await retry_transient_errors(
130
- self.client.stub.FlashContainerDeregister,
131
- api_pb2.FlashContainerDeregisterRequest(),
132
- )
128
+ await self.client.stub.FlashContainerDeregister(api_pb2.FlashContainerDeregisterRequest())
129
+
133
130
  except asyncio.CancelledError:
134
131
  logger.warning("[Modal Flash] Shutting down...")
135
132
  break
@@ -148,8 +145,7 @@ class _FlashManager:
148
145
 
149
146
  async def stop(self):
150
147
  self.heartbeat_task.cancel()
151
- await retry_transient_errors(
152
- self.client.stub.FlashContainerDeregister,
148
+ await self.client.stub.FlashContainerDeregister(
153
149
  api_pb2.FlashContainerDeregisterRequest(),
154
150
  )
155
151
 
@@ -463,12 +459,12 @@ class _FlashPrometheusAutoscaler:
463
459
 
464
460
  async def _get_all_containers(self):
465
461
  req = api_pb2.FlashContainerListRequest(function_id=self.fn.object_id)
466
- resp = await retry_transient_errors(self.client.stub.FlashContainerList, req)
462
+ resp = await self.client.stub.FlashContainerList(req)
467
463
  return resp.containers
468
464
 
469
465
  async def _set_target_slots(self, target_slots: int):
470
466
  req = api_pb2.FlashSetTargetSlotsMetricsRequest(function_id=self.fn.object_id, target_slots=target_slots)
471
- await retry_transient_errors(self.client.stub.FlashSetTargetSlotsMetrics, req)
467
+ await self.client.stub.FlashSetTargetSlotsMetrics(req)
472
468
  return
473
469
 
474
470
  def _make_scaling_decision(
@@ -619,5 +615,5 @@ async def flash_get_containers(app_name: str, cls_name: str) -> list[dict[str, A
619
615
  assert fn is not None
620
616
  await fn.hydrate(client=client)
621
617
  req = api_pb2.FlashContainerListRequest(function_id=fn.object_id)
622
- resp = await retry_transient_errors(client.stub.FlashContainerList, req)
618
+ resp = await client.stub.FlashContainerList(req)
623
619
  return resp.containers
modal/file_io.py CHANGED
@@ -13,7 +13,6 @@ import json
13
13
  from grpclib.exceptions import GRPCError, StreamTerminatedError
14
14
 
15
15
  from modal._utils.async_utils import TaskContext
16
- from modal._utils.grpc_utils import retry_transient_errors
17
16
  from modal.exception import ClientClosed
18
17
  from modal_proto import api_pb2
19
18
 
@@ -57,8 +56,7 @@ async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optio
57
56
  if start is not None and end is not None:
58
57
  if start >= end:
59
58
  raise ValueError("start must be less than end")
60
- resp = await retry_transient_errors(
61
- file._client.stub.ContainerFilesystemExec,
59
+ resp = await file._client.stub.ContainerFilesystemExec(
62
60
  api_pb2.ContainerFilesystemExecRequest(
63
61
  file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
64
62
  file_descriptor=file._file_descriptor,
@@ -85,8 +83,7 @@ async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = No
85
83
  raise InvalidError("start must be less than end")
86
84
  if len(data) > WRITE_CHUNK_SIZE:
87
85
  raise InvalidError("Write request payload exceeds 16 MiB limit")
88
- resp = await retry_transient_errors(
89
- file._client.stub.ContainerFilesystemExec,
86
+ resp = await file._client.stub.ContainerFilesystemExec(
90
87
  api_pb2.ContainerFilesystemExecRequest(
91
88
  file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
92
89
  file_descriptor=file._file_descriptor,
@@ -261,8 +258,7 @@ class _FileIO(Generic[T]):
261
258
  raise TypeError("Expected str when in text mode")
262
259
 
263
260
  async def _open_file(self, path: str, mode: str) -> None:
264
- resp = await retry_transient_errors(
265
- self._client.stub.ContainerFilesystemExec,
261
+ resp = await self._client.stub.ContainerFilesystemExec(
266
262
  api_pb2.ContainerFilesystemExecRequest(
267
263
  file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
268
264
  task_id=self._task_id,
@@ -285,8 +281,7 @@ class _FileIO(Generic[T]):
285
281
  return self
286
282
 
287
283
  async def _make_read_request(self, n: Optional[int]) -> bytes:
288
- resp = await retry_transient_errors(
289
- self._client.stub.ContainerFilesystemExec,
284
+ resp = await self._client.stub.ContainerFilesystemExec(
290
285
  api_pb2.ContainerFilesystemExecRequest(
291
286
  file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
292
287
  task_id=self._task_id,
@@ -309,8 +304,7 @@ class _FileIO(Generic[T]):
309
304
  """Read a single line from the current position."""
310
305
  self._check_closed()
311
306
  self._check_readable()
312
- resp = await retry_transient_errors(
313
- self._client.stub.ContainerFilesystemExec,
307
+ resp = await self._client.stub.ContainerFilesystemExec(
314
308
  api_pb2.ContainerFilesystemExecRequest(
315
309
  file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
316
310
  task_id=self._task_id,
@@ -351,8 +345,7 @@ class _FileIO(Generic[T]):
351
345
  raise ValueError("Write request payload exceeds 1 GiB limit")
352
346
  for i in range(0, len(data), WRITE_CHUNK_SIZE):
353
347
  chunk = data[i : i + WRITE_CHUNK_SIZE]
354
- resp = await retry_transient_errors(
355
- self._client.stub.ContainerFilesystemExec,
348
+ resp = await self._client.stub.ContainerFilesystemExec(
356
349
  api_pb2.ContainerFilesystemExecRequest(
357
350
  file_write_request=api_pb2.ContainerFileWriteRequest(
358
351
  file_descriptor=self._file_descriptor,
@@ -367,8 +360,7 @@ class _FileIO(Generic[T]):
367
360
  """Flush the buffer to disk."""
368
361
  self._check_closed()
369
362
  self._check_writable()
370
- resp = await retry_transient_errors(
371
- self._client.stub.ContainerFilesystemExec,
363
+ resp = await self._client.stub.ContainerFilesystemExec(
372
364
  api_pb2.ContainerFilesystemExecRequest(
373
365
  file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
374
366
  task_id=self._task_id,
@@ -393,8 +385,7 @@ class _FileIO(Generic[T]):
393
385
  (relative to the current position) and 2 (relative to the file's end).
394
386
  """
395
387
  self._check_closed()
396
- resp = await retry_transient_errors(
397
- self._client.stub.ContainerFilesystemExec,
388
+ resp = await self._client.stub.ContainerFilesystemExec(
398
389
  api_pb2.ContainerFilesystemExecRequest(
399
390
  file_seek_request=api_pb2.ContainerFileSeekRequest(
400
391
  file_descriptor=self._file_descriptor,
@@ -410,8 +401,7 @@ class _FileIO(Generic[T]):
410
401
  async def ls(cls, path: str, client: _Client, task_id: str) -> list[str]:
411
402
  """List the contents of the provided directory."""
412
403
  self = _FileIO(client, task_id)
413
- resp = await retry_transient_errors(
414
- self._client.stub.ContainerFilesystemExec,
404
+ resp = await self._client.stub.ContainerFilesystemExec(
415
405
  api_pb2.ContainerFilesystemExecRequest(
416
406
  file_ls_request=api_pb2.ContainerFileLsRequest(path=path),
417
407
  task_id=task_id,
@@ -427,8 +417,7 @@ class _FileIO(Generic[T]):
427
417
  async def mkdir(cls, path: str, client: _Client, task_id: str, parents: bool = False) -> None:
428
418
  """Create a new directory."""
429
419
  self = _FileIO(client, task_id)
430
- resp = await retry_transient_errors(
431
- self._client.stub.ContainerFilesystemExec,
420
+ resp = await self._client.stub.ContainerFilesystemExec(
432
421
  api_pb2.ContainerFilesystemExecRequest(
433
422
  file_mkdir_request=api_pb2.ContainerFileMkdirRequest(path=path, make_parents=parents),
434
423
  task_id=self._task_id,
@@ -440,8 +429,7 @@ class _FileIO(Generic[T]):
440
429
  async def rm(cls, path: str, client: _Client, task_id: str, recursive: bool = False) -> None:
441
430
  """Remove a file or directory in the Sandbox."""
442
431
  self = _FileIO(client, task_id)
443
- resp = await retry_transient_errors(
444
- self._client.stub.ContainerFilesystemExec,
432
+ resp = await self._client.stub.ContainerFilesystemExec(
445
433
  api_pb2.ContainerFilesystemExecRequest(
446
434
  file_rm_request=api_pb2.ContainerFileRmRequest(path=path, recursive=recursive),
447
435
  task_id=self._task_id,
@@ -460,8 +448,7 @@ class _FileIO(Generic[T]):
460
448
  timeout: Optional[int] = None,
461
449
  ) -> AsyncIterator[FileWatchEvent]:
462
450
  self = _FileIO(client, task_id)
463
- resp = await retry_transient_errors(
464
- self._client.stub.ContainerFilesystemExec,
451
+ resp = await self._client.stub.ContainerFilesystemExec(
465
452
  api_pb2.ContainerFilesystemExecRequest(
466
453
  file_watch_request=api_pb2.ContainerFileWatchRequest(
467
454
  path=path,
@@ -503,8 +490,7 @@ class _FileIO(Generic[T]):
503
490
 
504
491
  async def _close(self) -> None:
505
492
  # Buffer is flushed by the runner on close
506
- resp = await retry_transient_errors(
507
- self._client.stub.ContainerFilesystemExec,
493
+ resp = await self._client.stub.ContainerFilesystemExec(
508
494
  api_pb2.ContainerFilesystemExecRequest(
509
495
  file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
510
496
  task_id=self._task_id,
modal/functions.pyi CHANGED
@@ -401,7 +401,7 @@ class Function(
401
401
 
402
402
  _call_generator: ___call_generator_spec[typing_extensions.Self]
403
403
 
404
- class __remote_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
404
+ class __remote_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
405
405
  def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> ReturnType_INNER:
406
406
  """Calls the function remotely, executing it with the given arguments and returning the execution's result."""
407
407
  ...
@@ -410,7 +410,7 @@ class Function(
410
410
  """Calls the function remotely, executing it with the given arguments and returning the execution's result."""
411
411
  ...
412
412
 
413
- remote: __remote_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
413
+ remote: __remote_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
414
414
 
415
415
  class __remote_gen_spec(typing_extensions.Protocol[SUPERSELF]):
416
416
  def __call__(self, /, *args, **kwargs) -> typing.Generator[typing.Any, None, None]:
@@ -437,7 +437,7 @@ class Function(
437
437
  """
438
438
  ...
439
439
 
440
- class ___experimental_spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
440
+ class ___experimental_spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
441
441
  def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]:
442
442
  """[Experimental] Calls the function with the given arguments, without waiting for the results.
443
443
 
@@ -461,7 +461,7 @@ class Function(
461
461
  ...
462
462
 
463
463
  _experimental_spawn: ___experimental_spawn_spec[
464
- modal._functions.P, modal._functions.ReturnType, typing_extensions.Self
464
+ modal._functions.ReturnType, modal._functions.P, typing_extensions.Self
465
465
  ]
466
466
 
467
467
  class ___spawn_map_inner_spec(typing_extensions.Protocol[P_INNER, SUPERSELF]):
@@ -470,7 +470,7 @@ class Function(
470
470
 
471
471
  _spawn_map_inner: ___spawn_map_inner_spec[modal._functions.P, typing_extensions.Self]
472
472
 
473
- class __spawn_spec(typing_extensions.Protocol[P_INNER, ReturnType_INNER, SUPERSELF]):
473
+ class __spawn_spec(typing_extensions.Protocol[ReturnType_INNER, P_INNER, SUPERSELF]):
474
474
  def __call__(self, /, *args: P_INNER.args, **kwargs: P_INNER.kwargs) -> FunctionCall[ReturnType_INNER]:
475
475
  """Calls the function with the given arguments, without waiting for the results.
476
476
 
@@ -491,7 +491,7 @@ class Function(
491
491
  """
492
492
  ...
493
493
 
494
- spawn: __spawn_spec[modal._functions.P, modal._functions.ReturnType, typing_extensions.Self]
494
+ spawn: __spawn_spec[modal._functions.ReturnType, modal._functions.P, typing_extensions.Self]
495
495
 
496
496
  def get_raw_f(self) -> collections.abc.Callable[..., typing.Any]:
497
497
  """Return the inner Python object wrapped by this Modal Function."""