modal 1.0.6.dev58-py3-none-any.whl → 1.2.3.dev7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of modal might be problematic.

Files changed (147)
  1. modal/__main__.py +3 -4
  2. modal/_billing.py +80 -0
  3. modal/_clustered_functions.py +7 -3
  4. modal/_clustered_functions.pyi +4 -2
  5. modal/_container_entrypoint.py +41 -49
  6. modal/_functions.py +424 -195
  7. modal/_grpc_client.py +171 -0
  8. modal/_load_context.py +105 -0
  9. modal/_object.py +68 -20
  10. modal/_output.py +58 -45
  11. modal/_partial_function.py +36 -11
  12. modal/_pty.py +7 -3
  13. modal/_resolver.py +21 -35
  14. modal/_runtime/asgi.py +4 -3
  15. modal/_runtime/container_io_manager.py +301 -186
  16. modal/_runtime/container_io_manager.pyi +70 -61
  17. modal/_runtime/execution_context.py +18 -2
  18. modal/_runtime/execution_context.pyi +4 -1
  19. modal/_runtime/gpu_memory_snapshot.py +170 -63
  20. modal/_runtime/user_code_imports.py +28 -58
  21. modal/_serialization.py +57 -1
  22. modal/_utils/async_utils.py +33 -12
  23. modal/_utils/auth_token_manager.py +2 -5
  24. modal/_utils/blob_utils.py +110 -53
  25. modal/_utils/function_utils.py +49 -42
  26. modal/_utils/grpc_utils.py +80 -50
  27. modal/_utils/mount_utils.py +26 -1
  28. modal/_utils/name_utils.py +17 -3
  29. modal/_utils/task_command_router_client.py +536 -0
  30. modal/_utils/time_utils.py +34 -6
  31. modal/app.py +219 -83
  32. modal/app.pyi +229 -56
  33. modal/billing.py +5 -0
  34. modal/{requirements → builder}/2025.06.txt +1 -0
  35. modal/{requirements → builder}/PREVIEW.txt +1 -0
  36. modal/cli/_download.py +19 -3
  37. modal/cli/_traceback.py +3 -2
  38. modal/cli/app.py +4 -4
  39. modal/cli/cluster.py +15 -7
  40. modal/cli/config.py +5 -3
  41. modal/cli/container.py +7 -6
  42. modal/cli/dict.py +22 -16
  43. modal/cli/entry_point.py +12 -5
  44. modal/cli/environment.py +5 -4
  45. modal/cli/import_refs.py +3 -3
  46. modal/cli/launch.py +102 -5
  47. modal/cli/network_file_system.py +9 -13
  48. modal/cli/profile.py +3 -2
  49. modal/cli/programs/launch_instance_ssh.py +94 -0
  50. modal/cli/programs/run_jupyter.py +1 -1
  51. modal/cli/programs/run_marimo.py +95 -0
  52. modal/cli/programs/vscode.py +1 -1
  53. modal/cli/queues.py +57 -26
  54. modal/cli/run.py +58 -16
  55. modal/cli/secret.py +48 -22
  56. modal/cli/utils.py +3 -4
  57. modal/cli/volume.py +28 -25
  58. modal/client.py +13 -116
  59. modal/client.pyi +9 -91
  60. modal/cloud_bucket_mount.py +5 -3
  61. modal/cloud_bucket_mount.pyi +5 -1
  62. modal/cls.py +130 -102
  63. modal/cls.pyi +45 -85
  64. modal/config.py +29 -10
  65. modal/container_process.py +291 -13
  66. modal/container_process.pyi +95 -32
  67. modal/dict.py +282 -63
  68. modal/dict.pyi +423 -73
  69. modal/environments.py +15 -27
  70. modal/environments.pyi +5 -15
  71. modal/exception.py +8 -0
  72. modal/experimental/__init__.py +143 -38
  73. modal/experimental/flash.py +247 -78
  74. modal/experimental/flash.pyi +137 -9
  75. modal/file_io.py +14 -28
  76. modal/file_io.pyi +2 -2
  77. modal/file_pattern_matcher.py +25 -16
  78. modal/functions.pyi +134 -61
  79. modal/image.py +255 -86
  80. modal/image.pyi +300 -62
  81. modal/io_streams.py +436 -126
  82. modal/io_streams.pyi +236 -171
  83. modal/mount.py +62 -157
  84. modal/mount.pyi +45 -172
  85. modal/network_file_system.py +30 -53
  86. modal/network_file_system.pyi +16 -76
  87. modal/object.pyi +42 -8
  88. modal/parallel_map.py +821 -113
  89. modal/parallel_map.pyi +134 -0
  90. modal/partial_function.pyi +4 -1
  91. modal/proxy.py +16 -7
  92. modal/proxy.pyi +10 -2
  93. modal/queue.py +263 -61
  94. modal/queue.pyi +409 -66
  95. modal/runner.py +112 -92
  96. modal/runner.pyi +45 -27
  97. modal/sandbox.py +451 -124
  98. modal/sandbox.pyi +513 -67
  99. modal/secret.py +291 -67
  100. modal/secret.pyi +425 -19
  101. modal/serving.py +7 -11
  102. modal/serving.pyi +7 -8
  103. modal/snapshot.py +11 -8
  104. modal/token_flow.py +4 -4
  105. modal/volume.py +344 -98
  106. modal/volume.pyi +464 -68
  107. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/METADATA +9 -8
  108. modal-1.2.3.dev7.dist-info/RECORD +195 -0
  109. modal_docs/mdmd/mdmd.py +11 -1
  110. modal_proto/api.proto +399 -67
  111. modal_proto/api_grpc.py +241 -1
  112. modal_proto/api_pb2.py +1395 -1000
  113. modal_proto/api_pb2.pyi +1239 -79
  114. modal_proto/api_pb2_grpc.py +499 -4
  115. modal_proto/api_pb2_grpc.pyi +162 -14
  116. modal_proto/modal_api_grpc.py +175 -160
  117. modal_proto/sandbox_router.proto +145 -0
  118. modal_proto/sandbox_router_grpc.py +105 -0
  119. modal_proto/sandbox_router_pb2.py +149 -0
  120. modal_proto/sandbox_router_pb2.pyi +333 -0
  121. modal_proto/sandbox_router_pb2_grpc.py +203 -0
  122. modal_proto/sandbox_router_pb2_grpc.pyi +75 -0
  123. modal_proto/task_command_router.proto +144 -0
  124. modal_proto/task_command_router_grpc.py +105 -0
  125. modal_proto/task_command_router_pb2.py +149 -0
  126. modal_proto/task_command_router_pb2.pyi +333 -0
  127. modal_proto/task_command_router_pb2_grpc.py +203 -0
  128. modal_proto/task_command_router_pb2_grpc.pyi +75 -0
  129. modal_version/__init__.py +1 -1
  130. modal-1.0.6.dev58.dist-info/RECORD +0 -183
  131. modal_proto/modal_options_grpc.py +0 -3
  132. modal_proto/options.proto +0 -19
  133. modal_proto/options_grpc.py +0 -3
  134. modal_proto/options_pb2.py +0 -35
  135. modal_proto/options_pb2.pyi +0 -20
  136. modal_proto/options_pb2_grpc.py +0 -4
  137. modal_proto/options_pb2_grpc.pyi +0 -7
  138. /modal/{requirements → builder}/2023.12.312.txt +0 -0
  139. /modal/{requirements → builder}/2023.12.txt +0 -0
  140. /modal/{requirements → builder}/2024.04.txt +0 -0
  141. /modal/{requirements → builder}/2024.10.txt +0 -0
  142. /modal/{requirements → builder}/README.md +0 -0
  143. /modal/{requirements → builder}/base-images.json +0 -0
  144. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/WHEEL +0 -0
  145. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/entry_points.txt +0 -0
  146. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/licenses/LICENSE +0 -0
  147. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/top_level.txt +0 -0
modal/experimental/flash.pyi CHANGED
@@ -1,13 +1,27 @@
 import modal.client
+import subprocess
 import typing
 import typing_extensions
 
 class _FlashManager:
-    def __init__(self, client: modal.client._Client, port: int, health_check_url: typing.Optional[str] = None):
+    def __init__(
+        self,
+        client: modal.client._Client,
+        port: int,
+        process: typing.Optional[subprocess.Popen] = None,
+        health_check_url: typing.Optional[str] = None,
+    ):
         """Initialize self. See help(type(self)) for accurate signature."""
         ...
 
+    async def is_port_connection_healthy(
+        self, process: typing.Optional[subprocess.Popen], timeout: float = 0.5
+    ) -> tuple[bool, typing.Optional[Exception]]: ...
     async def _start(self): ...
+    async def _drain_container(self):
+        """Background task that checks if we've encountered too many failures and drains the container if so."""
+        ...
+
     async def _run_heartbeat(self, host: str, port: int): ...
     def get_container_url(self): ...
     async def stop(self): ...
@@ -16,7 +30,23 @@ class _FlashManager:
 SUPERSELF = typing.TypeVar("SUPERSELF", covariant=True)
 
 class FlashManager:
-    def __init__(self, client: modal.client.Client, port: int, health_check_url: typing.Optional[str] = None): ...
+    def __init__(
+        self,
+        client: modal.client.Client,
+        port: int,
+        process: typing.Optional[subprocess.Popen] = None,
+        health_check_url: typing.Optional[str] = None,
+    ): ...
+
+    class __is_port_connection_healthy_spec(typing_extensions.Protocol[SUPERSELF]):
+        def __call__(
+            self, /, process: typing.Optional[subprocess.Popen], timeout: float = 0.5
+        ) -> tuple[bool, typing.Optional[Exception]]: ...
+        async def aio(
+            self, /, process: typing.Optional[subprocess.Popen], timeout: float = 0.5
+        ) -> tuple[bool, typing.Optional[Exception]]: ...
+
+    is_port_connection_healthy: __is_port_connection_healthy_spec[typing_extensions.Self]
 
     class ___start_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /): ...
@@ -24,6 +54,17 @@ class FlashManager:
 
     _start: ___start_spec[typing_extensions.Self]
 
+    class ___drain_container_spec(typing_extensions.Protocol[SUPERSELF]):
+        def __call__(self, /):
+            """Background task that checks if we've encountered too many failures and drains the container if so."""
+            ...
+
+        async def aio(self, /):
+            """Background task that checks if we've encountered too many failures and drains the container if so."""
+            ...
+
+    _drain_container: ___drain_container_spec[typing_extensions.Self]
+
     class ___run_heartbeat_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /, host: str, port: int): ...
         async def aio(self, /, host: str, port: int): ...
@@ -45,17 +86,27 @@ class FlashManager:
     close: __close_spec[typing_extensions.Self]
 
 class __flash_forward_spec(typing_extensions.Protocol):
-    def __call__(self, /, port: int, health_check_url: typing.Optional[str] = None) -> FlashManager:
+    def __call__(
+        self,
+        /,
+        port: int,
+        process: typing.Optional[subprocess.Popen] = None,
+        health_check_url: typing.Optional[str] = None,
+    ) -> FlashManager:
         """Forward a port to the Modal Flash service, exposing that port as a stable web endpoint.
-
         This is a highly experimental method that can break or be removed at any time without warning.
         Do not use this method unless explicitly instructed to do so by Modal support.
         """
         ...
 
-    async def aio(self, /, port: int, health_check_url: typing.Optional[str] = None) -> FlashManager:
+    async def aio(
+        self,
+        /,
+        port: int,
+        process: typing.Optional[subprocess.Popen] = None,
+        health_check_url: typing.Optional[str] = None,
+    ) -> FlashManager:
         """Forward a port to the Modal Flash service, exposing that port as a stable web endpoint.
-
         This is a highly experimental method that can break or be removed at any time without warning.
         Do not use this method unless explicitly instructed to do so by Modal support.
         """
@@ -74,6 +125,7 @@ class _FlashPrometheusAutoscaler:
         target_metric_value: float,
         min_containers: typing.Optional[int],
         max_containers: typing.Optional[int],
+        buffer_containers: typing.Optional[int],
         scale_up_tolerance: float,
         scale_down_tolerance: float,
         scale_up_stabilization_window_seconds: int,
@@ -85,9 +137,28 @@ class _FlashPrometheusAutoscaler:
 
     async def start(self): ...
     async def _run_autoscaler_loop(self): ...
-    async def _compute_target_containers(self, current_replicas: int) -> int: ...
+    async def _compute_target_containers(self, current_replicas: int) -> int:
+        """Gets metrics from container to autoscale up or down."""
+        ...
+
+    def _calculate_desired_replicas(
+        self,
+        n_current_replicas: int,
+        sum_metric: float,
+        n_containers_with_metrics: int,
+        n_total_containers: int,
+        target_metric_value: float,
+    ) -> int:
+        """Calculate the desired number of replicas to autoscale to."""
+        ...
+
+    async def _get_scaling_info(self, containers) -> tuple[float, int]:
+        """Get metrics using container exposed metrics endpoints."""
+        ...
+
     async def _get_metrics(self, url: str) -> typing.Optional[dict[str, list[typing.Any]]]: ...
     async def _get_all_containers(self): ...
+    async def _set_target_slots(self, target_slots: int): ...
     def _make_scaling_decision(
         self,
         current_replicas: int,
@@ -96,6 +167,7 @@ class _FlashPrometheusAutoscaler:
         scale_down_stabilization_window_seconds: int = 300,
         min_containers: typing.Optional[int] = None,
         max_containers: typing.Optional[int] = None,
+        buffer_containers: typing.Optional[int] = None,
     ) -> int:
         """Return the target number of containers following (simplified) Kubernetes HPA
         stabilization-window semantics.
@@ -128,6 +200,7 @@ class FlashPrometheusAutoscaler:
         target_metric_value: float,
         min_containers: typing.Optional[int],
         max_containers: typing.Optional[int],
+        buffer_containers: typing.Optional[int],
         scale_up_tolerance: float,
         scale_down_tolerance: float,
         scale_up_stabilization_window_seconds: int,
@@ -148,11 +221,38 @@ class FlashPrometheusAutoscaler:
     _run_autoscaler_loop: ___run_autoscaler_loop_spec[typing_extensions.Self]
 
     class ___compute_target_containers_spec(typing_extensions.Protocol[SUPERSELF]):
-        def __call__(self, /, current_replicas: int) -> int: ...
-        async def aio(self, /, current_replicas: int) -> int: ...
+        def __call__(self, /, current_replicas: int) -> int:
+            """Gets metrics from container to autoscale up or down."""
+            ...
+
+        async def aio(self, /, current_replicas: int) -> int:
+            """Gets metrics from container to autoscale up or down."""
+            ...
 
     _compute_target_containers: ___compute_target_containers_spec[typing_extensions.Self]
 
+    def _calculate_desired_replicas(
+        self,
+        n_current_replicas: int,
+        sum_metric: float,
+        n_containers_with_metrics: int,
+        n_total_containers: int,
+        target_metric_value: float,
+    ) -> int:
+        """Calculate the desired number of replicas to autoscale to."""
+        ...
+
+    class ___get_scaling_info_spec(typing_extensions.Protocol[SUPERSELF]):
+        def __call__(self, /, containers) -> tuple[float, int]:
+            """Get metrics using container exposed metrics endpoints."""
+            ...
+
+        async def aio(self, /, containers) -> tuple[float, int]:
+            """Get metrics using container exposed metrics endpoints."""
+            ...
+
+    _get_scaling_info: ___get_scaling_info_spec[typing_extensions.Self]
+
     class ___get_metrics_spec(typing_extensions.Protocol[SUPERSELF]):
         def __call__(self, /, url: str) -> typing.Optional[dict[str, list[typing.Any]]]: ...
         async def aio(self, /, url: str) -> typing.Optional[dict[str, list[typing.Any]]]: ...
@@ -165,6 +265,12 @@ class FlashPrometheusAutoscaler:
 
     _get_all_containers: ___get_all_containers_spec[typing_extensions.Self]
 
+    class ___set_target_slots_spec(typing_extensions.Protocol[SUPERSELF]):
+        def __call__(self, /, target_slots: int): ...
+        async def aio(self, /, target_slots: int): ...
+
+    _set_target_slots: ___set_target_slots_spec[typing_extensions.Self]
+
     def _make_scaling_decision(
         self,
         current_replicas: int,
@@ -173,6 +279,7 @@ class FlashPrometheusAutoscaler:
         scale_down_stabilization_window_seconds: int = 300,
         min_containers: typing.Optional[int] = None,
         max_containers: typing.Optional[int] = None,
+        buffer_containers: typing.Optional[int] = None,
     ) -> int:
         """Return the target number of containers following (simplified) Kubernetes HPA
         stabilization-window semantics.
@@ -214,6 +321,7 @@ class __flash_prometheus_autoscaler_spec(typing_extensions.Protocol):
         scale_up_stabilization_window_seconds: int = 0,
         scale_down_stabilization_window_seconds: int = 300,
         autoscaling_interval_seconds: int = 15,
+        buffer_containers: typing.Optional[int] = None,
     ) -> FlashPrometheusAutoscaler:
         """Autoscale a Flash service based on containers' Prometheus metrics.
 
@@ -239,6 +347,7 @@ class __flash_prometheus_autoscaler_spec(typing_extensions.Protocol):
         scale_up_stabilization_window_seconds: int = 0,
         scale_down_stabilization_window_seconds: int = 300,
         autoscaling_interval_seconds: int = 15,
+        buffer_containers: typing.Optional[int] = None,
    ) -> FlashPrometheusAutoscaler:
         """Autoscale a Flash service based on containers' Prometheus metrics.
 
@@ -250,3 +359,22 @@ class __flash_prometheus_autoscaler_spec(typing_extensions.Protocol):
         ...
 
 flash_prometheus_autoscaler: __flash_prometheus_autoscaler_spec
+
+class __flash_get_containers_spec(typing_extensions.Protocol):
+    def __call__(self, /, app_name: str, cls_name: str) -> list[dict[str, typing.Any]]:
+        """Return a list of flash containers for a deployed Flash service.
+
+        This is a highly experimental method that can break or be removed at any time without warning.
+        Do not use this method unless explicitly instructed to do so by Modal support.
+        """
+        ...
+
+    async def aio(self, /, app_name: str, cls_name: str) -> list[dict[str, typing.Any]]:
+        """Return a list of flash containers for a deployed Flash service.
+
+        This is a highly experimental method that can break or be removed at any time without warning.
+        Do not use this method unless explicitly instructed to do so by Modal support.
+        """
+        ...
+
+flash_get_containers: __flash_get_containers_spec
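
The stub changes above give `_FlashManager`/`FlashManager` a `subprocess.Popen` handle, an `is_port_connection_healthy(process, timeout)` check that returns a `(healthy, error)` pair, and a `_drain_container` background task. As a rough sketch of what a check with that shape can look like (illustrative only, not Modal's implementation; the explicit `port` parameter and the plain TCP probe are assumptions):

```python
import asyncio
import subprocess
from typing import Optional


async def is_port_connection_healthy(
    port: int,  # assumption: the real method presumably knows its own forwarded port
    process: Optional[subprocess.Popen] = None,
    timeout: float = 0.5,
) -> tuple[bool, Optional[Exception]]:
    # A forwarded port cannot be healthy if the process behind it has already exited.
    if process is not None and process.poll() is not None:
        return False, RuntimeError(f"process exited with code {process.returncode}")
    try:
        # Probe the local port with a plain TCP connection, bounded by `timeout`.
        _, writer = await asyncio.wait_for(asyncio.open_connection("127.0.0.1", port), timeout)
        writer.close()
        await writer.wait_closed()
        return True, None
    except (OSError, asyncio.TimeoutError) as exc:
        return False, exc
```

Pairing the result with the returned exception matches the stub's `tuple[bool, typing.Optional[Exception]]` signature, so a caller such as a drain loop can log why a check failed without raising.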
modal/file_io.py CHANGED
@@ -13,7 +13,6 @@ import json
 from grpclib.exceptions import GRPCError, StreamTerminatedError
 
 from modal._utils.async_utils import TaskContext
-from modal._utils.grpc_utils import retry_transient_errors
 from modal.exception import ClientClosed
 from modal_proto import api_pb2
 
@@ -57,8 +56,7 @@ async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optio
     if start is not None and end is not None:
         if start >= end:
             raise ValueError("start must be less than end")
-    resp = await retry_transient_errors(
-        file._client.stub.ContainerFilesystemExec,
+    resp = await file._client.stub.ContainerFilesystemExec(
         api_pb2.ContainerFilesystemExecRequest(
             file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
                 file_descriptor=file._file_descriptor,
@@ -85,8 +83,7 @@ async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = No
             raise InvalidError("start must be less than end")
     if len(data) > WRITE_CHUNK_SIZE:
         raise InvalidError("Write request payload exceeds 16 MiB limit")
-    resp = await retry_transient_errors(
-        file._client.stub.ContainerFilesystemExec,
+    resp = await file._client.stub.ContainerFilesystemExec(
         api_pb2.ContainerFilesystemExecRequest(
             file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
                 file_descriptor=file._file_descriptor,
@@ -128,7 +125,7 @@ class _FileIO(Generic[T]):
 
     **Usage**
 
-    ```python
+    ```python notest
     import modal
 
     app = modal.App.lookup("my-app", create_if_missing=True)
@@ -261,8 +258,7 @@ class _FileIO(Generic[T]):
             raise TypeError("Expected str when in text mode")
 
     async def _open_file(self, path: str, mode: str) -> None:
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
                 task_id=self._task_id,
@@ -285,8 +281,7 @@ class _FileIO(Generic[T]):
         return self
 
     async def _make_read_request(self, n: Optional[int]) -> bytes:
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
                 task_id=self._task_id,
@@ -309,8 +304,7 @@ class _FileIO(Generic[T]):
         """Read a single line from the current position."""
         self._check_closed()
         self._check_readable()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
@@ -351,8 +345,7 @@ class _FileIO(Generic[T]):
             raise ValueError("Write request payload exceeds 1 GiB limit")
         for i in range(0, len(data), WRITE_CHUNK_SIZE):
             chunk = data[i : i + WRITE_CHUNK_SIZE]
-            resp = await retry_transient_errors(
-                self._client.stub.ContainerFilesystemExec,
+            resp = await self._client.stub.ContainerFilesystemExec(
                 api_pb2.ContainerFilesystemExecRequest(
                     file_write_request=api_pb2.ContainerFileWriteRequest(
                         file_descriptor=self._file_descriptor,
@@ -367,8 +360,7 @@ class _FileIO(Generic[T]):
         """Flush the buffer to disk."""
         self._check_closed()
         self._check_writable()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
@@ -393,8 +385,7 @@ class _FileIO(Generic[T]):
         (relative to the current position) and 2 (relative to the file's end).
         """
         self._check_closed()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_seek_request=api_pb2.ContainerFileSeekRequest(
                     file_descriptor=self._file_descriptor,
@@ -410,8 +401,7 @@ class _FileIO(Generic[T]):
     async def ls(cls, path: str, client: _Client, task_id: str) -> list[str]:
         """List the contents of the provided directory."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_ls_request=api_pb2.ContainerFileLsRequest(path=path),
                 task_id=task_id,
@@ -427,8 +417,7 @@ class _FileIO(Generic[T]):
     async def mkdir(cls, path: str, client: _Client, task_id: str, parents: bool = False) -> None:
         """Create a new directory."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_mkdir_request=api_pb2.ContainerFileMkdirRequest(path=path, make_parents=parents),
                 task_id=self._task_id,
@@ -440,8 +429,7 @@ class _FileIO(Generic[T]):
     async def rm(cls, path: str, client: _Client, task_id: str, recursive: bool = False) -> None:
         """Remove a file or directory in the Sandbox."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_rm_request=api_pb2.ContainerFileRmRequest(path=path, recursive=recursive),
                 task_id=self._task_id,
@@ -460,8 +448,7 @@ class _FileIO(Generic[T]):
         timeout: Optional[int] = None,
     ) -> AsyncIterator[FileWatchEvent]:
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_watch_request=api_pb2.ContainerFileWatchRequest(
                     path=path,
@@ -503,8 +490,7 @@ class _FileIO(Generic[T]):
 
     async def _close(self) -> None:
         # Buffer is flushed by the runner on close
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
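
Every call site in `modal/file_io.py` drops its `retry_transient_errors(stub_method, request)` wrapper and now awaits the stub method directly. This diff does not show where retry handling went; given the new `modal/_grpc_client.py` in the file list, one plausible reading is that transient-error retries now happen at the client/stub layer rather than at each call site. A hypothetical sketch of that kind of wrapper (names, error types, and backoff policy are assumptions, not Modal's code):

```python
import asyncio
import random
from typing import Awaitable, Callable, TypeVar

Req = TypeVar("Req")
Resp = TypeVar("Resp")


def with_transient_retries(
    rpc: Callable[[Req], Awaitable[Resp]],
    attempts: int = 3,
    base_delay: float = 0.1,
) -> Callable[[Req], Awaitable[Resp]]:
    """Wrap a unary RPC so callers no longer need per-call retry helpers."""

    async def call(request: Req) -> Resp:
        for attempt in range(attempts):
            try:
                return await rpc(request)
            except ConnectionError:  # stand-in for whatever counts as "transient"
                if attempt == attempts - 1:
                    raise
                # Exponential backoff with jitter before the next attempt.
                await asyncio.sleep(base_delay * (2**attempt) * (1 + random.random()))
        raise AssertionError("unreachable")

    return call
```

With retries attached to the stub method itself, the call sites above reduce to `await stub.ContainerFilesystemExec(request)` without changing observable behavior.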
modal/file_io.pyi CHANGED
@@ -62,7 +62,7 @@ class _FileIO(typing.Generic[T]):
 
     **Usage**
 
-    ```python
+    ```python notest
     import modal
 
     app = modal.App.lookup("my-app", create_if_missing=True)
@@ -232,7 +232,7 @@ class FileIO(typing.Generic[T]):
 
     **Usage**
 
-    ```python
+    ```python notest
     import modal
 
     app = modal.App.lookup("my-app", create_if_missing=True)
modal/file_pattern_matcher.py CHANGED
@@ -11,6 +11,7 @@ then asking it whether file paths match any of its patterns.
 
 import os
 from abc import abstractmethod
+from functools import cached_property
 from pathlib import Path
 from typing import Callable, Optional, Sequence, Union
 
@@ -99,11 +100,11 @@ class FilePatternMatcher(_AbstractPatternMatcher):
     ```
     """
 
-    patterns: list[Pattern]
-    _delayed_init: Callable[[], None] = None
+    _file_path: Optional[Union[str, Path]]
+    _pattern_strings: Optional[Sequence[str]]
 
-    def _set_patterns(self, patterns: Sequence[str]) -> None:
-        self.patterns = []
+    def _parse_patterns(self, patterns: Sequence[str]) -> list[Pattern]:
+        parsed_patterns = []
         for pattern in list(patterns):
             pattern = pattern.strip().strip(os.path.sep)
             if not pattern:
@@ -118,7 +119,8 @@ class FilePatternMatcher(_AbstractPatternMatcher):
             # In Python, we can proceed without explicit syntax checking
             new_pattern.cleaned_pattern = pattern
             new_pattern.dirs = pattern.split(os.path.sep)
-            self.patterns.append(new_pattern)
+            parsed_patterns.append(new_pattern)
+        return parsed_patterns
 
     def __init__(self, *pattern: str) -> None:
         """Initialize a new FilePatternMatcher instance.
@@ -129,7 +131,8 @@ class FilePatternMatcher(_AbstractPatternMatcher):
         Raises:
             ValueError: If an illegal exclusion pattern is provided.
         """
-        self._set_patterns(pattern)
+        self._pattern_strings = pattern
+        self._file_path = None
 
     @classmethod
     def from_file(cls, file_path: Union[str, Path]) -> "FilePatternMatcher":
@@ -148,14 +151,10 @@ class FilePatternMatcher(_AbstractPatternMatcher):
         ```
 
         """
-        uninitialized = cls.__new__(cls)
-
-        def _delayed_init():
-            uninitialized._set_patterns(Path(file_path).read_text("utf8").splitlines())
-            uninitialized._delayed_init = None
-
-        uninitialized._delayed_init = _delayed_init
-        return uninitialized
+        instance = cls.__new__(cls)
+        instance._file_path = file_path
+        instance._pattern_strings = None
+        return instance
 
     def _matches(self, file_path: str) -> bool:
         """Check if the file path or any of its parent directories match the patterns.
@@ -194,6 +193,18 @@ class FilePatternMatcher(_AbstractPatternMatcher):
 
 
         return matched
+    @cached_property
+    def patterns(self) -> list[Pattern]:
+        """Get the patterns, loading from file if necessary."""
+        if self._file_path is not None:
+            # Lazy load from file
+            pattern_strings = Path(self._file_path).read_text("utf8").splitlines()
+        else:
+            # Use patterns provided in __init__
+            pattern_strings = list(self._pattern_strings)
+
+        return self._parse_patterns(pattern_strings)
+
     def can_prune_directories(self) -> bool:
         """
         Returns True if this pattern matcher allows safe early directory pruning.
@@ -205,8 +216,6 @@ class FilePatternMatcher(_AbstractPatternMatcher):
         return not any(pattern.exclusion for pattern in self.patterns)
 
     def __call__(self, file_path: Path) -> bool:
-        if self._delayed_init:
-            self._delayed_init()
         return self._matches(str(file_path))
 
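
The `FilePatternMatcher` change above replaces the `_delayed_init` callback with lazy parsing: the constructor and `from_file` only record the pattern source (`_pattern_strings` or `_file_path`), and a `patterns` property decorated with `functools.cached_property` reads and parses the patterns on first access, caching the result on the instance. A stripped-down sketch of that pattern (illustrative only; the parse step here is simplified to stripping lines, not the real dockerignore-style parsing):

```python
from functools import cached_property
from pathlib import Path
from typing import Optional, Sequence, Union


class LazyPatterns:
    def __init__(self, *patterns: str) -> None:
        # Record the raw strings; nothing is parsed yet.
        self._pattern_strings: Optional[Sequence[str]] = patterns
        self._file_path: Optional[Union[str, Path]] = None

    @classmethod
    def from_file(cls, file_path: Union[str, Path]) -> "LazyPatterns":
        # Record only the path; the file is not read until `.patterns` is used.
        instance = cls.__new__(cls)
        instance._file_path = file_path
        instance._pattern_strings = None
        return instance

    @cached_property
    def patterns(self) -> list[str]:
        if self._file_path is not None:
            lines = Path(self._file_path).read_text("utf8").splitlines()
        else:
            lines = list(self._pattern_strings or [])
        # Parsing happens once; cached_property stores the result on the instance.
        return [line.strip() for line in lines if line.strip()]
```

Compared with the removed `_delayed_init` approach, this keeps `__call__` free of initialization checks while still deferring file reads until a match is actually attempted.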