modal 1.1.5.dev66__py3-none-any.whl → 1.3.1.dev8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

This release has been flagged as potentially problematic.
Files changed (143)
  1. modal/__init__.py +4 -4
  2. modal/__main__.py +4 -29
  3. modal/_billing.py +84 -0
  4. modal/_clustered_functions.py +1 -3
  5. modal/_container_entrypoint.py +33 -208
  6. modal/_functions.py +171 -138
  7. modal/_grpc_client.py +191 -0
  8. modal/_ipython.py +16 -6
  9. modal/_load_context.py +106 -0
  10. modal/_object.py +72 -21
  11. modal/_output.py +12 -14
  12. modal/_partial_function.py +31 -4
  13. modal/_resolver.py +44 -57
  14. modal/_runtime/container_io_manager.py +30 -28
  15. modal/_runtime/container_io_manager.pyi +42 -44
  16. modal/_runtime/gpu_memory_snapshot.py +9 -7
  17. modal/_runtime/user_code_event_loop.py +80 -0
  18. modal/_runtime/user_code_imports.py +236 -10
  19. modal/_serialization.py +2 -1
  20. modal/_traceback.py +4 -13
  21. modal/_tunnel.py +16 -11
  22. modal/_tunnel.pyi +25 -3
  23. modal/_utils/async_utils.py +337 -10
  24. modal/_utils/auth_token_manager.py +1 -4
  25. modal/_utils/blob_utils.py +29 -22
  26. modal/_utils/function_utils.py +20 -21
  27. modal/_utils/grpc_testing.py +6 -3
  28. modal/_utils/grpc_utils.py +223 -64
  29. modal/_utils/mount_utils.py +26 -1
  30. modal/_utils/name_utils.py +2 -3
  31. modal/_utils/package_utils.py +0 -1
  32. modal/_utils/rand_pb_testing.py +8 -1
  33. modal/_utils/task_command_router_client.py +524 -0
  34. modal/_vendor/cloudpickle.py +144 -48
  35. modal/app.py +285 -105
  36. modal/app.pyi +216 -53
  37. modal/billing.py +5 -0
  38. modal/builder/2025.06.txt +6 -3
  39. modal/builder/PREVIEW.txt +2 -1
  40. modal/builder/base-images.json +4 -2
  41. modal/cli/_download.py +19 -3
  42. modal/cli/cluster.py +4 -2
  43. modal/cli/config.py +3 -1
  44. modal/cli/container.py +5 -4
  45. modal/cli/dict.py +5 -2
  46. modal/cli/entry_point.py +26 -2
  47. modal/cli/environment.py +2 -16
  48. modal/cli/launch.py +1 -76
  49. modal/cli/network_file_system.py +5 -20
  50. modal/cli/programs/run_jupyter.py +1 -1
  51. modal/cli/programs/vscode.py +1 -1
  52. modal/cli/queues.py +5 -4
  53. modal/cli/run.py +24 -204
  54. modal/cli/secret.py +1 -2
  55. modal/cli/shell.py +375 -0
  56. modal/cli/utils.py +1 -13
  57. modal/cli/volume.py +11 -17
  58. modal/client.py +16 -125
  59. modal/client.pyi +94 -144
  60. modal/cloud_bucket_mount.py +3 -1
  61. modal/cloud_bucket_mount.pyi +4 -0
  62. modal/cls.py +101 -64
  63. modal/cls.pyi +9 -8
  64. modal/config.py +21 -1
  65. modal/container_process.py +288 -12
  66. modal/container_process.pyi +99 -38
  67. modal/dict.py +72 -33
  68. modal/dict.pyi +88 -57
  69. modal/environments.py +16 -8
  70. modal/environments.pyi +6 -2
  71. modal/exception.py +154 -16
  72. modal/experimental/__init__.py +24 -53
  73. modal/experimental/flash.py +161 -74
  74. modal/experimental/flash.pyi +97 -49
  75. modal/file_io.py +50 -92
  76. modal/file_io.pyi +117 -89
  77. modal/functions.pyi +70 -87
  78. modal/image.py +82 -47
  79. modal/image.pyi +51 -30
  80. modal/io_streams.py +500 -149
  81. modal/io_streams.pyi +279 -189
  82. modal/mount.py +60 -46
  83. modal/mount.pyi +41 -17
  84. modal/network_file_system.py +19 -11
  85. modal/network_file_system.pyi +72 -39
  86. modal/object.pyi +114 -22
  87. modal/parallel_map.py +42 -44
  88. modal/parallel_map.pyi +9 -17
  89. modal/partial_function.pyi +4 -2
  90. modal/proxy.py +14 -6
  91. modal/proxy.pyi +10 -2
  92. modal/queue.py +45 -38
  93. modal/queue.pyi +88 -52
  94. modal/runner.py +96 -96
  95. modal/runner.pyi +44 -27
  96. modal/sandbox.py +225 -107
  97. modal/sandbox.pyi +226 -60
  98. modal/secret.py +58 -56
  99. modal/secret.pyi +28 -13
  100. modal/serving.py +7 -11
  101. modal/serving.pyi +7 -8
  102. modal/snapshot.py +29 -15
  103. modal/snapshot.pyi +18 -10
  104. modal/token_flow.py +1 -1
  105. modal/token_flow.pyi +4 -6
  106. modal/volume.py +102 -55
  107. modal/volume.pyi +125 -66
  108. {modal-1.1.5.dev66.dist-info → modal-1.3.1.dev8.dist-info}/METADATA +10 -9
  109. modal-1.3.1.dev8.dist-info/RECORD +189 -0
  110. modal_proto/api.proto +141 -70
  111. modal_proto/api_grpc.py +42 -26
  112. modal_proto/api_pb2.py +1123 -1103
  113. modal_proto/api_pb2.pyi +331 -83
  114. modal_proto/api_pb2_grpc.py +80 -48
  115. modal_proto/api_pb2_grpc.pyi +26 -18
  116. modal_proto/modal_api_grpc.py +175 -174
  117. modal_proto/task_command_router.proto +164 -0
  118. modal_proto/task_command_router_grpc.py +138 -0
  119. modal_proto/task_command_router_pb2.py +180 -0
  120. modal_proto/{sandbox_router_pb2.pyi → task_command_router_pb2.pyi} +148 -57
  121. modal_proto/task_command_router_pb2_grpc.py +272 -0
  122. modal_proto/task_command_router_pb2_grpc.pyi +100 -0
  123. modal_version/__init__.py +1 -1
  124. modal_version/__main__.py +1 -1
  125. modal/cli/programs/launch_instance_ssh.py +0 -94
  126. modal/cli/programs/run_marimo.py +0 -95
  127. modal-1.1.5.dev66.dist-info/RECORD +0 -191
  128. modal_proto/modal_options_grpc.py +0 -3
  129. modal_proto/options.proto +0 -19
  130. modal_proto/options_grpc.py +0 -3
  131. modal_proto/options_pb2.py +0 -35
  132. modal_proto/options_pb2.pyi +0 -20
  133. modal_proto/options_pb2_grpc.py +0 -4
  134. modal_proto/options_pb2_grpc.pyi +0 -7
  135. modal_proto/sandbox_router.proto +0 -125
  136. modal_proto/sandbox_router_grpc.py +0 -89
  137. modal_proto/sandbox_router_pb2.py +0 -128
  138. modal_proto/sandbox_router_pb2_grpc.py +0 -169
  139. modal_proto/sandbox_router_pb2_grpc.pyi +0 -63
  140. {modal-1.1.5.dev66.dist-info → modal-1.3.1.dev8.dist-info}/WHEEL +0 -0
  141. {modal-1.1.5.dev66.dist-info → modal-1.3.1.dev8.dist-info}/entry_points.txt +0 -0
  142. {modal-1.1.5.dev66.dist-info → modal-1.3.1.dev8.dist-info}/licenses/LICENSE +0 -0
  143. {modal-1.1.5.dev66.dist-info → modal-1.3.1.dev8.dist-info}/top_level.txt +0 -0
modal/file_io.py CHANGED
@@ -10,17 +10,16 @@ if TYPE_CHECKING:
 
 import json
 
-from grpclib.exceptions import GRPCError, StreamTerminatedError
+from grpclib.exceptions import StreamTerminatedError
 
 from modal._utils.async_utils import TaskContext
-from modal._utils.grpc_utils import retry_transient_errors
 from modal.exception import ClientClosed
 from modal_proto import api_pb2
 
 from ._utils.async_utils import synchronize_api
-from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
+from ._utils.deprecation import deprecation_error
 from .client import _Client
-from .exception import FilesystemExecutionError, InvalidError
+from .exception import FilesystemExecutionError, InternalError, ServiceError
 
 WRITE_CHUNK_SIZE = 16 * 1024 * 1024  # 16 MiB
 WRITE_FILE_SIZE_LIMIT = 1024 * 1024 * 1024  # 1 GiB
@@ -47,57 +46,17 @@ T = TypeVar("T", str, bytes)
 
 
 async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optional[int] = None) -> None:
-    """Delete a range of bytes from the file.
-
-    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-    If either is None, the start or end of the file is used, respectively.
+    """mdmd:hidden
+    This method has been removed.
     """
-    assert file._file_descriptor is not None
-    file._check_closed()
-    if start is not None and end is not None:
-        if start >= end:
-            raise ValueError("start must be less than end")
-    resp = await retry_transient_errors(
-        file._client.stub.ContainerFilesystemExec,
-        api_pb2.ContainerFilesystemExecRequest(
-            file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
-                file_descriptor=file._file_descriptor,
-                start_inclusive=start,
-                end_exclusive=end,
-            ),
-            task_id=file._task_id,
-        ),
-    )
-    await file._wait(resp.exec_id)
+    deprecation_error((2025, 12, 3), "delete_bytes has been removed.")
 
 
 async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = None, end: Optional[int] = None) -> None:
-    """Replace a range of bytes in the file with new data. The length of the data does not
-    have to be the same as the length of the range being replaced.
-
-    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-    If either is None, the start or end of the file is used, respectively.
+    """mdmd:hidden
+    This method has been removed.
     """
-    assert file._file_descriptor is not None
-    file._check_closed()
-    if start is not None and end is not None:
-        if start >= end:
-            raise InvalidError("start must be less than end")
-    if len(data) > WRITE_CHUNK_SIZE:
-        raise InvalidError("Write request payload exceeds 16 MiB limit")
-    resp = await retry_transient_errors(
-        file._client.stub.ContainerFilesystemExec,
-        api_pb2.ContainerFilesystemExecRequest(
-            file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
-                file_descriptor=file._file_descriptor,
-                data=data,
-                start_inclusive=start,
-                end_exclusive=end,
-            ),
-            task_id=file._task_id,
-        ),
-    )
-    await file._wait(resp.exec_id)
+    deprecation_error((2025, 12, 3), "replace_bytes has been removed.")
 
 
 class FileWatchEventType(enum.Enum):
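
The two filesystem helpers above are now stubs that call `deprecation_error` with a removal date of 2025-12-03. A minimal caller-side sketch of what that means, assuming `deprecation_error` raises `modal.exception.DeprecationError` (as it does elsewhere in the client); the sandbox setup is illustrative and not part of this diff:

import modal
from modal.exception import DeprecationError
from modal.file_io import delete_bytes

# Illustrative setup only: any running Sandbox with an open file handle works.
app = modal.App.lookup("fileio-demo", create_if_missing=True)
sb = modal.Sandbox.create(app=app)
f = sb.open("/tmp/example.txt", "w")
f.write("hello")
f.flush()

try:
    delete_bytes(f, 0, 2)  # removed as of (2025, 12, 3): no longer edits the byte range
except DeprecationError as exc:
    print("delete_bytes has been removed:", exc)
finally:
    f.close()
    sb.terminate()
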
@@ -206,13 +165,12 @@ class _FileIO(Generic[T]):
                         completed = True
                         break
 
-            except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
+            except (ServiceError, InternalError, StreamTerminatedError, ClientClosed) as exc:
                 if retries_remaining > 0:
                     retries_remaining -= 1
-                    if isinstance(exc, GRPCError):
-                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
-                            await asyncio.sleep(1.0)
-                            continue
+                    if isinstance(exc, (ServiceError, InternalError)):
+                        await asyncio.sleep(1.0)
+                        continue
                     elif isinstance(exc, StreamTerminatedError):
                         continue
                     elif isinstance(exc, ClientClosed):
@@ -230,7 +188,7 @@ class _FileIO(Generic[T]):
 
     async def _wait(self, exec_id: str) -> bytes:
         # The logic here is similar to how output is read from `exec`
-        output = b""
+        output_buffer = io.BytesIO()
         completed = False
         retries_remaining = 10
         while not completed:
@@ -241,18 +199,17 @@ class _FileIO(Generic[T]):
                         break
                     if isinstance(data, Exception):
                         raise data
-                    output += data
-            except (GRPCError, StreamTerminatedError) as exc:
+                    output_buffer.write(data)
+            except (ServiceError, InternalError, StreamTerminatedError) as exc:
                 if retries_remaining > 0:
                     retries_remaining -= 1
-                    if isinstance(exc, GRPCError):
-                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
-                            await asyncio.sleep(1.0)
-                            continue
+                    if isinstance(exc, (ServiceError, InternalError)):
+                        await asyncio.sleep(1.0)
+                        continue
                     elif isinstance(exc, StreamTerminatedError):
                         continue
                 raise
-        return output
+        return output_buffer.getvalue()
 
 
     def _validate_type(self, data: Union[bytes, str]) -> None:
@@ -261,8 +218,7 @@ class _FileIO(Generic[T]):
             raise TypeError("Expected str when in text mode")
 
     async def _open_file(self, path: str, mode: str) -> None:
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
                 task_id=self._task_id,
@@ -285,8 +241,7 @@ class _FileIO(Generic[T]):
         return self
 
     async def _make_read_request(self, n: Optional[int]) -> bytes:
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
                 task_id=self._task_id,
@@ -309,8 +264,7 @@ class _FileIO(Generic[T]):
         """Read a single line from the current position."""
         self._check_closed()
         self._check_readable()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
@@ -351,8 +305,7 @@ class _FileIO(Generic[T]):
             raise ValueError("Write request payload exceeds 1 GiB limit")
         for i in range(0, len(data), WRITE_CHUNK_SIZE):
            chunk = data[i : i + WRITE_CHUNK_SIZE]
-            resp = await retry_transient_errors(
-                self._client.stub.ContainerFilesystemExec,
+            resp = await self._client.stub.ContainerFilesystemExec(
                 api_pb2.ContainerFilesystemExecRequest(
                     file_write_request=api_pb2.ContainerFileWriteRequest(
                         file_descriptor=self._file_descriptor,
@@ -367,8 +320,7 @@ class _FileIO(Generic[T]):
         """Flush the buffer to disk."""
         self._check_closed()
         self._check_writable()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
@@ -393,8 +345,7 @@ class _FileIO(Generic[T]):
         (relative to the current position) and 2 (relative to the file's end).
         """
         self._check_closed()
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_seek_request=api_pb2.ContainerFileSeekRequest(
                     file_descriptor=self._file_descriptor,
@@ -410,8 +361,7 @@ class _FileIO(Generic[T]):
     async def ls(cls, path: str, client: _Client, task_id: str) -> list[str]:
         """List the contents of the provided directory."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_ls_request=api_pb2.ContainerFileLsRequest(path=path),
                 task_id=task_id,
@@ -427,8 +377,7 @@ class _FileIO(Generic[T]):
     async def mkdir(cls, path: str, client: _Client, task_id: str, parents: bool = False) -> None:
         """Create a new directory."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_mkdir_request=api_pb2.ContainerFileMkdirRequest(path=path, make_parents=parents),
                 task_id=self._task_id,
@@ -440,8 +389,7 @@ class _FileIO(Generic[T]):
     async def rm(cls, path: str, client: _Client, task_id: str, recursive: bool = False) -> None:
         """Remove a file or directory in the Sandbox."""
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_rm_request=api_pb2.ContainerFileRmRequest(path=path, recursive=recursive),
                 task_id=self._task_id,
@@ -460,8 +408,7 @@ class _FileIO(Generic[T]):
         timeout: Optional[int] = None,
     ) -> AsyncIterator[FileWatchEvent]:
         self = _FileIO(client, task_id)
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_watch_request=api_pb2.ContainerFileWatchRequest(
                     path=path,
@@ -471,10 +418,22 @@ class _FileIO(Generic[T]):
                 task_id=self._task_id,
             ),
         )
+
+        def end_of_event(item: bytes, buffer: io.BytesIO, boundary_token: bytes) -> bool:
+            if not item.endswith(b"\n"):
+                return False
+            boundary_token_size = len(boundary_token)
+            if buffer.tell() < boundary_token_size:
+                return False
+            buffer.seek(-boundary_token_size, io.SEEK_END)
+            if buffer.read(boundary_token_size) == boundary_token:
+                return True
+            return False
+
         async with TaskContext() as tc:
             tc.create_task(self._consume_watch_output(resp.exec_id))
 
-            buffer = b""
+            item_buffer = io.BytesIO()
             while True:
                 if len(self._watch_output_buffer) > 0:
                     item = self._watch_output_buffer.pop(0)
@@ -482,12 +441,12 @@ class _FileIO(Generic[T]):
                        break
                    if isinstance(item, Exception):
                        raise item
-                    buffer += item
-                    # a single event may be split across multiple messages
-                    # the end of an event is marked by two newlines
-                    if buffer.endswith(b"\n\n"):
+                    item_buffer.write(item)
+                    assert isinstance(item, bytes)
+                    # Single events may span multiple messages so we need to check for a special event boundary token
+                    if end_of_event(item, item_buffer, boundary_token=b"\n\n"):
                         try:
-                            event_json = json.loads(buffer.strip().decode())
+                            event_json = json.loads(item_buffer.getvalue().strip().decode())
                             event = FileWatchEvent(
                                 type=FileWatchEventType(event_json["event_type"]),
                                 paths=event_json["paths"],
@@ -497,14 +456,13 @@ class _FileIO(Generic[T]):
                         except (json.JSONDecodeError, KeyError, ValueError):
                             # skip invalid events
                             pass
-                        buffer = b""
+                        item_buffer = io.BytesIO()
                 else:
                     await asyncio.sleep(0.1)
 
     async def _close(self) -> None:
         # Buffer is flushed by the runner on close
-        resp = await retry_transient_errors(
-            self._client.stub.ContainerFilesystemExec,
+        resp = await self._client.stub.ContainerFilesystemExec(
             api_pb2.ContainerFilesystemExecRequest(
                 file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
                 task_id=self._task_id,
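
The watch hunks above switch from string concatenation to an `io.BytesIO` buffer and use the new `end_of_event` helper to detect the `b"\n\n"` boundary token even when an event is split across messages. A standalone illustration of that check (it mirrors the helper added in the hunk; the sample chunks are made up):

import io


def end_of_event(item: bytes, buffer: io.BytesIO, boundary_token: bytes) -> bool:
    # A chunk can only complete an event if it ends with a newline and the
    # accumulated buffer ends with the boundary token.
    if not item.endswith(b"\n"):
        return False
    boundary_token_size = len(boundary_token)
    if buffer.tell() < boundary_token_size:
        return False
    buffer.seek(-boundary_token_size, io.SEEK_END)
    return buffer.read(boundary_token_size) == boundary_token


buf = io.BytesIO()
for chunk in [b'{"event_type": "Modify", ', b'"paths": ["/tmp/a"]}\n', b"\n"]:
    buf.write(chunk)
    if end_of_event(chunk, buf, boundary_token=b"\n\n"):
        print("complete event:", buf.getvalue().strip().decode())
        buf = io.BytesIO()  # reset for the next event
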
modal/file_io.pyi CHANGED
@@ -7,21 +7,16 @@ import typing_extensions
 T = typing.TypeVar("T")
 
 async def _delete_bytes(file: _FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None:
-    """Delete a range of bytes from the file.
-
-    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-    If either is None, the start or end of the file is used, respectively.
+    """mdmd:hidden
+    This method has been removed.
     """
     ...
 
 async def _replace_bytes(
     file: _FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
 ) -> None:
-    """Replace a range of bytes in the file with new data. The length of the data does not
-    have to be the same as the length of the range being replaced.
-
-    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-    If either is None, the start or end of the file is used, respectively.
+    """mdmd:hidden
+    This method has been removed.
     """
     ...
 
@@ -174,18 +169,14 @@ class _FileIO(typing.Generic[T]):
 
 class __delete_bytes_spec(typing_extensions.Protocol):
     def __call__(self, /, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None:
-        """Delete a range of bytes from the file.
-
-        `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-        If either is None, the start or end of the file is used, respectively.
+        """mdmd:hidden
+        This method has been removed.
         """
         ...
 
     async def aio(self, /, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None:
-        """Delete a range of bytes from the file.
-
-        `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-        If either is None, the start or end of the file is used, respectively.
+        """mdmd:hidden
+        This method has been removed.
        """
         ...
 
@@ -195,29 +186,21 @@ class __replace_bytes_spec(typing_extensions.Protocol):
     def __call__(
         self, /, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
     ) -> None:
-        """Replace a range of bytes in the file with new data. The length of the data does not
-        have to be the same as the length of the range being replaced.
-
-        `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-        If either is None, the start or end of the file is used, respectively.
+        """mdmd:hidden
+        This method has been removed.
         """
         ...
 
     async def aio(
         self, /, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
     ) -> None:
-        """Replace a range of bytes in the file with new data. The length of the data does not
-        have to be the same as the length of the range being replaced.
-
-        `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
-        If either is None, the start or end of the file is used, respectively.
+        """mdmd:hidden
+        This method has been removed.
         """
         ...
 
 replace_bytes: __replace_bytes_spec
 
-SUPERSELF = typing.TypeVar("SUPERSELF", covariant=True)
-
 T_INNER = typing.TypeVar("T_INNER", covariant=True)
 
 class FileIO(typing.Generic[T]):
@@ -252,56 +235,70 @@ class FileIO(typing.Generic[T]):
     def __init__(self, client: modal.client.Client, task_id: str) -> None: ...
     def _validate_mode(self, mode: str) -> None: ...
 
-    class ___consume_output_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___consume_output_spec(typing_extensions.Protocol):
         def __call__(self, /, exec_id: str) -> typing.Iterator[typing.Union[bytes, None, Exception]]: ...
         def aio(self, /, exec_id: str) -> typing.AsyncIterator[typing.Union[bytes, None, Exception]]: ...
 
-    _consume_output: ___consume_output_spec[typing_extensions.Self]
+    _consume_output: ___consume_output_spec
 
-    class ___consume_watch_output_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___consume_watch_output_spec(typing_extensions.Protocol):
         def __call__(self, /, exec_id: str) -> None: ...
         async def aio(self, /, exec_id: str) -> None: ...
 
-    _consume_watch_output: ___consume_watch_output_spec[typing_extensions.Self]
+    _consume_watch_output: ___consume_watch_output_spec
 
-    class ___parse_watch_output_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___parse_watch_output_spec(typing_extensions.Protocol):
         def __call__(self, /, event: bytes) -> typing.Optional[FileWatchEvent]: ...
         async def aio(self, /, event: bytes) -> typing.Optional[FileWatchEvent]: ...
 
-    _parse_watch_output: ___parse_watch_output_spec[typing_extensions.Self]
+    _parse_watch_output: ___parse_watch_output_spec
 
-    class ___wait_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___wait_spec(typing_extensions.Protocol):
         def __call__(self, /, exec_id: str) -> bytes: ...
         async def aio(self, /, exec_id: str) -> bytes: ...
 
-    _wait: ___wait_spec[typing_extensions.Self]
+    _wait: ___wait_spec
 
     def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
 
-    class ___open_file_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___open_file_spec(typing_extensions.Protocol):
         def __call__(self, /, path: str, mode: str) -> None: ...
         async def aio(self, /, path: str, mode: str) -> None: ...
 
-    _open_file: ___open_file_spec[typing_extensions.Self]
+    _open_file: ___open_file_spec
+
+    class __create_spec(typing_extensions.Protocol):
+        def __call__(
+            self,
+            /,
+            path: str,
+            mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
+            client: modal.client.Client,
+            task_id: str,
+        ) -> FileIO:
+            """Create a new FileIO handle."""
+            ...
 
-    @classmethod
-    def create(
-        cls,
-        path: str,
-        mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
-        client: modal.client.Client,
-        task_id: str,
-    ) -> FileIO:
-        """Create a new FileIO handle."""
-        ...
+        async def aio(
+            self,
+            /,
+            path: str,
+            mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
+            client: modal.client.Client,
+            task_id: str,
+        ) -> FileIO:
+            """Create a new FileIO handle."""
+            ...
+
+    create: typing.ClassVar[__create_spec]
 
-    class ___make_read_request_spec(typing_extensions.Protocol[SUPERSELF]):
+    class ___make_read_request_spec(typing_extensions.Protocol):
        def __call__(self, /, n: typing.Optional[int]) -> bytes: ...
        async def aio(self, /, n: typing.Optional[int]) -> bytes: ...
 
-    _make_read_request: ___make_read_request_spec[typing_extensions.Self]
+    _make_read_request: ___make_read_request_spec
 
-    class __read_spec(typing_extensions.Protocol[T_INNER, SUPERSELF]):
+    class __read_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self, /, n: typing.Optional[int] = None) -> T_INNER:
            """Read n bytes from the current position, or the entire remaining file if n is None."""
            ...
@@ -310,9 +307,9 @@ class FileIO(typing.Generic[T]):
            """Read n bytes from the current position, or the entire remaining file if n is None."""
            ...
 
-    read: __read_spec[T, typing_extensions.Self]
+    read: __read_spec[T]
 
-    class __readline_spec(typing_extensions.Protocol[T_INNER, SUPERSELF]):
+    class __readline_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self, /) -> T_INNER:
            """Read a single line from the current position."""
            ...
@@ -321,9 +318,9 @@ class FileIO(typing.Generic[T]):
            """Read a single line from the current position."""
            ...
 
-    readline: __readline_spec[T, typing_extensions.Self]
+    readline: __readline_spec[T]
 
-    class __readlines_spec(typing_extensions.Protocol[T_INNER, SUPERSELF]):
+    class __readlines_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self, /) -> typing.Sequence[T_INNER]:
            """Read all lines from the current position."""
            ...
@@ -332,9 +329,9 @@ class FileIO(typing.Generic[T]):
            """Read all lines from the current position."""
            ...
 
-    readlines: __readlines_spec[T, typing_extensions.Self]
+    readlines: __readlines_spec[T]
 
-    class __write_spec(typing_extensions.Protocol[SUPERSELF]):
+    class __write_spec(typing_extensions.Protocol):
        def __call__(self, /, data: typing.Union[bytes, str]) -> None:
            """Write data to the current position.
 
@@ -353,9 +350,9 @@ class FileIO(typing.Generic[T]):
            """
            ...
 
-    write: __write_spec[typing_extensions.Self]
+    write: __write_spec
 
-    class __flush_spec(typing_extensions.Protocol[SUPERSELF]):
+    class __flush_spec(typing_extensions.Protocol):
        def __call__(self, /) -> None:
            """Flush the buffer to disk."""
            ...
@@ -364,11 +361,11 @@ class FileIO(typing.Generic[T]):
            """Flush the buffer to disk."""
            ...
 
-    flush: __flush_spec[typing_extensions.Self]
+    flush: __flush_spec
 
     def _get_whence(self, whence: int): ...
 
-    class __seek_spec(typing_extensions.Protocol[SUPERSELF]):
+    class __seek_spec(typing_extensions.Protocol):
        def __call__(self, /, offset: int, whence: int = 0) -> None:
            """Move to a new position in the file.
 
@@ -385,41 +382,72 @@ class FileIO(typing.Generic[T]):
            """
            ...
 
-    seek: __seek_spec[typing_extensions.Self]
+    seek: __seek_spec
 
-    @classmethod
-    def ls(cls, path: str, client: modal.client.Client, task_id: str) -> list[str]:
-        """List the contents of the provided directory."""
-        ...
+    class __ls_spec(typing_extensions.Protocol):
+        def __call__(self, /, path: str, client: modal.client.Client, task_id: str) -> list[str]:
+            """List the contents of the provided directory."""
+            ...
 
-    @classmethod
-    def mkdir(cls, path: str, client: modal.client.Client, task_id: str, parents: bool = False) -> None:
-        """Create a new directory."""
-        ...
+        async def aio(self, /, path: str, client: modal.client.Client, task_id: str) -> list[str]:
+            """List the contents of the provided directory."""
+            ...
 
-    @classmethod
-    def rm(cls, path: str, client: modal.client.Client, task_id: str, recursive: bool = False) -> None:
-        """Remove a file or directory in the Sandbox."""
-        ...
+    ls: typing.ClassVar[__ls_spec]
 
-    @classmethod
-    def watch(
-        cls,
-        path: str,
-        client: modal.client.Client,
-        task_id: str,
-        filter: typing.Optional[list[FileWatchEventType]] = None,
-        recursive: bool = False,
-        timeout: typing.Optional[int] = None,
-    ) -> typing.Iterator[FileWatchEvent]: ...
+    class __mkdir_spec(typing_extensions.Protocol):
+        def __call__(self, /, path: str, client: modal.client.Client, task_id: str, parents: bool = False) -> None:
+            """Create a new directory."""
+            ...
+
+        async def aio(self, /, path: str, client: modal.client.Client, task_id: str, parents: bool = False) -> None:
+            """Create a new directory."""
+            ...
+
+    mkdir: typing.ClassVar[__mkdir_spec]
+
+    class __rm_spec(typing_extensions.Protocol):
+        def __call__(self, /, path: str, client: modal.client.Client, task_id: str, recursive: bool = False) -> None:
+            """Remove a file or directory in the Sandbox."""
+            ...
+
+        async def aio(self, /, path: str, client: modal.client.Client, task_id: str, recursive: bool = False) -> None:
+            """Remove a file or directory in the Sandbox."""
+            ...
 
-    class ___close_spec(typing_extensions.Protocol[SUPERSELF]):
+    rm: typing.ClassVar[__rm_spec]
+
+    class __watch_spec(typing_extensions.Protocol):
+        def __call__(
+            self,
+            /,
+            path: str,
+            client: modal.client.Client,
+            task_id: str,
+            filter: typing.Optional[list[FileWatchEventType]] = None,
+            recursive: bool = False,
+            timeout: typing.Optional[int] = None,
+        ) -> typing.Iterator[FileWatchEvent]: ...
+        def aio(
+            self,
+            /,
+            path: str,
+            client: modal.client.Client,
+            task_id: str,
+            filter: typing.Optional[list[FileWatchEventType]] = None,
+            recursive: bool = False,
+            timeout: typing.Optional[int] = None,
+        ) -> typing.AsyncIterator[FileWatchEvent]: ...
+
+    watch: typing.ClassVar[__watch_spec]
+
+    class ___close_spec(typing_extensions.Protocol):
        def __call__(self, /) -> None: ...
        async def aio(self, /) -> None: ...
 
-    _close: ___close_spec[typing_extensions.Self]
+    _close: ___close_spec
 
-    class __close_spec(typing_extensions.Protocol[SUPERSELF]):
+    class __close_spec(typing_extensions.Protocol):
        def __call__(self, /) -> None:
            """Flush the buffer and close the file."""
            ...
@@ -428,7 +456,7 @@ class FileIO(typing.Generic[T]):
            """Flush the buffer and close the file."""
            ...
 
-    close: __close_spec[typing_extensions.Self]
+    close: __close_spec
 
     def _check_writable(self) -> None: ...
     def _check_readable(self) -> None: ...
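
In the stub changes above, the former `create`/`ls`/`mkdir`/`rm`/`watch` classmethods become spec protocols that expose both a blocking `__call__` and an awaitable `.aio` variant, matching the pattern already used for the other synchronized methods. A hedged usage sketch; the client and task id are placeholders, not values from this diff:

import modal
from modal.file_io import FileIO


def list_tmp_blocking(client: modal.Client, task_id: str) -> list[str]:
    # Blocking form: calls the spec's __call__.
    return FileIO.ls("/tmp", client, task_id)


async def list_tmp_async(client: modal.Client, task_id: str) -> list[str]:
    # Async form: the same spec exposes an awaitable .aio variant.
    return await FileIO.ls.aio("/tmp", client, task_id)
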