modal 0.62.16__py3-none-any.whl → 0.72.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. modal/__init__.py +17 -13
  2. modal/__main__.py +41 -3
  3. modal/_clustered_functions.py +80 -0
  4. modal/_clustered_functions.pyi +22 -0
  5. modal/_container_entrypoint.py +420 -937
  6. modal/_ipython.py +3 -13
  7. modal/_location.py +17 -10
  8. modal/_output.py +243 -99
  9. modal/_pty.py +2 -2
  10. modal/_resolver.py +55 -59
  11. modal/_resources.py +51 -0
  12. modal/_runtime/__init__.py +1 -0
  13. modal/_runtime/asgi.py +519 -0
  14. modal/_runtime/container_io_manager.py +1036 -0
  15. modal/_runtime/execution_context.py +89 -0
  16. modal/_runtime/telemetry.py +169 -0
  17. modal/_runtime/user_code_imports.py +356 -0
  18. modal/_serialization.py +134 -9
  19. modal/_traceback.py +47 -187
  20. modal/_tunnel.py +52 -16
  21. modal/_tunnel.pyi +19 -36
  22. modal/_utils/app_utils.py +3 -17
  23. modal/_utils/async_utils.py +479 -100
  24. modal/_utils/blob_utils.py +157 -186
  25. modal/_utils/bytes_io_segment_payload.py +97 -0
  26. modal/_utils/deprecation.py +89 -0
  27. modal/_utils/docker_utils.py +98 -0
  28. modal/_utils/function_utils.py +460 -171
  29. modal/_utils/grpc_testing.py +47 -31
  30. modal/_utils/grpc_utils.py +62 -109
  31. modal/_utils/hash_utils.py +61 -19
  32. modal/_utils/http_utils.py +39 -9
  33. modal/_utils/logger.py +2 -1
  34. modal/_utils/mount_utils.py +34 -16
  35. modal/_utils/name_utils.py +58 -0
  36. modal/_utils/package_utils.py +14 -1
  37. modal/_utils/pattern_utils.py +205 -0
  38. modal/_utils/rand_pb_testing.py +5 -7
  39. modal/_utils/shell_utils.py +15 -49
  40. modal/_vendor/a2wsgi_wsgi.py +62 -72
  41. modal/_vendor/cloudpickle.py +1 -1
  42. modal/_watcher.py +14 -12
  43. modal/app.py +1003 -314
  44. modal/app.pyi +540 -264
  45. modal/call_graph.py +7 -6
  46. modal/cli/_download.py +63 -53
  47. modal/cli/_traceback.py +200 -0
  48. modal/cli/app.py +205 -45
  49. modal/cli/config.py +12 -5
  50. modal/cli/container.py +62 -14
  51. modal/cli/dict.py +128 -0
  52. modal/cli/entry_point.py +26 -13
  53. modal/cli/environment.py +40 -9
  54. modal/cli/import_refs.py +64 -58
  55. modal/cli/launch.py +32 -18
  56. modal/cli/network_file_system.py +64 -83
  57. modal/cli/profile.py +1 -1
  58. modal/cli/programs/run_jupyter.py +35 -10
  59. modal/cli/programs/vscode.py +60 -10
  60. modal/cli/queues.py +131 -0
  61. modal/cli/run.py +234 -131
  62. modal/cli/secret.py +8 -7
  63. modal/cli/token.py +7 -2
  64. modal/cli/utils.py +79 -10
  65. modal/cli/volume.py +110 -109
  66. modal/client.py +250 -144
  67. modal/client.pyi +157 -118
  68. modal/cloud_bucket_mount.py +108 -34
  69. modal/cloud_bucket_mount.pyi +32 -38
  70. modal/cls.py +535 -148
  71. modal/cls.pyi +190 -146
  72. modal/config.py +41 -19
  73. modal/container_process.py +177 -0
  74. modal/container_process.pyi +82 -0
  75. modal/dict.py +111 -65
  76. modal/dict.pyi +136 -131
  77. modal/environments.py +106 -5
  78. modal/environments.pyi +77 -25
  79. modal/exception.py +34 -43
  80. modal/experimental.py +61 -2
  81. modal/extensions/ipython.py +5 -5
  82. modal/file_io.py +537 -0
  83. modal/file_io.pyi +235 -0
  84. modal/file_pattern_matcher.py +197 -0
  85. modal/functions.py +906 -911
  86. modal/functions.pyi +466 -430
  87. modal/gpu.py +57 -44
  88. modal/image.py +1089 -479
  89. modal/image.pyi +584 -228
  90. modal/io_streams.py +434 -0
  91. modal/io_streams.pyi +122 -0
  92. modal/mount.py +314 -101
  93. modal/mount.pyi +241 -235
  94. modal/network_file_system.py +92 -92
  95. modal/network_file_system.pyi +152 -110
  96. modal/object.py +67 -36
  97. modal/object.pyi +166 -143
  98. modal/output.py +63 -0
  99. modal/parallel_map.py +434 -0
  100. modal/parallel_map.pyi +75 -0
  101. modal/partial_function.py +282 -117
  102. modal/partial_function.pyi +222 -129
  103. modal/proxy.py +15 -12
  104. modal/proxy.pyi +3 -8
  105. modal/queue.py +182 -65
  106. modal/queue.pyi +218 -118
  107. modal/requirements/2024.04.txt +29 -0
  108. modal/requirements/2024.10.txt +16 -0
  109. modal/requirements/README.md +21 -0
  110. modal/requirements/base-images.json +22 -0
  111. modal/retries.py +48 -7
  112. modal/runner.py +459 -156
  113. modal/runner.pyi +135 -71
  114. modal/running_app.py +38 -0
  115. modal/sandbox.py +514 -236
  116. modal/sandbox.pyi +397 -169
  117. modal/schedule.py +4 -4
  118. modal/scheduler_placement.py +20 -3
  119. modal/secret.py +56 -31
  120. modal/secret.pyi +62 -42
  121. modal/serving.py +51 -56
  122. modal/serving.pyi +44 -36
  123. modal/stream_type.py +15 -0
  124. modal/token_flow.py +5 -3
  125. modal/token_flow.pyi +37 -32
  126. modal/volume.py +285 -157
  127. modal/volume.pyi +249 -184
  128. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/METADATA +7 -7
  129. modal-0.72.11.dist-info/RECORD +174 -0
  130. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/top_level.txt +0 -1
  131. modal_docs/gen_reference_docs.py +3 -1
  132. modal_docs/mdmd/mdmd.py +0 -1
  133. modal_docs/mdmd/signatures.py +5 -2
  134. modal_global_objects/images/base_images.py +28 -0
  135. modal_global_objects/mounts/python_standalone.py +2 -2
  136. modal_proto/__init__.py +1 -1
  137. modal_proto/api.proto +1288 -533
  138. modal_proto/api_grpc.py +856 -456
  139. modal_proto/api_pb2.py +2165 -1157
  140. modal_proto/api_pb2.pyi +8859 -0
  141. modal_proto/api_pb2_grpc.py +1674 -855
  142. modal_proto/api_pb2_grpc.pyi +1416 -0
  143. modal_proto/modal_api_grpc.py +149 -0
  144. modal_proto/modal_options_grpc.py +3 -0
  145. modal_proto/options_pb2.pyi +20 -0
  146. modal_proto/options_pb2_grpc.pyi +7 -0
  147. modal_proto/py.typed +0 -0
  148. modal_version/__init__.py +1 -1
  149. modal_version/_version_generated.py +2 -2
  150. modal/_asgi.py +0 -370
  151. modal/_container_entrypoint.pyi +0 -378
  152. modal/_container_exec.py +0 -128
  153. modal/_sandbox_shell.py +0 -49
  154. modal/shared_volume.py +0 -23
  155. modal/shared_volume.pyi +0 -24
  156. modal/stub.py +0 -783
  157. modal/stub.pyi +0 -332
  158. modal-0.62.16.dist-info/RECORD +0 -198
  159. modal_global_objects/images/conda.py +0 -15
  160. modal_global_objects/images/debian_slim.py +0 -15
  161. modal_global_objects/images/micromamba.py +0 -15
  162. test/__init__.py +0 -1
  163. test/aio_test.py +0 -12
  164. test/async_utils_test.py +0 -262
  165. test/blob_test.py +0 -67
  166. test/cli_imports_test.py +0 -149
  167. test/cli_test.py +0 -659
  168. test/client_test.py +0 -194
  169. test/cls_test.py +0 -630
  170. test/config_test.py +0 -137
  171. test/conftest.py +0 -1420
  172. test/container_app_test.py +0 -32
  173. test/container_test.py +0 -1389
  174. test/cpu_test.py +0 -23
  175. test/decorator_test.py +0 -85
  176. test/deprecation_test.py +0 -34
  177. test/dict_test.py +0 -33
  178. test/e2e_test.py +0 -68
  179. test/error_test.py +0 -7
  180. test/function_serialization_test.py +0 -32
  181. test/function_test.py +0 -653
  182. test/function_utils_test.py +0 -101
  183. test/gpu_test.py +0 -159
  184. test/grpc_utils_test.py +0 -141
  185. test/helpers.py +0 -42
  186. test/image_test.py +0 -669
  187. test/live_reload_test.py +0 -80
  188. test/lookup_test.py +0 -70
  189. test/mdmd_test.py +0 -329
  190. test/mount_test.py +0 -162
  191. test/mounted_files_test.py +0 -329
  192. test/network_file_system_test.py +0 -181
  193. test/notebook_test.py +0 -66
  194. test/object_test.py +0 -41
  195. test/package_utils_test.py +0 -25
  196. test/queue_test.py +0 -97
  197. test/resolver_test.py +0 -58
  198. test/retries_test.py +0 -67
  199. test/runner_test.py +0 -85
  200. test/sandbox_test.py +0 -191
  201. test/schedule_test.py +0 -15
  202. test/scheduler_placement_test.py +0 -29
  203. test/secret_test.py +0 -78
  204. test/serialization_test.py +0 -42
  205. test/stub_composition_test.py +0 -10
  206. test/stub_test.py +0 -360
  207. test/test_asgi_wrapper.py +0 -234
  208. test/token_flow_test.py +0 -18
  209. test/traceback_test.py +0 -135
  210. test/tunnel_test.py +0 -29
  211. test/utils_test.py +0 -88
  212. test/version_test.py +0 -14
  213. test/volume_test.py +0 -341
  214. test/watcher_test.py +0 -30
  215. test/webhook_test.py +0 -146
  216. /modal/{requirements.312.txt → requirements/2023.12.312.txt} +0 -0
  217. /modal/{requirements.txt → requirements/2023.12.txt} +0 -0
  218. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/LICENSE +0 -0
  219. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/WHEEL +0 -0
  220. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/entry_points.txt +0 -0
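Among the structural changes listed above, `modal/stub.py` and `modal/stub.pyi` are removed while `modal/app.py` is heavily rewritten, which reflects the deprecation of `modal.Stub` in favor of `modal.App` over this version range. A minimal before/after sketch of that rename (illustrative only; the decorator surface shown here is unchanged between versions):

```python
import modal

# Before (modal 0.62.x): applications were declared with modal.Stub
#
#     stub = modal.Stub("example-app")
#
#     @stub.function()
#     def hello() -> str:
#         return "hello"

# After (modal 0.72.x): modal.App replaces modal.Stub
app = modal.App("example-app")

@app.function()
def hello() -> str:
    return "hello"
```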
modal/file_io.py ADDED
@@ -0,0 +1,537 @@
+# Copyright Modal Labs 2024
+import asyncio
+import enum
+import io
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, AsyncIterator, Generic, Optional, Sequence, TypeVar, Union, cast
+
+if TYPE_CHECKING:
+    import _typeshed
+
+import json
+
+from grpclib.exceptions import GRPCError, StreamTerminatedError
+
+from modal._utils.async_utils import TaskContext
+from modal._utils.grpc_utils import retry_transient_errors
+from modal.exception import ClientClosed
+from modal_proto import api_pb2
+
+from ._utils.async_utils import synchronize_api
+from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
+from .client import _Client
+from .exception import FilesystemExecutionError, InvalidError
+
+WRITE_CHUNK_SIZE = 16 * 1024 * 1024  # 16 MiB
+WRITE_FILE_SIZE_LIMIT = 1024 * 1024 * 1024  # 1 GiB
+READ_FILE_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MiB
+
+ERROR_MAPPING = {
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_UNSPECIFIED: FilesystemExecutionError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_PERM: PermissionError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOENT: FileNotFoundError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_IO: IOError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NXIO: IOError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOMEM: MemoryError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ACCES: PermissionError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_EXIST: FileExistsError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOTDIR: NotADirectoryError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ISDIR: IsADirectoryError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_INVAL: OSError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_MFILE: OSError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_FBIG: OSError,
+    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOSPC: OSError,
+}
+
+T = TypeVar("T", str, bytes)
+
+
+async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optional[int] = None) -> None:
+    """Delete a range of bytes from the file.
+
+    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
+    If either is None, the start or end of the file is used, respectively.
+    """
+    assert file._file_descriptor is not None
+    file._check_closed()
+    if start is not None and end is not None:
+        if start >= end:
+            raise ValueError("start must be less than end")
+    resp = await retry_transient_errors(
+        file._client.stub.ContainerFilesystemExec,
+        api_pb2.ContainerFilesystemExecRequest(
+            file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
+                file_descriptor=file._file_descriptor,
+                start_inclusive=start,
+                end_exclusive=end,
+            ),
+            task_id=file._task_id,
+        ),
+    )
+    await file._wait(resp.exec_id)
+
+
+async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = None, end: Optional[int] = None) -> None:
+    """Replace a range of bytes in the file with new data. The length of the data does not
+    have to be the same as the length of the range being replaced.
+
+    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
+    If either is None, the start or end of the file is used, respectively.
+    """
+    assert file._file_descriptor is not None
+    file._check_closed()
+    if start is not None and end is not None:
+        if start >= end:
+            raise InvalidError("start must be less than end")
+    if len(data) > WRITE_CHUNK_SIZE:
+        raise InvalidError("Write request payload exceeds 16 MiB limit")
+    resp = await retry_transient_errors(
+        file._client.stub.ContainerFilesystemExec,
+        api_pb2.ContainerFilesystemExecRequest(
+            file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
+                file_descriptor=file._file_descriptor,
+                data=data,
+                start_inclusive=start,
+                end_exclusive=end,
+            ),
+            task_id=file._task_id,
+        ),
+    )
+    await file._wait(resp.exec_id)
+
+
+class FileWatchEventType(enum.Enum):
+    Unknown = "Unknown"
+    Access = "Access"
+    Create = "Create"
+    Modify = "Modify"
+    Remove = "Remove"
+
+
+@dataclass
+class FileWatchEvent:
+    paths: list[str]
+    type: FileWatchEventType
+
+
+# The FileIO class is designed to mimic Python's io.FileIO
+# See https://github.com/python/cpython/blob/main/Lib/_pyio.py#L1459
+class _FileIO(Generic[T]):
+    """FileIO handle, used in the Sandbox filesystem API.
+
+    The API is designed to mimic Python's io.FileIO.
+
+    **Usage**
+
+    ```python
+    import modal
+
+    app = modal.App.lookup("my-app", create_if_missing=True)
+
+    sb = modal.Sandbox.create(app=app)
+    f = sb.open("/tmp/foo.txt", "w")
+    f.write("hello")
+    f.close()
+    ```
+    """
+
+    _binary = False
+    _readable = False
+    _writable = False
+    _appended = False
+    _closed = True
+
+    _task_id: str = ""
+    _file_descriptor: str = ""
+    _client: _Client
+    _watch_output_buffer: list[Optional[bytes]] = []
+
+    def __init__(self, client: _Client, task_id: str) -> None:
+        self._client = client
+        self._task_id = task_id
+
+    def _validate_mode(self, mode: str) -> None:
+        if not any(char in mode for char in "rwax"):
+            raise ValueError(f"Invalid file mode: {mode}")
+
+        self._readable = "r" in mode or "+" in mode
+        self._writable = "w" in mode or "a" in mode or "x" in mode or "+" in mode
+        self._appended = "a" in mode
+        self._binary = "b" in mode
+
+        valid_chars = set("rwaxb+")
+        if any(char not in valid_chars for char in mode):
+            raise ValueError(f"Invalid file mode: {mode}")
+
+        mode_count = sum(1 for c in mode if c in "rwax")
+        if mode_count > 1:
+            raise ValueError("must have exactly one of create/read/write/append mode")
+
+        seen_chars = set()
+        for char in mode:
+            if char in seen_chars:
+                raise ValueError(f"Invalid file mode: {mode}")
+            seen_chars.add(char)
+
+    def _handle_error(self, error: api_pb2.SystemErrorMessage) -> None:
+        error_class = ERROR_MAPPING.get(error.error_code, FilesystemExecutionError)
+        raise error_class(error.error_message)
+
+    async def _consume_output(self, exec_id: str) -> AsyncIterator[Optional[bytes]]:
+        req = api_pb2.ContainerFilesystemExecGetOutputRequest(
+            exec_id=exec_id,
+            timeout=55,
+        )
+        async for batch in self._client.stub.ContainerFilesystemExecGetOutput.unary_stream(req):
+            if batch.eof:
+                yield None
+                break
+            if batch.HasField("error"):
+                self._handle_error(batch.error)
+            for message in batch.output:
+                yield message
+
+    async def _consume_watch_output(self, exec_id: str) -> None:
+        completed = False
+        retries_remaining = 10
+        while not completed:
+            try:
+                iterator = self._consume_output(exec_id)
+                async for message in iterator:
+                    self._watch_output_buffer.append(message)
+                    if message is None:
+                        completed = True
+                        break
+
+            except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
+                if retries_remaining > 0:
+                    retries_remaining -= 1
+                    if isinstance(exc, GRPCError):
+                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
+                            await asyncio.sleep(1.0)
+                            continue
+                    elif isinstance(exc, StreamTerminatedError):
+                        continue
+                    elif isinstance(exc, ClientClosed):
+                        # If the client was closed, the user has triggered a cleanup.
+                        break
+                raise exc
+
+    async def _parse_watch_output(self, event: bytes) -> Optional[FileWatchEvent]:
+        try:
+            event_json = json.loads(event.decode())
+            return FileWatchEvent(type=FileWatchEventType(event_json["event_type"]), paths=event_json["paths"])
+        except (json.JSONDecodeError, KeyError, ValueError):
+            # skip invalid events
+            return None
+
+    async def _wait(self, exec_id: str) -> bytes:
+        # The logic here is similar to how output is read from `exec`
+        output = b""
+        completed = False
+        retries_remaining = 10
+        while not completed:
+            try:
+                async for data in self._consume_output(exec_id):
+                    if data is None:
+                        completed = True
+                        break
+                    output += data
+            except (GRPCError, StreamTerminatedError) as exc:
+                if retries_remaining > 0:
+                    retries_remaining -= 1
+                    if isinstance(exc, GRPCError):
+                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
+                            await asyncio.sleep(1.0)
+                            continue
+                    elif isinstance(exc, StreamTerminatedError):
+                        continue
+                raise
+        return output
+
+    def _validate_type(self, data: Union[bytes, str]) -> None:
+        if self._binary and isinstance(data, str):
+            raise TypeError("Expected bytes when in binary mode")
+        if not self._binary and isinstance(data, bytes):
+            raise TypeError("Expected str when in text mode")
+
+    async def _open_file(self, path: str, mode: str) -> None:
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
+                task_id=self._task_id,
+            ),
+        )
+        if not resp.HasField("file_descriptor"):
+            raise FilesystemExecutionError("Failed to open file")
+        self._file_descriptor = resp.file_descriptor
+        await self._wait(resp.exec_id)
+
+    @classmethod
+    async def create(
+        cls, path: str, mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"], client: _Client, task_id: str
+    ) -> "_FileIO":
+        """Create a new FileIO handle."""
+        self = _FileIO(client, task_id)
+        self._validate_mode(mode)
+        await self._open_file(path, mode)
+        self._closed = False
+        return self
+
+    async def _make_read_request(self, n: Optional[int]) -> bytes:
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
+                task_id=self._task_id,
+            ),
+        )
+        return await self._wait(resp.exec_id)
+
+    async def read(self, n: Optional[int] = None) -> T:
+        """Read n bytes from the current position, or the entire remaining file if n is None."""
+        self._check_closed()
+        self._check_readable()
+        if n is not None and n > READ_FILE_SIZE_LIMIT:
+            raise ValueError("Read request payload exceeds 100 MiB limit")
+        output = await self._make_read_request(n)
+        if self._binary:
+            return cast(T, output)
+        return cast(T, output.decode("utf-8"))
+
+    async def readline(self) -> T:
+        """Read a single line from the current position."""
+        self._check_closed()
+        self._check_readable()
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
+                task_id=self._task_id,
+            ),
+        )
+        output = await self._wait(resp.exec_id)
+        if self._binary:
+            return cast(T, output)
+        return cast(T, output.decode("utf-8"))
+
+    async def readlines(self) -> Sequence[T]:
+        """Read all lines from the current position."""
+        self._check_closed()
+        self._check_readable()
+        output = await self._make_read_request(None)
+        if self._binary:
+            lines_bytes = output.split(b"\n")
+            return_bytes = [line + b"\n" for line in lines_bytes[:-1]] + ([lines_bytes[-1]] if lines_bytes[-1] else [])
+            return cast(Sequence[T], return_bytes)
+        else:
+            lines = output.decode("utf-8").split("\n")
+            return_strs = [line + "\n" for line in lines[:-1]] + ([lines[-1]] if lines[-1] else [])
+            return cast(Sequence[T], return_strs)
+
+    async def write(self, data: Union[bytes, str]) -> None:
+        """Write data to the current position.
+
+        Writes may not appear until the entire buffer is flushed, which
+        can be done manually with `flush()` or automatically when the file is
+        closed.
+        """
+        self._check_closed()
+        self._check_writable()
+        self._validate_type(data)
+        if isinstance(data, str):
+            data = data.encode("utf-8")
+        if len(data) > WRITE_FILE_SIZE_LIMIT:
+            raise ValueError("Write request payload exceeds 1 GiB limit")
+        for i in range(0, len(data), WRITE_CHUNK_SIZE):
+            chunk = data[i : i + WRITE_CHUNK_SIZE]
+            resp = await retry_transient_errors(
+                self._client.stub.ContainerFilesystemExec,
+                api_pb2.ContainerFilesystemExecRequest(
+                    file_write_request=api_pb2.ContainerFileWriteRequest(
+                        file_descriptor=self._file_descriptor,
+                        data=chunk,
+                    ),
+                    task_id=self._task_id,
+                ),
+            )
+            await self._wait(resp.exec_id)
+
+    async def flush(self) -> None:
+        """Flush the buffer to disk."""
+        self._check_closed()
+        self._check_writable()
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
+                task_id=self._task_id,
+            ),
+        )
+        await self._wait(resp.exec_id)
+
+    def _get_whence(self, whence: int):
+        if whence == 0:
+            return api_pb2.SeekWhence.SEEK_SET
+        elif whence == 1:
+            return api_pb2.SeekWhence.SEEK_CUR
+        elif whence == 2:
+            return api_pb2.SeekWhence.SEEK_END
+        else:
+            raise ValueError(f"Invalid whence value: {whence}")
+
+    async def seek(self, offset: int, whence: int = 0) -> None:
+        """Move to a new position in the file.
+
+        `whence` defaults to 0 (absolute file positioning); other values are 1
+        (relative to the current position) and 2 (relative to the file's end).
+        """
+        self._check_closed()
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_seek_request=api_pb2.ContainerFileSeekRequest(
+                    file_descriptor=self._file_descriptor,
+                    offset=offset,
+                    whence=self._get_whence(whence),
+                ),
+                task_id=self._task_id,
+            ),
+        )
+        await self._wait(resp.exec_id)
+
+    @classmethod
+    async def ls(cls, path: str, client: _Client, task_id: str) -> list[str]:
+        """List the contents of the provided directory."""
+        self = _FileIO(client, task_id)
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_ls_request=api_pb2.ContainerFileLsRequest(path=path),
+                task_id=task_id,
+            ),
+        )
+        output = await self._wait(resp.exec_id)
+        try:
+            return json.loads(output.decode("utf-8"))["paths"]
+        except json.JSONDecodeError:
+            raise FilesystemExecutionError("failed to parse list output")
+
+    @classmethod
+    async def mkdir(cls, path: str, client: _Client, task_id: str, parents: bool = False) -> None:
+        """Create a new directory."""
+        self = _FileIO(client, task_id)
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_mkdir_request=api_pb2.ContainerFileMkdirRequest(path=path, make_parents=parents),
+                task_id=self._task_id,
+            ),
+        )
+        await self._wait(resp.exec_id)
+
+    @classmethod
+    async def rm(cls, path: str, client: _Client, task_id: str, recursive: bool = False) -> None:
+        """Remove a file or directory in the Sandbox."""
+        self = _FileIO(client, task_id)
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_rm_request=api_pb2.ContainerFileRmRequest(path=path, recursive=recursive),
+                task_id=self._task_id,
+            ),
+        )
+        await self._wait(resp.exec_id)
+
+    @classmethod
+    async def watch(
+        cls,
+        path: str,
+        client: _Client,
+        task_id: str,
+        filter: Optional[list[FileWatchEventType]] = None,
+        recursive: bool = False,
+        timeout: Optional[int] = None,
+    ) -> AsyncIterator[FileWatchEvent]:
+        self = _FileIO(client, task_id)
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_watch_request=api_pb2.ContainerFileWatchRequest(
+                    path=path,
+                    recursive=recursive,
+                    timeout_secs=timeout,
+                ),
+                task_id=self._task_id,
+            ),
+        )
+        async with TaskContext() as tc:
+            tc.create_task(self._consume_watch_output(resp.exec_id))
+
+            buffer = b""
+            while True:
+                if len(self._watch_output_buffer) > 0:
+                    item = self._watch_output_buffer.pop(0)
+                    if item is None:
+                        break
+                    buffer += item
+                    # a single event may be split across multiple messages
+                    # the end of an event is marked by two newlines
+                    if buffer.endswith(b"\n\n"):
+                        try:
+                            event_json = json.loads(buffer.strip().decode())
+                            event = FileWatchEvent(
+                                type=FileWatchEventType(event_json["event_type"]),
+                                paths=event_json["paths"],
+                            )
+                            if not filter or event.type in filter:
+                                yield event
+                        except (json.JSONDecodeError, KeyError, ValueError):
+                            # skip invalid events
+                            pass
+                        buffer = b""
+                else:
+                    await asyncio.sleep(0.1)
+
+    async def _close(self) -> None:
+        # Buffer is flushed by the runner on close
+        resp = await retry_transient_errors(
+            self._client.stub.ContainerFilesystemExec,
+            api_pb2.ContainerFilesystemExecRequest(
+                file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
+                task_id=self._task_id,
+            ),
+        )
+        self._closed = True
+        await self._wait(resp.exec_id)
+
+    async def close(self) -> None:
+        """Flush the buffer and close the file."""
+        await self._close()
+
+    # also validated in the runner, but checked in the client to catch errors early
+    def _check_writable(self) -> None:
+        if not self._writable:
+            raise io.UnsupportedOperation("not writeable")
+
+    # also validated in the runner, but checked in the client to catch errors early
+    def _check_readable(self) -> None:
+        if not self._readable:
+            raise io.UnsupportedOperation("not readable")
+
+    # also validated in the runner, but checked in the client to catch errors early
+    def _check_closed(self) -> None:
+        if self._closed:
+            raise ValueError("I/O operation on closed file")
+
+    async def __aenter__(self) -> "_FileIO":
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback) -> None:
+        await self._close()
+
+
+delete_bytes = synchronize_api(_delete_bytes)
+replace_bytes = synchronize_api(_replace_bytes)
+FileIO = synchronize_api(_FileIO)
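The synchronized `FileIO` wrapper at the bottom of this file is what the new Sandbox filesystem API hands back from the blocking interface. A minimal sketch of how it is used, extrapolated from the `_FileIO` docstring above; only `sb.open` appears in this file, so treat the surrounding Sandbox calls (`Sandbox.create`, `sb.terminate`) as assumptions about the rest of this release:

```python
import modal

# Look up (or create) an app and start a sandbox attached to it.
app = modal.App.lookup("my-app", create_if_missing=True)
sb = modal.Sandbox.create(app=app)

# Write a file inside the sandbox, then read it back.
f = sb.open("/tmp/notes.txt", "w")
f.write("hello from the sandbox\n")
f.close()

f = sb.open("/tmp/notes.txt", "r")
print(f.read())  # -> "hello from the sandbox\n"
f.close()

sb.terminate()
```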