modal 0.67.43__py3-none-any.whl → 0.68.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modal/_container_entrypoint.py +3 -0
- modal/_runtime/container_io_manager.py +3 -0
- modal/_traceback.py +16 -2
- modal/cli/_traceback.py +11 -4
- modal/cli/run.py +1 -10
- modal/client.py +6 -37
- modal/client.pyi +2 -6
- modal/cls.py +4 -4
- modal/exception.py +20 -0
- modal/file_io.py +380 -0
- modal/file_io.pyi +185 -0
- modal/functions.py +3 -0
- modal/partial_function.py +14 -10
- modal/partial_function.pyi +2 -2
- modal/runner.py +5 -4
- modal/runner.pyi +2 -1
- modal/sandbox.py +40 -0
- modal/sandbox.pyi +18 -0
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/METADATA +2 -2
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/RECORD +30 -28
- modal_docs/gen_reference_docs.py +1 -0
- modal_proto/api.proto +18 -1
- modal_proto/api_pb2.py +748 -718
- modal_proto/api_pb2.pyi +69 -10
- modal_version/__init__.py +1 -1
- modal_version/_version_generated.py +1 -1
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/LICENSE +0 -0
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/WHEEL +0 -0
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/entry_points.txt +0 -0
- {modal-0.67.43.dist-info → modal-0.68.4.dist-info}/top_level.txt +0 -0
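The headline addition in this release is a Sandbox filesystem API: a new `modal/file_io.py` module (with type stub), a new `FilesystemExecutionError` in `modal/exception.py`, and `Sandbox.open` support in `modal/sandbox.py`. Below is a minimal sketch of the new surface, based on the usage example in the docstring of the added `modal/file_io.py`; the app name and file path are placeholders, and the context-manager form is inferred from the added `__enter__`/`__exit__` methods.

```python
import modal

app = modal.App.lookup("my-app", create_if_missing=True)
sb = modal.Sandbox.create(app=app)

# Sandbox.open returns a FileIO handle that mimics Python's io.FileIO.
f = sb.open("/tmp/foo.txt", "w")
f.write("hello")
f.close()

# Text mode ("r") returns str, binary mode ("rb") returns bytes.
with sb.open("/tmp/foo.txt", "r") as f:
    print(f.read())  # "hello"

sb.terminate()
```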
modal/file_io.py
ADDED
@@ -0,0 +1,380 @@
# Copyright Modal Labs 2024
import asyncio
import io
from typing import TYPE_CHECKING, AsyncIterator, Generic, Optional, Sequence, TypeVar, Union, cast

if TYPE_CHECKING:
    import _typeshed

from grpclib.exceptions import GRPCError, StreamTerminatedError

from modal._utils.grpc_utils import retry_transient_errors
from modal_proto import api_pb2

from ._utils.async_utils import synchronize_api
from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
from .client import _Client
from .exception import FilesystemExecutionError, InvalidError

LARGE_FILE_SIZE_LIMIT = 16 * 1024 * 1024  # 16 MiB
READ_FILE_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MiB

ERROR_MAPPING = {
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_UNSPECIFIED: FilesystemExecutionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_PERM: PermissionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOENT: FileNotFoundError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_IO: IOError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NXIO: IOError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOMEM: MemoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ACCES: PermissionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_EXIST: FileExistsError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOTDIR: NotADirectoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ISDIR: IsADirectoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_INVAL: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_MFILE: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_FBIG: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOSPC: OSError,
}

T = TypeVar("T", str, bytes)


async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optional[int] = None) -> None:
    """Delete a range of bytes from the file.

    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
    If either is None, the start or end of the file is used, respectively.
    """
    assert file._file_descriptor is not None
    file._check_closed()
    if start is not None and end is not None:
        if start >= end:
            raise ValueError("start must be less than end")
    resp = await file._make_request(
        api_pb2.ContainerFilesystemExecRequest(
            file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
                file_descriptor=file._file_descriptor,
                start_inclusive=start,
                end_exclusive=end,
            ),
            task_id=file._task_id,
        )
    )
    await file._wait(resp.exec_id)


async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = None, end: Optional[int] = None) -> None:
    """Replace a range of bytes in the file with new data. The length of the data does not
    have to be the same as the length of the range being replaced.

    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
    If either is None, the start or end of the file is used, respectively.
    """
    assert file._file_descriptor is not None
    file._check_closed()
    if start is not None and end is not None:
        if start >= end:
            raise InvalidError("start must be less than end")
    if len(data) > LARGE_FILE_SIZE_LIMIT:
        raise InvalidError("Write request payload exceeds 16 MiB limit")
    resp = await file._make_request(
        api_pb2.ContainerFilesystemExecRequest(
            file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
                file_descriptor=file._file_descriptor,
                data=data,
                start_inclusive=start,
                end_exclusive=end,
            ),
            task_id=file._task_id,
        )
    )
    await file._wait(resp.exec_id)


# The FileIO class is designed to mimic Python's io.FileIO
# See https://github.com/python/cpython/blob/main/Lib/_pyio.py#L1459
class _FileIO(Generic[T]):
    """FileIO handle, used in the Sandbox filesystem API.

    The API is designed to mimic Python's io.FileIO.

    **Usage**

    ```python
    import modal

    app = modal.App.lookup("my-app", create_if_missing=True)

    sb = modal.Sandbox.create(app=app)
    f = sb.open("/tmp/foo.txt", "w")
    f.write("hello")
    f.close()
    ```
    """

    _binary = False
    _readable = False
    _writable = False
    _appended = False
    _closed = True

    _task_id: str = ""
    _file_descriptor: str = ""
    _client: Optional[_Client] = None

    def _validate_mode(self, mode: str) -> None:
        if not any(char in mode for char in "rwax"):
            raise ValueError(f"Invalid file mode: {mode}")

        self._readable = "r" in mode or "+" in mode
        self._writable = "w" in mode or "a" in mode or "x" in mode or "+" in mode
        self._appended = "a" in mode
        self._binary = "b" in mode

        valid_chars = set("rwaxb+")
        if any(char not in valid_chars for char in mode):
            raise ValueError(f"Invalid file mode: {mode}")

        mode_count = sum(1 for c in mode if c in "rwax")
        if mode_count > 1:
            raise ValueError("must have exactly one of create/read/write/append mode")

        seen_chars = set()
        for char in mode:
            if char in seen_chars:
                raise ValueError(f"Invalid file mode: {mode}")
            seen_chars.add(char)

    def _handle_error(self, error: api_pb2.SystemErrorMessage) -> None:
        error_class = ERROR_MAPPING.get(error.error_code, FilesystemExecutionError)
        raise error_class(error.error_message)

    async def _consume_output(self, exec_id: str) -> AsyncIterator[Optional[bytes]]:
        req = api_pb2.ContainerFilesystemExecGetOutputRequest(
            exec_id=exec_id,
            timeout=55,
        )
        assert self._client is not None
        async for batch in self._client.stub.ContainerFilesystemExecGetOutput.unary_stream(req):
            if batch.eof:
                yield None
                break
            if batch.HasField("error"):
                self._handle_error(batch.error)
            for message in batch.output:
                yield message

    async def _wait(self, exec_id: str) -> bytes:
        # The logic here is similar to how output is read from `exec`
        output = b""
        completed = False
        retries_remaining = 10
        while not completed:
            try:
                async for data in self._consume_output(exec_id):
                    if data is None:
                        completed = True
                        break
                    output += data
            except (GRPCError, StreamTerminatedError) as exc:
                if retries_remaining > 0:
                    retries_remaining -= 1
                    if isinstance(exc, GRPCError):
                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
                            await asyncio.sleep(1.0)
                            continue
                    elif isinstance(exc, StreamTerminatedError):
                        continue
                raise
        return output

    def _validate_type(self, data: Union[bytes, str]) -> None:
        if self._binary and isinstance(data, str):
            raise TypeError("Expected bytes when in binary mode")
        if not self._binary and isinstance(data, bytes):
            raise TypeError("Expected str when in text mode")

    async def _open_file(self, path: str, mode: str) -> None:
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
                task_id=self._task_id,
            )
        )
        if not resp.HasField("file_descriptor"):
            raise FilesystemExecutionError("Failed to open file")
        self._file_descriptor = resp.file_descriptor
        await self._wait(resp.exec_id)

    @classmethod
    async def create(
        cls, path: str, mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"], client: _Client, task_id: str
    ) -> "_FileIO":
        """Create a new FileIO handle."""
        self = cls.__new__(cls)
        self._client = client
        self._task_id = task_id
        self._validate_mode(mode)
        await self._open_file(path, mode)
        self._closed = False
        return self

    async def _make_request(
        self, request: api_pb2.ContainerFilesystemExecRequest
    ) -> api_pb2.ContainerFilesystemExecResponse:
        assert self._client is not None
        return await retry_transient_errors(self._client.stub.ContainerFilesystemExec, request)

    async def _make_read_request(self, n: Optional[int]) -> bytes:
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
                task_id=self._task_id,
            )
        )
        return await self._wait(resp.exec_id)

    async def read(self, n: Optional[int] = None) -> T:
        """Read n bytes from the current position, or the entire remaining file if n is None."""
        self._check_closed()
        self._check_readable()
        if n is not None and n > READ_FILE_SIZE_LIMIT:
            raise ValueError("Read request payload exceeds 100 MiB limit")
        output = await self._make_read_request(n)
        if self._binary:
            return cast(T, output)
        return cast(T, output.decode("utf-8"))

    async def readline(self) -> T:
        """Read a single line from the current position."""
        self._check_closed()
        self._check_readable()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        output = await self._wait(resp.exec_id)
        if self._binary:
            return cast(T, output)
        return cast(T, output.decode("utf-8"))

    async def readlines(self) -> Sequence[T]:
        """Read all lines from the current position."""
        self._check_closed()
        self._check_readable()
        output = await self._make_read_request(None)
        if self._binary:
            lines_bytes = output.split(b"\n")
            output = [line + b"\n" for line in lines_bytes[:-1]] + ([lines_bytes[-1]] if lines_bytes[-1] else [])
            return cast(Sequence[T], output)
        else:
            lines = output.decode("utf-8").split("\n")
            output = [line + "\n" for line in lines[:-1]] + ([lines[-1]] if lines[-1] else [])
            return cast(Sequence[T], output)

    async def write(self, data: Union[bytes, str]) -> None:
        """Write data to the current position.

        Writes may not appear until the entire buffer is flushed, which
        can be done manually with `flush()` or automatically when the file is
        closed.
        """
        self._check_closed()
        self._check_writable()
        self._validate_type(data)
        if isinstance(data, str):
            data = data.encode("utf-8")
        if len(data) > LARGE_FILE_SIZE_LIMIT:
            raise ValueError("Write request payload exceeds 16 MiB limit")
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_write_request=api_pb2.ContainerFileWriteRequest(file_descriptor=self._file_descriptor, data=data),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    async def flush(self) -> None:
        """Flush the buffer to disk."""
        self._check_closed()
        self._check_writable()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    def _get_whence(self, whence: int):
        if whence == 0:
            return api_pb2.SeekWhence.SEEK_SET
        elif whence == 1:
            return api_pb2.SeekWhence.SEEK_CUR
        elif whence == 2:
            return api_pb2.SeekWhence.SEEK_END
        else:
            raise ValueError(f"Invalid whence value: {whence}")

    async def seek(self, offset: int, whence: int = 0) -> None:
        """Move to a new position in the file.

        `whence` defaults to 0 (absolute file positioning); other values are 1
        (relative to the current position) and 2 (relative to the file's end).
        """
        self._check_closed()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_seek_request=api_pb2.ContainerFileSeekRequest(
                    file_descriptor=self._file_descriptor,
                    offset=offset,
                    whence=self._get_whence(whence),
                ),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    async def _close(self) -> None:
        # Buffer is flushed by the runner on close
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        self._closed = True
        await self._wait(resp.exec_id)

    async def close(self) -> None:
        """Flush the buffer and close the file."""
        await self._close()

    # also validated in the runner, but checked in the client to catch errors early
    def _check_writable(self) -> None:
        if not self._writable:
            raise io.UnsupportedOperation("not writeable")

    # also validated in the runner, but checked in the client to catch errors early
    def _check_readable(self) -> None:
        if not self._readable:
            raise io.UnsupportedOperation("not readable")

    # also validated in the runner, but checked in the client to catch errors early
    def _check_closed(self) -> None:
        if self._closed:
            raise ValueError("I/O operation on closed file")

    def __enter__(self) -> "_FileIO":
        self._check_closed()
        return self

    async def __exit__(self, exc_type, exc_value, traceback) -> None:
        await self._close()


delete_bytes = synchronize_api(_delete_bytes)
replace_bytes = synchronize_api(_replace_bytes)
FileIO = synchronize_api(_FileIO)
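In addition to the `FileIO` methods, the module exports two module-level helpers, `replace_bytes` and `delete_bytes`, for editing a byte range in place. Below is a sketch of how they compose with a handle from `Sandbox.open`, under the assumption that `sb` is an existing `modal.Sandbox` (see the example near the top of this diff); the file path and contents are made up.

```python
from modal.file_io import delete_bytes, replace_bytes

f = sb.open("/tmp/data.bin", "wb")
f.write(b"hello, world")
f.flush()

# Replace bytes [0, 5) ("hello"); the replacement may have a different length.
replace_bytes(f, b"goodbye", start=0, end=5)   # -> b"goodbye, world"

# Delete bytes [7, 14) (", world").
delete_bytes(f, start=7, end=14)               # -> b"goodbye"

f.close()
```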
modal/file_io.pyi
ADDED
@@ -0,0 +1,185 @@
import _typeshed
import modal.client
import modal_proto.api_pb2
import typing
import typing_extensions

T = typing.TypeVar("T")

async def _delete_bytes(
    file: _FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None
) -> None: ...
async def _replace_bytes(
    file: _FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
) -> None: ...

class _FileIO(typing.Generic[T]):
    _task_id: str
    _file_descriptor: str
    _client: typing.Optional[modal.client._Client]

    def _validate_mode(self, mode: str) -> None: ...
    def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...
    def _consume_output(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...
    async def _wait(self, exec_id: str) -> bytes: ...
    def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
    async def _open_file(self, path: str, mode: str) -> None: ...
    @classmethod
    async def create(
        cls,
        path: str,
        mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
        client: modal.client._Client,
        task_id: str,
    ) -> _FileIO: ...
    async def _make_request(
        self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
    ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
    async def _make_read_request(self, n: typing.Optional[int]) -> bytes: ...
    async def read(self, n: typing.Optional[int] = None) -> T: ...
    async def readline(self) -> T: ...
    async def readlines(self) -> typing.Sequence[T]: ...
    async def write(self, data: typing.Union[bytes, str]) -> None: ...
    async def flush(self) -> None: ...
    def _get_whence(self, whence: int): ...
    async def seek(self, offset: int, whence: int = 0) -> None: ...
    async def _close(self) -> None: ...
    async def close(self) -> None: ...
    def _check_writable(self) -> None: ...
    def _check_readable(self) -> None: ...
    def _check_closed(self) -> None: ...
    def __enter__(self) -> _FileIO: ...
    async def __exit__(self, exc_type, exc_value, traceback) -> None: ...

class __delete_bytes_spec(typing_extensions.Protocol):
    def __call__(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...
    async def aio(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...

delete_bytes: __delete_bytes_spec

class __replace_bytes_spec(typing_extensions.Protocol):
    def __call__(
        self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
    ) -> None: ...
    async def aio(
        self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
    ) -> None: ...

replace_bytes: __replace_bytes_spec

T_INNER = typing.TypeVar("T_INNER", covariant=True)

class FileIO(typing.Generic[T]):
    _task_id: str
    _file_descriptor: str
    _client: typing.Optional[modal.client.Client]

    def __init__(self, /, *args, **kwargs): ...
    def _validate_mode(self, mode: str) -> None: ...
    def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...

    class ___consume_output_spec(typing_extensions.Protocol):
        def __call__(self, exec_id: str) -> typing.Iterator[typing.Optional[bytes]]: ...
        def aio(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...

    _consume_output: ___consume_output_spec

    class ___wait_spec(typing_extensions.Protocol):
        def __call__(self, exec_id: str) -> bytes: ...
        async def aio(self, exec_id: str) -> bytes: ...

    _wait: ___wait_spec

    def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...

    class ___open_file_spec(typing_extensions.Protocol):
        def __call__(self, path: str, mode: str) -> None: ...
        async def aio(self, path: str, mode: str) -> None: ...

    _open_file: ___open_file_spec

    @classmethod
    def create(
        cls,
        path: str,
        mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
        client: modal.client.Client,
        task_id: str,
    ) -> FileIO: ...

    class ___make_request_spec(typing_extensions.Protocol):
        def __call__(
            self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
        ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
        async def aio(
            self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
        ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...

    _make_request: ___make_request_spec

    class ___make_read_request_spec(typing_extensions.Protocol):
        def __call__(self, n: typing.Optional[int]) -> bytes: ...
        async def aio(self, n: typing.Optional[int]) -> bytes: ...

    _make_read_request: ___make_read_request_spec

    class __read_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self, n: typing.Optional[int] = None) -> T_INNER: ...
        async def aio(self, n: typing.Optional[int] = None) -> T_INNER: ...

    read: __read_spec[T]

    class __readline_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self) -> T_INNER: ...
        async def aio(self) -> T_INNER: ...

    readline: __readline_spec[T]

    class __readlines_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self) -> typing.Sequence[T_INNER]: ...
        async def aio(self) -> typing.Sequence[T_INNER]: ...

    readlines: __readlines_spec[T]

    class __write_spec(typing_extensions.Protocol):
        def __call__(self, data: typing.Union[bytes, str]) -> None: ...
        async def aio(self, data: typing.Union[bytes, str]) -> None: ...

    write: __write_spec

    class __flush_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    flush: __flush_spec

    def _get_whence(self, whence: int): ...

    class __seek_spec(typing_extensions.Protocol):
        def __call__(self, offset: int, whence: int = 0) -> None: ...
        async def aio(self, offset: int, whence: int = 0) -> None: ...

    seek: __seek_spec

    class ___close_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    _close: ___close_spec

    class __close_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    close: __close_spec

    def _check_writable(self) -> None: ...
    def _check_readable(self) -> None: ...
    def _check_closed(self) -> None: ...
    def __enter__(self) -> FileIO: ...

    class ____exit___spec(typing_extensions.Protocol):
        def __call__(self, exc_type, exc_value, traceback) -> None: ...
        async def aio(self, exc_type, exc_value, traceback) -> None: ...

    __exit__: ____exit___spec
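The stub makes the synchronicity pattern explicit: each wrapped method is declared as a Protocol with a blocking `__call__` plus an async `aio` variant. A short sketch of the two call styles, assuming `f` is a `FileIO` handle obtained as in the earlier examples:

```python
def read_blocking(f) -> str:
    # Default form: the Protocol's __call__ signature (blocking).
    return f.read()

async def read_async(f) -> str:
    # Async form: the same method exposes an `.aio` variant
    # (see e.g. __read_spec.aio and __write_spec.aio above).
    return await f.read.aio()
```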
modal/functions.py
CHANGED
@@ -32,6 +32,7 @@ from ._resolver import Resolver
 from ._resources import convert_fn_config_to_resources_config
 from ._runtime.execution_context import current_input_id, is_local
 from ._serialization import serialize, serialize_proto_params
+from ._traceback import print_server_warnings
 from ._utils.async_utils import (
     TaskContext,
     async_merge,
@@ -1061,6 +1062,8 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
                 else:
                     raise

+            print_server_warnings(response.server_warnings)
+
             self._hydrate(response.function_id, resolver.client, response.handle_metadata)

         rep = f"Ref({app_name})"
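The `modal/functions.py` change surfaces server-side warnings (for example, deprecation notices) while a remote function is hydrated. A sketch of where this shows up, assuming the standard lookup flow; the app and function names are placeholders:

```python
import modal

# Any server_warnings attached to the FunctionGet response are now printed
# via print_server_warnings during hydration.
fn = modal.Function.lookup("my-app", "my_function")
fn.remote()
```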
modal/partial_function.py
CHANGED
@@ -49,7 +49,7 @@ class _PartialFunction(typing.Generic[P, ReturnType, OriginalReturnType]):
     raw_f: Callable[P, ReturnType]
     flags: _PartialFunctionFlags
     webhook_config: Optional[api_pb2.WebhookConfig]
-    is_generator: Optional[bool]
+    is_generator: bool
     keep_warm: Optional[int]
     batch_max_size: Optional[int]
     batch_wait_ms: Optional[int]
@@ -73,7 +73,13 @@ class _PartialFunction(typing.Generic[P, ReturnType, OriginalReturnType]):
         self.raw_f = raw_f
         self.flags = flags
         self.webhook_config = webhook_config
-        self.is_generator = is_generator
+        if is_generator is None:
+            # auto detect - doesn't work if the function *returns* a generator
+            final_is_generator = inspect.isgeneratorfunction(raw_f) or inspect.isasyncgenfunction(raw_f)
+        else:
+            final_is_generator = is_generator
+
+        self.is_generator = final_is_generator
         self.keep_warm = keep_warm
         self.wrapped = False  # Make sure that this was converted into a FunctionHandle
         self.batch_max_size = batch_max_size
@@ -101,7 +107,7 @@ class _PartialFunction(typing.Generic[P, ReturnType, OriginalReturnType]):
         # This happens mainly during serialization of the wrapped underlying class of a Cls
         # since we don't have the instance info here we just return the PartialFunction itself
         # to let it be bound to a variable and become a Function later on
-        return self
+        return self  # type: ignore  # this returns a PartialFunction in a special internal case

     def __del__(self):
         if (self.flags & _PartialFunctionFlags.FUNCTION) and self.wrapped is False:
@@ -154,14 +160,14 @@ def _find_partial_methods_for_user_cls(user_cls: type[Any], flags: int) -> dict[
             )
             deprecation_error((2024, 2, 21), message)

-    partial_functions: dict[str, PartialFunction] = {}
+    partial_functions: dict[str, _PartialFunction] = {}
     for parent_cls in reversed(user_cls.mro()):
         if parent_cls is not object:
             for k, v in parent_cls.__dict__.items():
-                if isinstance(v, PartialFunction):
-                    partial_function = synchronizer._translate_in(v)
-                    if partial_function.flags & flags:
-                        partial_functions[k] = partial_function
+                if isinstance(v, PartialFunction):  # type: ignore[reportArgumentType]  # synchronicity wrapper types
+                    _partial_function: _PartialFunction = typing.cast(_PartialFunction, synchronizer._translate_in(v))
+                    if _partial_function.flags & flags:
+                        partial_functions[k] = _partial_function

     return partial_functions

@@ -245,8 +251,6 @@ def _method(
                 "Batched function on classes should not be wrapped by `@method`. "
                 "Suggestion: remove the `@method` decorator."
             )
-        if is_generator is None:
-            is_generator = inspect.isgeneratorfunction(raw_f) or inspect.isasyncgenfunction(raw_f)
         return _PartialFunction(raw_f, _PartialFunctionFlags.FUNCTION, is_generator=is_generator, keep_warm=keep_warm)

     return wrapper
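The `is_generator` auto-detection now happens once in `_PartialFunction.__init__` instead of in `_method`, so the attribute is always a concrete `bool` after construction. The detection itself is plain `inspect`, as the added comment notes; a small self-contained illustration of its behavior and its documented limitation:

```python
import inspect

def detect_is_generator(raw_f) -> bool:
    # Mirrors the auto-detection performed when is_generator is None.
    return inspect.isgeneratorfunction(raw_f) or inspect.isasyncgenfunction(raw_f)

def yields():
    yield 1

def returns_generator():
    return (i for i in range(3))

assert detect_is_generator(yields) is True
# "doesn't work if the function *returns* a generator":
assert detect_is_generator(returns_generator) is False
```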
modal/partial_function.pyi
CHANGED
@@ -28,7 +28,7 @@ class _PartialFunction(typing.Generic[P, ReturnType, OriginalReturnType]):
     raw_f: typing.Callable[P, ReturnType]
     flags: _PartialFunctionFlags
     webhook_config: typing.Optional[modal_proto.api_pb2.WebhookConfig]
-    is_generator: typing.Optional[bool]
+    is_generator: bool
     keep_warm: typing.Optional[int]
     batch_max_size: typing.Optional[int]
     batch_wait_ms: typing.Optional[int]
@@ -57,7 +57,7 @@ class PartialFunction(typing.Generic[P, ReturnType, OriginalReturnType]):
     raw_f: typing.Callable[P, ReturnType]
     flags: _PartialFunctionFlags
     webhook_config: typing.Optional[modal_proto.api_pb2.WebhookConfig]
-    is_generator: typing.Optional[bool]
+    is_generator: bool
     keep_warm: typing.Optional[int]
     batch_max_size: typing.Optional[int]
     batch_wait_ms: typing.Optional[int]