modal 0.67.43__py3-none-any.whl → 0.68.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modal/_container_entrypoint.py +4 -1
- modal/_runtime/container_io_manager.py +3 -0
- modal/_runtime/user_code_imports.py +4 -2
- modal/_traceback.py +16 -2
- modal/_utils/function_utils.py +5 -1
- modal/_utils/grpc_testing.py +6 -2
- modal/_utils/hash_utils.py +14 -2
- modal/cli/_traceback.py +11 -4
- modal/cli/run.py +0 -7
- modal/client.py +6 -37
- modal/client.pyi +2 -6
- modal/cls.py +132 -62
- modal/cls.pyi +13 -7
- modal/exception.py +20 -0
- modal/file_io.py +380 -0
- modal/file_io.pyi +185 -0
- modal/functions.py +33 -11
- modal/functions.pyi +5 -3
- modal/object.py +4 -2
- modal/partial_function.py +14 -10
- modal/partial_function.pyi +2 -2
- modal/runner.py +5 -4
- modal/runner.pyi +2 -1
- modal/sandbox.py +40 -0
- modal/sandbox.pyi +18 -0
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/METADATA +2 -2
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/RECORD +37 -35
- modal_docs/gen_reference_docs.py +1 -0
- modal_proto/api.proto +25 -1
- modal_proto/api_pb2.py +758 -718
- modal_proto/api_pb2.pyi +95 -10
- modal_version/__init__.py +1 -1
- modal_version/_version_generated.py +1 -1
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/LICENSE +0 -0
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/WHEEL +0 -0
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/entry_points.txt +0 -0
- {modal-0.67.43.dist-info → modal-0.68.11.dist-info}/top_level.txt +0 -0
modal/file_io.py
ADDED
@@ -0,0 +1,380 @@
|
|
1
|
+
# Copyright Modal Labs 2024
|
2
|
+
import asyncio
|
3
|
+
import io
|
4
|
+
from typing import TYPE_CHECKING, AsyncIterator, Generic, Optional, Sequence, TypeVar, Union, cast
|
5
|
+
|
6
|
+
if TYPE_CHECKING:
|
7
|
+
import _typeshed
|
8
|
+
|
9
|
+
from grpclib.exceptions import GRPCError, StreamTerminatedError
|
10
|
+
|
11
|
+
from modal._utils.grpc_utils import retry_transient_errors
|
12
|
+
from modal_proto import api_pb2
|
13
|
+
|
14
|
+
from ._utils.async_utils import synchronize_api
|
15
|
+
from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
|
16
|
+
from .client import _Client
|
17
|
+
from .exception import FilesystemExecutionError, InvalidError
|
18
|
+
|
19
|
+
# Payload size limits checked client-side so oversized requests fail fast,
# before a filesystem RPC is issued.
LARGE_FILE_SIZE_LIMIT = 16 * 1024 * 1024  # 16 MiB
READ_FILE_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MiB

# Maps errno-style error codes reported by the runner onto the closest
# built-in Python exception, so callers can catch familiar types
# (FileNotFoundError, PermissionError, ...). Unknown codes fall back to
# FilesystemExecutionError (see _FileIO._handle_error).
ERROR_MAPPING = {
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_UNSPECIFIED: FilesystemExecutionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_PERM: PermissionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOENT: FileNotFoundError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_IO: IOError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NXIO: IOError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOMEM: MemoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ACCES: PermissionError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_EXIST: FileExistsError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOTDIR: NotADirectoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ISDIR: IsADirectoryError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_INVAL: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_MFILE: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_FBIG: OSError,
    api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOSPC: OSError,
}

# Content type of a file handle: str in text mode, bytes in binary mode.
T = TypeVar("T", str, bytes)
|
40
|
+
|
41
|
+
|
42
|
+
async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optional[int] = None) -> None:
    """Delete a range of bytes from the file.

    `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
    If either is None, the start or end of the file is used, respectively.

    Raises:
        InvalidError: if both offsets are given and `start >= end`.
        ValueError: if the file handle has already been closed.
    """
    assert file._file_descriptor is not None
    file._check_closed()
    if start is not None and end is not None:
        if start >= end:
            # Consistent with _replace_bytes, which raises InvalidError for the
            # same invalid-range condition (previously raised ValueError here).
            raise InvalidError("start must be less than end")
    resp = await file._make_request(
        api_pb2.ContainerFilesystemExecRequest(
            file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
                file_descriptor=file._file_descriptor,
                start_inclusive=start,
                end_exclusive=end,
            ),
            task_id=file._task_id,
        )
    )
    # Block until the runner reports the operation finished (or surface errors).
    await file._wait(resp.exec_id)
|
64
|
+
|
65
|
+
|
66
|
+
async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = None, end: Optional[int] = None) -> None:
    """Replace the byte range `[start, end)` of the file with `data`.

    The replacement does not have to be the same length as the range it
    replaces. `start` is an inclusive byte offset and `end` an exclusive one;
    a value of None means the beginning or the end of the file, respectively.
    """
    assert file._file_descriptor is not None
    file._check_closed()
    # Validate the requested range and payload size before issuing the RPC.
    if start is not None and end is not None and start >= end:
        raise InvalidError("start must be less than end")
    if len(data) > LARGE_FILE_SIZE_LIMIT:
        raise InvalidError("Write request payload exceeds 16 MiB limit")
    replace_request = api_pb2.ContainerFileWriteReplaceBytesRequest(
        file_descriptor=file._file_descriptor,
        data=data,
        start_inclusive=start,
        end_exclusive=end,
    )
    exec_request = api_pb2.ContainerFilesystemExecRequest(
        file_write_replace_bytes_request=replace_request,
        task_id=file._task_id,
    )
    response = await file._make_request(exec_request)
    # Wait for the runner to acknowledge completion (or raise a mapped error).
    await file._wait(response.exec_id)
|
92
|
+
|
93
|
+
|
94
|
+
# The FileIO class is designed to mimic Python's io.FileIO
# See https://github.com/python/cpython/blob/main/Lib/_pyio.py#L1459
class _FileIO(Generic[T]):
    """FileIO handle, used in the Sandbox filesystem API.

    The API is designed to mimic Python's io.FileIO.

    **Usage**

    ```python
    import modal

    app = modal.App.lookup("my-app", create_if_missing=True)

    sb = modal.Sandbox.create(app=app)
    f = sb.open("/tmp/foo.txt", "w")
    f.write("hello")
    f.close()
    ```
    """

    # Mode flags, populated by _validate_mode() from the `mode` string.
    _binary = False
    _readable = False
    _writable = False
    _appended = False
    # Starts closed; flipped to False only after the remote open succeeds.
    _closed = True

    # Identifiers tying this handle to a sandbox task and a remote descriptor.
    _task_id: str = ""
    _file_descriptor: str = ""
    _client: Optional[_Client] = None

    def _validate_mode(self, mode: str) -> None:
        # Validate the mode string and set the handle's mode flags.
        # Must contain at least one of the primary modes.
        if not any(char in mode for char in "rwax"):
            raise ValueError(f"Invalid file mode: {mode}")

        self._readable = "r" in mode or "+" in mode
        self._writable = "w" in mode or "a" in mode or "x" in mode or "+" in mode
        self._appended = "a" in mode
        self._binary = "b" in mode

        # Reject any character outside the accepted alphabet.
        valid_chars = set("rwaxb+")
        if any(char not in valid_chars for char in mode):
            raise ValueError(f"Invalid file mode: {mode}")

        # Exactly one primary mode is allowed (mirrors CPython's open()).
        mode_count = sum(1 for c in mode if c in "rwax")
        if mode_count > 1:
            raise ValueError("must have exactly one of create/read/write/append mode")

        # Reject duplicated characters (e.g. "rr", "rbb").
        seen_chars = set()
        for char in mode:
            if char in seen_chars:
                raise ValueError(f"Invalid file mode: {mode}")
            seen_chars.add(char)

    def _handle_error(self, error: api_pb2.SystemErrorMessage) -> None:
        # Translate a runner-reported error code into the closest built-in
        # exception; unknown codes become FilesystemExecutionError.
        error_class = ERROR_MAPPING.get(error.error_code, FilesystemExecutionError)
        raise error_class(error.error_message)

    async def _consume_output(self, exec_id: str) -> AsyncIterator[Optional[bytes]]:
        # Stream output chunks for one exec; yields None exactly once as an
        # EOF sentinel, then stops.
        req = api_pb2.ContainerFilesystemExecGetOutputRequest(
            exec_id=exec_id,
            timeout=55,
        )
        assert self._client is not None
        async for batch in self._client.stub.ContainerFilesystemExecGetOutput.unary_stream(req):
            if batch.eof:
                yield None
                break
            if batch.HasField("error"):
                # Raises; terminates the stream with a mapped exception.
                self._handle_error(batch.error)
            for message in batch.output:
                yield message

    async def _wait(self, exec_id: str) -> bytes:
        # The logic here is similar to how output is read from `exec`
        # Accumulate all output until the EOF sentinel, retrying the stream a
        # bounded number of times on transient gRPC failures.
        output = b""
        completed = False
        retries_remaining = 10
        while not completed:
            try:
                async for data in self._consume_output(exec_id):
                    if data is None:
                        # EOF sentinel from _consume_output.
                        completed = True
                        break
                    output += data
            except (GRPCError, StreamTerminatedError) as exc:
                if retries_remaining > 0:
                    retries_remaining -= 1
                    if isinstance(exc, GRPCError):
                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
                            # Back off briefly before re-opening the stream.
                            await asyncio.sleep(1.0)
                            continue
                    elif isinstance(exc, StreamTerminatedError):
                        continue
                # Non-retryable error, or retries exhausted.
                raise
        return output

    def _validate_type(self, data: Union[bytes, str]) -> None:
        # Enforce str-in-text-mode / bytes-in-binary-mode, like io.FileIO.
        if self._binary and isinstance(data, str):
            raise TypeError("Expected bytes when in binary mode")
        if not self._binary and isinstance(data, bytes):
            raise TypeError("Expected str when in text mode")

    async def _open_file(self, path: str, mode: str) -> None:
        # Open the remote file and record the descriptor used by all
        # subsequent requests on this handle.
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
                task_id=self._task_id,
            )
        )
        if not resp.HasField("file_descriptor"):
            raise FilesystemExecutionError("Failed to open file")
        self._file_descriptor = resp.file_descriptor
        await self._wait(resp.exec_id)

    @classmethod
    async def create(
        cls, path: str, mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"], client: _Client, task_id: str
    ) -> "_FileIO":
        """Create a new FileIO handle."""
        # __new__ is used instead of __init__ so construction can be async
        # (the remote open must complete before the handle is usable).
        self = cls.__new__(cls)
        self._client = client
        self._task_id = task_id
        self._validate_mode(mode)
        await self._open_file(path, mode)
        # Only mark open after the remote open has succeeded.
        self._closed = False
        return self

    async def _make_request(
        self, request: api_pb2.ContainerFilesystemExecRequest
    ) -> api_pb2.ContainerFilesystemExecResponse:
        # Single RPC with transient-error retries (distinct from the output
        # streaming retries in _wait).
        assert self._client is not None
        return await retry_transient_errors(self._client.stub.ContainerFilesystemExec, request)

    async def _make_read_request(self, n: Optional[int]) -> bytes:
        # Issue a read of n bytes (or the rest of the file when n is None)
        # and return the raw bytes collected from the output stream.
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
                task_id=self._task_id,
            )
        )
        return await self._wait(resp.exec_id)

    async def read(self, n: Optional[int] = None) -> T:
        """Read n bytes from the current position, or the entire remaining file if n is None."""
        self._check_closed()
        self._check_readable()
        if n is not None and n > READ_FILE_SIZE_LIMIT:
            raise ValueError("Read request payload exceeds 100 MiB limit")
        output = await self._make_read_request(n)
        if self._binary:
            return cast(T, output)
        # Text mode: decode to str before returning.
        return cast(T, output.decode("utf-8"))

    async def readline(self) -> T:
        """Read a single line from the current position."""
        self._check_closed()
        self._check_readable()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        output = await self._wait(resp.exec_id)
        if self._binary:
            return cast(T, output)
        return cast(T, output.decode("utf-8"))

    async def readlines(self) -> Sequence[T]:
        """Read all lines from the current position."""
        self._check_closed()
        self._check_readable()
        output = await self._make_read_request(None)
        # Split on newlines, re-attaching the separator to every line except a
        # possibly unterminated final line (dropped entirely if empty).
        if self._binary:
            lines_bytes = output.split(b"\n")
            output = [line + b"\n" for line in lines_bytes[:-1]] + ([lines_bytes[-1]] if lines_bytes[-1] else [])
            return cast(Sequence[T], output)
        else:
            lines = output.decode("utf-8").split("\n")
            output = [line + "\n" for line in lines[:-1]] + ([lines[-1]] if lines[-1] else [])
            return cast(Sequence[T], output)

    async def write(self, data: Union[bytes, str]) -> None:
        """Write data to the current position.

        Writes may not appear until the entire buffer is flushed, which
        can be done manually with `flush()` or automatically when the file is
        closed.
        """
        self._check_closed()
        self._check_writable()
        self._validate_type(data)
        if isinstance(data, str):
            data = data.encode("utf-8")
        if len(data) > LARGE_FILE_SIZE_LIMIT:
            raise ValueError("Write request payload exceeds 16 MiB limit")
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_write_request=api_pb2.ContainerFileWriteRequest(file_descriptor=self._file_descriptor, data=data),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    async def flush(self) -> None:
        """Flush the buffer to disk."""
        self._check_closed()
        self._check_writable()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    def _get_whence(self, whence: int):
        # Translate the io-module whence convention (0/1/2) to the proto enum.
        if whence == 0:
            return api_pb2.SeekWhence.SEEK_SET
        elif whence == 1:
            return api_pb2.SeekWhence.SEEK_CUR
        elif whence == 2:
            return api_pb2.SeekWhence.SEEK_END
        else:
            raise ValueError(f"Invalid whence value: {whence}")

    async def seek(self, offset: int, whence: int = 0) -> None:
        """Move to a new position in the file.

        `whence` defaults to 0 (absolute file positioning); other values are 1
        (relative to the current position) and 2 (relative to the file's end).
        """
        self._check_closed()
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_seek_request=api_pb2.ContainerFileSeekRequest(
                    file_descriptor=self._file_descriptor,
                    offset=offset,
                    whence=self._get_whence(whence),
                ),
                task_id=self._task_id,
            )
        )
        await self._wait(resp.exec_id)

    async def _close(self) -> None:
        # Buffer is flushed by the runner on close
        resp = await self._make_request(
            api_pb2.ContainerFilesystemExecRequest(
                file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
                task_id=self._task_id,
            )
        )
        # Mark closed before waiting so the handle is unusable even if the
        # wait below raises.
        self._closed = True
        await self._wait(resp.exec_id)

    async def close(self) -> None:
        """Flush the buffer and close the file."""
        await self._close()

    # also validated in the runner, but checked in the client to catch errors early
    def _check_writable(self) -> None:
        if not self._writable:
            raise io.UnsupportedOperation("not writeable")

    # also validated in the runner, but checked in the client to catch errors early
    def _check_readable(self) -> None:
        if not self._readable:
            raise io.UnsupportedOperation("not readable")

    # also validated in the runner, but checked in the client to catch errors early
    def _check_closed(self) -> None:
        if self._closed:
            raise ValueError("I/O operation on closed file")

    # NOTE(review): __enter__ is sync while __exit__ is async; presumably the
    # synchronize_api wrapper exposes a usable sync context manager — confirm.
    def __enter__(self) -> "_FileIO":
        self._check_closed()
        return self

    async def __exit__(self, exc_type, exc_value, traceback) -> None:
        await self._close()
|
376
|
+
|
377
|
+
|
378
|
+
# Public names: blocking/async dual-interface wrappers around the private
# async implementations, produced by synchronize_api.
delete_bytes = synchronize_api(_delete_bytes)
replace_bytes = synchronize_api(_replace_bytes)
FileIO = synchronize_api(_FileIO)
|
modal/file_io.pyi
ADDED
@@ -0,0 +1,185 @@
|
|
1
|
+
import _typeshed
|
2
|
+
import modal.client
|
3
|
+
import modal_proto.api_pb2
|
4
|
+
import typing
|
5
|
+
import typing_extensions
|
6
|
+
|
7
|
+
# NOTE(review): this .pyi appears to be auto-generated from modal/file_io.py
# (synchronicity stub generation) — edit the source module, not this file.
T = typing.TypeVar("T")

async def _delete_bytes(
    file: _FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None
) -> None: ...
async def _replace_bytes(
    file: _FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
) -> None: ...
|
15
|
+
|
16
|
+
# Stub for the private async implementation class in modal/file_io.py.
class _FileIO(typing.Generic[T]):
    _task_id: str
    _file_descriptor: str
    _client: typing.Optional[modal.client._Client]

    def _validate_mode(self, mode: str) -> None: ...
    def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...
    def _consume_output(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...
    async def _wait(self, exec_id: str) -> bytes: ...
    def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
    async def _open_file(self, path: str, mode: str) -> None: ...
    @classmethod
    async def create(
        cls,
        path: str,
        mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
        client: modal.client._Client,
        task_id: str,
    ) -> _FileIO: ...
    async def _make_request(
        self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
    ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
    async def _make_read_request(self, n: typing.Optional[int]) -> bytes: ...
    async def read(self, n: typing.Optional[int] = None) -> T: ...
    async def readline(self) -> T: ...
    async def readlines(self) -> typing.Sequence[T]: ...
    async def write(self, data: typing.Union[bytes, str]) -> None: ...
    async def flush(self) -> None: ...
    def _get_whence(self, whence: int): ...
    async def seek(self, offset: int, whence: int = 0) -> None: ...
    async def _close(self) -> None: ...
    async def close(self) -> None: ...
    def _check_writable(self) -> None: ...
    def _check_readable(self) -> None: ...
    def _check_closed(self) -> None: ...
    def __enter__(self) -> _FileIO: ...
    async def __exit__(self, exc_type, exc_value, traceback) -> None: ...
|
53
|
+
|
54
|
+
# Callable protocols describing the dual (blocking `__call__` / async `.aio`)
# interface of the synchronize_api-wrapped module-level functions.
class __delete_bytes_spec(typing_extensions.Protocol):
    def __call__(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...
    async def aio(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...

delete_bytes: __delete_bytes_spec

class __replace_bytes_spec(typing_extensions.Protocol):
    def __call__(
        self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
    ) -> None: ...
    async def aio(
        self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
    ) -> None: ...

replace_bytes: __replace_bytes_spec

# Covariant type var used by the per-method protocols on FileIO below.
T_INNER = typing.TypeVar("T_INNER", covariant=True)
|
71
|
+
|
72
|
+
# Stub for the public synchronize_api-wrapped FileIO class. Each wrapped
# method is typed via a nested protocol exposing a blocking `__call__` and an
# async `.aio` variant.
class FileIO(typing.Generic[T]):
    _task_id: str
    _file_descriptor: str
    _client: typing.Optional[modal.client.Client]

    def __init__(self, /, *args, **kwargs): ...
    def _validate_mode(self, mode: str) -> None: ...
    def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...

    class ___consume_output_spec(typing_extensions.Protocol):
        def __call__(self, exec_id: str) -> typing.Iterator[typing.Optional[bytes]]: ...
        def aio(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...

    _consume_output: ___consume_output_spec

    class ___wait_spec(typing_extensions.Protocol):
        def __call__(self, exec_id: str) -> bytes: ...
        async def aio(self, exec_id: str) -> bytes: ...

    _wait: ___wait_spec

    def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...

    class ___open_file_spec(typing_extensions.Protocol):
        def __call__(self, path: str, mode: str) -> None: ...
        async def aio(self, path: str, mode: str) -> None: ...

    _open_file: ___open_file_spec

    @classmethod
    def create(
        cls,
        path: str,
        mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
        client: modal.client.Client,
        task_id: str,
    ) -> FileIO: ...

    class ___make_request_spec(typing_extensions.Protocol):
        def __call__(
            self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
        ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
        async def aio(
            self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
        ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...

    _make_request: ___make_request_spec

    class ___make_read_request_spec(typing_extensions.Protocol):
        def __call__(self, n: typing.Optional[int]) -> bytes: ...
        async def aio(self, n: typing.Optional[int]) -> bytes: ...

    _make_read_request: ___make_read_request_spec

    class __read_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self, n: typing.Optional[int] = None) -> T_INNER: ...
        async def aio(self, n: typing.Optional[int] = None) -> T_INNER: ...

    read: __read_spec[T]

    class __readline_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self) -> T_INNER: ...
        async def aio(self) -> T_INNER: ...

    readline: __readline_spec[T]

    class __readlines_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self) -> typing.Sequence[T_INNER]: ...
        async def aio(self) -> typing.Sequence[T_INNER]: ...

    readlines: __readlines_spec[T]

    class __write_spec(typing_extensions.Protocol):
        def __call__(self, data: typing.Union[bytes, str]) -> None: ...
        async def aio(self, data: typing.Union[bytes, str]) -> None: ...

    write: __write_spec

    class __flush_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    flush: __flush_spec

    def _get_whence(self, whence: int): ...

    class __seek_spec(typing_extensions.Protocol):
        def __call__(self, offset: int, whence: int = 0) -> None: ...
        async def aio(self, offset: int, whence: int = 0) -> None: ...

    seek: __seek_spec

    class ___close_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    _close: ___close_spec

    class __close_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    close: __close_spec

    def _check_writable(self) -> None: ...
    def _check_readable(self) -> None: ...
    def _check_closed(self) -> None: ...
    def __enter__(self) -> FileIO: ...

    class ____exit___spec(typing_extensions.Protocol):
        def __call__(self, exc_type, exc_value, traceback) -> None: ...
        async def aio(self, exc_type, exc_value, traceback) -> None: ...

    __exit__: ____exit___spec
|
modal/functions.py
CHANGED
@@ -32,6 +32,7 @@ from ._resolver import Resolver
|
|
32
32
|
from ._resources import convert_fn_config_to_resources_config
|
33
33
|
from ._runtime.execution_context import current_input_id, is_local
|
34
34
|
from ._serialization import serialize, serialize_proto_params
|
35
|
+
from ._traceback import print_server_warnings
|
35
36
|
from ._utils.async_utils import (
|
36
37
|
TaskContext,
|
37
38
|
async_merge,
|
@@ -431,7 +432,7 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
431
432
|
image: _Image,
|
432
433
|
secrets: Sequence[_Secret] = (),
|
433
434
|
schedule: Optional[Schedule] = None,
|
434
|
-
is_generator=False,
|
435
|
+
is_generator: bool = False,
|
435
436
|
gpu: Union[GPU_T, list[GPU_T]] = None,
|
436
437
|
# TODO: maybe break this out into a separate decorator for notebooks.
|
437
438
|
mounts: Collection[_Mount] = (),
|
@@ -627,7 +628,7 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
627
628
|
raise InvalidError(f"Expected modal.Image object. Got {type(image)}.")
|
628
629
|
|
629
630
|
method_definitions: Optional[dict[str, api_pb2.MethodDefinition]] = None
|
630
|
-
|
631
|
+
|
631
632
|
if info.user_cls:
|
632
633
|
method_definitions = {}
|
633
634
|
partial_functions = _find_partial_methods_for_user_cls(info.user_cls, _PartialFunctionFlags.FUNCTION)
|
@@ -1061,6 +1062,8 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1061
1062
|
else:
|
1062
1063
|
raise
|
1063
1064
|
|
1065
|
+
print_server_warnings(response.server_warnings)
|
1066
|
+
|
1064
1067
|
self._hydrate(response.function_id, resolver.client, response.handle_metadata)
|
1065
1068
|
|
1066
1069
|
rep = f"Ref({app_name})"
|
@@ -1188,9 +1191,16 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1188
1191
|
return self._web_url
|
1189
1192
|
|
1190
1193
|
@property
|
1191
|
-
def is_generator(self) -> bool:
|
1194
|
+
async def is_generator(self) -> bool:
|
1192
1195
|
"""mdmd:hidden"""
|
1193
|
-
|
1196
|
+
# hacky: kind of like @live_method, but not hydrating if we have the value already from local source
|
1197
|
+
if self._is_generator is not None:
|
1198
|
+
# this is set if the function or class is local
|
1199
|
+
return self._is_generator
|
1200
|
+
|
1201
|
+
# not set - this is a from_name lookup - hydrate
|
1202
|
+
await self.resolve()
|
1203
|
+
assert self._is_generator is not None # should be set now
|
1194
1204
|
return self._is_generator
|
1195
1205
|
|
1196
1206
|
@property
|
@@ -1272,6 +1282,10 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1272
1282
|
|
1273
1283
|
@synchronizer.no_io_translation
|
1274
1284
|
async def _call_generator_nowait(self, args, kwargs):
|
1285
|
+
deprecation_warning(
|
1286
|
+
(2024, 12, 11),
|
1287
|
+
"Calling spawn on a generator function is deprecated and will soon raise an exception.",
|
1288
|
+
)
|
1275
1289
|
return await _Invocation.create(
|
1276
1290
|
self,
|
1277
1291
|
args,
|
@@ -1311,6 +1325,9 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1311
1325
|
async for item in self._call_generator(args, kwargs): # type: ignore
|
1312
1326
|
yield item
|
1313
1327
|
|
1328
|
+
def _is_local(self):
|
1329
|
+
return self._info is not None
|
1330
|
+
|
1314
1331
|
def _get_info(self) -> FunctionInfo:
|
1315
1332
|
if not self._info:
|
1316
1333
|
raise ExecutionError("Can't get info for a function that isn't locally defined")
|
@@ -1335,19 +1352,24 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1335
1352
|
"""
|
1336
1353
|
# TODO(erikbern): it would be nice to remove the nowrap thing, but right now that would cause
|
1337
1354
|
# "user code" to run on the synchronicity thread, which seems bad
|
1355
|
+
if not self._is_local():
|
1356
|
+
msg = (
|
1357
|
+
"The definition for this function is missing here so it is not possible to invoke it locally. "
|
1358
|
+
"If this function was retrieved via `Function.lookup` you need to use `.remote()`."
|
1359
|
+
)
|
1360
|
+
raise ExecutionError(msg)
|
1361
|
+
|
1338
1362
|
info = self._get_info()
|
1363
|
+
if not info.raw_f:
|
1364
|
+
# Here if calling .local on a service function itself which should never happen
|
1365
|
+
# TODO: check if we end up here in a container for a serialized function?
|
1366
|
+
raise ExecutionError("Can't call .local on service function")
|
1339
1367
|
|
1340
1368
|
if is_local() and self.spec.volumes or self.spec.network_file_systems:
|
1341
1369
|
warnings.warn(
|
1342
1370
|
f"The {info.function_name} function is executing locally "
|
1343
1371
|
+ "and will not have access to the mounted Volume or NetworkFileSystem data"
|
1344
1372
|
)
|
1345
|
-
if not info or not info.raw_f:
|
1346
|
-
msg = (
|
1347
|
-
"The definition for this function is missing so it is not possible to invoke it locally. "
|
1348
|
-
"If this function was retrieved via `Function.lookup` you need to use `.remote()`."
|
1349
|
-
)
|
1350
|
-
raise ExecutionError(msg)
|
1351
1373
|
|
1352
1374
|
obj: Optional["modal.cls._Obj"] = self._get_obj()
|
1353
1375
|
|
@@ -1357,9 +1379,9 @@ class _Function(typing.Generic[P, ReturnType, OriginalReturnType], _Object, type
|
|
1357
1379
|
else:
|
1358
1380
|
# This is a method on a class, so bind the self to the function
|
1359
1381
|
user_cls_instance = obj._cached_user_cls_instance()
|
1360
|
-
|
1361
1382
|
fun = info.raw_f.__get__(user_cls_instance)
|
1362
1383
|
|
1384
|
+
# TODO: replace implicit local enter/exit with a context manager
|
1363
1385
|
if is_async(info.raw_f):
|
1364
1386
|
# We want to run __aenter__ and fun in the same coroutine
|
1365
1387
|
async def coro():
|