modal 0.67.43__py3-none-any.whl → 0.68.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. modal/__init__.py +2 -0
  2. modal/_container_entrypoint.py +4 -1
  3. modal/_ipython.py +3 -13
  4. modal/_runtime/asgi.py +4 -0
  5. modal/_runtime/container_io_manager.py +3 -0
  6. modal/_runtime/user_code_imports.py +17 -20
  7. modal/_traceback.py +16 -2
  8. modal/_utils/blob_utils.py +27 -92
  9. modal/_utils/bytes_io_segment_payload.py +97 -0
  10. modal/_utils/function_utils.py +5 -1
  11. modal/_utils/grpc_testing.py +6 -2
  12. modal/_utils/hash_utils.py +51 -10
  13. modal/_utils/http_utils.py +19 -10
  14. modal/_utils/{pattern_matcher.py → pattern_utils.py} +1 -70
  15. modal/_utils/shell_utils.py +11 -5
  16. modal/cli/_traceback.py +11 -4
  17. modal/cli/run.py +25 -12
  18. modal/client.py +6 -37
  19. modal/client.pyi +2 -6
  20. modal/cls.py +132 -62
  21. modal/cls.pyi +13 -7
  22. modal/exception.py +20 -0
  23. modal/file_io.py +380 -0
  24. modal/file_io.pyi +185 -0
  25. modal/file_pattern_matcher.py +121 -0
  26. modal/functions.py +33 -11
  27. modal/functions.pyi +11 -9
  28. modal/image.py +88 -8
  29. modal/image.pyi +20 -4
  30. modal/mount.py +49 -9
  31. modal/mount.pyi +19 -4
  32. modal/network_file_system.py +4 -1
  33. modal/object.py +4 -2
  34. modal/partial_function.py +22 -10
  35. modal/partial_function.pyi +10 -2
  36. modal/runner.py +5 -4
  37. modal/runner.pyi +2 -1
  38. modal/sandbox.py +40 -0
  39. modal/sandbox.pyi +18 -0
  40. modal/volume.py +5 -1
  41. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/METADATA +2 -2
  42. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/RECORD +52 -48
  43. modal_docs/gen_reference_docs.py +1 -0
  44. modal_proto/api.proto +33 -1
  45. modal_proto/api_pb2.py +813 -737
  46. modal_proto/api_pb2.pyi +160 -13
  47. modal_version/__init__.py +1 -1
  48. modal_version/_version_generated.py +1 -1
  49. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/LICENSE +0 -0
  50. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/WHEEL +0 -0
  51. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/entry_points.txt +0 -0
  52. {modal-0.67.43.dist-info → modal-0.68.24.dist-info}/top_level.txt +0 -0
modal/file_io.py ADDED
@@ -0,0 +1,380 @@
1
+ # Copyright Modal Labs 2024
2
+ import asyncio
3
+ import io
4
+ from typing import TYPE_CHECKING, AsyncIterator, Generic, Optional, Sequence, TypeVar, Union, cast
5
+
6
+ if TYPE_CHECKING:
7
+ import _typeshed
8
+
9
+ from grpclib.exceptions import GRPCError, StreamTerminatedError
10
+
11
+ from modal._utils.grpc_utils import retry_transient_errors
12
+ from modal_proto import api_pb2
13
+
14
+ from ._utils.async_utils import synchronize_api
15
+ from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES
16
+ from .client import _Client
17
+ from .exception import FilesystemExecutionError, InvalidError
18
+
19
+ LARGE_FILE_SIZE_LIMIT = 16 * 1024 * 1024 # 16 MiB
20
+ READ_FILE_SIZE_LIMIT = 100 * 1024 * 1024 # 100 MiB
21
+
22
+ ERROR_MAPPING = {
23
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_UNSPECIFIED: FilesystemExecutionError,
24
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_PERM: PermissionError,
25
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOENT: FileNotFoundError,
26
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_IO: IOError,
27
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NXIO: IOError,
28
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOMEM: MemoryError,
29
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ACCES: PermissionError,
30
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_EXIST: FileExistsError,
31
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOTDIR: NotADirectoryError,
32
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_ISDIR: IsADirectoryError,
33
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_INVAL: OSError,
34
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_MFILE: OSError,
35
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_FBIG: OSError,
36
+ api_pb2.SystemErrorCode.SYSTEM_ERROR_CODE_NOSPC: OSError,
37
+ }
38
+
39
+ T = TypeVar("T", str, bytes)
40
+
41
+
42
+ async def _delete_bytes(file: "_FileIO", start: Optional[int] = None, end: Optional[int] = None) -> None:
43
+ """Delete a range of bytes from the file.
44
+
45
+ `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
46
+ If either is None, the start or end of the file is used, respectively.
47
+ """
48
+ assert file._file_descriptor is not None
49
+ file._check_closed()
50
+ if start is not None and end is not None:
51
+ if start >= end:
52
+ raise ValueError("start must be less than end")
53
+ resp = await file._make_request(
54
+ api_pb2.ContainerFilesystemExecRequest(
55
+ file_delete_bytes_request=api_pb2.ContainerFileDeleteBytesRequest(
56
+ file_descriptor=file._file_descriptor,
57
+ start_inclusive=start,
58
+ end_exclusive=end,
59
+ ),
60
+ task_id=file._task_id,
61
+ )
62
+ )
63
+ await file._wait(resp.exec_id)
64
+
65
+
66
+ async def _replace_bytes(file: "_FileIO", data: bytes, start: Optional[int] = None, end: Optional[int] = None) -> None:
67
+ """Replace a range of bytes in the file with new data. The length of the data does not
68
+ have to be the same as the length of the range being replaced.
69
+
70
+ `start` and `end` are byte offsets. `start` is inclusive, `end` is exclusive.
71
+ If either is None, the start or end of the file is used, respectively.
72
+ """
73
+ assert file._file_descriptor is not None
74
+ file._check_closed()
75
+ if start is not None and end is not None:
76
+ if start >= end:
77
+ raise InvalidError("start must be less than end")
78
+ if len(data) > LARGE_FILE_SIZE_LIMIT:
79
+ raise InvalidError("Write request payload exceeds 16 MiB limit")
80
+ resp = await file._make_request(
81
+ api_pb2.ContainerFilesystemExecRequest(
82
+ file_write_replace_bytes_request=api_pb2.ContainerFileWriteReplaceBytesRequest(
83
+ file_descriptor=file._file_descriptor,
84
+ data=data,
85
+ start_inclusive=start,
86
+ end_exclusive=end,
87
+ ),
88
+ task_id=file._task_id,
89
+ )
90
+ )
91
+ await file._wait(resp.exec_id)
92
+
93
+
94
+ # The FileIO class is designed to mimic Python's io.FileIO
95
+ # See https://github.com/python/cpython/blob/main/Lib/_pyio.py#L1459
96
+ class _FileIO(Generic[T]):
97
+ """FileIO handle, used in the Sandbox filesystem API.
98
+
99
+ The API is designed to mimic Python's io.FileIO.
100
+
101
+ **Usage**
102
+
103
+ ```python
104
+ import modal
105
+
106
+ app = modal.App.lookup("my-app", create_if_missing=True)
107
+
108
+ sb = modal.Sandbox.create(app=app)
109
+ f = sb.open("/tmp/foo.txt", "w")
110
+ f.write("hello")
111
+ f.close()
112
+ ```
113
+ """
114
+
115
+ _binary = False
116
+ _readable = False
117
+ _writable = False
118
+ _appended = False
119
+ _closed = True
120
+
121
+ _task_id: str = ""
122
+ _file_descriptor: str = ""
123
+ _client: Optional[_Client] = None
124
+
125
+ def _validate_mode(self, mode: str) -> None:
126
+ if not any(char in mode for char in "rwax"):
127
+ raise ValueError(f"Invalid file mode: {mode}")
128
+
129
+ self._readable = "r" in mode or "+" in mode
130
+ self._writable = "w" in mode or "a" in mode or "x" in mode or "+" in mode
131
+ self._appended = "a" in mode
132
+ self._binary = "b" in mode
133
+
134
+ valid_chars = set("rwaxb+")
135
+ if any(char not in valid_chars for char in mode):
136
+ raise ValueError(f"Invalid file mode: {mode}")
137
+
138
+ mode_count = sum(1 for c in mode if c in "rwax")
139
+ if mode_count > 1:
140
+ raise ValueError("must have exactly one of create/read/write/append mode")
141
+
142
+ seen_chars = set()
143
+ for char in mode:
144
+ if char in seen_chars:
145
+ raise ValueError(f"Invalid file mode: {mode}")
146
+ seen_chars.add(char)
147
+
148
+ def _handle_error(self, error: api_pb2.SystemErrorMessage) -> None:
149
+ error_class = ERROR_MAPPING.get(error.error_code, FilesystemExecutionError)
150
+ raise error_class(error.error_message)
151
+
152
+ async def _consume_output(self, exec_id: str) -> AsyncIterator[Optional[bytes]]:
153
+ req = api_pb2.ContainerFilesystemExecGetOutputRequest(
154
+ exec_id=exec_id,
155
+ timeout=55,
156
+ )
157
+ assert self._client is not None
158
+ async for batch in self._client.stub.ContainerFilesystemExecGetOutput.unary_stream(req):
159
+ if batch.eof:
160
+ yield None
161
+ break
162
+ if batch.HasField("error"):
163
+ self._handle_error(batch.error)
164
+ for message in batch.output:
165
+ yield message
166
+
167
+ async def _wait(self, exec_id: str) -> bytes:
168
+ # The logic here is similar to how output is read from `exec`
169
+ output = b""
170
+ completed = False
171
+ retries_remaining = 10
172
+ while not completed:
173
+ try:
174
+ async for data in self._consume_output(exec_id):
175
+ if data is None:
176
+ completed = True
177
+ break
178
+ output += data
179
+ except (GRPCError, StreamTerminatedError) as exc:
180
+ if retries_remaining > 0:
181
+ retries_remaining -= 1
182
+ if isinstance(exc, GRPCError):
183
+ if exc.status in RETRYABLE_GRPC_STATUS_CODES:
184
+ await asyncio.sleep(1.0)
185
+ continue
186
+ elif isinstance(exc, StreamTerminatedError):
187
+ continue
188
+ raise
189
+ return output
190
+
191
+ def _validate_type(self, data: Union[bytes, str]) -> None:
192
+ if self._binary and isinstance(data, str):
193
+ raise TypeError("Expected bytes when in binary mode")
194
+ if not self._binary and isinstance(data, bytes):
195
+ raise TypeError("Expected str when in text mode")
196
+
197
+ async def _open_file(self, path: str, mode: str) -> None:
198
+ resp = await self._make_request(
199
+ api_pb2.ContainerFilesystemExecRequest(
200
+ file_open_request=api_pb2.ContainerFileOpenRequest(path=path, mode=mode),
201
+ task_id=self._task_id,
202
+ )
203
+ )
204
+ if not resp.HasField("file_descriptor"):
205
+ raise FilesystemExecutionError("Failed to open file")
206
+ self._file_descriptor = resp.file_descriptor
207
+ await self._wait(resp.exec_id)
208
+
209
+ @classmethod
210
+ async def create(
211
+ cls, path: str, mode: Union["_typeshed.OpenTextMode", "_typeshed.OpenBinaryMode"], client: _Client, task_id: str
212
+ ) -> "_FileIO":
213
+ """Create a new FileIO handle."""
214
+ self = cls.__new__(cls)
215
+ self._client = client
216
+ self._task_id = task_id
217
+ self._validate_mode(mode)
218
+ await self._open_file(path, mode)
219
+ self._closed = False
220
+ return self
221
+
222
+ async def _make_request(
223
+ self, request: api_pb2.ContainerFilesystemExecRequest
224
+ ) -> api_pb2.ContainerFilesystemExecResponse:
225
+ assert self._client is not None
226
+ return await retry_transient_errors(self._client.stub.ContainerFilesystemExec, request)
227
+
228
+ async def _make_read_request(self, n: Optional[int]) -> bytes:
229
+ resp = await self._make_request(
230
+ api_pb2.ContainerFilesystemExecRequest(
231
+ file_read_request=api_pb2.ContainerFileReadRequest(file_descriptor=self._file_descriptor, n=n),
232
+ task_id=self._task_id,
233
+ )
234
+ )
235
+ return await self._wait(resp.exec_id)
236
+
237
+ async def read(self, n: Optional[int] = None) -> T:
238
+ """Read n bytes from the current position, or the entire remaining file if n is None."""
239
+ self._check_closed()
240
+ self._check_readable()
241
+ if n is not None and n > READ_FILE_SIZE_LIMIT:
242
+ raise ValueError("Read request payload exceeds 100 MiB limit")
243
+ output = await self._make_read_request(n)
244
+ if self._binary:
245
+ return cast(T, output)
246
+ return cast(T, output.decode("utf-8"))
247
+
248
+ async def readline(self) -> T:
249
+ """Read a single line from the current position."""
250
+ self._check_closed()
251
+ self._check_readable()
252
+ resp = await self._make_request(
253
+ api_pb2.ContainerFilesystemExecRequest(
254
+ file_read_line_request=api_pb2.ContainerFileReadLineRequest(file_descriptor=self._file_descriptor),
255
+ task_id=self._task_id,
256
+ )
257
+ )
258
+ output = await self._wait(resp.exec_id)
259
+ if self._binary:
260
+ return cast(T, output)
261
+ return cast(T, output.decode("utf-8"))
262
+
263
+ async def readlines(self) -> Sequence[T]:
264
+ """Read all lines from the current position."""
265
+ self._check_closed()
266
+ self._check_readable()
267
+ output = await self._make_read_request(None)
268
+ if self._binary:
269
+ lines_bytes = output.split(b"\n")
270
+ output = [line + b"\n" for line in lines_bytes[:-1]] + ([lines_bytes[-1]] if lines_bytes[-1] else [])
271
+ return cast(Sequence[T], output)
272
+ else:
273
+ lines = output.decode("utf-8").split("\n")
274
+ output = [line + "\n" for line in lines[:-1]] + ([lines[-1]] if lines[-1] else [])
275
+ return cast(Sequence[T], output)
276
+
277
+ async def write(self, data: Union[bytes, str]) -> None:
278
+ """Write data to the current position.
279
+
280
+ Writes may not appear until the entire buffer is flushed, which
281
+ can be done manually with `flush()` or automatically when the file is
282
+ closed.
283
+ """
284
+ self._check_closed()
285
+ self._check_writable()
286
+ self._validate_type(data)
287
+ if isinstance(data, str):
288
+ data = data.encode("utf-8")
289
+ if len(data) > LARGE_FILE_SIZE_LIMIT:
290
+ raise ValueError("Write request payload exceeds 16 MiB limit")
291
+ resp = await self._make_request(
292
+ api_pb2.ContainerFilesystemExecRequest(
293
+ file_write_request=api_pb2.ContainerFileWriteRequest(file_descriptor=self._file_descriptor, data=data),
294
+ task_id=self._task_id,
295
+ )
296
+ )
297
+ await self._wait(resp.exec_id)
298
+
299
+ async def flush(self) -> None:
300
+ """Flush the buffer to disk."""
301
+ self._check_closed()
302
+ self._check_writable()
303
+ resp = await self._make_request(
304
+ api_pb2.ContainerFilesystemExecRequest(
305
+ file_flush_request=api_pb2.ContainerFileFlushRequest(file_descriptor=self._file_descriptor),
306
+ task_id=self._task_id,
307
+ )
308
+ )
309
+ await self._wait(resp.exec_id)
310
+
311
+ def _get_whence(self, whence: int):
312
+ if whence == 0:
313
+ return api_pb2.SeekWhence.SEEK_SET
314
+ elif whence == 1:
315
+ return api_pb2.SeekWhence.SEEK_CUR
316
+ elif whence == 2:
317
+ return api_pb2.SeekWhence.SEEK_END
318
+ else:
319
+ raise ValueError(f"Invalid whence value: {whence}")
320
+
321
+ async def seek(self, offset: int, whence: int = 0) -> None:
322
+ """Move to a new position in the file.
323
+
324
+ `whence` defaults to 0 (absolute file positioning); other values are 1
325
+ (relative to the current position) and 2 (relative to the file's end).
326
+ """
327
+ self._check_closed()
328
+ resp = await self._make_request(
329
+ api_pb2.ContainerFilesystemExecRequest(
330
+ file_seek_request=api_pb2.ContainerFileSeekRequest(
331
+ file_descriptor=self._file_descriptor,
332
+ offset=offset,
333
+ whence=self._get_whence(whence),
334
+ ),
335
+ task_id=self._task_id,
336
+ )
337
+ )
338
+ await self._wait(resp.exec_id)
339
+
340
+ async def _close(self) -> None:
341
+ # Buffer is flushed by the runner on close
342
+ resp = await self._make_request(
343
+ api_pb2.ContainerFilesystemExecRequest(
344
+ file_close_request=api_pb2.ContainerFileCloseRequest(file_descriptor=self._file_descriptor),
345
+ task_id=self._task_id,
346
+ )
347
+ )
348
+ self._closed = True
349
+ await self._wait(resp.exec_id)
350
+
351
+ async def close(self) -> None:
352
+ """Flush the buffer and close the file."""
353
+ await self._close()
354
+
355
+ # also validated in the runner, but checked in the client to catch errors early
356
+ def _check_writable(self) -> None:
357
+ if not self._writable:
358
+ raise io.UnsupportedOperation("not writeable")
359
+
360
+ # also validated in the runner, but checked in the client to catch errors early
361
+ def _check_readable(self) -> None:
362
+ if not self._readable:
363
+ raise io.UnsupportedOperation("not readable")
364
+
365
+ # also validated in the runner, but checked in the client to catch errors early
366
+ def _check_closed(self) -> None:
367
+ if self._closed:
368
+ raise ValueError("I/O operation on closed file")
369
+
370
+ def __enter__(self) -> "_FileIO":
371
+ self._check_closed()
372
+ return self
373
+
374
+ async def __exit__(self, exc_type, exc_value, traceback) -> None:
375
+ await self._close()
376
+
377
+
378
+ delete_bytes = synchronize_api(_delete_bytes)
379
+ replace_bytes = synchronize_api(_replace_bytes)
380
+ FileIO = synchronize_api(_FileIO)
modal/file_io.pyi ADDED
@@ -0,0 +1,185 @@
1
+ import _typeshed
2
+ import modal.client
3
+ import modal_proto.api_pb2
4
+ import typing
5
+ import typing_extensions
6
+
7
+ T = typing.TypeVar("T")
8
+
9
+ async def _delete_bytes(
10
+ file: _FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None
11
+ ) -> None: ...
12
+ async def _replace_bytes(
13
+ file: _FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
14
+ ) -> None: ...
15
+
16
+ class _FileIO(typing.Generic[T]):
17
+ _task_id: str
18
+ _file_descriptor: str
19
+ _client: typing.Optional[modal.client._Client]
20
+
21
+ def _validate_mode(self, mode: str) -> None: ...
22
+ def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...
23
+ def _consume_output(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...
24
+ async def _wait(self, exec_id: str) -> bytes: ...
25
+ def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
26
+ async def _open_file(self, path: str, mode: str) -> None: ...
27
+ @classmethod
28
+ async def create(
29
+ cls,
30
+ path: str,
31
+ mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
32
+ client: modal.client._Client,
33
+ task_id: str,
34
+ ) -> _FileIO: ...
35
+ async def _make_request(
36
+ self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
37
+ ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
38
+ async def _make_read_request(self, n: typing.Optional[int]) -> bytes: ...
39
+ async def read(self, n: typing.Optional[int] = None) -> T: ...
40
+ async def readline(self) -> T: ...
41
+ async def readlines(self) -> typing.Sequence[T]: ...
42
+ async def write(self, data: typing.Union[bytes, str]) -> None: ...
43
+ async def flush(self) -> None: ...
44
+ def _get_whence(self, whence: int): ...
45
+ async def seek(self, offset: int, whence: int = 0) -> None: ...
46
+ async def _close(self) -> None: ...
47
+ async def close(self) -> None: ...
48
+ def _check_writable(self) -> None: ...
49
+ def _check_readable(self) -> None: ...
50
+ def _check_closed(self) -> None: ...
51
+ def __enter__(self) -> _FileIO: ...
52
+ async def __exit__(self, exc_type, exc_value, traceback) -> None: ...
53
+
54
+ class __delete_bytes_spec(typing_extensions.Protocol):
55
+ def __call__(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...
56
+ async def aio(self, file: FileIO, start: typing.Optional[int] = None, end: typing.Optional[int] = None) -> None: ...
57
+
58
+ delete_bytes: __delete_bytes_spec
59
+
60
+ class __replace_bytes_spec(typing_extensions.Protocol):
61
+ def __call__(
62
+ self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
63
+ ) -> None: ...
64
+ async def aio(
65
+ self, file: FileIO, data: bytes, start: typing.Optional[int] = None, end: typing.Optional[int] = None
66
+ ) -> None: ...
67
+
68
+ replace_bytes: __replace_bytes_spec
69
+
70
+ T_INNER = typing.TypeVar("T_INNER", covariant=True)
71
+
72
+ class FileIO(typing.Generic[T]):
73
+ _task_id: str
74
+ _file_descriptor: str
75
+ _client: typing.Optional[modal.client.Client]
76
+
77
+ def __init__(self, /, *args, **kwargs): ...
78
+ def _validate_mode(self, mode: str) -> None: ...
79
+ def _handle_error(self, error: modal_proto.api_pb2.SystemErrorMessage) -> None: ...
80
+
81
+ class ___consume_output_spec(typing_extensions.Protocol):
82
+ def __call__(self, exec_id: str) -> typing.Iterator[typing.Optional[bytes]]: ...
83
+ def aio(self, exec_id: str) -> typing.AsyncIterator[typing.Optional[bytes]]: ...
84
+
85
+ _consume_output: ___consume_output_spec
86
+
87
+ class ___wait_spec(typing_extensions.Protocol):
88
+ def __call__(self, exec_id: str) -> bytes: ...
89
+ async def aio(self, exec_id: str) -> bytes: ...
90
+
91
+ _wait: ___wait_spec
92
+
93
+ def _validate_type(self, data: typing.Union[bytes, str]) -> None: ...
94
+
95
+ class ___open_file_spec(typing_extensions.Protocol):
96
+ def __call__(self, path: str, mode: str) -> None: ...
97
+ async def aio(self, path: str, mode: str) -> None: ...
98
+
99
+ _open_file: ___open_file_spec
100
+
101
+ @classmethod
102
+ def create(
103
+ cls,
104
+ path: str,
105
+ mode: typing.Union[_typeshed.OpenTextMode, _typeshed.OpenBinaryMode],
106
+ client: modal.client.Client,
107
+ task_id: str,
108
+ ) -> FileIO: ...
109
+
110
+ class ___make_request_spec(typing_extensions.Protocol):
111
+ def __call__(
112
+ self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
113
+ ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
114
+ async def aio(
115
+ self, request: modal_proto.api_pb2.ContainerFilesystemExecRequest
116
+ ) -> modal_proto.api_pb2.ContainerFilesystemExecResponse: ...
117
+
118
+ _make_request: ___make_request_spec
119
+
120
+ class ___make_read_request_spec(typing_extensions.Protocol):
121
+ def __call__(self, n: typing.Optional[int]) -> bytes: ...
122
+ async def aio(self, n: typing.Optional[int]) -> bytes: ...
123
+
124
+ _make_read_request: ___make_read_request_spec
125
+
126
+ class __read_spec(typing_extensions.Protocol[T_INNER]):
127
+ def __call__(self, n: typing.Optional[int] = None) -> T_INNER: ...
128
+ async def aio(self, n: typing.Optional[int] = None) -> T_INNER: ...
129
+
130
+ read: __read_spec[T]
131
+
132
+ class __readline_spec(typing_extensions.Protocol[T_INNER]):
133
+ def __call__(self) -> T_INNER: ...
134
+ async def aio(self) -> T_INNER: ...
135
+
136
+ readline: __readline_spec[T]
137
+
138
+ class __readlines_spec(typing_extensions.Protocol[T_INNER]):
139
+ def __call__(self) -> typing.Sequence[T_INNER]: ...
140
+ async def aio(self) -> typing.Sequence[T_INNER]: ...
141
+
142
+ readlines: __readlines_spec[T]
143
+
144
+ class __write_spec(typing_extensions.Protocol):
145
+ def __call__(self, data: typing.Union[bytes, str]) -> None: ...
146
+ async def aio(self, data: typing.Union[bytes, str]) -> None: ...
147
+
148
+ write: __write_spec
149
+
150
+ class __flush_spec(typing_extensions.Protocol):
151
+ def __call__(self) -> None: ...
152
+ async def aio(self) -> None: ...
153
+
154
+ flush: __flush_spec
155
+
156
+ def _get_whence(self, whence: int): ...
157
+
158
+ class __seek_spec(typing_extensions.Protocol):
159
+ def __call__(self, offset: int, whence: int = 0) -> None: ...
160
+ async def aio(self, offset: int, whence: int = 0) -> None: ...
161
+
162
+ seek: __seek_spec
163
+
164
+ class ___close_spec(typing_extensions.Protocol):
165
+ def __call__(self) -> None: ...
166
+ async def aio(self) -> None: ...
167
+
168
+ _close: ___close_spec
169
+
170
+ class __close_spec(typing_extensions.Protocol):
171
+ def __call__(self) -> None: ...
172
+ async def aio(self) -> None: ...
173
+
174
+ close: __close_spec
175
+
176
+ def _check_writable(self) -> None: ...
177
+ def _check_readable(self) -> None: ...
178
+ def _check_closed(self) -> None: ...
179
+ def __enter__(self) -> FileIO: ...
180
+
181
+ class ____exit___spec(typing_extensions.Protocol):
182
+ def __call__(self, exc_type, exc_value, traceback) -> None: ...
183
+ async def aio(self, exc_type, exc_value, traceback) -> None: ...
184
+
185
+ __exit__: ____exit___spec
modal/file_pattern_matcher.py ADDED
@@ -0,0 +1,121 @@
1
+ # Copyright Modal Labs 2024
2
+ """Pattern matching library ported from https://github.com/moby/patternmatcher.
3
+
4
+ This is the same pattern-matching logic used by Docker, except it is written in
5
+ Python rather than Go. Also, the original Go library has a couple deprecated
6
+ functions that we don't implement in this port.
7
+
8
+ The main way to use this library is by constructing a `FilePatternMatcher` object,
9
+ then asking it whether file paths match any of its patterns.
10
+ """
11
+
12
+ import os
13
+ from pathlib import Path
14
+ from typing import Callable
15
+
16
+ from ._utils.pattern_utils import Pattern
17
+
18
+
19
+ class FilePatternMatcher:
20
+ """Allows matching file paths against a list of patterns."""
21
+
22
+ def __init__(self, *pattern: str) -> None:
23
+ """Initialize a new FilePatternMatcher instance.
24
+
25
+ Args:
26
+ pattern (str): One or more pattern strings.
27
+
28
+ Raises:
29
+ ValueError: If an illegal exclusion pattern is provided.
30
+ """
31
+ self.patterns: list[Pattern] = []
32
+ self.exclusions = False
33
+ for p in list(pattern):
34
+ p = p.strip()
35
+ if not p:
36
+ continue
37
+ p = os.path.normpath(p)
38
+ new_pattern = Pattern()
39
+ if p[0] == "!":
40
+ if len(p) == 1:
41
+ raise ValueError('Illegal exclusion pattern: "!"')
42
+ new_pattern.exclusion = True
43
+ p = p[1:]
44
+ self.exclusions = True
45
+ # In Python, we can proceed without explicit syntax checking
46
+ new_pattern.cleaned_pattern = p
47
+ new_pattern.dirs = p.split(os.path.sep)
48
+ self.patterns.append(new_pattern)
49
+
50
+ def _matches(self, file_path: str) -> bool:
51
+ """Check if the file path or any of its parent directories match the patterns.
52
+
53
+ This is equivalent to `MatchesOrParentMatches()` in the original Go
54
+ library. The reason is that `Matches()` in the original library is
55
+ deprecated due to buggy behavior.
56
+ """
57
+ matched = False
58
+ file_path = os.path.normpath(file_path)
59
+ if file_path == ".":
60
+ # Don't let them exclude everything; kind of silly.
61
+ return False
62
+ parent_path = os.path.dirname(file_path)
63
+ if parent_path == "":
64
+ parent_path = "."
65
+ parent_path_dirs = parent_path.split(os.path.sep)
66
+
67
+ for pattern in self.patterns:
68
+ # Skip evaluation based on current match status and pattern exclusion
69
+ if pattern.exclusion != matched:
70
+ continue
71
+
72
+ match = pattern.match(file_path)
73
+
74
+ if not match and parent_path != ".":
75
+ # Check if the pattern matches any of the parent directories
76
+ for i in range(len(parent_path_dirs)):
77
+ dir_path = os.path.sep.join(parent_path_dirs[: i + 1])
78
+ if pattern.match(dir_path):
79
+ match = True
80
+ break
81
+
82
+ if match:
83
+ matched = not pattern.exclusion
84
+
85
+ return matched
86
+
87
+ def __call__(self, file_path: Path) -> bool:
88
+ """Check if the path matches any of the patterns.
89
+
90
+ Args:
91
+ file_path (Path): The path to check.
92
+
93
+ Returns:
94
+ True if the path matches any of the patterns.
95
+
96
+ Usage:
97
+ ```python
98
+ from pathlib import Path
99
+ from modal import FilePatternMatcher
100
+
101
+ matcher = FilePatternMatcher("*.py")
102
+
103
+ assert matcher(Path("foo.py"))
104
+ ```
105
+ """
106
+ return self._matches(str(file_path))
107
+
108
+ def __invert__(self) -> Callable[[Path], bool]:
109
+ """Invert the filter. Returns a function that returns True if the path does not match any of the patterns.
110
+
111
+ Usage:
112
+ ```python
113
+ from pathlib import Path
114
+ from modal import FilePatternMatcher
115
+
116
+ inverted_matcher = ~FilePatternMatcher("**/*.py")
117
+
118
+ assert not inverted_matcher(Path("foo.py"))
119
+ ```
120
+ """
121
+ return lambda path: not self(path)