modal 0.62.115__py3-none-any.whl → 0.72.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modal/__init__.py +13 -9
- modal/__main__.py +41 -3
- modal/_clustered_functions.py +80 -0
- modal/_clustered_functions.pyi +22 -0
- modal/_container_entrypoint.py +402 -398
- modal/_ipython.py +3 -13
- modal/_location.py +17 -10
- modal/_output.py +243 -99
- modal/_pty.py +2 -2
- modal/_resolver.py +55 -60
- modal/_resources.py +26 -7
- modal/_runtime/__init__.py +1 -0
- modal/_runtime/asgi.py +519 -0
- modal/_runtime/container_io_manager.py +1025 -0
- modal/{execution_context.py → _runtime/execution_context.py} +11 -2
- modal/_runtime/telemetry.py +169 -0
- modal/_runtime/user_code_imports.py +356 -0
- modal/_serialization.py +123 -6
- modal/_traceback.py +47 -187
- modal/_tunnel.py +50 -14
- modal/_tunnel.pyi +19 -36
- modal/_utils/app_utils.py +3 -17
- modal/_utils/async_utils.py +386 -104
- modal/_utils/blob_utils.py +157 -186
- modal/_utils/bytes_io_segment_payload.py +97 -0
- modal/_utils/deprecation.py +89 -0
- modal/_utils/docker_utils.py +98 -0
- modal/_utils/function_utils.py +299 -98
- modal/_utils/grpc_testing.py +47 -34
- modal/_utils/grpc_utils.py +54 -21
- modal/_utils/hash_utils.py +51 -10
- modal/_utils/http_utils.py +39 -9
- modal/_utils/logger.py +2 -1
- modal/_utils/mount_utils.py +34 -16
- modal/_utils/name_utils.py +58 -0
- modal/_utils/package_utils.py +14 -1
- modal/_utils/pattern_utils.py +205 -0
- modal/_utils/rand_pb_testing.py +3 -3
- modal/_utils/shell_utils.py +15 -49
- modal/_vendor/a2wsgi_wsgi.py +62 -72
- modal/_vendor/cloudpickle.py +1 -1
- modal/_watcher.py +12 -10
- modal/app.py +561 -323
- modal/app.pyi +474 -262
- modal/call_graph.py +7 -6
- modal/cli/_download.py +22 -6
- modal/cli/_traceback.py +200 -0
- modal/cli/app.py +203 -42
- modal/cli/config.py +12 -5
- modal/cli/container.py +61 -13
- modal/cli/dict.py +128 -0
- modal/cli/entry_point.py +26 -13
- modal/cli/environment.py +40 -9
- modal/cli/import_refs.py +21 -48
- modal/cli/launch.py +28 -14
- modal/cli/network_file_system.py +57 -21
- modal/cli/profile.py +1 -1
- modal/cli/programs/run_jupyter.py +34 -9
- modal/cli/programs/vscode.py +58 -8
- modal/cli/queues.py +131 -0
- modal/cli/run.py +199 -96
- modal/cli/secret.py +5 -4
- modal/cli/token.py +7 -2
- modal/cli/utils.py +74 -8
- modal/cli/volume.py +97 -56
- modal/client.py +248 -144
- modal/client.pyi +156 -124
- modal/cloud_bucket_mount.py +43 -30
- modal/cloud_bucket_mount.pyi +32 -25
- modal/cls.py +528 -141
- modal/cls.pyi +189 -145
- modal/config.py +32 -15
- modal/container_process.py +177 -0
- modal/container_process.pyi +82 -0
- modal/dict.py +50 -54
- modal/dict.pyi +120 -164
- modal/environments.py +106 -5
- modal/environments.pyi +77 -25
- modal/exception.py +30 -43
- modal/experimental.py +62 -2
- modal/file_io.py +537 -0
- modal/file_io.pyi +235 -0
- modal/file_pattern_matcher.py +196 -0
- modal/functions.py +846 -428
- modal/functions.pyi +446 -387
- modal/gpu.py +57 -44
- modal/image.py +943 -417
- modal/image.pyi +584 -245
- modal/io_streams.py +434 -0
- modal/io_streams.pyi +122 -0
- modal/mount.py +223 -90
- modal/mount.pyi +241 -243
- modal/network_file_system.py +85 -86
- modal/network_file_system.pyi +151 -110
- modal/object.py +66 -36
- modal/object.pyi +166 -143
- modal/output.py +63 -0
- modal/parallel_map.py +73 -47
- modal/parallel_map.pyi +51 -63
- modal/partial_function.py +272 -107
- modal/partial_function.pyi +219 -120
- modal/proxy.py +15 -12
- modal/proxy.pyi +3 -8
- modal/queue.py +96 -72
- modal/queue.pyi +210 -135
- modal/requirements/2024.04.txt +2 -1
- modal/requirements/2024.10.txt +16 -0
- modal/requirements/README.md +21 -0
- modal/requirements/base-images.json +22 -0
- modal/retries.py +45 -4
- modal/runner.py +325 -203
- modal/runner.pyi +124 -110
- modal/running_app.py +27 -4
- modal/sandbox.py +509 -231
- modal/sandbox.pyi +396 -169
- modal/schedule.py +2 -2
- modal/scheduler_placement.py +20 -3
- modal/secret.py +41 -25
- modal/secret.pyi +62 -42
- modal/serving.py +39 -49
- modal/serving.pyi +37 -43
- modal/stream_type.py +15 -0
- modal/token_flow.py +5 -3
- modal/token_flow.pyi +37 -32
- modal/volume.py +123 -137
- modal/volume.pyi +228 -221
- {modal-0.62.115.dist-info → modal-0.72.13.dist-info}/METADATA +5 -5
- modal-0.72.13.dist-info/RECORD +174 -0
- {modal-0.62.115.dist-info → modal-0.72.13.dist-info}/top_level.txt +0 -1
- modal_docs/gen_reference_docs.py +3 -1
- modal_docs/mdmd/mdmd.py +0 -1
- modal_docs/mdmd/signatures.py +1 -2
- modal_global_objects/images/base_images.py +28 -0
- modal_global_objects/mounts/python_standalone.py +2 -2
- modal_proto/__init__.py +1 -1
- modal_proto/api.proto +1231 -531
- modal_proto/api_grpc.py +750 -430
- modal_proto/api_pb2.py +2102 -1176
- modal_proto/api_pb2.pyi +8859 -0
- modal_proto/api_pb2_grpc.py +1329 -675
- modal_proto/api_pb2_grpc.pyi +1416 -0
- modal_proto/modal_api_grpc.py +149 -0
- modal_proto/modal_options_grpc.py +3 -0
- modal_proto/options_pb2.pyi +20 -0
- modal_proto/options_pb2_grpc.pyi +7 -0
- modal_proto/py.typed +0 -0
- modal_version/__init__.py +1 -1
- modal_version/_version_generated.py +2 -2
- modal/_asgi.py +0 -370
- modal/_container_exec.py +0 -128
- modal/_container_io_manager.py +0 -646
- modal/_container_io_manager.pyi +0 -412
- modal/_sandbox_shell.py +0 -49
- modal/app_utils.py +0 -20
- modal/app_utils.pyi +0 -17
- modal/execution_context.pyi +0 -37
- modal/shared_volume.py +0 -23
- modal/shared_volume.pyi +0 -24
- modal-0.62.115.dist-info/RECORD +0 -207
- modal_global_objects/images/conda.py +0 -15
- modal_global_objects/images/debian_slim.py +0 -15
- modal_global_objects/images/micromamba.py +0 -15
- test/__init__.py +0 -1
- test/aio_test.py +0 -12
- test/async_utils_test.py +0 -279
- test/blob_test.py +0 -67
- test/cli_imports_test.py +0 -149
- test/cli_test.py +0 -674
- test/client_test.py +0 -203
- test/cloud_bucket_mount_test.py +0 -22
- test/cls_test.py +0 -636
- test/config_test.py +0 -149
- test/conftest.py +0 -1485
- test/container_app_test.py +0 -50
- test/container_test.py +0 -1405
- test/cpu_test.py +0 -23
- test/decorator_test.py +0 -85
- test/deprecation_test.py +0 -34
- test/dict_test.py +0 -51
- test/e2e_test.py +0 -68
- test/error_test.py +0 -7
- test/function_serialization_test.py +0 -32
- test/function_test.py +0 -791
- test/function_utils_test.py +0 -101
- test/gpu_test.py +0 -159
- test/grpc_utils_test.py +0 -82
- test/helpers.py +0 -47
- test/image_test.py +0 -814
- test/live_reload_test.py +0 -80
- test/lookup_test.py +0 -70
- test/mdmd_test.py +0 -329
- test/mount_test.py +0 -162
- test/mounted_files_test.py +0 -327
- test/network_file_system_test.py +0 -188
- test/notebook_test.py +0 -66
- test/object_test.py +0 -41
- test/package_utils_test.py +0 -25
- test/queue_test.py +0 -115
- test/resolver_test.py +0 -59
- test/retries_test.py +0 -67
- test/runner_test.py +0 -85
- test/sandbox_test.py +0 -191
- test/schedule_test.py +0 -15
- test/scheduler_placement_test.py +0 -57
- test/secret_test.py +0 -89
- test/serialization_test.py +0 -50
- test/stub_composition_test.py +0 -10
- test/stub_test.py +0 -361
- test/test_asgi_wrapper.py +0 -234
- test/token_flow_test.py +0 -18
- test/traceback_test.py +0 -135
- test/tunnel_test.py +0 -29
- test/utils_test.py +0 -88
- test/version_test.py +0 -14
- test/volume_test.py +0 -397
- test/watcher_test.py +0 -58
- test/webhook_test.py +0 -145
- {modal-0.62.115.dist-info → modal-0.72.13.dist-info}/LICENSE +0 -0
- {modal-0.62.115.dist-info → modal-0.72.13.dist-info}/WHEEL +0 -0
- {modal-0.62.115.dist-info → modal-0.72.13.dist-info}/entry_points.txt +0 -0
modal/io_streams.py
ADDED
@@ -0,0 +1,434 @@
|
|
1
|
+
# Copyright Modal Labs 2022
|
2
|
+
import asyncio
|
3
|
+
from collections.abc import AsyncGenerator, AsyncIterator
|
4
|
+
from typing import (
|
5
|
+
TYPE_CHECKING,
|
6
|
+
Generic,
|
7
|
+
Literal,
|
8
|
+
Optional,
|
9
|
+
TypeVar,
|
10
|
+
Union,
|
11
|
+
cast,
|
12
|
+
)
|
13
|
+
|
14
|
+
from grpclib import Status
|
15
|
+
from grpclib.exceptions import GRPCError, StreamTerminatedError
|
16
|
+
|
17
|
+
from modal.exception import ClientClosed, InvalidError
|
18
|
+
from modal_proto import api_pb2
|
19
|
+
|
20
|
+
from ._utils.async_utils import synchronize_api
|
21
|
+
from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
|
22
|
+
from .client import _Client
|
23
|
+
from .stream_type import StreamType
|
24
|
+
|
25
|
+
if TYPE_CHECKING:
|
26
|
+
pass
|
27
|
+
|
28
|
+
|
29
|
+
async def _sandbox_logs_iterator(
    sandbox_id: str, file_descriptor: "api_pb2.FileDescriptor.ValueType", last_entry_id: str, client: _Client
) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
    """Yield `(log_bytes, entry_id)` pairs for one sandbox output stream.

    The entry id of the most recent batch is yielded alongside each message so the
    caller can persist it and resume from that point after a reconnect. A
    `(None, entry_id)` pair signals EOF, after which the generator stops.
    """
    req = api_pb2.SandboxGetLogsRequest(
        sandbox_id=sandbox_id,
        file_descriptor=file_descriptor,
        timeout=55,  # server-side timeout; if it elapses the caller re-invokes with last_entry_id
        last_entry_id=last_entry_id,
    )
    async for log_batch in client.stub.SandboxGetLogs.unary_stream(req):
        last_entry_id = log_batch.entry_id

        for message in log_batch.items:
            # Sandbox logs arrive as strings; encode so downstream readers handle bytes uniformly.
            yield (message.data.encode("utf-8"), last_entry_id)
        if log_batch.eof:
            # EOF sentinel: None tells the reader the stream is complete.
            yield (None, last_entry_id)
            break
|
46
|
+
|
47
|
+
|
48
|
+
async def _container_process_logs_iterator(
    process_id: str, file_descriptor: "api_pb2.FileDescriptor.ValueType", client: _Client
) -> AsyncGenerator[Optional[bytes], None]:
    """Yield raw output chunks for one exec'd container-process stream.

    Yields `None` exactly once — when a batch carrying `exit_code` arrives,
    marking process termination — and then stops.
    """
    req = api_pb2.ContainerExecGetOutputRequest(
        exec_id=process_id, timeout=55, file_descriptor=file_descriptor, get_raw_bytes=True
    )
    async for batch in client.stub.ContainerExecGetOutput.unary_stream(req):
        if batch.HasField("exit_code"):
            # Presence of exit_code marks end-of-stream for this process.
            yield None
            break
        for item in batch.items:
            yield item.message_bytes
|
60
|
+
|
61
|
+
|
62
|
+
T = TypeVar("T", str, bytes)
|
63
|
+
|
64
|
+
|
65
|
+
class _StreamReader(Generic[T]):
    """Retrieve logs from a stream (`stdout` or `stderr`).

    As an asynchronous iterable, the object supports the `for` and `async for`
    statements. Just loop over the object to read in chunks.

    **Usage**

    ```python
    from modal import Sandbox

    sandbox = Sandbox.create(
        "bash",
        "-c",
        "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
        app=app,
    )
    for message in sandbox.stdout:
        print(f"Message: {message}")
    ```
    """

    # Lazily-created generator backing iteration; set on first __aiter__ call.
    _stream: Optional[AsyncGenerator[Optional[bytes], None]]

    def __init__(
        self,
        file_descriptor: "api_pb2.FileDescriptor.ValueType",
        object_id: str,
        object_type: Literal["sandbox", "container_process"],
        client: _Client,
        stream_type: StreamType = StreamType.PIPE,
        text: bool = True,
        by_line: bool = False,
    ) -> None:
        """mdmd:hidden"""
        self._file_descriptor = file_descriptor
        self._object_type = object_type
        self._object_id = object_id
        self._client = client
        self._stream = None
        # Resume cursor: sandbox entry ids are server strings; container-process
        # entry ids are stringified buffer indices.
        self._last_entry_id: str = ""
        self._line_buffer = b""

        # Sandbox logs are streamed to the client as strings, so StreamReaders reading
        # them must have text mode enabled.
        if object_type == "sandbox" and not text:
            raise ValueError("Sandbox streams must have text mode enabled.")

        # line-buffering is only supported when text=True
        if by_line and not text:
            raise ValueError("line-buffering is only supported when text=True")

        self._text = text
        self._by_line = by_line

        # Whether the reader received an EOF. Once EOF is True, it returns
        # an empty string for any subsequent reads (including async for)
        self.eof = False

        if not isinstance(stream_type, StreamType):
            raise TypeError(f"stream_type must be of type StreamType, got {type(stream_type)}")

        # We only support piping sandbox logs because they're meant to be durable logs stored
        # on the user's application.
        if object_type == "sandbox" and stream_type != StreamType.PIPE:
            raise ValueError("Sandbox streams must be piped.")
        self._stream_type = stream_type

        if self._object_type == "container_process":
            # Container process streams need to be consumed as they are produced,
            # otherwise the process will block. Use a buffer to store the stream
            # until the client consumes it.
            self._container_process_buffer: list[Optional[bytes]] = []
            self._consume_container_process_task = asyncio.create_task(self._consume_container_process_stream())

    @property
    def file_descriptor(self) -> int:
        """Possible values are `1` for stdout and `2` for stderr."""
        return self._file_descriptor

    async def read(self) -> T:
        """Fetch the entire contents of the stream until EOF.

        Returns `str` when the reader is in text mode, otherwise `bytes`.

        **Usage**

        ```python
        from modal import Sandbox

        sandbox = Sandbox.create("echo", "hello", app=app)
        sandbox.wait()

        print(sandbox.stdout.read())
        ```
        """
        data_str = ""
        data_bytes = b""
        async for message in self._get_logs():
            if message is None:
                # EOF sentinel from _get_logs.
                break
            if self._text:
                data_str += message.decode("utf-8")
            else:
                data_bytes += message

        if self._text:
            return cast(T, data_str)
        else:
            return cast(T, data_bytes)

    async def _consume_container_process_stream(self):
        """Consume the container process stream and store messages in the buffer."""
        if self._stream_type == StreamType.DEVNULL:
            # Output is deliberately discarded; nothing to consume.
            return

        completed = False
        retries_remaining = 10
        while not completed:
            try:
                iterator = _container_process_logs_iterator(self._object_id, self._file_descriptor, self._client)

                async for message in iterator:
                    if self._stream_type == StreamType.STDOUT and message:
                        # Mirror output directly to this process's stdout.
                        print(message.decode("utf-8"), end="")
                    elif self._stream_type == StreamType.PIPE:
                        # Buffer (including the None EOF sentinel) for later readers.
                        self._container_process_buffer.append(message)
                    if message is None:
                        completed = True
                        break

            except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
                # Transient transport failures: reconnect a bounded number of times.
                if retries_remaining > 0:
                    retries_remaining -= 1
                    if isinstance(exc, GRPCError):
                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
                            await asyncio.sleep(1.0)
                            continue
                    elif isinstance(exc, StreamTerminatedError):
                        continue
                    elif isinstance(exc, ClientClosed):
                        # If the client was closed, the user has triggered a cleanup.
                        break
                raise exc

    async def _stream_container_process(self) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
        """Streams the container process buffer to the reader."""
        # Entry ids for container processes are indices into the shared buffer,
        # so a reader can resume where it left off.
        entry_id = 0
        if self._last_entry_id:
            entry_id = int(self._last_entry_id) + 1

        while True:
            if entry_id >= len(self._container_process_buffer):
                # Nothing new yet; poll until the consumer task appends more.
                await asyncio.sleep(0.1)
                continue

            item = self._container_process_buffer[entry_id]

            yield (item, str(entry_id))
            if item is None:
                # EOF sentinel placed by the consumer task.
                break

            entry_id += 1

    async def _get_logs(self, skip_empty_messages: bool = True) -> AsyncGenerator[Optional[bytes], None]:
        """Streams sandbox or process logs from the server to the reader.

        Logs returned by this method may contain partial or multiple lines at a time.

        When the stream receives an EOF, it yields None. Once an EOF is received,
        subsequent invocations will not yield logs.
        """
        if self._stream_type != StreamType.PIPE:
            raise InvalidError("Logs can only be retrieved using the PIPE stream type.")

        if self.eof:
            yield None
            return

        completed = False

        retries_remaining = 10
        while not completed:
            try:
                if self._object_type == "sandbox":
                    iterator = _sandbox_logs_iterator(
                        self._object_id, self._file_descriptor, self._last_entry_id, self._client
                    )
                else:
                    iterator = self._stream_container_process()

                async for message, entry_id in iterator:
                    # Track the cursor so a retry below resumes rather than replays.
                    self._last_entry_id = entry_id
                    # Empty messages are sent when the process boots up. Don't yield them unless
                    # we're using the empty message to signal process liveness.
                    if skip_empty_messages and message == b"":
                        continue

                    yield message
                    if message is None:
                        completed = True
                        self.eof = True

            except (GRPCError, StreamTerminatedError) as exc:
                # Reconnect on transient failures, resuming from _last_entry_id.
                if retries_remaining > 0:
                    retries_remaining -= 1
                    if isinstance(exc, GRPCError):
                        if exc.status in RETRYABLE_GRPC_STATUS_CODES:
                            await asyncio.sleep(1.0)
                            continue
                    elif isinstance(exc, StreamTerminatedError):
                        continue
                raise

    async def _get_logs_by_line(self) -> AsyncGenerator[Optional[bytes], None]:
        """Process logs from the server and yield complete lines only."""
        async for message in self._get_logs():
            if message is None:
                # Flush any trailing partial line before forwarding EOF.
                if self._line_buffer:
                    yield self._line_buffer
                    self._line_buffer = b""
                yield None
            else:
                assert isinstance(message, bytes)
                self._line_buffer += message
                while b"\n" in self._line_buffer:
                    line, self._line_buffer = self._line_buffer.split(b"\n", 1)
                    yield line + b"\n"

    def __aiter__(self) -> AsyncIterator[T]:
        """mdmd:hidden"""
        if not self._stream:
            if self._by_line:
                self._stream = self._get_logs_by_line()
            else:
                self._stream = self._get_logs()
        return self

    async def __anext__(self) -> T:
        """mdmd:hidden"""
        assert self._stream is not None

        value = await self._stream.__anext__()

        # The stream yields None if it receives an EOF batch.
        if value is None:
            raise StopAsyncIteration

        if self._text:
            return cast(T, value.decode("utf-8"))
        else:
            return cast(T, value)

    async def aclose(self):
        """mdmd:hidden"""
        # NOTE(review): the background _consume_container_process_task (if any) is not
        # cancelled here — confirm its lifecycle is owned by container-process teardown.
        if self._stream:
            await self._stream.aclose()
|
320
|
+
|
321
|
+
|
322
|
+
MAX_BUFFER_SIZE = 2 * 1024 * 1024
|
323
|
+
|
324
|
+
|
325
|
+
class _StreamWriter:
|
326
|
+
"""Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
|
327
|
+
|
328
|
+
def __init__(self, object_id: str, object_type: Literal["sandbox", "container_process"], client: _Client) -> None:
|
329
|
+
"""mdmd:hidden"""
|
330
|
+
self._index = 1
|
331
|
+
self._object_id = object_id
|
332
|
+
self._object_type = object_type
|
333
|
+
self._client = client
|
334
|
+
self._is_closed = False
|
335
|
+
self._buffer = bytearray()
|
336
|
+
|
337
|
+
def _get_next_index(self) -> int:
|
338
|
+
index = self._index
|
339
|
+
self._index += 1
|
340
|
+
return index
|
341
|
+
|
342
|
+
def write(self, data: Union[bytes, bytearray, memoryview, str]) -> None:
|
343
|
+
"""Write data to the stream but does not send it immediately.
|
344
|
+
|
345
|
+
This is non-blocking and queues the data to an internal buffer. Must be
|
346
|
+
used along with the `drain()` method, which flushes the buffer.
|
347
|
+
|
348
|
+
**Usage**
|
349
|
+
|
350
|
+
```python
|
351
|
+
from modal import Sandbox
|
352
|
+
|
353
|
+
sandbox = Sandbox.create(
|
354
|
+
"bash",
|
355
|
+
"-c",
|
356
|
+
"while read line; do echo $line; done",
|
357
|
+
app=app,
|
358
|
+
)
|
359
|
+
sandbox.stdin.write(b"foo\\n")
|
360
|
+
sandbox.stdin.write(b"bar\\n")
|
361
|
+
sandbox.stdin.write_eof()
|
362
|
+
|
363
|
+
sandbox.stdin.drain()
|
364
|
+
sandbox.wait()
|
365
|
+
```
|
366
|
+
"""
|
367
|
+
if self._is_closed:
|
368
|
+
raise ValueError("Stdin is closed. Cannot write to it.")
|
369
|
+
if isinstance(data, (bytes, bytearray, memoryview, str)):
|
370
|
+
if isinstance(data, str):
|
371
|
+
data = data.encode("utf-8")
|
372
|
+
if len(self._buffer) + len(data) > MAX_BUFFER_SIZE:
|
373
|
+
raise BufferError("Buffer size exceed limit. Call drain to clear the buffer.")
|
374
|
+
self._buffer.extend(data)
|
375
|
+
else:
|
376
|
+
raise TypeError(f"data argument must be a bytes-like object, not {type(data).__name__}")
|
377
|
+
|
378
|
+
def write_eof(self) -> None:
|
379
|
+
"""Close the write end of the stream after the buffered data is drained.
|
380
|
+
|
381
|
+
If the process was blocked on input, it will become unblocked after
|
382
|
+
`write_eof()`. This method needs to be used along with the `drain()`
|
383
|
+
method, which flushes the EOF to the process.
|
384
|
+
"""
|
385
|
+
self._is_closed = True
|
386
|
+
|
387
|
+
async def drain(self) -> None:
|
388
|
+
"""Flush the write buffer and send data to the running process.
|
389
|
+
|
390
|
+
This is a flow control method that blocks until data is sent. It returns
|
391
|
+
when it is appropriate to continue writing data to the stream.
|
392
|
+
|
393
|
+
**Usage**
|
394
|
+
|
395
|
+
```python notest
|
396
|
+
writer.write(data)
|
397
|
+
writer.drain()
|
398
|
+
```
|
399
|
+
|
400
|
+
Async usage:
|
401
|
+
```python notest
|
402
|
+
writer.write(data) # not a blocking operation
|
403
|
+
await writer.drain.aio()
|
404
|
+
```
|
405
|
+
"""
|
406
|
+
data = bytes(self._buffer)
|
407
|
+
self._buffer.clear()
|
408
|
+
index = self._get_next_index()
|
409
|
+
|
410
|
+
try:
|
411
|
+
if self._object_type == "sandbox":
|
412
|
+
await retry_transient_errors(
|
413
|
+
self._client.stub.SandboxStdinWrite,
|
414
|
+
api_pb2.SandboxStdinWriteRequest(
|
415
|
+
sandbox_id=self._object_id, index=index, eof=self._is_closed, input=data
|
416
|
+
),
|
417
|
+
)
|
418
|
+
else:
|
419
|
+
await retry_transient_errors(
|
420
|
+
self._client.stub.ContainerExecPutInput,
|
421
|
+
api_pb2.ContainerExecPutInputRequest(
|
422
|
+
exec_id=self._object_id,
|
423
|
+
input=api_pb2.RuntimeInputMessage(message=data, message_index=index, eof=self._is_closed),
|
424
|
+
),
|
425
|
+
)
|
426
|
+
except GRPCError as exc:
|
427
|
+
if exc.status == Status.FAILED_PRECONDITION:
|
428
|
+
raise ValueError(exc.message)
|
429
|
+
else:
|
430
|
+
raise exc
|
431
|
+
|
432
|
+
|
433
|
+
# Public wrappers generated by synchronize_api from the private async implementations.
StreamReader = synchronize_api(_StreamReader)
StreamWriter = synchronize_api(_StreamWriter)
|
modal/io_streams.pyi
ADDED
@@ -0,0 +1,122 @@
|
|
1
|
+
import collections.abc
|
2
|
+
import modal.client
|
3
|
+
import modal.stream_type
|
4
|
+
import typing
|
5
|
+
import typing_extensions
|
6
|
+
|
7
|
+
# Type stubs for the module-level async log iterators (implementations in io_streams.py).
def _sandbox_logs_iterator(
    sandbox_id: str, file_descriptor: int, last_entry_id: str, client: modal.client._Client
) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...
def _container_process_logs_iterator(
    process_id: str, file_descriptor: int, client: modal.client._Client
) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
|
13
|
+
|
14
|
+
T = typing.TypeVar("T")
|
15
|
+
|
16
|
+
class _StreamReader(typing.Generic[T]):
    # Stub for the private async reader; see io_streams.py for behavior.
    _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]

    def __init__(
        self,
        file_descriptor: int,
        object_id: str,
        object_type: typing.Literal["sandbox", "container_process"],
        client: modal.client._Client,
        stream_type: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
        text: bool = True,
        by_line: bool = False,
    ) -> None: ...
    @property
    def file_descriptor(self) -> int: ...
    async def read(self) -> T: ...
    async def _consume_container_process_stream(self): ...
    def _stream_container_process(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...
    def _get_logs(
        self, skip_empty_messages: bool = True
    ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
    def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
    def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
    async def __anext__(self) -> T: ...
    async def aclose(self): ...
|
41
|
+
|
42
|
+
class _StreamWriter:
    # Stub for the private async writer; see io_streams.py for behavior.
    def __init__(
        self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client._Client
    ) -> None: ...
    def _get_next_index(self) -> int: ...
    def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None: ...
    def write_eof(self) -> None: ...
    async def drain(self) -> None: ...
|
50
|
+
|
51
|
+
T_INNER = typing.TypeVar("T_INNER", covariant=True)
|
52
|
+
|
53
|
+
class StreamReader(typing.Generic[T]):
    # Stub for the synchronize_api-generated public reader: each async method is
    # exposed as a blocking call plus an `.aio` variant via a Protocol descriptor.
    _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]

    def __init__(
        self,
        file_descriptor: int,
        object_id: str,
        object_type: typing.Literal["sandbox", "container_process"],
        client: modal.client.Client,
        stream_type: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
        text: bool = True,
        by_line: bool = False,
    ) -> None: ...
    @property
    def file_descriptor(self) -> int: ...

    class __read_spec(typing_extensions.Protocol[T_INNER]):
        def __call__(self) -> T_INNER: ...
        async def aio(self) -> T_INNER: ...

    read: __read_spec[T]

    class ___consume_container_process_stream_spec(typing_extensions.Protocol):
        def __call__(self): ...
        async def aio(self): ...

    _consume_container_process_stream: ___consume_container_process_stream_spec

    class ___stream_container_process_spec(typing_extensions.Protocol):
        def __call__(self) -> typing.Generator[tuple[typing.Optional[bytes], str], None, None]: ...
        def aio(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...

    _stream_container_process: ___stream_container_process_spec

    class ___get_logs_spec(typing_extensions.Protocol):
        def __call__(
            self, skip_empty_messages: bool = True
        ) -> typing.Generator[typing.Optional[bytes], None, None]: ...
        def aio(
            self, skip_empty_messages: bool = True
        ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...

    _get_logs: ___get_logs_spec

    class ___get_logs_by_line_spec(typing_extensions.Protocol):
        def __call__(self) -> typing.Generator[typing.Optional[bytes], None, None]: ...
        def aio(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...

    _get_logs_by_line: ___get_logs_by_line_spec

    def __iter__(self) -> typing.Iterator[T]: ...
    def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
    def __next__(self) -> T: ...
    async def __anext__(self) -> T: ...
    def close(self): ...
    async def aclose(self): ...
|
109
|
+
|
110
|
+
class StreamWriter:
    # Stub for the synchronize_api-generated public writer; `drain` is exposed
    # as a blocking call plus an `.aio` variant via a Protocol descriptor.
    def __init__(
        self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client.Client
    ) -> None: ...
    def _get_next_index(self) -> int: ...
    def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None: ...
    def write_eof(self) -> None: ...

    class __drain_spec(typing_extensions.Protocol):
        def __call__(self) -> None: ...
        async def aio(self) -> None: ...

    drain: __drain_spec
|