modal 0.62.16__py3-none-any.whl → 0.72.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. modal/__init__.py +17 -13
  2. modal/__main__.py +41 -3
  3. modal/_clustered_functions.py +80 -0
  4. modal/_clustered_functions.pyi +22 -0
  5. modal/_container_entrypoint.py +420 -937
  6. modal/_ipython.py +3 -13
  7. modal/_location.py +17 -10
  8. modal/_output.py +243 -99
  9. modal/_pty.py +2 -2
  10. modal/_resolver.py +55 -59
  11. modal/_resources.py +51 -0
  12. modal/_runtime/__init__.py +1 -0
  13. modal/_runtime/asgi.py +519 -0
  14. modal/_runtime/container_io_manager.py +1036 -0
  15. modal/_runtime/execution_context.py +89 -0
  16. modal/_runtime/telemetry.py +169 -0
  17. modal/_runtime/user_code_imports.py +356 -0
  18. modal/_serialization.py +134 -9
  19. modal/_traceback.py +47 -187
  20. modal/_tunnel.py +52 -16
  21. modal/_tunnel.pyi +19 -36
  22. modal/_utils/app_utils.py +3 -17
  23. modal/_utils/async_utils.py +479 -100
  24. modal/_utils/blob_utils.py +157 -186
  25. modal/_utils/bytes_io_segment_payload.py +97 -0
  26. modal/_utils/deprecation.py +89 -0
  27. modal/_utils/docker_utils.py +98 -0
  28. modal/_utils/function_utils.py +460 -171
  29. modal/_utils/grpc_testing.py +47 -31
  30. modal/_utils/grpc_utils.py +62 -109
  31. modal/_utils/hash_utils.py +61 -19
  32. modal/_utils/http_utils.py +39 -9
  33. modal/_utils/logger.py +2 -1
  34. modal/_utils/mount_utils.py +34 -16
  35. modal/_utils/name_utils.py +58 -0
  36. modal/_utils/package_utils.py +14 -1
  37. modal/_utils/pattern_utils.py +205 -0
  38. modal/_utils/rand_pb_testing.py +5 -7
  39. modal/_utils/shell_utils.py +15 -49
  40. modal/_vendor/a2wsgi_wsgi.py +62 -72
  41. modal/_vendor/cloudpickle.py +1 -1
  42. modal/_watcher.py +14 -12
  43. modal/app.py +1003 -314
  44. modal/app.pyi +540 -264
  45. modal/call_graph.py +7 -6
  46. modal/cli/_download.py +63 -53
  47. modal/cli/_traceback.py +200 -0
  48. modal/cli/app.py +205 -45
  49. modal/cli/config.py +12 -5
  50. modal/cli/container.py +62 -14
  51. modal/cli/dict.py +128 -0
  52. modal/cli/entry_point.py +26 -13
  53. modal/cli/environment.py +40 -9
  54. modal/cli/import_refs.py +64 -58
  55. modal/cli/launch.py +32 -18
  56. modal/cli/network_file_system.py +64 -83
  57. modal/cli/profile.py +1 -1
  58. modal/cli/programs/run_jupyter.py +35 -10
  59. modal/cli/programs/vscode.py +60 -10
  60. modal/cli/queues.py +131 -0
  61. modal/cli/run.py +234 -131
  62. modal/cli/secret.py +8 -7
  63. modal/cli/token.py +7 -2
  64. modal/cli/utils.py +79 -10
  65. modal/cli/volume.py +110 -109
  66. modal/client.py +250 -144
  67. modal/client.pyi +157 -118
  68. modal/cloud_bucket_mount.py +108 -34
  69. modal/cloud_bucket_mount.pyi +32 -38
  70. modal/cls.py +535 -148
  71. modal/cls.pyi +190 -146
  72. modal/config.py +41 -19
  73. modal/container_process.py +177 -0
  74. modal/container_process.pyi +82 -0
  75. modal/dict.py +111 -65
  76. modal/dict.pyi +136 -131
  77. modal/environments.py +106 -5
  78. modal/environments.pyi +77 -25
  79. modal/exception.py +34 -43
  80. modal/experimental.py +61 -2
  81. modal/extensions/ipython.py +5 -5
  82. modal/file_io.py +537 -0
  83. modal/file_io.pyi +235 -0
  84. modal/file_pattern_matcher.py +197 -0
  85. modal/functions.py +906 -911
  86. modal/functions.pyi +466 -430
  87. modal/gpu.py +57 -44
  88. modal/image.py +1089 -479
  89. modal/image.pyi +584 -228
  90. modal/io_streams.py +434 -0
  91. modal/io_streams.pyi +122 -0
  92. modal/mount.py +314 -101
  93. modal/mount.pyi +241 -235
  94. modal/network_file_system.py +92 -92
  95. modal/network_file_system.pyi +152 -110
  96. modal/object.py +67 -36
  97. modal/object.pyi +166 -143
  98. modal/output.py +63 -0
  99. modal/parallel_map.py +434 -0
  100. modal/parallel_map.pyi +75 -0
  101. modal/partial_function.py +282 -117
  102. modal/partial_function.pyi +222 -129
  103. modal/proxy.py +15 -12
  104. modal/proxy.pyi +3 -8
  105. modal/queue.py +182 -65
  106. modal/queue.pyi +218 -118
  107. modal/requirements/2024.04.txt +29 -0
  108. modal/requirements/2024.10.txt +16 -0
  109. modal/requirements/README.md +21 -0
  110. modal/requirements/base-images.json +22 -0
  111. modal/retries.py +48 -7
  112. modal/runner.py +459 -156
  113. modal/runner.pyi +135 -71
  114. modal/running_app.py +38 -0
  115. modal/sandbox.py +514 -236
  116. modal/sandbox.pyi +397 -169
  117. modal/schedule.py +4 -4
  118. modal/scheduler_placement.py +20 -3
  119. modal/secret.py +56 -31
  120. modal/secret.pyi +62 -42
  121. modal/serving.py +51 -56
  122. modal/serving.pyi +44 -36
  123. modal/stream_type.py +15 -0
  124. modal/token_flow.py +5 -3
  125. modal/token_flow.pyi +37 -32
  126. modal/volume.py +285 -157
  127. modal/volume.pyi +249 -184
  128. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/METADATA +7 -7
  129. modal-0.72.11.dist-info/RECORD +174 -0
  130. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/top_level.txt +0 -1
  131. modal_docs/gen_reference_docs.py +3 -1
  132. modal_docs/mdmd/mdmd.py +0 -1
  133. modal_docs/mdmd/signatures.py +5 -2
  134. modal_global_objects/images/base_images.py +28 -0
  135. modal_global_objects/mounts/python_standalone.py +2 -2
  136. modal_proto/__init__.py +1 -1
  137. modal_proto/api.proto +1288 -533
  138. modal_proto/api_grpc.py +856 -456
  139. modal_proto/api_pb2.py +2165 -1157
  140. modal_proto/api_pb2.pyi +8859 -0
  141. modal_proto/api_pb2_grpc.py +1674 -855
  142. modal_proto/api_pb2_grpc.pyi +1416 -0
  143. modal_proto/modal_api_grpc.py +149 -0
  144. modal_proto/modal_options_grpc.py +3 -0
  145. modal_proto/options_pb2.pyi +20 -0
  146. modal_proto/options_pb2_grpc.pyi +7 -0
  147. modal_proto/py.typed +0 -0
  148. modal_version/__init__.py +1 -1
  149. modal_version/_version_generated.py +2 -2
  150. modal/_asgi.py +0 -370
  151. modal/_container_entrypoint.pyi +0 -378
  152. modal/_container_exec.py +0 -128
  153. modal/_sandbox_shell.py +0 -49
  154. modal/shared_volume.py +0 -23
  155. modal/shared_volume.pyi +0 -24
  156. modal/stub.py +0 -783
  157. modal/stub.pyi +0 -332
  158. modal-0.62.16.dist-info/RECORD +0 -198
  159. modal_global_objects/images/conda.py +0 -15
  160. modal_global_objects/images/debian_slim.py +0 -15
  161. modal_global_objects/images/micromamba.py +0 -15
  162. test/__init__.py +0 -1
  163. test/aio_test.py +0 -12
  164. test/async_utils_test.py +0 -262
  165. test/blob_test.py +0 -67
  166. test/cli_imports_test.py +0 -149
  167. test/cli_test.py +0 -659
  168. test/client_test.py +0 -194
  169. test/cls_test.py +0 -630
  170. test/config_test.py +0 -137
  171. test/conftest.py +0 -1420
  172. test/container_app_test.py +0 -32
  173. test/container_test.py +0 -1389
  174. test/cpu_test.py +0 -23
  175. test/decorator_test.py +0 -85
  176. test/deprecation_test.py +0 -34
  177. test/dict_test.py +0 -33
  178. test/e2e_test.py +0 -68
  179. test/error_test.py +0 -7
  180. test/function_serialization_test.py +0 -32
  181. test/function_test.py +0 -653
  182. test/function_utils_test.py +0 -101
  183. test/gpu_test.py +0 -159
  184. test/grpc_utils_test.py +0 -141
  185. test/helpers.py +0 -42
  186. test/image_test.py +0 -669
  187. test/live_reload_test.py +0 -80
  188. test/lookup_test.py +0 -70
  189. test/mdmd_test.py +0 -329
  190. test/mount_test.py +0 -162
  191. test/mounted_files_test.py +0 -329
  192. test/network_file_system_test.py +0 -181
  193. test/notebook_test.py +0 -66
  194. test/object_test.py +0 -41
  195. test/package_utils_test.py +0 -25
  196. test/queue_test.py +0 -97
  197. test/resolver_test.py +0 -58
  198. test/retries_test.py +0 -67
  199. test/runner_test.py +0 -85
  200. test/sandbox_test.py +0 -191
  201. test/schedule_test.py +0 -15
  202. test/scheduler_placement_test.py +0 -29
  203. test/secret_test.py +0 -78
  204. test/serialization_test.py +0 -42
  205. test/stub_composition_test.py +0 -10
  206. test/stub_test.py +0 -360
  207. test/test_asgi_wrapper.py +0 -234
  208. test/token_flow_test.py +0 -18
  209. test/traceback_test.py +0 -135
  210. test/tunnel_test.py +0 -29
  211. test/utils_test.py +0 -88
  212. test/version_test.py +0 -14
  213. test/volume_test.py +0 -341
  214. test/watcher_test.py +0 -30
  215. test/webhook_test.py +0 -146
  216. /modal/{requirements.312.txt → requirements/2023.12.312.txt} +0 -0
  217. /modal/{requirements.txt → requirements/2023.12.txt} +0 -0
  218. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/LICENSE +0 -0
  219. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/WHEEL +0 -0
  220. {modal-0.62.16.dist-info → modal-0.72.11.dist-info}/entry_points.txt +0 -0
modal/io_streams.py ADDED
@@ -0,0 +1,434 @@
+ # Copyright Modal Labs 2022
+ import asyncio
+ from collections.abc import AsyncGenerator, AsyncIterator
+ from typing import (
+     TYPE_CHECKING,
+     Generic,
+     Literal,
+     Optional,
+     TypeVar,
+     Union,
+     cast,
+ )
+
+ from grpclib import Status
+ from grpclib.exceptions import GRPCError, StreamTerminatedError
+
+ from modal.exception import ClientClosed, InvalidError
+ from modal_proto import api_pb2
+
+ from ._utils.async_utils import synchronize_api
+ from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
+ from .client import _Client
+ from .stream_type import StreamType
+
+ if TYPE_CHECKING:
+     pass
+
+
+ async def _sandbox_logs_iterator(
+     sandbox_id: str, file_descriptor: "api_pb2.FileDescriptor.ValueType", last_entry_id: str, client: _Client
+ ) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
+     req = api_pb2.SandboxGetLogsRequest(
+         sandbox_id=sandbox_id,
+         file_descriptor=file_descriptor,
+         timeout=55,
+         last_entry_id=last_entry_id,
+     )
+     async for log_batch in client.stub.SandboxGetLogs.unary_stream(req):
+         last_entry_id = log_batch.entry_id
+
+         for message in log_batch.items:
+             yield (message.data.encode("utf-8"), last_entry_id)
+         if log_batch.eof:
+             yield (None, last_entry_id)
+             break
+
+
+ async def _container_process_logs_iterator(
+     process_id: str, file_descriptor: "api_pb2.FileDescriptor.ValueType", client: _Client
+ ) -> AsyncGenerator[Optional[bytes], None]:
+     req = api_pb2.ContainerExecGetOutputRequest(
+         exec_id=process_id, timeout=55, file_descriptor=file_descriptor, get_raw_bytes=True
+     )
+     async for batch in client.stub.ContainerExecGetOutput.unary_stream(req):
+         if batch.HasField("exit_code"):
+             yield None
+             break
+         for item in batch.items:
+             yield item.message_bytes
+
+
+ T = TypeVar("T", str, bytes)
+
+
+ class _StreamReader(Generic[T]):
+     """Retrieve logs from a stream (`stdout` or `stderr`).
+
+     As an asynchronous iterable, the object supports the `for` and `async for`
+     statements. Just loop over the object to read in chunks.
+
+     **Usage**
+
+     ```python
+     from modal import Sandbox
+
+     sandbox = Sandbox.create(
+         "bash",
+         "-c",
+         "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
+         app=app,
+     )
+     for message in sandbox.stdout:
+         print(f"Message: {message}")
+     ```
+     """
+
+     _stream: Optional[AsyncGenerator[Optional[bytes], None]]
+
+     def __init__(
+         self,
+         file_descriptor: "api_pb2.FileDescriptor.ValueType",
+         object_id: str,
+         object_type: Literal["sandbox", "container_process"],
+         client: _Client,
+         stream_type: StreamType = StreamType.PIPE,
+         text: bool = True,
+         by_line: bool = False,
+     ) -> None:
+         """mdmd:hidden"""
+         self._file_descriptor = file_descriptor
+         self._object_type = object_type
+         self._object_id = object_id
+         self._client = client
+         self._stream = None
+         self._last_entry_id: str = ""
+         self._line_buffer = b""
+
+         # Sandbox logs are streamed to the client as strings, so StreamReaders reading
+         # them must have text mode enabled.
+         if object_type == "sandbox" and not text:
+             raise ValueError("Sandbox streams must have text mode enabled.")
+
+         # line-buffering is only supported when text=True
+         if by_line and not text:
+             raise ValueError("line-buffering is only supported when text=True")
+
+         self._text = text
+         self._by_line = by_line
+
+         # Whether the reader received an EOF. Once EOF is True, it returns
+         # an empty string for any subsequent reads (including async for)
+         self.eof = False
+
+         if not isinstance(stream_type, StreamType):
+             raise TypeError(f"stream_type must be of type StreamType, got {type(stream_type)}")
+
+         # We only support piping sandbox logs because they're meant to be durable logs stored
+         # on the user's application.
+         if object_type == "sandbox" and stream_type != StreamType.PIPE:
+             raise ValueError("Sandbox streams must be piped.")
+         self._stream_type = stream_type
+
+         if self._object_type == "container_process":
+             # Container process streams need to be consumed as they are produced,
+             # otherwise the process will block. Use a buffer to store the stream
+             # until the client consumes it.
+             self._container_process_buffer: list[Optional[bytes]] = []
+             self._consume_container_process_task = asyncio.create_task(self._consume_container_process_stream())
+
+     @property
+     def file_descriptor(self) -> int:
+         """Possible values are `1` for stdout and `2` for stderr."""
+         return self._file_descriptor
+
+     async def read(self) -> T:
+         """Fetch the entire contents of the stream until EOF.
+
+         **Usage**
+
+         ```python
+         from modal import Sandbox
+
+         sandbox = Sandbox.create("echo", "hello", app=app)
+         sandbox.wait()
+
+         print(sandbox.stdout.read())
+         ```
+         """
+         data_str = ""
+         data_bytes = b""
+         async for message in self._get_logs():
+             if message is None:
+                 break
+             if self._text:
+                 data_str += message.decode("utf-8")
+             else:
+                 data_bytes += message
+
+         if self._text:
+             return cast(T, data_str)
+         else:
+             return cast(T, data_bytes)
+
+     async def _consume_container_process_stream(self):
+         """Consume the container process stream and store messages in the buffer."""
+         if self._stream_type == StreamType.DEVNULL:
+             return
+
+         completed = False
+         retries_remaining = 10
+         while not completed:
+             try:
+                 iterator = _container_process_logs_iterator(self._object_id, self._file_descriptor, self._client)
+
+                 async for message in iterator:
+                     if self._stream_type == StreamType.STDOUT and message:
+                         print(message.decode("utf-8"), end="")
+                     elif self._stream_type == StreamType.PIPE:
+                         self._container_process_buffer.append(message)
+                     if message is None:
+                         completed = True
+                         break
+
+             except (GRPCError, StreamTerminatedError, ClientClosed) as exc:
+                 if retries_remaining > 0:
+                     retries_remaining -= 1
+                     if isinstance(exc, GRPCError):
+                         if exc.status in RETRYABLE_GRPC_STATUS_CODES:
+                             await asyncio.sleep(1.0)
+                             continue
+                     elif isinstance(exc, StreamTerminatedError):
+                         continue
+                     elif isinstance(exc, ClientClosed):
+                         # If the client was closed, the user has triggered a cleanup.
+                         break
+                 raise exc
+
+     async def _stream_container_process(self) -> AsyncGenerator[tuple[Optional[bytes], str], None]:
+         """Streams the container process buffer to the reader."""
+         entry_id = 0
+         if self._last_entry_id:
+             entry_id = int(self._last_entry_id) + 1
+
+         while True:
+             if entry_id >= len(self._container_process_buffer):
+                 await asyncio.sleep(0.1)
+                 continue
+
+             item = self._container_process_buffer[entry_id]
+
+             yield (item, str(entry_id))
+             if item is None:
+                 break
+
+             entry_id += 1
+
+     async def _get_logs(self, skip_empty_messages: bool = True) -> AsyncGenerator[Optional[bytes], None]:
+         """Streams sandbox or process logs from the server to the reader.
+
+         Logs returned by this method may contain partial or multiple lines at a time.
+
+         When the stream receives an EOF, it yields None. Once an EOF is received,
+         subsequent invocations will not yield logs.
+         """
+         if self._stream_type != StreamType.PIPE:
+             raise InvalidError("Logs can only be retrieved using the PIPE stream type.")
+
+         if self.eof:
+             yield None
+             return
+
+         completed = False
+
+         retries_remaining = 10
+         while not completed:
+             try:
+                 if self._object_type == "sandbox":
+                     iterator = _sandbox_logs_iterator(
+                         self._object_id, self._file_descriptor, self._last_entry_id, self._client
+                     )
+                 else:
+                     iterator = self._stream_container_process()
+
+                 async for message, entry_id in iterator:
+                     self._last_entry_id = entry_id
+                     # Empty messages are sent when the process boots up. Don't yield them unless
+                     # we're using the empty message to signal process liveness.
+                     if skip_empty_messages and message == b"":
+                         continue
+
+                     yield message
+                     if message is None:
+                         completed = True
+                         self.eof = True
+
+             except (GRPCError, StreamTerminatedError) as exc:
+                 if retries_remaining > 0:
+                     retries_remaining -= 1
+                     if isinstance(exc, GRPCError):
+                         if exc.status in RETRYABLE_GRPC_STATUS_CODES:
+                             await asyncio.sleep(1.0)
+                             continue
+                     elif isinstance(exc, StreamTerminatedError):
+                         continue
+                 raise
+
+     async def _get_logs_by_line(self) -> AsyncGenerator[Optional[bytes], None]:
+         """Process logs from the server and yield complete lines only."""
+         async for message in self._get_logs():
+             if message is None:
+                 if self._line_buffer:
+                     yield self._line_buffer
+                     self._line_buffer = b""
+                 yield None
+             else:
+                 assert isinstance(message, bytes)
+                 self._line_buffer += message
+                 while b"\n" in self._line_buffer:
+                     line, self._line_buffer = self._line_buffer.split(b"\n", 1)
+                     yield line + b"\n"
+
+     def __aiter__(self) -> AsyncIterator[T]:
+         """mdmd:hidden"""
+         if not self._stream:
+             if self._by_line:
+                 self._stream = self._get_logs_by_line()
+             else:
+                 self._stream = self._get_logs()
+         return self
+
+     async def __anext__(self) -> T:
+         """mdmd:hidden"""
+         assert self._stream is not None
+
+         value = await self._stream.__anext__()
+
+         # The stream yields None if it receives an EOF batch.
+         if value is None:
+             raise StopAsyncIteration
+
+         if self._text:
+             return cast(T, value.decode("utf-8"))
+         else:
+             return cast(T, value)
+
+     async def aclose(self):
+         """mdmd:hidden"""
+         if self._stream:
+             await self._stream.aclose()
+
+
+ MAX_BUFFER_SIZE = 2 * 1024 * 1024
+
+
+ class _StreamWriter:
+     """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
+
+     def __init__(self, object_id: str, object_type: Literal["sandbox", "container_process"], client: _Client) -> None:
+         """mdmd:hidden"""
+         self._index = 1
+         self._object_id = object_id
+         self._object_type = object_type
+         self._client = client
+         self._is_closed = False
+         self._buffer = bytearray()
+
+     def _get_next_index(self) -> int:
+         index = self._index
+         self._index += 1
+         return index
+
+     def write(self, data: Union[bytes, bytearray, memoryview, str]) -> None:
+         """Write data to the stream but does not send it immediately.
+
+         This is non-blocking and queues the data to an internal buffer. Must be
+         used along with the `drain()` method, which flushes the buffer.
+
+         **Usage**
+
+         ```python
+         from modal import Sandbox
+
+         sandbox = Sandbox.create(
+             "bash",
+             "-c",
+             "while read line; do echo $line; done",
+             app=app,
+         )
+         sandbox.stdin.write(b"foo\\n")
+         sandbox.stdin.write(b"bar\\n")
+         sandbox.stdin.write_eof()
+
+         sandbox.stdin.drain()
+         sandbox.wait()
+         ```
+         """
+         if self._is_closed:
+             raise ValueError("Stdin is closed. Cannot write to it.")
+         if isinstance(data, (bytes, bytearray, memoryview, str)):
+             if isinstance(data, str):
+                 data = data.encode("utf-8")
+             if len(self._buffer) + len(data) > MAX_BUFFER_SIZE:
+                 raise BufferError("Buffer size exceed limit. Call drain to clear the buffer.")
+             self._buffer.extend(data)
+         else:
+             raise TypeError(f"data argument must be a bytes-like object, not {type(data).__name__}")
+
+     def write_eof(self) -> None:
+         """Close the write end of the stream after the buffered data is drained.
+
+         If the process was blocked on input, it will become unblocked after
+         `write_eof()`. This method needs to be used along with the `drain()`
+         method, which flushes the EOF to the process.
+         """
+         self._is_closed = True
+
+     async def drain(self) -> None:
+         """Flush the write buffer and send data to the running process.
+
+         This is a flow control method that blocks until data is sent. It returns
+         when it is appropriate to continue writing data to the stream.
+
+         **Usage**
+
+         ```python notest
+         writer.write(data)
+         writer.drain()
+         ```
+
+         Async usage:
+         ```python notest
+         writer.write(data)  # not a blocking operation
+         await writer.drain.aio()
+         ```
+         """
+         data = bytes(self._buffer)
+         self._buffer.clear()
+         index = self._get_next_index()
+
+         try:
+             if self._object_type == "sandbox":
+                 await retry_transient_errors(
+                     self._client.stub.SandboxStdinWrite,
+                     api_pb2.SandboxStdinWriteRequest(
+                         sandbox_id=self._object_id, index=index, eof=self._is_closed, input=data
+                     ),
+                 )
+             else:
+                 await retry_transient_errors(
+                     self._client.stub.ContainerExecPutInput,
+                     api_pb2.ContainerExecPutInputRequest(
+                         exec_id=self._object_id,
+                         input=api_pb2.RuntimeInputMessage(message=data, message_index=index, eof=self._is_closed),
+                     ),
+                 )
+         except GRPCError as exc:
+             if exc.status == Status.FAILED_PRECONDITION:
+                 raise ValueError(exc.message)
+             else:
+                 raise exc
+
+
+ StreamReader = synchronize_api(_StreamReader)
+ StreamWriter = synchronize_api(_StreamWriter)
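
Taken together, the docstrings in the new `io_streams.py` describe a pipe-style interface on sandboxes: `sandbox.stdin` is a `StreamWriter` that buffers writes until `drain()`, and `sandbox.stdout` / `sandbox.stderr` are iterable `StreamReader`s. A minimal sketch combining the documented calls, assuming an existing `modal.App` object bound to the name `app` (it is not created in this snippet):

```python
# Sketch only: combines the StreamWriter and StreamReader usage shown in the
# docstrings above. `app` is assumed to be an existing modal.App object.
from modal import Sandbox

sandbox = Sandbox.create(
    "bash",
    "-c",
    "while read line; do echo $line; done",
    app=app,  # assumed to exist
)

# Writes are buffered; write_eof() marks the stream closed, drain() flushes both.
sandbox.stdin.write(b"foo\n")
sandbox.stdin.write(b"bar\n")
sandbox.stdin.write_eof()
sandbox.stdin.drain()

# StreamReader defaults to text mode, so iteration yields decoded strings.
for chunk in sandbox.stdout:
    print(chunk, end="")

sandbox.wait()
```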
modal/io_streams.pyi ADDED
@@ -0,0 +1,122 @@
+ import collections.abc
+ import modal.client
+ import modal.stream_type
+ import typing
+ import typing_extensions
+
+ def _sandbox_logs_iterator(
+     sandbox_id: str, file_descriptor: int, last_entry_id: str, client: modal.client._Client
+ ) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...
+ def _container_process_logs_iterator(
+     process_id: str, file_descriptor: int, client: modal.client._Client
+ ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+
+ T = typing.TypeVar("T")
+
+ class _StreamReader(typing.Generic[T]):
+     _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
+
+     def __init__(
+         self,
+         file_descriptor: int,
+         object_id: str,
+         object_type: typing.Literal["sandbox", "container_process"],
+         client: modal.client._Client,
+         stream_type: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+         text: bool = True,
+         by_line: bool = False,
+     ) -> None: ...
+     @property
+     def file_descriptor(self) -> int: ...
+     async def read(self) -> T: ...
+     async def _consume_container_process_stream(self): ...
+     def _stream_container_process(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...
+     def _get_logs(
+         self, skip_empty_messages: bool = True
+     ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+     def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+     def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
+     async def __anext__(self) -> T: ...
+     async def aclose(self): ...
+
+ class _StreamWriter:
+     def __init__(
+         self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client._Client
+     ) -> None: ...
+     def _get_next_index(self) -> int: ...
+     def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None: ...
+     def write_eof(self) -> None: ...
+     async def drain(self) -> None: ...
+
+ T_INNER = typing.TypeVar("T_INNER", covariant=True)
+
+ class StreamReader(typing.Generic[T]):
+     _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
+
+     def __init__(
+         self,
+         file_descriptor: int,
+         object_id: str,
+         object_type: typing.Literal["sandbox", "container_process"],
+         client: modal.client.Client,
+         stream_type: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
+         text: bool = True,
+         by_line: bool = False,
+     ) -> None: ...
+     @property
+     def file_descriptor(self) -> int: ...
+
+     class __read_spec(typing_extensions.Protocol[T_INNER]):
+         def __call__(self) -> T_INNER: ...
+         async def aio(self) -> T_INNER: ...
+
+     read: __read_spec[T]
+
+     class ___consume_container_process_stream_spec(typing_extensions.Protocol):
+         def __call__(self): ...
+         async def aio(self): ...
+
+     _consume_container_process_stream: ___consume_container_process_stream_spec
+
+     class ___stream_container_process_spec(typing_extensions.Protocol):
+         def __call__(self) -> typing.Generator[tuple[typing.Optional[bytes], str], None, None]: ...
+         def aio(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]: ...
+
+     _stream_container_process: ___stream_container_process_spec
+
+     class ___get_logs_spec(typing_extensions.Protocol):
+         def __call__(
+             self, skip_empty_messages: bool = True
+         ) -> typing.Generator[typing.Optional[bytes], None, None]: ...
+         def aio(
+             self, skip_empty_messages: bool = True
+         ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+
+     _get_logs: ___get_logs_spec
+
+     class ___get_logs_by_line_spec(typing_extensions.Protocol):
+         def __call__(self) -> typing.Generator[typing.Optional[bytes], None, None]: ...
+         def aio(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
+
+     _get_logs_by_line: ___get_logs_by_line_spec
+
+     def __iter__(self) -> typing.Iterator[T]: ...
+     def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
+     def __next__(self) -> T: ...
+     async def __anext__(self) -> T: ...
+     def close(self): ...
+     async def aclose(self): ...
+
+ class StreamWriter:
+     def __init__(
+         self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client.Client
+     ) -> None: ...
+     def _get_next_index(self) -> int: ...
+     def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None: ...
+     def write_eof(self) -> None: ...
+
+     class __drain_spec(typing_extensions.Protocol):
+         def __call__(self) -> None: ...
+         async def aio(self) -> None: ...
+
+     drain: __drain_spec
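
The generated stub also shows the dual calling convention produced by `synchronize_api`: each wrapped coroutine is exposed both as a blocking call and, via the protocol classes (`__read_spec`, `__drain_spec`), as an awaitable `.aio(...)` variant. A hedged sketch of the async form, again assuming an existing `app` object and assuming the same `.aio` spelling applies to `Sandbox.create` and `Sandbox.wait` as it does to the stream methods here:

```python
# Sketch only: the async calling convention implied by the .pyi protocols above.
import asyncio

from modal import Sandbox


async def main() -> None:
    # `.aio` spellings below are assumptions based on the stub's protocol classes.
    sandbox = await Sandbox.create.aio("echo", "hello", app=app)  # `app` assumed to exist
    await sandbox.wait.aio()

    # Async counterpart of StreamReader.read(), per __read_spec.aio().
    output: str = await sandbox.stdout.read.aio()
    print(output, end="")


asyncio.run(main())
```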